diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 000000000..6facf0d14 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,30 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +trim_trailing_whitespace = true +insert_final_newline = true + +[*.{kt,kts,java}] +indent_style = space +indent_size = 4 + +[*.xml] +indent_style = space +indent_size = 4 + +[*.{gradle,gradle.kts}] +indent_style = space +indent_size = 4 + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false + +[*.json] +indent_style = space +indent_size = 2 diff --git a/.github/DISCUSSION_TEMPLATE/ideas.yml b/.github/DISCUSSION_TEMPLATE/ideas.yml new file mode 100644 index 000000000..28a3c259b --- /dev/null +++ b/.github/DISCUSSION_TEMPLATE/ideas.yml @@ -0,0 +1,39 @@ +title: "[Idea] " +labels: ["enhancement", "idea"] +body: + - type: markdown + attributes: + value: | + Suggest a new feature or improvement. We love hearing what the community needs. + + - type: textarea + id: idea + attributes: + label: Describe the idea + description: What should SceneView be able to do? + validations: + required: true + + - type: textarea + id: use-case + attributes: + label: Use case + description: What problem does this solve? What are you building? + validations: + required: true + + - type: textarea + id: alternatives + attributes: + label: Alternatives considered + description: Other approaches or workarounds you've considered. + + - type: dropdown + id: module + attributes: + label: Module + options: + - sceneview (3D) + - arsceneview (AR) + - New module + - Not sure diff --git a/.github/DISCUSSION_TEMPLATE/q-a.yml b/.github/DISCUSSION_TEMPLATE/q-a.yml new file mode 100644 index 000000000..34fe4c2ee --- /dev/null +++ b/.github/DISCUSSION_TEMPLATE/q-a.yml @@ -0,0 +1,41 @@ +title: "[Q&A] " +labels: ["question"] +body: + - type: markdown + attributes: + value: | + Ask a question about SceneView. Please search existing discussions first. 
+ + - type: textarea + id: question + attributes: + label: Question + description: What do you need help with? + placeholder: "How do I load a model from a URL instead of assets?" + validations: + required: true + + - type: textarea + id: context + attributes: + label: What I've tried + description: Code you've tried, error messages, or approaches that didn't work. + render: kotlin + + - type: dropdown + id: module + attributes: + label: Module + options: + - sceneview (3D) + - arsceneview (AR) + - Both + - Not sure + validations: + required: true + + - type: input + id: version + attributes: + label: SceneView version + placeholder: "3.2.0" diff --git a/.github/DISCUSSION_TEMPLATE/show-and-tell.yml b/.github/DISCUSSION_TEMPLATE/show-and-tell.yml new file mode 100644 index 000000000..f4d70c0d7 --- /dev/null +++ b/.github/DISCUSSION_TEMPLATE/show-and-tell.yml @@ -0,0 +1,34 @@ +title: "[Show & Tell] " +labels: ["showcase"] +body: + - type: markdown + attributes: + value: | + Share what you've built with SceneView! Screenshots, videos, and links welcome. + + - type: textarea + id: project + attributes: + label: What did you build? + description: Describe your project and how SceneView is used. + validations: + required: true + + - type: textarea + id: screenshots + attributes: + label: Screenshots or video + description: Drag and drop images or paste links to videos. + + - type: textarea + id: code + attributes: + label: Interesting code snippets (optional) + description: Any SceneView patterns you're proud of or found useful. + render: kotlin + + - type: input + id: link + attributes: + label: Play Store / GitHub link (optional) + placeholder: "https://play.google.com/store/apps/details?id=..." 
diff --git a/.github/workflows/docs-on-release.yml b/.github/workflows/docs-on-release.yml new file mode 100644 index 000000000..e881114cf --- /dev/null +++ b/.github/workflows/docs-on-release.yml @@ -0,0 +1,33 @@ +name: Update docs on release + +# Automatically rebuilds and deploys the docs site when a new release is published +# or when key API/marketing files change. This ensures the website always reflects +# the latest state of the SDK. + +on: + release: + types: [published] + push: + branches: [main] + paths: + - 'llms.txt' + - 'sceneview/Module.md' + - 'arsceneview/Module.md' + - 'ROADMAP.md' + +jobs: + trigger-docs-deploy: + name: Trigger docs rebuild + runs-on: ubuntu-latest + steps: + - name: Trigger docs workflow + uses: actions/github-script@v7 + with: + script: | + await github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'docs.yml', + ref: 'main' + }); + console.log('Triggered docs rebuild'); diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index bae57484a..6fd235f08 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -5,9 +5,12 @@ on: branches: [main] paths: - 'docs/**' - - 'marketing/codelabs/**' + - 'marketing/**' - 'CHANGELOG.md' - 'MIGRATION.md' + - 'CONTRIBUTING.md' + - 'ROADMAP.md' + - 'llms.txt' workflow_dispatch: jobs: @@ -25,10 +28,14 @@ jobs: - name: Sync content into docs run: | + # Codelabs from marketing cp marketing/codelabs/codelab-3d-compose.md docs/docs/codelabs/codelab-3d-compose.md cp marketing/codelabs/codelab-ar-compose.md docs/docs/codelabs/codelab-ar-compose.md + + # Root docs cp MIGRATION.md docs/docs/migration.md cp CHANGELOG.md docs/docs/changelog.md + cp CONTRIBUTING.md docs/docs/contributing.md - name: Build site run: mkdocs build --config-file docs/mkdocs.yml --site-dir site diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..8b086de4e --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ 
-0,0 +1,62 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in the +SceneView community a harassment-free experience for everyone, regardless of +age, body size, visible or invisible disability, ethnicity, sex characteristics, +gender identity and expression, level of experience, education, socio-economic +status, nationality, personal appearance, race, caste, color, religion, or +sexual identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior: + +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +**thomas@sceneview.com**. + +All complaints will be reviewed and investigated promptly and fairly. 
All +community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000..e7641d2e0 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,33 @@ +# Security Policy + +## Supported Versions + +| Version | Support Level | +| ------- | ---------------------- | +| 3.2.x | Fully supported | +| 3.1.x | Security fixes only | +| < 3.1 | Not supported | + +## Reporting a Vulnerability + +If you discover a security vulnerability in SceneView, please report it responsibly by emailing **security@sceneview.com**. Do not open a public GitHub issue for security vulnerabilities. + +When reporting, please include: + +- A description of the vulnerability and its potential impact +- Steps to reproduce the issue +- Affected version(s) +- Any suggested mitigation or fix, if available + +You can expect an initial response within **48 hours** of your report. We will work with you to understand and validate the issue, and will provide updates on our progress toward a fix. + +## Disclosure Policy + +SceneView follows a **90-day coordinated disclosure** policy. After a vulnerability is reported: + +1. We will confirm receipt within 48 hours. +2. We will investigate and work on a fix within the 90-day disclosure window. +3. Once a fix is available, we will release a patched version and publish a security advisory. +4. The reporter may publicly disclose the vulnerability after the fix has been released or after the 90-day window has elapsed, whichever comes first. 
+ +We appreciate the efforts of security researchers and community members who help keep SceneView and its users safe. diff --git a/docs/docs/ai-development.md b/docs/docs/ai-development.md new file mode 100644 index 000000000..344689461 --- /dev/null +++ b/docs/docs/ai-development.md @@ -0,0 +1,132 @@ +# AI-Assisted Development + +SceneView is the first 3D/AR library designed for AI-assisted development. Every API is documented in a machine-readable format that AI tools understand natively. + +--- + +## Why this matters + +When you ask an AI to help you build a 3D scene, it needs to know the exact API — function names, parameter types, threading rules, common patterns. Most 3D libraries have large, complex APIs that AI tools hallucinate about. + +SceneView solves this with three layers: + +1. **`llms.txt`** — a machine-readable API reference at the repo root +2. **`@sceneview/mcp`** — an MCP server that gives AI tools full API context +3. **Claude Code skills** — guided workflows for contributing, reviewing, and documenting + +--- + +## For app developers + +### Use with Claude Code + +Install [Claude Code](https://claude.ai/code), then in your project: + +```bash +# Add SceneView MCP server to your project +echo '{ + "mcpServers": { + "sceneview": { "command": "npx", "args": ["-y", "@sceneview/mcp"] } + } +}' > .claude/mcp.json +``` + +Now Claude has the full SceneView API. Ask it to: + +- "Add a 3D model viewer to my product detail screen" +- "Add AR tap-to-place with pinch-to-scale" +- "Add a dynamic sky with fog that changes based on a slider" +- "Show a loading indicator while the model loads" + +The AI will generate correct SceneView code — no hallucinated methods, no outdated patterns. + +### Use with Cursor / Windsurf / other editors + +Copy `llms.txt` from the SceneView repo into your project root, or add the MCP server to your editor's MCP config. The AI tools will pick it up automatically. 
+ +### Use with ChatGPT / Claude web + +Paste the contents of [`llms.txt`](https://github.com/SceneView/sceneview-android/blob/main/llms.txt) into your conversation, then ask your question. The AI will use the correct API. + +--- + +## For SceneView contributors + +### Slash commands + +Inside the SceneView repo with Claude Code: + +| Command | What it does | +|---|---| +| `/contribute` | Full guided workflow — understand the codebase, make changes, prepare a PR | +| `/review` | Check threading rules, Compose API patterns, Kotlin style, module boundaries | +| `/document` | Generate/update KDoc for changed public APIs, update `llms.txt` | +| `/test` | Audit test coverage and generate missing tests | + +### Example workflow + +```bash +cd sceneview-android +claude + +# Then in Claude Code: +> /contribute +# Claude walks you through understanding the codebase, +# making changes, running checks, and preparing a PR. +``` + +--- + +## What's in `llms.txt` + +A 500-line, machine-readable API reference covering: + +- All composable signatures with parameter types and defaults +- Code examples for every node type +- Threading rules and common pitfalls +- Resource loading patterns +- Gesture and interaction APIs +- Math types and coordinate system +- AR-specific APIs (anchors, image tracking, face mesh, cloud anchors) + +The file is maintained alongside the source code and updated with every release. 
+ +--- + +## What's in the MCP server + +The `@sceneview/mcp` package provides tools that AI assistants can call: + +- **`get_api_reference`** — returns the full `llms.txt` content +- **`get_node_reference`** — look up a specific node type's API +- **`get_sample_code`** — get working example code for a use case +- **`get_threading_rules`** — threading and lifecycle rules + +### Setup + +```json +{ + "mcpServers": { + "sceneview": { + "command": "npx", + "args": ["-y", "@sceneview/mcp"] + } + } +} +``` + +Works with Claude Code, Claude Desktop, Cursor, Windsurf, and any MCP-compatible tool. + +--- + +## Why no other 3D library has this + +| Library | AI support | +|---|---| +| **SceneView** | `llms.txt` + MCP server + Claude Code skills | +| Unity | Generic docs, frequent hallucinations on API | +| Sceneform | Archived, AI trained on outdated code | +| Raw ARCore | Low-level API, AI struggles with GL/Vulkan boilerplate | +| Rajawali | Minimal docs, AI has no training data | + +SceneView's AI tooling means faster development, fewer bugs, and correct code on the first try. This is a competitive advantage that compounds — the more developers use AI tools, the more SceneView's AI-first approach matters. diff --git a/docs/docs/architecture.md b/docs/docs/architecture.md new file mode 100644 index 000000000..d4d6b8462 --- /dev/null +++ b/docs/docs/architecture.md @@ -0,0 +1,308 @@ +# Architecture + +How SceneView turns Jetpack Compose composables into real-time 3D and AR experiences, +from your Kotlin code all the way down to the GPU. + +--- + +## The layer cake + +SceneView is a stack of five layers. Each layer only talks to the one directly below it, +keeping responsibilities clean and dependencies one-directional. 
+ +``` + ┌──────────────────────────────────────────────────┐ + │ Your Android App (Kotlin/Compose) │ + ├──────────────────────────────────────────────────┤ + │ SceneView Composables (Scene, ARScene, nodes) │ + ├──────────────────────────────────────────────────┤ + │ SceneNodeManager (Compose ↔ Filament bridge) │ + ├──────────────────────────────────────────────────┤ + │ Google Filament (PBR rendering, JNI) │ + ├──────────────────────────────────────────────────┤ + │ ARCore (motion tracking, plane detection) │ + │ ↑ only present in arsceneview │ + └──────────────────────────────────────────────────┘ +``` + +**From top to bottom:** + +| Layer | Role | +|---|---| +| **App** | Your composables, state, and business logic. You call `Scene { }` or `ARScene { }` and declare nodes. | +| **SceneView composables** | `Scene`, `ARScene`, `SceneScope`, `ARSceneScope`, and every node type (`ModelNode`, `LightNode`, `CubeNode`, etc.). These are `@Composable` functions that translate Compose state into scene-graph operations. | +| **SceneNodeManager** | An internal class that bridges the Compose snapshot world and the Filament scene graph. It adds/removes Filament entities as nodes enter and leave the Compose tree. | +| **Google Filament** | The C++ physically-based rendering engine, accessed through JNI. Owns the `Engine`, `Scene`, `View`, `Renderer`, and all GPU resources. | +| **ARCore** | Google's AR SDK. Provides camera pose, plane detection, anchors, image tracking, and light estimation. Only linked by the `arsceneview` module. | + +--- + +## Compose to Filament bridge + +The central challenge SceneView solves is keeping Compose's reactive, declarative model +in sync with Filament's imperative, mutable scene graph. Three mechanisms make this work. + +### 1. 
Node enter/exit via `DisposableEffect` + +Every node composable in `SceneScope` ends with a call to `NodeLifecycle`: + +```kotlin +// Simplified from SceneScope.kt +@Composable +fun NodeLifecycle(node: Node, content: (@Composable NodeScope.() -> Unit)?) { + DisposableEffect(node) { + attach(node) // adds to SnapshotStateList → triggers SceneNodeManager.addNode() + onDispose { + detach(node) // removes from list → triggers SceneNodeManager.removeNode() + node.destroy() // releases Filament entity + components + } + } + // child nodes compose inside a NodeScope tied to this parent + if (content != null) { + NodeScope(parentNode = node, scope = this).content() + } +} +``` + +When a node composable enters the Compose tree, `DisposableEffect` fires and the node is +attached to a `SnapshotStateList`. A `LaunchedEffect` in the `Scene` composable collects +changes to that list via `snapshotFlow` and calls `SceneNodeManager.addNode()` / +`removeNode()` to insert or remove entities from the Filament `Scene`. + +When the composable leaves the tree, `onDispose` detaches the node synchronously (so the +Filament entity is gone before `node.destroy()` releases its material/mesh resources) and +then destroys it. + +### 2. Property updates via `SideEffect` + +Position, rotation, scale, visibility, and other node properties are pushed to the Filament +entity inside a `SideEffect` block that runs after every recomposition: + +```kotlin +// From SceneScope.Node() +val node = remember(engine) { Node(engine = engine).apply(apply) } +SideEffect { + node.position = position + node.rotation = rotation + node.scale = scale + node.isVisible = isVisible +} +``` + +Because `SideEffect` runs on the main thread after composition, Filament's JNI calls (which +*must* happen on the main thread) are naturally satisfied. + +### 3. Scene-level sync via `snapshotFlow` + +The `Scene` composable uses a `LaunchedEffect` that watches `scopeChildNodes` (a +`SnapshotStateList`) through `snapshotFlow`. 
Every time the Compose snapshot system
+detects an add or remove, the diff is forwarded to `SceneNodeManager`:
+
+```kotlin
+LaunchedEffect(nodeManager) {
+    var prevNodes = emptyList<Node>()
+    snapshotFlow { scopeChildNodes.toList() }.collect { newNodes ->
+        (prevNodes - newNodes.toSet()).forEach { nodeManager.removeNode(it) }
+        (newNodes - prevNodes.toSet()).forEach { nodeManager.addNode(it) }
+        prevNodes = newNodes
+    }
+}
+```
+
+`SceneNodeManager` itself is straightforward -- it calls `scene.addEntities()` and
+`scene.removeEntities()` on the Filament `Scene`, wires up child-node listeners, and
+maintains an idempotent `managedNodes` set to prevent double-add/remove.
+
+---
+
+## Threading model
+
+!!! danger "Main thread only"
+    **All Filament JNI calls must execute on the main (UI) thread.** Calling
+    `modelLoader.createModel*`, `materialLoader.*`, or any Filament API from a background
+    coroutine will cause a native crash (SIGABRT).
+
+### How the threading works in practice
+
+```
+ ┌────────────────────┐      ┌──────────────────────┐
+ │  Dispatchers.IO    │      │     Main Thread      │
+ │                    │      │                      │
+ │  Read file bytes   │─────▶│  createModelInstance │
+ │  (assets, network) │      │  (Filament JNI)      │
+ └────────────────────┘      │                      │
+                             │  SideEffect { ... }  │
+                             │  (property updates)  │
+                             │                      │
+                             │  withFrameNanos { }  │
+                             │  (render loop)       │
+                             └──────────────────────┘
+```
+
+**`rememberModelInstance`** demonstrates the correct pattern:
+
+1. `produceState` launches on the main thread's coroutine context.
+2. File bytes are read on `Dispatchers.IO` via `withContext`.
+3. Execution returns to `Main`, where `modelLoader.createModelInstance(buffer)` calls
+   Filament's `AssetLoader` through JNI -- safely on the main thread.
+
+```kotlin
+@Composable
+fun rememberModelInstance(modelLoader: ModelLoader, assetFileLocation: String): ModelInstance?
{
+    val context = LocalContext.current
+    return produceState<ModelInstance?>(initialValue = null, modelLoader, assetFileLocation) {
+        val buffer = withContext(Dispatchers.IO) {
+            context.assets.readBuffer(assetFileLocation)
+        } ?: return@produceState
+        // Back on Main -- safe for Filament JNI
+        value = modelLoader.createModelInstance(buffer)
+    }.value
+}
+```
+
+**The render loop** runs on `Main` via Compose's `withFrameNanos`, which is backed by
+`Choreographer` frame callbacks:
+
+```kotlin
+LaunchedEffect(engine, renderer, view, scene) {
+    while (true) {
+        withFrameNanos { frameTimeNanos ->
+            // all of this executes on Main
+            modelLoader.updateLoad()
+            nodes.forEach { it.onFrame(frameTimeNanos) }
+            if (renderer.beginFrame(swapChain, frameTimeNanos)) {
+                renderer.render(view)
+                renderer.endFrame()
+            }
+        }
+    }
+}
+```
+
+!!! tip "Safe async loading for imperative code"
+    Outside of composables, use `modelLoader.loadModelAsync(fileLocation) { model -> ... }`.
+    The callback is delivered on IO, but you must marshal any Filament calls back to Main.
+ +--- + +## Resource lifecycle + +SceneView ties every Filament resource to Compose's lifecycle through `remember` + +`DisposableEffect`, following a consistent pattern: + +``` +remember { create resource } → DisposableEffect { onDispose { destroy resource } } +``` + +### Engine and loaders + +| Resource | Created by | Destroyed by | +|---|---|---| +| `Engine` + EGL context | `rememberEngine()` | `DisposableEffect.onDispose` calls `engine.safeDestroy()` + `eglContext.destroy()` | +| `ModelLoader` | `rememberModelLoader(engine)` | `DisposableEffect.onDispose` destroys `AssetLoader`, `ResourceLoader`, `MaterialProvider` | +| `MaterialLoader` | `rememberMaterialLoader(engine)` | `DisposableEffect.onDispose` | +| `EnvironmentLoader` | `rememberEnvironmentLoader(engine)` | `DisposableEffect.onDispose` | +| `Renderer`, `View`, `Scene` | `rememberRenderer()`, `rememberView()`, `rememberScene()` | `DisposableEffect.onDispose` | + +### Nodes + +Every node composable calls `NodeLifecycle`, which: + +1. **On enter:** attaches the node to the scene via `SceneNodeManager.addNode()`. +2. **On exit:** detaches the node synchronously, then calls `node.destroy()` which releases + the Filament entity and all associated components (transform, renderable, light, etc.). + +### Model instances + +`rememberModelInstance` returns `null` while loading and a `ModelInstance` once ready. The +underlying `Model` (Filament asset) is tracked by `ModelLoader`, which destroys all +registered models when the loader itself is disposed. + +!!! info "No manual cleanup needed" + If you use the composable API (`Scene { }` + node composables), you never need to call + `destroy()` yourself. Resource cleanup follows the Compose tree automatically. + +--- + +## Scene rendering pipeline + +Every frame follows this sequence: + +``` + 1. Compose recomposition + └─ SideEffect pushes updated node properties to Filament entities + + 2. 
Choreographer frame callback (withFrameNanos) + ├─ modelLoader.updateLoad() ← finishes async resource loads + ├─ node.onFrame(frameTimeNanos) ← per-node frame tick (animations, etc.) + ├─ CameraManipulator.update() ← orbit/pan/zoom from gestures + │ └─ cameraNode.transform = ... ← updates Filament camera transform + ├─ onFrame callback ← user-supplied per-frame hook + └─ renderer.beginFrame / render / endFrame + └─ Filament PBR pipeline: + ├─ Shadow map passes + ├─ Color pass (PBR shading, IBL, fog) + └─ Post-processing (tone mapping, FXAA, bloom) + + 3. Result composited onto SurfaceView or TextureView +``` + +For AR scenes (`ARScene`), step 2 additionally: + +- Calls `session.update()` to get the latest ARCore `Frame`. +- Updates the camera projection and view matrix from the ARCore `Camera` pose. +- Runs `LightEstimator` to adjust the main light and environment from the real-world + lighting conditions. +- Feeds the camera texture stream to `ARCameraStream` for the passthrough background. + +--- + +## Module boundaries + +SceneView is split into two Gradle modules with a strict dependency direction: + +``` + ┌─────────────────────┐ ┌─────────────────────────┐ + │ sceneview/ │◀────────│ arsceneview/ │ + │ │ depends │ │ + │ Scene │ on │ ARScene │ + │ SceneScope │ │ ARSceneScope │ + │ SceneNodeManager │ │ ARCameraNode │ + │ Node, ModelNode, │ │ AnchorNode, PoseNode, │ + │ LightNode, ... │ │ AugmentedImageNode, │ + │ ModelLoader │ │ TrackableNode, ... │ + │ Engine utilities │ │ ArSession, ARCameraStream│ + │ CollisionSystem │ │ LightEstimator │ + │ CameraManipulator │ │ PlaneRenderer │ + └─────────────────────┘ └─────────────────────────┘ + No ARCore dependency Depends on ARCore SDK +``` + +### `sceneview/` -- pure 3D + +Contains everything needed for 3D rendering without AR: the `Scene` composable, +`SceneScope` DSL, all base node types, model/material/environment loaders, the collision +system, gesture detectors, and camera manipulation. 
Has **zero dependency on ARCore**. + +Artifact: `io.github.sceneview:sceneview` + +### `arsceneview/` -- AR layer + +Depends on `sceneview/` and adds: + +- **`ARScene`** composable -- manages the ARCore `Session` lifecycle, camera stream, and + light estimation. +- **`ARSceneScope`** -- extends `SceneScope` with AR-specific node composables like + `AnchorNode`, `PoseNode`, `AugmentedImageNode`, `HitResultNode`, + `AugmentedFaceNode`, `CloudAnchorNode`, and `TrackableNode`. +- **`ARCameraNode`** -- syncs the Filament camera with the ARCore camera pose each frame. +- **`ArSession`**, **`ARCameraStream`**, **`LightEstimator`**, **`PlaneRenderer`** -- + ARCore integration utilities. + +Artifact: `io.github.sceneview:arsceneview` + +!!! note "Scope inheritance" + `ARSceneScope` extends `SceneScope`, so all base node composables (`ModelNode`, + `LightNode`, `CubeNode`, etc.) are available inside `ARScene { }` blocks. AR-specific + nodes are only available at the `ARSceneScope` level, not inside nested `NodeScope` + child blocks. diff --git a/docs/docs/cheatsheet.md b/docs/docs/cheatsheet.md new file mode 100644 index 000000000..80ce1ddcc --- /dev/null +++ b/docs/docs/cheatsheet.md @@ -0,0 +1,195 @@ +# API Cheatsheet + +A quick reference for SceneView's most-used APIs. Print it, pin it, keep it next to your keyboard. + +--- + +## Setup + +```kotlin +// build.gradle +implementation("io.github.sceneview:sceneview:3.2.0") // 3D +implementation("io.github.sceneview:arsceneview:3.2.0") // AR + 3D +``` + +--- + +## Core Remember Hooks + +```kotlin +val engine = rememberEngine() +val modelLoader = rememberModelLoader(engine) +val materialLoader = rememberMaterialLoader(engine) +val environmentLoader = rememberEnvironmentLoader(engine) + +val model = rememberModelInstance(modelLoader, "models/file.glb") // null while loading +val env = rememberEnvironment(environmentLoader) { + createHDREnvironment("environments/sky.hdr")!! 
+} + +val cameraManipulator = rememberCameraManipulator() +val mainLight = rememberMainLightNode(engine) { intensity = 100_000f } +val cameraNode = rememberCameraNode(engine) { position = Position(0f, 2f, 5f) } +val viewNodeManager = rememberViewNodeManager() +``` + +--- + +## Scene + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = cameraManipulator, // orbit/pan/zoom + cameraNode = cameraNode, // OR fixed camera + environment = env, + mainLightNode = mainLight, + surfaceType = SurfaceType.Surface, // or TextureSurface + isOpaque = true, + viewNodeWindowManager = viewNodeManager, // for ViewNode + onGestureListener = rememberOnGestureListener( + onSingleTapConfirmed = { event, node -> }, + onDoubleTap = { event, node -> }, + onLongPress = { event, node -> } + ), + onTouchEvent = { event, hitResult -> false }, + onFrame = { frameTimeNanos -> } +) { + // SceneScope — declare nodes here +} +``` + +--- + +## ARScene + +```kotlin +ARScene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + planeRenderer = true, + sessionConfiguration = { session, config -> + config.depthMode = Config.DepthMode.AUTOMATIC + config.lightEstimationMode = Config.LightEstimationMode.ENVIRONMENTAL_HDR + }, + sessionFeatures = setOf(), // e.g., Session.Feature.FRONT_CAMERA + onSessionUpdated = { session, frame -> }, + onTouchEvent = { event, hitResult -> true } +) { + // ARSceneScope — declare AR nodes here +} +``` + +--- + +## Node Types — 3D + +| Node | Key Parameters | +|---|---| +| `ModelNode` | `modelInstance`, `scaleToUnits`, `centerOrigin`, `position`, `rotation`, `isEditable`, `autoAnimate`, `animationName`, `animationLoop` | +| `CubeNode` | `size: Size`, `materialInstance` | +| `SphereNode` | `radius: Float`, `materialInstance` | +| `CylinderNode` | `radius`, `height`, `materialInstance` | +| `PlaneNode` | `size: Size`, `materialInstance` | +| `LightNode` | `type: 
LightManager.Type`, `apply = { intensity(); color(); castShadows() }` | +| `ImageNode` | `imageFileLocation` / `imageResId` / `bitmap`, `size` | +| `VideoNode` | `player: MediaPlayer`, `chromaKeyColor`, `size` | +| `ViewNode` | `windowManager`, content = `@Composable` | +| `TextNode` | `text`, `fontSize`, `textColor`, `backgroundColor`, `widthMeters` | +| `BillboardNode` | `bitmap`, `widthMeters`, `heightMeters` | +| `LineNode` | `start`, `end`, `materialInstance` | +| `PathNode` | `points: List`, `closed`, `materialInstance` | +| `DynamicSkyNode` | `timeOfDay` (0-24), `turbidity`, `sunIntensity` | +| `FogNode` | `view`, `density`, `height`, `color`, `enabled` | +| `ReflectionProbeNode` | `filamentScene`, `environment`, `position`, `radius`, `cameraPosition` | +| `PhysicsNode` | `node`, `mass`, `restitution`, `linearVelocity`, `floorY`, `radius` | +| `MeshNode` | `primitiveType`, `vertexBuffer`, `indexBuffer`, `materialInstance` | +| `Node` | `position`, `rotation`, `scale` + child content | +| `CameraNode` | (via `rememberCameraNode`) | + +--- + +## Node Types — AR + +| Node | Key Parameters | +|---|---| +| `AnchorNode` | `anchor: Anchor` + child content | +| `HitResultNode` | `xPx`, `yPx` + child content (reticle) | +| `AugmentedImageNode` | `augmentedImage` + child content | +| `AugmentedFaceNode` | `augmentedFace`, `meshMaterialInstance` | +| `CloudAnchorNode` | `anchor`, `cloudAnchorId`, `onHosted` + child content | + +--- + +## Common Node Properties + +```kotlin +node.position = Position(x, y, z) // meters +node.rotation = Rotation(x, y, z) // degrees +node.scale = Scale(x, y, z) // multiplier +node.isVisible = true +node.isEditable = true // pinch-scale, drag-move, rotate +node.isTouchable = true +node.onSingleTapConfirmed = { event -> true } +node.onFrame = { frameTimeNanos -> } + +// Smooth movement +node.transform(position = Position(2f, 0f, 0f), smooth = true, smoothSpeed = 5f) +node.lookAt(targetNode) + +// Animation 
+node.animateRotations(Rotation(0f), Rotation(y = 360f)).also { + it.duration = 2000 + it.repeatCount = ValueAnimator.INFINITE +}.start() +``` + +--- + +## Math Types + +```kotlin +import io.github.sceneview.math.* + +Position(x = 0f, y = 1f, z = -2f) // Float3, meters +Rotation(x = 0f, y = 90f, z = 0f) // Float3, degrees +Scale(1.5f) // uniform +Scale(x = 2f, y = 1f, z = 2f) // non-uniform +Direction(x = 0f, y = 1f, z = 0f) // unit vector +Size(width = 1f, height = 0.5f) // Float2 +``` + +--- + +## Resource Loading + +```kotlin +// Composable (preferred) +val model = rememberModelInstance(modelLoader, "models/file.glb") + +// Imperative +val model = modelLoader.loadModelInstance("models/file.glb") +modelLoader.loadModelInstanceAsync("models/file.glb") { instance -> } + +// Environment +environmentLoader.createHDREnvironment("environments/sky.hdr") +environmentLoader.createKtxEnvironment("environments/studio.ktx") + +// Material +materialLoader.createColorInstance(Color.Red) +``` + +--- + +## Threading Rules + +| Safe | Unsafe | +|---|---| +| `rememberModelInstance(...)` | `modelLoader.createModelInstance(...)` on IO | +| `loadModelInstanceAsync(...)` | `materialLoader.createMaterial(...)` on IO | +| Any composable in `Scene { }` | Direct Filament API on background thread | + +**Rule:** Filament JNI = main thread only. `remember*` hooks handle this for you. diff --git a/docs/docs/codelabs/index.md b/docs/docs/codelabs/index.md index 23df069b2..46d0f8ccc 100644 --- a/docs/docs/codelabs/index.md +++ b/docs/docs/codelabs/index.md @@ -6,7 +6,7 @@ Step-by-step guides for building 3D and AR apps with SceneView and Jetpack Compo
-- :material-cube-outline: **3D with Compose** +- :octicons-package-24: **3D with Compose** --- @@ -16,7 +16,7 @@ Step-by-step guides for building 3D and AR apps with SceneView and Jetpack Compo [:octicons-arrow-right-24: Start](codelab-3d-compose.md) -- :material-augmented-reality: **AR with Compose** +- :octicons-eye-24: **AR with Compose** --- diff --git a/docs/docs/comparison.md b/docs/docs/comparison.md new file mode 100644 index 000000000..986091545 --- /dev/null +++ b/docs/docs/comparison.md @@ -0,0 +1,174 @@ +# SceneView vs. the alternatives + +An honest comparison for developers evaluating 3D and AR options. + +--- + +## The landscape + +| Library | Approach | Status | +|---|---|---| +| **SceneView** | Jetpack Compose composables, Filament + ARCore | Active, v3.2.0 | +| **Google Sceneform** | View-based, custom renderer, ARCore | Abandoned (archived 2021) | +| **Raw ARCore SDK** | Low-level session/frame API, bring your own renderer | Active but no UI layer | +| **Unity** | Full game engine embedded via `UnityPlayerActivity` | Active, heavy | +| **Rajawali** | OpenGL ES wrapper, imperative scene graph | Maintenance mode | + +--- + +## Side-by-side: adding a 3D model viewer + +=== "SceneView" + + ```kotlin + // build.gradle + implementation("io.github.sceneview:sceneview:3.2.0") + + // One composable + @Composable + fun ModelViewer() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val model = rememberModelInstance(modelLoader, "models/helmet.glb") + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator() + ) { + model?.let { ModelNode(modelInstance = it, scaleToUnits = 1.0f) } + } + } + ``` + + **~15 lines** · 1 file · 0 XML · 0 lifecycle callbacks · 0 manual cleanup + +=== "Sceneform (archived)" + + ```kotlin + // XML layout required + // + + class ModelViewerActivity : AppCompatActivity() { + private lateinit var arFragment: 
ArFragment + + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + setContentView(R.layout.activity_model_viewer) + arFragment = supportFragmentManager + .findFragmentById(R.id.arFragment) as ArFragment + + arFragment.setOnTapArPlaneListener { hitResult, _, _ -> + val anchor = hitResult.createAnchor() + ModelRenderable.builder() + .setSource(this, Uri.parse("helmet.sfb")) + .build() + .thenAccept { renderable -> + val anchorNode = AnchorNode(anchor) + anchorNode.setParent(arFragment.arSceneView.scene) + val modelNode = TransformableNode( + arFragment.transformationSystem) + modelNode.renderable = renderable + modelNode.setParent(anchorNode) + } + } + } + + override fun onResume() { super.onResume() } + override fun onPause() { super.onPause() } + override fun onDestroy() { super.onDestroy() } + } + ``` + + **~80+ lines** · 3+ files · Manual lifecycle · `.sfb` format deprecated + +=== "Raw ARCore" + + ```kotlin + // You get a Session, Frame, and Camera. That's it. + // Bring your own renderer (OpenGL ES or Vulkan). + // Manage GL surface, shader compilation, mesh uploading, + // lighting, shadows, and frame timing yourself. 
+ ``` + + **500–1000+ lines** before rendering a single triangle + +=== "Unity" + + ```kotlin + // Unity export as Android library + implementation(project(":unityLibrary")) + + class UnityViewerActivity : UnityPlayerActivity() { + // All logic in C# inside Unity + } + ``` + + **40–350 MB** APK overhead · No Compose integration · Separate build pipeline + +--- + +## Feature matrix + +| Feature | SceneView | Sceneform | Raw ARCore | Unity | +|---|---|---|---|---| +| Jetpack Compose | **Native** | No | No | No | +| Declarative nodes | **Yes** | No (imperative) | No API | No (C#) | +| Auto lifecycle | **Yes** | Manual | Manual | Unity-managed | +| PBR rendering | **Filament** | Limited | DIY | Unity renderer | +| glTF/GLB | **Yes** | .sfb (deprecated) | DIY | Yes | +| Physics | **Built-in** | No | No | Built-in | +| Post-processing | **Bloom, DOF, SSAO** | No | DIY | Yes | +| Dynamic sky | **Yes** | No | No | Yes (HDRP) | +| AR planes | **Yes** | Yes | Yes | Yes | +| AR image tracking | **Yes** | Yes | Yes | Yes | +| AR face tracking | **Yes** | Yes | Yes | Yes | +| Cloud anchors | **Yes** | Yes | Yes | Yes | +| Geospatial API | **Yes** | No | Yes | Yes | +| ViewNode (Compose in 3D) | **Yes** | No | No | No | +| AI tooling (MCP) | **Yes** | No | No | No | +| APK size impact | **~5 MB** | ~3 MB | ~1 MB | 40–350 MB | +| Active maintenance | **Yes** | Abandoned | Google | Yes | + +--- + +## Common objections + +!!! question "We already use Unity for 3D" + Unity is right for 3D-first games. But for adding 3D to an existing Compose app — a product + viewer, an AR feature, data visualization — Unity's 40–350 MB runtime, separate C# pipeline, + and no Compose integration make it overkill. SceneView adds ~5 MB and works inside your + existing Compose screens. + +!!! question "Can't we just use ARCore directly?" + ARCore gives you tracking data (planes, anchors, poses) but no rendering. 
You'd need your + own OpenGL/Vulkan renderer — months of work for a team with graphics expertise. SceneView + gives you ARCore + Filament rendering, wrapped in Compose composables. + +!!! question "Sceneform worked fine for us" + Google archived Sceneform in 2021. The `.sfb` format is deprecated. No Compose support. + No new ARCore features (geospatial, streetscape, depth). The community fork has unresolved + issues including 16 KB page size compliance required by Android 15 (API 35). + See the [Migration guide](migration.md) for a step-by-step walkthrough. + +!!! question "Is it production-ready?" + Yes. SceneView is built on Filament (Google's production rendering engine) and ARCore + (Google's production AR platform). Used in production apps on Google Play. The API is + stable and versioned with migration guides for breaking changes. + +--- + +## Migration from Sceneform + +| Sceneform | SceneView | +|---|---| +| `ArFragment` | `ARScene { }` composable | +| `ModelRenderable.builder()` | `rememberModelInstance(modelLoader, path)` | +| `AnchorNode(anchor).setParent(scene)` | `AnchorNode(anchor = a) { ... }` | +| `TransformableNode` | `ModelNode` with gesture parameters | +| `.sfb` model format | `.glb` / `.gltf` (standard glTF) | +| `onResume` / `onPause` / `onDestroy` | Automatic (Compose lifecycle) | +| `node.setParent(null); node.destroy()` | Remove from composition | + +[:octicons-arrow-right-24: Full migration guide](migration.md) diff --git a/docs/docs/contributing.md b/docs/docs/contributing.md new file mode 100644 index 000000000..5e3d954dc --- /dev/null +++ b/docs/docs/contributing.md @@ -0,0 +1,99 @@ +# Contributing to SceneView + +We welcome contributions of all kinds — bug fixes, new features, documentation, and samples. 
+ +--- + +## Quick start + +```bash +# Fork and clone +git clone https://github.com/YOUR_USERNAME/sceneview-android.git +cd sceneview-android + +# Open in Android Studio, build, and run a sample to verify setup +``` + +## AI-assisted workflow (recommended) + +SceneView ships with a full [Claude Code](https://claude.ai/code) setup so you can contribute +with AI assistance from the first keystroke: + +```bash +# Install Claude Code, then inside the project root: +claude +``` + +| Command | What it does | +|---|---| +| `/contribute` | Full guided workflow from understanding to PR | +| `/review` | Checks threading, Compose API, Kotlin style, module boundaries | +| `/document` | Generates/updates KDoc and `llms.txt` for changed APIs | +| `/test` | Audits coverage and generates missing tests | + +--- + +## Code style + +We follow the official [Kotlin style guide](https://developer.android.com/kotlin/style-guide). +The code style is stored in the repository — Android Studio picks it up automatically. + +Key rules: + +- **4-space indentation** (no tabs) +- **Trailing commas** in multi-line parameter lists +- **`internal`** visibility for implementation details +- **No wildcard imports** + +--- + +## Pull request guidelines + +1. **Fork → branch → PR** — create a feature branch from `main` +2. **Keep changes minimal** — fix what you came to fix, don't refactor the world +3. **Start PR title with uppercase** — e.g., "Add PhysicsNode collision callbacks" +4. **Describe your changes** — a short summary helps reviewers +5. **Same Git name/email as your GitHub account** — for contributor role attribution + +--- + +## Module structure + +| Module | What to change | +|---|---| +| `sceneview/` | Core 3D library — nodes, scene, rendering, materials | +| `arsceneview/` | AR layer — ARCore integration, AR-specific nodes | +| `samples/` | Sample apps — add new samples or improve existing ones | +| `docs/` | This documentation site | + +--- + +## Threading rules + +!!! 
warning "Critical" + Filament JNI calls **must** run on the main thread. Never call `modelLoader.createModel*` + or `materialLoader.*` from a background coroutine. Use `rememberModelInstance` in composables + or `loadModelInstanceAsync` for imperative code. + +--- + +## Filament materials + +If you modify `.mat` files, recompile them using the +[current Filament version](https://github.com/google/filament/releases). +Enable the Filament plugin in `gradle.properties` and rebuild. + +--- + +## Issues & discussions + +- **Bug reports** → [GitHub Issues](https://github.com/SceneView/sceneview-android/issues) (use the templates) +- **Questions** → [GitHub Discussions](https://github.com/SceneView/sceneview-android/discussions) +- **Chat** → [Discord](https://discord.gg/UbNDDBTNqb) + +--- + +## License + +By contributing, you agree that your contributions will be licensed under the +[Apache License 2.0](https://github.com/SceneView/sceneview-android/blob/main/LICENSE). diff --git a/docs/docs/faq.md b/docs/docs/faq.md new file mode 100644 index 000000000..1e8116ea3 --- /dev/null +++ b/docs/docs/faq.md @@ -0,0 +1,146 @@ +# FAQ + +## General + +### What is SceneView? + +SceneView is a Jetpack Compose library for 3D and AR on Android. It wraps Google Filament (rendering) and ARCore (augmented reality) in declarative composables — the same way you write `Column { }` or `Row { }`. + +### Is it free? + +Yes. SceneView is open source under the [Apache 2.0 license](https://github.com/SceneView/sceneview-android/blob/main/LICENSE). Free for personal and commercial use. + +### What's the relationship to Sceneform? + +Google created Sceneform and archived it in 2021. SceneView started as a community continuation, then was completely rewritten in v3.0 as a Compose-native library. It's not a fork — it's a new codebase. + +### What Android versions are supported? + +**Min SDK 24** (Android 7.0). This covers 99%+ of active devices. + +--- + +## Setup + +### Do I need to install the NDK? 
+ +No. SceneView bundles pre-compiled native libraries. No NDK or CMake setup required. + +### Can I use it without Compose? + +SceneView 3.x is Compose-only. If you need a View-based API, use SceneView 2.x (legacy, no longer actively developed). + +### What model formats are supported? + +**glTF 2.0** (`.gltf` + `.bin`) and **GLB** (binary glTF). These are the industry standard — exported by Blender, Maya, 3ds Max, and most 3D tools. + +### Where do I put my model files? + +In `src/main/assets/models/`. Reference them by path: `"models/helmet.glb"`. + +--- + +## 3D + +### How do I change a model's position at runtime? + +Use Compose state: + +```kotlin +var pos by remember { mutableStateOf(Position(0f, 0f, -2f)) } + +Scene(...) { + model?.let { + ModelNode(modelInstance = it, position = pos) + } +} + +// Update pos from a button, slider, or animation — the node moves automatically. +``` + +### How do I play a specific animation? + +```kotlin +ModelNode( + modelInstance = instance, + autoAnimate = false, + animationName = "Walk", // name from the glTF file + animationLoop = true, + animationSpeed = 1f +) +``` + +### Can I render multiple models? + +Yes — just add multiple `ModelNode` calls inside `Scene { }`: + +```kotlin +Scene(...) { + ModelNode(modelInstance = helmet, position = Position(x = -1f)) + ModelNode(modelInstance = sword, position = Position(x = 1f)) +} +``` + +### How do I add lighting? + +Use `LightNode` with the **named** `apply` parameter: + +```kotlin +LightNode( + type = LightManager.Type.SUN, + apply = { intensity(100_000f); castShadows(true) } +) +``` + +Or use `rememberMainLightNode(engine) { intensity = 100_000f }` as a Scene parameter. + +### What is `scaleToUnits`? + +It scales the model so its longest dimension equals the given value in meters. `scaleToUnits = 1.0f` means the model fits in a 1-meter bounding box. This normalizes models of any original size. + +--- + +## AR + +### Does AR work on emulators? + +Partially. 
ARCore has limited emulator support. For reliable AR development, use a [physical device with ARCore support](https://developers.google.com/ar/devices). + +### How do I detect when the user taps a plane? + +```kotlin +ARScene( + onTouchEvent = { event, hitResult -> + if (event.action == MotionEvent.ACTION_UP && hitResult != null) { + anchor = hitResult.createAnchor() + } + true + } +) { /* ... */ } +``` + +### Can I track real-world images? + +Yes — use `AugmentedImageNode` with an `AugmentedImageDatabase`. See the [ar-augmented-image sample](samples.md#ar-augmented-image). + +--- + +## Performance + +### What FPS should I expect? + +60fps on mid-range devices (2020+) with typical scenes (1-3 models, 1 light, HDR environment). Complex scenes with many models or post-processing effects may need optimization. + +### How much does SceneView add to APK size? + +Approximately **5 MB** (native Filament libraries + Kotlin code). ARSceneView adds ARCore (~1 MB extra, shared with Play Services). + +### Can I share the Engine across screens? + +Yes — create the engine at a higher scope (e.g., ViewModel or CompositionLocal) and pass it to each `Scene`. This avoids creating multiple Filament engines. + +--- + +## Troubleshooting + +See the full [Troubleshooting guide](troubleshooting.md) for detailed solutions to common issues. diff --git a/docs/docs/index.md b/docs/docs/index.md index 450d5150b..950ae4bdf 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -1,75 +1,111 @@ -# SceneView — 3D and AR with Compose +# SceneView -
+

3D and AR as Compose UI — on Android, XR headsets, and soon iOS

-![Hero Banner](assets/images/hero-banner.svg) +## Scenes are composables. -
+Write a `Scene { }` the same way you write a `Column { }`. Nodes are composables. +State drives the scene. Lifecycle is automatic. One API — every platform. -
+=== "3D Model Viewer" -## 3D is just Compose UI. + ```kotlin + Scene(modifier = Modifier.fillMaxSize()) { + rememberModelInstance(modelLoader, "models/helmet.glb")?.let { instance -> + ModelNode(modelInstance = instance, scaleToUnits = 1.0f, autoAnimate = true) + } + LightNode(type = LightManager.Type.SUN, apply = { intensity(100_000.0f) }) + } + ``` -SceneView brings the full power of **Google Filament** and **ARCore** into **Jetpack Compose**. -Write a `Scene { }` the same way you write a `Column { }`. Nodes are composables. -Lifecycle is automatic. State drives everything. + Five lines. Production-quality 3D. Same Kotlin you write every day. -[:octicons-rocket-24: Get started](#install){ .md-button .md-button--primary } -[:octicons-eye-24: Showcase](showcase.md){ .md-button } +=== "AR Placement" -
+ ```kotlin + ARScene(planeRenderer = true, onSessionUpdated = { _, frame -> + anchor = frame.getUpdatedPlanes() + .firstOrNull { it.type == Plane.Type.HORIZONTAL_UPWARD_FACING } + ?.let { frame.createAnchorOrNull(it.centerPose) } + }) { + anchor?.let { a -> + AnchorNode(anchor = a) { + ModelNode(modelInstance = sofa, scaleToUnits = 0.5f, isEditable = true) + } + } + } + ``` ---- + Tap to place. Pinch to scale. Two-finger rotate. All built in. -## Visual showcase +=== "XR Spatial" -See what developers are building with SceneView — 3D scenes, AR experiences, and interactive demos, all in pure Compose. + ```kotlin + XRScene(modifier = Modifier.fillMaxSize()) { + ModelNode( + modelInstance = furniture, + position = Position(0f, 0f, -2f) + ) + ViewNode(position = Position(0.5f, 1.5f, -1.5f)) { + Card { + Text("Tap to customize") + ColorPicker(onColorSelected = { /* update material */ }) + } + } + } + ``` -
+ Same composable API — now in spatial computing headsets. -
-![3D Model Viewer](assets/images/showcase-3d-model-viewer.svg) +=== "Physics" -**3D Model Viewer** + ```kotlin + Scene(modifier = Modifier.fillMaxSize()) { + val ball = rememberModelInstance(modelLoader, "models/ball.glb") + ball?.let { + val node = ModelNode(modelInstance = it, scaleToUnits = 0.1f) + PhysicsNode(node = node, mass = 1f, restitution = 0.6f, + linearVelocity = Position(0f, 5f, -3f), floorY = 0f) + } + } + ``` -Photorealistic glTF rendering with HDR environment, orbit camera, and pinch-to-zoom gestures. + Rigid body simulation. Gravity, bounce, collision — no game engine needed. -[:octicons-code-24: Source](https://github.com/SceneView/sceneview-android/tree/main/samples/model-viewer){ .showcase-link } -
+[:octicons-rocket-24: Get started](#get-started){ .md-button .md-button--primary } +[:octicons-book-24: Why SceneView](showcase.md){ .md-button } -
-![AR Model Viewer](assets/images/showcase-ar-model-viewer.svg) +--- -**AR Tap-to-Place** +## One API, every surface -Detect real surfaces, tap to place 3D models, pinch to scale, drag to rotate — all with ARCore plane detection. +
-[:octicons-code-24: Source](https://github.com/SceneView/sceneview-android/tree/main/samples/ar-model-viewer){ .showcase-link } -
+- :octicons-device-mobile-24: **Android** -
-![Augmented Image](assets/images/showcase-ar-augmented-image.svg) + --- -**Augmented Image** + `Scene {}` and `ARScene {}` — Jetpack Compose composables backed by Google Filament and ARCore. Production-ready today. -Point your camera at a real-world image and overlay interactive 3D content — product previews, AR catalogs, educational models. +- :octicons-device-desktop-24: **XR headsets** -[:octicons-code-24: Source](https://github.com/SceneView/sceneview-android/tree/main/samples/ar-augmented-image){ .showcase-link } -
+ --- -
-![Autopilot Demo](assets/images/showcase-autopilot.svg) + `XRScene {}` brings the same composable patterns to spatial computing. Your existing code and skills transfer directly. -**Autopilot HUD** +- :octicons-globe-24: **Kotlin Multiplatform** -Full autonomous driving interface built entirely with SceneView geometry nodes and Compose UI — no model files needed. + --- -[:octicons-code-24: Source](https://github.com/SceneView/sceneview-android/tree/main/samples/autopilot-demo){ .showcase-link } -
+ iOS via Filament's Metal backend. Share scene definitions across Android and iOS from one Kotlin codebase. -
+- :octicons-cpu-24: **Rendering engine** -[:octicons-arrow-right-24: See all samples in the Showcase](showcase.md){ .md-button } + --- + + Google Filament — physically-based rendering, HDR environment lighting, post-processing. 60fps on mid-range devices. + +
--- @@ -91,26 +127,116 @@ Full autonomous driving interface built entirely with SceneView geometry nodes a } ``` +=== "XR (v4.0)" + + ```kotlin + dependencies { + implementation("io.github.sceneview:sceneview-xr:4.0.0") + } + ``` + +!!! tip "That's it" + No XML layouts. No fragments. No OpenGL boilerplate. Just add the dependency and start composing. + --- -## Quick start +## What you get -```kotlin -@Composable -fun ModelViewerScreen() { - val engine = rememberEngine() - val modelLoader = rememberModelLoader(engine) +### 26+ composable node types - Scene(modifier = Modifier.fillMaxSize()) { - rememberModelInstance(modelLoader, "models/helmet.glb")?.let { instance -> - ModelNode(modelInstance = instance, scaleToUnits = 1.0f, autoAnimate = true) - } - LightNode(type = LightManager.Type.SUN, apply = { intensity(100_000f) }) - } -} -``` +
+ +- :octicons-package-24: **3D Models** + + --- + + `ModelNode` loads glTF/GLB with animations, gestures, and automatic scaling. + Geometry primitives — `CubeNode`, `SphereNode`, `CylinderNode`, `PlaneNode` — need no asset files. + +- :octicons-sun-24: **Lighting & Atmosphere** + + --- -That's it. No engine lifecycle callbacks. No `addChildNode()` or `destroy()` calls. The Compose runtime handles all of it. + `LightNode` (sun, point, spot, directional), `DynamicSkyNode` (time-of-day), + `FogNode`, `ReflectionProbeNode`. All driven by Compose state. + +- :octicons-image-24: **Media & UI in 3D** + + --- + + `ImageNode`, `VideoNode` (with chromakey), and `ViewNode` — render **any Composable** + directly inside 3D space. Text, buttons, cards — floating in your scene. + +- :octicons-zap-24: **Physics** + + --- + + `PhysicsNode` — rigid body simulation with gravity, collision, and tap-to-throw. + Interactive 3D worlds without a game engine. + +- :octicons-paintbrush-24: **Drawing & Text** + + --- + + `LineNode`, `PathNode` for 3D polylines and animated paths. + `TextNode`, `BillboardNode` for camera-facing labels. + +- :octicons-eye-24: **AR & spatial** + + --- + + `AnchorNode`, `AugmentedImageNode`, `AugmentedFaceNode`, `CloudAnchorNode`, + `StreetscapeGeometryNode`. Plane detection, geospatial, environmental HDR. + +
+ +[:octicons-arrow-right-24: Full feature showcase](showcase.md) + +--- + +### Production rendering + +Built on [Google Filament](https://github.com/google/filament) — the same physically-based +rendering engine used inside Google Search and Google Play Store. + +- **PBR** with metallic/roughness workflow +- **HDR environment lighting** from `.hdr` and `.ktx` files +- **Post-processing**: bloom, depth-of-field, SSAO, fog +- **60fps** on mid-range devices + +--- + +### v4.0 — what's next + +
+ +- :octicons-stack-24: **Multiple scenes** + + --- + + Multiple independent `Scene {}` on one screen — dashboards, feeds, product cards — each with its own camera and environment. + +- :octicons-mirror-24: **Portal rendering** + + --- + + `PortalNode` — a window into another scene. AR portals, product showcases with custom lighting, level transitions. + +- :octicons-iterations-24: **Particles & animation** + + --- + + `ParticleNode` for GPU particles (fire, smoke, sparkles). `AnimationController` for blending, cross-fading, and layering. + +- :octicons-plug-24: **Collision detection** + + --- + + `CollisionNode` — declarative collision detection between scene nodes. No manual raycasting. + +
+ +[:octicons-arrow-right-24: v4.0 preview](v4-preview.md) --- @@ -118,23 +244,23 @@ That's it. No engine lifecycle callbacks. No `addChildNode()` or `destroy()` cal
-- :material-cube-outline: **3D with Compose** +- :octicons-play-24: **3D with Compose** --- Build your first 3D scene with a rotating glTF model, HDR lighting, and orbit camera gestures. - **~25 minutes · Beginner** + **~25 minutes** [:octicons-arrow-right-24: Start the codelab](codelabs/codelab-3d-compose.md) -- :material-augmented-reality: **AR with Compose** +- :octicons-play-24: **AR with Compose** --- Place 3D objects in the real world using ARCore plane detection and anchor tracking. - **~20 minutes · Beginner** + **~20 minutes** [:octicons-arrow-right-24: Start the codelab](codelabs/codelab-ar-compose.md) @@ -146,53 +272,82 @@ That's it. No engine lifecycle callbacks. No `addChildNode()` or `destroy()` cal ### Nodes are composables -Every 3D object — models, lights, geometry, cameras — is a `@Composable` function inside `Scene { }`. No manual `addChildNode()` or `destroy()` calls. +Every 3D object — models, lights, geometry, cameras — is a `@Composable` function inside `Scene { }`. No manual `addChildNode()` or `destroy()` calls. Nodes enter the scene on composition and are cleaned up when they leave. ### State drives the scene -Pass Compose state into node parameters. The scene updates on the next frame. Toggle a `Boolean` to show/hide a node. Update a `mutableStateOf` to place content in AR. +Pass Compose state into node parameters. The scene updates on the next frame. Toggle a `Boolean` to show/hide a node. Animate a `Float` for smooth transitions. Update a `mutableStateOf` to place content in AR. ### Everything is `remember` -The Filament engine, model loaders, environment, camera — all are `remember`-ed values with automatic cleanup. Create them, use them, forget about them. 
+The Filament engine, model loaders, environment, camera — all are `remember`-ed values with automatic cleanup: ---- +```kotlin +val engine = rememberEngine() +val modelLoader = rememberModelLoader(engine) +val model = rememberModelInstance(modelLoader, "models/product.glb") +val environment = rememberEnvironment(rememberEnvironmentLoader(engine)) { + createHDREnvironment("environments/sky_2k.hdr")!! +} +// All resources destroyed automatically when composable leaves the tree +``` -## Coming next +### Thread safety by default -
+Filament requires JNI calls on the main thread. `rememberModelInstance` handles the IO-to-main-thread transition automatically. You never think about it. -- :material-cellphone-link: **Kotlin Multiplatform (iOS)** +--- - --- +## Samples + +15 working sample apps ship with the repository: + +| Sample | What it demonstrates | +|---|---| +| `model-viewer` | 3D model, HDR environment, orbit camera, animation playback | +| `ar-model-viewer` | Tap-to-place, plane detection, pinch/rotate gestures | +| `camera-manipulator` | Orbit / pan / zoom camera with gesture hints | +| `gltf-camera` | Cameras imported from a glTF file | +| `dynamic-sky` | Time-of-day sun, turbidity, fog controls | +| `reflection-probe` | Metallic surfaces with cubemap reflections | +| `physics-demo` | Tap-to-throw balls, collision, gravity | +| `post-processing` | Bloom, depth-of-field, SSAO, fog toggles | +| `line-path` | 3D line drawing, gizmos, spirals, animated paths | +| `text-labels` | Camera-facing text labels on 3D objects | +| `ar-augmented-image` | Real-world image detection + overlay | +| `ar-cloud-anchor` | Persistent cross-device anchors | +| `ar-point-cloud` | ARCore feature point visualization | +| `autopilot-demo` | Autonomous AR demo | - SceneView is exploring **KMP support** via Filament's Metal backend. Same Compose DSL, running natively on iOS. +--- - **Roadmap: v4.0** +## Switching from another library? - [:octicons-arrow-right-24: See the full roadmap](https://github.com/SceneView/sceneview-android/blob/main/ROADMAP.md) +
-- :material-virtual-reality: **Android XR** +- :octicons-arrow-switch-24: **Coming from Sceneform?** --- - Spatial computing support with a dedicated **SceneView-XR** module for Android XR headsets and passthrough AR. + Sceneform was archived by Google in 2021. SceneView is the successor — modern Compose API, active development, full ARCore support. - **Roadmap: v4.0** + [:octicons-arrow-right-24: Migration guide](migration.md) - [:octicons-arrow-right-24: See the full roadmap](https://github.com/SceneView/sceneview-android/blob/main/ROADMAP.md) +- :octicons-git-compare-24: **Evaluating options?** -
+ --- ---- + Side-by-side comparison with Sceneform, Unity, raw ARCore, Rajawali, and other alternatives. -## Upgrading from v2.x? + [:octicons-arrow-right-24: Comparison](comparison.md) -See the [Migration guide](migration.md) for a step-by-step walkthrough of every breaking change. +
--- ## Community -[:simple-discord: Discord](https://discord.gg/UbNDDBTNqb){ .md-button } -[:simple-github: GitHub](https://github.com/SceneView/sceneview-android){ .md-button .md-button--primary } +[:octicons-comment-discussion-24: Discord](https://discord.gg/UbNDDBTNqb){ .md-button } +[:octicons-mark-github-24: GitHub](https://github.com/SceneView/sceneview-android){ .md-button .md-button--primary } + + diff --git a/docs/docs/integrations.md b/docs/docs/integrations.md new file mode 100644 index 000000000..b16d4b750 --- /dev/null +++ b/docs/docs/integrations.md @@ -0,0 +1,265 @@ +# Integrations + +How to use SceneView with the rest of your Android app stack. + +--- + +## Jetpack Compose Navigation + +Use SceneView inside navigation destinations. The scene is created when you navigate to it and destroyed when you leave — no manual cleanup. + +```kotlin +@Composable +fun AppNavigation() { + val navController = rememberNavController() + + NavHost(navController, startDestination = "home") { + composable("home") { + HomeScreen(onViewProduct = { id -> + navController.navigate("product/$id") + }) + } + composable("product/{id}") { backStackEntry -> + val productId = backStackEntry.arguments?.getString("id") ?: return@composable + ProductViewerScreen(productId) + } + composable("ar-preview") { + ARPreviewScreen() + } + } +} + +@Composable +fun ProductViewerScreen(productId: String) { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val model = rememberModelInstance(modelLoader, "models/$productId.glb") + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator() + ) { + model?.let { ModelNode(modelInstance = it, scaleToUnits = 1.0f) } + } +} +``` + +!!! tip "Engine lifecycle" + Each `rememberEngine()` call creates a new Filament engine. 
If you navigate between multiple 3D screens frequently, consider sharing the engine via a ViewModel or CompositionLocal to avoid repeated initialization. + +### Shared engine across destinations + +```kotlin +// In your Application or top-level composable +val LocalEngine = staticCompositionLocalOf { error("No engine") } + +@Composable +fun App() { + val engine = rememberEngine() + + CompositionLocalProvider(LocalEngine provides engine) { + AppNavigation() + } +} + +// In any destination +@Composable +fun ProductViewer() { + val engine = LocalEngine.current + val modelLoader = rememberModelLoader(engine) + // ... +} +``` + +--- + +## Material 3 / Material Design + +SceneView renders inside a standard Compose layout. Wrap it with Material 3 components freely. + +### 3D viewer in a Material 3 card + +```kotlin +@Composable +fun ProductCard(product: Product) { + Card( + modifier = Modifier.fillMaxWidth().padding(16.dp), + shape = RoundedCornerShape(16.dp) + ) { + Column { + // 3D viewer as the card hero + Scene( + modifier = Modifier.fillMaxWidth().height(250.dp), + cameraManipulator = rememberCameraManipulator() + ) { + rememberModelInstance(modelLoader, product.modelPath)?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } + } + + // Standard Material 3 content below + Column(modifier = Modifier.padding(16.dp)) { + Text(product.name, style = MaterialTheme.typography.headlineSmall) + Text(product.price, style = MaterialTheme.typography.bodyLarge) + Button(onClick = { /* add to cart */ }) { + Text("Add to Cart") + } + } + } + } +} +``` + +### Bottom sheet with AR + +```kotlin +@OptIn(ExperimentalMaterial3Api::class) +@Composable +fun ARWithBottomSheet() { + val sheetState = rememberModalBottomSheetState() + var showSheet by remember { mutableStateOf(false) } + + Box(modifier = Modifier.fillMaxSize()) { + ARScene( + modifier = Modifier.fillMaxSize(), + planeRenderer = true + ) { + // AR content + } + + // Floating action button + FloatingActionButton( + 
onClick = { showSheet = true }, + modifier = Modifier.align(Alignment.BottomEnd).padding(16.dp) + ) { + Icon(Icons.Default.Settings, "Settings") + } + } + + if (showSheet) { + ModalBottomSheet(onDismissRequest = { showSheet = false }, sheetState = sheetState) { + // Model picker, settings, etc. + ModelPickerContent() + } + } +} +``` + +--- + +## ViewModel integration + +Keep scene state in a ViewModel so it survives configuration changes. + +```kotlin +class SceneViewModel : ViewModel() { + var selectedModel by mutableStateOf("helmet") + private set + + var isAnimating by mutableStateOf(true) + private set + + var lightIntensity by mutableFloatStateOf(100_000f) + private set + + fun selectModel(name: String) { selectedModel = name } + fun toggleAnimation() { isAnimating = !isAnimating } + fun setLight(intensity: Float) { lightIntensity = intensity } +} + +@Composable +fun SceneScreen(viewModel: SceneViewModel = viewModel()) { + val model = rememberModelInstance(modelLoader, "models/${viewModel.selectedModel}.glb") + + Scene(modifier = Modifier.fillMaxSize()) { + model?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + autoAnimate = viewModel.isAnimating + ) + } + LightNode( + type = LightManager.Type.SUN, + apply = { intensity(viewModel.lightIntensity) } + ) + } +} +``` + +--- + +## Hilt / dependency injection + +Inject model paths, environment configurations, or feature flags. 
+
+```kotlin
+@HiltViewModel
+class ProductViewModel @Inject constructor(
+    savedStateHandle: SavedStateHandle,
+    private val productRepository: ProductRepository
+) : ViewModel() {
+    // Navigation argument, e.g. from a "product/{productId}" route
+    private val productId: String = checkNotNull(savedStateHandle["productId"])
+
+    val product = productRepository.getProduct(productId)
+    val modelUrl get() = product.value?.modelUrl
+}
+
+@Composable
+fun ProductScreen(viewModel: ProductViewModel = hiltViewModel()) {
+    val product by viewModel.product.collectAsStateWithLifecycle()
+
+    product?.modelUrl?.let { url ->
+        Scene(modifier = Modifier.fillMaxSize()) {
+            rememberModelInstance(modelLoader, url)?.let {
+                ModelNode(modelInstance = it, scaleToUnits = 1.0f)
+            }
+        }
+    }
+}
+```
+
+---
+
+## Room / local database
+
+Store anchor data for persistent AR experiences.
+
+```kotlin
+@Entity
+data class SavedAnchor(
+    @PrimaryKey val id: String,
+    val cloudAnchorId: String,
+    val label: String,
+    val timestamp: Long
+)
+
+@Dao
+interface AnchorDao {
+    @Insert(onConflict = OnConflictStrategy.REPLACE)
+    suspend fun save(anchor: SavedAnchor)
+
+    @Query("SELECT * FROM SavedAnchor ORDER BY timestamp DESC")
+    fun getAll(): Flow<List<SavedAnchor>>
+}
+
+// In your AR composable
+ARScene(...) {
+    CloudAnchorNode(
+        anchor = localAnchor,
+        onHosted = { cloudId, state ->
+            if (state == CloudAnchorState.SUCCESS && cloudId != null) {
+                scope.launch {
+                    anchorDao.save(SavedAnchor(
+                        id = UUID.randomUUID().toString(),
+                        cloudAnchorId = cloudId,
+                        label = "My anchor",
+                        timestamp = System.currentTimeMillis()
+                    ))
+                }
+            }
+        }
+    ) {
+        ModelNode(modelInstance = model!!)
+    }
+}
+```
diff --git a/docs/docs/performance.md b/docs/docs/performance.md
new file mode 100644
index 000000000..032ff6ae9
--- /dev/null
+++ b/docs/docs/performance.md
@@ -0,0 +1,417 @@
+# Performance Guide
+
+Ship smooth 60fps 3D and AR experiences on Android. This guide covers profiling,
+asset optimization, scene tuning, Compose best practices, and device-tier strategies
+for the SceneView SDK.
+
+---
+
+## Measuring Performance
+
+Before optimizing, measure. Guessing where time is spent leads to wasted effort.
+
+### Frame budget
+
+At 60fps your app has **16.6ms per frame** for everything: CPU logic, GPU rendering,
+Compose layout, and ARCore tracking. Anything over that budget causes dropped frames
+and visible jank.
+
+!!! info "The 16.6ms rule"
+    60fps = 1000ms / 60 = **16.6ms per frame**. That includes CPU work, GPU rendering,
+    and any Compose recomposition. Aim for headroom — target **12ms** so spikes don't
+    push you over.
+
+### Android Studio Profiler
+
+Use the built-in profiler in Android Studio to identify bottlenecks:
+
+- **CPU Profiler** — look for long `onFrame` or `onSessionUpdated` calls, excessive
+  allocations, or blocking I/O on the main thread.
+- **GPU Profiler** — check for overdraw (red = 4x overdraw), long fragment shader
+  times, or GPU-bound frames.
+- **Memory Profiler** — watch for repeated allocations each frame (GC pauses cause
+  jank). Look for leaked `ModelInstance` or `Material` objects.
+
+### Filament debug stats
+
+Enable Filament's built-in frame statistics to see draw calls, triangle counts, and
+GPU timing without leaving your app:
+
+```kotlin
+Scene(
+    engine = engine,
+    modelLoader = modelLoader,
+    // ...
+) {
+    // Access the Filament view for debug options
+}
+```
+
+!!! tip "Quick debug overlay"
+    Use `adb shell dumpsys gfxinfo <your.package.name>` for a quick frame-time histogram
+    without any code changes.
+
+---
+
+## Model Optimization
+
+Models are usually the biggest performance lever. A poorly optimized model can
+single-handedly destroy your frame rate.
+
+### Polygon count
+
+| Target | Triangle budget |
+|---|---|
+| Interactive objects | < 100K triangles |
+| Hero/showcase models | < 200K triangles (high-end only) |
+| Background/environment | < 50K triangles |
+
+!!! warning "Triangles add up fast"
+    A single model might be 50K triangles, but if you have 10 in the scene that is
+    500K — well beyond mobile budgets. Always count **total scene triangles**.
+ +**Reduction tools:** + +- **Blender** — Decimate modifier (collapse or un-subdivide) +- **meshoptimizer** — `meshopt_simplify` for automated LOD generation +- **gltfpack** — CLI tool that simplifies, compresses, and optimizes glTF/GLB files + +Use **LOD (Level of Detail)** when available: show high-poly when the camera is close, +swap to low-poly at distance. This can cut triangle count by 50-80% for complex scenes. + +### Textures + +Textures consume the most GPU memory and bandwidth on mobile. + +| Rule | Recommendation | +|---|---| +| Format | **KTX2** with Basis Universal compression | +| Max size | **2048x2048** for mobile (1024x1024 for low-end) | +| Mipmaps | Always enable for objects viewed at varying distances | +| Channels | Use single-channel textures for roughness/metallic, not full RGBA | + +!!! tip "KTX2 saves memory and load time" + KTX2 with Basis Universal (ETC1S or UASTC) compresses textures 4-8x compared to + raw PNG/JPEG, and they stay compressed in GPU memory. Convert with + `toktx --t2 --bcmp input.png output.ktx2`. + +### File size + +Smaller files mean faster loading and less memory pressure: + +| Target | Size | +|---|---| +| Interactive models | < 10MB | +| Hero/showcase models | < 50MB | +| Quick-load previews | < 2MB | + +**Optimization checklist:** + +- [x] Use **GLB** (binary glTF) instead of glTF + separate .bin/.png files +- [x] Enable **Draco geometry compression** for mesh data +- [x] Strip unused animations, blend shapes, and extra UV sets +- [x] Run `gltfpack` as a final optimization pass + +```bash +# Example: optimize a model with gltfpack +gltfpack -i model.glb -o model_optimized.glb -tc -cc -si 0.5 +# -tc = texture compression, -cc = codec compression, -si = simplification ratio +``` + +--- + +## Scene Optimization + +### Limit draw calls + +Each visible node typically generates one or more draw calls. On mobile, aim for +**fewer than 100 draw calls** per frame. + +- **Fewer separate nodes** = fewer draw calls. 
Merge static geometry in your 3D + tool before export. +- **Use instancing** for repeated objects (trees, rocks, particles). Filament + supports GPU instancing for identical meshes. +- **Frustum culling** is automatic in Filament — objects outside the camera view + are not rendered. But they still cost CPU time if they exist as nodes. + +!!! example "Merge before export" + If you have 50 static building meshes in Blender, join them into one object + before exporting to GLB. This turns 50 draw calls into 1. + +### Lights + +Lights are one of the most expensive parts of a scene. Each additional light +increases per-fragment shading cost. + +| Light type | Cost | Recommendation | +|---|---|---| +| Directional (sun) | Low | Use 1 as your main light | +| Point / Spot | Medium | Limit to 2-3 total | +| Shadow-casting | High | Limit to 1-2 lights with shadows | + +```kotlin +val mainLight = rememberMainLightNode(engine) { + intensity = 100_000f + // Shadows on the main light only +} +``` + +!!! tip "Use IBL instead of many point lights" + Image-Based Lighting (IBL) from an HDR environment map provides realistic ambient + lighting at nearly zero per-frame cost. One directional light + IBL covers most + use cases better than 5+ point lights. + +```kotlin +val environment = rememberEnvironment(environmentLoader) { + createHDREnvironment("environments/studio.hdr")!! +} +``` + +### Post-processing + +Post-processing effects look great but eat into your frame budget: + +| Effect | Typical cost | Notes | +|---|---|---| +| Bloom | ~1-2ms | Acceptable on mid-tier and above | +| Depth of Field | ~1-2ms | Use sparingly, mainly for screenshots | +| SSAO | ~2-3ms | Most expensive — skip on low-end devices | +| Anti-aliasing (FXAA) | ~0.5ms | Cheap, usually worth enabling | + +!!! warning "SSAO on budget devices" + Screen-Space Ambient Occlusion is the most expensive post-process effect. On + low-end devices it can take 3ms+ alone — nearly 20% of your frame budget. 
+ Disable it on devices below your mid-tier threshold. + +Enable effects selectively based on device tier (see [Device Tiers](#device-tiers) +below). + +--- + +## Compose Integration + +SceneView is a Jetpack Compose library, so Compose performance rules apply directly. + +### Avoid unnecessary recompositions + +Recomposition during rendering can cause frame drops. Follow these rules: + +```kotlin +// BAD — creates new Position every recomposition, triggering node updates +Scene(/* ... */) { + ModelNode( + modelInstance = model, + position = Position(0f, 1f, 0f) // new object every recomposition! + ) +} + +// GOOD — stable reference, no unnecessary updates +val position = remember { Position(0f, 1f, 0f) } + +Scene(/* ... */) { + ModelNode( + modelInstance = model, + position = position + ) +} +``` + +!!! danger "No allocations in the composition body" + Never create new `Position`, `Rotation`, `Scale`, or `Quaternion` objects + directly inside `Scene { }` without `remember`. Each recomposition creates + a new instance, causing the node to update every frame. + +**Key rules:** + +- Use `remember` for stable `Position`, `Rotation`, and `Scale` references +- Use `key` on model instances to avoid unnecessary reload when list order changes +- Use `derivedStateOf` when computing values from other state + +### Share the Engine + +The Filament `Engine` is expensive to create. Never create more than one. + +```kotlin +// At the app/activity level +val engine = rememberEngine() +val modelLoader = rememberModelLoader(engine) +val materialLoader = rememberMaterialLoader(engine) +val environmentLoader = rememberEnvironmentLoader(engine) + +// Share across all scenes via CompositionLocal or parameter passing +Scene(engine = engine, modelLoader = modelLoader, /* ... */) { } +``` + +!!! failure "One Engine per app — not per screen" + Creating multiple `Engine` instances wastes GPU memory and can cause crashes + on devices with limited resources. 
Create one at the top level and pass it down. + +### Lazy loading + +Load models on-demand rather than all at startup: + +```kotlin +// rememberModelInstance loads asynchronously and returns null while loading +val model = rememberModelInstance(modelLoader, "models/character.glb") + +Scene(/* ... */) { + if (model != null) { + ModelNode(modelInstance = model) + } else { + // Show a placeholder while loading + CubeNode(materialInstance = placeholderMaterial) + } +} +``` + +- `rememberModelInstance` handles async loading and main-thread marshalling correctly +- Show placeholder geometry (a simple cube or spinner) while the model loads +- For imperative code outside Compose, use `modelLoader.loadModelInstanceAsync` + +!!! warning "Threading: Filament calls must be on the main thread" + Never call `modelLoader.createModel*` or `materialLoader.*` from a background + coroutine. `rememberModelInstance` handles this automatically. For imperative + code, use `loadModelInstanceAsync`. + +--- + +## AR-Specific Optimization + +AR adds the camera feed and ARCore tracking to your frame budget, leaving less room +for rendering. + +### Camera frame processing + +`onSessionUpdated` runs **every single frame**. Any work you do here directly +impacts frame rate. + +```kotlin +ARScene( + // ... + onSessionUpdated = { session, frame -> + // FAST: read a cached value + val planes = frame.getUpdatedPlanes() + + // SLOW: don't do this! + // val bitmap = frame.acquireCameraImage().toBitmap() // allocation + conversion + } +) +``` + +!!! danger "No allocations in onSessionUpdated" + This callback runs 30-60 times per second. Allocating objects here causes GC + pressure and frame drops. Cache references outside the callback and reuse them. 
+ +**Rules for `onSessionUpdated`:** + +- Do not allocate objects (no `listOf`, `map`, `filter` on every frame) +- Cache plane and anchor references outside the callback +- If you need heavy processing (image analysis, ML), dispatch to a background + thread and read results on the next frame + +### Plane rendering + +The AR plane renderer draws detected surfaces with a semi-transparent overlay. +This causes **GPU overdraw** — especially problematic when multiple planes overlap. + +```kotlin +ARScene( + planeRenderer = planeRenderer, + // ... +) + +// Disable after the user has placed their object +LaunchedEffect(objectPlaced) { + if (objectPlaced) { + planeRenderer.isVisible = false // reduces overdraw + } +} +``` + +!!! tip "Disable planes after placement" + Once the user has placed an object, disable `planeRenderer`. This removes + overdraw from plane visualization and saves 1-2ms per frame on most devices. + +--- + +## Device Tiers + +Not all Android devices are equal. Adapt your scene complexity based on hardware +capability. + +| Tier | Example devices | Triangle budget | Post-processing | Shadows | +|---|---|---|---|---| +| **High** | Pixel 8 Pro, Samsung S24, OnePlus 12 | 200K triangles | Full (Bloom, SSAO, DoF) | 2 shadow-casting lights | +| **Mid** | Pixel 6a, Samsung A54, Pixel 7 | 100K triangles | Basic (Bloom only) | 1 shadow-casting light | +| **Low** | Older budget phones, 2GB RAM devices | 50K triangles | None | No shadows | + +### Detecting device tier + +```kotlin +fun getDeviceTier(context: Context): DeviceTier { + val activityManager = context.getSystemService(Context.ACTIVITY_SERVICE) + as ActivityManager + val memInfo = ActivityManager.MemoryInfo() + activityManager.getMemoryInfo(memInfo) + + val totalRamGb = memInfo.totalMem / (1024.0 * 1024.0 * 1024.0) + + return when { + totalRamGb >= 8.0 -> DeviceTier.HIGH + totalRamGb >= 4.0 -> DeviceTier.MID + else -> DeviceTier.LOW + } +} + +enum class DeviceTier { HIGH, MID, LOW } +``` + +!!! 
note "RAM is a proxy, not a guarantee" + Total RAM is a rough proxy for device capability. For more accurate tiering, + also consider GPU model (`GLES20.glGetString(GLES20.GL_RENDERER)`), Android + version, and the [Android Performance Tuner](https://developer.android.com/games/sdk/performance-tuner) + library. + +### Applying tiers + +```kotlin +val tier = remember { getDeviceTier(context) } + +Scene( + engine = engine, + modelLoader = modelLoader, + // ... +) { + // Adjust quality based on tier + val modelPath = when (tier) { + DeviceTier.HIGH -> "models/character_high.glb" + DeviceTier.MID -> "models/character_mid.glb" + DeviceTier.LOW -> "models/character_low.glb" + } + + val model = rememberModelInstance(modelLoader, modelPath) + if (model != null) { + ModelNode(modelInstance = model) + } +} +``` + +--- + +## Performance Checklist + +Use this checklist before shipping: + +- [ ] **Profile first** — measured with Android Studio Profiler, not guessing +- [ ] **Models** — all models under 100K triangles (per model), total scene under 200K +- [ ] **Textures** — KTX2 compressed, max 2048x2048, mipmaps enabled +- [ ] **File size** — GLB format, Draco compressed, under 10MB for interactive models +- [ ] **Draw calls** — under 100 per frame, static geometry merged +- [ ] **Lights** — 1 directional + IBL, max 2-3 additional point/spot lights +- [ ] **Shadows** — limited to 1-2 shadow-casting lights +- [ ] **Post-processing** — adapted to device tier, SSAO disabled on low-end +- [ ] **Compose** — no allocations in composition body, `remember` for stable refs +- [ ] **Engine** — single shared Engine, ModelLoader, and MaterialLoader +- [ ] **AR callbacks** — no allocations in `onSessionUpdated` +- [ ] **Plane renderer** — disabled after object placement +- [ ] **Device tiers** — different asset quality levels for high/mid/low devices diff --git a/docs/docs/quickstart.md b/docs/docs/quickstart.md new file mode 100644 index 000000000..a067be7f8 --- /dev/null +++ 
b/docs/docs/quickstart.md @@ -0,0 +1,134 @@ +# Quickstart + +**Time:** ~10 minutes | +**Goal:** Go from an empty Android Studio project to a 3D model you can orbit with touch gestures. + +--- + +## Prerequisites + +- **Android Studio Ladybug** (2024.2.1) or newer +- An Android device or emulator running **API 24+** +- Basic familiarity with Kotlin and Jetpack Compose + +--- + +## Step 1: Create a new project + +1. Open Android Studio and select **New Project**. +2. Choose the **Empty Activity** template (the one that generates a `ComponentActivity` with `setContent`). +3. Set the minimum SDK to **API 24**. +4. Finish the wizard and let Gradle sync. + +You should have a working Compose app that displays "Hello Android" or similar. + +--- + +## Step 2: Add the dependency + +Open your **app-level** `build.gradle.kts` and add SceneView: + +```kotlin +dependencies { + implementation("io.github.sceneview:sceneview:3.2.0") +} +``` + +Sync Gradle. + +--- + +## Step 3: Add a 3D model + +You need a glTF/GLB file in the assets folder. The **Damaged Helmet** from Khronos is a good first model. + +1. Create the directory `app/src/main/assets/models/`. +2. Download the model: + - [DamagedHelmet.glb](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/DamagedHelmet/glTF-Binary/DamagedHelmet.glb) +3. Save it as `app/src/main/assets/models/damaged_helmet.glb`. + +!!! tip + Any `.glb` or `.gltf` file works. If you have your own model, drop it in the same folder and update the path in the next step. 
+ +--- + +## Step 4: Write the Scene composable + +Replace the contents of `MainActivity.kt` with the following: + +```kotlin +package com.example.my3dapp + +import android.os.Bundle +import androidx.activity.ComponentActivity +import androidx.activity.compose.setContent +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.ui.Modifier +import io.github.sceneview.Scene +import io.github.sceneview.rememberCameraManipulator +import io.github.sceneview.rememberEngine +import io.github.sceneview.rememberEnvironmentLoader +import io.github.sceneview.rememberModelInstance +import io.github.sceneview.rememberModelLoader + +class MainActivity : ComponentActivity() { + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + setContent { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val environmentLoader = rememberEnvironmentLoader(engine) + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator(), + ) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb") + ?.let { instance -> + ModelNode( + modelInstance = instance, + scaleToUnits = 1.0f, + autoAnimate = true, + ) + } + } + } + } +} +``` + +That is the entire app. Here is what each piece does: + +| Call | Purpose | +|---|---| +| `rememberEngine()` | Creates the Filament rendering engine (one per screen) | +| `rememberModelLoader(engine)` | Loads glTF/GLB models from assets | +| `rememberEnvironmentLoader(engine)` | Loads HDR environment maps for lighting | +| `rememberCameraManipulator()` | Adds built-in orbit, pan, and zoom touch gestures | +| `Scene { }` | The 3D viewport composable — nodes go inside the trailing lambda | +| `rememberModelInstance(...)` | Asynchronously loads a model; returns `null` until ready | +| `ModelNode(...)` | Places the loaded model in the scene | + +!!! 
warning "Always handle the null case" + `rememberModelInstance` returns `null` while the model is loading. The `?.let` pattern shown above is the idiomatic way to handle this. Do not force-unwrap with `!!`. + +--- + +## Step 5: Run it + +1. Click **Run** (or press `Shift+F10`). +2. After a brief loading moment, you will see the Damaged Helmet rendered in your viewport. +3. **Drag** to orbit around the model, **pinch** to zoom, and **two-finger drag** to pan. + +That is a production-quality, physically-based 3D viewer in under 30 lines of code. + +--- + +## Next steps + +- **Add HDR lighting** — Download a `.hdr` environment map and pass it via `rememberEnvironment(environmentLoader) { environmentLoader.createHDREnvironment("environments/sky_2k.hdr")!! }` to the `environment` parameter of `Scene`. +- **Try AR** — Follow the [AR Compose codelab](codelabs/codelab-ar-compose.md) to place models in the real world using `ARScene`. +- **Explore the samples** — The [samples page](samples.md) covers model animation, camera manipulation, cloud anchors, and more. +- **Browse the API** — See the full [API reference](https://sceneview.github.io/api/) for every composable, node type, and loader. diff --git a/docs/docs/recipes.md b/docs/docs/recipes.md new file mode 100644 index 000000000..0664e01de --- /dev/null +++ b/docs/docs/recipes.md @@ -0,0 +1,1168 @@ +# Recipes / Cookbook + +Copy-paste patterns for the most common SceneView tasks. +Every snippet targets **SceneView 3.2.0** and uses Jetpack Compose. + +--- + +## Loading & Display + +### Load a model from a URL + +`rememberModelInstance` accepts both **asset paths** and **https URLs**. +It returns `null` while the file downloads, so always handle the null case. 
+ +```kotlin +@Composable +fun RemoteModelScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader + ) { + val instance = rememberModelInstance( + modelLoader, + "https://example.com/models/robot.glb" + ) + instance?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } + } +} +``` + +### Load multiple models + +Call `rememberModelInstance` once per model. Each loads independently and appears +when ready. + +```kotlin +@Composable +fun MultiModelScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader + ) { + val helmet = rememberModelInstance(modelLoader, "models/damaged_helmet.glb") + val fox = rememberModelInstance(modelLoader, "models/Fox.glb") + + helmet?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + position = Position(x = -1f, z = -2f) + ) + } + fox?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + position = Position(x = 1f, z = -2f), + autoAnimate = true + ) + } + } +} +``` + +### Show a loading indicator while model loads + +`rememberModelInstance` returns `null` while loading. Use that to drive a Compose +overlay. 
+ +```kotlin +@Composable +fun ModelWithLoadingIndicator() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + val instance = rememberModelInstance(modelLoader, "models/large_scene.glb") + + Box(modifier = Modifier.fillMaxSize()) { + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader + ) { + instance?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } + } + + // Overlay a spinner while the model is still null + if (instance == null) { + CircularProgressIndicator( + modifier = Modifier.align(Alignment.Center) + ) + } + } +} +``` + +### Switch between models dynamically + +Change the asset path via Compose state. `rememberModelInstance` automatically +loads the new model when the path changes. + +```kotlin +private val models = listOf( + "models/damaged_helmet.glb" to 1.0f, + "models/Fox.glb" to 0.012f, +) + +@Composable +fun ModelSwitcherScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + var selectedIndex by remember { mutableIntStateOf(0) } + val (path, scale) = models[selectedIndex] + val instance = rememberModelInstance(modelLoader, path) + + Box(modifier = Modifier.fillMaxSize()) { + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader + ) { + instance?.let { + ModelNode(modelInstance = it, scaleToUnits = scale) + } + } + + Row( + modifier = Modifier + .align(Alignment.BottomCenter) + .padding(16.dp), + horizontalArrangement = Arrangement.spacedBy(8.dp) + ) { + models.forEachIndexed { index, (name, _) -> + FilterChip( + selected = index == selectedIndex, + onClick = { selectedIndex = index }, + label = { Text(name.substringAfterLast("/")) } + ) + } + } + } +} +``` + +--- + +## Animation + +### Auto-play all animations + +Set `autoAnimate = true` on `ModelNode`. All glTF animations play simultaneously. + +```kotlin +Scene(...) 
{ + rememberModelInstance(modelLoader, "models/Fox.glb")?.let { instance -> + ModelNode( + modelInstance = instance, + scaleToUnits = 1.0f, + autoAnimate = true + ) + } +} +``` + +### Play a specific animation by name + +Set `autoAnimate = false` and pass the animation name. The name must match one +defined in the glTF file. + +```kotlin +var currentAnimation by remember { mutableStateOf("Walk") } + +Scene(...) { + rememberModelInstance(modelLoader, "models/Fox.glb")?.let { instance -> + ModelNode( + modelInstance = instance, + scaleToUnits = 1.0f, + autoAnimate = false, + animationName = currentAnimation, + animationLoop = true, + animationSpeed = 1f + ) + } +} + +// Change currentAnimation to "Idle", "Run", etc. to switch animations. +``` + +### Loop an animation + +Set `animationLoop = true`. Works with both `autoAnimate` and named animations. + +```kotlin +Scene(...) { + rememberModelInstance(modelLoader, "models/Fox.glb")?.let { instance -> + ModelNode( + modelInstance = instance, + scaleToUnits = 1.0f, + autoAnimate = false, + animationName = "Walk", + animationLoop = true, + animationSpeed = 1.5f // 1.5x speed + ) + } +} +``` + +### Rotate a model continuously + +Use Compose's `rememberInfiniteTransition` with SceneView's `animateRotation` +extension, then apply the rotation via an `onFrame` callback or a parent node. 
+ +```kotlin +@Composable +fun SpinningModelScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + val centerNode = rememberNode(engine) + + val cameraNode = rememberCameraNode(engine) { + position = Position(y = -0.5f, z = 2.0f) + lookAt(centerNode) + centerNode.addChildNode(this) + } + + val transition = rememberInfiniteTransition(label = "Spin") + val rotation by transition.animateRotation( + initialValue = Rotation(y = 0f), + targetValue = Rotation(y = 360f), + animationSpec = infiniteRepeatable( + animation = tween(durationMillis = 7000) + ) + ) + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraNode = cameraNode, + onFrame = { + centerNode.rotation = rotation + cameraNode.lookAt(centerNode) + } + ) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } + } +} +``` + +### Animate position with Compose + +Drive a node's `position` from standard Compose animation APIs. + +```kotlin +@Composable +fun BouncingModelScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + val transition = rememberInfiniteTransition(label = "Bounce") + val yOffset by transition.animateFloat( + initialValue = 0f, + targetValue = 0.5f, + animationSpec = infiniteRepeatable( + animation = tween(1000, easing = FastOutSlowInEasing), + repeatMode = RepeatMode.Reverse + ), + label = "Y" + ) + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader + ) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + position = Position(y = yOffset, z = -2f) + ) + } + } +} +``` + +--- + +## Camera + +### Orbit camera with custom home position + +Use `rememberCameraManipulator` with `orbitHomePosition` and `targetPosition`. +The user can orbit, pan, and zoom. 
Double-tap resets to the home position. + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator( + orbitHomePosition = Position(x = 0f, y = 2f, z = 4f), + targetPosition = Position(x = 0f, y = 0f, z = 0f) + ) +) { + // nodes here +} +``` + +### Fixed camera looking at a point + +Use `rememberCameraNode` instead of a manipulator for a static viewpoint. + +```kotlin +val cameraNode = rememberCameraNode(engine) { + position = Position(x = 3f, y = 2f, z = 5f) + lookAt(Position(0f, 0f, 0f)) +} + +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraNode = cameraNode, + cameraManipulator = null // disable orbit gestures +) { + // nodes here +} +``` + +### Smooth camera transition + +Use `node.transform()` with `smooth = true` to animate the camera to a new +position. + +```kotlin +val cameraNode = rememberCameraNode(engine) { + position = Position(0f, 2f, 5f) + lookAt(Position(0f, 0f, 0f)) +} + +// Call this from a button click or any event: +fun flyToPosition(target: Position) { + cameraNode.transform( + position = target, + smooth = true, + smoothSpeed = 3f + ) +} +``` + +### Limit zoom range + +Create the manipulator with a custom builder to control zoom speed. +Combine with `editableScaleRange` on interactive nodes to limit pinch-to-zoom +on individual objects. 
+ +```kotlin +// On the scene level — control orbit camera zoom speed +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator( + orbitHomePosition = Position(0f, 1f, 3f), + targetPosition = Position(0f, 0f, 0f) + ) +) { + // On a per-node level — clamp pinch-to-scale range + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + isEditable = true, + apply = { editableScaleRange = 0.5f..2.0f } + ) + } +} +``` + +--- + +## Interaction + +### Tap to select a node + +Use `onGestureListener` with `onSingleTapConfirmed`. The `node` parameter is +the tapped node (or `null` if the user tapped empty space). + +```kotlin +var selectedNode by remember { mutableStateOf(null) } + +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + onGestureListener = rememberOnGestureListener( + onSingleTapConfirmed = { event, node -> + selectedNode = node?.name + } + ) +) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + isTouchable = true, + apply = { name = "helmet" } + ) + } +} +``` + +### Drag to move a node + +Set `isEditable = true` on the node. Single-finger drag moves the node in the +scene. + +```kotlin +Scene(...) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + isEditable = true // enables drag, pinch-scale, and two-finger rotate + ) + } +} +``` + +### Pinch to scale + +`isEditable = true` enables pinch-to-scale automatically. Use +`editableScaleRange` to clamp the allowed range. + +```kotlin +Scene(...) 
{ + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 0.5f, + isEditable = true, + apply = { + editableScaleRange = 0.2f..2.0f + } + ) + } +} +``` + +### Double-tap to reset + +Use the `onDoubleTap` gesture callback to reset a node's transform. + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + onGestureListener = rememberOnGestureListener( + onDoubleTap = { event, node -> + node?.apply { + position = Position(0f, 0f, -2f) + rotation = Rotation(0f) + scale = Scale(1f) + } + } + ) +) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + isEditable = true, + isTouchable = true + ) + } +} +``` + +### Long-press context menu + +Combine `onLongPress` with Compose state to show a dropdown or bottom sheet. + +```kotlin +var showMenu by remember { mutableStateOf(false) } +var menuNode by remember { mutableStateOf(null) } + +Box(modifier = Modifier.fillMaxSize()) { + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + onGestureListener = rememberOnGestureListener( + onLongPress = { event, node -> + node?.let { + menuNode = it.name + showMenu = true + } + } + ) + ) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + isTouchable = true, + apply = { name = "helmet" } + ) + } + } + + DropdownMenu(expanded = showMenu, onDismissRequest = { showMenu = false }) { + DropdownMenuItem(text = { Text("Delete ${menuNode}") }, onClick = { showMenu = false }) + DropdownMenuItem(text = { Text("Duplicate") }, onClick = { showMenu = false }) + } +} +``` + +--- + +## Lighting & Environment + +### HDR environment from assets + +Place your `.hdr` file in the `assets/environments/` folder. 
+ +```kotlin +val engine = rememberEngine() +val environmentLoader = rememberEnvironmentLoader(engine) + +val environment = rememberEnvironment(environmentLoader) { + environmentLoader.createHDREnvironment("environments/sky_2k.hdr")!! +} + +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = rememberModelLoader(engine), + environment = environment, + mainLightNode = rememberMainLightNode(engine) { intensity = 100_000f } +) { + // nodes here +} +``` + +### Dynamic time-of-day lighting + +Use `DynamicSkyNode` to simulate a sun that moves across the sky. + +```kotlin +var timeOfDay by remember { mutableFloatStateOf(14f) } // 0-24 + +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader +) { + DynamicSkyNode( + timeOfDay = timeOfDay, // 0=midnight, 6=sunrise, 12=noon, 18=sunset + turbidity = 2f, // atmospheric haze [1-10] + sunIntensity = 110_000f + ) + + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } +} + +// Drive timeOfDay from a Slider, animation, or system clock. +``` + +### Add fog + +Use `FogNode` with a `rememberView` reference. + +```kotlin +val engine = rememberEngine() +val view = rememberView(engine) + +var fogEnabled by remember { mutableStateOf(true) } + +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = rememberModelLoader(engine), + view = view +) { + FogNode( + view = view, + density = 0.05f, + height = 1.0f, + color = Color(0xFFCCDDFF), + enabled = fogEnabled + ) + + // scene content... +} +``` + +### Multiple lights in a scene + +Combine the main directional light with additional point or spot lights using +`LightNode`. Remember: `apply` is a **named parameter**, not a trailing lambda. 
+ +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + mainLightNode = rememberMainLightNode(engine) { intensity = 50_000f } +) { + // Warm point light on the left + LightNode( + type = LightManager.Type.POINT, + position = Position(x = -2f, y = 2f, z = 0f), + apply = { + color(1.0f, 0.8f, 0.6f) + intensity(80_000f) + falloff(10.0f) + } + ) + + // Cool point light on the right + LightNode( + type = LightManager.Type.POINT, + position = Position(x = 2f, y = 2f, z = 0f), + apply = { + color(0.6f, 0.8f, 1.0f) + intensity(80_000f) + falloff(10.0f) + } + ) + + // Spot light from above + LightNode( + type = LightManager.Type.FOCUSED_SPOT, + position = Position(y = 3f), + apply = { + intensity(100_000f) + falloff(8.0f) + castShadows(true) + } + ) + + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } +} +``` + +--- + +## AR Patterns + +### Tap-to-place on a plane + +Tap the screen to place a model on a detected AR plane. 
+
+```kotlin
+@Composable
+fun TapToPlaceScreen() {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+
+    var anchor by remember { mutableStateOf<Anchor?>(null) }
+    var frame by remember { mutableStateOf<Frame?>(null) }
+
+    val instance = rememberModelInstance(modelLoader, "models/damaged_helmet.glb")
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        engine = engine,
+        modelLoader = modelLoader,
+        planeRenderer = true,
+        sessionConfiguration = { session, config ->
+            config.depthMode = Config.DepthMode.AUTOMATIC
+            config.instantPlacementMode = Config.InstantPlacementMode.LOCAL_Y_UP
+            config.lightEstimationMode = Config.LightEstimationMode.ENVIRONMENTAL_HDR
+        },
+        onSessionUpdated = { _, updatedFrame -> frame = updatedFrame },
+        onGestureListener = rememberOnGestureListener(
+            onSingleTapConfirmed = { event, node ->
+                if (node == null) {
+                    frame?.hitTest(event.x, event.y)
+                        ?.firstOrNull { it.isValid(depthPoint = false, point = false) }
+                        ?.createAnchorOrNull()
+                        ?.let { anchor = it }
+                }
+            }
+        )
+    ) {
+        anchor?.let { a ->
+            AnchorNode(anchor = a) {
+                instance?.let {
+                    ModelNode(
+                        modelInstance = it,
+                        scaleToUnits = 0.5f,
+                        isEditable = true
+                    )
+                }
+            }
+        }
+    }
+}
+```
+
+### Place multiple objects
+
+Store a list of anchors. Each tap adds a new anchor with its own model. 
+
+```kotlin
+@Composable
+fun MultiPlaceScreen() {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+
+    var anchors by remember { mutableStateOf(listOf<Anchor>()) }
+    var frame by remember { mutableStateOf<Frame?>(null) }
+
+    val instance = rememberModelInstance(modelLoader, "models/damaged_helmet.glb")
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        engine = engine,
+        modelLoader = modelLoader,
+        planeRenderer = true,
+        sessionConfiguration = { session, config ->
+            config.instantPlacementMode = Config.InstantPlacementMode.LOCAL_Y_UP
+            config.lightEstimationMode = Config.LightEstimationMode.ENVIRONMENTAL_HDR
+        },
+        onSessionUpdated = { _, updatedFrame -> frame = updatedFrame },
+        onGestureListener = rememberOnGestureListener(
+            onSingleTapConfirmed = { event, node ->
+                if (node == null) {
+                    frame?.hitTest(event.x, event.y)
+                        ?.firstOrNull { it.isValid(depthPoint = false, point = false) }
+                        ?.createAnchorOrNull()
+                        ?.let { anchors = anchors + it }
+                }
+            }
+        )
+    ) {
+        anchors.forEach { a ->
+            AnchorNode(anchor = a) {
+                instance?.let {
+                    ModelNode(
+                        modelInstance = it,
+                        scaleToUnits = 0.3f,
+                        isEditable = true
+                    )
+                }
+            }
+        }
+    }
+}
+```
+
+### Show a reticle cursor
+
+Use `HitResultNode` with the screen center coordinates to show a cursor that
+tracks the detected surface. 
+
+```kotlin
+@Composable
+fun ReticleScreen() {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+    val materialLoader = rememberMaterialLoader(engine)
+    val view = LocalView.current
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        engine = engine,
+        modelLoader = modelLoader,
+        materialLoader = materialLoader,
+        planeRenderer = true,
+        sessionConfiguration = { _, config ->
+            config.lightEstimationMode = Config.LightEstimationMode.ENVIRONMENTAL_HDR
+        }
+    ) {
+        HitResultNode(
+            xPx = view.width / 2f,
+            yPx = view.height / 2f
+        ) {
+            SphereNode(radius = 0.02f)
+        }
+    }
+}
+```
+
+### Track a real-world image
+
+Use `AugmentedImageNode` to overlay 3D content on a detected real-world image.
+
+```kotlin
+@Composable
+fun ImageTrackingScreen() {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+    val context = LocalContext.current
+
+    var augmentedImages by remember {
+        mutableStateOf<Map<String, AugmentedImage>>(emptyMap())
+    }
+
+    val instance = rememberModelInstance(modelLoader, "models/rabbit.glb")
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        engine = engine,
+        modelLoader = modelLoader,
+        sessionConfiguration = { session, config ->
+            config.addAugmentedImage(
+                session,
+                "target",
+                context.assets.open("augmentedimages/target.jpg")
+                    .use(BitmapFactory::decodeStream)
+            )
+        },
+        onSessionUpdated = { _, frame ->
+            frame.getUpdatedAugmentedImages().forEach { image ->
+                augmentedImages = augmentedImages.toMutableMap().apply {
+                    this[image.name] = image
+                }
+            }
+        }
+    ) {
+        augmentedImages.values.forEach { image ->
+            AugmentedImageNode(augmentedImage = image) {
+                instance?.let {
+                    ModelNode(
+                        modelInstance = it,
+                        scaleToUnits = image.extentX
+                    )
+                }
+            }
+        }
+    }
+}
+```
+
+### Face filter with front camera
+
+Use the front camera with `AugmentedFaceNode` for face mesh effects. 
+
+```kotlin
+@Composable
+fun FaceFilterScreen() {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+    val materialLoader = rememberMaterialLoader(engine)
+
+    var trackedFaces by remember {
+        mutableStateOf(listOf<AugmentedFace>())
+    }
+
+    val faceMaterial = remember(materialLoader) {
+        materialLoader.createColorInstance(
+            colorOf(r = 0.5f, g = 0.8f, b = 1.0f, a = 0.4f)
+        )
+    }
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        engine = engine,
+        modelLoader = modelLoader,
+        materialLoader = materialLoader,
+        sessionFeatures = setOf(Session.Feature.FRONT_CAMERA),
+        sessionConfiguration = { _, config ->
+            config.augmentedFaceMode = Config.AugmentedFaceMode.MESH3D
+        },
+        onSessionUpdated = { session, _ ->
+            trackedFaces = session.getAllTrackables(AugmentedFace::class.java)
+                .filter { it.trackingState == TrackingState.TRACKING }
+        }
+    ) {
+        trackedFaces.forEach { face ->
+            AugmentedFaceNode(
+                augmentedFace = face,
+                meshMaterialInstance = faceMaterial
+            )
+        }
+    }
+}
+```
+
+---
+
+## Layout & Composition
+
+### 3D viewer in a scrollable list
+
+Wrap the `Scene` in a fixed-height container inside a `LazyColumn`. 
+
+```kotlin
+@Composable
+fun ProductListScreen(products: List<Product>) {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+
+    LazyColumn(modifier = Modifier.fillMaxSize()) {
+        items(products) { product ->
+            Card(
+                modifier = Modifier
+                    .fillMaxWidth()
+                    .padding(16.dp)
+            ) {
+                Column {
+                    val instance = rememberModelInstance(modelLoader, product.modelPath)
+                    Scene(
+                        modifier = Modifier
+                            .fillMaxWidth()
+                            .height(250.dp),
+                        engine = engine,
+                        modelLoader = modelLoader,
+                        cameraManipulator = rememberCameraManipulator(
+                            orbitHomePosition = Position(0f, 0.5f, 2f),
+                            targetPosition = Position(0f)
+                        )
+                    ) {
+                        instance?.let {
+                            ModelNode(
+                                modelInstance = it,
+                                scaleToUnits = 1.0f
+                            )
+                        }
+                    }
+                    Text(
+                        product.name,
+                        modifier = Modifier.padding(16.dp),
+                        style = MaterialTheme.typography.titleMedium
+                    )
+                }
+            }
+        }
+    }
+}
+```
+
+### Split screen: 3D + Compose UI
+
+Use a `Column` or `Row` to place the 3D viewport alongside regular Compose UI.
+
+```kotlin
+@Composable
+fun SplitScreen() {
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+    var scale by remember { mutableFloatStateOf(1.0f) }
+
+    Column(modifier = Modifier.fillMaxSize()) {
+        // Top half: 3D scene
+        Scene(
+            modifier = Modifier
+                .fillMaxWidth()
+                .weight(1f),
+            engine = engine,
+            modelLoader = modelLoader
+        ) {
+            rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let {
+                ModelNode(modelInstance = it, scaleToUnits = scale)
+            }
+        }
+
+        // Bottom half: Controls
+        Column(
+            modifier = Modifier
+                .fillMaxWidth()
+                .weight(1f)
+                .padding(16.dp)
+        ) {
+            Text("Scale: %.1f".format(scale))
+            Slider(
+                value = scale,
+                onValueChange = { scale = it },
+                valueRange = 0.1f..3.0f
+            )
+        }
+    }
+}
+```
+
+### Overlay Compose UI on 3D scene
+
+Use a `Box` to layer Compose widgets on top of the `Scene`. 
+ +```kotlin +@Composable +fun OverlayScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + + Box(modifier = Modifier.fillMaxSize()) { + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader + ) { + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } + } + + // Floating action button overlay + FloatingActionButton( + onClick = { /* action */ }, + modifier = Modifier + .align(Alignment.BottomEnd) + .padding(16.dp) + ) { + Icon(Icons.Default.Add, contentDescription = "Add") + } + + // Top status bar overlay + Surface( + modifier = Modifier + .align(Alignment.TopCenter) + .padding(top = 48.dp), + color = Color.Black.copy(alpha = 0.5f), + shape = RoundedCornerShape(50) + ) { + Text( + "Model Viewer", + color = Color.White, + modifier = Modifier.padding(horizontal = 20.dp, vertical = 8.dp) + ) + } + } +} +``` + +### ViewNode: Compose inside 3D space + +Use `ViewNode` to render Compose UI as a texture mapped onto a plane in the 3D +scene. 
+ +```kotlin +@Composable +fun ViewNodeScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val windowManager = rememberViewNodeManager() + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + viewNodeWindowManager = windowManager + ) { + ViewNode(windowManager = windowManager) { + Card( + modifier = Modifier.padding(8.dp), + colors = CardDefaults.cardColors( + containerColor = Color.White.copy(alpha = 0.9f) + ) + ) { + Column(modifier = Modifier.padding(16.dp)) { + Text("Hello 3D World!", style = MaterialTheme.typography.titleLarge) + Text("This is a Compose Card rendered in 3D space.") + } + } + } + + rememberModelInstance(modelLoader, "models/damaged_helmet.glb")?.let { + ModelNode( + modelInstance = it, + scaleToUnits = 1.0f, + position = Position(x = 1f) + ) + } + } +} +``` + +--- + +## Materials + +### Create a solid color material + +Use `materialLoader.createColorInstance()` to create a material with a flat +color. Use `colorOf()` to convert from Compose `Color`. + +```kotlin +val engine = rememberEngine() +val materialLoader = rememberMaterialLoader(engine) + +val redMaterial = remember(materialLoader) { + materialLoader.createColorInstance(colorOf(Color.Red)) +} + +val customMaterial = remember(materialLoader) { + materialLoader.createColorInstance( + colorOf(r = 0.2f, g = 0.6f, b = 1.0f, a = 1.0f) + ) +} +``` + +### Apply a material to geometry nodes + +Pass the `materialInstance` to any geometry node: `CubeNode`, `SphereNode`, +`CylinderNode`, `PlaneNode`, `LineNode`, or `PathNode`. 
+ +```kotlin +@Composable +fun MaterialDemoScreen() { + val engine = rememberEngine() + val materialLoader = rememberMaterialLoader(engine) + + val redMaterial = remember(materialLoader) { + materialLoader.createColorInstance(colorOf(Color.Red)) + } + val blueMaterial = remember(materialLoader) { + materialLoader.createColorInstance(colorOf(Color.Blue)) + } + val greenMaterial = remember(materialLoader) { + materialLoader.createColorInstance(colorOf(Color.Green)) + } + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = rememberModelLoader(engine), + materialLoader = materialLoader + ) { + CubeNode( + size = Size(0.5f, 0.5f, 0.5f), + materialInstance = redMaterial, + position = Position(x = -1f, z = -2f) + ) + SphereNode( + radius = 0.3f, + materialInstance = blueMaterial, + position = Position(x = 0f, z = -2f) + ) + CylinderNode( + radius = 0.2f, + height = 0.8f, + materialInstance = greenMaterial, + position = Position(x = 1f, z = -2f) + ) + PlaneNode( + size = Size(5f, 5f), + materialInstance = remember(materialLoader) { + materialLoader.createColorInstance(colorOf(rgb = 0.3f)) + }, + position = Position(y = -0.5f) + ) + } +} +``` diff --git a/docs/docs/samples.md b/docs/docs/samples.md new file mode 100644 index 000000000..72fdc7561 --- /dev/null +++ b/docs/docs/samples.md @@ -0,0 +1,179 @@ +# Samples + +15 working sample apps ship with the repository. Clone and run them to see SceneView in action. + +```bash +git clone https://github.com/SceneView/sceneview-android.git +``` + +Open in Android Studio, select a sample module, and run on a device or emulator. + +--- + +## 3D samples + +### Model Viewer + +![Model Viewer](screenshots/model-viewer.png){ width=320 } + +Load a glTF/GLB model with HDR environment lighting, orbit camera, and animation playback controls. 
+ +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + cameraManipulator = rememberCameraManipulator(), + environment = environment +) { + rememberModelInstance(modelLoader, "models/helmet.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f, autoAnimate = true) + } +} +``` + +**Demonstrates:** `ModelNode`, `rememberModelInstance`, `rememberCameraManipulator`, HDR environments, animation controls + +--- + +### Camera Manipulator + +![Camera Manipulator](screenshots/camera-manipulator.png){ width=320 } + +Orbit, pan, and zoom with gesture hints. + +**Demonstrates:** `CameraManipulator`, one-finger orbit, pinch-zoom, two-finger pan + +--- + +### glTF Camera + +![glTF Camera](screenshots/gltf-camera.png){ width=320 } + +Use cameras defined inside a glTF file. The scene animates between camera viewpoints imported from Blender. + +**Demonstrates:** `CameraNode`, glTF-embedded cameras, animated transitions + +--- + +### Dynamic Sky + +Time-of-day sun positioning with turbidity and fog controls. Sunrise, noon, sunset — all driven by a Compose slider. + +**Demonstrates:** `DynamicSkyNode`, `FogNode`, reactive lighting from Compose state + +--- + +### Reflection Probe + +Metallic surfaces with local cubemap reflections that override the global environment. + +**Demonstrates:** `ReflectionProbeNode`, IBL override, metallic materials + +--- + +### Physics Demo + +Tap the screen to throw balls. They bounce off the floor and each other with rigid body physics. + +**Demonstrates:** `PhysicsNode`, gravity, collision detection, tap-to-throw interaction + +--- + +### Post-Processing + +Toggle visual effects: bloom, depth-of-field, SSAO, and fog. See the difference each makes. + +**Demonstrates:** Filament post-processing pipeline, `View` options, composable toggles + +--- + +### Line & Path + +3D line drawing, axis gizmos, spiral curves, and animated sine-wave paths. 
+ +**Demonstrates:** `LineNode`, `PathNode`, `updateGeometry()`, GPU line primitives + +--- + +### Text Labels + +Camera-facing text labels floating above 3D spheres. Tap to cycle text. + +**Demonstrates:** `TextNode`, `BillboardNode`, Canvas-rendered text in 3D space + +--- + +### Autopilot Demo + +A fully autonomous 3D scene — no user interaction needed. Compose state drives everything. + +![Autopilot Demo](screenshots/autopilot-demo.png){ width=320 } + +**Demonstrates:** Pure state-driven 3D, `rememberInfiniteTransition`, automatic animation + +--- + +## AR samples + +### AR Model Viewer + +![AR Model Viewer](screenshots/ar-model-viewer.png){ width=320 } + +Tap to place a 3D model on a detected plane. Pinch to scale, drag to move, two-finger rotate. Multiple models supported. + +```kotlin +ARScene( + planeRenderer = true, + onSessionUpdated = { _, frame -> + // Detect planes and create anchors + } +) { + anchor?.let { a -> + AnchorNode(anchor = a) { + ModelNode(modelInstance = model, scaleToUnits = 0.5f) + } + } +} +``` + +**Demonstrates:** `ARScene`, `AnchorNode`, `ModelNode` gestures, plane detection, persistent plane mesh + +--- + +### AR Augmented Image + +![AR Augmented Image](screenshots/ar-augmented-image.png){ width=320 } + +Detect a real-world image and overlay 3D content on it. + +**Demonstrates:** `AugmentedImageNode`, `AugmentedImageDatabase`, image tracking, video overlay + +--- + +### AR Cloud Anchor + +Host an anchor to Google Cloud and resolve it on another device. Cross-device AR persistence. + +**Demonstrates:** `CloudAnchorNode`, anchor hosting/resolving, `CloudAnchorState` + +--- + +### AR Point Cloud + +![AR Point Cloud](screenshots/ar-point-cloud.png){ width=320 } + +Visualize ARCore's feature points as a real-time point cloud. 
+ +**Demonstrates:** ARCore feature points, point cloud rendering, `onSessionUpdated` + +--- + +## Running AR samples + +AR samples require: + +- A physical device with [ARCore support](https://developers.google.com/ar/devices) +- ARCore (Google Play Services for AR) installed from Play Store +- Camera permission granted + +!!! tip + For best AR tracking, use a well-lit environment with textured surfaces (wood tables, carpet — not glass or plain white surfaces). diff --git a/docs/docs/screenshots/ar-augmented-image.png b/docs/docs/screenshots/ar-augmented-image.png new file mode 100644 index 000000000..c1adbd975 Binary files /dev/null and b/docs/docs/screenshots/ar-augmented-image.png differ diff --git a/docs/docs/screenshots/ar-model-viewer.png b/docs/docs/screenshots/ar-model-viewer.png new file mode 100644 index 000000000..bfc284664 Binary files /dev/null and b/docs/docs/screenshots/ar-model-viewer.png differ diff --git a/docs/docs/screenshots/ar-point-cloud.png b/docs/docs/screenshots/ar-point-cloud.png new file mode 100644 index 000000000..20c69c8e2 Binary files /dev/null and b/docs/docs/screenshots/ar-point-cloud.png differ diff --git a/docs/docs/screenshots/autopilot-demo.png b/docs/docs/screenshots/autopilot-demo.png new file mode 100644 index 000000000..c130c17c2 Binary files /dev/null and b/docs/docs/screenshots/autopilot-demo.png differ diff --git a/docs/docs/screenshots/camera-manipulator.png b/docs/docs/screenshots/camera-manipulator.png new file mode 100644 index 000000000..a9eb01a26 Binary files /dev/null and b/docs/docs/screenshots/camera-manipulator.png differ diff --git a/docs/docs/screenshots/gltf-camera.png b/docs/docs/screenshots/gltf-camera.png new file mode 100644 index 000000000..7d944f87e Binary files /dev/null and b/docs/docs/screenshots/gltf-camera.png differ diff --git a/docs/docs/screenshots/model-viewer.png b/docs/docs/screenshots/model-viewer.png new file mode 100644 index 000000000..07994f868 Binary files /dev/null and 
b/docs/docs/screenshots/model-viewer.png differ diff --git a/docs/docs/showcase.md b/docs/docs/showcase.md index b3193b67a..be391c1b8 100644 --- a/docs/docs/showcase.md +++ b/docs/docs/showcase.md @@ -1,122 +1,184 @@ -# Showcase +# Why SceneView -Real apps and demos built with SceneView — all running on Jetpack Compose. +SceneView is the only actively maintained, Compose-native 3D and AR library for Kotlin. +It delivers Filament's physically-based rendering and ARCore's full AR capabilities through +a declarative API that developers already understand — and v4.0 extends that to XR headsets +and iOS via Kotlin Multiplatform. --- -## 3D Scenes +## The pitch in 10 seconds -
+```kotlin +Scene(modifier = Modifier.fillMaxSize()) { + ModelNode(modelInstance = helmet, scaleToUnits = 1.0f, autoAnimate = true) + LightNode(type = LightManager.Type.SUN, apply = { intensity(100_000.0f) }) +} +``` + +That's a production-quality 3D viewer. Five lines. Same Kotlin you write every day. -
-![3D Model Viewer](assets/images/showcase-3d-model-viewer.svg) +--- + +## What makes SceneView different + +### Compose-native — not a wrapper -### Model Viewer +60% of the top 1,000 Play Store apps use Jetpack Compose. SceneView's scene graph **is** the +Compose tree. The Compose runtime owns it. -Load any glTF/GLB model with HDR environment lighting. Orbit camera with drag, pinch-to-zoom, and double-tap-to-scale gestures. Async model loading with `rememberModelInstance`. +- `if/else` controls whether nodes exist +- `State` drives animations, positions, visibility +- `LaunchedEffect` and `DisposableEffect` work inside scenes +- Nesting nodes = nesting `Column { Row { Text() } }` + +### Zero boilerplate lifecycle ```kotlin -Scene(modifier = Modifier.fillMaxSize()) { - rememberModelInstance(modelLoader, "helmet.glb")?.let { - ModelNode(modelInstance = it, scaleToUnits = 1.0f) - } -} +val engine = rememberEngine() +val modelLoader = rememberModelLoader(engine) +val model = rememberModelInstance(modelLoader, "models/product.glb") +// All resources destroyed automatically when composable leaves the tree ``` -[:octicons-code-24: View source](https://github.com/SceneView/sceneview-android/tree/main/samples/model-viewer){ .showcase-link } -
+No `onPause`/`onResume` dance. No `destroy()` calls. No leaked Filament objects. -
-![Autopilot Demo](assets/images/showcase-autopilot.svg) +### Thread safety by default -### Autopilot HUD +Filament requires all JNI calls on the main thread. `rememberModelInstance` handles the +IO-to-main-thread transition automatically. You never think about it. -Full autonomous driving interface — road, lane markers, speed HUD, object detection panel — built entirely with `CubeNode`, `PlaneNode`, `ViewNode` and Compose state. Zero model files. +### Gesture handling built in -Demonstrates how SceneView can power data-driven 3D dashboards and HUDs with reactive state updates. +`ModelNode` supports pinch-to-scale, drag-to-rotate, and two-finger-rotate out of the box. +Orbit camera in one line: -[:octicons-code-24: View source](https://github.com/SceneView/sceneview-android/tree/main/samples/autopilot-demo){ .showcase-link } -
+```kotlin +Scene(cameraManipulator = rememberCameraManipulator()) { ... } +``` -
+### Multi-platform trajectory + +v4.0 brings Kotlin Multiplatform support (iOS via Filament Metal backend) and `XRScene` for +spatial computing headsets. One composable API across all target platforms. + +### AI-assisted development + +SceneView ships with an MCP server (`@sceneview/mcp`) and a machine-readable `llms.txt` API +reference. Claude, Cursor, and other AI tools always have the current API — no hallucinated +methods, no outdated patterns. --- -## AR Experiences +## 26+ composable node types -
+| Category | Nodes | +|---|---| +| **Models** | `ModelNode` — glTF/GLB with animations, gestures, scaling | +| **Geometry** | `CubeNode`, `SphereNode`, `CylinderNode`, `PlaneNode` — no asset files needed | +| **Lighting** | `LightNode` (sun, point, spot, directional), `DynamicSkyNode`, `ReflectionProbeNode` | +| **Atmosphere** | `FogNode` — distance/height fog driven by Compose state | +| **Media** | `ImageNode`, `VideoNode` (with chromakey), `ViewNode` (any Composable in 3D) | +| **Text** | `TextNode`, `BillboardNode` — camera-facing labels and UI callouts | +| **Drawing** | `LineNode`, `PathNode` — 3D polylines, measurements, animated paths | +| **Physics** | `PhysicsNode` — rigid body simulation, collision, gravity | +| **AR** | `AnchorNode`, `HitResultNode`, `AugmentedImageNode`, `AugmentedFaceNode`, `CloudAnchorNode`, `StreetscapeGeometryNode` | +| **XR** | `XRScene` — spatial computing with the same composable API | +| **Structure** | `Node` (grouping/pivots), `CameraNode`, `MeshNode` | -
-![AR Tap-to-Place](assets/images/showcase-ar-model-viewer.svg) +--- + +## Production rendering — Google Filament + +Built on [Filament](https://github.com/google/filament), the same physically-based +rendering engine used inside Google Search and Google Play Store. -### AR Tap-to-Place +- Physically-based rendering (PBR) with metallic/roughness workflow +- HDR environment lighting (IBL) from `.hdr` and `.ktx` files +- Dynamic shadows, reflections, ambient occlusion +- Post-processing: bloom, depth-of-field, SSAO, fog +- 60fps on mid-range devices + +--- -Detect horizontal surfaces with ARCore, tap to create an anchor, and place a 3D model. Supports model picker, pinch-to-scale, and drag-to-rotate gestures. +## Full ARCore integration ```kotlin -ARScene(planeRenderer = true, onSessionUpdated = { _, frame -> - anchor = frame.getUpdatedPlanes().firstOrNull() - ?.let { frame.createAnchorOrNull(it.centerPose) } -}) { - anchor?.let { AnchorNode(it) { ModelNode(instance) } } +ARScene( + planeRenderer = true, + onSessionUpdated = { _, frame -> + anchor = frame.getUpdatedPlanes() + .firstOrNull { it.type == Plane.Type.HORIZONTAL_UPWARD_FACING } + ?.let { frame.createAnchorOrNull(it.centerPose) } + } +) { + anchor?.let { a -> + AnchorNode(anchor = a) { + ModelNode(modelInstance = sofa, scaleToUnits = 0.5f) + } + } } ``` -[:octicons-code-24: View source](https://github.com/SceneView/sceneview-android/tree/main/samples/ar-model-viewer){ .showcase-link } -
- -
-![Augmented Image](assets/images/showcase-ar-augmented-image.svg) - -### Augmented Image +- Plane detection (horizontal + vertical) with persistent mesh rendering +- Image detection and tracking (`AugmentedImageNode`) +- Face mesh tracking and augmentation (`AugmentedFaceNode`) +- Cloud anchors for cross-device persistence (`CloudAnchorNode`) +- Environmental HDR — real-world light estimation +- Streetscape geometry — city-scale 3D building meshes +- Geospatial API support — place content at lat/long coordinates -Detect real-world printed images and overlay interactive 3D content. Use cases: product catalogs, educational materials, AR business cards, museum exhibits. +--- -ARCore tracks the image in real-time and `AugmentedImageNode` keeps the 3D content anchored to it. +## Real-world use cases -[:octicons-code-24: View source](https://github.com/SceneView/sceneview-android/tree/main/samples/ar-augmented-image){ .showcase-link } -
+### E-commerce: product viewer in 10 lines -
-![Cloud Anchor](assets/images/showcase-ar-cloud-anchor.svg) +Replace a static `Image()` with a `Scene {}` on your product detail page: -### Cloud Anchors +```kotlin +Scene( + modifier = Modifier.fillMaxWidth().height(300.dp), + cameraManipulator = rememberCameraManipulator() +) { + rememberModelInstance(modelLoader, "models/shoe.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } +} +``` -Persistent, cross-device AR anchors powered by Google Cloud. Place a 3D object, share the anchor ID, and another device can resolve and see the same object in the same location. +### Furniture & interior design -Perfect for collaborative AR, shared installations, and multiplayer experiences. +Let customers see how a sofa looks in their living room. Tap to place, pinch to resize, +rotate with two fingers. Compose UI floats alongside via `ViewNode`. -[:octicons-code-24: View source](https://github.com/SceneView/sceneview-android/tree/main/samples/ar-cloud-anchor){ .showcase-link } -
+### Education & training -
+Interactive 3D anatomy models, molecular structures, mechanical assemblies — controlled +by standard Compose sliders, buttons, and state. ---- +### Data visualization -## Node types available +3D bar charts, globes, network graphs. The data is Compose `State` — update it and +the visualization reacts instantly. -SceneView 3.2.0 ships with a rich library of composable node types: +### Social & communication -| Node | Description | -|---|---| -| `ModelNode` | glTF/GLB model with animations, gestures, PBR materials | -| `LightNode` | Directional, point, spot, sun lights | -| `CubeNode` / `SphereNode` / `CylinderNode` / `PlaneNode` | Built-in geometry primitives | -| `ImageNode` | Image rendered on a 3D plane | -| `ViewNode` | Any Compose UI as a 3D billboard | -| `TextNode` | 3D text geometry | -| `LineNode` / `PathNode` | 3D polylines and paths | -| `BillboardNode` | Always faces the camera (labels, tooltips) | -| `PhysicsNode` | Rigid body with gravity and collisions | -| `DynamicSkyNode` | Time-of-day sun position | -| `FogNode` | Distance/height fog | -| `ReflectionProbeNode` | Zone-based IBL reflections | -| `AnchorNode` | AR world anchor (ARScene only) | -| `AugmentedImageNode` | AR image tracking (ARScene only) | -| `CloudAnchorNode` | Persistent cross-device anchor (ARScene only) | +`AugmentedFaceNode` for face filters and effects. Apply materials to the face mesh, +attach 3D objects to landmarks. Front-camera AR. --- -## Build something? +## The numbers -We'd love to feature your project here. [Open a PR](https://github.com/SceneView/sceneview-android/pulls) or share it on [Discord](https://discord.gg/UbNDDBTNqb). 
+| Metric | Value | +|---|---| +| **Node types** | 26+ composable nodes | +| **Rendering** | Google Filament — PBR, 60fps mobile | +| **AR backend** | ARCore — latest features | +| **Platforms** | Android today, XR + iOS in v4.0 | +| **Setup** | 1 Gradle line, 0 XML | +| **Model viewer** | ~5 lines of Kotlin | +| **AR placement** | ~15 lines of Kotlin | +| **APK size impact** | ~5 MB | +| **License** | Apache 2.0 | diff --git a/docs/docs/stylesheets/extra.css b/docs/docs/stylesheets/extra.css index ab15d5830..b36976fbf 100644 --- a/docs/docs/stylesheets/extra.css +++ b/docs/docs/stylesheets/extra.css @@ -5,129 +5,58 @@ --md-primary-fg-color--light: #9c65da; --md-primary-fg-color--dark: #4a1d8a; --md-accent-fg-color: #9c27b0; - --sv-glow: rgba(108, 53, 170, 0.25); } -/* ─── Hero section ─────────────────────────────────────────────────── */ - -.hero-section img { - width: 100%; - max-width: 1100px; - display: block; - margin: 0 auto 1rem; - border-radius: 16px; - box-shadow: 0 8px 40px rgba(108, 53, 170, 0.2); -} - -.hero-tagline { - text-align: center; - max-width: 700px; - margin: 0 auto 2rem; -} - -.hero-tagline h2 { - font-size: 2.4rem; - background: linear-gradient(135deg, #6c35aa, #e040fb, #00E5FF); - -webkit-background-clip: text; - -webkit-text-fill-color: transparent; - background-clip: text; - margin-bottom: 0.5rem; -} - -/* Remove codelab step counter from hero and showcase pages */ -.hero-tagline h2::before, -.showcase-grid h3::before { - display: none !important; -} - -/* ─── Showcase grid ────────────────────────────────────────────────── */ - -.showcase-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); - gap: 1.5rem; - margin: 2rem 0; -} - -.showcase-grid--full { - grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); +/* Force all inline emoji/icons to monochrome tinted with theme color */ +.md-typeset .twemoji svg, +.md-typeset .gemoji img, +.md-typeset .emojione svg, +.md-content .twemoji svg { + fill: 
currentColor; + filter: none; } -.showcase-card { - background: var(--md-default-bg-color); - border: 1px solid var(--md-default-fg-color--lightest); - border-radius: 16px; - overflow: hidden; - transition: transform 0.3s ease, box-shadow 0.3s ease; - padding-bottom: 1rem; -} - -.showcase-card:hover { - transform: translateY(-6px); - box-shadow: 0 12px 40px var(--sv-glow); -} - -.showcase-card img { - width: 100%; - max-height: 420px; - object-fit: contain; - background: linear-gradient(135deg, #0a0015, #1a0a30); - padding: 1rem; - display: block; +/* Grid card icons — tinted with theme primary */ +.grid.cards .twemoji svg { + fill: var(--md-primary-fg-color); + width: 2rem; + height: 2rem; } -.showcase-card p, -.showcase-card h3 { - padding: 0 1.2rem; +/* Buttons — icon inherits button text color */ +.md-button .twemoji svg { + fill: currentColor; } -.showcase-card h3 { - margin-top: 0.8rem; - color: var(--md-primary-fg-color); -} - -.showcase-card .highlight { - margin: 0 1rem; - border-radius: 8px; +/* Hero tagline on homepage */ +.hero-tagline { + font-size: 1.25rem; + color: var(--md-default-fg-color--light); + margin-top: -0.5rem; + margin-bottom: 1.5rem; } -.showcase-link { - display: inline-block; - margin: 0.5rem 1.2rem; +/* Footer note */ +.footer-note { font-size: 0.85rem; - font-weight: 600; - color: var(--md-primary-fg-color) !important; - text-decoration: none; - transition: color 0.2s; -} - -.showcase-link:hover { - color: var(--md-accent-fg-color) !important; + color: var(--md-default-fg-color--lighter); + text-align: center; + margin-top: 2rem; } -/* ─── Step numbers for codelab-style headers (only on codelab pages) ── */ - +/* Step numbers for codelab-style headers */ .md-content h2 { counter-increment: step; } -.md-content { - counter-reset: step; -} - /* Only show step counters on codelab pages */ -.md-content[data-md-component="content"] h2::before { - display: none; -} - -/* Codelab pages get step numbers via body class */ 
-body[data-md-color-scheme] .md-typeset .codelab-steps h2 { +.md-content[data-page="codelabs"] h2 { display: flex; align-items: center; gap: 12px; } -body[data-md-color-scheme] .md-typeset .codelab-steps h2::before { +.md-content[data-page="codelabs"] h2::before { content: counter(step); background: var(--md-primary-fg-color); color: white; @@ -142,14 +71,16 @@ body[data-md-color-scheme] .md-typeset .codelab-steps h2::before { flex-shrink: 0; } -/* ─── Code blocks ──────────────────────────────────────────────────── */ +.md-content { + counter-reset: step; +} +/* Code blocks */ .highlight code { font-size: 0.85rem; } -/* ─── Card grid (get started, roadmap teasers) ─────────────────────── */ - +/* Card grid */ .grid.cards > ul { display: grid; grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); @@ -160,55 +91,63 @@ body[data-md-color-scheme] .md-typeset .codelab-steps h2::before { .grid.cards > ul > li { border: 1px solid var(--md-default-fg-color--lightest); - border-radius: 12px; + border-radius: 8px; padding: 1.5rem; - transition: box-shadow 0.3s ease, transform 0.3s ease; + transition: box-shadow 0.2s; } .grid.cards > ul > li:hover { - box-shadow: 0 6px 24px var(--sv-glow); - transform: translateY(-3px); + box-shadow: 0 4px 16px rgba(108, 53, 170, 0.15); } -/* ─── Hero image centering ─────────────────────────────────────────── */ - +/* Hero image centering */ .md-content img[width] { display: block; margin: 0 auto 2rem; } -/* ─── Smooth animations on page load ───────────────────────────────── */ - -@keyframes fadeInUp { - from { opacity: 0; transform: translateY(20px); } - to { opacity: 1; transform: translateY(0); } +/* Feature matrix — highlight SceneView column */ +table th:nth-child(2), +table td:nth-child(2) { + font-weight: 500; } -.hero-section, -.hero-tagline { - animation: fadeInUp 0.6s ease-out; +/* Admonition question style for FAQ */ +.md-typeset .admonition.question { + border-color: var(--md-primary-fg-color); } -.showcase-card { - 
animation: fadeInUp 0.6s ease-out; - animation-fill-mode: both; +/* Cheatsheet — compact tables */ +.md-typeset table:not([class]) { + font-size: 0.88rem; } -.showcase-card:nth-child(1) { animation-delay: 0.1s; } -.showcase-card:nth-child(2) { animation-delay: 0.2s; } -.showcase-card:nth-child(3) { animation-delay: 0.3s; } -.showcase-card:nth-child(4) { animation-delay: 0.4s; } +/* Mobile: full-width code blocks */ +@media screen and (max-width: 76.1875em) { + .md-content .highlight pre { + margin-left: -0.8rem; + margin-right: -0.8rem; + border-radius: 0; + } -/* ─── Dark mode adjustments ────────────────────────────────────────── */ + /* Stack grid cards on small screens */ + .grid.cards > ul { + grid-template-columns: 1fr; + } +} -[data-md-color-scheme="slate"] .showcase-card { - border-color: rgba(255, 255, 255, 0.08); +/* Smooth scrolling */ +html { + scroll-behavior: smooth; } -[data-md-color-scheme="slate"] .showcase-card img { - background: linear-gradient(135deg, #0a0015, #1a0a30); +/* Better inline code readability */ +.md-typeset code:not(.highlight code) { + padding: 0.1em 0.35em; + border-radius: 3px; } -[data-md-color-scheme="default"] .showcase-card img { - background: linear-gradient(135deg, #f0e8f8, #e8e0f0); +/* Navigation tab active indicator */ +.md-tabs__link--active { + border-bottom: 2px solid var(--md-accent-fg-color); } diff --git a/docs/docs/testing.md b/docs/docs/testing.md new file mode 100644 index 000000000..44a326a71 --- /dev/null +++ b/docs/docs/testing.md @@ -0,0 +1,192 @@ +# Testing SceneView Apps + +Strategies for testing 3D and AR features in your Android app. + +--- + +## Unit testing node logic + +Business logic that drives your scene — model selection, animation state, anchor management — can be tested with standard JUnit tests. Keep scene logic in ViewModels or plain Kotlin classes. 
+ +```kotlin +// ViewModel +class SceneViewModel : ViewModel() { + var selectedModel by mutableStateOf("helmet") + private set + + var isAnimating by mutableStateOf(true) + private set + + fun selectModel(name: String) { selectedModel = name } + fun toggleAnimation() { isAnimating = !isAnimating } +} + +// Test +@Test +fun `selecting a model updates state`() { + val vm = SceneViewModel() + vm.selectModel("sword") + assertEquals("sword", vm.selectedModel) +} +``` + +!!! tip "Keep Filament out of unit tests" + Filament requires native libraries and a GPU context. Don't instantiate `Engine`, `ModelLoader`, or any Filament objects in unit tests. Test the state logic, not the rendering. + +--- + +## Compose UI testing + +Use `composeTestRule` to test the Compose UI around your scene — buttons, sliders, model pickers. You can't render the actual 3D scene in instrumented tests (no GPU in CI), but you can verify that state changes propagate. + +```kotlin +@get:Rule +val composeTestRule = createComposeRule() + +@Test +fun modelPickerUpdatesSelection() { + val vm = SceneViewModel() + composeTestRule.setContent { + ModelPickerUI(viewModel = vm) + } + + composeTestRule.onNodeWithText("Sword").performClick() + assertEquals("sword", vm.selectedModel) +} +``` + +--- + +## Screenshot testing + +For visual regression testing, use [Paparazzi](https://github.com/cashapp/paparazzi) or [Roborazzi](https://github.com/takahirom/roborazzi). These render Compose UI without a device but **cannot render Filament 3D content** (they use layoutlib, not a real GPU). + +**What you can screenshot-test:** + +- Compose UI overlays (buttons, HUD, model pickers) +- Loading states (skeleton/placeholder before model loads) +- Error states + +**What you cannot screenshot-test:** + +- The 3D scene itself +- AR camera feed +- Filament rendering output + +For 3D visual testing, use on-device screenshot tests with [Shot](https://github.com/pedrovgs/Shot) or manual QA. 
+
+---
+
+## Instrumented testing
+
+For on-device tests that exercise the full rendering pipeline:
+
+```kotlin
+@get:Rule
+val composeTestRule = createAndroidComposeRule<MainActivity>()
+
+@Test
+fun sceneRendersWithoutCrash() {
+    // Just verify the Scene composable doesn't throw
+    composeTestRule.waitForIdle()
+    // If we get here, the scene initialized successfully
+}
+
+@Test
+fun tappingPlacesModel() {
+    // Wait for scene to initialize
+    composeTestRule.waitForIdle()
+
+    // Simulate a tap
+    composeTestRule.onRoot().performClick()
+
+    // Verify state changed (model placed)
+    // Check via ViewModel or test tag
+}
+```
+
+!!! warning "AR tests require a physical device"
+    AR features need a real camera and ARCore. Run AR instrumented tests on physical devices only — not emulators.
+
+---
+
+## CI pipeline
+
+### What to run in CI
+
+| Check | CI-safe? | Tool |
+|---|---|---|
+| Unit tests (state logic) | Yes | JUnit |
+| Compose UI tests | Yes | `composeTestRule` |
+| Lint / ktlint | Yes | `./gradlew lint` |
+| Build verification | Yes | `./gradlew assembleDebug` |
+| Screenshot tests (UI only) | Yes | Paparazzi/Roborazzi |
+| 3D rendering tests | No (needs GPU) | On-device only |
+| AR tests | No (needs camera) | Physical device only |
+
+### Sample CI config
+
+```yaml
+# .github/workflows/test.yml
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          java-version: '17'
+          distribution: 'temurin'
+      - run: ./gradlew test            # Unit tests
+      - run: ./gradlew lintDebug       # Lint
+      - run: ./gradlew assembleDebug   # Build verification
+```
+
+---
+
+## Testing patterns
+
+### Mock the model loader
+
+For tests that need a `ModelInstance`, create a fake:
+
+```kotlin
+// In test
+val fakeInstance = mockk<ModelInstance>(relaxed = true)
+
+// Pass to composable under test
+ModelNode(modelInstance = fakeInstance, scaleToUnits = 1.0f)
+```
+
+### Test animation state transitions
+
+```kotlin
+@Test
+fun `animation toggles between walk and
idle`() { + val vm = SceneViewModel() + + assertEquals("Idle", vm.currentAnimation) + + vm.startWalking() + assertEquals("Walk", vm.currentAnimation) + + vm.stopWalking() + assertEquals("Idle", vm.currentAnimation) +} +``` + +### Test AR anchor lifecycle + +```kotlin +@Test +fun `clearing anchor removes placed model`() { + val vm = ARViewModel() + + vm.placeAnchor(mockAnchor) + assertTrue(vm.hasPlacedModel) + + vm.clearAnchor() + assertFalse(vm.hasPlacedModel) + assertNull(vm.currentAnchor) +} +``` diff --git a/docs/docs/troubleshooting.md b/docs/docs/troubleshooting.md new file mode 100644 index 000000000..2d36306d0 --- /dev/null +++ b/docs/docs/troubleshooting.md @@ -0,0 +1,211 @@ +# Troubleshooting + +Common issues and fixes when working with SceneView. + +--- + +## Build errors + +### "Duplicate class" or dependency conflicts + +SceneView pulls in Filament, ARCore, and Kotlin coroutines transitively. If another library in +your project brings in a different version, Gradle will report duplicate classes. + +**Fix:** + +- Use the Compose BOM to align Compose-related versions across your project. +- Exclude the conflicting transitive dependency from whichever library introduces it: + +```kotlin +implementation("com.example:some-library:1.0") { + exclude(group = "com.google.ar", module = "core") +} +``` + +- Run `./gradlew app:dependencies` to inspect the resolved dependency tree and identify the + conflict source. + +### NDK / CMake errors + +SceneView bundles pre-built native libraries (Filament .so files). You do **not** need to install +the NDK or configure CMake yourself. + +If you see NDK-related errors: + +- Make sure you are not forcing an `ndkVersion` that conflicts with the bundled binaries. 
+- Run a clean build after switching ABIs or updating SceneView: + +```bash +./gradlew clean assembleDebug +``` + +--- + +## Runtime errors + +### Model not loading / null ModelInstance + +`rememberModelInstance` returns `null` while the model is still loading **and** if the load fails. + +**Checklist:** + +1. **Verify the asset path.** Paths are relative to `src/main/assets/`. If your file is at + `src/main/assets/models/helmet.glb`, pass `"models/helmet.glb"`. +2. **Confirm the file exists.** Open the APK in Android Studio's APK Analyzer and check that the + file is present under `assets/`. +3. **Handle the null case.** The model is not available on the first frame: + +```kotlin +rememberModelInstance(modelLoader, "models/helmet.glb")?.let { instance -> + ModelNode(modelInstance = instance, scaleToUnits = 1.0f) +} +``` + +4. **Check Logcat.** Filter by `Filament` or `SceneView` — load failures are logged there. + +### Black screen / no rendering + +A completely black or empty viewport usually means the scene is set up but nothing is visible to +the camera. + +**Checklist:** + +- **Environment:** If no HDR environment is set, the scene has no ambient light and everything + appears black. Load an environment: + +```kotlin +environment = rememberEnvironment(environmentLoader) { + environmentLoader.createHDREnvironment("environments/sky_2k.hdr")!! +} +``` + +- **Engine:** Make sure `rememberEngine()` is called and passed to `Scene`. Without a valid engine, + nothing renders. +- **SurfaceType:** If you are embedding the scene in a `TextureView` or composing it with other + Compose layers, confirm you are using the correct `SurfaceType`. The default (`SurfaceType.SURFACE`) + works for most cases; switch to `SurfaceType.TEXTURE_SURFACE` only when overlay compositing is + required. + +### Crash on background thread + +Filament's JNI layer is **not thread-safe** and must be called on the **main thread**. 
A crash
+with a native stacktrace (often in `libfilament-jni.so`) almost always means a Filament call ran
+on the wrong thread.
+
+**Rules:**
+
+- **Use `rememberModelInstance`** in composables — it handles threading correctly.
+- **Never call `modelLoader.createModelInstance()`** (or any `modelLoader` / `materialLoader`
+  method) from `Dispatchers.IO` or any background coroutine.
+- For imperative (non-Compose) code, use `modelLoader.loadModelInstanceAsync` which dispatches to
+  the correct thread internally.
+
+```kotlin
+// WRONG — will crash
+viewModelScope.launch(Dispatchers.IO) {
+    val model = modelLoader.createModelInstance("models/helmet.glb") // native crash
+}
+
+// RIGHT — use the async helper
+modelLoader.loadModelInstanceAsync("models/helmet.glb") { instance ->
+    // already on main thread
+}
+```
+
+### AR session fails to start
+
+If the AR session never initialises or immediately throws an exception:
+
+1. **Camera permission.** `ARScene` requires `android.permission.CAMERA`. Request it before
+   displaying the AR composable.
+2. **ARCore installed.** The device needs *Google Play Services for AR*. On devices that do not
+   ship with it pre-installed, the user must install it from the Play Store.
+3. **Physical device required.** ARCore has very limited emulator support. Always test AR features
+   on a physical device.
+4. **Manifest metadata.** Your `AndroidManifest.xml` must declare ARCore support:
+
+```xml
+<uses-permission android:name="android.permission.CAMERA" />
+<uses-feature android:name="android.hardware.camera.ar" android:required="true" />
+<meta-data android:name="com.google.ar.core" android:value="required" />
+```
+
+Set the value to `"optional"` if your app can function without AR.
+
+---
+
+## Performance
+
+### Low FPS / jank
+
+- **Reduce polygon count.** Use decimated or LOD versions of your models. Tools like Blender or
+  `gltfpack` can optimise meshes.
+- **Compress textures.** Convert textures to KTX2 with Basis Universal compression. This reduces
+  GPU memory bandwidth and upload time.
+- **Limit lights.** Each additional dynamic light adds rendering cost.
One directional main light + plus an HDR environment is sufficient for most scenes. +- **Avoid unnecessary recomposition.** If Compose state changes trigger recomposition of the + `Scene` block, `rememberModelInstance` may re-execute. Keep scene-unrelated state outside the + `Scene` content lambda. + +### High memory usage + +- **Share the engine.** If your app has multiple screens with 3D content, pass the same `Engine` + instance to each `Scene` rather than creating a new one per screen. +- **Use `scaleToUnits`.** Models exported at real-world scale (e.g. a building at 50 m) allocate + large bounding volumes. Set `scaleToUnits` on `ModelNode` to normalise them. +- **Dispose unused instances.** When a model is no longer displayed, make sure its `ModelInstance` + is not held in a long-lived reference. Let the composable lifecycle handle disposal, or call + `destroy()` manually in imperative code. + +--- + +## Common mistakes + +### LightNode: `apply` is a named parameter + +`LightNode`'s `apply` block is a **named parameter**, not a trailing lambda. The compiler may +accept the trailing-lambda form without error, but the block will not execute. + +```kotlin +// WRONG — compiles but the apply block is ignored +LightNode(type = LightManager.Type.SUN) { + intensity(100_000f) +} + +// RIGHT — use the named parameter +LightNode(type = LightManager.Type.SUN, apply = { + intensity(100_000f) +}) +``` + +### Missing null check on model load + +`rememberModelInstance` returns `ModelInstance?`. Passing a nullable instance where a non-null is +expected causes a compile error — or worse, a force-unwrap crash at runtime. + +```kotlin +// WRONG — force-unwrap can crash +val model = rememberModelInstance(modelLoader, "models/helmet.glb")!! 
+ +// RIGHT — safe handling +rememberModelInstance(modelLoader, "models/helmet.glb")?.let { instance -> + ModelNode(modelInstance = instance, scaleToUnits = 1.0f) +} +``` + +### Forgetting the HDR environment + +A scene with no environment and no explicit light is completely dark. Always set at least one of: + +- An HDR environment via `rememberEnvironment` +- A main light via `rememberMainLightNode` + +--- + +## Still stuck? + +- Search existing [GitHub Issues](https://github.com/SceneView/sceneview-android/issues) — + many questions have already been answered. +- Open a new issue with your SceneView version, device model, Android version, and a minimal + reproducible snippet. diff --git a/docs/docs/use-cases.md b/docs/docs/use-cases.md new file mode 100644 index 000000000..9e5f440fd --- /dev/null +++ b/docs/docs/use-cases.md @@ -0,0 +1,294 @@ +# Use Cases + +Real-world applications of SceneView across industries. Each example includes the key composables used and approximate line counts. + +--- + +## E-commerce: 3D product viewer + +Replace static product images with an interactive 3D viewer. Customers can orbit, zoom, and inspect products from every angle. + +```kotlin +@Composable +fun ProductViewer(productGlb: String) { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val model = rememberModelInstance(modelLoader, productGlb) + + Scene( + modifier = Modifier.fillMaxWidth().height(400.dp), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator(), + environment = rememberEnvironment(rememberEnvironmentLoader(engine)) { + createHDREnvironment("environments/studio.hdr")!! + } + ) { + model?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f, autoAnimate = true) + } + } +} +``` + +**~15 lines** to replace a product image with an interactive 3D viewer. 
+
+**Key nodes:** `ModelNode`, `Scene`
+**Features used:** orbit camera, HDR lighting, auto-animation
+
+---
+
+## E-commerce: AR try-before-you-buy
+
+Let customers see how furniture looks in their room before purchasing.
+
+```kotlin
+@Composable
+fun ARFurniturePlacer(furnitureGlb: String) {
+    var anchor by remember { mutableStateOf<Anchor?>(null) }
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+    val furniture = rememberModelInstance(modelLoader, furnitureGlb)
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        engine = engine,
+        modelLoader = modelLoader,
+        planeRenderer = anchor == null, // hide planes after placement
+        onTouchEvent = { event, hitResult ->
+            if (event.action == MotionEvent.ACTION_UP && hitResult != null) {
+                anchor = hitResult.createAnchor()
+            }
+            true
+        }
+    ) {
+        anchor?.let { a ->
+            AnchorNode(anchor = a) {
+                furniture?.let {
+                    ModelNode(
+                        modelInstance = it,
+                        scaleToUnits = 0.8f,
+                        isEditable = true // pinch to scale, drag to move
+                    )
+                }
+            }
+        }
+    }
+}
+```
+
+**~25 lines.** Tap to place, pinch to scale, drag to reposition. All built in.
+
+---
+
+## Education: interactive 3D anatomy
+
+Students can rotate and explore a 3D model with labeled parts. Tap a part to see information.
+
+```kotlin
+@Composable
+fun AnatomyViewer() {
+    var selectedPart by remember { mutableStateOf<String?>(null) }
+    val engine = rememberEngine()
+    val modelLoader = rememberModelLoader(engine)
+    val model = rememberModelInstance(modelLoader, "models/heart.glb")
+
+    Box(modifier = Modifier.fillMaxSize()) {
+        Scene(
+            modifier = Modifier.fillMaxSize(),
+            engine = engine,
+            modelLoader = modelLoader,
+            cameraManipulator = rememberCameraManipulator(),
+            onGestureListener = rememberOnGestureListener(
+                onSingleTapConfirmed = { _, node ->
+                    selectedPart = node?.name
+                }
+            )
+        ) {
+            model?.let {
+                ModelNode(modelInstance = it, scaleToUnits = 1.5f)
+            }
+
+            // Labels float in 3D space
+            TextNode(text = "Left Ventricle", position = Position(-0.2f, 0f, 0.3f))
+            TextNode(text = "Right Atrium", position = Position(0.3f, 0.4f, 0f))
+        }
+
+        // Compose overlay
+        selectedPart?.let { part ->
+            Card(modifier = Modifier.align(Alignment.BottomCenter).padding(16.dp)) {
+                Text("Selected: $part", modifier = Modifier.padding(16.dp))
+            }
+        }
+    }
+}
+```
+
+**Key nodes:** `ModelNode`, `TextNode`, `Scene`
+**Features:** tap interaction, 3D text labels, Compose overlay
+
+---
+
+## Data visualization: 3D globe
+
+Display data points on a rotating globe. Each point is a sphere positioned by latitude/longitude.
+
+```kotlin
+@Composable
+fun DataGlobe(dataPoints: List<DataPoint>) {
+    Scene(
+        modifier = Modifier.size(300.dp),
+        cameraManipulator = rememberCameraManipulator(
+            orbitHomePosition = Position(0f, 0f, 3f)
+        )
+    ) {
+        // Earth sphere
+        SphereNode(radius = 1f, materialInstance = earthMaterial)
+
+        // Data point markers
+        dataPoints.forEach { point ->
+            val pos = point.toCartesian(radius = 1.02f) // slightly above surface
+            SphereNode(
+                radius = 0.02f,
+                materialInstance = redMaterial,
+                position = pos
+            )
+        }
+    }
+}
+```
+
+---
+
+## Social: AR face filters
+
+Apply effects to the user's face using the front camera.
+
+```kotlin
+@Composable
+fun FaceFilter() {
+    var faces by remember { mutableStateOf<List<AugmentedFace>>(emptyList()) }
+
+    ARScene(
+        modifier = Modifier.fillMaxSize(),
+        sessionFeatures = setOf(Session.Feature.FRONT_CAMERA),
+        sessionConfiguration = { _, config ->
+            config.augmentedFaceMode = Config.AugmentedFaceMode.MESH3D
+        },
+        onSessionUpdated = { session, _ ->
+            faces = session.getAllTrackables(AugmentedFace::class.java)
+                .filter { it.trackingState == TrackingState.TRACKING }
+        }
+    ) {
+        faces.forEach { face ->
+            AugmentedFaceNode(
+                augmentedFace = face,
+                meshMaterialInstance = filterMaterial
+            )
+        }
+    }
+}
+```
+
+---
+
+## Architecture: 3D floor plan walkthrough
+
+Navigate through a building with interactive room labels and dynamic lighting.
+
+```kotlin
+@Composable
+fun FloorPlanViewer() {
+    var timeOfDay by remember { mutableFloatStateOf(12f) }
+
+    Column {
+        Scene(
+            modifier = Modifier.weight(1f).fillMaxWidth(),
+            cameraManipulator = rememberCameraManipulator(
+                orbitHomePosition = Position(0f, 5f, 8f)
+            )
+        ) {
+            rememberModelInstance(modelLoader, "models/apartment.glb")?.let {
+                ModelNode(modelInstance = it, scaleToUnits = 4f)
+            }
+
+            DynamicSkyNode(timeOfDay = timeOfDay)
+            FogNode(view = view, density = 0.02f, enabled = timeOfDay < 7f || timeOfDay > 19f)
+
+            // Room labels
+            TextNode(text = "Living Room", position = Position(0f, 2.5f, 0f))
+            TextNode(text = "Kitchen", position = Position(3f, 2.5f, -2f))
+        }
+
+        // Time of day slider
+        Slider(
+            value = timeOfDay,
+            onValueChange = { timeOfDay = it },
+            valueRange = 0f..24f,
+            modifier = Modifier.padding(16.dp)
+        )
+    }
+}
+```
+
+**Key nodes:** `ModelNode`, `DynamicSkyNode`, `FogNode`, `TextNode`
+
+---
+
+## Gaming: physics playground
+
+Interactive scene where users throw objects that bounce and collide.
+
+```kotlin
+@Composable
+fun PhysicsPlayground() {
+    val balls = remember { mutableStateListOf<BallState>() }
+
+    Scene(
+        modifier = Modifier.fillMaxSize(),
+        onGestureListener = rememberOnGestureListener(
+            onSingleTapConfirmed = { event, _ ->
+                balls.add(BallState(
+                    velocity = Position(0f, 5f, -8f),
+                    position = Position(0f, 2f, 0f)
+                ))
+            }
+        )
+    ) {
+        // Floor
+        PlaneNode(size = Size(10f, 10f), materialInstance = floorMaterial)
+
+        // Balls with physics
+        balls.forEach { ball ->
+            val model = rememberModelInstance(modelLoader, "models/ball.glb")
+            model?.let {
+                val node = ModelNode(modelInstance = it, scaleToUnits = 0.15f)
+                PhysicsNode(
+                    node = node,
+                    mass = 1f,
+                    restitution = 0.7f,
+                    linearVelocity = ball.velocity,
+                    floorY = 0f,
+                    radius = 0.075f
+                )
+            }
+        }
+    }
+}
+```
+
+**Key nodes:** `ModelNode`, `PhysicsNode`, `PlaneNode`
+
+---
+
+## Industry fit
+
+| Industry | Primary nodes | Key features |
+|---|---|---|
+| **E-commerce** | `ModelNode`, `AnchorNode` | Orbit camera, AR placement, gestures |
+| **Education** | `ModelNode`, `TextNode` | Labels, tap interaction, animation control |
+| **Real estate** | `ModelNode`, `DynamicSkyNode`, `FogNode` | Time-of-day, atmospheric effects |
+| **Social** | `AugmentedFaceNode` | Front camera, face mesh, material overlay |
+| **Data viz** | `SphereNode`, `LineNode`, `PathNode` | Geometry primitives, 3D charts |
+| **Gaming** | `PhysicsNode`, `ModelNode` | Physics, collision, tap-to-throw |
+| **Navigation** | `StreetscapeGeometryNode`, `AnchorNode` | Geospatial, streetscape, waypoints |
diff --git a/docs/docs/v4-preview.md b/docs/docs/v4-preview.md
new file mode 100644
index 000000000..443e930b6
--- /dev/null
+++ b/docs/docs/v4-preview.md
@@ -0,0 +1,186 @@
+# SceneView 4.0 preview
+
+The next major release takes SceneView beyond Android — multi-platform, spatial computing, and a more powerful scene graph.
+
+!!! info "v3.2.0 is production-ready today"
+    You don't need to wait for 4.0.
Everything below adds capabilities on top — it doesn't + replace anything. [Get started now](index.md#get-started). + +--- + +## The journey + +| Version | Theme | +|---|---| +| **2.x** | View-based Sceneform successor | +| **3.0** | Compose rewrite — "3D is just Compose UI" | +| **3.1** | `rememberModelInstance`, camera manipulator, gesture polish | +| **3.2** | Physics, dynamic sky, fog, reflections, lines, text, post-processing | +| **4.0** | Multi-scene, portals, XR, Kotlin Multiplatform | + +--- + +## Multiple `Scene {}` on one screen + +Today, you get one `Scene` per screen. In 4.0, multiple independent scenes share a single +Filament `Engine`, each with its own camera, environment, and node tree. + +```kotlin +@Composable +fun DashboardScreen() { + Column { + // Product hero + Scene( + modifier = Modifier.fillMaxWidth().height(300.dp), + engine = engine, + environment = studioEnvironment + ) { + ModelNode(modelInstance = product, scaleToUnits = 1.0f) + } + + // Inline data globe — different camera, different lighting + Scene( + modifier = Modifier.size(200.dp), + engine = engine, + environment = darkEnvironment + ) { + SphereNode(radius = 0.5f, materialInstance = globeMaterial) + } + + // Standard Compose content + LazyColumn { /* cards, charts, text */ } + } +} +``` + +Dashboards, e-commerce feeds, social timelines — 3D elements mixed freely with +`LazyColumn`, `Pager`, `BottomSheet`. + +--- + +## `PortalNode` — a scene inside a scene + +Render a secondary scene inside a 3D frame. A window into another world. 
+ +```kotlin +Scene(modifier = Modifier.fillMaxSize()) { + ModelNode(modelInstance = room, scaleToUnits = 2.0f) + + // A portal on the wall + PortalNode( + position = Position(0f, 1.5f, -2f), + size = Size(1.2f, 1.8f), + scene = portalScene + ) { + ModelNode(modelInstance = fantasyLandscape, scaleToUnits = 5.0f) + DynamicSkyNode(sunPosition = Position(0.2f, 0.8f, 0.3f)) + FogNode(density = 0.05f, color = Color(0.6f, 0.7f, 1.0f)) + } +} +``` + +**Use cases:** AR portals, product showcases with custom lighting, game level transitions, +real estate walkthroughs. + +--- + +## SceneView-XR — spatial computing + +A new module for XR headsets and passthrough AR. +Same composable API — now in spatial environments. + +```kotlin +implementation("io.github.sceneview:sceneview-xr:4.0.0") +``` + +```kotlin +XRScene(modifier = Modifier.fillMaxSize()) { + ModelNode( + modelInstance = furniture, + position = Position(0f, 0f, -2f) + ) + + ViewNode(position = Position(0.5f, 1.5f, -1.5f)) { + Card { + Text("Tap to customize") + ColorPicker(onColorSelected = { /* update material */ }) + } + } +} +``` + +Your existing 3D/AR code patterns transfer directly to spatial computing. + +--- + +## Kotlin Multiplatform + +Share scene definitions between Android and iOS from a single Kotlin codebase. iOS rendering via Filament's Metal backend. + +```kotlin +// commonMain — shared across platforms +@Composable +fun ProductViewer(modelPath: String) { + Scene(modifier = Modifier.fillMaxSize()) { + rememberModelInstance(modelLoader, modelPath)?.let { instance -> + ModelNode(modelInstance = instance, scaleToUnits = 1.0f) + } + } +} +``` + +Write once, render natively on both platforms. 
+ +--- + +## Also in 4.0 + +- **Filament 2.x migration** — improved performance, better materials, reduced memory +- **`ParticleNode`** — GPU particle system for fire, smoke, sparkles, confetti +- **`AnimationController`** — composable-level animation blending, cross-fading, and layering +- **`CollisionNode`** — declarative collision detection between scene nodes + +--- + +## Who should care about 4.0 + +
+<div class="grid cards" markdown>
+
+- :octicons-package-24: **E-commerce teams**
+
+    ---
+
+    Multi-scene lets you embed 3D product viewers in `LazyColumn` feeds, `BottomSheet` configurators, and `Pager` carousels — all on one screen, all with independent cameras.
+
+- :octicons-home-24: **Real estate / architecture**
+
+    ---
+
+    `PortalNode` lets users peek through doors into furnished rooms, walk through 3D floor plans, and compare lighting conditions — all without loading separate screens.
+
+- :octicons-device-desktop-24: **XR teams**
+
+    ---
+
+    `SceneView-XR` means the same code and patterns you build for phone AR transfer directly to XR headsets. No new framework to learn.
+
+- :octicons-globe-24: **Cross-platform teams**
+
+    ---
+
+    Kotlin Multiplatform means you can share scene definitions between Android and iOS. One Kotlin codebase, two platforms.
+
+</div>
+ +--- + +## Summary + +| Limitation today | v4.0 solution | +|---|---| +| One Scene per screen | Multiple independent Scenes | +| Flat scene graph | `PortalNode` — scenes within scenes | +| Android only | Kotlin Multiplatform (iOS) | +| Phone/tablet only | `SceneView-XR` for spatial computing | + +[:octicons-arrow-right-24: Full roadmap on GitHub](https://github.com/SceneView/sceneview-android/blob/main/ROADMAP.md) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index b3a86c0bc..254409cef 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -1,5 +1,5 @@ site_name: SceneView -site_description: 3D and AR for Jetpack Compose, powered by Google Filament and ARCore +site_description: 3D & AR for Kotlin — Compose-native scenes on Android, XR, and beyond. Powered by Google Filament. site_url: https://sceneview.github.io/ repo_url: https://github.com/SceneView/sceneview-android repo_name: SceneView/sceneview-android @@ -23,6 +23,8 @@ theme: font: text: Roboto code: JetBrains Mono + icon: + logo: material/cube-outline features: - navigation.tabs - navigation.tabs.sticky @@ -34,24 +36,39 @@ theme: - search.highlight - content.code.copy - content.code.annotate + - content.tabs.link - toc.follow nav: - Home: index.md - - Showcase: showcase.md + - Get Started: + - Quickstart: quickstart.md + - API Cheatsheet: cheatsheet.md + - Recipes: recipes.md + - FAQ: faq.md + - Troubleshooting: troubleshooting.md + - Guides: + - Architecture: architecture.md + - Integrations: integrations.md + - Performance: performance.md + - Testing: testing.md + - Features: + - Why SceneView: showcase.md + - Use Cases: use-cases.md + - AI-Assisted Development: ai-development.md + - vs. 
Alternatives: comparison.md + - v4.0 Preview: v4-preview.md - Codelabs: - codelabs/index.md - 3D with Compose: codelabs/codelab-3d-compose.md - AR with Compose: codelabs/codelab-ar-compose.md - - "Guide: Physics": codelabs/guide-physics.md - - "Guide: Dynamic sky & fog": codelabs/guide-dynamic-sky.md - - "Guide: Reflection probes": codelabs/guide-reflection-probes.md - - "Guide: Lines & text": codelabs/guide-lines-text.md - - Migration guide: migration.md - - Changelog: changelog.md - - API reference: + - API: - API (3D — sceneview): https://sceneview.github.io/api/sceneview-android/sceneview/ - API (AR — arsceneview): https://sceneview.github.io/api/sceneview-android/arsceneview/ + - Samples: samples.md + - Contribute: contributing.md + - Changelog: changelog.md + - Migration: migration.md plugins: - search @@ -76,6 +93,9 @@ markdown_extensions: - tables - toc: permalink: true + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg extra: social: @@ -83,9 +103,9 @@ extra: link: https://github.com/SceneView/sceneview-android - icon: fontawesome/brands/discord link: https://discord.gg/UbNDDBTNqb - # analytics: - # provider: google - # property: G-XXXXXXXXXX # TODO: replace with real property ID + analytics: + provider: google + property: G-XXXXXXXXXX extra_css: - stylesheets/extra.css diff --git a/docs/site-screenshots/architecture.png b/docs/site-screenshots/architecture.png new file mode 100644 index 000000000..074662c0c Binary files /dev/null and b/docs/site-screenshots/architecture.png differ diff --git a/docs/site-screenshots/changelog.png b/docs/site-screenshots/changelog.png new file mode 100644 index 000000000..fe8825922 Binary files /dev/null and b/docs/site-screenshots/changelog.png differ diff --git a/docs/site-screenshots/cheatsheet.png b/docs/site-screenshots/cheatsheet.png new file mode 100644 index 000000000..067a6dd0d Binary files /dev/null and 
b/docs/site-screenshots/cheatsheet.png differ diff --git a/docs/site-screenshots/codelab-3d.png b/docs/site-screenshots/codelab-3d.png new file mode 100644 index 000000000..703a9c2fd Binary files /dev/null and b/docs/site-screenshots/codelab-3d.png differ diff --git a/docs/site-screenshots/codelabs-codelab-3d-compose.png b/docs/site-screenshots/codelabs-codelab-3d-compose.png new file mode 100644 index 000000000..a357bcb57 Binary files /dev/null and b/docs/site-screenshots/codelabs-codelab-3d-compose.png differ diff --git a/docs/site-screenshots/comparison.png b/docs/site-screenshots/comparison.png new file mode 100644 index 000000000..78ee90120 Binary files /dev/null and b/docs/site-screenshots/comparison.png differ diff --git a/docs/site-screenshots/contributing.png b/docs/site-screenshots/contributing.png new file mode 100644 index 000000000..b8fa020b1 Binary files /dev/null and b/docs/site-screenshots/contributing.png differ diff --git a/docs/site-screenshots/faq.png b/docs/site-screenshots/faq.png new file mode 100644 index 000000000..02ffcdc63 Binary files /dev/null and b/docs/site-screenshots/faq.png differ diff --git a/docs/site-screenshots/home-v2.png b/docs/site-screenshots/home-v2.png new file mode 100644 index 000000000..701c9fd44 Binary files /dev/null and b/docs/site-screenshots/home-v2.png differ diff --git a/docs/site-screenshots/home.png b/docs/site-screenshots/home.png new file mode 100644 index 000000000..738b53020 Binary files /dev/null and b/docs/site-screenshots/home.png differ diff --git a/docs/site-screenshots/performance.png b/docs/site-screenshots/performance.png new file mode 100644 index 000000000..ec2bb8b13 Binary files /dev/null and b/docs/site-screenshots/performance.png differ diff --git a/docs/site-screenshots/quickstart.png b/docs/site-screenshots/quickstart.png new file mode 100644 index 000000000..fdbeab3c3 Binary files /dev/null and b/docs/site-screenshots/quickstart.png differ diff --git a/docs/site-screenshots/recipes.png 
b/docs/site-screenshots/recipes.png new file mode 100644 index 000000000..741610f0c Binary files /dev/null and b/docs/site-screenshots/recipes.png differ diff --git a/docs/site-screenshots/samples.png b/docs/site-screenshots/samples.png new file mode 100644 index 000000000..35b51f361 Binary files /dev/null and b/docs/site-screenshots/samples.png differ diff --git a/docs/site-screenshots/showcase.png b/docs/site-screenshots/showcase.png new file mode 100644 index 000000000..d0909176e Binary files /dev/null and b/docs/site-screenshots/showcase.png differ diff --git a/docs/site-screenshots/testing.png b/docs/site-screenshots/testing.png new file mode 100644 index 000000000..c1c39ef51 Binary files /dev/null and b/docs/site-screenshots/testing.png differ diff --git a/docs/site-screenshots/troubleshooting.png b/docs/site-screenshots/troubleshooting.png new file mode 100644 index 000000000..a80d33e6d Binary files /dev/null and b/docs/site-screenshots/troubleshooting.png differ diff --git a/docs/site-screenshots/v4-preview.png b/docs/site-screenshots/v4-preview.png new file mode 100644 index 000000000..cb5f8efb1 Binary files /dev/null and b/docs/site-screenshots/v4-preview.png differ diff --git a/marketing/README.md b/marketing/README.md index 2e9bc5611..a4ca66ed4 100644 --- a/marketing/README.md +++ b/marketing/README.md @@ -1,39 +1,78 @@ # Marketing assets -Content ready to publish for the SceneView 3.0 launch. +Content for positioning SceneView as the #1 3D & AR library for Android. -## Files +--- + +## Showcase & positioning + +| File | What it is | Audience | +|---|---|---| +| [showcase.md](showcase.md) | Full capability showcase — why SceneView is #1 | Developers evaluating 3D/AR options | +| [comparison.md](comparison.md) | Honest comparison vs. Sceneform, Unity, raw ARCore, etc. 
| Developers who haven't switched yet | +| [v4-preview.md](v4-preview.md) | v4.0 vision — multi-scene, portals, XR, KMP | Existing users + forward-looking devs | + +## Social & content | File | What it is | Where to publish | |---|---|---| -| [medium-article.md](medium-article.md) | Full article, paste-ready | Medium, Dev.to, Hashnode, Android Developers blog | +| [x-thread.md](x-thread.md) | 12-tweet thread template | Twitter/X, Bluesky, Mastodon | +| [linkedin-post.md](linkedin-post.md) | 3 caption variants + hashtags | LinkedIn | +| [linkedin-video-storyboard.md](linkedin-video-storyboard.md) | 60s video — shot-by-shot production guide | LinkedIn (film → upload) | +| [devto-article.md](devto-article.md) | Updated article for 3.2.0 with all new features | Dev.to, Hashnode | +| [medium-article.md](medium-article.md) | Original 3.0 launch article | Medium | | [youtube-script.md](youtube-script.md) | 10-min video script + code + b-roll notes | YouTube | -| [linkedin-video-storyboard.md](linkedin-video-storyboard.md) | Shot-by-shot production guide for the LinkedIn video | Film → LinkedIn | -| [linkedin-post.md](linkedin-post.md) | 3 caption variants + hashtags + timing tips | LinkedIn | -| [codelabs/codelab-3d-compose.md](codelabs/codelab-3d-compose.md) | Step-by-step 3D getting started guide | Google CodeLabs, GitHub Pages, docs site | -| [codelabs/codelab-ar-compose.md](codelabs/codelab-ar-compose.md) | Step-by-step AR getting started guide | Google CodeLabs, GitHub Pages, docs site | -## LinkedIn video — 3D assets to download +## Assets & production -| Asset | Use in video | Download | -|---|---|---| -| `ChronographWatch.glb` | Shot 3 — hero rotating watch | [Download](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/ChronographWatch/glTF-Binary/ChronographWatch.glb) | -| `MaterialsVariantsShoe.glb` | Shot 4 — product page demo | 
[Download](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/MaterialsVariantsShoe/glTF-Binary/MaterialsVariantsShoe.glb) | -| Kenney Furniture Kit | Shot 6 — AR sofa placement | [Download](https://kenney.nl/assets/furniture-kit) | +| File | What it is | +|---|---| +| [assets-catalog.md](assets-catalog.md) | Curated catalog of free glTF models, HDR environments, and recommended combos for marketing screenshots | +| [github-profile.md](github-profile.md) | GitHub repo description, topics, org profile README, social preview image specs, release template, badges | + +## Tutorials -## LinkedIn video — production order +| File | Where to publish | +|---|---| +| [codelabs/codelab-3d-compose.md](codelabs/codelab-3d-compose.md) | Docs site, Google CodeLabs | +| [codelabs/codelab-ar-compose.md](codelabs/codelab-ar-compose.md) | Docs site, Google CodeLabs | -1. Download the 3 assets above, add them to the sample app -2. Record the right side (3D rendering) first — it's the hard part -3. Record the left side (AI code generation) — simulate with a Compose type-animation if needed -4. Edit in CapCut or Premiere: split screen, hard cuts, white flash transitions -5. Export 9:16, H.264, ~50MB max -6. 
Post with Variant A caption from `linkedin-post.md` +--- -## Medium article — publish checklist +## Publishing checklist -- [ ] Create Medium account if needed +### Dev.to article +- [ ] Create Dev.to account or log in +- [ ] Paste content from `devto-article.md` (front matter included) +- [ ] Add cover image: 1000x420 screenshot of SceneView render (use assets from `assets-catalog.md`) +- [ ] Publish and cross-post to Hashnode + +### X/Twitter thread +- [ ] Post tweet 1 from `x-thread.md` +- [ ] Reply-chain tweets 2–12 +- [ ] Pin tweet 1 to profile for the week +- [ ] Cross-post to Bluesky and Mastodon + +### LinkedIn video +- [ ] Download assets: ChronographWatch, MaterialsVariantsShoe, Kenney Furniture Kit +- [ ] Record 3D renders (right side), then AI code gen (left side) +- [ ] Edit: split screen, hard cuts, white flash transitions +- [ ] Export 9:16, H.264, ~50 MB max +- [ ] Post with caption from `linkedin-post.md` + +### GitHub +- [ ] Update repo description (see `github-profile.md`) +- [ ] Add topic tags +- [ ] Upload social preview image (1280x640) +- [ ] Update org profile README if exists + +### Medium article - [ ] Paste content from `medium-article.md` -- [ ] Add 3 images: helmet render, code side-by-side, product viewer screenshot +- [ ] Add 3 images: render, code side-by-side, product viewer - [ ] Tags: `android`, `jetpack-compose`, `kotlin`, `augmented-reality`, `3d` -- [ ] Submit to `Better Programming` or `Android Weekly` publication for amplification +- [ ] Submit to `Better Programming` or `Android Weekly` publication + +### Website +- [ ] Merge docs branch to trigger deployment +- [ ] Verify sceneview.github.io shows updated MkDocs site (not old Flutter/RN page) +- [ ] Check all nav links work diff --git a/marketing/RESUME.md b/marketing/RESUME.md new file mode 100644 index 000000000..39d6aca4c --- /dev/null +++ b/marketing/RESUME.md @@ -0,0 +1,78 @@ +# Resume Instructions + +If the session crashes, paste this to Claude Code to resume: + +``` 
+Continue the "make SceneView #1" weekend push on branch `claude/sceneview-marketing-showcase-CJCtV`. +Read `marketing/RESUME.md` for full context on what's done and what's left. +``` + +## Branch +`claude/sceneview-marketing-showcase-CJCtV` + +## What's DONE — Full inventory + +### Documentation Website (20 pages, MkDocs Material) + +**Get Started:** +- quickstart.md — empty project to first 3D scene +- cheatsheet.md — dense API quick reference, all 26+ node types +- recipes.md — 25+ copy-paste code patterns (1,168 lines) +- faq.md — common questions (setup, 3D, AR, performance) +- troubleshooting.md — build errors, runtime crashes, threading + +**Guides:** +- architecture.md — internals deep-dive (Compose→Filament bridge) +- performance.md — optimization guide (models, textures, device tiers) +- testing.md — unit, UI, screenshot, CI pipeline strategies + +**Features:** +- showcase.md — why SceneView is #1 +- use-cases.md — 7 industry examples with working code +- ai-development.md — llms.txt, MCP, Claude Code integration +- comparison.md — vs. 
Sceneform, Unity, raw ARCore, Rajawali +- v4-preview.md — multi-scene, portals, XR, KMP + +**Codelabs:** +- codelab-3d-compose.md — 3D with Compose (~25 min) +- codelab-ar-compose.md — AR with Compose (~20 min) + +**Reference:** +- samples.md — all 15 samples with screenshots +- contributing.md — guide with AI workflow +- changelog.md — v3.2.0 + v3.0.0 sections +- migration.md — v2.x → v3.0 step-by-step + +**Infrastructure:** +- mkdocs.yml — Material theme, dark mode, responsive +- extra.css — custom styling, mobile responsiveness +- 18+ site screenshots in docs/site-screenshots/ + +### Marketing Content (16 files) +- x-thread.md, linkedin-post.md, linkedin-video-storyboard.md +- devto-article.md, medium-article.md, youtube-script.md +- showcase.md, comparison.md, v4-preview.md +- assets-catalog.md (24 Khronos models, HDR URLs) +- github-profile.md +- 2 codelabs +- v4-launch-plan.md (timeline, templates, metrics) + +### Community Health +- CODE_OF_CONDUCT.md, SECURITY.md, .editorconfig +- 3 Discussion templates (Q&A, Show & Tell, Ideas) + +### Core Files Updated +- README.md — v3.2.0 with "What's new" section, 8 new nodes, 6 new samples +- llms.txt — v3.2.0 with 8 new node types, 14 samples +- ROADMAP.md — v3.2 shipped, v3.3 planned, v4.0 detailed +- 6 sample READMEs + +### CI/CD +- docs.yml, docs-on-release.yml workflows + +## Stats +- ~10,000+ lines of new content +- 20+ commits +- 70+ files changed +- 20 docs pages +- 18 site screenshots diff --git a/marketing/assets-catalog.md b/marketing/assets-catalog.md new file mode 100644 index 000000000..dd881d243 --- /dev/null +++ b/marketing/assets-catalog.md @@ -0,0 +1,284 @@ +# 3D Assets & Environments Catalog + +Free, high-quality assets for SceneView demos, marketing screenshots, and sample apps. +All assets below are CC0, CC-BY, or Apache 2.0 — safe for marketing and open source. 
+ +--- + +## Hero models (glTF/GLB) + +### KhronosGroup glTF Sample Assets + +The official Khronos showcase models — 24 showcase-tagged models, all free. +Browse interactively at [github.khronos.org/glTF-Assets](https://github.khronos.org/glTF-Assets/). + +**E-commerce / product demos:** + +| Model | Description | Features | Download | +|---|---|---|---| +| **MaterialsVariantsShoe** | Shoe with 3 material variants | PBR, `KHR_materials_variants` | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/MaterialsVariantsShoe/glTF-Binary/MaterialsVariantsShoe.glb) | +| **ChronographWatch** | Wrist watch with material variants | PBR, animation, variants | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/ChronographWatch/glTF-Binary/ChronographWatch.glb) | +| **SheenChair** | Chair with material variants and sheen | PBR, sheen, variants | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/SheenChair/glTF-Binary/SheenChair.glb) | +| **SheenWoodLeatherSofa** | Sofa with sheen and specular | PBR, fabric, tiling | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/SheenWoodLeatherSofa/glTF-Binary/SheenWoodLeatherSofa.glb) | +| **SpecularSilkPouf** | Silk pouf with sheen and specular | PBR, fabric/textile | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/SpecularSilkPouf/glTF-Binary/SpecularSilkPouf.glb) | +| **CommercialRefrigerator** | Fridge with glass, bottles, door animation | PBR, animation, glass | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/CommercialRefrigerator/glTF-Binary/CommercialRefrigerator.glb) | +| **StainedGlassLamp** | Tiffany-style stained glass lamp | PBR, transmission | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/StainedGlassLamp/glTF-Binary/StainedGlassLamp.glb) | +| **SunglassesKhronos** | Sunglasses with 
iridescent lenses | PBR, iridescence | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/SunglassesKhronos/glTF-Binary/SunglassesKhronos.glb) | +| **WaterBottle** | Simple product, clean PBR | PBR | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/WaterBottle/glTF-Binary/WaterBottle.glb) | + +**Automotive:** + +| Model | Description | Download | +|---|---|---| +| **CarConcept** | Concept car with clearcoat paint, material variants | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/CarConcept/glTF-Binary/CarConcept.glb) | +| **ToyCar** | Toy car with transmission, clearcoat, sheen | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/ToyCar/glTF-Binary/ToyCar.glb) | + +**"Wow factor" hero renders:** + +| Model | Description | Download | +|---|---|---| +| **DamagedHelmet** | The classic glTF demo — full PBR, battle-worn | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/DamagedHelmet/glTF-Binary/DamagedHelmet.glb) | +| **FlightHelmet** | Detailed flight helmet on wooden stand (CC0) | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/FlightHelmet/glTF-Binary/FlightHelmet.glb) | +| **ABeautifulGame** | Chess set with transmission and volume — stunning glass | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/ABeautifulGame/glTF-Binary/ABeautifulGame.glb) | +| **MosquitoInAmber** | Mosquito in amber — transmission, IOR, volume | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/MosquitoInAmber/glTF-Binary/MosquitoInAmber.glb) | +| **GlassHurricaneCandleHolder** | Glass candle holder with transmission | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/GlassHurricaneCandleHolder/glTF-Binary/GlassHurricaneCandleHolder.glb) | +| **IridescenceLamp** | Lamp with 
transmission, volume, iridescence | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/IridescenceLamp/glTF-Binary/IridescenceLamp.glb) | +| **DragonAttenuation** | Glass dragon with subsurface scattering | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/DragonAttenuation/glTF-Binary/DragonAttenuation.glb) | +| **PotOfCoals** | Copper pot with clearcoat, emissive hot coals | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/PotOfCoals/glTF-Binary/PotOfCoals.glb) | +| **AntiqueCamera** | Detailed model, many materials | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/AntiqueCamera/glTF-Binary/AntiqueCamera.glb) | +| **Lantern** | Emissive materials, glow | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/Lantern/glTF-Binary/Lantern.glb) | + +**Botanical / architectural:** + +| Model | Description | Download | +|---|---|---| +| **DiffuseTransmissionPlant** | Potted plant — light through leaves | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/DiffuseTransmissionPlant/glTF-Binary/DiffuseTransmissionPlant.glb) | +| **DiffuseTransmissionTeacup** | Teacup with diffuse transmission | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/DiffuseTransmissionTeacup/glTF-Binary/DiffuseTransmissionTeacup.glb) | + +**Character / animation:** + +| Model | Description | Download | +|---|---|---| +| **Fox** | Walk/run/idle animation | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/Fox/glTF-Binary/Fox.glb) | +| **BrainStem** | Character skeletal animation | [GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/BrainStem/glTF-Binary/BrainStem.glb) | +| **CesiumMan** | Walk cycle | 
[GLB](https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Assets/main/Models/CesiumMan/glTF-Binary/CesiumMan.glb) | + +--- + +### Sketchfab (CC0 / CC-BY) + +All Sketchfab models have a glTF export option. Filter by license for safety. + +**Curated collections:** + +| Collection | Models | Link | +|---|---|---| +| **plaggy's CC0 collection** | 193 models across categories | [sketchfab.com/plaggy/collections/cc0-public-domain](https://sketchfab.com/plaggy/collections/cc0-public-domain-free-models-c1af6539a9ee49f4b3d51fabd6c25a85) | +| **NeatWolf's glTF CC-BY** | Quality glTF models | [sketchfab.com/NeatWolf/collections/gltf-free-cc-attribution](https://sketchfab.com/NeatWolf/collections/gltf-free-cc-attribution-ef6c361507b54935b1fe84d7a0cf57c4) | +| **CC0 tag page** | All CC0 models | [sketchfab.com/tags/cc0](https://sketchfab.com/tags/cc0) | + +**Specific automotive models (CC0):** + +| Model | Author | Link | +|---|---|---| +| Concept Car 003 | @unityfan777 | [sketchfab.com/3d-models/free-concept-car-003](https://sketchfab.com/3d-models/free-concept-car-003-public-domain-cc0-77664fc474c444f4947e9834ed0d30ad) | +| Concept Car 006 | @unityfan777 | [sketchfab.com/3d-models/free-concept-car-006](https://sketchfab.com/3d-models/free-concept-car-006-public-domain-cc0-a4e19bb6e6394c4a95d4319ef7d265ba) | +| Concept Car 025 | @unityfan777 | [sketchfab.com/3d-models/free-concept-car-025](https://sketchfab.com/3d-models/free-concept-car-025-public-domain-cc0-e3a65443d3e44c33b594cec591c01c05) | + +--- + +### Kenney Assets (CC0) + +40k+ game-quality assets, all CC0. Use `.glb` exports. 
+ +| Pack | Use case | Download | +|---|---|---| +| **Furniture Kit** | AR furniture placement demo | [kenney.nl/assets/furniture-kit](https://kenney.nl/assets/furniture-kit) | +| **Food Kit** | AR restaurant menu | [kenney.nl/assets/food-kit](https://kenney.nl/assets/food-kit) | +| **Car Kit** | Vehicle showcase | [kenney.nl/assets/car-kit](https://kenney.nl/assets/car-kit) | +| **Nature Kit** | Outdoor AR scenes | [kenney.nl/assets/nature-kit](https://kenney.nl/assets/nature-kit) | +| **Space Kit** | Sci-fi demos | [kenney.nl/assets/space-kit](https://kenney.nl/assets/space-kit) | + +--- + +### Other free model sources + +| Source | License | Best for | URL | +|---|---|---|---| +| **Poly Haven Models** | CC0 | Realistic 3D scans | [polyhaven.com/models](https://polyhaven.com/models) | +| **Poimandres Market** | Free | Curated quality glTF | [market.pmnd.rs](https://market.pmnd.rs) | +| **CGTrader** | Mix free/paid | 338k+ GLB files | [cgtrader.com/3d-models/ext/glb](https://www.cgtrader.com/3d-models/ext/glb) | +| **awesome-cc0** | CC0 | Master list of all CC0 sources | [github.com/madjin/awesome-cc0](https://github.com/madjin/awesome-cc0) | + +--- + +## HDR Environments + +### Poly Haven (CC0) + +All CC0, available in multiple resolutions. Use **1K or 2K** for mobile. 
+ +**Direct download URL pattern:** +``` +https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/{resolution}/{name}_{resolution}.hdr +``` + +**Studio / product lighting:** + +| Environment | Best for | Direct download (2K) | +|---|---|---| +| **studio_small_02** | Clean product renders | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/studio_small_02_2k.hdr) | +| **studio_small_03** | Product renders | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/studio_small_03_2k.hdr) | +| **studio_small_08** | Warm product lighting | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/studio_small_08_2k.hdr) | +| **poly_haven_studio** | General studio | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/poly_haven_studio_2k.hdr) | + +Browse all studio HDRIs: [polyhaven.com/hdris/studio](https://polyhaven.com/hdris/studio) + +**Automotive / showroom:** + +| Environment | Description | Direct download (2K) | +|---|---|---| +| **autoshop_01** | Garage, cool fluorescent + skylight | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/autoshop_01_2k.hdr) | +| **royal_esplanade** | Indoor mall, warm spotlights | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/royal_esplanade_2k.hdr) | + +**Outdoor / architectural:** + +| Environment | Description | Direct download (2K) | +|---|---|---| +| **venice_sunset** | Golden sunset, water reflections | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/venice_sunset_2k.hdr) | +| **kloppenheim_06** | Soft sunset, god rays | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/kloppenheim_06_2k.hdr) | +| **evening_road_01** | Evening golden hour | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/evening_road_01_2k.hdr) | +| **industrial_sunset_02** | Sunset, factory silhouettes | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/industrial_sunset_02_2k.hdr) | +| **lebombo** | Bright daylight, general use | 
[Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/lebombo_2k.hdr) | +| **syferfontein_0d_clear** | Afternoon sun, architecture | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/syferfontein_0d_clear_2k.hdr) | +| **symmetrical_garden_02** | Bright garden | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/symmetrical_garden_02_2k.hdr) | +| **spruit_sunrise** | Golden sunrise, warm | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/spruit_sunrise_2k.hdr) | + +**Dark / dramatic:** + +| Environment | Description | Direct download (2K) | +|---|---|---| +| **moonless_golf** | Night sky | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/moonless_golf_2k.hdr) | +| **dikhololo_night** | Starfield | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/dikhololo_night_2k.hdr) | + +**Festive:** + +| Environment | Description | Direct download (2K) | +|---|---|---| +| **christmas_photo_studio_01** | Warm studio, Christmas tree lights | [Download](https://dl.polyhaven.org/file/ph-assets/HDRIs/hdr/2k/christmas_photo_studio_01_2k.hdr) | + +**Full catalog:** [polyhaven.com/hdris](https://polyhaven.com/hdris) (800+ free HDRIs) + +--- + +### Other free HDRI sources + +| Source | License | Notes | URL | +|---|---|---|---| +| **AmbientCG** | CC0 | 2000+ materials, HDRIs, models | [ambientcg.com](https://ambientcg.com) | +| **HDRI-Skies.com** | CC0 | 60+ free, great for archviz | [hdri-skies.com/free-hdris](https://hdri-skies.com/free-hdris/) | +| **iHDRI.com** | Free commercial | Up to 16K, 91-image panoramas | [ihdri.com](https://ihdri.com) | +| **HDRI Hub** | Free samples | Up to 10240x5120 | [hdri-hub.com](https://www.hdri-hub.com/hdrishop/freesamples/freehdri) | + +--- + +### How to use in SceneView + +```kotlin +val environment = rememberEnvironment(rememberEnvironmentLoader(engine)) { + createHDREnvironment("environments/studio_small_03_2k.hdr")!! 
+} + +Scene( + modifier = Modifier.fillMaxSize(), + environment = environment +) { + // Your nodes here +} +``` + +Place `.hdr` files in `src/main/assets/environments/`. + +--- + +## Recommended combos for marketing screenshots + +### 1. Product Showcase (e-commerce hero) +- **Model:** ChronographWatch or MaterialsVariantsShoe +- **Environment:** studio_small_03 (clean studio) +- **Features:** PBR rendering, orbit camera, reflections +- **Mood:** Professional, clean, commercial + +### 2. Luxury / Automotive +- **Model:** CarConcept or Sketchfab Concept Car 003 +- **Environment:** autoshop_01 or royal_esplanade +- **Features:** Clearcoat, reflections, material variants +- **Mood:** Showroom, premium + +### 3. AR Furniture Placement +- **Model:** SheenWoodLeatherSofa or Kenney Furniture Kit +- **Environment:** Real camera feed (AR mode) +- **Features:** ARScene, AnchorNode, plane detection, gestures +- **Mood:** Real-world integration + +### 4. Dramatic Hero Render +- **Model:** DamagedHelmet or ABeautifulGame +- **Environment:** venice_sunset or industrial_sunset_02 +- **Features:** HDR lighting, PBR, post-processing (bloom) +- **Mood:** Cinematic, impressive + +### 5. Glass / Transparency +- **Model:** MosquitoInAmber or GlassHurricaneCandleHolder +- **Environment:** poly_haven_studio +- **Features:** Transmission, volume, IOR +- **Mood:** Technical showcase + +### 6. Character Animation +- **Model:** Fox or BrainStem +- **Environment:** symmetrical_garden_02 +- **Features:** autoAnimate, animation controls, natural lighting +- **Mood:** Friendly, approachable + +### 7. Night / Atmosphere +- **Model:** Lantern (emissive) or PotOfCoals +- **Environment:** moonless_golf +- **Features:** Emissive materials, DynamicSkyNode, FogNode +- **Mood:** Atmospheric, moody + +### 8. 
Physics Demo +- **Model:** SphereNode/CubeNode primitives (no assets needed) +- **Environment:** lebombo +- **Features:** PhysicsNode, collision, gravity, tap interaction +- **Mood:** Fun, interactive + +--- + +## Asset optimization for mobile + +### Performance targets +- **Model polygons:** Under 100K triangles for 60fps on mid-range Android +- **Model file size:** Under 5 MB for demos, under 2 MB for production apps +- **HDR resolution:** 1K for development, 2K for production quality + +### HDR to KTX (faster runtime loading) + +```bash +# cmgen ships with the Filament SDK +cmgen --format=ktx --size=256 environment.hdr +``` + +KTX loads faster than HDR at runtime. Use HDR for development, KTX for production. + +### Optimizing GLB file size + +```bash +npx @gltf-transform/cli optimize input.glb output.glb +npx @gltf-transform/cli draco input.glb output.glb # Draco mesh compression +``` + +### Checking model stats + +```bash +npx @gltf-transform/cli inspect model.glb +# Shows: triangle count, material count, texture sizes, file size +``` diff --git a/marketing/comparison.md b/marketing/comparison.md new file mode 100644 index 000000000..b174460bb --- /dev/null +++ b/marketing/comparison.md @@ -0,0 +1,239 @@ +# SceneView vs. 
the alternatives + +*An honest comparison for Android developers evaluating 3D and AR options.* + +--- + +## The landscape + +If you want 3D or AR in an Android app today, here are your options: + +| Library | Approach | Status | +|---|---|---| +| **SceneView** | Jetpack Compose composables, Filament rendering, ARCore | Active, v3.2.0 | +| **Google Sceneform** | View-based, custom renderer, ARCore | Abandoned (archived 2021) | +| **Raw ARCore SDK** | Low-level session/frame API, bring your own renderer | Active but no UI layer | +| **Unity** | Full game engine embedded via `UnityPlayerActivity` | Active, heavy | +| **Rajawali** | OpenGL ES wrapper, imperative scene graph | Maintenance mode | +| **three.js (WebView)** | JavaScript 3D in a WebView | Active, but web-only perf | +| **Babylon Native** | C++ cross-platform runtime | Early stage on Android | + +--- + +## Side-by-side: adding a 3D model viewer + +### SceneView (Compose) + +```kotlin +// build.gradle +implementation("io.github.sceneview:sceneview:3.2.0") + +// One composable, that's it +@Composable +fun ModelViewer() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val model = rememberModelInstance(modelLoader, "models/helmet.glb") + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator() + ) { + model?.let { ModelNode(modelInstance = it, scaleToUnits = 1.0f) } + } +} +``` + +**Lines of code:** ~15 +**Files touched:** 1 (your composable) +**XML layouts:** 0 +**Lifecycle callbacks:** 0 +**Manual cleanup:** 0 + +--- + +### Google Sceneform (legacy, archived) + +```kotlin +// build.gradle — must use a community fork, original is archived +implementation("com.gorisse.thomas.sceneform:sceneform:1.21.0") + +// XML layout +// + +// Activity code (~80 lines) +class ModelViewerActivity : AppCompatActivity() { + private lateinit var arFragment: ArFragment + + override fun 
onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + setContentView(R.layout.activity_model_viewer) + arFragment = supportFragmentManager.findFragmentById(R.id.arFragment) as ArFragment + + arFragment.setOnTapArPlaneListener { hitResult, plane, motionEvent -> + val anchor = hitResult.createAnchor() + ModelRenderable.builder() + .setSource(this, Uri.parse("helmet.sfb")) + .build() + .thenAccept { renderable -> + val anchorNode = AnchorNode(anchor) + anchorNode.setParent(arFragment.arSceneView.scene) + val modelNode = TransformableNode(arFragment.transformationSystem) + modelNode.renderable = renderable + modelNode.setParent(anchorNode) + modelNode.select() + } + } + } + + override fun onResume() { super.onResume(); /* check AR availability */ } + override fun onPause() { super.onPause(); /* release resources */ } + override fun onDestroy() { super.onDestroy(); /* cleanup */ } +} +``` + +**Lines of code:** ~80+ +**Files touched:** 3+ (Activity, XML layout, manifest) +**Manual lifecycle:** Yes — `onResume`, `onPause`, `onDestroy` +**Status:** Archived. No updates since 2021. `.sfb` format deprecated. + +--- + +### Raw ARCore SDK + +```kotlin +// You get a Session, Frame, and Camera. That's it. +// You must bring your own renderer (OpenGL ES, Vulkan, or Filament directly). +// You must manage the GL surface, shader compilation, mesh uploading, +// lighting, shadow maps, and frame timing yourself. +// Typical setup: 500–1000 lines before rendering a single triangle. 
+``` + +**Lines of code:** 500–1000+ for basic rendering +**Skill required:** OpenGL/Vulkan expertise +**When it makes sense:** You're building a custom rendering engine + +--- + +### Unity (embedded) + +```kotlin +// build.gradle — Unity export as Android library +implementation(project(":unityLibrary")) + +// Activity +class UnityViewerActivity : UnityPlayerActivity() { + // All rendering logic lives in C# inside Unity + // Communication via UnitySendMessage / JNI bridge +} +``` + +**APK size increase:** 40–80 MB (Unity runtime) +**Build time increase:** Significant (Unity build pipeline) +**Compose integration:** None — Unity owns the entire Activity +**When it makes sense:** Full 3D game with existing Unity assets + +--- + +## Feature comparison + +| Feature | SceneView | Sceneform | Raw ARCore | Unity | +|---|---|---|---|---| +| **Jetpack Compose** | Native | No | No | No | +| **Declarative nodes** | Yes | No (imperative) | No API | No (C# scripts) | +| **Auto lifecycle** | Yes | Manual | Manual | Unity-managed | +| **PBR rendering** | Filament | Custom (limited) | DIY | Unity renderer | +| **glTF/GLB models** | Yes | .sfb (deprecated) | DIY | Yes | +| **Physics** | Built-in | No | No | Built-in | +| **Post-processing** | Bloom, DOF, SSAO, fog | No | DIY | Yes | +| **Dynamic sky** | Yes | No | No | Yes (HDRP) | +| **AR plane detection** | Yes | Yes | Yes | Yes (AR Foundation) | +| **AR image tracking** | Yes | Yes | Yes | Yes | +| **AR face tracking** | Yes | Yes | Yes | Yes | +| **Cloud anchors** | Yes | Yes | Yes | Yes | +| **Geospatial API** | Yes | No | Yes | Yes | +| **ViewNode (Compose in 3D)** | Yes | No | No | No | +| **AI tooling (MCP)** | Yes | No | No | No | +| **APK size impact** | ~5 MB | ~3 MB | ~1 MB | 40–80 MB | +| **Active maintenance** | Yes (2024–) | Abandoned | Google-maintained | Yes | +| **License** | Apache 2.0 | Apache 2.0 | Proprietary | Commercial | + +--- + +## Common objections + +### "We already use Unity for 3D" + +Unity is the 
right choice if you're building a 3D-first game. But if you're adding 3D to an
+existing Compose app — a product viewer, an AR feature, a data visualization — Unity's 40–350 MB
+runtime overhead, separate C# build pipeline, and inability to integrate with Compose make it
+overkill. (Developers have reported 350 MB minimum APK size for a basic Unity AR app on Android.)
+
+SceneView adds ~5 MB and works inside your existing Compose screens.
+
+### "Can't we just use ARCore directly?"
+
+ARCore gives you tracking data (planes, anchors, poses) but no rendering. You'd need to build
+your own renderer on top of OpenGL ES or Vulkan. That's months of work for a team with graphics
+expertise. SceneView gives you ARCore's full feature set with Filament's rendering, wrapped in
+Compose composables.
+
+### "Sceneform worked fine for us"
+
+Google archived Sceneform in 2021. The `.sfb` model format is deprecated. No Compose support.
+No new ARCore features (geospatial, streetscape, depth). The community fork ("Sceneform
+Maintained") has unresolved compatibility issues including 16 KB page size compliance required
+by Android 15 (API 35). SceneView was created as Sceneform's successor — the migration path
+is straightforward and documented in
+[MIGRATION.md](https://github.com/SceneView/sceneview-android/blob/main/MIGRATION.md).
+
+### "What about Kotlin Multiplatform / iOS?"
+
+SceneView is Android-first today. A KMP proof of concept (iOS via Filament's Metal backend)
+is on the v4.0 roadmap. For cross-platform AR today, Unity or Babylon Native are options —
+but they don't integrate with Compose.
+
+### "Is it production-ready?"
+
+SceneView is used in production apps on Google Play. It's built on Filament (Google's
+production rendering engine) and ARCore (Google's production AR platform). The API surface
+is stable and versioned. Breaking changes follow semantic versioning with migration guides.
+ +--- + +## Migration from Sceneform + +If you have an existing Sceneform app, the migration is documented step by step: + +| Sceneform concept | SceneView equivalent | +|---|---| +| `ArFragment` | `ARScene { }` composable | +| `ModelRenderable.builder()` | `rememberModelInstance(modelLoader, path)` | +| `AnchorNode(anchor).setParent(scene)` | `AnchorNode(anchor = a) { ... }` composable | +| `TransformableNode` | `ModelNode` with gesture parameters | +| `.sfb` model format | `.glb` / `.gltf` (standard glTF) | +| `onResume` / `onPause` / `onDestroy` | Automatic (Compose lifecycle) | +| `node.setParent(null); node.destroy()` | Remove from composition (conditional) | + +Full guide: [MIGRATION.md](https://github.com/SceneView/sceneview-android/blob/main/MIGRATION.md) + +--- + +## The bottom line + +| If you need... | Use | +|---|---| +| 3D in a Compose app | **SceneView** | +| AR features in a Compose app | **SceneView** | +| A full 3D game | Unity | +| A custom rendering engine | Raw ARCore + OpenGL/Vulkan | +| Nothing — it's a 2D app | Nothing (but SceneView makes "subtle 3D" trivial) | + +For the vast majority of Android apps that want to add 3D or AR, SceneView is the answer. +It's the only library that treats 3D as a first-class Compose citizen. + +--- + +*[github.com/SceneView/sceneview-android](https://github.com/SceneView/sceneview-android) — Apache 2.0 — built on Filament & ARCore* diff --git a/marketing/devto-article.md b/marketing/devto-article.md new file mode 100644 index 000000000..de1516ce3 --- /dev/null +++ b/marketing/devto-article.md @@ -0,0 +1,220 @@ +--- +title: "3D is just Compose UI — what SceneView 3.2 makes possible on Android" +published: true +description: "26 composable node types, physics, dynamic sky, fog, reflections — and it still feels like writing a Column." 
+tags: android, kotlin, jetpackcompose, augmentedreality +canonical_url: https://sceneview.github.io/showcase/ +cover_image: # Add a 1000x420 image of a SceneView render here +--- + +You already know how to build a Compose screen. A `Column` with some children. A `Box` with overlapping layers. You've done it a hundred times. + +What if a 3D scene worked exactly the same way? + +```kotlin +// A Compose UI screen +Column { + Text("Title") + Image(painter = painterResource(R.drawable.cover), contentDescription = null) +} + +// A 3D scene with SceneView +Scene(modifier = Modifier.fillMaxSize()) { + ModelNode(modelInstance = helmet, scaleToUnits = 1.0f, autoAnimate = true) + LightNode(type = LightManager.Type.SUN, apply = { intensity(100_000.0f) }) +} +``` + +Same pattern. Same Kotlin. Same mental model — now with depth. + +--- + +## What changed since 3.0 + +SceneView 3.0 introduced the core idea: **nodes are composables**. Since then, the library has grown to 26+ node types. Here's what 3.2 added: + +### Physics + +`PhysicsNode` brings rigid body simulation. Gravity, collision detection, tap-to-throw. + +```kotlin +Scene { + PhysicsNode( + shape = SphereShape(radius = 0.1f), + mass = 1.0f, + restitution = 0.8f + ) { + SphereNode(radius = 0.1f, materialInstance = ballMaterial) + } +} +``` + +### Dynamic sky + +`DynamicSkyNode` drives sun position from a single `timeOfDay: Float` value. Sunrise, noon, golden hour, sunset — all reactive to Compose state. 
+ +```kotlin +var timeOfDay by remember { mutableStateOf(0.5f) } + +Scene { + DynamicSkyNode(timeOfDay = timeOfDay, turbidity = 4.0f) +} + +Slider(value = timeOfDay, onValueChange = { timeOfDay = it }) +``` + +### Fog, reflections, lines, text + +- `FogNode` — atmospheric fog with density and height falloff +- `ReflectionProbeNode` — local cubemap reflections for metallic surfaces +- `LineNode` / `PathNode` — 3D polylines (measurements, drawing, animated paths) +- `TextNode` / `BillboardNode` — camera-facing text labels in 3D space + +### Post-processing + +Bloom, depth-of-field, SSAO, fog — all toggleable from Compose state. + +--- + +## The use case nobody talks about + +Most 3D demos show a rotating helmet on a black background. Cool — but who needs that? + +The real opportunity: **subtle 3D**. Replace a flat `Image()` on your product page with a `Scene {}`: + +```kotlin +// Before +Image(painter = painterResource(R.drawable.shoe), contentDescription = "Shoe") + +// After — interactive 3D in 10 extra lines +val model = rememberModelInstance(modelLoader, "models/shoe.glb") +Scene( + modifier = Modifier.fillMaxWidth().height(300.dp), + cameraManipulator = rememberCameraManipulator() +) { + model?.let { ModelNode(modelInstance = it, scaleToUnits = 1.0f) } +} +``` + +The customer orbits the product with one finger. No separate "3D viewer" screen. No Unity integration project. Just a composable. + +--- + +## AR works the same way + +`ARScene` is `Scene` with ARCore wired in: + +```kotlin +ARScene( + planeRenderer = true, + onSessionUpdated = { _, frame -> + anchor = frame.getUpdatedPlanes() + .firstOrNull { it.type == Plane.Type.HORIZONTAL_UPWARD_FACING } + ?.let { frame.createAnchorOrNull(it.centerPose) } + } +) { + anchor?.let { a -> + AnchorNode(anchor = a) { + ModelNode(modelInstance = sofa, scaleToUnits = 0.5f) + } + } +} +``` + +Plane detection, image tracking, face mesh, cloud anchors, geospatial API — all as composables. 
+ +--- + +## ViewNode — the feature nobody else has + +Render **any Composable** directly inside 3D space: + +```kotlin +AnchorNode(anchor = sofaAnchor) { + ModelNode(modelInstance = sofa) + ViewNode { + Card { + Text("Sofa Pro", style = MaterialTheme.typography.titleMedium) + Text("€ 599", style = MaterialTheme.typography.headlineMedium) + Button(onClick = {}) { Text("Buy in AR") } + } + } +} +``` + +A real Compose `Card` with buttons, text fields, images — floating in 3D space next to your AR content. No other Android 3D library does this. + +--- + +## The complete setup + +```kotlin +@Composable +fun ModelViewerScreen() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val model = rememberModelInstance(modelLoader, "models/helmet.glb") + val environment = rememberEnvironment(rememberEnvironmentLoader(engine)) { + createHDREnvironment("environments/sky_2k.hdr")!! + } + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + environment = environment, + cameraManipulator = rememberCameraManipulator(), + mainLightNode = rememberMainLightNode(engine) { intensity = 100_000.0f } + ) { + model?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f, autoAnimate = true) + } + } +} +// All resources destroyed automatically when composable leaves the tree +``` + +No XML. No fragments. No lifecycle callbacks. No OpenGL boilerplate. + +--- + +## vs. 
the alternatives + +| | SceneView | Sceneform | Unity | Raw ARCore | +|---|---|---|---|---| +| **Compose** | Native | No | No | No | +| **Setup** | 1 Gradle line | Archived | Separate pipeline | 500+ lines | +| **APK size** | ~5 MB | ~3 MB | 40–350 MB | ~1 MB | +| **Physics** | Built-in | No | Built-in | No | +| **Status** | Active | Dead (2021) | Active | No UI layer | + +--- + +## What's next: v4.0 + +- Multiple `Scene {}` composables on one screen +- `PortalNode` — scene inside a scene (AR portals) +- `SceneView-XR` — Android XR spatial computing +- Kotlin Multiplatform proof of concept (iOS) + +--- + +## Get started + +```gradle +// 3D only +implementation("io.github.sceneview:sceneview:3.2.0") + +// 3D + AR +implementation("io.github.sceneview:arsceneview:3.2.0") +``` + +15 sample apps. Full API docs. MCP server for AI-assisted development. + +- **GitHub**: [github.com/SceneView/sceneview-android](https://github.com/SceneView/sceneview-android) +- **Docs**: [sceneview.github.io](https://sceneview.github.io) +- **Discord**: [discord.gg/UbNDDBTNqb](https://discord.gg/UbNDDBTNqb) + +--- + +*SceneView is open source (Apache 2.0). Built on Google Filament 1.70 and ARCore 1.53.* diff --git a/marketing/github-profile.md b/marketing/github-profile.md new file mode 100644 index 000000000..2ba59f107 --- /dev/null +++ b/marketing/github-profile.md @@ -0,0 +1,110 @@ +# GitHub repository profile updates + +Suggested updates for the SceneView GitHub repositories and organization. + +--- + +## Repository description + +**Short (visible in search results):** + +> 3D and AR as Jetpack Compose composables. 26+ node types, physics, dynamic sky, fog — powered by Google Filament and ARCore. 
+ +**Topics/tags to add:** + +`android`, `kotlin`, `jetpack-compose`, `3d`, `ar`, `augmented-reality`, `arcore`, `filament`, `sceneview`, `gltf`, `compose`, `physics`, `pbr-rendering` + +--- + +## Organization profile README (SceneView/.github) + +If you have a `SceneView/.github` repo with a profile README, update it: + +```markdown +# SceneView + +The #1 3D & AR library for Android. + +SceneView brings Google Filament and ARCore into Jetpack Compose. +Nodes are composables. State drives the scene. Lifecycle is automatic. + +## Repositories + +| Repository | What it is | +|---|---| +| [sceneview-android](https://github.com/SceneView/sceneview-android) | Core SDK — `Scene {}` and `ARScene {}` composables for Android | +| [sceneview.github.io](https://github.com/SceneView/sceneview.github.io) | Documentation website | + +## Quick start + +Add the dependency and start composing: + + implementation("io.github.sceneview:sceneview:3.2.0") + +- [Documentation](https://sceneview.github.io) +- [Samples](https://github.com/SceneView/sceneview-android/tree/main/samples) +- [Discord](https://discord.gg/UbNDDBTNqb) +``` + +--- + +## Social preview image + +Create a 1280x640 image for the GitHub social preview (Settings > Social preview): + +**Layout:** +- Left half: SceneView logo + "3D & AR for Compose" tagline +- Right half: a screenshot of the model-viewer sample (helmet with HDR reflections) +- Bottom strip: `sceneview:3.2.0` · `26+ nodes` · `Filament` · `ARCore` · `Apache 2.0` +- Background: dark gradient (#1a1a2e to #16213e) +- Text: white, clean sans-serif + +**Tools:** Figma, Canva, or any image editor. Export as PNG, 1280x640. 
+ +--- + +## GitHub Releases — template for future releases + +When publishing a new release, use this format: + +```markdown +## What's new + +[1-3 sentence summary of the release theme] + +### New nodes +- **`NodeName`** — one-line description + +### Improvements +- Bullet points + +### Dependencies +- Filament X.Y → **X.Z** +- Kotlin X.Y → **X.Z** + +### Migration +[Link to migration section if breaking changes] + +--- + +**Get started:** +``` +implementation("io.github.sceneview:sceneview:X.Y.Z") +implementation("io.github.sceneview:arsceneview:X.Y.Z") +``` + +[Full changelog](https://github.com/SceneView/sceneview-android/blob/main/CHANGELOG.md) · [Documentation](https://sceneview.github.io) +``` + +--- + +## README.md badge row + +Add these badges to the top of the main README: + +```markdown +[![Maven Central](https://img.shields.io/maven-central/v/io.github.sceneview/sceneview)](https://central.sonatype.com/artifact/io.github.sceneview/sceneview) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) +[![Discord](https://img.shields.io/discord/DISCORD_ID?label=Discord&logo=discord)](https://discord.gg/UbNDDBTNqb) +[![API](https://img.shields.io/badge/API-24%2B-brightgreen.svg)](https://developer.android.com/about/versions/nougat) +``` diff --git a/marketing/release-notes-3.2.0.md b/marketing/release-notes-3.2.0.md new file mode 100644 index 000000000..f9fb22fc1 --- /dev/null +++ b/marketing/release-notes-3.2.0.md @@ -0,0 +1,89 @@ +# SceneView 3.2.0 Release Notes + +**Copy-paste template for GitHub Release.** + +--- + +## Title +SceneView 3.2.0 — Physics, Atmosphere, Drawing, and Text + +## Body + +### Highlights + +SceneView 3.2.0 adds **8 new composable node types** and **6 new sample apps**, making it the biggest feature release since the Compose rewrite. 
+ +### New node types + +| Node | What it does | +|---|---| +| `DynamicSkyNode` | Time-of-day sun with colour model (sunrise → noon → sunset) | +| `FogNode` | Atmospheric fog driven by Compose state | +| `ReflectionProbeNode` | Local IBL override for reflective surfaces | +| `PhysicsNode` | Rigid body simulation — gravity, collision, bounce | +| `LineNode` | Single 3D line segment | +| `PathNode` | 3D polyline through a list of points | +| `TextNode` | Camera-facing text label | +| `BillboardNode` | Camera-facing image quad | + +### New samples + +- **dynamic-sky** — interactive time-of-day, turbidity, and fog controls +- **reflection-probe** — metallic spheres with local cubemap reflections +- **physics-demo** — tap-to-throw balls with gravity and bounce +- **post-processing** — toggle bloom, depth-of-field, SSAO, and fog +- **line-path** — 3D lines, spirals, axis gizmos, animated sine wave +- **text-labels** — camera-facing text labels on 3D spheres + +### Documentation + +- **Full docs site launched** at [sceneview.github.io](https://sceneview.github.io/) +- 20 pages including quickstart, recipes cookbook, FAQ, architecture guide, performance guide, and more +- `llms.txt` updated with all new APIs + +### Getting started + +```kotlin +// 3D only +implementation("io.github.sceneview:sceneview:3.2.0") + +// 3D + AR +implementation("io.github.sceneview:arsceneview:3.2.0") +``` + +### Example: Dynamic sky with fog + +```kotlin +Scene(modifier = Modifier.fillMaxSize()) { + DynamicSkyNode(timeOfDay = 14f, turbidity = 2f) + FogNode(view = view, density = 0.05f, color = Color(0xFFCCDDFF)) + rememberModelInstance(modelLoader, "models/scene.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 2.0f) + } +} +``` + +### Example: Physics + +```kotlin +Scene(modifier = Modifier.fillMaxSize()) { + val ball = rememberModelInstance(modelLoader, "models/ball.glb") + ball?.let { + val node = ModelNode(modelInstance = it, scaleToUnits = 0.1f) + PhysicsNode(node = node, mass = 
1f, restitution = 0.6f, + linearVelocity = Position(0f, 5f, -3f), floorY = 0f) + } +} +``` + +### Migration + +No breaking changes from 3.1.x. Just update the version number. + +### Full changelog + +See [CHANGELOG](https://sceneview.github.io/changelog/) for the complete list of changes. + +--- + +**Thank you to all contributors!** Join the discussion on [Discord](https://discord.gg/UbNDDBTNqb). diff --git a/marketing/showcase.md b/marketing/showcase.md new file mode 100644 index 000000000..1ab7799a9 --- /dev/null +++ b/marketing/showcase.md @@ -0,0 +1,258 @@ +# SceneView — The #1 3D & AR Library for Android + +*Why thousands of Android developers chose SceneView — and why you should too.* + +--- + +## The pitch in 10 seconds + +SceneView makes 3D and AR work **exactly like Jetpack Compose UI**. Nodes are composables. +State drives the scene. Lifecycle is automatic. No boilerplate, no manual cleanup, no learning +a separate rendering paradigm. + +```kotlin +Scene(modifier = Modifier.fillMaxSize()) { + ModelNode(modelInstance = helmet, scaleToUnits = 1.0f, autoAnimate = true) + LightNode(type = LightManager.Type.SUN, apply = { intensity(100_000.0f) }) +} +``` + +That's a production-quality 3D viewer. Five lines. Same Kotlin you write every day. 
+ +--- + +## What you get + +### 26+ node types — all composable + +| Category | Nodes | +|---|---| +| **Models** | `ModelNode` — glTF/GLB with animations, gestures, scaling | +| **Geometry** | `CubeNode`, `SphereNode`, `CylinderNode`, `PlaneNode` — no asset files needed | +| **Lighting** | `LightNode` (sun, point, spot, directional), `DynamicSkyNode`, `ReflectionProbeNode` | +| **Atmosphere** | `FogNode` — distance/height fog driven by Compose state | +| **Media** | `ImageNode`, `VideoNode` (with chromakey), `ViewNode` (any Composable in 3D) | +| **Text** | `TextNode`, `BillboardNode` — camera-facing labels and UI callouts | +| **Drawing** | `LineNode`, `PathNode` — 3D polylines, measurements, animated paths | +| **Physics** | `PhysicsNode` — rigid body simulation, collision, gravity | +| **AR** | `AnchorNode`, `HitResultNode`, `AugmentedImageNode`, `AugmentedFaceNode`, `CloudAnchorNode`, `StreetscapeGeometryNode` | +| **Structure** | `Node` (grouping/pivots), `CameraNode`, `MeshNode` | + +Every one of these is a `@Composable` function. They enter the scene on composition, +update when state changes, and destroy themselves when they leave. Zero imperative code. + +--- + +### Production rendering — Google Filament + +SceneView is built on [Filament](https://github.com/google/filament), the same physically-based +rendering engine used inside Google's own apps (Google Search 3D viewer, Google Play Store). + +- Physically-based rendering (PBR) with metallic/roughness workflow +- HDR environment lighting (IBL) from `.hdr` and `.ktx` files +- Dynamic shadows, reflections, ambient occlusion +- Post-processing: bloom, depth-of-field, SSAO, fog +- 60fps on mid-range devices — Filament is optimized for mobile + +You get AAA-quality rendering without touching OpenGL, Vulkan, or shader code. + +--- + +### Full ARCore integration + +`ARScene` wraps `Scene` with ARCore wired in. 
Same composable pattern, now in the real world: + +```kotlin +ARScene( + planeRenderer = true, + onSessionUpdated = { _, frame -> + anchor = frame.getUpdatedPlanes() + .firstOrNull { it.type == Plane.Type.HORIZONTAL_UPWARD_FACING } + ?.let { frame.createAnchorOrNull(it.centerPose) } + } +) { + anchor?.let { a -> + AnchorNode(anchor = a) { + ModelNode(modelInstance = sofa, scaleToUnits = 0.5f) + } + } +} +``` + +**AR features included:** +- Plane detection (horizontal + vertical) with persistent mesh rendering +- Image detection and tracking (`AugmentedImageNode`) +- Face mesh tracking and augmentation (`AugmentedFaceNode`) +- Cloud anchors for cross-device persistence (`CloudAnchorNode`) +- Environmental HDR — real-world light estimation +- Streetscape geometry — city-scale 3D building meshes +- Geospatial API support — place content at lat/long coordinates + +--- + +## What makes SceneView different + +### 1. It's Compose — not a wrapper around something else + +60% of the top 1,000 Play Store apps use Jetpack Compose. It's the standard. Other 3D libraries +give you a `SurfaceView` to embed in your layout and an imperative API to manage the scene graph. +SceneView's scene graph **is** the Compose tree. The Compose runtime owns it. + +This means: +- `if/else` controls whether nodes exist +- `State` drives animations, positions, visibility +- `LaunchedEffect` and `DisposableEffect` work inside scenes +- Nesting nodes is the same as nesting `Column { Row { Text() } }` + +### 2. Zero boilerplate lifecycle + +```kotlin +// This is ALL the setup you need +val engine = rememberEngine() +val modelLoader = rememberModelLoader(engine) +val environment = rememberEnvironment(rememberEnvironmentLoader(engine)) { + createHDREnvironment("environments/sky_2k.hdr")!! +} +``` + +Every resource is `remember`-ed. Created once, cleaned up when the composable leaves. +No `onPause`/`onResume` dance. No `destroy()` calls. No leaked Filament objects. + +### 3. 
Thread safety by default + +Filament requires all JNI calls on the main thread. `rememberModelInstance` handles the +IO-to-main-thread transition automatically. You never think about it. + +### 4. Gesture handling built in + +`ModelNode` supports pinch-to-scale, drag-to-rotate, and two-finger-rotate out of the box. +`CameraManipulator` gives you orbit/pan/zoom with one line: + +```kotlin +Scene(cameraManipulator = rememberCameraManipulator()) { ... } +``` + +### 5. AI-assisted development + +SceneView ships with an MCP server (`@sceneview/mcp`) and a machine-readable `llms.txt` API +reference. Claude, Cursor, and other AI tools always have the current API — no hallucinated +methods, no outdated patterns. + +--- + +## Real-world use cases + +### E-commerce: product viewer in 10 lines + +Replace a static `Image()` with a `Scene {}` on your product detail page. The customer orbits +the product with one finger. No separate "3D viewer" screen. No SDK integration project. + +```kotlin +// Before: static image +Image(painter = painterResource(R.drawable.shoe), contentDescription = "Shoe") + +// After: interactive 3D viewer +Scene( + modifier = Modifier.fillMaxWidth().height(300.dp), + cameraManipulator = rememberCameraManipulator() +) { + rememberModelInstance(modelLoader, "models/shoe.glb")?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f) + } +} +``` + +### Furniture & interior design: AR placement + +Let customers see how a sofa looks in their living room. Tap to place, pinch to resize, +rotate with two fingers. Compose UI floats alongside in 3D space via `ViewNode`. + +### Education & training + +Interactive 3D anatomy models, molecular structures, mechanical assemblies — all controlled +by standard Compose sliders, buttons, and state. Students manipulate, not just watch. + +### Gaming & entertainment + +`PhysicsNode` provides rigid body simulation. Tap-to-throw, collision detection, gravity. 
+Combined with `DynamicSkyNode` for time-of-day lighting and `FogNode` for atmosphere. + +### Data visualization + +3D bar charts, globes, network graphs. The data is Compose `State` — update the state and +the 3D visualization reacts instantly. No manual scene graph manipulation. + +### Social & communication + +`AugmentedFaceNode` for face filters and effects. Apply materials to the face mesh, attach +3D objects to landmarks. Front-camera AR, no separate SDK. + +--- + +## The numbers + +| Metric | Value | +|---|---| +| **Node types** | 26+ composable nodes | +| **Rendering** | Google Filament 1.70 — physically-based, 60fps mobile | +| **AR backend** | ARCore 1.53 — latest features | +| **Min SDK** | 24 (Android 7.0) | +| **Setup** | 1 Gradle line, 0 XML | +| **Model viewer** | ~5 lines of Kotlin | +| **AR placement** | ~15 lines of Kotlin | +| **License** | Apache 2.0 — use it anywhere | + +--- + +## Get started in 60 seconds + +**Step 1:** Add the dependency + +```gradle +// 3D only +implementation("io.github.sceneview:sceneview:3.2.0") + +// 3D + AR +implementation("io.github.sceneview:arsceneview:3.2.0") +``` + +**Step 2:** Drop a scene into any composable + +```kotlin +@Composable +fun ProductViewer() { + val engine = rememberEngine() + val modelLoader = rememberModelLoader(engine) + val model = rememberModelInstance(modelLoader, "models/product.glb") + + Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + modelLoader = modelLoader, + cameraManipulator = rememberCameraManipulator() + ) { + model?.let { + ModelNode(modelInstance = it, scaleToUnits = 1.0f, autoAnimate = true) + } + } +} +``` + +**Step 3:** Ship it. + +No XML. No fragments. No lifecycle callbacks. No OpenGL boilerplate. Just Compose. 
+
+---
+
+## Links
+
+- **GitHub**: [github.com/SceneView/sceneview-android](https://github.com/SceneView/sceneview-android)
+- **Maven Central**: `io.github.sceneview:sceneview:3.2.0`
+- **API docs**: [sceneview.github.io](https://sceneview.github.io/api/sceneview-android/sceneview/)
+- **Discord**: [discord.gg/UbNDDBTNqb](https://discord.gg/UbNDDBTNqb)
+- **MCP server**: `npx @sceneview/mcp` for AI-assisted development
+- **Samples**: 15 working sample apps in the repository
+
+---
+
+*SceneView is open source. Built on Google Filament and ARCore. Used in production by apps on Google Play.*
diff --git a/marketing/v4-launch-plan.md b/marketing/v4-launch-plan.md
new file mode 100644
index 000000000..1c56e3c3f
--- /dev/null
+++ b/marketing/v4-launch-plan.md
@@ -0,0 +1,165 @@
+# SceneView 4.0 Launch Plan
+
+Marketing plan for the v4.0 release — the biggest update since the Compose rewrite.
+
+---
+
+## Key messages
+
+### Primary
+> **"3D everywhere in your app."** SceneView 4.0 lets you put multiple 3D scenes anywhere in your Compose layout — LazyColumn, Pager, BottomSheet. Same API, no limits.
+
+### Secondary
+> **"A window into another world."** PortalNode renders a scene inside a scene — AR portals, product showcases, game level transitions.
+
+### Tertiary
+> **"Your Compose skills work in XR."** SceneView-XR brings the same composable API to Android XR spatial computing headsets.
+ +--- + +## Launch timeline + +### T-4 weeks: Pre-announcement +- [ ] Teaser tweet: "Something big is coming to SceneView" + multi-scene screenshot +- [ ] Discord announcement: v4.0 beta branch available for testing +- [ ] Blog post: "The road to SceneView 4.0" — engineering deep-dive on multi-scene architecture + +### T-2 weeks: Beta release +- [ ] Publish `4.0.0-beta01` to Maven Central +- [ ] Update docs site with v4.0 API additions (keep v3.2 docs intact) +- [ ] Share beta with top contributors for feedback + +### T-0: Launch day +- [ ] Publish `4.0.0` to Maven Central +- [ ] GitHub Release with full changelog +- [ ] X/Twitter thread (template below) +- [ ] LinkedIn post + video (template below) +- [ ] Dev.to article (template below) +- [ ] Update docs site: home page, cheatsheet, architecture page +- [ ] Discord announcement +- [ ] Reddit: r/androiddev, r/augmentedreality, r/kotlin + +### T+1 week: Amplification +- [ ] YouTube video: "SceneView 4.0 — 3D everywhere in your Compose app" +- [ ] Submit to Android Weekly newsletter +- [ ] Submit to Kotlin Weekly newsletter +- [ ] Reach out to Android GDEs for coverage + +--- + +## X/Twitter thread template + +**Tweet 1 (hook):** +SceneView 4.0 is here. + +Multiple 3D scenes on one screen. +Portals — scenes inside scenes. +Android XR support. +Same Compose API. + +Thread below. 🧵 + +**Tweet 2 (multi-scene):** +Put 3D in a LazyColumn. In a BottomSheet. In a Pager. + +Each Scene has its own camera, lighting, and environment. They share one Engine. Zero overhead. + +```kotlin +Column { + Scene(...) { ModelNode(...) } // product hero + Scene(...) { SphereNode(...) } // data globe + LazyColumn { /* regular content */ } +} +``` + +**Tweet 3 (portals):** +PortalNode — render a scene inside a scene. + +A door that opens to another world. A product configurator with custom lighting zones. A game level transition. + +All declarative. All Compose. 
+ +**Tweet 4 (XR):** +SceneView-XR — the same API you know, now in spatial computing. + +XRScene { } works exactly like Scene { }. Your 3D skills and code patterns transfer directly to Android XR headsets. + +**Tweet 5 (CTA):** +Try it now: +``` +implementation("io.github.sceneview:sceneview:4.0.0") +``` + +Docs: sceneview.github.io +Discord: discord.gg/UbNDDBTNqb +Star us: github.com/SceneView/sceneview-android + +--- + +## LinkedIn post template + +**Caption:** +We just shipped SceneView 4.0 — the biggest update to Android's #1 3D & AR library since the Compose rewrite. + +What's new: +→ Multiple 3D scenes on one screen (LazyColumn, BottomSheet, Pager) +→ PortalNode — render a scene inside a scene +→ SceneView-XR for Android XR spatial computing +→ Kotlin Multiplatform proof of concept + +This matters because 3D is no longer a special case. It's just another Compose composable. Put it anywhere. Mix it with anything. + +Same API. Same Kotlin. Now limitless. + +Docs → sceneview.github.io +Source → github.com/SceneView/sceneview-android + +#Android #JetpackCompose #Kotlin #3D #AR #XR #OpenSource + +--- + +## Dev.to article outline + +**Title:** SceneView 4.0: Multiple 3D Scenes, Portals, and Android XR — All in Jetpack Compose + +**Sections:** +1. What is SceneView? (recap for new readers) +2. What's new in 4.0 + - Multi-scene: 3D in LazyColumn (code example + diagram) + - PortalNode: scenes inside scenes (code example + use cases) + - SceneView-XR: same API in spatial computing (code example) +3. Migration from 3.2.0 (what changes, what stays the same) +4. Benchmarks: multi-scene performance, shared Engine efficiency +5. What's next: ParticleNode, AnimationController, KMP iOS +6. 
Getting started + +**Tags:** android, kotlin, compose, 3d, augmentedreality + +--- + +## Key metrics to track + +| Metric | Source | Goal (launch week) | +|---|---|---| +| GitHub stars | GitHub API | +500 | +| Maven downloads | Maven Central | +2,000 | +| Discord members | Discord analytics | +200 | +| Dev.to views | Dev.to dashboard | 10,000 | +| X impressions | Twitter analytics | 50,000 | +| Docs page views | Google Analytics | 5,000 | + +--- + +## Demo apps for launch content + +### Multi-scene dashboard +A Compose screen with: +- Product viewer (Scene #1) — 3D shoe with orbit camera +- Data globe (Scene #2) — rotating Earth with markers +- AR preview (Scene #3) — small AR window + +### Portal walkthrough +A room with a door frame. Walk through the portal into a fantasy landscape with different sky and fog. + +### XR furniture configurator +Place furniture in a room, customize materials via floating Compose UI panels. diff --git a/marketing/v4-preview.md b/marketing/v4-preview.md new file mode 100644 index 000000000..5fec45cb8 --- /dev/null +++ b/marketing/v4-preview.md @@ -0,0 +1,244 @@ +# SceneView 4.0 — What's coming + +*A look at the next major release and why it matters for Android 3D & AR.* + +--- + +## The journey so far + +| Version | Theme | Key moment | +|---|---|---| +| **2.x** | View-based Sceneform successor | First Filament + ARCore integration | +| **3.0** | Compose rewrite | Nodes become composables — "3D is just Compose UI" | +| **3.1** | Stability + DX | `rememberModelInstance`, camera manipulator, gesture polish | +| **3.2** | Physics & Spatial UI | `PhysicsNode`, `DynamicSkyNode`, `FogNode`, `LineNode`, `TextNode`, `ReflectionProbeNode`, post-processing | +| **4.0** | Multi-scene, portals, XR | The release that makes SceneView a platform | + +Each release has expanded what's possible without changing the core principle: +**nodes are composables, state drives the scene, lifecycle is automatic.** + +v4.0 continues that trajectory — but the 
scope jumps significantly. + +--- + +## 4.0 feature preview + +### Multiple `Scene {}` composables on one screen + +Today, you get one `Scene` per screen. In 4.0, multiple independent scenes share a single +Filament `Engine`, each with its own camera, environment, and node tree. + +```kotlin +@Composable +fun DashboardScreen() { + Column { + // Product hero — rotating model with HDR environment + Scene( + modifier = Modifier.fillMaxWidth().height(300.dp), + engine = engine, + environment = studioEnvironment + ) { + ModelNode(modelInstance = product, scaleToUnits = 1.0f) + } + + // Inline data globe — different camera, different lighting + Scene( + modifier = Modifier.size(200.dp), + engine = engine, // same engine, shared GPU resources + environment = darkEnvironment + ) { + SphereNode(radius = 0.5f, materialInstance = globeMaterial) + dataPoints.forEach { point -> + CubeNode( + position = point.position, + size = Size(0.02f), + materialInstance = point.material + ) + } + } + + // Rest of the dashboard — standard Compose + LazyColumn { /* cards, charts, text */ } + } +} +``` + +**Why it matters:** Dashboards, e-commerce feeds, social timelines — anywhere you want +multiple 3D elements on the same screen without a single `Scene` owning the full viewport. +Each scene is just another composable in your layout. Mix freely with `LazyColumn`, `Pager`, +`BottomSheet` — whatever your app needs. + +--- + +### `PortalNode` — a scene inside a scene + +Render a secondary scene inside a 3D frame. Think of it as a window into another world, +placed inside your current scene. 
+
+```kotlin
+Scene(modifier = Modifier.fillMaxSize()) {
+    // The real world (or your main 3D scene)
+    ModelNode(modelInstance = room, scaleToUnits = 2.0f)
+
+    // A portal on the wall — look through it into a different scene
+    PortalNode(
+        position = Position(0f, 1.5f, -2f),
+        size = Size(1.2f, 1.8f),
+        scene = portalScene // independent scene with its own environment
+    ) {
+        // Inside the portal: different lighting, different world
+        ModelNode(modelInstance = fantasyLandscape, scaleToUnits = 5.0f)
+        DynamicSkyNode(sunPosition = Position(0.2f, 0.8f, 0.3f))
+        FogNode(density = 0.05f, color = Color(0.6f, 0.7f, 1.0f))
+    }
+}
+```
+
+**Use cases:**
+- AR portals — look through a "window" in your room into a virtual space
+- Product showcases — each product in its own lighting environment
+- Games — level transitions, dimensional rifts
+- Real estate — stand in one room, see another through the portal
+
+---
+
+### SceneView-XR — Android XR & spatial computing
+
+A new module for Android XR (spatial computing headsets and passthrough AR).
+Same composable API, now in spatial environments.
+
+```kotlin
+// New module
+implementation("io.github.sceneview:sceneview-xr:4.0.0")
+```
+
+```kotlin
+// Same familiar pattern — now in spatial computing
+XRScene(modifier = Modifier.fillMaxSize()) {
+    // Content placed in the user's physical space
+    ModelNode(
+        modelInstance = furniture,
+        scaleToUnits = 1.0f,
+        position = Position(0f, 0f, -2f) // 2 meters in front
+    )
+
+    // Spatial UI — Compose panels floating in space
+    ViewNode(position = Position(0.5f, 1.5f, -1.5f)) {
+        Card {
+            Text("Tap to customize")
+            ColorPicker(onColorSelected = { color -> /* update material */ })
+        }
+    }
+}
+```
+
+**Why it matters:** Android XR is Google's push into spatial computing. SceneView-XR means
+your existing 3D/AR skills and code patterns transfer directly. No new paradigm to learn.
+The same `ModelNode`, `LightNode`, `ViewNode` composables — just placed in physical space. 
+ +--- + +### Filament 2.x migration + +When Filament 2.x stabilizes, SceneView 4.0 will adopt it for: +- Improved rendering performance +- Better material system +- New shader capabilities +- Reduced memory footprint + +This is transparent to SceneView users — the composable API stays the same. + +--- + +### Kotlin Multiplatform proof of concept + +An experimental KMP target using Filament's Metal backend for iOS. +Same `Scene {}` composable, rendering natively on both platforms. + +This is a proof of concept in 4.0 — not production-ready — but it signals the direction: +**one 3D composable API, multiple platforms.** + +--- + +## What v3.x already delivers (and v4.0 builds on) + +For developers evaluating SceneView today, here's the current feature set you get +immediately with v3.2.0: + +### Rendering +- Physically-based rendering via Filament 1.70 +- HDR environment lighting (`.hdr`, `.ktx`) +- Dynamic shadows, ambient occlusion +- Post-processing: bloom, depth-of-field, SSAO, fog +- 60fps on mid-range devices + +### 3D nodes (all composable) +- `ModelNode` — glTF/GLB with animations, gestures +- `CubeNode`, `SphereNode`, `CylinderNode`, `PlaneNode` — geometry primitives +- `LightNode` — sun, point, spot, directional +- `DynamicSkyNode` — time-of-day sun positioning +- `FogNode` — atmospheric fog +- `ReflectionProbeNode` — local cubemap reflections +- `ImageNode`, `VideoNode` (with chromakey) +- `ViewNode` — any Composable rendered in 3D space +- `TextNode`, `BillboardNode` — camera-facing text and labels +- `LineNode`, `PathNode` — 3D polylines and paths +- `PhysicsNode` — rigid body simulation +- `CameraNode`, `MeshNode`, `Node` (grouping) + +### AR (ARScene composable) +- Plane detection with persistent mesh rendering +- Image detection and tracking +- Face mesh tracking and augmentation +- Cloud anchors (cross-device) +- Environmental HDR lighting +- Streetscape geometry (city-scale 3D) +- Geospatial API support + +### Developer experience +- `remember*` for 
all resources — automatic lifecycle +- Thread-safe model loading +- Orbit/pan/zoom camera in one line +- Multi-touch gestures built into nodes +- MCP server for AI-assisted development +- 15 working sample apps + +--- + +## The v4.0 vision + +SceneView started as "make 3D easy on Android." v3.0 proved that 3D could work like Compose. +v3.2 added physics, atmosphere, and spatial UI. + +v4.0 is about removing the last limitations: + +| Limitation today | v4.0 solution | +|---|---| +| One Scene per screen | Multiple independent Scenes | +| Flat scene graph | `PortalNode` — scenes within scenes | +| Android only | KMP proof of concept (iOS) | +| Phone/tablet only | `SceneView-XR` for spatial computing | +| Filament 1.x | Filament 2.x (when stable) | + +The goal: **SceneView becomes the standard way to do 3D on Android** — from a product +thumbnail to a spatial computing experience, all with the same composable API. + +--- + +## Timeline + +v4.0 is on the [roadmap](https://github.com/SceneView/sceneview-android/blob/main/ROADMAP.md) +as the next major release following the 3.x feature series. Follow the repo for updates. + +**You don't need to wait for 4.0.** Everything in v3.2.0 is production-ready today. +v4.0 adds capabilities on top — it doesn't replace anything. + +```gradle +// Start building today +implementation("io.github.sceneview:sceneview:3.2.0") +implementation("io.github.sceneview:arsceneview:3.2.0") +``` + +--- + +*[github.com/SceneView/sceneview-android](https://github.com/SceneView/sceneview-android) — Apache 2.0 — the #1 3D & AR library for Android* diff --git a/marketing/x-thread.md b/marketing/x-thread.md new file mode 100644 index 000000000..b4690f1db --- /dev/null +++ b/marketing/x-thread.md @@ -0,0 +1,176 @@ +# Twitter/X Thread — SceneView: #1 3D & AR for Android + +*Copy-paste each numbered block as a separate tweet. 
Thread format.* + +--- + +## Thread + +**1/12 — Hook** + +3D on Android used to require 500+ lines of boilerplate, lifecycle management, and OpenGL knowledge. + +SceneView reduced it to this: + +``` +Scene { + ModelNode(modelInstance = helmet, scaleToUnits = 1.0f) +} +``` + +A thread on why it's the #1 3D & AR library for Android: + +--- + +**2/12 — The core idea** + +The insight: 3D nodes should work like Compose UI. + +- `ModelNode` = like `Image()` but 3D +- `LightNode` = lighting as a composable +- `if/else` = controls what's in the scene +- `State` = drives animations + +No new paradigm. Just Compose — with depth. + +--- + +**3/12 — Before vs. After** + +Before (Sceneform / raw ARCore): +- XML layout + Fragment +- `ModelRenderable.builder().build().thenAccept { ... }` +- `onResume`, `onPause`, `onDestroy` +- Manual `setParent()`, manual `destroy()` + +After (SceneView): +- One `Scene { }` composable +- `rememberModelInstance()` → null while loading, auto-recompose when ready +- Lifecycle = automatic + +--- + +**4/12 — 26+ node types** + +All composable: + +Models: `ModelNode` +Geometry: `CubeNode`, `SphereNode`, `CylinderNode`, `PlaneNode` +Lighting: `LightNode`, `DynamicSkyNode` +Atmosphere: `FogNode`, `ReflectionProbeNode` +Media: `ImageNode`, `VideoNode`, `ViewNode` +Text: `TextNode`, `BillboardNode` +Drawing: `LineNode`, `PathNode` +Physics: `PhysicsNode` +AR: `AnchorNode`, `AugmentedImageNode`, `AugmentedFaceNode`, `CloudAnchorNode` + +--- + +**5/12 — The rendering engine** + +Built on Google Filament — the same PBR engine Google uses in Search and Play Store. + +- Physically-based rendering +- HDR environment lighting +- Dynamic shadows +- Post-processing: bloom, DOF, SSAO +- 60fps on mid-range phones + +Not a toy renderer. Production quality. 
+
+---
+
+**6/12 — AR in the same pattern**
+
+```
+ARScene(planeRenderer = true) {
+    anchor?.let { a ->
+        AnchorNode(anchor = a) {
+            ModelNode(modelInstance = sofa)
+        }
+    }
+}
+```
+
+Plane detection, image tracking, face mesh, cloud anchors, geospatial API — all as composables inside `ARScene { }`.
+
+---
+
+**7/12 — The killer feature nobody talks about**
+
+`ViewNode` — render ANY Compose UI inside 3D space.
+
+A `Card` with price and "Buy Now" button floating next to an AR-placed product. A tooltip hovering over a 3D model. A real `TextField` in a 3D scene.
+
+No other Android 3D library does this.
+
+---
+
+**8/12 — Physics**
+
+`PhysicsNode` — rigid body simulation in a composable.
+
+Gravity, collision, tap-to-throw. Combined with `DynamicSkyNode` for time-of-day lighting and `FogNode` for atmosphere.
+
+Interactive 3D worlds in Compose. Not a game engine — but enough for most apps.
+
+---
+
+**9/12 — vs. the alternatives**
+
+| | SceneView | Sceneform | Unity | Raw ARCore |
+|---|---|---|---|---|
+| Compose | Native | No | No | No |
+| APK size | ~5 MB | ~3 MB | 40-80 MB | ~1 MB |
+| Setup | 1 line | Manual builders | Separate build | 500+ lines |
+| Status | Active | Archived (dead) | Active | Active (SDK only, no UI layer) |
+
+---
+
+**10/12 — The use case that matters most**
+
+Most apps won't be "3D apps."
+
+But replacing `Image()` with `Scene {}` on a product page? That's 10 extra lines for a noticeably better experience.
+
+3D as a finishing touch — not a feature. That's the real opportunity.
+
+---
+
+**11/12 — What's next: v4.0**
+
+Coming in v4.0:
+- Multiple `Scene {}` composables on one screen
+- `PortalNode` — a scene inside a scene (AR portals)
+- `SceneView-XR` — Android XR spatial computing
+- Kotlin Multiplatform proof of concept (iOS)
+
+The platform is expanding.
+
+---
+
+**12/12 — Get started**
+
+```
+implementation("io.github.sceneview:sceneview:3.2.0")
+implementation("io.github.sceneview:arsceneview:3.2.0")
+```
+
+15 sample apps. Full API docs. 
MCP server for AI-assisted development. + +Open source. Apache 2.0. + +github.com/SceneView/sceneview-android + +#AndroidDev #JetpackCompose #3D #AR #Kotlin #SceneView + +--- + +## Posting tips + +- **Best time:** Tuesday–Thursday, 9–11 AM EST (US dev audience) or 3–5 PM CET (EU) +- **Thread format:** Post tweet 1, then reply chain for 2–12 +- **Engagement:** Quote-tweet #1 with the code screenshot from tweet 6 or 7 for visual appeal +- **Pin:** Pin tweet 1 to your profile for the week +- **Cross-post:** Copy to Bluesky and Mastodon (Android dev community is active there) +- **Follow-up:** Reply to your own thread 24h later with a link to the Medium article diff --git a/samples/dynamic-sky/README.md b/samples/dynamic-sky/README.md new file mode 100644 index 000000000..07aee4911 --- /dev/null +++ b/samples/dynamic-sky/README.md @@ -0,0 +1,46 @@ +# Dynamic Sky + +Control time-of-day lighting, atmospheric turbidity, and volumetric fog in real time using interactive sliders. + +## What it demonstrates +- `DynamicSkyNode` — procedural sun position, colour, and intensity driven by a single `timeOfDay` parameter +- `FogNode` — height-based atmospheric fog that writes `View.fogOptions` reactively +- Split-screen layout with a 3D viewport and a Compose control panel +- Loading a glTF model (`Fox.glb`) with `rememberModelInstance` and `autoAnimate = true` +- HDR environment lighting via `.hdr` file + +## Key code + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + view = view, + environment = environment +) { + DynamicSkyNode( + timeOfDay = timeOfDay, // 0–24, drives sun position + turbidity = turbidity, // 1–10, atmospheric haze + sunIntensity = sunIntensity + ) + + FogNode( + view = view, + density = fogDensity, + height = fogHeight, + color = Color(0.80f, 0.88f, 1.00f, 1f), + enabled = fogEnabled + ) + + foxInstance?.let { instance -> + ModelNode( + modelInstance = instance, + scaleToUnits = 0.012f, + autoAnimate = true + ) + } +} +``` + +## 
Running the sample +Open the project in Android Studio and run the `:samples:dynamic-sky` configuration. diff --git a/samples/line-path/README.md b/samples/line-path/README.md new file mode 100644 index 000000000..9a9de6bc9 --- /dev/null +++ b/samples/line-path/README.md @@ -0,0 +1,36 @@ +# Line & Path + +Draw 3D polylines and paths using `LineNode` and `PathNode`, with an animated sine wave driven by Compose animation. + +## What it demonstrates +- `LineNode` — a single line segment between two 3D points (used here for an XYZ axis gizmo) +- `PathNode` — a smooth polyline through a list of 3D points (spiral and sine wave) +- Compose `animateFloat` driving a real-time sine-wave animation via the `phase` parameter +- Procedural geometry generation (helix + sine wave) with no model files +- Colour materials created with `materialLoader.createColorInstance` + +## Key code + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + engine = engine, + cameraManipulator = rememberCameraManipulator( + orbitHomePosition = Position(0f, 1f, 3.5f) + ) +) { + // Axis gizmo + LineNode(start = Position(0f, 0f, 0f), end = Position(1f, 0f, 0f), materialInstance = redMaterial) + LineNode(start = Position(0f, 0f, 0f), end = Position(0f, 1f, 0f), materialInstance = greenMaterial) + LineNode(start = Position(0f, 0f, 0f), end = Position(0f, 0f, 1f), materialInstance = blueMaterial) + + // Spiral + PathNode(points = spiralPts, closed = false, materialInstance = yellowMaterial) + + // Animated sine wave + PathNode(points = sineWavePts, closed = false, materialInstance = cyanMaterial) +} +``` + +## Running the sample +Open the project in Android Studio and run the `:samples:line-path` configuration. 
diff --git a/samples/physics-demo/README.md b/samples/physics-demo/README.md new file mode 100644 index 000000000..6b8566fd9 --- /dev/null +++ b/samples/physics-demo/README.md @@ -0,0 +1,45 @@ +# Physics Demo + +Tap anywhere to throw balls that fall under gravity and bounce off the floor — a pure-Kotlin physics simulation. + +## What it demonstrates +- `PhysicsNode` — Euler-integration physics driving node position each frame (gravity, velocity, restitution) +- `SphereNode` and `CubeNode` — built-in primitive geometry (no model files needed) +- Tap gesture spawning via `onGestureListener` with `onSingleTapConfirmed` +- `SnapshotStateList` to trigger recomposition when balls are added/removed +- Performance management by capping the maximum number of simultaneous objects + +## Key code + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + onGestureListener = rememberOnGestureListener( + onSingleTapConfirmed = { event, _ -> + val ball = SphereNode(engine = engine, radius = 0.15f) + balls.add(ball) + if (balls.size > MAX_BALLS) balls.removeAt(0) + true + } + ) +) { + // Floor + CubeNode(size = Size(6f, 0.1f, 6f), position = Position(y = -0.05f)) + + // Physics-driven balls + for (ball in balls) { + Node(apply = { addChildNode(ball) }) + PhysicsNode( + node = ball, + mass = 1f, + restitution = 0.65f, + linearVelocity = Position(x = lateralSpeed, y = 0f, z = 0f), + floorY = 0f, + radius = 0.15f + ) + } +} +``` + +## Running the sample +Open the project in Android Studio and run the `:samples:physics-demo` configuration. diff --git a/samples/post-processing/README.md b/samples/post-processing/README.md new file mode 100644 index 000000000..0c79945d9 --- /dev/null +++ b/samples/post-processing/README.md @@ -0,0 +1,42 @@ +# Post-Processing + +Toggle and tune Bloom, Depth-of-Field, Screen-Space Ambient Occlusion (SSAO), and Fog in real time with interactive sliders. 
+ +## What it demonstrates +- Filament `View` post-processing options: `bloomOptions`, `depthOfFieldOptions`, `ambientOcclusionOptions`, `fogOptions` +- Passing a custom `View` via `rememberView(engine) { createView(engine).apply { … } }` +- Reactive option updates — Compose state writes directly to Filament view options each recomposition +- Auto-orbiting camera using `animateRotation` with `infiniteRepeatable` +- No new SceneView API needed — all effects use the existing `view` parameter on `Scene` + +## Key code + +```kotlin +val view = rememberView(engine) { + createView(engine).apply { setShadowingEnabled(true) } +} + +// Bloom — reactive from Compose state +view.bloomOptions = view.bloomOptions.also { + it.enabled = bloomEnabled + it.strength = bloomStrength + it.lensFlare = bloomLensFlare +} + +// SSAO +view.ambientOcclusionOptions = view.ambientOcclusionOptions.also { + it.enabled = ssaoEnabled + it.intensity = ssaoIntensity +} + +Scene( + modifier = Modifier.fillMaxSize(), + view = view, + environment = environment +) { + modelInstance?.let { ModelNode(modelInstance = it, scaleToUnits = 0.5f) } +} +``` + +## Running the sample +Open the project in Android Studio and run the `:samples:post-processing` configuration. diff --git a/samples/reflection-probe/README.md b/samples/reflection-probe/README.md new file mode 100644 index 000000000..496333947 --- /dev/null +++ b/samples/reflection-probe/README.md @@ -0,0 +1,40 @@ +# Reflection Probe + +Override the scene's image-based lighting (IBL) with a `ReflectionProbeNode` to create high-quality reflections on metallic surfaces. 
+ +## What it demonstrates +- `ReflectionProbeNode` — injects a high-quality HDR cubemap as the IBL source for the entire scene (or a zone) +- Metallic material creation with `materialLoader.createColorInstance(metallic = 1.0f, roughness = 0.1f)` +- `SphereNode` — built-in primitive showing mirror-like IBL reflections +- Passing a Filament `scene` explicitly via `rememberScene(engine)` for probe access +- Zone-based probe system with configurable radius (0 = global) + +## Key code + +```kotlin +val scene = rememberScene(engine) +val probeEnvironment = rememberEnvironment(environmentLoader) { + environmentLoader.createHDREnvironment("environments/sky_2k.hdr")!! +} + +Scene( + modifier = Modifier.fillMaxSize(), + scene = scene, + environment = defaultEnvironment // dark fallback +) { + ReflectionProbeNode( + filamentScene = scene, + environment = probeEnvironment, + radius = 0f, // global — applies everywhere + cameraPosition = cameraPosition + ) + + SphereNode( + radius = 0.8f, + materialInstance = metallicMaterial + ) +} +``` + +## Running the sample +Open the project in Android Studio and run the `:samples:reflection-probe` configuration. diff --git a/samples/text-labels/README.md b/samples/text-labels/README.md new file mode 100644 index 000000000..5fc26580f --- /dev/null +++ b/samples/text-labels/README.md @@ -0,0 +1,46 @@ +# Text Labels + +Camera-facing 3D text labels attached to coloured spheres, with tap-to-cycle interaction. 
+ +## What it demonstrates +- `TextNode` — billboard text labels that always face the camera via `cameraPositionProvider` +- `SphereNode` with `isTouchable = true` — built-in touch handling on primitive geometry +- Tap interaction via `onSingleTapConfirmed` to cycle label text +- Auto-orbiting camera using `animateRotation` with `infiniteRepeatable` +- `SnapshotStateList` for reactive label text updates + +## Key code + +```kotlin +Scene( + modifier = Modifier.fillMaxSize(), + onFrame = { + centerNode.rotation = cameraRotation + cameraPos = cameraNode.worldPosition + } +) { + objects.forEachIndexed { index, obj -> + SphereNode( + radius = obj.radius, + materialInstance = sphereMaterial, + apply = { + position = obj.position + isTouchable = true + onSingleTapConfirmed = { labels[index] = nextLabel(labels[index]); true } + } + ) + + TextNode( + text = labels[index], + fontSize = 52f, + widthMeters = 0.55f, + heightMeters = 0.18f, + position = Position(x = obj.position.x, y = obj.position.y + 0.37f, z = obj.position.z), + cameraPositionProvider = { cameraPos } + ) + } +} +``` + +## Running the sample +Open the project in Android Studio and run the `:samples:text-labels` configuration.