SceneView/sceneview-android

3D and AR Android Jetpack Compose and Layout View based on Google Filament and ARCore

3D Scene (Filament)

Dependency

app/build.gradle

dependencies {
    // Sceneview
    implementation("io.github.sceneview:sceneview:2.1.1")
}

API

https://sceneview.github.io/api/sceneview-android/sceneview/

Usage

val engine = rememberEngine()
val modelLoader = rememberModelLoader(engine)
val environmentLoader = rememberEnvironmentLoader(engine)
val cameraNode = rememberCameraNode(engine).apply {
    position = Position(z = 4.0f)
}
val centerNode = rememberNode(engine)
    .addChildNode(cameraNode)
val cameraTransition = rememberInfiniteTransition(label = "CameraTransition")
val cameraRotation by cameraTransition.animateRotation(
    initialValue = Rotation(y = 0.0f),
    targetValue = Rotation(y = 360.0f),
    animationSpec = infiniteRepeatable(
        animation = tween(durationMillis = 7.seconds.toInt(MILLISECONDS))
    )
)
Scene(
    modifier = Modifier.fillMaxSize(),
    engine = engine,
    modelLoader = modelLoader,
    cameraNode = cameraNode,
    childNodes = listOf(
        centerNode,
        rememberNode {
            ModelNode(
                modelInstance = modelLoader.createModelInstance(
                    assetFileLocation = "models/damaged_helmet.glb"
                ),
                scaleToUnits = 1.0f
            )
        }
    ),
    environment = environmentLoader.createHDREnvironment(
        assetFileLocation = "environments/sky_2k.hdr"
    )!!,
    onFrame = {
        centerNode.rotation = cameraRotation
        cameraNode.lookAt(centerNode)
    }
)

Current Version

Filament

Included Dependencies

// Filament
def filament_version = '1.51.0'
api "com.google.android.filament:filament-android:$filament_version"
api "com.google.android.filament:gltfio-android:$filament_version"
api "com.google.android.filament:filament-utils-android:$filament_version"

AR ARScene (Scene + ARCore)

Dependency

app/build.gradle

dependencies {
    // ARSceneview
    implementation 'io.github.sceneview:arsceneview:2.1.1'
}

API Reference

Usage

val engine = rememberEngine()
val modelLoader = rememberModelLoader(engine)
val environmentLoader = rememberEnvironmentLoader(engine)
Scene(
    modifier = Modifier.fillMaxSize(),
    engine = engine,
    modelLoader = modelLoader,
    childNodes = rememberNodes {
        add(ModelNode(modelLoader.createModelInstance("model.glb")).apply {
            // Move the node 4 units in Camera front direction
            position = Position(z = -4.0f)
        })
    },
    environment = environmentLoader.createHDREnvironment("environment.hdr")!!
)

Sample

AR Model Viewer

val engine = rememberEngine()
val modelLoader = rememberModelLoader(engine)
val model = modelLoader.createModel("model.glb")
var frame by remember { mutableStateOf<Frame?>(null) }
val childNodes = rememberNodes()
ARScene(
    modifier = Modifier.fillMaxSize(),
    engine = engine,
    modelLoader = modelLoader,
    onSessionUpdated = { session, updatedFrame ->
        frame = updatedFrame
    },
    onGestureListener = rememberOnGestureListener(
        onSingleTapConfirmed = { motionEvent, node ->
            val hitResults = frame?.hitTest(motionEvent.x, motionEvent.y)
            val anchor = hitResults?.firstOrNull {
                it.isValid(depthPoint = false, point = false)
            }?.createAnchorOrNull()

            if (anchor != null) {
                val anchorNode = AnchorNode(engine = engine, anchor = anchor)
                anchorNode.addChildNode(
                    ModelNode(modelInstance = modelLoader.createInstance(model)!!)
                )
                childNodes += anchorNode
            }
        }
    )
)

Sample
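
To reset the scene, anchors placed this way can later be detached and their nodes removed. A minimal sketch, assuming childNodes only contains the AnchorNodes created in the tap handler above (and that AnchorNode exposes its ARCore anchor):

childNodes.filterIsInstance<AnchorNode>().forEach { anchorNode ->
    anchorNode.anchor.detach() // release the underlying ARCore anchor
    childNodes -= anchorNode   // remove the node (and its model child) from the scene
}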

3D @Composable Scene()

@Composable
fun Scene(
modifier: Modifier = Modifier,
/**
* Provide your own instance if you want to share Filament resources between multiple views.
*/
engine: Engine = rememberEngine(),
/**
* Consumes a blob of glTF 2.0 content (either JSON or GLB) and produces a [Model] object, which is
* a bundle of Filament textures, vertex buffers, index buffers, etc.
*
* A [Model] is composed of 1 or more [ModelInstance] objects which contain entities and components.
*/
modelLoader: ModelLoader = rememberModelLoader(engine),
/**
* A Filament Material defines the visual appearance of an object.
*
* Materials function as templates from which [MaterialInstance]s can be spawned.
*/
materialLoader: MaterialLoader = rememberMaterialLoader(engine),
/**
* Utility for decoding an HDR file or consuming KTX1 files and producing Filament textures,
* IBLs, and sky boxes.
*
* KTX is a simple container format that makes it easy to bundle miplevels and cubemap faces
* into a single file.
*/
environmentLoader: EnvironmentLoader = rememberEnvironmentLoader(engine),
/**
* Encompasses all the state needed for rendering a [Scene].
*
* [View] instances are heavy objects that internally cache a lot of data needed for
* rendering. It is not advised for an application to use many View objects.
*
* For example, in a game, a [View] could be used for the main scene and another one for the
* game's user interface. More <code>View</code> instances could be used for creating special
* effects (e.g. a [View] is akin to a rendering pass).
*/
view: View = rememberView(engine),
/**
* Controls whether the render target (SurfaceView) is opaque or not.
* The render target is considered opaque by default.
*/
isOpaque: Boolean = true,
/**
* A [Renderer] instance represents an operating system's window.
*
* Typically, applications create a [Renderer] per window. The [Renderer] generates drawing
* commands for the render thread and manages frame latency.
*/
renderer: Renderer = rememberRenderer(engine),
/**
* Provide your own instance if you want to share [Node]s' scene between multiple views.
*/
scene: Scene = rememberScene(engine),
/**
* Defines the lighting environment and the skybox of the scene.
*
* Environments are usually captured as high-resolution HDR equirectangular images and processed
* by the cmgen tool to generate the data needed by IndirectLight.
*
* You can also process an HDR at runtime, but this is more resource-intensive.
*
* - Currently IndirectLight is intended to be used for "distant probes", that is, to represent
* global illumination from a distant (i.e. at infinity) environment, such as the sky or distant
* mountains.
* Only a single IndirectLight can be used in a Scene. This limitation will be lifted in the
* future.
*
* - When added to a Scene, the Skybox fills all untouched pixels.
*
* @see [EnvironmentLoader]
*/
environment: Environment = rememberEnvironment(environmentLoader, isOpaque = isOpaque),
/**
* Always add a direct light source since it is required for shadowing.
*
* We highly recommend adding an [IndirectLight] as well.
*/
mainLightNode: LightNode? = rememberMainLightNode(engine),
/**
* Represents a virtual camera, which determines the perspective through which the scene is
* viewed.
*
* All other functionality in Node is supported. You can access the position and rotation of the
* camera, assign a collision shape to it, or add children to it.
*/
cameraNode: CameraNode = rememberCameraNode(engine),
/**
* List of the scene's nodes that can be linked to a `mutableStateOf<List<Node>>()`
*/
childNodes: List<Node> = rememberNodes(),
/**
* Physics system to handle collisions between nodes, hit testing on nodes, etc.
*/
collisionSystem: CollisionSystem = rememberCollisionSystem(view),
/**
* Helper that enables camera interaction similar to Sketchfab or Google Maps.
*
* Needs to be a callable function because it can be reinitialized in case of a viewport change
* or a manual camera node position change.
*
* The first onTouch event triggers the first manipulator build, so you can change the camera
* position before any user gesture.
*
* Clients notify the camera manipulator of various mouse or touch events, then periodically
* call its getLookAt() method so that they can adjust their camera(s). Three modes are
* supported: ORBIT, MAP, and FREE_FLIGHT. To construct a manipulator instance, the desired mode
* is passed into the create method.
*/
cameraManipulator: Manipulator? = rememberCameraManipulator(),
/**
* Used for [Node]s that can display an Android [View].
*
* Manages a [FrameLayout] that is attached directly to a [WindowManager] that other views can be
* added and removed from.
*
* To render a [View], the [View] must be attached to a [WindowManager] so that it can be properly
* drawn. This class encapsulates a [FrameLayout] that is attached to a [WindowManager] that other
* views can be added to as children. This allows us to safely and correctly draw the [View]
* associated with a [RenderableManager] [Entity] and a [MaterialInstance] while keeping them
* isolated from the rest of the activity's View hierarchy.
*
* Additionally, this manages the lifecycle of the window to help ensure that the window is
* added/removed from the WindowManager at the appropriate times.
*/
viewNodeWindowManager: ViewNode.WindowManager? = null,
/**
* The listener invoked for all the gesture detector callbacks.
*
* Detects various gestures and events.
* The gesture listener callback will notify users when a particular motion event has occurred.
* Responds to Android touch events with listeners.
*/
onGestureListener: GestureDetector.OnGestureListener? = rememberOnGestureListener(),
onTouchEvent: ((e: MotionEvent, hitResult: HitResult?) -> Boolean)? = null,
activity: ComponentActivity? = LocalContext.current as? ComponentActivity,
lifecycle: Lifecycle = LocalLifecycleOwner.current.lifecycle,
/**
* Invoked when a frame is processed.
*
* Registers a callback to be invoked when a valid Frame is being processed.
*
* The callback is invoked once per frame, immediately before the scene is updated.
*
* The callback will only be invoked if the Frame is considered valid.
*/
onFrame: ((frameTimeNanos: Long) -> Unit)? = null,
onViewCreated: (SceneView.() -> Unit)? = null,
onViewUpdated: (SceneView.() -> Unit)? = null
) {
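
Most parameters above have sensible defaults, so you usually only override what you need. As a minimal sketch (the tap reaction is illustrative, not prescribed by the API above), a gesture listener can be combined with the remembered resources to react when a node is tapped:

val engine = rememberEngine()
val modelLoader = rememberModelLoader(engine)
Scene(
    modifier = Modifier.fillMaxSize(),
    engine = engine,
    modelLoader = modelLoader,
    childNodes = rememberNodes {
        add(ModelNode(modelLoader.createModelInstance("models/damaged_helmet.glb")))
    },
    onGestureListener = rememberOnGestureListener(
        onSingleTapConfirmed = { _, node ->
            // node is the tapped Node, or null when the tap hit empty space
            node?.rotation = Rotation(y = 45.0f)
        }
    )
)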

AR @Composable ARScene()

@Composable
fun ARScene(
modifier: Modifier = Modifier,
/**
* Provide your own instance if you want to share Filament resources between multiple views.
*/
engine: Engine = rememberEngine(),
/**
* Consumes a blob of glTF 2.0 content (either JSON or GLB) and produces a [Model] object, which is
* a bundle of Filament textures, vertex buffers, index buffers, etc.
*
* A [Model] is composed of 1 or more [ModelInstance] objects which contain entities and components.
*/
modelLoader: ModelLoader = rememberModelLoader(engine),
/**
* A Filament Material defines the visual appearance of an object.
*
* Materials function as templates from which [MaterialInstance]s can be spawned.
*/
materialLoader: MaterialLoader = rememberMaterialLoader(engine),
/**
* Utility for decoding an HDR file or consuming KTX1 files and producing Filament textures,
* IBLs, and sky boxes.
*
* KTX is a simple container format that makes it easy to bundle miplevels and cubemap faces
* into a single file.
*/
environmentLoader: EnvironmentLoader = rememberEnvironmentLoader(engine),
/**
* Fundamental session features that can be requested.
* @see Session.Feature
*/
sessionFeatures: Set<Session.Feature> = setOf(),
/**
* Sets the camera config to use.
* The config must be one returned by [Session.getSupportedCameraConfigs].
* Provides details of a camera configuration such as size of the CPU image and GPU texture.
*
* @see Session.setCameraConfig
*/
sessionCameraConfig: ((Session) -> CameraConfig)? = null,
/**
* Configures the session and verifies that the enabled features in the specified session config
* are supported with the currently set camera config.
*
* @see Session.configure
*/
sessionConfiguration: ((session: Session, Config) -> Unit)? = null,
/**
* Enable the plane renderer.
*/
planeRenderer: Boolean = true,
/**
* The [ARCameraStream] to render the camera texture.
*
* Use it to control whether occlusion should be enabled or disabled.
*/
cameraStream: ARCameraStream? = rememberARCameraStream(materialLoader),
/**
* Encompasses all the state needed for rendering a [Scene].
*
* [View] instances are heavy objects that internally cache a lot of data needed for
* rendering. It is not advised for an application to use many View objects.
*
* For example, in a game, a [View] could be used for the main scene and another one for the
* game's user interface. More <code>View</code> instances could be used for creating special
* effects (e.g. a [View] is akin to a rendering pass).
*/
view: View = rememberView(engine),
/**
* Controls whether the render target (SurfaceView) is opaque or not.
* The render target is considered opaque by default.
*/
isOpaque: Boolean = true,
/**
* A [Renderer] instance represents an operating system's window.
*
* Typically, applications create a [Renderer] per window. The [Renderer] generates drawing
* commands for the render thread and manages frame latency.
*/
renderer: Renderer = rememberRenderer(engine),
/**
* Provide your own instance if you want to share [Node]s' scene between multiple views.
*/
scene: Scene = rememberScene(engine),
/**
* Defines the lighting environment and the skybox of the scene.
*
* Environments are usually captured as high-resolution HDR equirectangular images and processed
* by the cmgen tool to generate the data needed by IndirectLight.
*
* You can also process an HDR at runtime, but this is more resource-intensive.
*
* - Currently IndirectLight is intended to be used for "distant probes", that is, to represent
* global illumination from a distant (i.e. at infinity) environment, such as the sky or distant
* mountains.
* Only a single IndirectLight can be used in a Scene. This limitation will be lifted in the
* future.
*
* - When added to a Scene, the Skybox fills all untouched pixels.
*
* @see [EnvironmentLoader]
*/
environment: Environment = rememberAREnvironment(engine),
/**
* Always add a direct light source since it is required for shadowing.
*
* We highly recommend adding an [IndirectLight] as well.
*/
mainLightNode: LightNode? = rememberMainLightNode(engine),
cameraNode: ARCameraNode = rememberARCameraNode(engine),
/**
* List of the scene's nodes that can be linked to a `mutableStateOf<List<Node>>()`
*/
childNodes: List<Node> = rememberNodes(),
/**
* Physics system to handle collisions between nodes, hit testing on nodes, etc.
*/
collisionSystem: CollisionSystem = rememberCollisionSystem(view),
/**
* Used for [Node]s that can display an Android [View].
*
* Manages a [FrameLayout] that is attached directly to a [WindowManager] that other views can be
* added and removed from.
*
* To render a [View], the [View] must be attached to a [WindowManager] so that it can be properly
* drawn. This class encapsulates a [FrameLayout] that is attached to a [WindowManager] that other
* views can be added to as children. This allows us to safely and correctly draw the [View]
* associated with a [RenderableManager] [Entity] and a [MaterialInstance] while keeping them
* isolated from the rest of the activity's View hierarchy.
*
* Additionally, this manages the lifecycle of the window to help ensure that the window is
* added/removed from the WindowManager at the appropriate times.
*/
viewNodeWindowManager: WindowManager? = null,
/**
* The session is ready to be accessed.
*/
onSessionCreated: ((session: Session) -> Unit)? = null,
/**
* The session has been resumed
*/
onSessionResumed: ((session: Session) -> Unit)? = null,
/**
* The session has been paused
*/
onSessionPaused: ((session: Session) -> Unit)? = null,
/**
* Invoked when an ARCore error occurs.
*
* Registers a callback to be invoked when the ARCore Session cannot be initialized because
* ARCore is not available on the device or the camera permission has been denied.
*/
onSessionFailed: ((exception: Exception) -> Unit)? = null,
/**
* Updates of the state of the ARCore system.
*
* This includes: receiving a new camera frame, updating the location of the device, updating
* the location of tracking anchors, updating detected planes, etc.
*
* This call may update the pose of all created anchors and detected planes. The set of updated
* objects is accessible through [Frame.getUpdatedTrackables].
*
* Invoked once per [Frame] immediately before the Scene is updated.
*/
onSessionUpdated: ((session: Session, frame: Frame) -> Unit)? = null,
/**
* Listen for camera tracking failure.
*
* The reason why [Camera.getTrackingState] is [TrackingState.PAUSED], or `null` if it is
* [TrackingState.TRACKING].
*/
onTrackingFailureChanged: ((trackingFailureReason: TrackingFailureReason?) -> Unit)? = null,
/**
* The listener invoked for all the gesture detector callbacks.
*/
onGestureListener: GestureDetector.OnGestureListener? = rememberOnGestureListener(),
onTouchEvent: ((e: MotionEvent, hitResult: HitResult?) -> Boolean)? = null,
activity: ComponentActivity? = LocalContext.current as? ComponentActivity,
lifecycle: Lifecycle = LocalLifecycleOwner.current.lifecycle,
onViewUpdated: (ARSceneView.() -> Unit)? = null,
onViewCreated: (ARSceneView.() -> Unit)? = null
) {
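
The sessionConfiguration callback is where the ARCore Config can be tuned before the session runs. A minimal sketch, using standard ARCore Config APIs (values are illustrative), that enables depth-based occlusion only when the device supports it, pairing with the cameraStream occlusion control above:

val engine = rememberEngine()
val modelLoader = rememberModelLoader(engine)
ARScene(
    modifier = Modifier.fillMaxSize(),
    engine = engine,
    modelLoader = modelLoader,
    sessionConfiguration = { session, config ->
        // com.google.ar.core.Config: enable depth only when supported by the device
        config.depthMode = if (session.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
            Config.DepthMode.AUTOMATIC
        } else {
            Config.DepthMode.DISABLED
        }
        config.lightEstimationMode = Config.LightEstimationMode.ENVIRONMENTAL_HDR
    },
    onTrackingFailureChanged = { reason ->
        // Surface `reason` to the user while camera tracking is paused
    }
)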

Samples

https://github.com/SceneView/sceneview-android/tree/main/samples

Links

Tutorials

YouTube Videos

Filament

GitHub

https://github.com/google/filament

Dependencies

// Filament
def filament_version = '1.51.0'
api "com.google.android.filament:filament-android:$filament_version"
api "com.google.android.filament:gltfio-android:$filament_version"
api "com.google.android.filament:filament-utils-android:$filament_version"

ARCore Dependency

// ARCore
api "com.google.ar:core:1.42.0"

Support our work

Help us

  • Buy devices to test the SDK on
  • Equipment for decent video recording of tutorials and presentations
  • Sceneview Hosting Fees

How To Contribute

Open Collective


⚠️ Geospatial API: Be sure to follow the official Google Geospatial Developer guide to enable the Geospatial API in your application.
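
As a minimal sketch of what that can look like with ARScene's sessionConfiguration callback (the Geospatial API also requires the Cloud project / API key setup described in the guide):

ARScene(
    // ...engine, modelLoader and other parameters as in the samples above
    sessionConfiguration = { session, config ->
        // com.google.ar.core.Config: enable Geospatial only when the device supports it
        if (session.isGeospatialModeSupported(Config.GeospatialMode.ENABLED)) {
            config.geospatialMode = Config.GeospatialMode.ENABLED
        }
    }
)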