代码之家  ›  专栏  ›  技术社区  ›  yasin

ArCore场景窗体:检测图像时播放.mp4视频

  •  1
  • yasin  · 技术社区  · 6 年前

    当我检测到一个图像时,我想在它上面放置一段文本和一个视频。文本视图被正确放入了场景中,但视频没有——它只是被添加到了我的主布局的中间。

    // Activity entry point (body elided by the author with "(....)").
    // Registers a per-frame update listener so augmented images can be scanned on
    // every ARCore frame, and caches the fragment's AR scene view for later use.
    override fun onCreate(savedInstanceState: Bundle?) {
             (....)
            // Called once per rendered frame; drives image detection below.
            arFragment!!.arSceneView.scene.addOnUpdateListener { this.onUpdateFrame(it) }
            arSceneView = arFragment!!.arSceneView
    
    }
    
    /**
     * Per-frame scene callback: scans this frame's updated trackables for the
     * "car" augmented image and, the first time it is actively tracked, anchors
     * the overlay at the image's center pose.
     */
    private fun onUpdateFrame(frameTime: FrameTime) {
        // arFrame is null until ARCore has produced its first camera frame —
        // bail out early instead of NPE-ing on getUpdatedTrackables().
        val frame = arFragment!!.arSceneView.arFrame ?: return

        val augmentedImages = frame.getUpdatedTrackables(AugmentedImage::class.java)

        for (augmentedImage in augmentedImages) {
            // Only react to images that are actively tracked in this frame.
            if (augmentedImage.trackingState == TrackingState.TRACKING) {
                // modelCarAdded guards against attaching the overlay more than once.
                if (augmentedImage.name.contains("car") && !modelCarAdded) {
                    renderView(arFragment!!,
                            augmentedImage.createAnchor(augmentedImage.centerPose))
                    modelCarAdded = true
                }
            }
        }
    }
    

   // Builds two renderables at the given anchor: a text label (works) and a
   // video view (does not render in the scene — see the NOTE below).
   private fun renderView(fragment: ArFragment, anchor: Anchor) {
    //WORKING
    // A ViewRenderable flattens an Android View into a textured quad in the scene;
    // plain TextViews render fine this way.
    ViewRenderable.builder()
            .setView(this, R.layout.text_info)
            .build()
            .thenAccept { renderable ->
                (renderable.view as TextView).text = "Example"
                addNodeToScene(fragment, anchor, renderable, Vector3(0f, 0.2f, 0f))
    
            }
            .exceptionally { throwable ->
                // Surface renderable-creation failures to the user.
                val builder = AlertDialog.Builder(this)
                builder.setMessage(throwable.message)
                        .setTitle("Error!")
                val dialog = builder.create()
                dialog.show()
                null
            }
    //NOT WORKING
    // NOTE(review): this mixes two incompatible approaches. A VideoView inside a
    // ViewRenderable draws into the Android view hierarchy, not into the AR scene
    // (hence the video appearing over the main layout). setExternalTexture() on a
    // ViewRenderable's material presumably has no effect here — external video
    // textures are meant for a ModelRenderable built from a material that declares
    // a samplerExternal parameter (as in the ChromaKey sample) — TODO confirm.
    ViewRenderable.builder()
            .setView(this, R.layout.video_youtube)
            .build()
            .thenAccept { renderable ->
                val view = renderable.view
                videoRenderable = renderable
                // URI pointing at the bundled raw video resource.
                val path = "android.resource://" + packageName + "/" + R.raw.googlepixel
                view.video_player.setVideoURI(Uri.parse(path))
                renderable.material.setExternalTexture("videoTexture", texture)
                val videoNode = addNodeToScene(fragment, anchor, renderable, Vector3(0.2f, 0.5f, 0f))
                if (!view.video_player.isPlaying) {
                    view.video_player.start()
                    // Defer attaching the renderable until the first video frame is
                    // available, then detach the listener (one-shot).
                    texture
                            .surfaceTexture
                            .setOnFrameAvailableListener {
                                videoNode.renderable = videoRenderable
                                texture.surfaceTexture.setOnFrameAvailableListener(null)
                            }
                } else {
                    videoNode.renderable = videoRenderable
                }
    
            }
            .exceptionally { throwable ->
                // NOTE(review): failure is silently swallowed here, unlike the
                // dialog shown for the text renderable above.
                null
            }
    }
    
    // Attaches [renderable] to the scene at [anchor], offset by [vector3],
    // and returns the transformable node that hosts it.
    private fun addNodeToScene(fragment: ArFragment, anchor: Anchor, renderable: Renderable, vector3: Vector3): Node {
        val anchorNode = AnchorNode(anchor)
        val childNode = TransformableNode(fragment.transformationSystem).apply {
            this.renderable = renderable
            setParent(anchorNode)
            localPosition = vector3
        }
        fragment.arSceneView.scene.addChild(anchorNode)
        return childNode
    }
    

    我试着使用色度键视频的例子,但我不希望视频的白色部分是透明的。我不确定是否需要模型(.sfb)来显示视频。

    (问题截图——原帖附图,抓取时已丢失)

    1 回复  |  直到 6 年前
        1
  •  3
  •   Clayton Wilkinson    6 年前

    我用了 ChromaKey sample 作为起点。

    首先,我更改了视频使用的自定义材质,添加了一个标志来禁用chromakey过滤。

    material {
        "name" : "Chroma Key Video Material",
        "defines" : [
            "baseColor"
        ],
        "parameters" : [
            {
               // The texture displaying the frames of the video.
               "type" : "samplerExternal",
               "name" : "videoTexture"
            },
            {
                // The color to filter out of the video.
                "type" : "float4",
                "name" : "keyColor"
            },
            {
                // When true, the chroma-key filter is bypassed and the video is
                // drawn as-is. (Trailing comma after "name" removed — it was
                // inconsistent with every other parameter object.)
                "type" : "bool",
                "name" : "disableChromaKey"
            }
        ],
        "requires" : [
            "position",
            "uv0"
        ],
        "shadingModel" : "unlit",
        // Blending is "masked" instead of "transparent" so that the shadows account for the
        // transparent regions of the video instead of just the shape of the mesh.
        "blending" : "masked",
        // Material is double sided so that the video is visible when walking behind it.
        "doubleSided" : true
    }
    
    fragment {
        vec3 desaturate(vec3 color, float amount) {
            // Convert color to grayscale using Luma formula:
            // https://en.wikipedia.org/wiki/Luma_%28video%29
            vec3 gray = vec3(dot(vec3(0.2126, 0.7152, 0.0722), color));
    
            return vec3(mix(color, gray, amount));
        }
    
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
    
            vec2 uv = getUV0();
    
            // Mirror the texture on the back face so the video reads correctly
            // from behind.
            if (!gl_FrontFacing) {
              uv.x = 1.0 - uv.x;
            }
    
            vec4 color = texture(materialParams_videoTexture, uv).rgba;
    
            if (!materialParams.disableChromaKey) {
                vec3 keyColor = materialParams.keyColor.rgb;
    
                float threshold = 0.675;
                float slope = 0.2;
    
                float distance = abs(length(abs(keyColor - color.rgb)));
                float edge0 = threshold * (1.0 - slope);
                float alpha = smoothstep(edge0, threshold, distance);
                color.rgb = desaturate(color.rgb, 1.0 - (alpha * alpha * alpha));
    
                material.baseColor.a = alpha;
                material.baseColor.rgb = inverseTonemapSRGB(color.rgb);
                material.baseColor.rgb *= material.baseColor.a;
            } else {
                // Chroma keying disabled: show the raw video color, fully opaque.
                material.baseColor = color;
            }
        }
    }
    

    然后在.sfa文件中将“disableChromaKey”设置为true(如下所示),以关闭色度键过滤:

     materials: [
        {
          name: 'DefaultMaterial',
          parameters: [
            {
              videoTexture: {
                external_path: 'MISSING_PATH',
              },
            },
            {
              keyColor: [
                0,
                0,
                0,
                0,
              ],
            },
            {
                disableChromaKey : true,
            }
          ],
          source: 'sampledata/models/chroma_key_video_material.mat',
        },
      ],
    

    然后,我基于hittest中的锚点放置了视频节点,并在其上方放置了一个ViewRenderable节点用于文本。

       private Node createVideoDisplay(final AnchorNode parent, Vector3 localPosition, String title) {
            // Create a node to render the video and add it to the anchor.
            Node videoNode = new Node();
            videoNode.setParent(parent);
            videoNode.setLocalPosition(localPosition);
    
            // Set the scale of the node so that the aspect ratio of the video is correct.
            float videoWidth = mediaPlayer.getVideoWidth();
            float videoHeight = mediaPlayer.getVideoHeight();
            videoNode.setLocalScale(
                    new Vector3(
                            VIDEO_HEIGHT_METERS * (videoWidth / videoHeight),
                            VIDEO_HEIGHT_METERS, 1.0f));
    
            // Place the text above the video
            final float videoNodeHeight = VIDEO_HEIGHT_METERS+ localPosition.y;
            ViewRenderable.builder().setView(this,R.layout.video_title)
                    .build().thenAccept(viewRenderable -> {
                       Node titleNode =  new Node();
                       titleNode.setLocalPosition(new Vector3(0,videoNodeHeight,0));
                       titleNode.setParent(parent);
                       titleNode.setRenderable(viewRenderable);
                ((TextView)viewRenderable.getView().findViewById(R.id.video_text))
                               .setText(title);
            });
    
            return videoNode;
        }
    

    enter image description here

    推荐文章