[muzzik tutorial]: Rendering a web camera feed onto a Sprite

There isn't much to explain, so here is the code directly; treat it as a note.

import * as cc from "cc";
import { _decorator, Component, Node } from "cc";

const { ccclass, property } = _decorator;
@ccclass("webcam")
export class webcam extends Component {
	/* --------------- properties --------------- */
	/** sprite that displays the camera output */
	@property({ displayName: "Camera Output", type: cc.Sprite })
	camera_sprite: cc.Sprite = null!;
	/* --------------- private --------------- */
	/** whether the camera stream has been initialized */
	private _init_b = false;
	/** offscreen canvas that video frames are drawn onto */
	private _h5_canvas!: HTMLCanvasElement;
	/** hidden <video> element fed by getUserMedia */
	private _h5_video!: HTMLVideoElement;
	/** image asset wrapping the canvas, reused every frame */
	private _image = new cc.ImageAsset();
	/** sprite frame assigned to the output sprite */
	private _spriteFrame = new cc.SpriteFrame();
	/* ------------------------------- lifecycle ------------------------------- */
	async onLoad() {
		// note: the canvas keeps its default 300x150 size unless it is resized,
		// and the texture generated from it below inherits that size
		this._h5_canvas = document.createElement("canvas");
		this._h5_video = document.createElement("video");

		this._h5_video.setAttribute("autoplay", "");
		this._h5_video.setAttribute("muted", "");
		this._h5_video.setAttribute("playsinline", "");
		// setAttribute("muted", "") only sets the content attribute; setting the
		// property as well keeps autoplay reliable on browsers that require muted video
		this._h5_video.muted = true;

		self.navigator.mediaDevices
			.getUserMedia({
				video: {
					// prefer the rear-facing camera on mobile devices
					facingMode: "environment",
				},
			})
			.then(async (stream) => {
				this._h5_video.srcObject = stream;
				this._h5_video.play();
				this.camera_sprite.spriteFrame = this._spriteFrame;
				this._init_b = true;
			})
			.catch((error) => {
				console.log(error.code, error.message, error);
			});
	}

	update() {
		if (!this._init_b) {
			return;
		}
		this.updateTexture();
	}
	/* ------------------------------- functionality ------------------------------- */
	updateTexture() {
		/** draw the current video frame onto the offscreen canvas */
		this._h5_canvas
			.getContext("2d")!
			.drawImage(this._h5_video, 0, 0, this._h5_canvas.width, this._h5_canvas.height);
		let new_texture = new cc.Texture2D();
		this._image.reset(this._h5_canvas);
		new_texture.image = this._image;
		// release the previous frame's texture before swapping in the new one,
		// otherwise GPU memory grows every frame
		this.camera_sprite.spriteFrame!.texture?.decRef();
		this.camera_sprite.spriteFrame!.texture = new_texture;
		this.camera_sprite.markForUpdateRenderData();
	}
}

# Notes

  • On iOS the camera can only be accessed over HTTPS (getUserMedia requires a secure context); a quick feature check is sketched below
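
Not part of the original post, just a minimal sketch of such a check; window.isSecureContext and the navigator.mediaDevices guard are standard browser APIs:

// Sketch only: bail out early when the camera cannot be used at all.
function can_use_camera(): boolean {
	if (!window.isSecureContext) {
		console.warn("camera access needs HTTPS (or localhost)");
		return false;
	}
	if (!navigator.mediaDevices?.getUserMedia) {
		console.warn("getUserMedia is not supported in this browser");
		return false;
	}
	return true;
}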

# Replies

toDataURL isn't necessary; I'd guess you can just do

new_texture.uploadData(_h5_video)

Nice guess; next time don't guess, run your own code once first.

Er, typo, it should be:

new_texture.uploadData(_h5_canvas)

Though I remember seeing somewhere that gl.texImage2D does support video.

toDataURL … even if it works, the efficiency is far too low.
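
Side note, not from the thread: plain WebGL does accept a video element directly as a texture source. A minimal sketch of that call, with texture creation and parameter setup omitted:

// Sketch only: upload the current frame of a <video> element into a bound WebGL texture.
function uploadVideoFrame(gl: WebGLRenderingContext, tex: WebGLTexture, video: HTMLVideoElement): void {
	gl.bindTexture(gl.TEXTURE_2D, tex);
	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
}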

I'll try it tonight when I get home; my work computer doesn't have a camera.

I tested it: uploadData(video) does work, as long as the texture size matches the video.
I used cc.VideoPlayer + an mp4, so I'm not sure whether a camera stream behaves differently.

// player is a cc.VideoPlayer playing an mp4, sp is the target cc.Sprite
const { player, sp } = this;
player.node.on(VideoPlayer.EventType.READY_TO_PLAY, () => {
	// the texture must match the video resolution for uploadData(video) to work
	const texture = new Texture2D();
	texture.reset({ width: 960, height: 640, format: Texture2D.PixelFormat.RGB888 });
	texture.setWrapMode(Texture2D.WrapMode.CLAMP_TO_BORDER, Texture2D.WrapMode.CLAMP_TO_BORDER, Texture2D.WrapMode.CLAMP_TO_BORDER);

	const sf = Object.assign(new SpriteFrame(), { texture });
	// schedule() with no interval runs the callback every frame
	this.schedule(() => {
		texture.uploadData(<any>player.nativeVideo);
		sp.spriteFrame = sf;
	});
});
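
The thread never shows the webcam variant of this. Purely as a sketch (untested with a camera stream, relying on uploadData accepting a video element as reported above, and meant to replace the tutorial's update()/updateTexture() pair inside the same component), the texture could be sized from videoWidth / videoHeight and fed directly from the getUserMedia video:

// Sketch only: direct per-frame upload of the webcam <video>, no canvas redraw.
private _video_texture: cc.Texture2D | null = null;

private init_direct_upload(): void {
	this._h5_video.addEventListener("loadedmetadata", () => {
		// videoWidth / videoHeight give the real resolution of the stream
		const texture = new cc.Texture2D();
		texture.reset({
			width: this._h5_video.videoWidth,
			height: this._h5_video.videoHeight,
			format: cc.Texture2D.PixelFormat.RGBA8888,
		});
		this._spriteFrame.texture = texture;
		this.camera_sprite.spriteFrame = this._spriteFrame;
		this._video_texture = texture;
	});
}

update() {
	if (this._video_texture) {
		// assumes uploadData() accepts a video element, as the reply above reports
		this._video_texture.uploadData(this._h5_video as any);
		this.camera_sprite.markForUpdateRenderData();
	}
}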

I tested it; the canvas-based updateTexture() shown above turned out to be the fastest.


But no matter how I use it, the camera output texture gets stretched to the node's size. Does anyone know a way to avoid the stretching?
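
The thread doesn't answer this. One common workaround (a sketch under my own assumptions, not from the thread) is to resize the sprite node's UITransform to the video's aspect ratio once the resolution is known; it assumes the Sprite's sizeMode is CUSTOM so the node size isn't snapped back to the frame size, and max_width is just an arbitrary layout constraint:

// Sketch only: keep the sprite node's aspect ratio in sync with the video.
private fit_sprite_to_video(max_width: number): void {
	const transform = this.camera_sprite.getComponent(cc.UITransform)!;
	const aspect = this._h5_video.videoWidth / this._h5_video.videoHeight;
	transform.setContentSize(max_width, max_width / aspect);
}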

Are you sure that creating a new Texture2D every frame is faster than a plain tex.uploadData(video)?

HTMLVideoElement should have an interface for reading the resolution (videoWidth / videoHeight).

Not only is the per-frame time lower than with uploadData, the gfx memory use is also the smallest.

That's plausible, since the frame is redrawn at a smaller size through the canvas.

But I think the cost of redrawing into the canvas also needs to be counted; it's just hard to measure. And if a small video ends up being scaled back up, it probably isn't worth it.
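
A crude way to compare just the CPU-side cost of the two paths (my sketch, not from the thread; it says nothing about GPU-side cost or memory) is to average the per-frame upload time:

// Sketch only: average the CPU time one upload path spends per frame.
// upload_one_frame stands for either the canvas-based updateTexture()
// or a direct texture.uploadData(video) call.
let total_ms = 0;
let frames = 0;

function measure(upload_one_frame: () => void): void {
	const start = performance.now();
	upload_one_frame();
	total_ms += performance.now() - start;
	frames++;
	if (frames % 600 === 0) {
		console.log(`average upload time: ${(total_ms / frames).toFixed(3)} ms`);
	}
}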