Copy awrtc.js back into the original awrtc.jspre, and add the corresponding test interfaces in awrtc_unity.js:

//debug wrappers that simply forward to the corresponding awrtc CAPI_* test functions
var Unity_DebugTool={
	Unity_H5Stream_GetVideo: function(e)
	{
		console.log("------- Unity_H5Stream_GetVideo",e);
		return awrtc.CAPI_H5Stream_GetVideo(e);
	},
	Unity_SetRtcSourceType: function(e)
	{
		console.log("------- Unity_SetRtcSourceType",e);
		return awrtc.CAPI_SetRtcSourceType(e);
	},
	Unity_SetDebugShowElements: function(e)
	{
		console.log("------- Unity_SetDebugShowElements",e);
		return awrtc.CAPI_SetDebugShowElements(e);
	}
};
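To reach these from the Unity side in a WebGL build, the C# code needs matching extern declarations. Below is a minimal sketch, assuming the three functions are also merged into the Emscripten library (e.g. via mergeInto in a .jslib) so that DllImport("__Internal") can resolve them; the parameter and return types are placeholders and have to match whatever the CAPI_* functions actually expect:

using System.Runtime.InteropServices;

//hypothetical C# bindings for the debug wrappers above; not part of the plugin
public static class UnityDebugTool
{
#if UNITY_WEBGL && !UNITY_EDITOR
    [DllImport("__Internal")]
    public static extern int Unity_H5Stream_GetVideo(int e);

    [DllImport("__Internal")]
    public static extern int Unity_SetRtcSourceType(int e);

    [DllImport("__Internal")]
    public static extern int Unity_SetDebugShowElements(int e);
#endif
}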

Creating the Unity scene and scripts

To avoid touching the original samples, I created the scripts WebRtcVideo and WebRtcVideoUI as initially identical copies of CallApp and CallAppUI, and then stripped the unrelated UI elements and their code out of the CallAppUI copy (WebRtcVideoUI).

Create the scene, run a test, and here is the result:

[screenshot]

The WebRtcVideo code behind Start:

public void StartPlay()
    {
        SetupCall();

        //mCall.Call("{'type': 'open'}");//does not work, has problems

        //mCall.Call("{\"type\": \"open\"}");//works

        var cmd = new Cmd { type = "open" };
        var json = JsonUtility.ToJson(cmd);//JsonUtility cannot serialize anonymous types and does not support properties, only public fields
        Debug.Log("json:" + json);
        mCall.Call(json);//works
    }

    private class Cmd
    {
        public string type;
    }
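Presumably the single-quoted string fails because JSON only allows double-quoted keys and strings; JsonUtility.ToJson serializes the public field of Cmd into {"type":"open"}, which is exactly the manually escaped string that works.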
public virtual void SetupCall()
    {
        Append("Setting up ...");

        //hacks to turn off certain connection types. If both set to true only
        //turn servers are used. This helps simulating a NAT that doesn't support
        //opening ports.
        //hack to turn off direct connections
        //Byn.Awrtc.Native.InternalDataPeer.sDebugIgnoreTypHost = true;
        //Byn.Awrtc.Native.InternalDataPeer.sDebugIgnoreTypSrflx = true;

        NetworkConfig netConfig = CreateNetworkConfig();


        Debug.Log("Creating call using NetworkConfig:" + netConfig);
        mCall = CreateCall(netConfig);
        if (mCall == null)
        {
            Append("Failed to create the call");
            return;
        }

        mCall.LocalFrameEvents = mLocalFrameEvents;
        string[] devices = UnityCallFactory.Instance.GetVideoDevices();
        if (devices == null || devices.Length == 0)
        {
            Debug.Log("no device found or no device information available");
        }
        else
        {
            foreach (string s in devices)
                Debug.Log("device found: " + s + " IsFrontFacing: " + UnityCallFactory.Instance.IsFrontFacing(s));
        }
        Append("Call created!:" + mCall);
        mCall.CallEvent += Call_CallEvent;



        //make a deep clone to avoid confusion if settings are changed
        //at runtime. 
        mMediaConfigInUse = mMediaConfig.DeepClone();

        //try to pick a good default video device if the user wants to send video but
        //didn't bother to pick a specific device
        if (mMediaConfigInUse.Video && string.IsNullOrEmpty(mMediaConfigInUse.VideoDeviceName))
        {
            mMediaConfigInUse.VideoDeviceName = UnityCallFactory.Instance.GetDefaultVideoDevice();
        }

        Debug.Log("Configure call using MediaConfig: " + mMediaConfigInUse);
        mCall.Configure(mMediaConfigInUse);
        mUi.SetGuiState(false);
    }

The whole flow is the same as the test code in the HTML page from the previous post; strictly speaking, that flow was itself written based on the CallApp sample in Unity.

1. Create the config

2. Create the Call

3. Invoke the Call() method

4. Display and refresh the image in the event handler

Refreshing the image relies on CallEventType.FrameUpdate, which is exactly where this hooks into FrameToCallEvent inside awrtc.js.

[screenshots]
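The handler registered in SetupCall (mCall.CallEvent += Call_CallEvent) is where the frames arrive. Below is a minimal sketch reduced to just the FrameUpdate case, assuming the FrameUpdateEventArgs type used by the original CallApp sample and a hypothetical mVideoTexture field; it hands the frame to the static UpdateTexture helper listed right after it:

private Texture2D mVideoTexture;//hypothetical field the frame pixels are copied into

private void Call_CallEvent(object sender, CallEventArgs e)
{
    if (e.Type == CallEventType.FrameUpdate)
    {
        //FrameUpdateEventArgs carries the decoded IFrame for one connection
        var args = e as FrameUpdateEventArgs;
        //copy the pixels into the (reused) Texture2D; true means a new texture was allocated
        if (UpdateTexture(args.Frame, ref mVideoTexture))
        {
            //first frame or resolution change: re-assign mVideoTexture to the
            //RawImage or material that displays the video
        }
    }
}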

public static bool UpdateTexture(IFrame frame, ref Texture2D tex)
        {
            var format = frame.Format;
            if (frame.Format == FramePixelFormat.ABGR || frame.Format == FramePixelFormat.YUY2)
            {
                bool newTextureCreated = false;
                //texture exists but has the wrong height /width? -> destroy it and set the value to null
                if (tex != null && (tex.width != frame.Width || tex.height != frame.Height))
                {
                    Texture2D.Destroy(tex);
                    tex = null;
                }
                //no texture? create a new one first
                if (tex == null)
                {
                    newTextureCreated = true;
                    Debug.Log("Creating new texture with resolution " + frame.Width + "x" + frame.Height + " Format:" + format);

                    //so far only ABGR is really supported. this will change later
                    if (format == FramePixelFormat.ABGR)
                    {
                        tex = new Texture2D(frame.Width, frame.Height, TextureFormat.RGBA32, false);
                    }
                    else
                    {
                        Debug.LogWarning("YUY2 texture is set. This is only for testing");
                        tex = new Texture2D(frame.Width, frame.Height, TextureFormat.YUY2, false);
                    }
                    tex.wrapMode = TextureWrapMode.Clamp;
                }
                //copy image data into the texture and apply
                //Watch out the RawImage has the top pixels in the top row but
                //unity has the top pixels in the bottom row. Result is an image that is
                //flipped. Fixing this here would waste a lot of CPU power thus
                //the UI will simply set scale.Y of the UI element to -1 to reverse this.
                tex.LoadRawTextureData(frame.Buffer);
                tex.Apply();
                return newTextureCreated;
            }
            else if (frame.Format == FramePixelFormat.I420p && frame is IDirectMemoryFrame)
            {
                //this one shouldn't be used. It squeezes all planes into a single texture 
                var dframe = frame as IDirectMemoryFrame;
                int texHeight = (int)(frame.Height * 1.5f);
                bool newTextureCreated = EnsureTex(frame.Width, texHeight, TextureFormat.R8, ref tex);
                tex.LoadRawTextureData(dframe.GetIntPtr(), dframe.GetSize());
                dframe.Dispose();
                tex.Apply();
                return newTextureCreated;
            }
            else
            {
                Debug.LogError("Format not supported");
                return false;
            }
        }
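The EnsureTex helper used in the I420p branch is not shown here; judging from the ABGR branch above it simply recreates the texture when the size or format no longer matches and reports whether a new one was allocated. A sketch of what it presumably does:

private static bool EnsureTex(int width, int height, TextureFormat format, ref Texture2D tex)
{
    //destroy a texture whose size or format no longer matches
    if (tex != null && (tex.width != width || tex.height != height || tex.format != format))
    {
        Texture2D.Destroy(tex);
        tex = null;
    }
    //(re)create it if needed and report that a new texture was allocated
    if (tex == null)
    {
        tex = new Texture2D(width, height, format, false);
        tex.wrapMode = TextureWrapMode.Clamp;
        return true;
    }
    return false;
}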

Judging from the result, the video can indeed be displayed on a 3D object.
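Putting the video onto a 3D object then just means assigning that texture to the object's material. The comment inside UpdateTexture notes that the image arrives vertically flipped and that the UI compensates with a -1 Y scale; on a material the same trick can be done with texture tiling and offset. A minimal sketch (the VideoSurface class and its fields are made-up names, not part of the plugin):

using UnityEngine;

//hypothetical component: pushes the latest video Texture2D onto a mesh material
public class VideoSurface : MonoBehaviour
{
    public Renderer TargetRenderer;//e.g. the MeshRenderer of a quad or cube

    public void SetVideoTexture(Texture2D tex)
    {
        var mat = TargetRenderer.material;
        mat.mainTexture = tex;
        //the frame stores its top row first while Unity samples textures bottom-up,
        //so flip vertically via tiling/offset instead of flipping pixels on the CPU
        mat.mainTextureScale = new Vector2(1f, -1f);
        mat.mainTextureOffset = new Vector2(0f, 1f);
    }
}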

------------------------------------------------------------------------------------------------------------

The core of awrtc.js here is context.drawImage and context.getImageData.

[screenshot]

In short, the video is still captured by the HTML5 video element; how it is fetched and decoded is not our concern, we simply grab images from it.

Seen this way, fetching the stream with h5splayer.js could also be combined with Unity to obtain the video frames.

And it does not even have to be WebRTC; WebSocket would work as well.

But writing such a complete stack from scratch would take who knows how long.

US$115, plus two weeks of reading, understanding and modifying, just to get it to a usable level.

A single day's salary is already more than US$115...

Paying for it is still the more efficient route; it is just a pity that this is the only plugin I could find.

Next comes integrating with UMP, and with the backend, to obtain the matching h5stream ws address for each camera.

There is also the question of audio; but since this is essentially a video element in the web page, perhaps the audio will play without any special handling.

----------------------------------------------------------------------------------------------------------------------------------------

I noticed that once the video is playing, the rotating object in the 3D scene slows down, which means the frame rate has dropped.

To test this: since video playback is driven from Update (which pumps the call's events), switch to InvokeRepeating so the refresh frequency can be tuned.

First, remove the log statements inside GetFrame; logging vs. no logging alone makes a difference of roughly 5 fps.

1. Before playing the video: about 60 fps; while playing: around 30, fluctuating between 25 and 35.

2. Driven by a timer: with a 0.02 s interval, about 30 fps; 0.1 s, about 40; 0.2 s, about 44; 0.3 s can reach 50. Any longer than that and the video no longer looks continuous; the gaps between frames become noticeable.

An interval of around 0.1 s is a good fit; 0.02 s is unnecessary.

The code is as follows:

protected virtual void FixedUpdate()//runs at the fixed timestep, 0.02 s (20 ms) by default
    {
        if (UpdateByFixed && IsPlayUpdating)
        {
            CallUpdate();
        }
    }

    private void CallUpdate()
    {
        if (mCall != null)
        {
            //update the call object. This will trigger all buffered events to be fired
            //to ensure it is done in the unity thread at a convenient time.
            mCall.Update();
        }
    }

    public bool IsPlayUpdating = false;

    public bool UpdateByFixed = true;

    public bool UpdateByInvoke = false;

    public float PlayUpdateInterval = 0.1f;

    public void StartPlayUpdate()
    {
        IsPlayUpdating = true;
        if (UpdateByInvoke)
        {
            InvokeRepeating("CallUpdate", 0, PlayUpdateInterval);
        }
    }

    public void StopPlayUpdate()
    {
        IsPlayUpdating = false;
        if (UpdateByInvoke)
        {
            CancelInvoke("CallUpdate");
        }
    }
protected virtual NetworkConfig CreateNetworkConfig()
    {
        if (mUi.ToggleIsByInterval.isOn)
        {
            UpdateByFixed = false;
            UpdateByInvoke = true;
            PlayUpdateInterval = float.Parse(mUi.InputInterval.text);
        }
        else
        {
            UpdateByFixed = true;
            UpdateByInvoke = false;
        }
        // ......
}
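CreateNetworkConfig above reads two UI fields through mUi. A minimal sketch of how they might be declared in WebRtcVideoUI (the field names come from the usage above, everything else is assumed):

using UnityEngine;
using UnityEngine.UI;

public class WebRtcVideoUI : MonoBehaviour
{
    //when on, drive the refresh via InvokeRepeating instead of FixedUpdate
    public Toggle ToggleIsByInterval;

    //refresh interval in seconds, e.g. "0.1"
    public InputField InputInterval;

    //...the rest is the CallAppUI copy described earlier
}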