
How to play WebRTC streams on Android with uni-app

1. Playing an RTSP stream

If the WebRTC stream is returned over the RTSP protocol, with a stream URL such as rtsp://127.0.0.1:5115/session.mpg, uni-app's built-in video component can play it directly once the app is compiled to Android, but there is usually a delay of 2-3 seconds.
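
A minimal sketch of this case, assuming (as claimed above) that the native player behind uni-app's built-in video component can handle the rtsp address on the target Android device:

<template>
    <!-- uni-app's built-in video component; the rtsp address is the example URL above -->
    <video
        id="rtspPlayer"
        src="rtsp://127.0.0.1:5115/session.mpg"
        autoplay
        :controls="false"
        style="width: 100%; height: 100%;">
    </video>
</template>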

2. Playing a WebRTC stream

If the WebRTC stream is returned over the WebRTC protocol, with a stream URL such as webrtc://127.0.0.1:1988/live/livestream, we need to negotiate the SDP, connect to the streaming server, and set up the audio/video channel before the stream can be played; the delay is usually around 500 ms.

Wrapping a WebrtcVideo component

<template>
    <video id="rtc_media_player" width="100%" height="100%" autoplay playsinline></video>
</template>

<script>
    import $ from "./jquery-1.10.2.min.js";
    import {prepareUrl} from "./utils.js";
     
    export default {
        data() {
            return {
                // RTCPeerConnection instance
                peerConnection: null,
                // WebRTC stream URL to play
                playUrl: 'webrtc://127.0.0.1:1988/live/livestream'
            }
        },
        methods: {
              createPeerConnection() {
                const that = this
                // Create the WebRTC peer connection
                that.peerConnection = new RTCPeerConnection(null);
                // Add receive-only audio and video transceivers
                that.peerConnection.addTransceiver("audio", { direction: "recvonly" });
                that.peerConnection.addTransceiver("video", { direction: "recvonly" });
                // When the remote stream arrives, attach it to the player
                that.peerConnection.ontrack = (event) => {
                    const remoteVideo = document.getElementById("rtc_media_player");
                    if (remoteVideo.srcObject !== event.streams[0]) {
                        remoteVideo.srcObject = event.streams[0];
                    }
                };
            },
            async makeCall() {
                const that = this
                const url = this.playUrl
                this.createPeerConnection()
                // Build the server API URL, e.g. http://192.168.0.1:1988/rtc/v1/play/
                const conf = prepareUrl(url);
                // Create the offer SDP
                const offer = await this.peerConnection.createOffer();
                await this.peerConnection.setLocalDescription(offer);
                var session = await new Promise(function (resolve, reject) {
                    $.ajax({
                       type: "POST",
                       url: conf.apiUrl,
                       data: offer.sdp,
                       contentType: "text/plain",
                       dataType: "json",
                       crossDomain: true,
                   })
                   .done(function (data) {
                       // The server returns the answer SDP
                       if (data.code) {
                            reject(data);
                            return;
                        }
                        resolve(data);
                    })
                    .fail(function (reason) {
                        reject(reason);
                    });
                });
                // Set the remote description; once SDP negotiation passes, the channel is established
                await this.peerConnection.setRemoteDescription(
                    new RTCSessionDescription({ type: "answer", sdp: session.sdp })
                );
                session.simulator = conf.schema + '//' + conf.urlObject.server + ':' + conf.port + '/rtc/v1/nack/'
                return session;
            }
        },
        mounted() {
            this.makeCall()
                .then((res) => {
                    // WebRTC channel established successfully
                })
                .catch((error) => {
                    // WebRTC channel failed to establish
                    console.log(error)
                })
        }
    }
</script>

utils.js

const defaultPath = "/rtc/v1/play/";
 
export const prepareUrl = webrtcUrl => {
    var urlObject = parseUrl(webrtcUrl);
    var schema = "http:";
    var port = urlObject.port || 1985;
    if (schema === "https:") {
        port = urlObject.port || 443;
    }
 
    var api = urlObject.user_query.play || defaultPath;
    if (api.lastIndexOf("/") !== api.length - 1) {
        api += "/";
    }
 
    var apiUrl = schema + "//" + urlObject.server + ":" + port + api;
    for (var key in urlObject.user_query) {
        if (key !== "api" && key !== "play") {
            apiUrl += "&" + key + "=" + urlObject.user_query[key];
        }
    }
    // Replace /rtc/v1/play/&k=v to /rtc/v1/play/?k=v
    apiUrl = apiUrl.replace(api + "&", api + "?");
 
    var streamUrl = urlObject.url;
 
    return {
        apiUrl: apiUrl,
        streamUrl: streamUrl,
        schema: schema,
        urlObject: urlObject,
        port: port,
        tid: Number(parseInt(new Date().getTime() * Math.random() * 100))
            .toString(16)
            .substr(0, 7)
    };
};
export const parseUrl = url => {
    var a = document.createElement("a");
    a.href = url
        .replace("rtmp://", "http://")
        .replace("webrtc://", "http://")
        .replace("rtc://", "http://");
 
    var vhost = a.hostname;
    var app = a.pathname.substr(1, a.pathname.lastIndexOf("/") - 1);
    var stream = a.pathname.substr(a.pathname.lastIndexOf("/") + 1);
 
    // parse the vhost in the params of app, that srs supports.
    app = app.replace("...vhost...", "?vhost=");
    if (app.indexOf("?") >= 0) {
        var params = app.substr(app.indexOf("?"));
        app = app.substr(0, app.indexOf("?"));
 
        if (params.indexOf("vhost=") > 0) {
            vhost = params.substr(params.indexOf("vhost=") + "vhost=".length);
            if (vhost.indexOf("&") > 0) {
                vhost = vhost.substr(0, vhost.indexOf("&"));
            }
        }
    }
 
    // when vhost equals to server, and server is ip,
    // the vhost is __defaultVhost__
    if (a.hostname === vhost) {
        var re = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/;
        if (re.test(a.hostname)) {
            vhost = "__defaultVhost__";
        }
    }
 
    // parse the schema
    var schema = "rtmp";
    if (url.indexOf("://") > 0) {
        schema = url.substr(0, url.indexOf("://"));
    }
 
    var port = a.port;
    if (!port) {
        if (schema === "http") {
            port = 80;
        } else if (schema === "https") {
            port = 443;
        } else if (schema === "rtmp") {
            port = 1935;
        }
    }
 
    var ret = {
        url: url,
        schema: schema,
        server: a.hostname,
        port: port,
        vhost: vhost,
        app: app,
        stream: stream
    };
    fill_query(a.search, ret);
 
    // For webrtc API, we use 443 if page is https, or schema specified it.
    if (!ret.port) {
        if (schema === "webrtc" || schema === "rtc") {
            if (ret.user_query.schema === "https") {
                ret.port = 443;
            } else if (window.location.href.indexOf("https://") === 0) {
                ret.port = 443;
            } else {
                // For WebRTC, SRS use 1985 as default API port.
                ret.port = 1985;
            }
        }
    }
 
    return ret;
};
export const fill_query = (query_string, obj) => {
    // pure user query object.
    obj.user_query = {};
 
    if (query_string.length === 0) {
        return;
    }
 
    // split again for angularjs.
    if (query_string.indexOf("?") >= 0) {
        query_string = query_string.split("?")[1];
    }
 
    var queries = query_string.split("&");
    for (var i = 0; i < queries.length; i++) {
        // split each k=v pair into both obj and obj.user_query
        var kv = queries[i].split("=");
        obj[kv[0]] = kv[1];
        obj.user_query[kv[0]] = kv[1];
    }
};
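
As a quick check of these helpers, the playUrl used in the component above resolves roughly as follows (values traced through prepareUrl/parseUrl; the tid value is random):

// prepareUrl("webrtc://127.0.0.1:1988/live/livestream") returns approximately:
// {
//     apiUrl:    "http://127.0.0.1:1988/rtc/v1/play/",   // port taken from the URL, otherwise 1985
//     streamUrl: "webrtc://127.0.0.1:1988/live/livestream",
//     schema:    "http:",
//     urlObject: { server: "127.0.0.1", app: "live", stream: "livestream", ... },
//     port:      "1988",
//     tid:       "3f2a1b0"                               // random hex id
// }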

Using the component in a page

import VideoWebrtc from "@/components/videoWebrtc";
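
A fuller usage sketch; the registration and the surrounding markup below are illustrative, only the import path comes from the article:

<template>
    <view class="player-wrap">
        <VideoWebrtc></VideoWebrtc>
    </view>
</template>

<script>
    import VideoWebrtc from "@/components/videoWebrtc";

    export default {
        components: { VideoWebrtc }
    }
</script>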

Things to note:

1. One of the key pieces of SDP negotiation is the media description line, m=xxx; an example is shown below.

A complete media description starts at one m=xxx line and ends at the next. Taking video as an example, the media description lists the codecs the current device is allowed to play, commonly VP8/VP9/H264, etc.:
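
For illustration, a simplified, made-up video media description from an offer SDP might look like this (the payload numbers and codec list vary between devices):

m=video 9 UDP/TLS/RTP/SAVPF 96 98 106
a=mid:video
a=rtpmap:96 VP8/90000
a=rtpmap:98 VP9/90000
a=rtpmap:106 H264/90000
a=fmtp:106 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f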

Looking at the payload type numbers after m=video, each of them has a matching a=rtpmap entry, and the string after the a=rtpmap number identifies the codec. The mapping between payload number and codec is not fixed, however: on device A, a=rtpmap:106 H264/90000 may stand for H264, while on device B it is a=rtpmap:100 H264/90000.

Therefore, to determine which codecs a device can play, look at the string after the a=rtpmap number rather than at the number itself.

Some of the criteria for the negotiation to pass are:

  • The number of m=xxx lines in the offer SDP must match the number in the answer SDP;
  • The order of the m=xxx lines in the offer SDP must match the order in the answer SDP; for example, both must put m=audio first and m=video second, or both the other way round;
  • The payload types (codecs) listed after m=audio in the answer SDP must be contained in those listed after m=audio in the offer SDP;

The m=xxx lines in the offer SDP are created by addTransceiver: when its first argument is "audio" it produces m=audio, when it is "video" it produces m=video, and the order of the calls determines the order of the m=xxx lines:

that.peerConnection.addTransceiver("audio", { direction: "recvonly" });
that.peerConnection.addTransceiver("video", { direction: "recvonly" });
  • The SDP also contains a line a=mid:xxx; in a browser xxx may be audio/video, while on Android devices it is 0/1, so the server must take care to match it against the offer SDP.
  • For the audio/video transceivers, the API used above is addTransceiver, but some Android devices report that this API does not exist; it can be replaced with getUserMedia + addTrack, as in the following code:
data() {
    return {
        ......
        localStream: null,
        ......
    }
},
methods: {
    createPeerConnection() {
        const that = this
        // Create the WebRTC peer connection
        that.peerConnection = new RTCPeerConnection(null);
        that.localStream.getTracks().forEach((track) => {
          that.peerConnection.addTrack(track, that.localStream);
        });
        // When the remote stream arrives, attach it to the player
        that.peerConnection.ontrack = (event) => {
            ......
        };
    },   
    async makeCall() {
        const that = this
        that.localStream = await navigator.mediaDevices.getUserMedia({
            video: true,
            audio: true,
        });
        const url = this.playUrl
        ......
        ......
    }
}

Note that navigator.mediaDevices.getUserMedia captures the device's camera and microphone, so the device must actually have camera and recording hardware and the corresponding permissions must be granted, otherwise the API call will fail.
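
A minimal sketch of guarding that call; the error names follow the standard getUserMedia specification, and the uni.showToast prompt is just one illustrative way to surface the failure:

async startLocalStream() {
    try {
        this.localStream = await navigator.mediaDevices.getUserMedia({
            video: true,
            audio: true,
        });
    } catch (err) {
        if (err.name === "NotAllowedError") {
            // Camera/microphone permission was denied by the user or the system
            uni.showToast({ title: "Camera/microphone permission required", icon: "none" });
        } else if (err.name === "NotFoundError") {
            // The device has no camera or microphone
            uni.showToast({ title: "No camera or microphone found", icon: "none" });
        }
        throw err;
    }
}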

3. Real-time audio and video communication

In this kind of peer-to-peer scenario, a WebSocket connection to a signaling server is normally used, and then the local and remote streams are played at the same time.

<template>
    <view>
        <div>Local Video</div>
        <video id="localVideo" autoplay playsinline></video>
        <div>Remote Video</div>
        <video id="remoteVideo" autoplay playsinline></video>
    </view>
</template>

<script>
    import $ from "./jquery-1.10.2.min.js";
    export default {
        data() {
            return {
                signalingServerUrl: "ws://127.0.0.1:8085",
                iceServersUrl: 'stun:stun.l.google.com:19302',
                localStream: null,
                peerConnection: null,
                // WebSocket connection to the signaling server
                ws: null
            }
        },
         methods: {
            async startLocalStream(){
                try {
                    this.localStream = await navigator.mediaDevices.getUserMedia({
                        video: true,
                        audio: true,
                    });
                    document.getElementById("localVideo").srcObject = this.localStream;
                }catch (err) {
                    console.error("Error accessing media devices.", err);
                }
            },
            createPeerConnection() {
                const configuration = { iceServers: [{
                    urls: this.iceServersUrl
                }]};
                this.peerConnection = new RTCPeerConnection(configuration);
                this.localStream.getTracks().forEach((track) => {
                    this.peerConnection.addTrack(track, this.localStream);
                });
                this.peerConnection.onicecandidate = (event) => {
                    if (event.candidate) {
                        this.ws.send(
                          JSON.stringify({
                            type: "candidate",
                            candidate: event.candidate,
                          })
                        );
                     }
                };
                this.peerConnection.ontrack = (event) => {
                  const remoteVideo = document.getElementById("remoteVideo");
                  if (remoteVideo.srcObject !== event.streams[0]) {
                    remoteVideo.srcObject = event.streams[0];
                  }
                };
            },
            async makeCall() {
                this.createPeerConnection();
                const offer = await this.peerConnection.createOffer();
                await this.peerConnection.setLocalDescription(offer);
                this.ws.send(JSON.stringify(offer));
            }
         },
        mounted() {
            this.ws = new WebSocket(this.signalingServerUrl);
            this.ws.onopen = async () => {
                console.log("Connected to the signaling server");
                // Capture the local stream first, then start the call
                await this.startLocalStream();
                this.makeCall();
            };
            this.ws.onmessage = async (message) => {
                const data = JSON.parse(message.data);
                if (data.type === "offer") {
                    if (!this.peerConnection) this.createPeerConnection();
                    await this.peerConnection.setRemoteDescription(
                        new RTCSessionDescription(data)
                    );
                    const answer = await this.peerConnection.createAnswer();
                    await this.peerConnection.setLocalDescription(answer);
                    this.ws.send(JSON.stringify(this.peerConnection.localDescription));
                } else if (data.type === "answer") {
                    if (!this.peerConnection) this.createPeerConnection();
                    await this.peerConnection.setRemoteDescription(
                        new RTCSessionDescription(data)
                    );
                } else if (data.type === "candidate") {
                    if (this.peerConnection) {
                        try {
                            await this.peerConnection.addIceCandidate(
                                new RTCIceCandidate(data.candidate)
                            );
                        } catch (e) {
                            console.error("Error adding received ICE candidate", e);
                        }
                    }
                }
            }
        }
    }
</script>

Compared with playing a WebRTC protocol stream, the p2p version uses a WebSocket instead of ajax to send and receive the SDP and adds playback of the local stream; the rest of the code is the same as for playing the protocol stream.
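
The client code above assumes a very simple signaling server at ws://127.0.0.1:8085 that just relays offer/answer/candidate messages between the peers. A minimal Node.js sketch of such a relay, using the npm ws package (the port and the broadcast-to-everyone-else behaviour are assumptions made to match the client code):

// signaling-server.js -- relay every message to all other connected clients
const WebSocket = require("ws");

const wss = new WebSocket.Server({ port: 8085 });

wss.on("connection", (socket) => {
    socket.on("message", (message) => {
        // Forward offers, answers and ICE candidates to the other peer(s)
        wss.clients.forEach((client) => {
            if (client !== socket && client.readyState === WebSocket.OPEN) {
                client.send(message.toString());
            }
        });
    });
});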

Summary

This concludes the article on playing WebRTC streams on Android with uni-app. For more on this topic, search IT俱乐部's earlier articles or browse the related articles below, and please keep supporting IT俱乐部!
