WebRTC (Stream capture)

This time, the topic is stream capture:

(1)Stream from a video element to a video element

This sample plays a video in one element, captures it, and displays the capture in the other element. The key function is .captureStream(), or .mozCaptureStream() in Firefox. I have modified the original code slightly. The video files it plays, "chrome.webm" and "chrome.mp4", are available here; download them and save them in the same directory as the code.

sample01.html

<!DOCTYPE html>
<html>
    <head>
        <title>captureStream(): video to video</title>
        <style>
            video
            {
                margin: 0 10px 0 0;
                width: calc(50% - 7px);
            }

            video:last-of-type
            {
                margin-right: 0;
            }

            @media screen and (max-width: 400px)
            {
                video
                {
                    margin: 0 5px 20px 0;
                    width: calc(50% - 5px);
                }
            }
        </style>
    </head>
    <body>
        <h1><span>captureStream(): video to video</span></h1>
        <video id="leftVideo" playsinline controls loop muted>
            <source src="./chrome.webm" type="video/webm"/>
            <source src="./chrome.mp4" type="video/mp4"/>
            <p>This browser does not support the video element.</p>
        </video>
        <video id="rightVideo" playsinline autoplay muted></video>
        <p>Press play on the left video to start the demo.</p>

        <script>
            'use strict';
            const leftVideo = document.getElementById('leftVideo');
            const rightVideo = document.getElementById('rightVideo');
            leftVideo.addEventListener('canplay', () =>
            {
                let stream;
                // Note: HTMLMediaElement.captureStream() takes no parameters
                // (a frame-rate argument exists only on canvas.captureStream()),
                // so this fps value appears to be ignored; it is kept here
                // from the original sample.
                const fps = 0;
                if (leftVideo.captureStream)
                {
                    stream = leftVideo.captureStream(fps);
                }
                else if (leftVideo.mozCaptureStream)
                {
                    stream = leftVideo.mozCaptureStream(fps);
                }
                else
                {
                    console.error('Stream capture is not supported');
                    stream = null;
                }
                rightVideo.srcObject = stream;
            });
        </script>
    </body>
</html>

The code itself is simple. Once the left video has loaded enough data to start playing, the 'canplay' event fires and the handler registered with leftVideo.addEventListener('canplay', () => ...) runs.

  • The if / else if chain
    • This is where the screen capture happens; the required function seems to differ by browser.
    • .mozCaptureStream() is for Firefox.
    • .captureStream() is for everything else (probably).
  • rightVideo.srcObject = stream;
    • Feeds the captured stream into the other video element, which then plays it.
  • Firefox
    • The video gets captured, but no audio comes out.
  • Chrome
    • Audio plays, but capturing the video produces an error.

Things just never work out smoothly. To see what each browser actually put into the stream, the sketch below can help.
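As a debugging aid (my addition, not part of the original sample), the captured stream's tracks can be logged; a missing or muted audio track would explain the silent playback. A minimal sketch, assuming the stream variable from the canplay handler above:

// Debugging aid (not in the original sample): dump the captured
// stream's tracks. A missing or muted audio track explains silence
// on the receiving side.
function inspectStream(stream)
{
    if (!stream)
    {
        console.warn('no stream to inspect');
        return;
    }
    for (const track of stream.getTracks())
    {
        // kind is 'audio' or 'video'; a muted track delivers no media.
        console.log(`${track.kind}: readyState=${track.readyState}, muted=${track.muted}, enabled=${track.enabled}`);
    }
}

Calling inspectStream(stream) just before the rightVideo.srcObject = stream; assignment shows, per browser, which tracks actually made it into the stream.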

(2)Stream from a video element to a peer connection

This sample sends the captured data peer to peer. Below is the simplified HTML code.

samp02.html

<!DOCTYPE html>
<html>
    <head>
        <title>Video to peer connection</title>
        <style>
            video
            {
                margin: 0 10px 0 0;
                width: calc(50% - 7px);
            }

            video:last-of-type
            {
                margin-right: 0;
            }

            @media screen and (max-width: 400px)
            {
                video
                {
                    margin: 0 5px 20px 0;
                    width: calc(50% - 5px);
                }
            }
        </style>
    </head>
    <body>
        <video id="leftVideo" playsinline controls muted>
            <source src="./chrome.webm" type="video/webm"/>
            <source src="./chrome.mp4" type="video/mp4"/>
            <p>This browser does not support the video element.</p>
        </video>
        <video id="rightVideo" playsinline autoplay controls></video>
        <script src="./main.js" async></script>
    </body>
</html>

This time the JavaScript is in a separate file. Save it in the same folder as the HTML file (file name: "main.js").

main.js

/*
*  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
*  Use of this source code is governed by a BSD-style license
*  that can be found in the LICENSE file in the root of the source
*  tree.
*/

'use strict';

const leftVideo = document.getElementById('leftVideo');
const rightVideo = document.getElementById('rightVideo');

let stream;

let pc1;
let pc2;
// Legacy constraint-style offer options (offerToReceiveAudio/Video);
// kept as-is from the original sample.
const offerOptions =
{
    offerToReceiveAudio: 1,
    offerToReceiveVideo: 1
};

let startTime;

// Capture the left video exactly once, picking whichever capture
// function the browser provides.
function maybeCreateStream()
{
    if (stream) return;
    if (leftVideo.captureStream)
    {
        stream = leftVideo.captureStream();
        console.log('Captured stream from leftVideo with captureStream', stream);
        call();
    }
    else if (leftVideo.mozCaptureStream)
    {
        stream = leftVideo.mozCaptureStream();
        console.log('Captured stream from leftVideo with mozCaptureStream()', stream);
        call();
    }
    else console.log('captureStream() not supported');
}

// Video tag capture must be set up after video tracks are enumerated.
leftVideo.oncanplay = maybeCreateStream;
if (leftVideo.readyState >= 3)
{ // HAVE_FUTURE_DATA
  // Video is already ready to play, call maybeCreateStream in case oncanplay
  // fired before we registered the event handler.
    maybeCreateStream();
}

//leftVideo.play();

rightVideo.onloadedmetadata = () =>
{
    console.log(`Remote video videoWidth: ${rightVideo.videoWidth}px,  videoHeight: ${rightVideo.videoHeight}px`);
};

rightVideo.onresize = () =>
{
    console.log(`Remote video size changed to ${rightVideo.videoWidth}x${rightVideo.videoHeight}`);
  // We'll use the first onresize callback as an indication that
  // video has started playing out.
    if (startTime)
    {
        const elapsedTime = window.performance.now() - startTime;
        console.log('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
        startTime = null;
    }
};

// Wire two RTCPeerConnections together in the same page; note that no
// signaling server is involved anywhere in this demo.
function call()
{
    console.log('Starting call');

    startTime = window.performance.now();
    const videoTracks = stream.getVideoTracks();
    const audioTracks = stream.getAudioTracks();

    if (videoTracks.length > 0)
        console.log(`Using video device: ${videoTracks[0].label}`);
    if (audioTracks.length > 0)
        console.log(`Using audio device: ${audioTracks[0].label}`);

    const servers = null;
    pc1 = new RTCPeerConnection(servers);
    console.log('Created local peer connection object pc1');
    pc1.onicecandidate = e => onIceCandidate(pc1, e);

    pc2 = new RTCPeerConnection(servers);
    console.log('Created remote peer connection object pc2');
    pc2.onicecandidate = e => onIceCandidate(pc2, e);

    pc1.oniceconnectionstatechange = e => onIceStateChange(pc1, e);
    pc2.oniceconnectionstatechange = e => onIceStateChange(pc2, e);
    pc2.ontrack = gotRemoteStream;

    stream.getTracks().forEach(track => pc1.addTrack(track, stream));
    console.log('Added local stream to pc1');

    console.log('pc1 createOffer start');
    // This sample uses the legacy callback-based createOffer/setLocalDescription
    // overloads rather than the promise-based API.
    pc1.createOffer(onCreateOfferSuccess, onCreateSessionDescriptionError, offerOptions);
}

function onCreateSessionDescriptionError(error)
{
    console.log(`Failed to create session description: ${error.toString()}`);
}

function onCreateOfferSuccess(desc)
{
    console.log(`Offer from pc1 ${desc.sdp}`);
    console.log('pc1 setLocalDescription start');
    pc1.setLocalDescription(desc, () => onSetLocalSuccess(pc1), onSetSessionDescriptionError);
    console.log('pc2 setRemoteDescription start');
    pc2.setRemoteDescription(desc, () => onSetRemoteSuccess(pc2), onSetSessionDescriptionError);
    console.log('pc2 createAnswer start');
  // Since the 'remote' side has no media stream we need
  // to pass in the right constraints in order for it to
  // accept the incoming offer of audio and video.
    pc2.createAnswer(onCreateAnswerSuccess, onCreateSessionDescriptionError);
}

function onSetLocalSuccess(pc)
{
    console.log(`${getName(pc)} setLocalDescription complete`);
}

function onSetRemoteSuccess(pc)
{
    console.log(`${getName(pc)} setRemoteDescription complete`);
}

function onSetSessionDescriptionError(error)
{
    console.log(`Failed to set session description: ${error.toString()}`);
}

function gotRemoteStream(event)
{
    if (rightVideo.srcObject !== event.streams[0])
    {
        rightVideo.srcObject = event.streams[0];
        console.log('pc2 received remote stream', event);
    }
}

function onCreateAnswerSuccess(desc)
{
    console.log(`Answer from pc2: ${desc.sdp}`);
    console.log('pc2 setLocalDescription start');
    pc2.setLocalDescription(desc, () => onSetLocalSuccess(pc2), onSetSessionDescriptionError);
    console.log('pc1 setRemoteDescription start');
    pc1.setRemoteDescription(desc, () => onSetRemoteSuccess(pc1), onSetSessionDescriptionError);
}

function onIceCandidate(pc, event)
{
    // In-page "signaling": each ICE candidate is handed straight to the
    // other peer connection instead of travelling through a server.
    getOtherPc(pc).addIceCandidate(event.candidate)
      .then(
          () => onAddIceCandidateSuccess(pc),
          err => onAddIceCandidateError(pc, err)
      );
    console.log(`${getName(pc)} ICE candidate: ${event.candidate ? event.candidate.candidate : '(null)'}`);
}

function onAddIceCandidateSuccess(pc)
{
    console.log(`${getName(pc)} addIceCandidate success`);
}

function onAddIceCandidateError(pc, error)
{
    console.log(`${getName(pc)} failed to add ICE Candidate: ${error.toString()}`);
}

function onIceStateChange(pc, event)
{
    if (pc)
    {
        console.log(`${getName(pc)} ICE state: ${pc.iceConnectionState}`);
        console.log('ICE state change event: ', event);
    }
}

function getName(pc)
{
    return (pc === pc1) ? 'pc1' : 'pc2';
}

function getOtherPc(pc)
{
    return (pc === pc1) ? pc2 : pc1;
}

As before, save the video files "chrome.webm" and "chrome.mp4" in the same folder.

Open the HTML file and start playback on the left video; the JavaScript kicks in and a copy appears on the right. The program flow is as follows.

Unlike the previous sample, this one transfers the data over an RTCPeerConnection. No signaling server sits between the two sides, though: whenever a signaling-related event fires (an offer or answer is created, or an ICE candidate is gathered), the value is simply assigned to the other peer connection object in the same page.
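To make the pattern easier to see, below is a condensed sketch of the same in-page loopback signaling, rewritten with the promise-based API (main.js itself uses the older callback style). pc1, pc2 and rightVideo correspond to the objects in main.js, and stream is the capture from leftVideo:

// Condensed sketch of the "serverless" signaling in main.js, using the
// promise-based API instead of callbacks.
async function loopbackCall(stream)
{
    const pc1 = new RTCPeerConnection();
    const pc2 = new RTCPeerConnection();

    // Each side hands its ICE candidates directly to the other side;
    // in a real application these would travel via a signaling server.
    pc1.onicecandidate = e => pc2.addIceCandidate(e.candidate);
    pc2.onicecandidate = e => pc1.addIceCandidate(e.candidate);
    pc2.ontrack = e => { rightVideo.srcObject = e.streams[0]; };

    stream.getTracks().forEach(track => pc1.addTrack(track, stream));

    // The offer/answer exchange is likewise just local assignments.
    const offer = await pc1.createOffer();
    await pc1.setLocalDescription(offer);
    await pc2.setRemoteDescription(offer);
    const answer = await pc2.createAnswer();
    await pc2.setLocalDescription(answer);
    await pc1.setRemoteDescription(answer);
}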

This time, audio came out in Firefox. Chrome, as before, still didn't work.