This time, I added Pause/Resume, Mute/Unmute, Start Recording, and Save as functions.
Development environment
Same as last time.
- A Raspberry Pi (3 Model B+) and a PC are connected under the router.
- A USB webcam and a speaker are connected to the Raspberry Pi.
- The PC runs Ubuntu 20 LTS; the Raspberry Pi runs Raspberry Pi OS Lite (32-bit).
- The web browser is Firefox 90.0.
Web page and program description
Running this version of the program gives the screen below. Pause/Resume, Mute/Unmute, Start Recording, and Save as buttons have been added to the previous screen.
The additions simply wire functions that already existed in the original site's source code to buttons in the HTML. The button-to-function mapping is as follows (an example of the wiring is shown right after the table).
Button | Function |
---|---|
Pause/Resume | pause() |
Mute/Unmute | mute() |
Start Recording | start_stop_record() |
Save as | download() |
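Each button simply calls its function from an onclick attribute. For example, the Pause/Resume button is wired like this (taken verbatim from the listing below):

```html
<button type=button id="pause" onclick="pause();" title="pause or resume local player">Pause/Resume</button>
```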
The code is shown below. The additions since last time start at line 203.
webrtc02.html
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>UV4L WebRTC</title>
<script type="text/javascript">
var ws = null;
var pc;
var audio_video_stream;
var mediaConstraints = {
optional: [],
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
var iceCandidates = [];
var recorder = null;
function createPeerConnection() {
try {
pc = new RTCPeerConnection();
pc.onicecandidate = onIceCandidate;
pc.ontrack = onTrack;
pc.onremovestream = onRemoteStreamRemoved;
console.log("peer connection successfully created!");
} catch (e) {
console.error("createPeerConnection() failed");
}
}
function onIceCandidate(event) {
if (event.candidate && event.candidate.candidate) {
var candidate = {
sdpMLineIndex: event.candidate.sdpMLineIndex,
sdpMid: event.candidate.sdpMid,
candidate: event.candidate.candidate
};
var request = {
what: "addIceCandidate",
data: JSON.stringify(candidate)
};
ws.send(JSON.stringify(request));
} else {
console.log("End of candidates.");
}
}
function addIceCandidates() {
iceCandidates.forEach(function (candidate) {
pc.addIceCandidate(candidate,
function () {
console.log("IceCandidate added: " + JSON.stringify(candidate));
},
function (error) {
console.error("addIceCandidate error: " + error);
}
);
});
iceCandidates = [];
}
function onTrack(event) {
console.log("Remote track!");
var remoteVideoElement = document.getElementById('remote-video');
remoteVideoElement.srcObject = event.streams[0];
}
function onRemoteStreamRemoved(event) {
var remoteVideoElement = document.getElementById('remote-video');
remoteVideoElement.srcObject = null;
remoteVideoElement.src = ''; // TODO: remove
}
function start() {
if ("WebSocket" in window) {
document.getElementById("stop").disabled = false;
document.getElementById("start").disabled = true;
document.documentElement.style.cursor = 'wait';
var protocol = location.protocol === "https:" ? "wss:" : "ws:";
ws = new WebSocket(protocol + '//raspberrypi.local:8090/stream/webrtc');
ws.onopen = async function () {
iceCandidates = [];
remoteDesc = false;
audio_video_stream = await navigator.mediaDevices.getUserMedia({video: false, audio: true});
createPeerConnection();
audio_video_stream.getTracks().forEach(track => pc.addTrack(track, audio_video_stream));
var request = {
what: "call",
options: {
force_hw_vcodec: false,
vformat: "30",
trickle_ice: true
}
};
ws.send(JSON.stringify(request));
console.log("call(), request=" + JSON.stringify(request));
};
ws.onmessage = function (evt) {
var msg = JSON.parse(evt.data);
if (msg.what !== 'undefined') {
var what = msg.what;
var data = msg.data;
}
console.log("message =" + what);
switch (what) {
case "offer":
pc.setRemoteDescription(new RTCSessionDescription(JSON.parse(data)),
function onRemoteSdpSuccess() {
remoteDesc = true;
addIceCandidates();
console.log('onRemoteSdpSucces()');
pc.createAnswer(function (sessionDescription) {
pc.setLocalDescription(sessionDescription);
var request = {
what: "answer",
data: JSON.stringify(sessionDescription)
};
ws.send(JSON.stringify(request));
console.log(request);
}, function (error) {
alert("Failed to createAnswer: " + error);
}, mediaConstraints);
},
function onRemoteSdpError(event) {
alert('Failed to set remote description (unsupported codec on this browser?): ' + event);
stop();
}
);
break;
case "iceCandidate": // when trickle is enabled
if (!msg.data) {
console.log("Ice Gathering Complete");
break;
}
var elt = JSON.parse(msg.data);
let candidate = new RTCIceCandidate({sdpMLineIndex: elt.sdpMLineIndex, candidate: elt.candidate});
iceCandidates.push(candidate);
if (remoteDesc)
addIceCandidates();
document.documentElement.style.cursor = 'default';
break;
}
};
ws.onclose = function (evt) {
if (pc) {
pc.close();
pc = null;
}
document.getElementById("stop").disabled = true;
document.getElementById("start").disabled = false;
document.documentElement.style.cursor = 'default';
};
ws.onerror = function (evt) {
alert("An error has occurred!");
ws.close();
};
} else {
alert("Sorry, this browser does not support WebSockets.");
}
}
function stop() {
if (audio_video_stream) {
try {
if (audio_video_stream.getVideoTracks().length)
audio_video_stream.getVideoTracks()[0].stop();
if (audio_video_stream.getAudioTracks().length)
audio_video_stream.getAudioTracks()[0].stop();
audio_video_stream.stop(); // deprecated
} catch (e) {
for (var i = 0; i < audio_video_stream.getTracks().length; i++)
audio_video_stream.getTracks()[i].stop();
}
audio_video_stream = null;
}
document.getElementById('remote-video').srcObject = null;
document.getElementById('remote-video').src = ''; // TODO; remove
if (pc) {
pc.close();
pc = null;
}
if (ws) {
ws.close();
ws = null;
}
document.getElementById("stop").disabled = true;
document.getElementById("start").disabled = false;
document.documentElement.style.cursor = 'default';
}
function mute() {
var remoteVideo = document.getElementById("remote-video");
remoteVideo.muted = !remoteVideo.muted;
}
function pause() {
var remoteVideo = document.getElementById("remote-video");
if (remoteVideo.paused)
remoteVideo.play();
else
remoteVideo.pause();
}
function handleDataAvailable(event) {
//console.log(event);
if (event.data && event.data.size > 0) {
recordedBlobs.push(event.data);
}
}
function stop_record() {
if (recorder) {
recorder.stop();
console.log("recording stopped");
document.getElementById('record').innerHTML = 'Start Recording'
recorder = null;
}
}
function startRecording(stream) {
recordedBlobs = [];
var options = {mimeType: 'video/webm;codecs=vp9'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
//options = {mimeType: 'video/webm;codecs=vp8'};
options = {mimeType: 'video/webm;codecs=vp8,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: 'video/webm;codecs=h264'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: 'video/webm'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: ''};
}
}
}
}
try {
recorder = new MediaRecorder(stream, options);
} catch (e) {
console.error('Exception while creating MediaRecorder: ' + e);
alert('Exception while creating MediaRecorder: ' + e + '. mimeType: ' + options.mimeType);
return;
}
console.log('Created MediaRecorder', recorder, 'with options', options);
recorder.ondataavailable = handleDataAvailable;
recorder.onwarning = function (e) {
console.log('Warning: ' + e);
};
recorder.start();
console.log('MediaRecorder started', recorder);
}
function start_stop_record() {
if (pc && !recorder) {
var streams = pc.getRemoteStreams();
if (streams.length) {
console.log("starting recording");
startRecording(streams[0]);
document.getElementById('record').innerHTML = 'Stop Recording';
}
} else {
stop_record();
}
}
function download() {
if (typeof recordedBlobs !== 'undefined') { // avoid a ReferenceError when nothing has been recorded yet
var blob = new Blob(recordedBlobs, {type: 'video/webm'});
var url = window.URL.createObjectURL(blob);
var a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'video.webm';
document.body.appendChild(a);
a.click();
setTimeout(function () {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
}
}
</script>
<style>
video {
background: #eee none repeat scroll 0 0;
border: 1px solid #aaa;
}
</style>
</head>
<body>
<h1><span>WebRTC two-way Audio/Video Intercom</span></h1>
<video id="remote-video" autoplay="" width="640" height="480">
Your browser does not support the video tag.
</video><br>
<button id="start" style="background-color: green; color: white" onclick="start();">Call!</button>
<button disabled id="stop" style="background-color: red; color: white" onclick="stop();">Hang up</button>
<button type=button id="pause" onclick="pause();" title="pause or resume local player">Pause/Resume</button>
<button type=button id="mute" onclick="mute();" title="mute or unmute remote audio source">Mute/Unmute</button>
<button type=button id="record" onclick="start_stop_record();" title="start or stop recording audio/video">Start Recording</button>
<button type=button id="download" onclick="download();" title="save recorded audio/video">Save as</button>
</body>
</html>
- Lines 203-206: function mute() {
  - Runs when the Mute/Unmute button is pressed.
  - It simply inverts the value, as in remoteVideo.muted = !remoteVideo.muted;.
- Lines 208-214: function pause() {
  - Runs when the Pause/Resume button is pressed.
  - It checks the current state with if (remoteVideo.paused) and then
  - calls remoteVideo.play(); (resume) or remoteVideo.pause(); (pause).
- Line 268: function start_stop_record() {
  - Runs when the Start Recording button is pressed.
  - This button doubles as the start and the stop of recording.
  - When recording starts:
    - the button label becomes 'Stop Recording' and startRecording() runs.
  - When recording stops:
    - the button label becomes 'Start Recording' and stop_record() runs.
- Lines 281-296: function download() {
  - Runs when the Save as button is pressed.
  - Downloads the recorded data (a condensed standalone sketch of the whole record-and-save flow follows this list).
- Line 313: adds the Pause/Resume button
- Line 314: adds the Mute/Unmute button
- Line 315: adds the Start Recording button
- Line 316: adds the Save as button
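For reference, the record-and-save flow implemented by startRecording(), stop_record(), handleDataAvailable() and download() boils down to the condensed sketch below. This is only a sketch: `stream` is assumed to be the remote MediaStream (in the code above it comes from pc.getRemoteStreams()[0]), the mimeType fallback chain is shortened to a single check, and the *Sketch function names are placeholders, not names used in the listing.

```javascript
// Condensed sketch of the recording/saving flow used above (not a drop-in replacement).
let recordedBlobs = [];
let recorder = null;

function startRecordingSketch(stream) {              // stream: remote MediaStream
  recordedBlobs = [];
  // Fall back to the browser default when VP8/Opus WebM is not supported.
  const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp8,opus')
    ? 'video/webm;codecs=vp8,opus'
    : '';
  recorder = new MediaRecorder(stream, { mimeType });
  recorder.ondataavailable = (e) => {                 // collect the recorded chunks
    if (e.data && e.data.size > 0) recordedBlobs.push(e.data);
  };
  recorder.start();
}

function stopRecordingSketch() {
  if (recorder) { recorder.stop(); recorder = null; }
}

function saveRecordingSketch() {                      // what the "Save as" button does
  const blob = new Blob(recordedBlobs, { type: 'video/webm' });
  const url = URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.href = url;
  a.download = 'video.webm';
  document.body.appendChild(a);
  a.click();
  setTimeout(() => { document.body.removeChild(a); URL.revokeObjectURL(url); }, 100);
}
```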
Verifying the program
- Raspberry Pi side:
  - Same as in the previous post, "音声を送る(3)" (Sending audio (3)).
- PC side:
  - Save this HTML code on the PC as "webrtc02.html".
  - Open it with Firefox on the PC.
- Running it:
  - Enter the path of the file in the Firefox URL bar
    (or double-click "webrtc02.html" in the file explorer).
- The new features can be checked as follows:
  - Press the Call button.
  - When asked for permission to use the media devices, choose Allow.
  - Wait until streaming starts.
  - Press the Pause/Resume button and confirm that streaming pauses and resumes.
  - Press the Mute/Unmute button and confirm that the audio turns off and on.
  - Press the Start Recording button to start recording; the button changes to Stop Recording.
  - After a while, press Stop Recording to stop recording.
  - Press the Save as button to download the recorded file.
A new page layout
Keeping the functionality the same, I changed the page layout. The program now consists of three files: intercom.html, intercom.js, and intercom.css (listed below in that order). Save them in the same folder on the PC and double-click intercom.html, and a screen like the one below appears.
Description of the operation
- Stream row: controls streaming
  - Start button: starts streaming
  - Pause button: pauses and resumes streaming
  - Mute button: toggles mute on and off
  - Stop button: ends streaming
- Take & Play row: taking photos and recording video
  - Photo button: takes a photo
  - Video button: records video
  - Play button: plays back the recorded video
- Download row: downloading the photo and video
  - Photo button: downloads the photo
  - Video button: downloads the video
- Only the buttons for the functions that can be used at that moment are enabled (see the sketch after this list).
- Right after the page is opened, only the Start button, which starts streaming, is enabled.
- When entering an operation such as taking a photo or playing back a video, the Pause button changes to a Back button.
- Pressing the Back button leaves that operation and returns to streaming mode.
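In intercom.js this enabling and disabling is done by calling set_Btn(id, flag) for every affected button each time the mode changes. The helper below is only an illustrative sketch of that pattern, assuming the set_Btn() defined at the bottom of intercom.js; enterMode and ALL_BUTTONS are hypothetical names, not part of the listing.

```javascript
// Illustrative only: intercom.js spells these set_Btn() calls out inline for each mode.
const ALL_BUTTONS = ["start", "pause", "mute", "stop", "photo", "record", "play", "dn_photo", "dn_video"];

// Enable exactly the buttons in enabledIds and disable all the others.
function enterMode(enabledIds) {
  ALL_BUTTONS.forEach(id => set_Btn(id, enabledIds.includes(id) ? 1 : 0));
}

// Example: once streaming is up (compare onTrack() in intercom.js), only these stay active:
// enterMode(["pause", "mute", "stop", "photo", "record"]);
```

The three files are listed below, starting with intercom.html.
intercom.html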
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<link rel='stylesheet' type='text/css' href='./intercom.css' >
<title>UV4L WebRTC</title>
</head>
<body>
<div class='b_frame'>
<div class='t_font'><u>UV4L WebRTC 1.0</u></div>
<div class='p_frame'>
<video id="remote-video" autoplay="" width="640" height="480"> </video>
<video id="play-video" controls style='display:none'>Your browser does not support the video tag.</video>
</div>
<div class="_menu">
<div class="input-group">
<label>Stream</label>
<button type='button' id='start' onclick='start();'>Start</button>
<button type='button' id="pause" onclick="pause();" title="pause or resume">Pause</button>
<button type='button' id="mute" onclick="mute();" title="mute or unmute ">Mute</button>
<button type='button' id='stop' onclick="stop();">Stop</button><br>
<label>Take & Play</label>
<button type='button' id='photo' onclick="take_photo();" title="take photo">Photo</button>
<button type='button' id="record" onclick="start_stop_record();" title="start or stop recording">Video</button>
<button type='button' id="play" onclick="play_video();" title="play video">Play</button><br>
<label>Download</label>
<button type='button' id='dn_photo' onclick="dn_photo();" title="save photo">Photo</button>
<button type='button' id='dn_video' onclick="download();" title="save video">Video</button>
<canvas style='display:none'></canvas>
</div>
</div>
</div>
<script src='intercom.js'></script>
</body>
</html>
intercom.js
var ws = null;
var pc;
var audio_video_stream;
var recorder = null;
var aa_streams = [];
var mediaConstraints = {
optional: [],
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
var iceCandidates = [];
var botton_buff = ["pause", "mute", 'stop', 'photo', "record", "play", 'dn_photo', 'dn_video'];
var flg_view = 0;
var flg_record = 0;
var remoteVideo = document.getElementById("remote-video");
botton_buff.forEach(element => set_Btn(element, 0));
function createPeerConnection() {
try {
pc = new RTCPeerConnection();
pc.onicecandidate = onIceCandidate;
pc.ontrack = onTrack;
pc.onremovestream = onRemoteStreamRemoved;
console.log("peer connection successfully created!");
} catch (e) {
console.error("createPeerConnection() failed");
}
}
function onIceCandidate(event) {
if (event.candidate && event.candidate.candidate) {
var candidate = {
sdpMLineIndex: event.candidate.sdpMLineIndex,
sdpMid: event.candidate.sdpMid,
candidate: event.candidate.candidate
};
var request = {
what: "addIceCandidate",
data: JSON.stringify(candidate)
};
ws.send(JSON.stringify(request));
} else {
console.log("End of candidates.");
}
}
function addIceCandidates() {
iceCandidates.forEach(function (candidate) {
pc.addIceCandidate(candidate,
function () {
console.log("IceCandidate added: " + JSON.stringify(candidate));
},
function (error) {
console.error("addIceCandidate error: " + error);
}
);
});
iceCandidates = [];
}
function onTrack(event) {
console.log("Remote track!");
var remoteVideoElement = document.getElementById('remote-video');
remoteVideoElement.srcObject = event.streams[0];
aa_streams[0] = event.streams[0];
botton_buff = ["pause", "mute", 'stop', 'photo', "record"];
botton_buff.forEach(element => set_Btn(element, 1));
}
function onRemoteStreamRemoved(event) {
var remoteVideoElement = document.getElementById('remote-video');
remoteVideoElement.srcObject = null;
remoteVideoElement.src = ''; // TODO: remove
}
function start() {
set_Btn('start', 0)
if ("WebSocket" in window) {
document.getElementById("stop").disabled = false;
document.getElementById("start").disabled = true;
document.documentElement.style.cursor = 'wait';
var protocol = location.protocol === "https:" ? "wss:" : "ws:";
ws = new WebSocket(protocol + '//raspberrypi.local:8090/stream/webrtc');
ws.onopen = async function () {
iceCandidates = [];
remoteDesc = false;
audio_video_stream = await navigator.mediaDevices.getUserMedia({video: false, audio: true});
createPeerConnection();
audio_video_stream.getTracks().forEach(track => pc.addTrack(track, audio_video_stream));
var request = {
what: "call",
options: {
force_hw_vcodec: false,
vformat: "30",
trickle_ice: true
}
};
ws.send(JSON.stringify(request));
console.log("call(), request=" + JSON.stringify(request));
};
ws.onmessage = function (evt) {
var msg = JSON.parse(evt.data);
if (msg.what !== 'undefined') {
var what = msg.what;
var data = msg.data;
}
console.log("message =" + what);
switch (what) {
case "offer":
pc.setRemoteDescription(new RTCSessionDescription(JSON.parse(data)),
function onRemoteSdpSuccess() {
remoteDesc = true;
addIceCandidates();
console.log('onRemoteSdpSucces()');
pc.createAnswer(function (sessionDescription) {
pc.setLocalDescription(sessionDescription);
var request = {
what: "answer",
data: JSON.stringify(sessionDescription)
};
ws.send(JSON.stringify(request));
console.log(request);
}, function (error) {
alert("Failed to createAnswer: " + error);
}, mediaConstraints);
},
function onRemoteSdpError(event) {
alert('Failed to set remote description (unsupported codec on this browser?): ' + event);
stop();
}
);
break;
case "iceCandidate": // when trickle is enabled
if (!msg.data) {
console.log("Ice Gathering Complete");
break;
}
var elt = JSON.parse(msg.data);
let candidate = new RTCIceCandidate({sdpMLineIndex: elt.sdpMLineIndex, candidate: elt.candidate});
iceCandidates.push(candidate);
if (remoteDesc)
addIceCandidates();
document.documentElement.style.cursor = 'default';
break;
}
};
ws.onclose = function (evt) {
if (pc) {
pc.close();
pc = null;
}
document.getElementById("stop").disabled = true;
document.getElementById("start").disabled = false;
document.documentElement.style.cursor = 'default';
};
ws.onerror = function (evt) {
alert("An error has occurred!");
ws.close();
};
} else {
alert("Sorry, this browser does not support WebSockets.");
}
}
function stop() {
botton_buff = ["pause", "mute", 'stop', 'photo', "record", "play", 'dn_photo', 'dn_video'];
botton_buff.forEach(element => set_Btn(element, 0));
set_Btn('start', 1);
if (audio_video_stream) {
try {
if (audio_video_stream.getVideoTracks().length)
audio_video_stream.getVideoTracks()[0].stop();
if (audio_video_stream.getAudioTracks().length)
audio_video_stream.getAudioTracks()[0].stop();
audio_video_stream.stop(); // deprecated
} catch (e) {
for (var i = 0; i < audio_video_stream.getTracks().length; i++)
audio_video_stream.getTracks()[i].stop();
}
audio_video_stream = null;
}
document.getElementById('remote-video').srcObject = null;
document.getElementById('remote-video').src = ''; // TODO; remove
if (pc) {
pc.close();
pc = null;
}
if (ws) {
ws.close();
ws = null;
}
document.getElementById("stop").disabled = true;
document.getElementById("start").disabled = false;
document.documentElement.style.cursor = 'default';
}
function pause() {
botton_buff = ["play", 'dn_photo', 'dn_video'];
botton_buff.forEach(element => set_Btn(element, 0));
botton_buff = ["mute", 'stop', 'photo', "record"];
botton_buff.forEach(element => set_Btn(element, remoteVideo.paused));
if (remoteVideo.paused){
if(flg_view == 1){
remoteVideo.style.display = "";
remoteVideo.muted = false; // boolean false (the string "false" is truthy and would keep the audio muted)
document.getElementById('play-video').style.display = "none";
flg_view = 0;
}
document.getElementById("pause").innerHTML = 'Pause';
remoteVideo.play();
}
else{
remoteVideo.pause();
document.getElementById('pause').innerHTML = 'Back';
}
set_Btn("pause",1);
}
function mute() {
remoteVideo.muted = !remoteVideo.muted;
if (remoteVideo.muted){
document.getElementById("mute").innerHTML = 'No mute';
}
else{
document.getElementById('mute').innerHTML = 'Mute';
}
}
function download() {
if (typeof recordedBlobs !== 'undefined') { // avoid a ReferenceError when nothing has been recorded yet
var blob = new Blob(recordedBlobs, {type: 'video/webm'});
var url = window.URL.createObjectURL(blob);
var a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'video.webm';
document.body.appendChild(a);
a.click();
setTimeout(function () {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
}
}
function start_stop_record() {
botton_buff = ["start", "pause", "mute", 'stop', 'photo', "play", 'dn_video', 'dn_photo'];
botton_buff.forEach(element => set_Btn(element, 0));
set_Btn("record",1);
document.getElementById('pause').innerHTML = 'Back';
if (pc && !flg_record) {
if (aa_streams.length) {
console.log("starting recording");
startRecording(aa_streams[0]);
document.getElementById('record').innerHTML = 'Stop';
flg_record = 1;
}
} else {
console.log("Enter Stop");
botton_buff = ["pause", "record", "play", 'dn_video'];
botton_buff.forEach(element => set_Btn(element, 1));
stop_record();
document.getElementById('record').innerHTML = 'Video';
remoteVideo.pause();
set_Btn("record",0);
flg_record = 0;
}
}
function startRecording(stream) {
recordedBlobs = [];
var options = {mimeType: 'video/webm;codecs=vp9,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
//options = {mimeType: 'video/webm;codecs=vp8'};
options = {mimeType: 'video/webm;codecs=vp8,opus'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: 'video/webm;codecs=h264'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: 'video/webm'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: ''};
}
}
}
}
try {
recorder = new MediaRecorder(stream, options);
} catch (e) {
console.error('Exception while creating MediaRecorder: ' + e);
alert('Exception while creating MediaRecorder: ' + e + '. mimeType: ' + options.mimeType);
return;
}
console.log('Created MediaRecorder', recorder, 'with options', options);
recorder.ondataavailable = handleDataAvailable;
recorder.onwarning = function (e) {
console.log('Warning: ' + e);
};
recorder.start();
console.log('MediaRecorder started', recorder);
}
function stop_record() {
if (recorder) {
recorder.stop();
console.log("recording stopped");
document.getElementById('record').innerHTML = 'Video';
recorder = null;
}
}
function handleDataAvailable(event) {
//console.log(event);
if (event.data && event.data.size > 0) {
recordedBlobs.push(event.data);
}
}
function play_video() {
botton_buff = ["start", "mute", 'stop', 'photo', "record", 'dn_photo'];
botton_buff.forEach(element => set_Btn(element, 0));
botton_buff = ["pause", "play", 'dn_video'];
botton_buff.forEach(element => set_Btn(element, 1));
document.getElementById('pause').innerHTML = 'Back';
console.log('Play video');
recorder = null;
flg_view = 1;
remoteVideo.style.display = "none";
remoteVideo.pause();
remoteVideo.muted = "true";
document.getElementById('play-video').style.display = "";
var superBuffer = new Blob(recordedBlobs, {type: 'video/webm'});
var recordedVideoElement = document.getElementById('play-video');
recordedVideoElement.src = URL.createObjectURL(superBuffer);
}
function dn_photo(){
var canvas = document.querySelector('canvas');
var url = canvas.toDataURL('image/jpeg', 1.0);
var a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'pic.jpg';
document.body.appendChild(a);
a.click();
setTimeout(function () {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
}
function take_photo() {
botton_buff = ['start', "mute", 'stop', "record", "play", 'dn_video'];
botton_buff.forEach(element => set_Btn(element, 0));
botton_buff = ['pause', "photo", 'dn_photo'];
botton_buff.forEach(element => set_Btn(element, 1));
document.getElementById('pause').innerHTML = 'Back';
remoteVideo.pause();
var canvas = document.querySelector('canvas');
canvas.width = remoteVideo.videoWidth;
canvas.height = remoteVideo.videoHeight;
canvas.getContext('2d').drawImage(remoteVideo, 0, 0, canvas.width, canvas.height);
}
function set_Btn(_id, flg) {
var flg_able = true;
var flg_back = "gray";
if(flg){
flg_able = false;
flg_back = "#228b22";
}
document.getElementById(_id).disabled = flg_able;
document.getElementById(_id).style.backgroundColor = flg_back;
}
intercom.css
@charset "UTF-8";
.t_font {
font-size: 32px;
font-weight: bold;
font-style: italic;
text-align: center;
color: #0ff;
}
.b_frame {
width: 670px;
background: #363636;
border-radius: 50px;
border-style: ridge;
border-width: 5px 15px;
border-color: sienna;
margin: 0 auto
}
.p_frame {
width: 644px;
height: 480px;
background: #363636;
border-radius: 10px;
border-style: ridge;
border-width: 5px;
border-color: sienna;
margin: 10px auto;
text-align: center;
}
button {
display: inline;
margin: 4px 10px;
line-height: 15px;
width: 80px;
font-size: 15px;
font-weight: bold;
font-style: italic;
cursor: pointer;
color: #fff;
background: #228b22;
border-radius: 20px;
text-align: center;
padding-left: 4px;
}
.input-group>label {
display: inline-block;
font-size: 16px;
font-weight: bold;
font-style: italic;
padding-left: 15px;
min-width: 20%;
color: #0ff;
margin-left: 50px;
}
._menu {
display: block;
flex-wrap: nowrap;
min-width: 450px;
background: #363636;
border-radius: 4px;
margin: 15px auto 30px;
}
All of the files are saved here.
What's next
As it is, this could probably be placed at the front door and used as an intercom. I would like to add more features (for example, turning on a light when it gets dark, or recording while away), but that will require a dedicated server. Next time I would like to look into that.