Overview
main.js
/*
 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree.
*/
'use strict';
var instantMeter = document.querySelector('#instant meter');
var slowMeter = document.querySelector('#slow meter');
var clipMeter = document.querySelector('#clip meter');
var instantValueDisplay = document.querySelector('#instant .value');
var slowValueDisplay = document.querySelector('#slow .value');
var clipValueDisplay = document.querySelector('#clip .value');
var videoElement = document.querySelector('video');
var audioSelect = document.querySelector('select#audioSource');
var videoSelect = document.querySelector('select#videoSource');
navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia;

try {
  window.AudioContext = window.AudioContext || window.webkitAudioContext;
  window.audioContext = new AudioContext();
} catch (e) {
  alert('Web Audio API not supported.');
}
function gotSources(sourceInfos) {
  for (var i = 0; i !== sourceInfos.length; ++i) {
    var sourceInfo = sourceInfos[i];
    var option = document.createElement('option');
    option.value = sourceInfo.id;
    if (sourceInfo.kind === 'audio') {
      option.text = sourceInfo.label ||
          'microphone ' + (audioSelect.length + 1);
      audioSelect.appendChild(option);
    } else if (sourceInfo.kind === 'video') {
      option.text = sourceInfo.label || 'camera ' + (videoSelect.length + 1);
      videoSelect.appendChild(option);
    } else {
      console.log('Some other kind of source: ', sourceInfo);
    }
  }
}
if (typeof MediaStreamTrack === 'undefined') {
  alert('This browser does not support MediaStreamTrack.\n\nTry Chrome.');
} else {
  MediaStreamTrack.getSources(gotSources);
}
function successCallback(stream) {
  var videoTracks = stream.getVideoTracks();
  var audioTracks = stream.getAudioTracks();
  if (audioTracks.length === 1 && videoTracks.length === 0) {
    console.log('Got stream with constraints:', window.constraints);
    console.log('Using audio device: ' + audioTracks[0].label);
    stream.onended = function() {
      console.log('Stream ended');
    };
  }
  window.stream = stream; // make stream available to console
  videoElement.src = window.URL.createObjectURL(stream);
  var soundMeter = window.soundMeter = new SoundMeter(window.audioContext);
  soundMeter.connectToSource(stream);
  setInterval(function() {
    instantMeter.value = instantValueDisplay.innerText =
        soundMeter.instant.toFixed(2);
    slowMeter.value = slowValueDisplay.innerText =
        soundMeter.slow.toFixed(2);
    clipMeter.value = clipValueDisplay.innerText =
        soundMeter.clip;
  }, 200);
}
function errorCallback(error) {
  console.log('navigator.getUserMedia error: ', error);
}
function start() {
  if (window.stream) {
    videoElement.src = null;
    window.stream.stop();
  }
  var audioSource = audioSelect.value;
  var videoSource = videoSelect.value;
  var constraints = {
    audio: {
      optional: [{
        sourceId: audioSource
      }]
    },
    video: {
      optional: [{
        sourceId: videoSource
      }]
    }
  };
  window.constraints = constraints; // expose for logging in successCallback
  navigator.getUserMedia(constraints, successCallback, errorCallback);
}
audioSelect.onchange = start;
videoSelect.onchange = start;
start();
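
The code above uses the legacy callback form of navigator.getUserMedia, MediaStreamTrack.getSources, URL.createObjectURL(stream) and stream.stop(), all of which have since been superseded. A minimal sketch of the same flow on the promise-based navigator.mediaDevices API (not part of the original sample; videoElement is the same element queried at the top of main.js) could look like this:

// Enumerate devices and open the selected camera/microphone with the
// modern navigator.mediaDevices API.
async function startModern() {
  // Note: device labels may be empty until permission has been granted.
  var devices = await navigator.mediaDevices.enumerateDevices();
  var mics = devices.filter(function(d) { return d.kind === 'audioinput'; });
  var cams = devices.filter(function(d) { return d.kind === 'videoinput'; });
  var constraints = {
    audio: mics.length ? {deviceId: mics[0].deviceId} : true,
    video: cams.length ? {deviceId: cams[0].deviceId} : true
  };
  var stream = await navigator.mediaDevices.getUserMedia(constraints);
  videoElement.srcObject = stream; // replaces URL.createObjectURL(stream)
  return stream;
}
// Stopping: stream.getTracks().forEach(function(t) { t.stop(); }) replaces stream.stop().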
soundmeter.js (audio level display)
/*
 * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree.
*/
'use strict';
// Meter class that generates a number correlated to audio volume.
// The meter class itself displays nothing, but it makes the
// instantaneous and time-decaying volumes available for inspection.
// It also reports on the fraction of samples that were at or near
// the top of the measurement range.
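// In terms of the 2048-sample buffer x[0..N-1] processed below:
//   instant = sqrt(sum(x[i]^2) / N)          -- RMS of the current buffer
//   slow    = 0.95 * slow + 0.05 * instant   -- exponentially decaying average
//   clip    = count(|x[i]| > 0.99) / N       -- fraction of near-full-scale samples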
function SoundMeter(context) {
  this.context = context;
  this.instant = 0.0;
  this.slow = 0.0;
  this.clip = 0.0;
  this.script = context.createScriptProcessor(2048, 1, 1);
  var that = this;
  this.script.onaudioprocess = function(event) {
    var input = event.inputBuffer.getChannelData(0);
    var i;
    var sum = 0.0;
    var clipcount = 0;
    for (i = 0; i < input.length; ++i) {
      sum += input[i] * input[i];
      if (Math.abs(input[i]) > 0.99) {
        clipcount += 1;
      }
    }
    that.instant = Math.sqrt(sum / input.length);
    that.slow = 0.95 * that.slow + 0.05 * that.instant;
    that.clip = clipcount / input.length;
  };
}
SoundMeter.prototype.connectToSource = function(stream) {
  console.log('SoundMeter connecting');
  this.mic = this.context.createMediaStreamSource(stream);
  this.mic.connect(this.script);
  // necessary to make sample run, but should not be.
  this.script.connect(this.context.destination);
};
SoundMeter.prototype.stop = function() {
  this.mic.disconnect();
  this.script.disconnect();
};
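
For reference, the three values can be sanity-checked outside the Web Audio graph. A small standalone sketch (test code added here, not part of the sample) that runs the same formulas over a synthetic full-scale sine buffer:

// Verify the meter math on a synthetic buffer: a full-scale sine wave
// has an RMS of about 0.707 and some samples above the 0.99 clip threshold.
var N = 2048;
var sum = 0;
var clipcount = 0;
for (var n = 0; n < N; n++) {
  var x = Math.sin(2 * Math.PI * n / 128); // full-scale sine, period 128 samples
  sum += x * x;
  if (Math.abs(x) > 0.99) {
    clipcount++;
  }
}
console.log('instant (RMS):', Math.sqrt(sum / N).toFixed(2)); // ≈ 0.71
console.log('clip fraction:', (clipcount / N).toFixed(2));    // ≈ 0.09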
This post is still unfinished; notes on RTCPeerConnection will be added later.