<!doctype html>
<html lang="en-us">
    <head>
        <title>ggwave : javascript example</title>
    </head>
    <body>
        <div id="main-container">
            Minimal <b>ggwave</b> example using JavaScript bindings

            <br><br>

            <div>Tx Data:</div> <textarea name="textarea" id="txData" style="width:300px;height:100px;">Hello javascript</textarea><br>

            <button onclick="onSend();">Send</button>

            <br><br>

            <div>Rx data:</div> <textarea name="textarea" id="rxData" style="width:300px;height:100px;" disabled></textarea><br>

            <button id="captureStart">Start capturing</button>
            <button id="captureStop" hidden>Stop capturing</button>

            <br><br>

            <div class="cell-version">
                <span>
                    Build time: <span class="nav-link">@GIT_DATE@</span> |
                    Commit hash: <a class="nav-link" href="https://github.com/ggerganov/ggwave/commit/@GIT_SHA1@">@GIT_SHA1@</a> |
                    Commit subject: <span class="nav-link">@GIT_COMMIT_SUBJECT@</span> |
                    <a class="nav-link" href="https://github.com/ggerganov/ggwave/tree/master/examples/ggwave-js">Source Code</a>
                </span>
            </div>
        </div>

        <script type="text/javascript" src="ggwave.js"></script>
        <script type="text/javascript">
            window.AudioContext = window.AudioContext || window.webkitAudioContext;
            window.OfflineAudioContext = window.OfflineAudioContext || window.webkitOfflineAudioContext;

            var context = null;
            var mediaStream = null;
            var recorder = null;

            // the ggwave module instance
            var ggwave = null;
            var parameters = null;
            var instance = null;

            // instantiate the ggwave instance
            // ggwave_factory comes from the ggwave.js module
            ggwave_factory().then(function(obj) {
                ggwave = obj;
            });
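
            // NOTE: the module is loaded asynchronously, so ggwave.init(), encode() and decode()
            // must not be called before the promise above resolves and `ggwave` is assigned.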

            var txData = document.getElementById("txData");
            var rxData = document.getElementById("rxData");
            var captureStart = document.getElementById("captureStart");
            var captureStop = document.getElementById("captureStop");

            // helper function: reinterpret the raw bytes of a typed array as another typed array type
            function convertTypedArray(src, type) {
                var buffer = new ArrayBuffer(src.byteLength);
                new src.constructor(buffer).set(src);
                return new type(buffer);
            }
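
            // For example, in onSend() below the byte array returned by ggwave.encode() is
            // reinterpreted as a Float32Array before being copied into an AudioBuffer, and in the
            // capture callback the Float32 samples are reinterpreted as an Int8Array because
            // ggwave.decode() is fed the samples as raw bytes.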

            // initialize audio context and ggwave
            function init() {
                if (!context) {
                    context = new AudioContext({sampleRate: 48000});

                    parameters = ggwave.getDefaultParameters();
                    parameters.sampleRateInp = context.sampleRate;
                    parameters.sampleRateOut = context.sampleRate;
                    instance = ggwave.init(parameters);
                }
            }
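
            // Note: init() is only called from click handlers because browsers require a user
            // gesture before audio can start. If the AudioContext still ends up "suspended",
            // a call to context.resume() may be needed before playback or capture; it is
            // omitted here to keep the example minimal.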

            //
            // Tx
            //

            function onSend() {
                init();

                // pause audio capture during transmission
                captureStop.click();

                // generate audio waveform
                var waveform = ggwave.encode(instance, txData.value, ggwave.ProtocolId.GGWAVE_PROTOCOL_AUDIBLE_FAST, 10);

                // play audio
                var buf = convertTypedArray(waveform, Float32Array);
                var buffer = context.createBuffer(1, buf.length, context.sampleRate);
                buffer.getChannelData(0).set(buf);
                var source = context.createBufferSource();
                source.buffer = buffer;
                source.connect(context.destination);
                source.start(0);
            }
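
            // The third argument of ggwave.encode() selects the transmission protocol and the
            // fourth is the output volume; other ggwave.ProtocolId values (for example the
            // ultrasound variants, if available in this build) can be substituted for
            // GGWAVE_PROTOCOL_AUDIBLE_FAST.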

            //
            // Rx
            //

            captureStart.addEventListener("click", function () {
                init();

                let constraints = {
                    audio: {
                        // disable browser audio processing that can distort the received
                        // waveform (possibly not strictly necessary)
                        echoCancellation: false,
                        autoGainControl: false,
                        noiseSuppression: false
                    }
                };

                navigator.mediaDevices.getUserMedia(constraints).then(function (e) {
                    mediaStream = context.createMediaStreamSource(e);

                    var bufferSize = 1024;
                    var numberOfInputChannels = 1;
                    var numberOfOutputChannels = 1;

                    if (context.createScriptProcessor) {
                        recorder = context.createScriptProcessor(
                            bufferSize,
                            numberOfInputChannels,
                            numberOfOutputChannels);
                    } else {
                        recorder = context.createJavaScriptNode(
                            bufferSize,
                            numberOfInputChannels,
                            numberOfOutputChannels);
                    }
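
                    // ScriptProcessorNode is deprecated in the Web Audio API (an AudioWorklet
                    // would be the modern replacement), and createJavaScriptNode is its older,
                    // pre-standard name kept only as a fallback for legacy browsers.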

                    recorder.onaudioprocess = function (e) {
                        var source = e.inputBuffer;
                        var res = ggwave.decode(instance, convertTypedArray(new Float32Array(source.getChannelData(0)), Int8Array));

                        if (res && res.length > 0) {
                            res = new TextDecoder("utf-8").decode(res);
                            rxData.value = res;
                        }

                        // obsolete javascript resampling
                        // since ggwave v0.2.0 the resampling is built into ggwave
                        //var offlineCtx = new OfflineAudioContext(source.numberOfChannels, 48000*source.duration, 48000);
                        //var offlineSource = offlineCtx.createBufferSource();

                        //offlineSource.buffer = source;
                        //offlineSource.connect(offlineCtx.destination);
                        //offlineSource.start();
                        //offlineCtx.startRendering();
                        //offlineCtx.oncomplete = function(e) {
                        //    var resampled = e.renderedBuffer.getChannelData(0);
                        //    var res = ggwave.decode(instance, convertTypedArray(new Float32Array(resampled), Int8Array));
                        //    if (res) {
                        //        rxData.value = res;
                        //    }
                        //};
                    }
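
                    // The handler above is invoked with bufferSize-sample chunks; ggwave.decode()
                    // is called on every chunk and, as the length check shows, returns a non-empty
                    // result only once a complete message has been decoded (the decoding state is
                    // kept inside the ggwave instance across calls).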

                    mediaStream.connect(recorder);
                    recorder.connect(context.destination);
                }).catch(function (e) {
                    console.error(e);
                });

                rxData.value = 'Listening ...';
                captureStart.hidden = true;
                captureStop.hidden = false;
            });

            captureStop.addEventListener("click", function () {
                if (recorder) {
                    recorder.disconnect(context.destination);
                    mediaStream.disconnect(recorder);
                    recorder = null;
                }

                rxData.value = 'Audio capture is paused! Press the "Start capturing" button to analyze audio from the microphone';
                captureStart.hidden = false;
                captureStop.hidden = true;
            });
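
            // simulate a press of "Stop capturing" on load to set the initial UI state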
            captureStop.click();
        </script>
    </body>
</html>