text to music
license: mit
border: no
scrolling: yes
height: 500
<!DOCTYPE html>
<html>
<head>
<style>
body { font-family: sans-serif; font-size: 14px; padding-right: 300px; }
textarea { height: 20em; }
input[type=submit] { display: inline-block; margin: 2em; }
textarea,
#now-playing {
box-sizing: border-box;
padding: .5em;
display: block;
width: 100%;
max-width: 60rem;
}
#now-playing {
margin-top: 1em;
overflow: hidden;
}
#now-playing:before {
content: "Scheduled: "attr(data-queued);
display: block;
}
#now-playing span {
display: block;
float: left;
clear: left;
font-size: 1em;
line-height: .9em;
margin-top: .5em;
}
#now-playing span:before {
content: attr(data-id);
display: inline-block;
border-radius: 1em;
width: 2em;
height: 2em;
line-height: 2em;
background: #000;
color: white;
text-align: center;
margin-right: .7em;
font-size: .7em;
vertical-align: middle;
}
</style>
</head>
<body>
<div id="app">
<!--<textarea id="text">A distinct</textarea>-->
<textarea id="text">A distinct use of the term sound from its use in physics is that in physiology and psychology, where the term refers to the subject of perception by the brain. The field of psychoacoustics is dedicated to such studies. Historically the word &quot;sound&quot; referred exclusively to an effect in the mind. Webster's 1947 dictionary defined sound as: &quot;that which is heard; the effect which is produced by the vibration of a body affecting the ear.&quot;[10] This meant (at least in 1947) the correct response to the question: &quot;if a tree falls in the forest with no one to hear it fall, does it make a sound?&quot; was &quot;no&quot;. However, owing to contemporary usage, definitions of sound as a physical effect are prevalent in most dictionaries. Consequently, the answer to the same question (see above) is now &quot;yes, a tree falling in the forest with no one to hear it fall does make a sound&quot;.</textarea>
<div>
<input id="play" type="submit" value="Play">
<input id="stop" type="submit" value="Stop">
</div>
<div>Now playing: <div id="now-playing"></div></div>
</div>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.6.1/dat.gui.min.js"></script>
<script>(function() {
'use strict';
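// Earlier single-note prototype, kept commented out for reference: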
// function playTone(f, start) {
// var tNow = audio.currentTime + start;
// var tAttack = tNow + .01;
// var tDecay = tAttack + .001;
//
// var gain = audio.createGain();
// gain.gain.setValueAtTime(0, tNow);
// gain.gain.linearRampToValueAtTime(1, tAttack);
// gain.gain.linearRampToValueAtTime(0, tDecay);
// gain.connect(master);
//
// var osc = audio.createOscillator();
// osc.type = 'sine';
// osc.frequency.value = f;
// osc.connect(gain);
// osc.start(tNow);
// osc.stop(tDecay);
// }
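// Plays a series of frequencies on a single oscillator. The gain envelope
// ramps up over `attack`, holds at 1 until `decay` seconds before the end,
// then ramps back down. Each note glides into the next during the first
// `blend` fraction of its length via an exponential frequency ramp.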
function playSeries(frequencies, start) {
var attack = _O_.audio.attack;
var decay = _O_.audio.decay;
var blend = _O_.audio.blend;
var length = _O_.audio.noteLength;
var tNow = Math.max(0, start);
var tAttack = tNow + attack;
var tPlayed = tNow + attack + frequencies.length * length - decay;
var tDecay = tNow + attack + frequencies.length * length;
var gain = _G_.audio.createGain();
gain.gain.setValueAtTime(0, tNow);
// Linear is used for a smoother attack.
gain.gain.linearRampToValueAtTime(1, tAttack);
if(tPlayed > 0) {
gain.gain.setValueAtTime(1, tPlayed);
}
gain.gain.linearRampToValueAtTime(0, tDecay);
gain.connect(_G_.destination);
var osc = _G_.audio.createOscillator();
osc.type = _O_.audio.waveShape;
osc.frequency.setValueAtTime(frequencies[0], tNow);
// var gain2 = _G_.audio.createGain();
// gain2.gain.value = .7;
// gain2.connect(gain);
osc.connect(gain);
osc.start(tNow);
osc.stop(tDecay);
// var osc2 = _G_.audio.createOscillator();
// osc2.type = _O_.audio.waveShape;
// osc2.frequency.setValueAtTime(frequencies[0], tNow);
// osc2.connect(gain);
// osc2.start(tNow);
// osc2.stop(tDecay);
var f0 = frequencies[0], f;
var i = -1;
while(++i < frequencies.length) {
f = frequencies[i];
if(_O_.audio.relative) {
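// Note: f0 + (frequencies[i] - f0) reduces to frequencies[i], so as
// written the "relative" option does not change the pitch.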
f = !i ? f0 : f0 + (frequencies[i] - f0);
}
osc.frequency.exponentialRampToValueAtTime(f, tNow + i * length + blend * length);
osc.frequency.setValueAtTime(f, tNow + (i+1) * length);
}
return osc;
}
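// Unused legacy scheduler; it depends on the commented-out playTone() above.
// The active path is playText2() below.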
function playText(text, spacing, pause, frequencies) {
var i = -1, offset = 0;
while(++i < text.length) {
if(!frequencies.hasOwnProperty(text[i])) {
offset += pause;
continue;
}
playTone(frequencies[text[i]], offset);
offset += spacing;
}
return offset;
}
function playText2(text, charMap, startOffset) {
function queue() {
_G_.stats.add({
queue: queueId,
stamp: _G_.audio.currentTime + startOffset,
label: word.join(''),
items: word
});
word = [];
}
var queueId = Date.now(), word = [], o = -1;
while(++o < text.length) {
if(charMap.hasOwnProperty(text[o])) {
word.push(text[o]);
continue;
}
if(word.length) {
queue();
}
}
if(word.length) {
queue();
}
return startOffset;
}
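// Tracks scheduled words through three stages: `queued` (not yet scheduled
// with the AudioContext), `loaded` (scheduled, waiting to start) and
// `playing` (audible). update() advances items between stages and renders
// the currently playing words into `element`.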
function playingStats(element) {
function processItem(item, offset) {
var dict = _G_.frequencies;
var hold = _O_.audio.noteLength;
var pause = _O_.audio.wordSpace;
var length = item.items.length * hold;
item.start = offset;
item.end = item.start + length;
var freqs = item.items.map(function(v) { return dict[v] });
var node = playSeries(freqs, item.start);
item.cancel = node.stop.bind(node);
return item.end + pause * hold;
}
function cancelItem(item) {
item.cancel();
}
var queued = [], loaded = [], playing = [], nextId = 0, currentOffset = _G_.audio.currentTime;
return {
reset: function() {
[].concat(playing, loaded).map(cancelItem);
queued = [].concat(playing, loaded, queued);
},
cancel: function() {
[].concat(playing, loaded).map(cancelItem);
playing = [];
loaded = [];
queued = [];
element.innerText = '';
},
update: function() {
function processQueue(queue, callback) {
var i = 0, item, newQueue = [];
while(item = queue[i++]) {
if(callback(item)) {
newQueue.push(item);
}
}
return newQueue;
}
var tNow = _G_.audio.currentTime;
var tPrefetch = tNow + 1;
// Schedule queued items up to the prefetch horizon (queued -> loaded).
queued = processQueue(queued, function(item) {
if(currentOffset > tPrefetch) {
return true;
}
currentOffset = processItem(item, currentOffset);
item.id = nextId++;
loaded.push(item);
});
// Once an item's start time arrives, move it from loaded to playing.
loaded = processQueue(loaded, function(item) {
if(item.start > tNow) {
return true;
}
playing.push(item);
});
while(element.firstChild) {
element.removeChild(element.firstChild);
}
// Once an item's end time passes, drop it from playing (POOF!).
element.dataset.queued = loaded.length + queued.length;
playing = processQueue(playing, function(item) {
if(item.end > tNow) {
var el = document.createElement('span');
el.innerText = item.label;
el.dataset.id = item.id;
element.appendChild(el);
return true;
}
});
},
add: function(item) {
currentOffset = currentOffset || item.stamp;
queued.push(item);
}
}
}
function textMapper() {
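// Maps a note index to a frequency in equal temperament:
// f(n) = base * 2^(s(n)/12), where s(n) sums the first n chromatic
// steps of `scale`, cycling through the scale as needed.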
function frequencyScale(base, scale) {
function getStep(n, scale) {
var s = 0;
while(n--) {
s += scale[n % scale.length];
}
return s;
}
var p = Math.pow(2, 1/12);
return function(n) {
return base * Math.pow(p, getStep(n, scale));
}
}
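// Fisher-Yates shuffle (currently unused).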
function shuffle(a) {
var i = a.length, j, tmp;
while(i--) {
j = ~~(Math.random() * (i + 1));
tmp = a[i];
a[i] = a[j];
a[j] = tmp;
}
}
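// Returns the text's characters ordered by frequency (most common first);
// ties are broken by first appearance in the text.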
function getSortedChars(text) {
var stats = {}, chars = [], i = -1, c;
while(++i < text.length) {
c = text[i];
if(!stats.hasOwnProperty(c)) {
stats[c] = {count: 0, offset: i};
chars.push(c);
}
stats[c].count++;
}
chars.sort(function(a, b) {
return stats[a].count === stats[b].count
// Sort offset ascending.
? stats[a].offset - stats[b].offset
// Sort count descending.
: stats[b].count - stats[a].count;
});
return chars;
}
function filterText(text) {
return text.split('')
.map(function(c) {
// Only keep letters and numbers.
if(c.toLowerCase() === c.toUpperCase() && !c.match(/\d/)) {
return '';
}
if(_O_.mapper.lowerCase) {
c = c.toLowerCase();
}
return c;
})
.join('');
}
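// Reorders chars so the most frequent ones land in the middle of the
// range: even indices are prepended, odd indices appended.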
function sortToCenter(chars) {
var sorted = [], i = -1;
while(++i < chars.length) {
i % 2 ? sorted.push(chars[i]) : sorted.unshift(chars[i]);
}
return sorted;
}
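// Maps each character to a note, spreading `chars` evenly across
// `_O_.mapper.notes` scale steps. With lowerCase enabled the uppercase
// variant shares the same frequency.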
function charsToScale(chars) {
var factor = (_O_.mapper.notes || chars.length) / (chars.length || 1);
return chars.reduce(function(o, v, i) {
o[v] = scale(Math.round(factor * i));
if(_O_.mapper.lowerCase) {
o[v.toUpperCase()] = o[v];
}
return o;
}, {});
}
var scale = frequencyScale(_O_.mapper.baseFrequency, _O_.mapper.scale);
return {
getFrequencies: function(text) {
var chars;
switch(_O_.mapper.method) {
case 'fixed': chars = 'aeioubcdfghjklmnpqrstvwxyz1234567890'.split(''); break;
case 'count': chars = getSortedChars(filterText(text)); break;
default: return {};
}
if(_O_.mapper.center) {
chars = sortToCenter(chars);
}
if(_O_.mapper.vowelBase) {
chars = 'aeiou'.split('')
.concat(chars)
.filter(function(c, i, a) { return a.indexOf(c) === i;});
}
if(_O_.mapper.reverse) {
chars = chars.reverse();
}
console.log(chars.join(' '));
return charsToScale(chars);
}
}
}
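// Builds the dat.GUI panel; mapper-related controls rebuild the
// character-to-frequency map on change.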
function initDatGui(options) {
var waveShapes = ['sine', 'triangle', 'square', 'sawtooth'];
var biquadTypes = ['lowpass', 'highpass', 'bandpass', 'lowshelf', 'highshelf', 'peaking', 'notch', 'allpass'];
var resetMapper = function() {
if(typeof _O_.mapper.scale === 'string') {
_O_.mapper.scale = _O_.mapper.scale.split(',').map(parseFloat);
}
_G_.mapper = textMapper();
_G_.frequencies = _G_.mapper.getFrequencies(_G_.eText.value);
};
var ui = new dat.GUI();
ui.add(options.audio, 'gain').min(.1).max(.7).step(.01).onChange(function(v) {
_G_.master.gain.linearRampToValueAtTime(v, _G_.audio.currentTime + .1);
});
ui.add(options.mapper, 'baseFrequency').min(20).max(300).step(5).onChange(resetMapper);
ui.add(options.mapper, 'scale', _G_.chromaticScales).onChange(resetMapper);
ui.add(options.audio, 'waveShape', waveShapes).onChange(resetMapper);
ui.add(options.audio, 'noteLength').min(.1).max(1).step(.01);
ui.add(options.audio, 'wordSpace').min(-3).max(3).step(1);
ui.add(options.audio, 'attack').min(.01).max(2).step(.01);
ui.add(options.audio, 'decay').min(.01).max(2).step(.01);
ui.add(options.audio, 'blend').min(.01).max(.99).step(.01);
ui.add(options.audio, 'relative');
ui.add(options.mapper, 'method', ['count', 'fixed']).onChange(resetMapper);
ui.add(options.mapper, 'lowerCase').onChange(resetMapper);
ui.add(options.mapper, 'center').onChange(resetMapper);
ui.add(options.mapper, 'reverse').onChange(resetMapper);
ui.add(options.mapper, 'vowelBase').onChange(resetMapper);
ui.add(options.mapper, 'notes').min(5).max(30).step(1).onChange(resetMapper);
// ui.add(_G_.filter, 'type', biquadTypes);
// ui.add(_G_.filter.Q, 'value').min(.0001).max(10).name('q low');
// ui.add(_G_.filter.Q, 'value').min(.0001).max(1000).name('q high');
// ui.add(_G_.filter.frequency, 'value').min(0).max(5000).name('frequency');
// ui.add(_G_.filter.gain, 'value').min(0).max(25).name('gain');
}
var chromaticScales = {
pentaMajor: [ 2, 2, 3, 2, 3],
pentaMinor: [ 3, 2, 2, 3, 2],
custom: [ 1, 2, 2, 1, 2, 2, 2]
};
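// _O_ holds all user-tweakable options (bound to dat.GUI);
// _G_ (below) holds shared runtime state.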
var _O_ = {
audio: {
// Base gain.
gain: .3,
// Waveform type.
waveShape: 'triangle',
// Length per character in sec.
noteLength: .17,
// Pause between words, in character count.
// Be careful with values < -2.
wordSpace: -1,
// Envelope attack.
attack: .01,
// Envelope decay.
decay: .05,
// Frequency blending as percentage of length.
blend: .15,
// Trigger additional play calls on beat.
lockstep: true,
relative: false,
filter: {
type: 'highpass',
frequency: 0,
q: 0,
gain: 0
}
},
mapper: {
// Base frequency.
baseFrequency: 55 * 2,
// Steps on chromatic scale.
scale: chromaticScales.pentaMajor,
method: 'count',
// Treat uppercase and lowercase as identical.
lowerCase: true,
// Arrange notes around the center of the range.
center: false,
notes: 20,
reverse: false,
// Move vowels to the front.
vowelBase: false
}
};
var _G_ = {options: _O_};
_G_.chromaticScales = chromaticScales;
_G_.audio = new (window.AudioContext || window.webkitAudioContext)();
_G_.master = _G_.audio.createGain();
_G_.master.gain.setValueAtTime(_O_.audio.gain, 0);
_G_.master.connect(_G_.audio.destination);
_G_.destination = _G_.master;
// _G_.filter = _G_.audio.createBiquadFilter();
// _G_.filter.type = _O_.audio.filter.type;
// _G_.filter.Q.setValueAtTime(_O_.audio.filter.q, 0);
// _G_.filter.frequency.setValueAtTime(_O_.audio.filter.frequency, 0);
// _G_.filter.gain.setValueAtTime(_O_.audio.filter.gain, 0);
// _G_.filter.connect(_G_.master);
// _G_.destination = _G_.filter;
_G_.stats = playingStats(document.getElementById('now-playing'));
initDatGui(_O_);
_G_.mapper = textMapper();
_G_.frequencies = {};
_G_.beatOffset = 0;
_G_.eText = document.getElementById('text');
document.getElementById('play').addEventListener('click', function() {
var step = _O_.audio.noteLength, offset = 0;
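// With lockstep enabled, delay the start so it lands on the next multiple
// of the note length relative to the first play, keeping overlapping
// playbacks on the same beat grid.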
if(_O_.audio.lockstep) {
offset = _G_.beatOffset ? step - (_G_.audio.currentTime - _G_.beatOffset) % step : 0;
_G_.beatOffset = _G_.audio.currentTime + offset;
}
_G_.frequencies = _G_.mapper.getFrequencies(_G_.eText.value);
playText2(_G_.eText.value, _G_.frequencies, offset);
});
document.getElementById('stop').addEventListener('click', function() {
_G_.stats.cancel();
});
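// Poll every 100ms to schedule upcoming words and refresh the
// "now playing" display.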
var render = function() {
_G_.stats.update();
setTimeout(render, 100);
};
render();
window.G = _G_;
window.O = _O_;
}());</script>
</body>
</html>