Procedural arp harp episode 1
I don't know where I left the original algorithm, but now I want to reconstruct the generative procedure used by the first self-playing harp that I made. This code mostly simulates a harp situation, and finally there is the still-unfinished procedural playback algorithm. I might revise it in the future.
A few ways it still falls short:
- choosing keys automatically and playing in key
- randomization of the parameters throughout time
- the original was quite wild, this one feels somewhat tamer
// @ts-check
// Bootstrapping: create a canvas inside the #code-target container, bailing
// out loudly when either the container or the 2d context is unavailable.
const canvas = document.createElement('canvas');
const container = document.getElementById('code-target');
// NOTE(review): restartButton is created but never attached or used anywhere
// in this file — confirm whether it is leftover or planned.
const restartButton = document.createElement('button');
if (!container) {
    throw new Error('Container not found');
}
const context = canvas.getContext('2d');
if (!context) {
    throw new Error('Canvas context not found');
}
container.appendChild(canvas);
/**
 * Shows a centered "- Click to start -" prompt and resolves on the first
 * click anywhere in the window, removing the prompt again.
 * @returns {Promise<MouseEvent>} the click event that dismissed the prompt
 */
const waitUserInput = () => {
    context.fillStyle = 'white';
    const rect = container.getBoundingClientRect();
    const [midX, midY] = [rect.width / 2, rect.height / 2];
    const prompt = document.createElement("div");
    container.appendChild(prompt);
    prompt.innerHTML = '- Click to start -';
    prompt.style.position = 'absolute';
    prompt.style.top = midY + 'px';
    prompt.style.left = midX + 'px';
    prompt.style.transform = 'translate(-50%, -50%)';
    prompt.style.fontSize = '2em';
    return new Promise((resolve) => {
        const onClick = (event) => {
            window.removeEventListener('click', onClick);
            resolve(event);
            prompt.remove();
        };
        window.addEventListener('click', onClick);
    });
};
/**
 * Enum-like tags for the kinds of synth parameters exposed to the UI.
 * Frozen so the shared constant cannot be mutated at runtime.
 */
const ParamType = Object.freeze({
    number: 1,
    progress: 2,
    infoText: 3,
});
/**
* @typedef {Object} Arpeggiator
* @property {()=>void} step
* @property {(stringIndex: number) => void} trigger
* @property {number} count
* @property {number} reset
* @property {number} offsetValue
* @property {number} multiplier
* @property {Arpeggiator[]} resetChild
* @property {Arpeggiator[]} stepChild
* @property {Arpeggiator[]} stepOnResetChild
* @property {Arpeggiator[]} offsetChild
*/
/**
 * Owns shared arpeggiator state (string range and mask) and manufactures
 * interconnected arpeggiator nodes.
 */
class ArpeggiatorsContext {
    minimumString = 0;
    maximumString = 0;
    /** @type {boolean[]} */
    stringsMask = [];
    /**
     * Builds one arpeggiator node. Each call to step():
     *   1. increments the counter and derives offsetValue + count * multiplier,
     *   2. steps every stepChild,
     *   3. on wrap-around (count >= reset) zeroes the counter and steps the
     *      resetChild and stepOnResetChild lists,
     *   4. pushes the derived value into each offsetChild's offsetValue,
     *   5. fires trigger() with the derived value.
     * @returns {Arpeggiator}
     */
    createArpeggiator = () => {
        /**
         * Applies `fn` to every arpeggiator in `list`.
         * @param {Arpeggiator[]} list
         * @param {(child: Arpeggiator) => void} fn
         */
        const forEachChild = (list, fn) => {
            for (const child of list) {
                fn(child);
            }
        };
        const arpeggiator = {
            count: 0,
            offsetValue: 0,
            multiplier: 1,
            reset: 3,
            // Kept for object-shape compatibility; nothing visible reads it.
            child: undefined,
            resetChild: [],
            stepChild: [],
            stepOnResetChild: [],
            offsetChild: [],
            // Replaced externally; receives the derived string index.
            trigger: (stringIndex) => {
            },
            step() {
                this.count++;
                const derived = this.offsetValue + this.count * this.multiplier;
                forEachChild(this.stepChild, (c) => c.step());
                if (this.count >= this.reset) {
                    this.count = 0;
                    forEachChild(this.resetChild, (c) => c.step());
                    if (this.stepOnResetChild) {
                        forEachChild(this.stepOnResetChild, (c) => c.step());
                    }
                }
                forEachChild(this.offsetChild, (c) => { c.offsetValue = derived; });
                this.trigger(derived);
            }
        };
        return arpeggiator;
    }
}
/**
 * A single sample-playback voice: one AudioBufferSourceNode feeding a gain
 * node that realizes a linear ADSR envelope.
 * @param {AudioContext} audioContext
 * @param {SampleSource[]} [sampleSources=[]] candidate samples, ideally
 *   covering several pitches (and optionally velocities)
 * @returns {SynthVoice}
 */
const samplerVoice = (audioContext, sampleSources = []) => {
    // NOTE(review): never reassigned within this factory, so the
    // velocity-to-start-point feature is inert at voice level; the
    // sampler-level parameter of the same name is not forwarded to voices —
    // confirm the intended wiring.
    let velocityToStartPoint = 0;
    /** @type {AudioBufferSourceNode | undefined} */
    let bufferSource;
    const output = audioContext.createGain();
    output.gain.value = 0;
    // Absolute-time cursor advanced while scheduling envelope ramps.
    let timeAccumulator = 0;
    // [attack, decay, sustain, release] of the most recent note.
    let currentAdsr = [0, 0, 0, 0];
    const voiceState = {
        inUse: false,
    };
    const cancelScheduledValues = () => {
        output.gain.cancelScheduledValues(0);
    };
    /**
     * Tears down the current buffer source (if any) and, when a sample is
     * provided, prepares a fresh AudioBufferSourceNode for it.
     * @param {SampleSource} [sampleSource]
     */
    const resetBufferSource = (sampleSource) => {
        if (bufferSource) {
            bufferSource.removeEventListener("ended", releaseVoice);
            bufferSource.disconnect();
            bufferSource.stop();
            bufferSource = undefined;
        }
        if (!sampleSource) return;
        cancelScheduledValues();
        bufferSource = audioContext.createBufferSource();
        bufferSource.connect(output);
        if (!sampleSource.sampleBuffer) return console.warn(new Error("sample buffer not loaded"));
        bufferSource.buffer = sampleSource.sampleBuffer;
    };
    /**
     * Finds the loaded sample whose inherent frequency is closest to the
     * requested one. When a velocity is given, only samples recorded at or
     * above that velocity (or with no recorded velocity) are eligible; if
     * none qualify, falls back to a pure frequency match.
     * @param {number} frequency
     * @param {number} [velocity]
     * @returns {SampleSource}
     */
    const findSampleSourceClosestToFrequency = (frequency, velocity) => {
        if (sampleSources.length == 1) return sampleSources[0];
        if (velocity == undefined) {
            let closest = sampleSources[0];
            let closestDifference = Math.abs(frequency - closest.sampleInherentFrequency);
            for (let i = 1; i < sampleSources.length; i++) {
                const sampleSource = sampleSources[i];
                if (!sampleSource.isLoaded) continue;
                const difference = Math.abs(frequency - sampleSource.sampleInherentFrequency);
                if (difference < closestDifference) {
                    closest = sampleSource;
                    closestDifference = difference;
                }
            }
            return closest;
        }
        const candidates = sampleSources.filter((sampleSource) => {
            // Samples without a recorded velocity match any requested velocity.
            if (!sampleSource.sampleInherentVelocity) return true;
            return sampleSource.sampleInherentVelocity >= velocity;
        });
        if (candidates.length == 0) {
            // BUGFIX: the fallback result used to be computed and discarded,
            // after which candidates[0] (undefined) was dereferenced below.
            return findSampleSourceClosestToFrequency(frequency);
        }
        let closest = candidates[0];
        let closestDifference = Math.abs(frequency - closest.sampleInherentFrequency);
        for (let i = 1; i < candidates.length; i++) {
            const sampleSource = candidates[i];
            if (!sampleSource.isLoaded) continue;
            const difference = Math.abs(frequency - sampleSource.sampleInherentFrequency);
            if (difference < closestDifference) {
                closest = sampleSource;
                closestDifference = difference;
            }
        }
        return closest;
    };
    /** Marks the voice reusable and releases its buffer source. */
    const releaseVoice = () => {
        voiceState.inUse = false;
        resetBufferSource();
    };
    const stop = () => {
        releaseVoice();
    };
    return {
        /**
         * Starts a note at the given absolute time.
         * @param {number} frequency target pitch in Hz
         * @param {number} absoluteStartTime in AudioContext time
         * @param {{velocity: number, adsr: number[]}} noteParameters
         */
        scheduleStart(frequency, absoluteStartTime, { velocity, adsr, }) {
            currentAdsr = adsr;
            if (voiceState.inUse) throw new Error("Polyphony fail: voice already in use");
            // If the requested start is already in the past, skip into the
            // sample by however late we are.
            const noteStartedTimeAgo = audioContext.currentTime - absoluteStartTime;
            let skipSample = noteStartedTimeAgo > 0 ? noteStartedTimeAgo : 0;
            voiceState.inUse = true;
            const sampleSource = findSampleSourceClosestToFrequency(frequency, velocity);
            resetBufferSource(sampleSource);
            if (!bufferSource) throw new Error("bufferSource not created");
            // Repitch the chosen sample toward the requested frequency.
            bufferSource.playbackRate.value = frequency / sampleSource.sampleInherentFrequency;
            output.gain.value = 0;
            timeAccumulator = absoluteStartTime;
            output.gain.setValueAtTime(0, timeAccumulator);
            timeAccumulator += currentAdsr[0];
            output.gain.linearRampToValueAtTime(velocity, timeAccumulator);
            timeAccumulator += currentAdsr[1];
            output.gain.linearRampToValueAtTime(/**value!*/ currentAdsr[2], timeAccumulator);
            if (velocityToStartPoint) {
                if (velocity > 1) {
                    console.error("velocity > 1");
                }
                // Softer notes start further into the sample.
                skipSample += velocityToStartPoint * (1 - velocity);
            }
            bufferSource.start(absoluteStartTime, skipSample);
            bufferSource.addEventListener("ended", releaseVoice);
            return this;
        },
        /**
         * Schedules the release ramp down to silence.
         * @param {number} absoluteEndTime in AudioContext time
         */
        scheduleEnd(absoluteEndTime) {
            output.gain.linearRampToValueAtTime(0, absoluteEndTime + currentAdsr[3]);
            return this;
        },
        stop,
        output,
        get inUse() {
            return voiceState.inUse;
        },
        set inUse(value) {
            voiceState.inUse = value;
        }
    };
};
/**
 * One sample file: knows the pitch it was recorded at, and can lazily fetch
 * and decode its audio data into `sampleBuffer`.
 */
class SampleSource {
    /** @private */
    audioContext;
    // Decoded audio data, populated once load() completes.
    sampleBuffer;
    // Pitch (Hz) the sample was recorded at.
    sampleInherentFrequency;
    // NOTE(review): never assigned in this class — confirm whether sample
    // definitions were meant to carry a velocity.
    sampleInherentVelocity;
    /** @default false */
    isLoaded = false;
    /** @default false */
    isLoading = false;
    /** @default () => Promise<void> */
    load = async () => {
        console.error("samplesource constructed wrong");
    };
    /**
     * @param {AudioContext} audioContext
     * @param {SampleFileDefinition} sampleDefinition
     */
    constructor(audioContext, sampleDefinition) {
        this.audioContext = audioContext;
        this.sampleInherentFrequency = sampleDefinition.frequency;
        this.load = async () => {
            if (this.isLoaded || this.isLoading) {
                throw new Error("redundant load call");
            }
            this.isLoading = true;
            const response = await fetch(sampleDefinition.path, {
                cache: "default",
            });
            // Log cache-related response headers to show how fresh the file is.
            console.groupCollapsed("header: " + sampleDefinition.path);
            for (const [key, value] of response.headers) {
                if (key.match('date')) {
                    console.log("loaded:", (Date.now() - Date.parse(value)) / 1000 / 60, " minutes ago");
                } else if (key.match('cache-control')) {
                    console.log(key + ":", value);
                }
            }
            console.groupEnd();
            const arrayBuffer = await response.arrayBuffer();
            this.sampleBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
            this.isLoaded = true;
            this.isLoading = false;
        };
    }
}
/**
 * Builds a polyphonic one-shot sampler: a pool of samplerVoice instances
 * sharing one output gain node, plus UI-facing parameters.
 * @param {AudioContext} audioContext
 * @param {SampleFileDefinition[]} sampleDefinitions
 * @param {string} [name]
 * @param {string} [credits] attribution text; not displayed anywhere yet
 * @returns sampler object exposing name/params/output/schedulePerc/stop
 */
const makeOneShotSampler = (audioContext, sampleDefinitions, name, credits) => {
    /** Voice pool, grown on demand by allocateVoice. */
    const instances = [];
    // Count of finished sample loads, surfaced through the progress param.
    let loadingProgress = 0;
    // NOTE(review): set by a param below but never forwarded to voices —
    // confirm intended wiring with samplerVoice's value of the same name.
    let velocityToStartPoint = 0;
    // [attack, decay, sustain, release]; elements mutated via params.
    const adsr = [0.01, 10, 0, 0.2];
    const output = audioContext.createGain();
    output.gain.value = 0.3;
    const sampleSources = sampleDefinitions.map((sampleDefinition) => {
        return new SampleSource(audioContext, sampleDefinition);
    });
    // Kick off all sample loads in parallel; each completion bumps progress.
    sampleSources.forEach(async (sampleSource) => {
        if (sampleSource.isLoading || sampleSource.isLoaded) return;
        await sampleSource.load();
        loadingProgress += 1;
    });
    /** @type {SynthParam[]} */
    const params = [{
        displayName: "Level",
        type: ParamType.number,
        min: 0, max: 4,
        get value() {
            if (!output) {
                console.warn("output node not set");
                return 1;
            }
            return output.gain.value;
        },
        set value(value) {
            if (!output) return;
            output.gain.value = value;
        },
    }, {
        displayName: "Loading progress",
        type: ParamType.progress,
        min: 0, max: sampleDefinitions.length,
        get value() {
            return loadingProgress;
        },
    }, {
        displayName: "Velocity to start point, seconds",
        type: ParamType.number,
        min: 0, max: 3,
        get value() {
            return velocityToStartPoint;
        },
        set value(value) {
            velocityToStartPoint = value;
        },
        curve: 'log',
    }];
    // One slider per ADSR stage.
    adsr.forEach((v, i) => {
        params.push({
            displayName: ['attack', 'decay', 'sustain', 'release'][i],
            type: ParamType.number,
            min: 0, max: 10,
            get value() {
                return adsr[i];
            },
            set value(value) {
                adsr[i] = value;
            },
            curve: 'log',
        });
    });
    /** Returns an idle voice from the pool, or null when all are busy. */
    const findFreeVoice = () => instances.find((voice) => !voice.inUse) || null;
    /** Creates a voice and wires it into the shared output. */
    const createVoice = () => {
        const voice = samplerVoice(audioContext, sampleSources);
        if (voice.output) voice.output.connect(output);
        return voice;
    };
    /** Reuses an idle voice or grows the pool by one. */
    const allocateVoice = () => {
        const freeVoice = findFreeVoice();
        if (freeVoice) return freeVoice;
        const voice = createVoice();
        instances.push(voice);
        return voice;
    };
    /** Stops every voice in the pool. */
    const stop = () => {
        instances.forEach((voice) => voice.stop());
    };
    /**
     * Schedules a percussive (fire-and-forget) note.
     * @param {number} frequency
     * @param {number} absoluteStartTime
     * @param {EventParamsBase & {adsr?:number[]}} noteParameters
     * @returns {SynthVoice}
     */
    const schedulePerc = (frequency, absoluteStartTime, noteParameters) => {
        const voice = allocateVoice();
        // Apply the default envelope without mutating the caller's object
        // (previously done through a side-effecting ternary expression).
        const parameters = noteParameters.adsr
            ? noteParameters
            : { ...noteParameters, adsr: [0, 5, 0, 5] };
        voice.scheduleStart(frequency, absoluteStartTime, parameters);
        // NOTE(review): the release is scheduled adsr[0] (attack) seconds
        // after the start — confirm whether another stage was intended.
        voice.scheduleEnd(absoluteStartTime + adsr[0]);
        return voice;
    };
    return {
        name,
        params,
        output,
        schedulePerc,
        stop,
    };
};
/**
 * Expands a directory path plus a list of file names into sample
 * definitions.
 * NOTE(review): every file gets frequency 440 even though the file names
 * look like frequencies — confirm whether parsing them was intended.
 * @param {string} path
 * @param {string[]} files
 */
const makeSamplerDefinitions = (path, files) => {
    return files.map((file) => ({
        name: file,
        frequency: 440,
        path: path + file,
    }));
};
/**
 * All strings of the simulated harp, in creation (pitch) order. Each entry
 * can draw itself each frame and be plucked via trigger().
 * @type {{
 * pos: Vector2,
 * frame: (dt: number, context: CanvasRenderingContext2D) => void,
 * makeSound: (frequency: number) => SynthVoice | void,
 * trigger: () => void | SynthVoice
 * }[]}
 */
const listOfStrings = [];
/**
 * Creates one visual + sounding string for the given frequency and registers
 * it in listOfStrings. The x position is derived from the string's index
 * once the final list size is known (deferred to the first frame).
 * @param {number} frequency
 */
const makeString = (frequency) => {
    const index = listOfStrings.length;
    /** @type {Vector2} */
    const pos = [0, 0];
    // Remaining vibration "energy": set to 10 on trigger, decays each frame.
    let phase = 0;
    // Number of segments used to draw the vibrating string.
    const divs = 100;
    let initd = false;
    let start_y = 0;
    let end_y = 1;
    const frequencyFraction = frequency / 100;
    /**
     * @type {SynthVoice | void}
     */
    let currentVoice;
    /**
     * Draws the string: a straight vertical line at rest, a sine-wobbled
     * polyline while phase > 0.
     * @param {number} dt delta time in ms
     * @param {CanvasRenderingContext2D} context
     */
    const frame = (dt, context) => {
        if (!initd) {
            // Deferred init: listOfStrings.length is only final after every
            // string has been created.
            initd = true;
            pos[0] = (index + 0.5) / listOfStrings.length;
            pos[1] = 0;
            start_y = pos[1];
            end_y = pos[1] + 1;
        }
        context.beginPath();
        context.moveTo(...spaceToScreenCoords([pos[0], start_y]));
        if (phase <= 0) {
            phase = 0;
            context.lineTo(...spaceToScreenCoords([pos[0], end_y]));
        } else {
            phase -= dt * 0.005;
            // (Removed dead locals that read the voice's gain but were unused.)
            for (let i = 0; i < divs; i++) {
                const x = pos[0] + Math.sin(50 * phase + frequencyFraction * i / divs) * 0.001 * phase;
                const drawPoint = spaceToScreenCoords([
                    x, i / divs
                ]);
                if (i) {
                    context.lineTo(...drawPoint);
                } else {
                    context.moveTo(...drawPoint);
                }
            }
        }
        context.stroke();
    };
    const newThing = {
        pos,
        frame,
        makeSound: (frequency) => { console.log('no sound', frequency) },
        trigger() {
            // BUGFIX: keep the returned voice; it was previously discarded,
            // leaving currentVoice forever undefined.
            currentVoice = this.makeSound(frequency);
            phase = 10;
        }
    };
    listOfStrings.push(newThing);
    return newThing;
};
/**
 * Shared drawing state: the 2d context plus the current canvas size in
 * pixels (kept in sync by windowResizedListener).
 * @type {DrawScope}
 */
const drawScope = {
context,
width: 0,
height: 0,
}
// View transform between normalized "space" coordinates and canvas pixels;
// currently the identity (range 1, offset 0).
const view = {
range: [1, 1],
offset: [0, 0],
}
drawScope.width = canvas.width;
drawScope.height = canvas.height;
/**
 * Maps canvas pixel coordinates to normalized space coordinates.
 * NOTE(review): this is not an exact inverse of spaceToScreenCoords — there
 * the offset is added before scaling, here it is subtracted before dividing
 * by the canvas size. With the current view.offset of [0, 0] the difference
 * is unobservable; confirm before using non-zero offsets.
 * @param {Vector2} coords
 * @returns {Vector2}
 */
const screenToSpaceCoords = ([x, y]) => {
    const vx = x / view.range[0] - view.offset[0];
    const vy = y / view.range[1] - view.offset[1];
    return [vx / drawScope.width, vy / drawScope.height];
};
/**
 * Maps normalized space coordinates (roughly 0..1 across the view) to
 * canvas pixel coordinates, applying the view offset and range first.
 * @param {Vector2} coords
 * @returns {Vector2}
 */
const spaceToScreenCoords = ([x, y]) => {
    const vx = (x + view.offset[0]) * view.range[0];
    const vy = (y + view.offset[1]) * view.range[1];
    return [vx * drawScope.width, vy * drawScope.height];
};
// Timestamp of the previous animation frame, used to derive delta time.
let lastTime = new Date().getTime();
// Resizes the canvas to the container and mirrors the size into drawScope.
const windowResizedListener = () => {
const containerRect = container.getBoundingClientRect();
canvas.width = containerRect.width;
// NOTE(review): height is set from the container's width, which makes the
// canvas square — confirm this is intentional and not a typo for .height.
canvas.height = containerRect.width;
drawScope.width = canvas.width;
drawScope.height = canvas.height;
}
/**
 * Main entry point, run after the first user click (browsers require a user
 * gesture before audio may start). Creates the 32 strings, loads the harp
 * sampler, wires a small network of arpeggiators that pluck the strings,
 * exposes their parameters as sliders, and starts the animation loop.
 */
const start = () => {
console.log("start");
// 32 string frequencies: an equal-tempered chromatic series from 220 Hz.
const nums = new Array(32).fill(0).map((_, i) => 220 * Math.pow(2, i / 12));
nums.forEach((num) => {
makeString(num);
});
const audioContext = new AudioContext();
// NOTE(review): the file names look like per-sample frequencies, but
// makeSamplerDefinitions currently assigns every file a frequency of 440.
const oneShotSampler = makeOneShotSampler(
audioContext,
makeSamplerDefinitions('/forbidden-music/audio/Celtic_Harp/', [
'103.703.wav', '1249.631.wav', '1572.673.wav', '208.509.wav', '313.642.wav', '468.759.wav', '65.093.wav', '787.319.wav',
'97.635.wav', '1052.710.wav', '130.278.wav', '175.645.wav', '233.900.wav', '347.901.wav', '524.435.wav', '699.581.wav',
'833.920.wav', '116.091.wav', '146.869.wav', '1773.146.wav', '262.529.wav', '391.864.wav', '588.693.wav', '73.295.wav',
'87.270.wav', '1187.651.wav', '155.713.wav', '196.602.wav', '294.405.wav', '417.106.wav', '626.848.wav', '77.600.wav',
'936.842.wav'
]),
'celtic harp',
'by tarane468 ( https://freesound.org/people/tarane468/ )'
);
oneShotSampler.output.connect(audioContext.destination);
const arpeggiatorsContext = new ArpeggiatorsContext();
arpeggiatorsContext.maximumString = listOfStrings.length;
arpeggiatorsContext.stringsMask = listOfStrings.map(() => true);
// Arpeggiator network: divider drives base (stepped on divider wrap-around),
// base drives the two melody arps; offsets cascade so derived indices
// compound down the chain.
const dividerArp = arpeggiatorsContext.createArpeggiator();
const baseArp = arpeggiatorsContext.createArpeggiator();
const melo1Arp = arpeggiatorsContext.createArpeggiator();
const melo2Arp = arpeggiatorsContext.createArpeggiator();
dividerArp.stepOnResetChild = [baseArp];
dividerArp.reset = 3;
baseArp.reset = 5;
melo1Arp.reset = 3;
melo2Arp.reset = 3;
baseArp.resetChild = [melo1Arp, melo2Arp];
baseArp.stepChild = [melo1Arp, melo2Arp];
baseArp.offsetChild = [melo1Arp, melo2Arp];
dividerArp.offsetChild = [baseArp];
dividerArp.multiplier = 2;
melo1Arp.offsetChild = [melo2Arp];
// melo1Arp.trigger = (stringIndex) => {
// const string = listOfStrings[stringIndex];
// string ? string.trigger() : 0;
// }
// Only melo2 actually plucks strings; out-of-range indices are ignored.
melo2Arp.trigger = (stringIndex) => {
const string = listOfStrings[stringIndex];
string ? string.trigger() : 0;
}
// Each entry: [dotted accessor path, slider min, slider max].
const controllableValues = [
['dividerArp.multiplier', -5, 5],
['dividerArp.reset', 0, 10],
['dividerArp.offsetValue', 0, 10],
['baseArp.multiplier', -5, 5],
['baseArp.reset', 0, 10],
['baseArp.offsetValue', 0, 10],
['melo1Arp.multiplier', -5, 5],
['melo1Arp.reset', 0, 10],
// ['melo1Arp.offsetValue', 0, 10],
['melo2Arp.multiplier', -5, 5],
['melo2Arp.reset', 0, 10],
// ['melo1Arp.offsetValue', 0, 10],
];
/**
 * Resolves a dotted accessor path (e.g. 'baseArp.reset') against
 * baseObject; when `set` is provided the last segment is assigned instead.
 * Returns the read/assigned value, or undefined when the path breaks.
 * (Name looks like a typo of "accessString"; kept as-is for consistency
 * with the warning message below.)
 */
const accessSting = (baseObject, accessor, set = undefined) => {
const parts = accessor.split('.');
let current = baseObject;
for (let i = 0; i < parts.length; i++) {
const part = parts[i];
if (i == parts.length - 1) {
if (set !== undefined) {
current[part] = set;
return set;
}
return current[part];
}
current = current[part];
if (current === undefined) {
console.warn('accessSting', accessor, 'failed');
return undefined;
}
}
}
// Adapt each controllable path into a {name, value, min, max} controller
// that the slider builder understands.
const controllers = controllableValues.map(([valueAccessor, min, max]) => {
const baseObject = {
dividerArp,
baseArp,
melo1Arp,
melo2Arp,
};
return {
name: valueAccessor,
set value(value) {
accessSting(baseObject, valueAccessor, value);
},
get value() {
return accessSting(baseObject, valueAccessor);
},
min, max,
}
});
// NOTE(review): map used purely for side effects — forEach would be clearer.
controllers.map(appendControllerSlider);
// Master clock: one divider step every 250 ms drives the whole network.
setInterval(() => {
dividerArp.step();
}, 250);
// Give every string a voice on the shared sampler (index `i` is unused).
listOfStrings.forEach((string, i) => {
string.makeSound = (frequency) => {
return oneShotSampler.schedulePerc(
frequency,
audioContext.currentTime,
{ velocity: 1 }
);
}
});
/**
 * Clears the canvas and draws every string.
 * @param {number} time current time in ms
 * @param {DrawScope} drawScope destructured into context/width/height
 */
let draw = (time, {
context,
width,
height
}) => {
const deltaTime = time - lastTime;
lastTime = time;
context.strokeStyle = 'white';
context.fillStyle = 'white';
context.clearRect(0, 0, width, height);
listOfStrings.forEach((thing, i) => {
thing.frame(deltaTime, context);
});
}
// Self-scheduling animation loop.
const frame = () => {
const time = new Date().getTime();
draw(time, drawScope);
requestAnimationFrame(frame);
}
frame();
windowResizedListener();
}
// Shared container that all controller rows are appended to.
const controllersDiv = document.createElement('div');
container.appendChild(controllersDiv);
/**
 * Builds one labeled range-slider row bound to controller.value and appends
 * it to the shared controllers container.
 * @param {{
 * name: string,
 * value: number,
 * max: number,
 * min: number,
 * }} controller
 */
const appendControllerSlider = (controller) => {
    const row = document.createElement('div');
    const slider = document.createElement('input');
    const label = document.createElement('label');
    const readout = document.createElement('span');
    row.style.color = 'white';
    row.style.display = 'flex';
    label.textContent = controller.name;
    readout.textContent = controller.value.toString();
    slider.type = 'range';
    slider.min = controller.min.toString();
    slider.max = controller.max.toString();
    slider.value = controller.value.toString();
    slider.addEventListener('input', () => {
        controller.value = parseFloat(slider.value);
        readout.textContent = slider.value;
    });
    for (const el of [label, slider, readout]) {
        row.appendChild(el);
    }
    controllersDiv.appendChild(row);
};
// Wait for a user gesture before creating the AudioContext (required by
// browser autoplay policies), then boot everything.
waitUserInput().then(() => start());
addEventListener('resize', () => windowResizedListener());
windowResizedListener();
// Re-measure once all resources have loaded, in case layout shifted.
window.addEventListener('load', () => {
windowResizedListener();
});
/**
* @typedef {Object} DrawScope
* @property {CanvasRenderingContext2D} context - canvas context
* @property {number} width -
* @property {number} height -
*/
/**
* @typedef {[number, number]} Vector2
*/
/**
* @typedef {(
* audioContext: AudioContext,
* synthParams: SynthParam[]
* ) => VoiceGen} synthVoiceFactory
* @template {SynthVoice} VoiceGen
* @template [A=any]
*/
/**
* @typedef {Object} NumberSynthParam
* @property {number} type
* @property {number} value
* @property {string} displayName
* @property {number} min
* @property {number} max
* @property {number} [default]
* @property {(destTime: number, destValue: number) => void} [schedule]
* @property {(destTime: number, destValue: number) => void} [animate]
* @property {'linear' | 'log'} [curve]
*/
/**
* @typedef {Object} ProgressSynthParam
* @property {number} type
* @property {number} value
* @property {string} displayName
* @property {number} min
* @property {number} max
*/
/**
* @typedef {Object} InfoTextSynthParam
* @property {number} type
* @property {string} value
* @property {string} displayName
*/
/**
* @typedef {Object} SynthParamStored
* @property {string} displayName
* @property {any} value
*/
/**
* @typedef {Object} SynthParamMinimum
* @property {string} displayName
* @property {ParamType} type
* @property {number | string} value
* @property {boolean} exportable
*/
/**
* @typedef {NumberSynthParam |
* InfoTextSynthParam |
* ProgressSynthParam } SynthParam
*/
/** @typedef {ReturnType<typeof samplerVoice>} SamplerVoice */
/**
* @typedef {Object} EventParamsBase
* @property {number} velocity
*/
/**
* @typedef {Object} SynthVoice
* @property {GainNode} output
* @property {boolean} inUse
* @property {(frequency: number, absoluteStartTime: number, noteParameters: any) => SynthVoice} scheduleStart
* @property {(absoluteStopTime: number) => SynthVoice} scheduleEnd
* @property {() => void} stop
*/
/**
* @typedef {Object} SampleFileDefinition
* @property {string} name
* @property {number} frequency
* @property {string} path
*/