Merge branch 'dev'
cbossut committed May 3, 2021
2 parents a50cd8f + f011629 commit d065bb9
Showing 19 changed files with 2,062 additions and 775 deletions.
1 change: 1 addition & 0 deletions elm.json
@@ -9,6 +9,7 @@
             "Chadtech/id": "4.2.0",
             "NoRedInk/elm-json-decode-pipeline": "1.0.0",
             "avh4/elm-color": "1.0.0",
+            "debois/elm-dom": "1.3.0",
             "elm/browser": "1.0.2",
             "elm/core": "1.0.2",
             "elm/file": "1.0.5",
6 changes: 5 additions & 1 deletion index.js
@@ -105,7 +105,11 @@ const internCallback = staticRoute({dir:__dirname, tryfiles:['ports.html']})
         res.end()
         return;
       }
-      fs.renameSync(files.file.path, soundPath + files.file.name)
+      let path = soundPath
+        + (fields.from ? fields.from + '-' : '')
+        + (fields.type ? fields.type + '/' : '')
+      if (!fs.existsSync(path)) fs.mkdirSync(path, {recursive:true})
+      fs.renameSync(files.file.path, path + files.file.name)
       res.end()
     })
   } else {
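Side note: the index.js change above routes an uploaded file into a sub-directory derived from the multipart form fields. A minimal sketch of that path logic, not part of the commit, assuming soundPath ends with a trailing slash (e.g. './sons/'); the field values in the comments are invented for illustration:

// Illustration only: how the destination directory is composed before mkdirSync/renameSync.
function destinationDir(soundPath, fields) {
  return soundPath
    + (fields.from ? fields.from + '-' : '')   // optional uploader prefix
    + (fields.type ? fields.type + '/' : '')   // optional type sub-directory
}
// destinationDir('./sons/', {from: 'mic', type: 'loop'}) -> './sons/mic-loop/'
// destinationDir('./sons/', {})                          -> './sons/'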
146 changes: 81 additions & 65 deletions ports.js
@@ -19,41 +19,49 @@ function sendSize(entries) {
   app.ports.newSVGSize.send(entries[0].contentRect)
 }
 
-function drawSound(soundName) {
-  if (buffers[soundName]) {
-    drawSamples(Array.from(buffers[soundName].getChannelData(0))) // TODO mix channels ?
-    app.ports.soundDrawn.send(soundName)
-  } else console.log(soundName + ' isn’t loaded, cannot draw')
+function drawSound(sv) {
+  let buf = buffers[sv.soundPath]
+  if (buf) {
+    let size = Math.round(buf.length / sv.zoomFactor)
+      , start = Math.round(buf.length * sv.startPercent)
+      , drawFunc = () => { // TODO mix channels ?
+          drawSamples('waveform', Array.from(buf.getChannelData(0).slice(start, start + size)))
+          if (sv.waveformMap) drawSamples('waveformMap', Array.from(buf.getChannelData(0)))
+        }
+    if (sv.wait) setTimeout(drawFunc, 10)
+    else drawFunc()
+    app.ports.soundDrawn.send(sv.soundPath)
+  } else console.log(sv.soundPath + ' isn’t loaded, cannot draw')
 }
 
-function loadSound(soundName) {
-  if (buffers[soundName]) {
-    app.ports.soundLoaded.send(soundName + ' already Loaded')
+function loadSound(soundPath) {
+  if (buffers[soundPath]) {
+    app.ports.soundLoaded.send(soundPath + ' already Loaded')
   } else {
-    createBuffer(soundName).then(b => {
-      buffers[soundName] = b
-      loadOk(soundName)
-    }).catch(err => loadErr(err, soundName))
+    createBuffer(soundPath).then(b => {
+      buffers[soundPath] = b
+      loadOk(soundPath)
+    }).catch(err => loadErr(err, soundPath))
   }
 }
 
-async function createBuffer(soundName) {
-  const response = await fetch('./sons/' + soundName)
+async function createBuffer(soundPath) {
+  const response = await fetch('./sons/' + soundPath)
       , arrayBuffer = await response.arrayBuffer()
       , audioBuffer = await ctx.decodeAudioData(arrayBuffer)
   return audioBuffer
 }
 
-function loadOk(soundName) {
+function loadOk(soundPath) {
   app.ports.soundLoaded.send(
-    { path : soundName
-    , length : buffers[soundName].duration
+    { path : soundPath
+    , length : buffers[soundPath].duration
     })
 }
 
-function loadErr(err, soundName) {
+function loadErr(err, soundPath) {
   console.error(err)
-  app.ports.soundLoaded.send(soundName + ' got ' + err)
+  app.ports.soundLoaded.send(soundPath + ' got ' + err)
 }
 
 function toggleRecord(bool) {
@@ -78,26 +86,32 @@ function openMic() {
   }).catch(console.error)
 }
 
-function inputRec(name) {
+function inputRec(args) {
+  let name = args[0]
+    , start = args[1]
   if (name) {
     micRecorder.stop()
-    micRecorder.exportWAV(bl => app.ports.gotNewSample.send(new File([bl], name + ".wav", {type: "audio/wav"})))
+    micRecorder.exportWAV(bl => app.ports.gotNewSample.send(
+      { type : "rec"
+      , file : new File([bl], name + ".wav", {type: "audio/wav"})
+      }))
     micRecorder.clear()
     recording = false
+    if (!scheduler.running) ctx.suspend()
   } else {
     if (mic) {
-      ctx.resume()
+      if (start) ctx.resume()
       micRecorder.record()
       recording = true
    } else console.error("won’t record mic if it ain’t opened !")
  }
 }
 
 function cutSample(infos) {
-  if (!buffers[infos.fromFileName]) {console.error(infos.fromFileName + " ain’t loaded, cannot cut");return;}
+  if (!buffers[infos.fromSoundPath]) {console.error(infos.fromFileName + " ain’t loaded, cannot cut");return;}
 
-  let buf = buffers[infos.fromFileName]
+  let buf = buffers[infos.fromSoundPath]
   // TODO maybe round ?
     , start = infos.percents[0] * buf.length - 1
    , end = infos.percents[1] * buf.length + 1
    , newBuf = new AudioBuffer(
@@ -111,7 +125,11 @@ function cutSample(infos) {
     newBuf.copyToChannel(chan, i)
   }
 
-  app.ports.gotNewSample.send(new File([audioBufferToWav(newBuf)], infos.newFileName + ".wav", {type: "audio/wav"}))
+  app.ports.gotNewSample.send(
+    { type : "cut"
+    , from : infos.fromSoundPath
+    , file : new File([audioBufferToWav(newBuf)], infos.newFileName + ".wav", {type: "audio/wav"})
+    })
 }
 
 function engine(o) {
@@ -150,53 +168,51 @@ function engine(o) {
   }
 }
 
-function drawSamples(samples) {
-  setTimeout(() => {
-    let canvas = document.getElementById('waveform')
-      , ctx = canvas.getContext('2d')
-      , {width, height} = canvas
-      , pxPerSample = width / samples.length
-
-    ctx.clearRect(0, 0, width, height)
-
-    ctx.strokeStyle = 'black'
-    ctx.beginPath()
-    ctx.moveTo(0, height / 2)
-    ctx.lineTo(width, height / 2)
-    ctx.stroke()
-
-    ctx.strokeRect(0, 0, width, height)
-
-    if (pxPerSample < 0.5) {
-      for (let x = 0 ; x < width ; x++) {
-        let px = samples.slice(Math.floor(x / pxPerSample), Math.floor((x + 1) / pxPerSample))
-          , minPoint = (Math.min.apply(null, px) + 1) * height / 2
-          , maxPoint = (Math.max.apply(null, px) + 1) * height / 2
-        ctx.strokeStyle = 'black'
-        ctx.beginPath()
-        ctx.moveTo(x, minPoint)
-        ctx.lineTo(x, maxPoint)
-        ctx.stroke()
-
-        let rms = Math.sqrt(px.reduce((acc,v,i,a) => acc + Math.pow(v, 2)) / px.length)
-          , minRmsPoint = (1 - rms) * height / 2
-          , maxRmsPoint = (1 + rms) * height / 2
-        if (minRmsPoint > minPoint && maxRmsPoint < maxPoint) {
-          ctx.strokeStyle = 'gray'
-          ctx.beginPath()
-          ctx.moveTo(x, minRmsPoint)
-          ctx.lineTo(x, maxRmsPoint)
-          ctx.stroke()
-        }
-      }
-    } else {
-      ctx.strokeStyle = 'black'
-      ctx.beginPath()
-      ctx.moveTo(0, (samples[0] + 1) * height / 2)
-      for (let i = 1 ; i < samples.length ; i++) {
-        ctx.lineTo(i * pxPerSample, (samples[i] + 1) * height / 2)
-      }
-      ctx.stroke()
-    }
-  }, 10)
-}
+function drawSamples(id, samples) {
+  let canvas = document.getElementById(id)
+    , ctx = canvas.getContext('2d')
+    , {width, height} = canvas
+    , pxPerSample = width / samples.length
+
+  ctx.clearRect(0, 0, width, height)
+
+  ctx.strokeStyle = 'black'
+  ctx.beginPath()
+  ctx.moveTo(0, height / 2)
+  ctx.lineTo(width, height / 2)
+  ctx.stroke()
+
+  ctx.strokeRect(0, 0, width, height)
+
+  if (pxPerSample < 0.5) {
+    for (let x = 0 ; x < width ; x++) {
+      let px = samples.slice(Math.floor(x / pxPerSample), Math.floor((x + 1) / pxPerSample))
+        , minPoint = (Math.min.apply(null, px) + 1) * height / 2
+        , maxPoint = (Math.max.apply(null, px) + 1) * height / 2
+      ctx.strokeStyle = 'black'
+      ctx.beginPath()
+      ctx.moveTo(x, minPoint)
+      ctx.lineTo(x, maxPoint)
+      ctx.stroke()
+
+      let rms = Math.sqrt(px.reduce((acc,v,i,a) => acc + Math.pow(v, 2)) / px.length)
+        , minRmsPoint = (1 - rms) * height / 2
+        , maxRmsPoint = (1 + rms) * height / 2
+      if (minRmsPoint > minPoint && maxRmsPoint < maxPoint) {
+        ctx.strokeStyle = 'gray'
+        ctx.beginPath()
+        ctx.moveTo(x, minRmsPoint)
+        ctx.lineTo(x, maxRmsPoint)
+        ctx.stroke()
+      }
+    }
+  } else {
+    ctx.strokeStyle = 'black'
+    ctx.beginPath()
+    ctx.moveTo(0, (samples[0] + 1) * height / 2)
+    for (let i = 1 ; i < samples.length ; i++) {
+      ctx.lineTo(i * pxPerSample, (samples[i] + 1) * height / 2)
+    }
+    ctx.stroke()
+  }
+}
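Side note: the reworked drawSound above now renders only a window of the decoded buffer into the 'waveform' canvas, sized by sv.zoomFactor and positioned by sv.startPercent, and optionally redraws the full buffer into 'waveformMap'. A rough worked example of that window arithmetic; the numbers are invented for illustration and are not taken from the commit:

// Hypothetical figures: a 10 s buffer at 44100 Hz, zoomed 4x, starting a quarter of the way in.
const length = 441000                            // buf.length in samples
    , zoomFactor = 4
    , startPercent = 0.25
    , size = Math.round(length / zoomFactor)     // 110250 samples shown in the 'waveform' canvas
    , start = Math.round(length * startPercent)  // the window begins at sample 110250
// drawSound slices buf.getChannelData(0) to [start, start + size) for 'waveform',
// and still hands the whole channel to 'waveformMap' when sv.waveformMap is set.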
16 changes: 8 additions & 8 deletions scheduler.js
@@ -48,7 +48,7 @@ let scheduler = {
     this.running = false
 
     let stopWheel = model => {
-      if (model.soundName) {
+      if (model.soundPath) {
         model.players.forEach(pl => pl.node.stop())
       }
       if (model.collar || model.mobile) {
@@ -63,7 +63,7 @@ let scheduler = {
       model.view.moveTo(0)
     }
 
-    if (!recording) ctx.suspend()
+    ctx.suspend()
 
     this.intervalId = -1
     this.nextRequestId = -1
@@ -97,7 +97,7 @@ let scheduler = {
     } // TODO volume should rather be in dB
     model.updateVolume()
 
-    if (model.soundName) {
+    if (model.soundPath) {
       // WARNING in model, startPercent is of whole sound, here it’s of content
       model.startPercent = (model.startPercent - model.loopPercents[0]) / (model.loopPercents[1] - model.loopPercents[0])
 
@@ -108,7 +108,7 @@ let scheduler = {
         , scheduler.lookAhead
       )
     }
-    model.buffer = buffers[model.soundName]
+    model.buffer = buffers[model.soundPath]
     // TODO beware, buffer duration could differ from saved duration in Elm model (due to resampling)
     // probably it’s preferable to use saved duration from elm
     // but, is it compensated by downward TODO ? (in schedulePlayer)
@@ -229,7 +229,7 @@ let scheduler = {
       t = nextState.date // Bring back the time and undo
       if (t <= now) console.error("undoing the past, now : " + now + " scheduler : " + t)
 
-      if (model.soundName) {
+      if (model.soundPath) {
        for (let pl of model.players) {
          if (pl.startTime <= t && t <= pl.stopTime) {
            pl.node.stop(this.toCtxTime(t))
@@ -277,7 +277,7 @@ let scheduler = {
 
      } else { // Normal pause
 
-        if (model.soundName) {
+        if (model.soundPath) {
          if (nextState.date <= t) { // No need to play more, even partially
 
            nextState.percent = clampPercent(0 - model.startPercent)
@@ -335,7 +335,7 @@ let scheduler = {
 
      } else { // And keep playing
 
-        if (model.soundName) {
+        if (model.soundPath) {
          let newPlayers = this.scheduleLoop(t, max, model)
          model.players = model.players.concat(newPlayers)
          t = model.players[model.players.length - 1].stopTime
@@ -363,7 +363,7 @@ let scheduler = {
 
    let contentPercent = clampPercent(lastState.percent + model.startPercent)
 
-    if (model.soundName) {
+    if (model.soundPath) {
      let offsetDur = contentPercent * model.duration + model.loopStartDur
        , newPlayer = this.scheduleStart(t, model, offsetDur)
      model.players.push(newPlayer)
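Side note: taken together with the inputRec change in ports.js, scheduler.stop() now suspends the AudioContext unconditionally, and recording manages its own suspend/resume (resume only when asked to start, suspend after a recording stops if the scheduler is idle). A minimal sketch of that hand-off using the standard Web Audio API; the function names are placeholders, not the project's:

// Placeholder names; only ctx.resume()/ctx.suspend() mirror the actual change.
const ctx = new AudioContext()

function beginRecording(start) {
  if (start) ctx.resume()               // wake the context only when explicitly asked to
  // micRecorder.record() ...
}

function endRecording(schedulerRunning) {
  // micRecorder.stop(), exportWAV, clear ...
  if (!schedulerRunning) ctx.suspend()  // let it sleep again unless playback still needs the clock
}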
