From b03d58c981840e13ca5a243bcc5fadd7df798ad9 Mon Sep 17 00:00:00 2001
From: Jim O'Mulloy
Date: Mon, 13 Mar 2023 13:38:36 +0000
Subject: [PATCH] Synthesis

---
 .../jomu/instrument/audio/AudioTuner.java     |  9 +--
 .../audio/analysis/Autocorrelation.java       | 57 ++++++++++++++++++
 .../audio/features/SACFFeatures.java          | 31 +++++-----
 .../instrument/audio/features/SACFInfo.java   | 18 ++++--
 .../instrument/audio/features/SACFSource.java |  2 +-
 .../cognition/cell/AudioSACFProcessor.java    |  7 ---
 .../cell/AudioSynthesisProcessor.java         | 19 ++++++
 .../control/InstrumentParameterNames.java     |  4 +-
 .../workspace/tonemap/NoteTracker.java        | 55 +++++++++------
 .../workspace/tonemap/TonePredictor.java      | 58 ++++++++++++++---
 .../workspace/tonemap/ToneTimeFrame.java      | 11 +++-
 .../desktop/monitor/swing/BeatsView.java      | 22 -------
 .../desktop/monitor/swing/ChromaView.java     | 22 -------
 .../monitor/swing/ParametersPanel.java        | 19 ++++++
 .../desktop/monitor/swing/ToneMapView.java    | 22 -------
 .../src/main/resources/instrument.properties  |  4 +-
 16 files changed, 226 insertions(+), 134 deletions(-)

diff --git a/instrument-core/src/main/java/jomu/instrument/audio/AudioTuner.java b/instrument-core/src/main/java/jomu/instrument/audio/AudioTuner.java
index 10c057ed..8fd5f693 100644
--- a/instrument-core/src/main/java/jomu/instrument/audio/AudioTuner.java
+++ b/instrument-core/src/main/java/jomu/instrument/audio/AudioTuner.java
@@ -440,8 +440,8 @@ public boolean noteScan(ToneMap toneMap, int sequence) {
                 if (amplitude >= noteHighThresholdhWithHysteresis / 100.0) {
                     noteStatusElement.highFlag = true;
                 }
-                LOG.finer(">>>Note scan ON - PENDING NEW NOTE PARTIAL CONTINUING seq: " + sequence + ", " + note
-                        + ", " + time + ", " + amplitude + ", " + noteOffThresholdhWithHysteresis);
+                LOG.finer(">>>Note scan ON - PENDING NEW NOTE PARTIAL CONTINUING seq: " + sequence + ", "
+                        + note + ", " + time + ", " + amplitude + ", " + noteOffThresholdhWithHysteresis);
                 // Process partial note here
                 processNote(toneMap, noteStatusElement, processedNotes);
                 noteStatusElement.state = ON;
@@ -523,7 +523,8 @@ public boolean noteScan(ToneMap toneMap, int sequence) {
                     previousToneMapElement.noteState = OFF;
                     previousNoteStatusElement.state = OFF;
                     // Process candidate note
-                    LOG.finer(">>>Note scan PENDING low - PROCESS NEW NOTE OFF seq: " + sequence + ", " + note);
+                    LOG.finer(
+                            ">>>Note scan PENDING low - PROCESS NEW NOTE OFF seq: " + sequence + ", " + note);
                     processNote(toneMap, previousNoteStatusElement, processedNotes);
                     noteStatusElement.state = OFF;
                     noteStatusElement.onTime = 0.0;
@@ -873,7 +874,7 @@ private void processNote(ToneMap toneMap, NoteStatusElement noteStatusElement,
         // Cross-Register NoteList element against ToneMapMatrix elements
         for (ToneTimeFrame toneTimeFrame : timeFrames) {
-            if (toneTimeFrame.getStartTime() > noteStatusElement.offTime) {
+            if (toneTimeFrame.getStartTime() >= noteStatusElement.offTime / 1000.0) {
                 break;
             }
             ToneMapElement element = toneTimeFrame.getElement(noteStatusElement.index);
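A note on the changed cut-off in processNote(): tone-map frame start times are in seconds (elsewhere in this patch frame times are derived with getStart() / 1000.0), while NoteStatusElement.offTime appears to be carried in milliseconds, which is presumably why the comparison now divides by 1000.0 and uses >=. A tiny, self-contained illustration of that unit alignment (the values below are made up):

    // Sketch only: aligns the units used by the changed comparison above.
    public final class OffTimeUnitsSketch {
        public static void main(String[] args) {
            // Assumption (suggested by the / 1000.0): offTime is in milliseconds,
            // while ToneTimeFrame.getStartTime() is in seconds.
            double frameStartSeconds = 1.25;
            double noteOffMillis = 1250.0;
            boolean pastNoteEnd = frameStartSeconds >= noteOffMillis / 1000.0;
            System.out.println(pastNoteEnd); // true -> cross-registration stops at this frame
        }
    }

Without the division the loop would compare seconds against milliseconds and break out almost immediately, so the note would never be registered against its later frames.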
diff --git a/instrument-core/src/main/java/jomu/instrument/audio/analysis/Autocorrelation.java b/instrument-core/src/main/java/jomu/instrument/audio/analysis/Autocorrelation.java
index a73460bf..9975e815 100644
--- a/instrument-core/src/main/java/jomu/instrument/audio/analysis/Autocorrelation.java
+++ b/instrument-core/src/main/java/jomu/instrument/audio/analysis/Autocorrelation.java
@@ -2,6 +2,7 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.logging.Logger;
 
 import org.apache.commons.math3.complex.Complex;
 import org.apache.commons.math3.transform.DftNormalization;
@@ -14,8 +15,12 @@
  * R(t) = IFFT(S(f))
  *
  */
 public class Autocorrelation {
+
+    private static final Logger LOG = Logger.getLogger(Autocorrelation.class.getName());
+
     public double[] correlations; // Autocorrelation
     public double maxACF = 0; // Max autocorrelation peak
+    public int length = 0;
     private FastFourierTransformer fftTran = new FastFourierTransformer(DftNormalization.STANDARD);
     private double ACF_THRESH = 0.2; // Minimum correlation threshold
@@ -59,6 +64,7 @@ private double[] formatData(double[] data) {
     /* Calculate autocorrelation for the given list of Datum */
     public void evaluate(double[] data) {
         double[] values = formatData(data);
+        length = values.length;
         // FFT
         fft = fftTran.transform(values, TransformType.FORWARD);
         // Multiply by complex conjugate
@@ -72,6 +78,57 @@ public void evaluate(double[] data) {
         for (int i = 1; i < maxLag; i++) {
             correlations[i] = fft[i].getReal() / fft[0].getReal();
         }
+
+        sacfCorrelations();
+    }
+
+    private void sacfCorrelations() {
+        double[] clippedCorrelations = new double[correlations.length];
+        double[] enhancedCorrelations = new double[correlations.length * 2];
+        double[] enhanced2Correlations = new double[correlations.length * 4];
+
+        for (int i = 0; i < correlations.length; i++) {
+            if (correlations[i] < 0) {
+                clippedCorrelations[i] = 0;
+            } else {
+                clippedCorrelations[i] = correlations[i];
+            }
+        }
+
+        for (int i = 0; i < clippedCorrelations.length; i++) {
+            enhancedCorrelations[i * 2] = clippedCorrelations[i];
+            if (i > 0) {
+                enhancedCorrelations[(i * 2) - 1] = clippedCorrelations[i - 1]
+                        + (clippedCorrelations[i] - clippedCorrelations[i - 1]) / 2;
+            }
+            // clippedCorrelations[i] -= enhancedCorrelations[i];
+            // if (clippedCorrelations[i] < 0) {
+            // clippedCorrelations[i] = 0;
+            // }
+        }
+
+        for (int i = 0; i < clippedCorrelations.length; i++) {
+            enhanced2Correlations[i * 4] = clippedCorrelations[i];
+            if (i > 0) {
+                enhanced2Correlations[(i * 4) - 1] = clippedCorrelations[i - 1]
+                        + 3 * (clippedCorrelations[i] - clippedCorrelations[i - 1]) / 4;
+                enhanced2Correlations[(i * 4) - 2] = clippedCorrelations[i - 1]
+                        + 2 * (clippedCorrelations[i] - clippedCorrelations[i - 1]) / 4;
+                enhanced2Correlations[(i * 4) - 3] = clippedCorrelations[i - 1]
+                        + (clippedCorrelations[i] - clippedCorrelations[i - 1]) / 4;
+            }
+            // clippedCorrelations[i] -= enhanced2Correlations[i];
+            // if (clippedCorrelations[i] < 0) {
+            // clippedCorrelations[i] = 0;
+            // }
+        }
+
+        for (int i = 0; i < correlations.length; i++) {
+            correlations[i] = correlations[i] - enhancedCorrelations[i] - enhanced2Correlations[i];
+            if (correlations[i] < 0) {
+                correlations[i] = 0;
+            }
+        }
+    }
 
     public int getLength() {
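The new sacfCorrelations() pass is essentially an enhanced summary autocorrelation: negative values are clipped to zero, the clipped curve is time-stretched by factors of 2 and 4 (the interpolated enhancedCorrelations and enhanced2Correlations arrays), and the stretched copies are subtracted and re-clipped so that peaks sitting at multiples of the true period are suppressed. A minimal standalone sketch of the same clip/stretch/subtract idea (class and method names are illustrative, not part of the codebase):

    // Sketch only: clip negatives, subtract the 2x- and 4x-stretched copies, clip again.
    public final class EnhancedAcfSketch {

        // 'acf' is a normalised autocorrelation (lag 0 == 1.0); returns the enhanced curve.
        public static double[] enhance(double[] acf) {
            int n = acf.length;
            double[] clipped = new double[n];
            for (int i = 0; i < n; i++) {
                clipped[i] = Math.max(0, acf[i]);
            }
            double[] out = new double[n];
            for (int i = 0; i < n; i++) {
                // Linear interpolation at lag i/2 and i/4 reproduces the stretched copies
                // that sacfCorrelations() builds explicitly in the double-length arrays.
                double stretched2 = interpolate(clipped, i / 2.0);
                double stretched4 = interpolate(clipped, i / 4.0);
                // The patch subtracts from the raw curve; subtracting from the clipped one
                // differs only where the raw curve was already negative.
                out[i] = Math.max(0, clipped[i] - stretched2 - stretched4);
            }
            return out;
        }

        private static double interpolate(double[] values, double x) {
            int lo = (int) Math.floor(x);
            int hi = Math.min(lo + 1, values.length - 1);
            double frac = x - lo;
            return values[lo] * (1.0 - frac) + values[hi] * frac;
        }
    }

Removing the 2x- and 4x-stretched copies attenuates spurious peaks at two and four times the detected period, which is what lets the later peak picking favour the fundamental lag.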
diff --git a/instrument-core/src/main/java/jomu/instrument/audio/features/SACFFeatures.java b/instrument-core/src/main/java/jomu/instrument/audio/features/SACFFeatures.java
index 1c3b7160..98f21ae7 100644
--- a/instrument-core/src/main/java/jomu/instrument/audio/features/SACFFeatures.java
+++ b/instrument-core/src/main/java/jomu/instrument/audio/features/SACFFeatures.java
@@ -1,8 +1,6 @@
 package jomu.instrument.audio.features;
 
-import java.util.HashSet;
 import java.util.Map.Entry;
-import java.util.Set;
 import java.util.logging.Logger;
 
 import jomu.instrument.workspace.tonemap.PitchSet;
@@ -29,22 +27,6 @@ void initialise(AudioFeatureFrame audioFeatureFrame) {
         this.features = getSource().getAndClearFeatures();
     }
 
-    public float[] getSpectrum() {
-        float[] spectrum = new float[getSource().getWindowSize() / 2 + 1];
-        Set<Integer> peakIndexes = new HashSet<>();
-        for (SACFInfo feature : features.values()) {
-            for (int peak : feature.peaks) {
-                peakIndexes.add((int) feature.correlations[peak]);
-            }
-        }
-        for (int i = 0; i < spectrum.length; i++) {
-            if (peakIndexes.contains(i)) {
-                spectrum[i] = 1.0F;
-            }
-        }
-        return spectrum;
-    }
-
     public void buildToneMapFrame(ToneMap toneMap) {
 
         if (features.size() > 0) {
@@ -67,6 +49,19 @@ public void buildToneMapFrame(ToneMap toneMap) {
 
             ToneTimeFrame ttf = new ToneTimeFrame(timeSet, pitchSet);
             toneMap.addTimeFrame(ttf);
+
+            if (features.size() > 0) {
+                for (SACFInfo feature : features.values()) {
+                    for (int peak : feature.peaks) {
+                        // float frequency = feature.getLength() / peak;
+                        float frequency = getSource().getSampleRate() / peak;
+                        int tmIndex = pitchSet.getIndex(frequency);
+                        ttf.getElement(tmIndex).amplitude += feature.correlations[peak];
+                    }
+                }
+                ttf.reset();
+            }
+
         } else {
             double timeStart = this.audioFeatureFrame.getStart() / 1000.0;
             double timeEnd = this.audioFeatureFrame.getEnd() / 1000.0;
diff --git a/instrument-core/src/main/java/jomu/instrument/audio/features/SACFInfo.java b/instrument-core/src/main/java/jomu/instrument/audio/features/SACFInfo.java
index 7e7ec71a..3ff329bb 100644
--- a/instrument-core/src/main/java/jomu/instrument/audio/features/SACFInfo.java
+++ b/instrument-core/src/main/java/jomu/instrument/audio/features/SACFInfo.java
@@ -27,14 +27,16 @@ public class SACFInfo {
 
     List<Integer> peaks;
-    double[] correlations;
-    double maxACF = 0;
-
-    public SACFInfo(List<Integer> peaks, double[] correlations, double maxACF) {
+    double[] correlations;
+    double maxACF = 0;
+    int length;
+
+    public SACFInfo(List<Integer> peaks, double[] correlations, double maxACF, int length) {
         super();
         this.peaks = peaks;
         this.correlations = correlations;
         this.maxACF = maxACF;
+        this.length = length;
     }
 
     public List<Integer> getPeaks() {
@@ -45,11 +47,15 @@ public double[] getCorrelations() {
         return correlations;
     }
 
+    public int getLength() {
+        return length;
+    }
+
     public double getMaxACF() {
         return maxACF;
     }
-
+
     public SACFInfo clone() {
-        return new SACFInfo(peaks, correlations, maxACF);
+        return new SACFInfo(peaks, correlations, maxACF, length);
     }
 }
diff --git a/instrument-core/src/main/java/jomu/instrument/audio/features/SACFSource.java b/instrument-core/src/main/java/jomu/instrument/audio/features/SACFSource.java
index 83816930..f479f4de 100644
--- a/instrument-core/src/main/java/jomu/instrument/audio/features/SACFSource.java
+++ b/instrument-core/src/main/java/jomu/instrument/audio/features/SACFSource.java
@@ -117,7 +117,7 @@ void initialise() {
             public boolean process(AudioEvent audioEvent) {
                 ac.evaluate(convertFloatsToDoubles(audioEvent.getFloatBuffer()));
                 List<Integer> sacfPeaks = ac.findPeaks();
-                SACFInfo sacfInfo = new SACFInfo(sacfPeaks, ac.correlations, ac.maxACF);
+                SACFInfo sacfInfo = new SACFInfo(sacfPeaks, ac.correlations, ac.maxACF, ac.length);
                 SACFSource.this.putFeature(audioEvent.getTimeStamp(), sacfInfo);
                 return true;
             }
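In the reworked buildToneMapFrame(), each SACF peak is a lag in samples and is mapped to a frequency with sampleRate / lag before its correlation value is accumulated at the matching pitch index. A small self-contained illustration of that mapping (the MIDI conversion is only for illustration; the patch itself uses PitchSet.getIndex):

    // Sketch only: convert an ACF peak lag (in samples) to a frequency and a MIDI note.
    public final class LagToPitchSketch {
        public static void main(String[] args) {
            float sampleRate = 44100.0f;
            int peakLag = 100; // lag in samples at which the SACF peaks

            float frequency = sampleRate / peakLag; // 441 Hz in this example
            int midiNote = (int) Math.round(69 + 12 * (Math.log(frequency / 440.0) / Math.log(2)));

            System.out.println(frequency + " Hz -> MIDI " + midiNote); // ~441 Hz -> MIDI 69 (A4)
        }
    }

The commented-out alternative, feature.getLength() / peak, only agrees with sampleRate / peak when the analysis window is exactly one second of samples, which is presumably why it is left disabled.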
diff --git a/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSACFProcessor.java b/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSACFProcessor.java
index 16407758..70ae18bb 100644
--- a/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSACFProcessor.java
+++ b/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSACFProcessor.java
@@ -6,7 +6,6 @@
 import jomu.instrument.audio.features.AudioFeatureFrame;
 import jomu.instrument.audio.features.AudioFeatureProcessor;
 import jomu.instrument.audio.features.SACFFeatures;
-import jomu.instrument.workspace.tonemap.FFTSpectrum;
 import jomu.instrument.workspace.tonemap.ToneMap;
 
 public class AudioSACFProcessor extends ProcessorCommon {
@@ -29,12 +28,6 @@ public void accept(List messages) throws Exception {
         SACFFeatures features = aff.getSACFFeatures();
 
         features.buildToneMapFrame(toneMap);
-        float[] spectrum = features.getSpectrum();
-
-        FFTSpectrum fftSpectrum = new FFTSpectrum(features.getSource().getSampleRate(),
-                features.getSource().getBufferSize(), spectrum);
-
-        toneMap.getTimeFrame().loadFFTSpectrum(fftSpectrum);
         console.getVisor().updateToneMapView(toneMap, this.cell.getCellType().toString());
         cell.send(streamId, sequence);
     }
diff --git a/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSynthesisProcessor.java b/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSynthesisProcessor.java
index 60a6459d..fca759a3 100644
--- a/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSynthesisProcessor.java
+++ b/instrument-core/src/main/java/jomu/instrument/cognition/cell/AudioSynthesisProcessor.java
@@ -4,7 +4,11 @@
 import java.util.logging.Logger;
 
 import jomu.instrument.cognition.cell.Cell.CellTypes;
+import jomu.instrument.control.InstrumentParameterNames;
+import jomu.instrument.workspace.tonemap.ChordListElement;
 import jomu.instrument.workspace.tonemap.ToneMap;
+import jomu.instrument.workspace.tonemap.TonePredictor;
+import jomu.instrument.workspace.tonemap.ToneTimeFrame;
 
 public class AudioSynthesisProcessor extends ProcessorCommon {
 
@@ -20,10 +24,25 @@ public void accept(List messages) throws Exception {
         int sequence = getMessagesSequence(messages);
         LOG.finer(">>AudioSynthesisProcessor accept: " + sequence + ", streamId: " + streamId);
 
+        boolean synthesisSwitchChords = parameterManager
+                .getBooleanParameter(InstrumentParameterNames.PERCEPTION_HEARING_SYNTHESIS_CHORDS_SWITCH);
+
         ToneMap synthesisToneMap = workspace.getAtlas().getToneMap(buildToneMapKey(this.cell.getCellType(), streamId));
         ToneMap notateToneMap = workspace.getAtlas().getToneMap(buildToneMapKey(CellTypes.AUDIO_NOTATE, streamId));
+        ToneMap chromaToneMap = workspace.getAtlas().getToneMap(buildToneMapKey(CellTypes.AUDIO_POST_CHROMA, streamId));
         synthesisToneMap.addTimeFrame(notateToneMap.getTimeFrame(sequence).clone());
 
+        if (synthesisSwitchChords) {
+            TonePredictor chordPredictor = chromaToneMap.getTonePredictor();
+            ToneTimeFrame chromaFrame = chromaToneMap.getTimeFrame(sequence);
+
+            chordPredictor.predictChord(chromaFrame);
+            ChordListElement chord = chromaFrame.getChord();
+            if (chord != null) {
+                chromaToneMap.trackChord(chord);
+            }
+        }
+
         console.getVisor().updateToneMapView(synthesisToneMap, this.cell.getCellType().toString());
         cell.send(streamId, sequence);
     }
diff --git a/instrument-core/src/main/java/jomu/instrument/control/InstrumentParameterNames.java b/instrument-core/src/main/java/jomu/instrument/control/InstrumentParameterNames.java
index 43c0562a..e29342ca 100644
--- a/instrument-core/src/main/java/jomu/instrument/control/InstrumentParameterNames.java
+++ b/instrument-core/src/main/java/jomu/instrument/control/InstrumentParameterNames.java
@@ -106,7 +106,9 @@ public class InstrumentParameterNames {
     public static final String PERCEPTION_HEARING_HPS_MASK_FACTOR = "perception.hearing.hps.maskFactor";
     public static final String PERCEPTION_HEARING_HPS_CQ_ORIGIN_SWITCH = "perception.hearing.hps.cqOriginSwitch";
 
-    public static final String PERCEPTION_HEARING_INTEGRATION_HPS_SWITCH = "perception.hearing.integration.hpsSwitchSwitch";
+    public static final String PERCEPTION_HEARING_INTEGRATION_HPS_SWITCH = "perception.hearing.integration.hpsSwitch";
+
+    public static final String PERCEPTION_HEARING_SYNTHESIS_CHORDS_SWITCH = "perception.hearing.synthesis.chordsSwitch";
 
     public static final String PERCEPTION_HEARING_ONSET_SMOOTHING_FACTOR = "perception.hearing.onset.smoothingFactor";
     public static final String PERCEPTION_HEARING_ONSET_EDGE_FACTOR = "perception.hearing.onset.edgeFactor";
diff --git a/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/NoteTracker.java b/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/NoteTracker.java
index e2573baf..1d2a505e 100644
--- a/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/NoteTracker.java
+++ b/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/NoteTracker.java
@@ -125,33 +125,41 @@ public NoteTrack getTrack(NoteListElement noteListElement) {
     }
 
     private NoteTrack getSalientTrack(NoteTrack[] candidateTracks, NoteListElement noteListElement) {
-        double maxSalience = -1, salience = -1;
-        NoteTrack salientTrack = null;
+        NoteTrack pitchSalientTrack = null;
+        NoteTrack timeSalientTrack = null;
+        int pitchProximity = Integer.MAX_VALUE;
+        double timeProximity = Double.MAX_VALUE;
         for (NoteTrack track : candidateTracks) {
             NoteListElement lastNote = track.getLastNote();
-            salience = calculateSalience(noteListElement, lastNote);
-            if (salience > maxSalience) {
-                salientTrack = track;
-                maxSalience = salience;
+            if (pitchProximity > noteListElement.note - lastNote.note) {
+                pitchProximity = noteListElement.note - lastNote.note;
+                pitchSalientTrack = track;
+            }
+            if (timeProximity > noteListElement.startTime - lastNote.endTime) {
+                timeProximity = noteListElement.startTime - lastNote.endTime;
+                timeSalientTrack = track;
             }
+            // double timbreFactor = noteListElement.noteTimbre. - lastNote.endTime;
         }
-        return salientTrack;
+        if (pitchSalientTrack == timeSalientTrack) {
+            return pitchSalientTrack;
+        }
+        if ((noteListElement.note - timeSalientTrack.getLastNote().note) > 2
+                * (noteListElement.note - pitchSalientTrack.getLastNote().note)) {
+            return pitchSalientTrack;
+        }
+        return timeSalientTrack;
     }
 
     private NoteTrack getPendingSalientTrack(NoteTrack[] candidateTracks, NoteListElement noteListElement) {
-        double maxSalience = -1, salience = -1;
         NoteTrack salientTrack = null;
         for (NoteTrack track : candidateTracks) {
             NoteListElement lastNote = track.getLastNote();
             NoteListElement penultimateNote = track.getPenultimateNote();
             if (penultimateNote != null) {
                 if (compareSalience(noteListElement, lastNote, penultimateNote)) {
-                    salience = calculateSalience(noteListElement, lastNote);
-                    if (salience > maxSalience) {
-                        salientTrack = track;
-                        maxSalience = salience;
-                    }
+                    salientTrack = track;
                 }
             }
         }
@@ -160,13 +168,20 @@ private NoteTrack getPendingSalientTrack(NoteTrack[] candidateTracks, NoteListEl
 
     private boolean compareSalience(NoteListElement newNote, NoteListElement lastNote,
             NoteListElement penultimateNote) {
-        double salienceNewNote = calculateSalience(newNote, penultimateNote);
-        double salienceCurrentNote = calculateSalience(lastNote, penultimateNote);
-        return salienceNewNote > salienceCurrentNote;
-    }
-
-    private double calculateSalience(NoteListElement noteListElement, NoteListElement lastNote) {
-        return 1.0;
+        int pitchProximity = Integer.MAX_VALUE;
+        double timeProximity = Double.MAX_VALUE;
+        pitchProximity = newNote.note - penultimateNote.note;
+        timeProximity = newNote.startTime - penultimateNote.endTime;
+        if (pitchProximity >= lastNote.note - penultimateNote.note) {
+            if (timeProximity >= lastNote.startTime - penultimateNote.endTime) {
+                return false;
+            }
+        } else {
+            if (timeProximity < lastNote.startTime - penultimateNote.endTime) {
+                return true;
+            }
+        }
+        return true;
     }
 
     private NoteTrack[] getPendingTracks(NoteListElement noteListElement) {
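The reworked getSalientTrack() drops the old constant-salience placeholder and keeps two candidates instead: the track whose last note is nearest in pitch and the track whose last note is nearest in time, falling back to the time-nearest track unless its pitch gap is more than twice that of the pitch-nearest track. A compact standalone sketch of that selection rule (simplified record types, not the project's classes; note that, as in the patch, the differences are signed rather than absolute):

    import java.util.List;

    // Sketch only: pitch-versus-time proximity rule used when assigning a note to a track.
    public final class TrackSelectionSketch {

        record Note(int midi, double startTime, double endTime) {}

        record Track(Note lastNote) {}

        static Track pickTrack(List<Track> candidates, Note incoming) {
            Track byPitch = null, byTime = null;
            int bestPitchGap = Integer.MAX_VALUE;
            double bestTimeGap = Double.MAX_VALUE;
            for (Track track : candidates) {
                int pitchGap = incoming.midi() - track.lastNote().midi();
                double timeGap = incoming.startTime() - track.lastNote().endTime();
                if (pitchGap < bestPitchGap) {
                    bestPitchGap = pitchGap;
                    byPitch = track;
                }
                if (timeGap < bestTimeGap) {
                    bestTimeGap = timeGap;
                    byTime = track;
                }
            }
            if (byPitch == byTime) {
                return byPitch;
            }
            // Prefer the time-nearest track unless its pitch gap is more than twice the
            // gap of the pitch-nearest track (mirrors the patch's 2x rule).
            int timeTrackGap = incoming.midi() - byTime.lastNote().midi();
            int pitchTrackGap = incoming.midi() - byPitch.lastNote().midi();
            return (timeTrackGap > 2 * pitchTrackGap) ? byPitch : byTime;
        }
    }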
diff --git a/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/TonePredictor.java b/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/TonePredictor.java
index 2cf0d4c1..5cb44a83 100644
--- a/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/TonePredictor.java
+++ b/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/TonePredictor.java
@@ -1,7 +1,10 @@
 package jomu.instrument.workspace.tonemap;
 
+import java.util.ArrayList;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Optional;
+import java.util.TreeSet;
 import java.util.logging.Logger;
 
 public class TonePredictor {
@@ -18,49 +21,86 @@ public TonePredictor(ToneMap toneMap) {
         this.toneMap = toneMap;
     }
 
-    void addNote(NoteListElement note) {
+    public void addNote(NoteListElement note) {
         LOG.finer(">>Tone Predictor add note: " + note);
         notes.add(note);
     }
 
-    void addChord(ChordListElement chord) {
+    public void addChord(ChordListElement chord) {
         LOG.finer(">>Tone Predictor add chord: " + chord);
         chords.add(chord);
     }
 
-    void addBeat(BeatListElement beat) {
+    public void predictChord(ToneTimeFrame targetFrame) {
+        ChordListElement chord = targetFrame.getChord();
+        if (chord != null && chord.getChordNotes().size() > 2) {
+            return;
+        }
+        List<ToneTimeFrame> previousFrames = new ArrayList<>();
+        ToneTimeFrame ptf = toneMap.getPreviousTimeFrame(targetFrame.getStartTime());
+
+        int i = 60;
+        ChordListElement previousChord = null;
+        TreeSet<ChordNote> chordNotes = new TreeSet<>();
+        if (chord != null) {
+            chordNotes.addAll(chord.getChordNotes());
+        }
+        while (ptf != null && i > 0) {
+            previousChord = ptf.getChord();
+            if (previousChord != null) {
+                if (previousChord.getChordNotes().size() > 2) {
+                    break;
+                }
+                chordNotes.addAll(previousChord.getChordNotes());
+                previousChord = null;
+            }
+            previousFrames.add(ptf);
+            ptf = toneMap.getPreviousTimeFrame(ptf.getStartTime());
+            i--;
+        }
+        if (previousChord != null) {
+            chordNotes.addAll(previousChord.getChordNotes());
+        }
+        chord = new ChordListElement(chordNotes.toArray(new ChordNote[chordNotes.size()]), targetFrame.getStartTime(),
+                targetFrame.getEndTime());
+        targetFrame.setChord(new ChordListElement(chordNotes.toArray(new ChordNote[chordNotes.size()]),
+                targetFrame.getStartTime(), targetFrame.getEndTime()));
+        targetFrame.sharpenChord();
+    }
+
+    public void addBeat(BeatListElement beat) {
         LOG.finer(">>Tone Predictor add beat: " + beat);
         beats.add(beat);
     }
 
-    boolean hasNote(double time) {
+    public boolean hasNote(double time) {
         return notes.stream()
                 .anyMatch(noteListElement -> (noteListElement.startTime >= time && noteListElement.endTime <= time));
     }
 
-    Optional getNote(double time) {
+    public Optional getNote(double time) {
         return notes.stream()
                 .filter(noteListElement -> (noteListElement.startTime >= time && noteListElement.endTime <= time))
                 .findFirst();
     }
 
-    boolean hasBeat(double time) {
+    public boolean hasBeat(double time) {
         return notes.stream()
                 .anyMatch(beatListElement -> (beatListElement.startTime >= time && beatListElement.endTime <= time));
     }
 
-    Optional getBeat(double time) {
+    public Optional getBeat(double time) {
         return notes.stream()
                 .filter(beatListElement -> (beatListElement.startTime >= time && beatListElement.endTime <= time))
                 .findFirst();
     }
 
-    boolean hasChord(double time) {
+    public boolean hasChord(double time) {
         return notes.stream()
                 .anyMatch(chordListElement -> (chordListElement.startTime >= time && chordListElement.endTime <= time));
     }
 
-    Optional getChord(double time) {
+    public Optional getChord(double time) {
         return notes.stream()
                 .filter(chordListElement -> (chordListElement.startTime >= time && chordListElement.endTime <= time))
                 .findFirst();
diff --git a/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/ToneTimeFrame.java b/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/ToneTimeFrame.java
index 40754065..3814c216 100644
--- a/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/ToneTimeFrame.java
+++ b/instrument-core/src/main/java/jomu/instrument/workspace/tonemap/ToneTimeFrame.java
@@ -305,6 +305,10 @@ public Double getStartTime() {
         return getTimeSet().getStartTime();
     }
 
+    public Double getEndTime() {
+        return getTimeSet().getEndTime();
+    }
+
     public TimeSet getTimeSet() {
         return timeSet;
     }
@@ -661,7 +665,7 @@ public ToneTimeFrame chromaChordify(double threshold, boolean sharpen) {
         return this;
     }
 
-    private void sharpenChord() {
+    public void sharpenChord() {
         LOG.finer(">>Sharpen: " + getStartTime() + ", " + chordNotes);
         TreeSet<ChordNote> result = new TreeSet<>();
         ChordNote lastCandidate = null;
@@ -1198,6 +1202,11 @@ public ChordListElement getChord() {
         return chordListElement;
     }
 
+    public void setChord(ChordListElement chord) {
+        chordNotes.clear();
+        chordNotes.addAll(chord.getChordNotes());
+    }
+
     public BeatListElement getBeat() {
         BeatListElement beatListElement = null;
         if (beatAmplitude > AMPLITUDE_FLOOR) {
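predictChord() back-fills a frame whose detected chord has two notes or fewer: it walks back through up to 60 earlier frames, merging their chord notes, and stops once it reaches a frame that already carries a full chord (more than two notes), whose notes are merged in as well; the merged set is then written back with setChord() and tidied with sharpenChord(). A condensed sketch of that walk, with a hypothetical Frame record standing in for ToneTimeFrame and integers standing in for ChordNote:

    import java.util.TreeSet;
    import java.util.function.UnaryOperator;

    // Sketch only: the backward walk performed by TonePredictor.predictChord().
    public final class ChordBackfillSketch {

        // Hypothetical stand-in for ToneTimeFrame: just the chord notes it currently holds.
        record Frame(TreeSet<Integer> chordNotes) {}

        static TreeSet<Integer> backfill(Frame target, UnaryOperator<Frame> previousFrameOf) {
            TreeSet<Integer> merged = new TreeSet<>(target.chordNotes());
            if (merged.size() > 2) {
                return merged; // already a full chord, nothing to predict
            }
            Frame ptf = previousFrameOf.apply(target);
            for (int i = 60; ptf != null && i > 0; i--) {
                merged.addAll(ptf.chordNotes());
                if (ptf.chordNotes().size() > 2) {
                    break; // reached a frame with a full chord; stop walking back
                }
                ptf = previousFrameOf.apply(ptf);
            }
            return merged;
        }
    }

In AudioSynthesisProcessor the predicted chord is then read back from the frame and handed to trackChord().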
diff --git a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/BeatsView.java b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/BeatsView.java
index 7e8a0347..34250519 100644
--- a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/BeatsView.java
+++ b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/BeatsView.java
@@ -177,28 +177,6 @@ private void renderToneMap(ToneTimeFrame ttf) {
         if (timeStart >= timeAxisEnd) {
             return;
         }
-//        if (timeStart > timeAxisEnd) {
-//            timeAxisStart = timeStart;
-//            timeAxisEnd = timeStart + 20000.0;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        } else if (timeStart == 0) {
-//            timeAxisStart = timeStart;
-//            timeAxisEnd = timeStart + 20000.0;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        } else if (timeStart < timeAxisStart) {
-//            timeAxisStart -= 20000.0;
-//            timeAxisEnd -= 20000.0;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        }
 
         bufferedGraphics.setColor(Color.black);
 
diff --git a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ChromaView.java b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ChromaView.java
index f6e26d23..947df12b 100644
--- a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ChromaView.java
+++ b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ChromaView.java
@@ -183,28 +183,6 @@ private void renderToneMap(ToneTimeFrame ttf) {
         if (timeStart >= timeAxisEnd) {
             return;
         }
-//        if (timeStart > timeAxisEnd) {
-//            timeAxisStart = timeStart;
-//            timeAxisEnd = timeStart + 20000.0;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        } else if (timeStart == 0) {
-//            timeAxisStart = timeStart;
-//            timeAxisEnd = timeStart + 20000.0;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        } else if (timeStart < timeAxisStart) {
-//            timeAxisStart -= 20000.0;
-//            timeAxisEnd -= 20000.0;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        }
 
         bufferedGraphics.setColor(Color.black);
 
diff --git a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ParametersPanel.java b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ParametersPanel.java
index 8ae7447a..dce48ea3 100644
--- a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ParametersPanel.java
+++ b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ParametersPanel.java
@@ -175,6 +175,7 @@ public class ParametersPanel extends JPanel {
     private Console console;
     private InstrumentStoreService iss;
     private JCheckBox chromaChordifySharpenSwitchCB;
+    private JCheckBox synthesisChordsSwitchCB;
 
     public ParametersPanel() {
         super(new BorderLayout());
@@ -1167,6 +1168,22 @@ public void itemStateChanged(ItemEvent e) {
                 .getBooleanParameter(InstrumentParameterNames.PERCEPTION_HEARING_INTEGRATION_HPS_SWITCH));
         cqSwitchPanel.add(integrateHpsSwitchCB);
 
+        synthesisChordsSwitchCB = new JCheckBox("synthesisChordsSwitchCB");
+        synthesisChordsSwitchCB.setText("Synthesis Chords");
+        synthesisChordsSwitchCB.addItemListener(new ItemListener() {
+
+            public void itemStateChanged(ItemEvent e) {
+                JCheckBox cb = (JCheckBox) e.getSource();
+                boolean newValue = cb.isSelected();
+                parameterManager.setParameter(InstrumentParameterNames.PERCEPTION_HEARING_SYNTHESIS_CHORDS_SWITCH,
+                        Boolean.toString(newValue));
+            }
+        });
+
+        synthesisChordsSwitchCB.setSelected(parameterManager
+                .getBooleanParameter(InstrumentParameterNames.PERCEPTION_HEARING_SYNTHESIS_CHORDS_SWITCH));
+        cqSwitchPanel.add(synthesisChordsSwitchCB);
+
         notateCompressionSwitchCB = new JCheckBox("notateCompressionSwitchCB");
         notateCompressionSwitchCB.setText("Notate Compression");
         notateCompressionSwitchCB.addItemListener(new ItemListener() {
@@ -2772,6 +2789,8 @@ public void updateParameters() {
                 .getBooleanParameter(InstrumentParameterNames.PERCEPTION_HEARING_CHROMA_CHORDIFY_SWITCH));
         integrateHpsSwitchCB.setSelected(parameterManager
                 .getBooleanParameter(InstrumentParameterNames.PERCEPTION_HEARING_INTEGRATION_HPS_SWITCH));
+        synthesisChordsSwitchCB.setSelected(parameterManager
+                .getBooleanParameter(InstrumentParameterNames.PERCEPTION_HEARING_SYNTHESIS_CHORDS_SWITCH));
         beatsThresholdInput
                 .setText(parameterManager.getParameter(InstrumentParameterNames.PERCEPTION_HEARING_BEATS_THRESHOLD));
 
diff --git a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ToneMapView.java b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ToneMapView.java
index daa594ef..588d585f 100644
--- a/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ToneMapView.java
+++ b/instrument-desktop/src/main/java/jomu/instrument/desktop/monitor/swing/ToneMapView.java
@@ -208,28 +208,6 @@ private void renderToneMap(ToneTimeFrame ttf) {
         if (timeStart >= timeAxisEnd) {
             return;
         }
-//        if (timeStart > timeAxisEnd) {
-//            timeAxisStart = timeStart;
-//            timeAxisEnd = timeStart + timeAxisRange;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        } else if (timeStart == 0) {
-//            timeAxisStart = timeStart;
-//            timeAxisEnd = timeStart + timeAxisRange;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        } else if (timeStart < timeAxisStart) {
-//            timeAxisStart -= timeAxisRange;
-//            timeAxisEnd -= timeAxisRange;
-//            this.currentWidth = getWidth();
-//            this.currentHeight = getHeight();
-//            bufferedImage = new BufferedImage(currentWidth, currentHeight, BufferedImage.TYPE_INT_RGB);
-//            bufferedGraphics = bufferedImage.createGraphics();
-//        }
 
         bufferedGraphics.setColor(Color.black);
 
diff --git a/instrument-desktop/src/main/resources/instrument.properties b/instrument-desktop/src/main/resources/instrument.properties
index c90a32de..de68bf11 100644
--- a/instrument-desktop/src/main/resources/instrument.properties
+++ b/instrument-desktop/src/main/resources/instrument.properties
@@ -111,7 +111,9 @@ perception.hearing.hps.cqOriginSwitch=false
 perception.hearing.notate.compression=30.0
 perception.hearing.notate.compressionSwitch=true
 
-perception.hearing.integration.hpsSwitchSwitch=true
+perception.hearing.integration.hpsSwitch=true
+
+perception.hearing.synthesis.chordsSwitch=true
 
 audio.tuner.formantFactor=0
 audio.tuner.formantHighSetting=100