Commit

Fix issue where some note durations were not clipped
WolfgangDrescher committed Mar 14, 2023
1 parent 0a07e11 commit 3002315
Showing 3 changed files with 30 additions and 30 deletions.
include/humlib.h (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
 //
 // Programmer: Craig Stuart Sapp <[email protected]>
 // Creation Date: Sat Aug 8 12:24:49 PDT 2015
-// Last Modified: Di 14 Mär 2023 21:42:31 CET
+// Last Modified: Di 14 Mär 2023 22:41:15 CET
 // Filename: humlib.h
 // URL: https://github.com/craigsapp/humlib/blob/master/include/humlib.h
 // Syntax: C++11
src/humlib.cpp (30 changes: 15 additions & 15 deletions)
@@ -1,7 +1,7 @@
 //
 // Programmer: Craig Stuart Sapp <[email protected]>
 // Creation Date: Sat Aug 8 12:24:49 PDT 2015
-// Last Modified: Di 14 Mär 2023 21:42:31 CET
+// Last Modified: Di 14 Mär 2023 22:41:15 CET
 // Filename: /include/humlib.cpp
 // URL: https://github.com/craigsapp/humlib/blob/master/src/humlib.cpp
 // Syntax: C++11
@@ -103564,25 +103564,25 @@ void Tool_myank::printDataLine(HLp line,
 			continue;
 		}
 		HumNum dur = lastLineDurationsFromNoteStart[i];
-		if (resolvedToken->getDuration() > dur) {
-			HumRegex hre;
-			string recip = Convert::durationToRecip(dur);
-			vector<string> subtokens = resolvedToken->getSubtokens();
-			for (int i=0; i<(int)subtokens.size(); i++) {
-				if (hre.search(subtokens[i], recipRegex)) {
-					string before = hre.getPrefix();
-					string after = hre.getSuffix();
-					hre.replaceDestructive(after, "", recipRegex, "g");
-					string subtokenText;
+		HumRegex hre;
+		string recip = Convert::durationToRecip(dur);
+		vector<string> subtokens = resolvedToken->getSubtokens();
+		for (int i=0; i<(int)subtokens.size(); i++) {
+			if (hre.search(subtokens[i], recipRegex)) {
+				string before = hre.getPrefix();
+				string after = hre.getSuffix();
+				hre.replaceDestructive(after, "", recipRegex, "g");
+				string subtokenText;
+				if (resolvedToken->getDuration() > dur) {
 					// Add a tie start if not already in a tie group
 					if (!hre.search(subtokens[i], "[_\\[]")) {
 						subtokenText += "[";
 					}
-					// Replace the old duration with the clipped one
-					subtokenText += before + recip + after;
-					token->replaceSubtoken(i, subtokenText);
-					lineChange = true;
 				}
+				// Replace the old duration with the clipped one
+				subtokenText += before + recip + after;
+				token->replaceSubtoken(i, subtokenText);
+				lineChange = true;
 			}
 		}
 	}
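The structural effect of the change (identical in src/humlib.cpp and src/tool-myank.cpp below) is that the getDuration() > dur check now guards only the tie-start insertion, while the recip rewrite runs for every subtoken that matches recipRegex. The following is a minimal, self-contained C++ sketch of that post-fix logic; it is illustrative only and does not use humlib: the simplified durationToRecip and recipRegex here are stand-ins for humlib's Convert::durationToRecip and the recipRegex variable referenced in the diff.

#include <iostream>
#include <regex>
#include <string>

// Simplified recip conversion: 1.0 quarter notes -> "4", 2.0 -> "2", 0.5 -> "8".
// (humlib's Convert::durationToRecip also handles dots, tuplets, breves, etc.)
static std::string durationToRecip(double quarterNotes) {
	return std::to_string(static_cast<int>(4.0 / quarterNotes));
}

// Clip one **kern subtoken (e.g. "2c") so it lasts at most maxDur quarter notes.
// Mirrors the post-fix structure: the duration is always rewritten, but a tie
// start "[" is only prepended when the note is actually longer than maxDur and
// is not already part of a tie group.
static std::string clipSubtoken(const std::string& subtok, double tokenDur, double maxDur) {
	static const std::regex recipRegex("\\d+%?\\d*\\.*");  // stand-in for the real recipRegex
	std::smatch m;
	if (!std::regex_search(subtok, m, recipRegex)) {
		return subtok;                                      // no duration found; leave untouched
	}
	std::string before = m.prefix();
	std::string after  = m.suffix();
	std::string out;
	if ((tokenDur > maxDur) && (subtok.find_first_of("[_") == std::string::npos)) {
		out += "[";                                         // start a tie only when clipping
	}
	out += before + durationToRecip(maxDur) + after;        // rewrite the duration
	return out;
}

int main() {
	// A half note clipped to a quarter becomes a tied quarter note.
	std::cout << clipSubtoken("2c", 2.0, 1.0) << std::endl; // prints: [4c
	// A note that already fits keeps its duration and gets no tie.
	std::cout << clipSubtoken("4c", 1.0, 1.0) << std::endl; // prints: 4c
	return 0;
}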
src/tool-myank.cpp (28 changes: 14 additions & 14 deletions)
@@ -1383,25 +1383,25 @@ void Tool_myank::printDataLine(HLp line,
 			continue;
 		}
 		HumNum dur = lastLineDurationsFromNoteStart[i];
-		if (resolvedToken->getDuration() > dur) {
-			HumRegex hre;
-			string recip = Convert::durationToRecip(dur);
-			vector<string> subtokens = resolvedToken->getSubtokens();
-			for (int i=0; i<(int)subtokens.size(); i++) {
-				if (hre.search(subtokens[i], recipRegex)) {
-					string before = hre.getPrefix();
-					string after = hre.getSuffix();
-					hre.replaceDestructive(after, "", recipRegex, "g");
-					string subtokenText;
+		HumRegex hre;
+		string recip = Convert::durationToRecip(dur);
+		vector<string> subtokens = resolvedToken->getSubtokens();
+		for (int i=0; i<(int)subtokens.size(); i++) {
+			if (hre.search(subtokens[i], recipRegex)) {
+				string before = hre.getPrefix();
+				string after = hre.getSuffix();
+				hre.replaceDestructive(after, "", recipRegex, "g");
+				string subtokenText;
+				if (resolvedToken->getDuration() > dur) {
 					// Add a tie start if not already in a tie group
 					if (!hre.search(subtokens[i], "[_\\[]")) {
 						subtokenText += "[";
 					}
-					// Replace the old duration with the clipped one
-					subtokenText += before + recip + after;
-					token->replaceSubtoken(i, subtokenText);
-					lineChange = true;
 				}
+				// Replace the old duration with the clipped one
+				subtokenText += before + recip + after;
+				token->replaceSubtoken(i, subtokenText);
+				lineChange = true;
 			}
 		}
 	}
