Add files using upload-large-folder tool
- evalFiloBass.py +258 -0
- musicxml/All-the-Things-You-Are.xml +0 -0
- musicxml/Alone-Together.xml +0 -0
- musicxml/Apple-Jump.xml +0 -0
- musicxml/Autumn-Leaves.xml +0 -0
- musicxml/Blues-By-Five.xml +0 -0
- musicxml/Bye-Bye-Blackbird.xml +0 -0
- musicxml/C-Jam-Blues.xml +0 -0
- musicxml/Come-Rain-or-Come-Shine.xml +0 -0
- musicxml/Cottontail.xml +0 -0
- musicxml/Dear-Old-Stockholm.xml +0 -0
- musicxml/Doxy.xml +0 -0
- musicxml/For-Regulars-Only.xml +0 -0
- musicxml/Four.xml +0 -0
- musicxml/Fried-Bananas.xml +0 -0
- musicxml/Gone-with-the-wind.xml +0 -0
- musicxml/Have-You-Met-Miss-Jones.xml +0 -0
- musicxml/Honeysuckle-Rose.xml +0 -0
- musicxml/I-ll-Remember-April.xml +0 -0
- musicxml/In-Your-Own-Sweet-Way.xml +0 -0
- musicxml/In-a-Mellow-Tone.xml +0 -0
- musicxml/Invitation.xml +0 -0
- musicxml/Isotope.xml +0 -0
- musicxml/Just-Friends.xml +0 -0
- musicxml/Ladybird.xml +0 -0
- musicxml/Milestones.xml +0 -0
- musicxml/Montmartre.xml +0 -0
- musicxml/Namely-You.xml +0 -0
- musicxml/Nows-the-time.xml +0 -0
- musicxml/Oleo.xml +0 -0
- musicxml/On-Green-Dolphin-Street.xml +0 -0
- musicxml/Out-of-the-Night.xml +0 -0
- musicxml/Parisian-Thoroughfare.xml +0 -0
- musicxml/Pennies-from-Heaven.xml +0 -0
- musicxml/Satin-Doll.xml +0 -0
- musicxml/Scrapple-From-the-Apple.xml +0 -0
- musicxml/Soon.xml +0 -0
- musicxml/Star-Eyes.xml +0 -0
- musicxml/Stella-By-Starlight.xml +0 -0
- musicxml/Take-the-A-Train.xml +0 -0
- musicxml/Tangerine.xml +0 -0
- musicxml/The-Rainbow-People.xml +0 -0
- musicxml/There-will-never-be-another-you.xml +0 -0
- musicxml/Three-Little-Words.xml +0 -0
- musicxml/UMMG.xml +0 -0
- notebooks_and_scripts/Compile_Backing.py +195 -0
- notebooks_and_scripts/Edit_Data.csv +49 -0
- notebooks_and_scripts/Remove_Repeats.py +99 -0
- notebooks_and_scripts/analysis.ipynb +0 -0
- notebooks_and_scripts/note_data.csv +0 -0
evalFiloBass.py
ADDED
@@ -0,0 +1,258 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 5 14:02:31 2024
@author: jacquema

Evaluation of the scores of the Fake Real Book dataset
"""


import sys
import logging

sys.path.append('/Users/xavriley/Projects/pse/lib')
import pse

import os
from pathlib import Path, PosixPath
from datetime import datetime
import re
from operator import itemgetter, attrgetter
import pandas
import music21 as m21
import PSeval as ps


########################
##                    ##
##  global variables  ##
##                    ##
########################

# path to the FiloBass dataset
_dataset_root = '/Users/xavriley/Dropbox/PhD/Datasets/FiloBass ISMIR Publication/musicxml/'

# default score file suffix
_score_suffix = '.xml'

# root of evaluation dir
_eval_root = '../../PSeval'

Path(_eval_root).mkdir(parents=True, exist_ok=True)

# name of dir for evaluation output
_output_dir = 'augASAP'

timestamp = str(datetime.today().strftime('%Y%m%d-%H%M'))

# MuseScore commandline executable
_mscore = '/Applications/MuseScore 4.app/Contents/MacOS/mscore'


#################################
##                             ##
## extraction of dataset files ##
##                             ##
#################################

# corpus can be 'leads' or 'piano'
def FiloBassCorpus(corpus):
    """build a list of scores in a subdirectory of FiloBass"""
    global _dataset_root
    global _score_suffix
    dataset_path = Path(_dataset_root)
    assert isinstance(dataset_path, PosixPath)
    if not os.path.exists(dataset_path):
        print(dataset_path, 'not found')
        return
    # map: opus_name -> path
    dataset = dict()
    files = os.listdir(dataset_path)
    for file in files:
        filepath = dataset_path/file
        # skip directories
        if os.path.isdir(filepath):
            continue
        # check the extension in the file name
        if (os.path.splitext(file)[1] == _score_suffix):
            # map score name to file path
            dataset[os.path.splitext(file)[0]] = filepath
    # sort the list alphabetically
    dataset = dict(sorted(dataset.items()))
    return dataset

def accids(ks, notes):
    c = 0
    for note in notes:
        if note.pitch.accidental != ks.accidentalByStep(note.name):
            c += 1
    return c

def FiloBass_table(corpus='leads'):
    assert(corpus == 'leads' or corpus == 'piano')
    table = []
    dataset = FiloBassCorpus(corpus)
    names = sorted(list(dataset))  # list of index in dataset
    for name in names:
        if (dataset.get(name) == None):
            print(name, "not found in dataset", corpus)
            continue
        file = dataset[name]
        score = m21.converter.parse(file.as_posix())
        assert(len(score.parts) > 0)
        part = score.parts[0]
        fpart = part.flatten()
        keys = fpart.getElementsByClass([m21.key.Key, m21.key.KeySignature])
        notes = fpart.getElementsByClass(m21.note.Note)
        row = []
        row.append(name)
        row.append(keys[0].sharps if len(keys) > 0 else None)
        row.append(len(part.getElementsByClass(m21.stream.Measure)))
        row.append(len(notes))
        row.append(accids(keys[0], notes) if len(keys) > 0 else None)
        row.append(len(score.parts))
        row.append(len(keys))
        table.append(row)
    df = pandas.DataFrame(table)
    df.columns = ['name', 'KS', '# bars', '# notes', '# accids', '# parts', '# keys']
    df['KS'] = df['KS'].map('{:n}'.format)
    return df

# df.fillna('NaN').to_csv(file, header=True, index=False)


###########################################
##                                       ##
## automatic evaluation of whole dataset ##
##                                       ##
###########################################

# list of opus names with issues
skip = ['All-the-Things-You-Are',
        'Alone-Together',
        'Apple-Jump',]

def eval_FiloBass(corpus='leads', algo=ps.pse.Algo_PSE,
                  tons=104, kpre=33, kpost=23,
                  output_dir='', filename='',
                  debug=True, mark=True):
    global _eval_root
    assert(corpus == 'leads' or corpus == 'piano')
    timestamp = datetime.today().strftime('%Y%m%d-%H%M')
    # default output dir name
    if output_dir == '':
        output_dir = timestamp
    output_path = Path(_eval_root)/'evalFiloBass'/output_dir
    if not os.path.isdir(output_path):
        if not os.path.isdir(Path(_eval_root)/'evalFiloBass'):
            os.mkdir(Path(_eval_root)/'evalFiloBass')
        os.mkdir(output_path)
    else:
        print('WARNING: dir', output_path, 'exists')
    stat = ps.Stats()
    dataset = FiloBassCorpus(corpus)
    names = sorted(list(dataset))  # list of index in dataset
    print('\n', 'starting evaluation of FiloBass dataset -', len(names), 'entries\n')
    for name in names:
        if (name in skip):
            print('\n', name, 'SKIP\n')
            continue
        if (dataset.get(name) == None):
            print(name, "not found in dataset", corpus)
            continue
        file = dataset[name]
        print('\n', name, '\n')
        s = m21.converter.parse(file.as_posix())
        (ls, lld) = ps.eval_score(score=s, stat=stat,
                                  sid=0, title=name, composer='',
                                  algo=algo,
                                  nbtons=tons,             # for PSE
                                  kpre=kpre, kpost=kpost,  # for PS13
                                  debug=debug, mark=mark)
        if mark and not ps.empty_difflist(lld):
            write_score(s, output_path, name)
    # display and save evaluation table
    # default table file name
    if filename == '':
        filename = 'FRWeval'+'_'+corpus+str(tons)+'_'+timestamp
    stat.show()
    df = stat.get_dataframe()  # create pandas dataframe
    df.pop('part')  # del column part number (always 0)
    df.to_csv(output_path/(filename+'.csv'), header=True, index=False)
    stat.write_datasum(output_path/(filename+'_sum.csv'))


def eval_FiloBassitem(name, corpus='leads', algo=ps.pse.Algo_PSE,
                      tons=104, kpre=33, kpost=23, dflag=True, mflag=True):
    assert(len(name) > 0)
    assert(corpus == 'leads' or corpus == 'piano')
    dataset = FiloBassCorpus(corpus)
    if (dataset.get(name) == None):
        print(name, "not found in dataset", corpus)
        return
    file = dataset[name]
    score = m21.converter.parse(file.as_posix())
    stat = ps.Stats()
    # ground truth ks, estimated ks, nb of notes and list of diff notes
    #(k_gt, gt_est, nn, ld) = ps.eval_part(part=part, stat=stat, nbtons=tons,
    #                                      debug=dflag, mark=mflag)
    (ls, lld) = ps.eval_score(score=score, stat=stat,
                              sid=0, title=name, composer='',
                              algo=algo,
                              nbtons=tons,             # for PSE
                              kpre=kpre, kpost=kpost,  # for PS13
                              debug=dflag, mark=mflag)
    stat.show()
    assert(len(lld) == 1)  # always 1 unique part in LG dataset
    if mflag and len(lld[0]) > 0:
        score.show()
        write_score(score, Path(os.getcwd()), name)

def write_score(score, output_path, outname):
    if not os.path.isdir(output_path):
        os.mkdir(output_path)
    xmlfile = output_path/(outname+'.musicxml')
    score.write('musicxml', fp=xmlfile)

def write_score2(score, output_path, outname):
    assert(len(outname) > 0)
    if not os.path.isdir(output_path):
        os.mkdir(output_path)
    output_path = output_path/outname
    if not os.path.isdir(output_path):
        os.mkdir(output_path)
    xmlfile = output_path/(outname+'.musicxml')
    score.write('musicxml', fp=xmlfile)
    # pdffile = dirname+'/'+outname+'.pdf'
    # os.system(_mscore + ' -o ' + pdffile + ' ' + xmlfile)


def debug(name, corpus='leads'):
    assert(len(name) > 0)
    dataset = FiloBassCorpus(corpus)
    if (dataset.get(name) == None):
        print(name, "not found in dataset", corpus)
        return
    file = dataset[name]
    score = m21.converter.parse(file)
    lp = score.getElementsByClass(m21.stream.Part)
    ln = ps.extract_part(lp[0])  # first and unique part
    for (n, b, s) in ln:
        a = 'sp.add('
        a += str(n.pitch.midi)
        a += ', '
        a += str(b)
        a += ', '
        a += 'true' if s else 'false'
        a += ');'
        print(a)
    #sp = ps.Speller()
    #sp.debug(True)
    #ps.add_tons(0, sp)
    #sp.add_notes(ln1[:61], sp)
    #sp.spell()

if __name__=="__main__":
    eval_FiloBass()
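For reference, a minimal usage sketch (not part of the commit), assuming the PSeval module, the pse bindings and the hard-coded machine-specific paths above are actually available:

# Hypothetical interactive session; module availability and paths are assumptions.
import evalFiloBass as efb

# Per-score summary table: key signature, bar/note counts, accidentals outside the key.
df = efb.FiloBass_table(corpus='leads')
print(df)

# Full evaluation with the PSE defaults; result CSVs and marked-up scores are written
# under ../../PSeval/evalFiloBass/<timestamp>/.
efb.eval_FiloBass(corpus='leads', tons=104, debug=False, mark=True)

# Single tune by opus name.
efb.eval_FiloBassitem('Autumn-Leaves', corpus='leads')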
musicxml/*.xml (44 scores, as listed above)
ADDED
The diffs for these files are too large to render. See raw files.
notebooks_and_scripts/Compile_Backing.py
ADDED
@@ -0,0 +1,195 @@
import numpy as np
import soundfile as sf
import csv
from pathlib import Path
import librosa
import hashlib
import argparse
import warnings
# Silence librosa MP3 warning
warnings.filterwarnings("ignore", category=Warning)

INPUT_FOLDER = "Aebersold"
OUTPUT_FOLDER = "Backing"
SCRIPTS_FOLDER = "Scripts"
CSV_FILENAME = "Edit_Data.csv"
CSV_CHECKSUM = "799806dbc9756b3baf0c7df8027dea95"
CROSS_FADE_SAMPLES = 4410
SR = 44100
NORMALISE_TARGET = 0.95
VERSION_OPTIONS = ['full', 'lite']
INPUT_OPTIONS = ['-version', '-overwrite']

def md5(fname):
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()

def check_files():
    # Check if backing folder exists
    backing_path = Path.cwd() / OUTPUT_FOLDER
    if not backing_path.exists():
        raise OSError("Error: folder structure incomplete. Run this script inside the Filosax folder, which should contain a 'Backing' folder.")

    # Check if Aebersold folder exists
    aebersold_path = Path.cwd() / INPUT_FOLDER
    if not aebersold_path.exists():
        raise OSError("Error: folder structure incomplete. Run this script inside the Filosax folder, which should contain an 'Aebersold' folder.")

    # Check if CSV file exists and matches checksum
    csv_path = Path.cwd() / SCRIPTS_FOLDER / CSV_FILENAME
    if not csv_path.exists():
        raise OSError("Error: %s file missing." % CSV_FILENAME)
    if md5(csv_path) != CSV_CHECKSUM:
        raise OSError("Error: %s wrong file version." % CSV_FILENAME)

def load_data():
    # Load data from CSV file
    csv_path = Path.cwd() / SCRIPTS_FOLDER / CSV_FILENAME
    csv_data = []
    with open(csv_path) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count == 0:
                line_count += 1
            else:
                csv_data.append(row)
    return csv_data

def crossfade_ramps(sample_count, fade_type='equal_power'):
    if fade_type == 'equal_power':
        in_ramp = np.zeros(sample_count)
        for i in np.arange(sample_count):
            in_ramp[i] = np.sqrt(i/sample_count)
        out_ramp = np.flip(in_ramp)
    else:
        # Linear ramps
        in_ramp = np.linspace(0, 1, sample_count)
        out_ramp = np.flip(in_ramp)
    return in_ramp, out_ramp

def perform_edits(filosax_version='full', overwrite_files=False):

    num_files = 0
    if filosax_version == 'full':
        num_files = 48
    if filosax_version == 'lite':
        num_files = 5

    csv_data = load_data()
    in_ramp, out_ramp = crossfade_ramps(CROSS_FADE_SAMPLES)
    num_files_processed = 0

    # Iterate over tracks
    for track_num in np.arange(0, num_files):
        track_data = csv_data[track_num]
        track_id = track_data[0]
        track_name = track_data[1]
        print("Processing Track", track_id, ": '%s'... " % track_name, end = '')

        # Does folder exist?
        this_folder = Path.cwd() / OUTPUT_FOLDER / track_id
        if not this_folder.exists():
            print("Backing folder does not exist.")
            continue

        # Does Aebersold track exist? Does it match the checksum?
        file_name = track_data[3]
        file_ext = track_data[4]
        aebersold_path = Path.cwd() / INPUT_FOLDER / (file_name + file_ext)
        if not aebersold_path.exists():
            print("Aebersold file does not exist.")
            continue
        if (md5(aebersold_path) != track_data[5]):
            print("Aebersold file wrong version.")
            continue

        # Do the files already exist?
        if not(overwrite_files):
            bass_drums_file = this_folder / "Bass_Drums.wav"
            piano_drums_file = this_folder / "Piano_Drums.wav"
            if bass_drums_file.exists() or piano_drums_file.exists():
                print("One or both backing tracks already exists.")
                continue

        # If all this works, proceed
        track_length = int(track_data[7])
        bass_drums = np.zeros(track_length)
        piano_drums = np.zeros(track_length)
        file_adjust = int(track_data[6])
        num_edits = int(track_data[9])
        fade_dur = int(track_data[8])

        audio_data, sr = librosa.load(aebersold_path, sr=SR, mono=False)
        audio_data_l = audio_data[0]
        audio_data_r = audio_data[1]

        # Normalise audio data
        audio_data_l = librosa.util.normalize(audio_data_l) * NORMALISE_TARGET
        audio_data_r = librosa.util.normalize(audio_data_r) * NORMALISE_TARGET

        # Iterate over edits
        for n in np.arange(num_edits):
            file_ref = int(track_data[10+(n*3)])
            edit_start = int(track_data[11+(n*3)])
            edit_dur = int(track_data[12+(n*3)])
            if (n > 0):
                edit_start -= CROSS_FADE_SAMPLES
                edit_dur += CROSS_FADE_SAMPLES

            #print(file_adjust, file_ref, edit_start, edit_dur)

            start_sample = edit_start - file_ref - file_adjust
            end_sample = start_sample + edit_dur
            edit_end = edit_start + edit_dur
            if edit_end > track_length:
                edit_end = track_length
                end_sample = start_sample + (edit_end - edit_start)

            #print(start_sample, end_sample, edit_start, edit_end)

            l_splice = audio_data_l[start_sample:end_sample].copy()
            r_splice = audio_data_r[start_sample:end_sample].copy()

            # Cross fade in
            #if (n > 0):
            l_splice[:CROSS_FADE_SAMPLES] = l_splice[:CROSS_FADE_SAMPLES] * in_ramp
            r_splice[:CROSS_FADE_SAMPLES] = r_splice[:CROSS_FADE_SAMPLES] * in_ramp

            if (n == num_edits-1) and (fade_dur > 0) and (fade_dur <= len(l_splice)):
                # Longer fade out for final edit if required
                fade_ramp = np.flip(np.linspace(0, 1, fade_dur+1, endpoint=False))[:-1]
                l_splice[-fade_dur:] = l_splice[-fade_dur:] * fade_ramp
                r_splice[-fade_dur:] = r_splice[-fade_dur:] * fade_ramp
            else:
                # Cross fade out
                l_splice[-CROSS_FADE_SAMPLES:] = l_splice[-CROSS_FADE_SAMPLES:] * out_ramp
                r_splice[-CROSS_FADE_SAMPLES:] = r_splice[-CROSS_FADE_SAMPLES:] * out_ramp

            bass_drums[edit_start:edit_end] += l_splice
            piano_drums[edit_start:edit_end] += r_splice

        # Export audio files
        bass_drums_file = this_folder / "Bass_Drums.wav"
        piano_drums_file = this_folder / "Piano_Drums.wav"
        sf.write(bass_drums_file, bass_drums, sr, subtype="PCM_24")
        sf.write(piano_drums_file, piano_drums, sr, subtype="PCM_24")
        print("Edited backing tracks created.")
        num_files_processed += 1

    print("***")
    print("%d of %d files processed." % (num_files_processed, num_files))
    print("***")

def main(args):
    check_files()
    perform_edits(args.version, args.overwrite)

if __name__ == "__main__":
    PARSER = argparse.ArgumentParser(description="Make Filosax backing edits")
    PARSER.add_argument("-version", choices=VERSION_OPTIONS, default='full', help="which Filosax version?")
    PARSER.add_argument("-overwrite", action="store_true", help="overwrite existing?")
    main(PARSER.parse_args())
notebooks_and_scripts/Edit_Data.csv
ADDED
@@ -0,0 +1,49 @@
Track ID,Track Name,Soloist,File Name,Extension,Checksum,Adjust Samples,Duration,Fade Out Duration,Num Edits,Edit 1 Ref,Edit 1 Start,Edit 1 Duration,Edit 2 Ref,Edit 2 Start,Edit 2 Duration,Edit 3 Ref,Edit 3 Start,Edit 3 Duration,Edit 4 Ref,Edit 4 Start,Edit 4 Duration,Edit 5 Ref,Edit 5 Start,Edit 5 Duration,Edit 6 Ref,Edit 6 Start,Edit 6 Duration,Edit 7 Ref,Edit 7 Start,Edit 7 Duration,Edit 8 Ref,Edit 8 Start,Edit 8 Duration
1,All The Things You Are,Getz,V36_03 Prince Albert,.mp3,1d88ea5493a53fddcaf299e8a4f6109e,3369,14878364,0,1,-90843,33075,14844387,,,,,,,,,,,,,,,,,,,,,
2,Alone Together,Hayes,V41_1-02 Alone Together,.mp3,f27708fcca211ca20dbec4bbce8f2f69,-7795,15915335,0,6,-70095,126590,6161685,-70095,6288275,1379395,-70095,7667670,2401351,3710651,10069021,1379395,3710651,11448416,2401351,3710651,13849767,2061356,,,,,,
3,Apple Jump,Gordon,V82_04 Apple Jump,.mp3,f47fb41fff47477f3761c4d9919a856d,-34,16250448,0,6,-118718,38587,6136414,-118718,6174995,1961906,-118718,8136891,1949344,3792482,10086235,1961906,3792472,12048131,1949344,3792472,13997475,2252947,,,,,,
4,Autumn Leaves,Webster,V54_14 Autmn Leaves,.mp3,18e1e0e1791c91948625422308fe4e61,-1359,15344691,0,4,-80188,78066,5891057,-80188,5969123,2852507,2772319,8821630,2852507,2772319,11674137,3664729,,,,,,,,,,,,
5,Blues By Five,Hayes,V54_10 F Blues,.mp3,94af1d573f74ede36e435a4ae35e8063,-1349,13903307,0,4,-77409,132118,5754363,-77409,5886481,2220286,2142876,8106766,2220286,2142875,10327051,3576254,,,,,,,,,,,,
6,Bye Bye Blackbird,Rollins,V39_06 Bye Bye Blackbird,.mp3,1d45807fa24d9c3849375d5c1cf720bc,3,17036628,0,2,-117304,39413,15940278,-420111,15979792,1056936,,,,,,,,,,,,,,,,,,
7,C Jam Blues,Webster,V48_06 C Jam Blues,.mp3,1b67279417388e1f641515513109ba55,774,13360868,0,4,-135429,42261,7389921,-135428,7432183,918765,783337,8350948,918765,783337,9269713,4091155,,,,,,,,,,,,
8,Come Rain Or Shine,Gordon,04 - Come Rain Or Shine,.mp3,ed1de7de075765e232553bc834773d9a,4,15793109,0,4,-131234,40424,4388278,-131221,4428715,2027246,1896025,6455961,2027246,1896025,8483207,7309419,,,,,,,,,,,,
9,Cottontail,Henderson,14 Serpent's Tooth Fast,.mp3,754ec3666da8116d6488cc23775994f1,-1062,16951467,0,4,-81959,41342,1547565,-81958,1588908,1524888,1442930,3113796,1524888,1442930,4638684,12312783,,,,,,,,,,,,
10,Dear Old Stockholm,Getz,Vol. 91_05 Dear Old Stockholm,.mp3,2a05c5ae761c597fb97419ad782726ed,-717,15897180,0,1,-132631,44100,15786533,,,,,,,,,,,,,,,,,,,,,
11,Dolphin Dance,Hayes,V11_07 Dolphin Dance,.mp3,eac33b5542e3cad136cd674c268ff2c8,-4264,14959333,0,5,-134976,43959,6047492,-134975,6091452,2871188,2736213,8962640,2871188,2736213,11833828,2518216,-783475,14352044,607290,,,,,,,,,
12,Doxy,Rollins,V08_02 Doxy,.mp3,754ba20fa73ca89e6373d5d7431b6965,-1371,17300036,0,4,-63835,122306,7518777,-63834,7641084,3752772,3688938,11393856,3752772,3688938,15146628,1235376,3688938,16382004,848029,,,,,,,,,
13,For Regulars Only,Gordon,V82_07 For Regulars Only,.mp3,6971ab4cd5810961be31dc5d975e8f64,-43,16051748,0,4,-47073,97506,6657274,-47073,6754780,3617832,3570759,10372612,3617832,3570759,13990444,2060495,,,,,,,,,,,,
14,Four,Rollins,02-Four,.mp3,87167844495e4b3e1342bd4a095e80b8,4,16498147,0,8,-18371,77010,425277,-19452,502294,1132636,-19377,1635005,1593216,-19377,3228221,1508705,-19377,4736926,1465650,2954978,6202576,1508705,2954979,7711282,1465650,2955009,9176962,7321185
15,Fried Bananas,Gordon,V82_01 Fried Bananas,.mp3,01a61d2254fca9533d4c4de2cb311400,-34,17479571,0,4,-44197,97583,9313089,-44197,9410672,1803648,1759451,11214320,1803648,1759451,13017968,4461603,,,,,,,,,,,,
16,Gone With The Wind,Getz,V58_02 Gone With the Wind,.mp3,afad7f464b6d94c411f4a91a22babbfa,-10639,16739108,0,4,-101414,37541,10462878,-101414,10500420,1954376,1852963,12454796,1954376,1852963,14409172,2099546,,,,,,,,,,,,
17,Have You Met Miss Jones?,Getz,02 - Have You Met Miss Jones,.mp3,48a9240d69d6b3130f04d3a57a1d0ab0,-1,17055554,0,4,-81743,44100,9159747,-81763,9203827,2935060,2853297,12138887,2935060,2853313,15073963,1979487,,,,,,,,,,,,
18,Honeysuckle Rose,Webster,Vol. 130_2-06 Honeysuckle Rose,.mp3,5f95b0710b1fc54dc75ecf5d545c8ade,0,17550984,0,5,-132831,33075,8620873,-132831,8653948,2091312,1958481,10745260,2091312,1958481,12836572,2089700,1958481,14926272,2624712,,,,,,,,,
19,I’ll Remember April,Getz,V43_08 I'll Remember April,.mp3,1e78a409410396dec3f1c6a69e96dcbc,766,16558411,23632,4,-2791418,22048,1969139,-451301,1991188,9611511,1828135,11602699,4446456,422376,16049155,486592,,,,,,,,,,,,
20,In A Mellow Tone,Webster,V48_02 In A Mellow Tone,.mp3,d8930039dfb3f24b7bf8dc42d79fe521,776,18586498,0,5,-66895,119563,8881829,-66895,9001392,2956572,2889677,11957964,2956572,2889677,14914536,2728104,2889677,17642640,936537,,,,,,,,,
21,In Your Own Sweet Way,Rollins,Vol. 105_02 In Your Own Sweet Way,.mp3,d2449b542f957a5b896c3c8ecf5eb6b3,119,19229465,0,1,-85316,117811,19105847,,,,,,,,,,,,,,,,,,,,,
22,Invitation,Getz,06 - Invitation,.mp3,637f2ecef21c33b6ee14832bb0df8232,-2,16459224,0,4,-87546,43421,7773214,-87545,7816636,2594036,2506491,10410672,2594036,2506491,13004708,3449230,,,,,,,,,,,,
23,Isotope,Henderson,Vol. 108_07 Isotope,.mp3,38ac7e05c73d0a9132f159670a0f94c0,165,16795362,0,6,-45838,103112,7183932,-45838,7287044,726908,-45838,8013952,3632207,4313277,11646159,726908,4313277,12393067,3632207,4313277,16005274,790088,,,,,,
24,Just Friends,Gordon,03 - Just Friends,.mp3,18fb9e945e173ba77822741c1b86710b,-14,16363891,0,4,-33502,100876,7028560,-32793,7130145,3415612,3382820,10545758,3415612,3382820,13961371,2400904,,,,,,,,,,,,
25,Lady Bird,Gordon,V36_02 Lady Bird,.mp3,b7466d88ce702d625183e4b065da00ff,3372,16795588,0,3,-84068,38585,15368881,-84067,15407467,679039,-2477730,16086502,701365,,,,,,,,,,,,,,,
26,Milestones,Henderson,08-Milestones,.mp3,6b6c9f2f3cbe602d5e32a8f91c75563c,-5,16811716,0,4,-118972,44100,9283488,-118972,9327588,2343596,2224624,11671184,2343596,2224624,14014780,2796936,,,,,,,,,,,,
27,Montmartre,Gordon,V82_03 Montmartre,.mp3,8d31b01d2bfd88a6b5e213eb3922297d,-40,16037428,0,4,-288224,43088,5514103,-288223,5557192,5031336,4743113,10588528,5031336,4743113,15619864,366874,,,,,,,,,,,,
28,Namely You,Rollins,V40_2-02 Namely You,.mp3,5968d5f697ccb2e8250d1f5e737ea093,153,15243178,0,4,-132630,42261,7345822,-132629,7388084,2463517,2330906,9851619,2463514,2330906,12315151,2803603,,,,,,,,,,,,
29,Now’s The Time,Hayes,V02_10 Fast Blues In F Concert,.mp3,4ea41fdf288a62d3ea1bfc3e52dfe5c8,809,16018068,0,6,-40692,100348,1326252,-40692,1426600,2631476,-40692,4058076,3914184,3873492,7972260,3914184,7787676,11886444,3914184,7787676,15800628,217440,,,,,,
30,Oleo,Webster,V51_04 I Got Rhythm,.mp3,c5013f8d4b1dca38d4fb952289bfd09f,-1767,16974603,0,6,-70392,42548,3033671,-70391,3076220,2922484,-70391,5998704,2900828,2830443,8899538,2900828,5731277,11800372,2900828,5731290,14701213,2134109,,,,,,
31,On Green Dolphin Street,Rollins,02 - On Green Dolphin Street,.mp3,4f9e87eea431c5da2e83e67e4a5b50bf,2,16159626,0,4,-765947,44100,6756747,-765936,6800858,3199440,2433504,10000298,3199440,2433504,13199738,2809588,,,,,,,,,,,,
32,Out Of The Night,Henderson,Vol. 108_04 Out Of The Night,.mp3,ee46bc05dd6c1d9a562c045fcd06ba06,172,16330212,0,5,-73227,66384,6613568,-73227,6679952,3313188,3239961,9993140,3313188,3239961,13306328,1092960,3239961,14399288,1930924,,,,,,,,,
33,Parisian Thoroughfare,Hayes,Vol. 95_04 Parisian Thoroughfare,.mp3,d4ac4139c92ee172bf0d5fdbcd988eaa,27,17343468,0,5,-43204,94730,8634510,-43204,8729240,3355404,3312200,12084644,3355404,3312200,15440048,829352,3312200,16269400,1023212,,,,,,,,,
34,Pennies From Heaven,Webster,Vol. 130_1-01 Pennies From Heaven,.mp3,4aef5bcfcf3c8547e3160d08084361e8,11,15567102,0,5,-170312,42262,2957414,-170312,2999676,2917588,-170312,5917264,2909844,2739532,8827108,2909844,2739532,11736952,3830150,,,,,,,,,
35,Satin Doll,Webster,V12_02 Satin Doll,.mp3,5a8be2388d4f0deda52812f490917441,475,17604154,0,4,-141487,38586,2963497,-141490,3002080,5437043,5295553,8439123,5437043,5295553,13876166,3625387,,,,,,,,,,,,
36,Scrapple From The Apple,Getz,V06_09 Scrapple From The Apple,.mp3,207be4a808c1ac69ea188b32a5b63b41,1049,16553124,0,4,-72421,42261,4894698,-72420,4936960,4843276,4770856,9780236,4843276,4770856,14623512,1880532,,,,,,,,,,,,
37,Sonnymoon For Two,Rollins,V82_02 Sticky Wicket,.mp3,59dd6ecca5ece1b345411c4d5803689a,-32,15845727,0,5,-144019,44100,3606468,-144019,3650568,3623676,3479657,7274244,3623676,3479657,10897920,3558187,3479657,14456107,1369317,,,,,,,,,
38,Soon,Hayes,V22_2-02 Soon,.mp3,90e1d297b5b18b527bd302d73c8a6ac8,-1998,15640120,0,4,-107426,43444,9444051,-107425,9487496,1843684,1736259,11331180,1843684,1736259,13194864,2465256,,,,,,,,,,,,
39,Star Eyes,Hayes,08 - Star Eyes,.mp3,ea2b7bcf2c4af77d7cfd74436667c351,3,16448094,0,5,-519362,44099,11046411,-519362,11090510,1739000,1219638,12829510,1739000,1219638,14568510,1677880,905954,16246390,49229,,,,,,,,,
40,Stella By Starlight,Henderson,03 - Stella,.mp3,05cc4934bcbaa474c2304dc812c3c986,-2,17670523,0,8,-36811,98382,5859891,3867181,5958273,1791618,3867181,7929891,1932374,5799555,9862265,1932374,5799555,11794639,954852,5799555,12749491,940952,5799555,13690443,1858440,5799555,15548883,2003848
41,Step Lightly,Henderson,Vol. 108_01 Step Lightly,.mp3,fb1b77c5dacfd1a742022c6f6a6f2763,162,16848298,0,5,-174106,43180,4649715,-174106,4692896,1587290,1413185,6280186,1587290,1413185,7867476,6249342,1413185,14116818,2731480,,,,,,,,,
42,Sweet Georgia Brown,Webster,V70_09 Sweet Georgia Brown,.mp3,9316b004a2e05c816ae97a073cfdde63,21030,16001571,0,1,-184261,42262,15959309,,,,,,,,,,,,,,,,,,,,,
43,Take The A Train,Henderson,V12_09 Take The 'a' Train,.mp3,37a019b3662d3fd1a08e87fd864ab484,471,15530960,0,5,-585807,39745,2010335,-585807,2050080,2050452,1464645,4100532,2050452,1464645,6150984,6290372,1464645,12441356,3020420,,,,,,,,,
44,Tangerine,Getz,V22_2-07 Tangerine,.mp3,f8d6522a5c1f65c7029023bd69604221,-814,16904430,0,4,-32920,93867,4908461,-32920,5002328,4842971,4797210,9845298,4817290,4797210,14662588,2041554,,,,,,,,,,,,
45,The Rainbow People,Gordon,V82_05 The Rainbow People,.mp3,254c33daa3b15102ed3902297a88b898,-48,16939336,0,4,-184294,38587,8515929,-184294,8554516,2810448,2604550,11364963,2767241,2604550,14132204,2636428,,,,,,,,,,,,
46,There Will Never Be Another You,Hayes,V44_08 There Will Never Be Another You,.mp3,ac9d668930eb6558fd03d062956e27ad,7794,16391080,0,4,-44719,98349,10580943,-44719,10679292,1685088,1640369,12364380,1685088,1640369,14049468,2341612,,,,,,,,,,,,
47,Three Little Words,Rollins,V51_07 Three Little Words,.mp3,ffb98f6fd813577e2b731dc0ee704c46,-5864,16764213,0,4,-74589,44217,4599186,-74603,4641389,1474592,1400040,6116032,1381948,1400040,7498032,9264270,,,,,,,,,,,,
48,UMMG,Henderson,V66_02 UMMG,.mp3,b54bb70a26ac917cfec07c49b293a6a0,223,17027202,0,6,-86636,41342,3622193,-296507,3663536,1701226,-505017,5364762,5636196,-505017,11000958,1862880,1357863,12863838,1862880,1357863,14726718,2300485,,,,,,
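Each row drives the splice arithmetic used in Compile_Backing.py and Remove_Repeats.py: an edit's source position in the Aebersold recording is Start - Ref - Adjust Samples, and its target position in the compiled backing track is Start (all values are sample counts at 44.1 kHz). An illustrative decoding of the first row (a sketch, not part of the commit):

# Sketch: decode one Edit_Data.csv row the same way the scripts do (values from track 1).
row = {'Track ID': '1', 'Adjust Samples': '3369', 'Num Edits': '1',
       'Edit 1 Ref': '-90843', 'Edit 1 Start': '33075', 'Edit 1 Duration': '14844387'}
adjust = int(row['Adjust Samples'])
for n in range(int(row['Num Edits'])):
    i = str(n + 1)
    ref = int(row['Edit ' + i + ' Ref'])
    start = int(row['Edit ' + i + ' Start'])
    dur = int(row['Edit ' + i + ' Duration'])
    src = start - ref - adjust   # read position in the source recording
    print('edit', i, ': source', src, '..', src + dur, '-> target', start, '..', start + dur)
    # edit 1 : source 120549 .. 14964936 -> target 33075 .. 14877462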
notebooks_and_scripts/Remove_Repeats.py
ADDED
@@ -0,0 +1,99 @@
from pathlib import Path
import librosa
import csv
import json
import numpy as np
import music21 as m21

CSV_FILENAME = "./Edit_Data.csv"
SR = 44100
CROSS_FADE_SAMPLES = int(SR / 10)

def load_data():
    # Load data from CSV file
    csv_path = Path(__file__).parent / Path(CSV_FILENAME)

    with open(csv_path) as csv_file:
        csv_reader = csv.DictReader(csv_file, delimiter=',')
        return list(csv_reader)

def load_syncpoints(score):
    json_path = Path(__file__).parent / Path("../syncpoints/") / score.with_suffix(".json").name
    return json.loads(json_path.read_text())

def get_nearest_measure(syncpoints, sample):
    for measure_no, time_s in syncpoints:
        if np.isclose(sample / SR, time_s, atol=0.6):
            return measure_no
    return None

def remove_measure_ranges(score, ranges):
    for part in score.parts:
        measures_to_remove = []
        for measure in part.getElementsByClass(m21.stream.Measure):
            if any(start <= measure.number <= end for start, end in ranges):
                measures_to_remove.append(measure)

        for measure in measures_to_remove:
            part.remove(measure)

    return score

musicxml_files = sorted((Path(__file__).parent / Path("../musicxml/")).glob("*.xml"))

for row in load_data():
    edits = []

    score = musicxml_files[int(row['Track ID']) - 1]
    num_edits = int(row['Num Edits'])
    adjust_samples = int(row['Adjust Samples'])


    for n in range(num_edits):
        edit_idx = str(n + 1)
        edit_ref = int(row['Edit ' + edit_idx + ' Ref'])
        edit_start = int(row['Edit ' + edit_idx + ' Start'])
        edit_dur = int(row['Edit ' + edit_idx + ' Duration'])

        # not needed for our purpose here
        # if (n > 0):
        #     edit_start -= CROSS_FADE_SAMPLES
        #     edit_dur += CROSS_FADE_SAMPLES

        source_start = edit_start - edit_ref - adjust_samples
        source_end = source_start + edit_dur

        edits.append([source_start, source_end, edit_start, (edit_start+edit_dur), row['Track ID']])

    if len(edits) > 1:
        seen_sections = []
        repeated_measures = []
        syncpoints = load_syncpoints(score)
        for edit in edits:
            source_section_times = (edit[0], edit[1])
            if source_section_times in seen_sections:
                target_section_times = (edit[2], edit[3])

                # lookup measure number via syncpoints file
                measure_from = get_nearest_measure(syncpoints, edit[2])
                measure_to = get_nearest_measure(syncpoints, edit[3])

                # print duration in minutes that were repeated
                print(f"{score.name} {measure_from} to {measure_to} ({(edit[3] - edit[2]) / SR / 60:.2f} minutes)")
                # collect measure ranges
                repeated_measures.append((measure_from, measure_to))

            seen_sections.append(source_section_times)

        m21_score = m21.converter.parse(score)
        remove_measure_ranges(m21_score, repeated_measures)
    else:
        m21_score = m21.converter.parse(score)

    # make a new folder for the scores
    new_folder = Path(__file__).parent / Path("../musicxml_no_repeats/")
    new_folder.mkdir(parents=True, exist_ok=True)

    # save new score
    new_score_path = new_folder / Path(f"{row['Track ID']}.xml")
    m21_score.write('musicxml', fp=new_score_path)
notebooks_and_scripts/analysis.ipynb
ADDED
The diff for this file is too large to render. See raw diff.

notebooks_and_scripts/note_data.csv
ADDED
The diff for this file is too large to render. See raw diff.