plot_seq_1.py : Many changes and updates.
parent e32f224d12
commit f36e7c8a64

plot_seq_1.py (166 lines changed)
@@ -1,6 +1,6 @@
##| Copyright: (C) 2019-2020 Kevin Larke <contact AT larke DOT org>
##| License: GNU GPL version 3.0 or above. See the accompanying LICENSE file.
import os, sys,json
import os, sys,json,math
import matplotlib.pyplot as plt
import numpy as np
from common import parse_yaml_cfg
@@ -458,11 +458,21 @@ def _plot_us_db_takes( inDir, cfg, pitchL, takeIdL, printDir="", printFn="" ):

usL, dbL, durMsL, _, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, analysisArgsD['rmsAnalysisArgs'], takeId=takeId )

ax.plot(usL,dbL, marker='.',label="%i:%i %s %s" % (midi_pitch,takeId,keyMapD[midi_pitch]['class'],keyMapD[midi_pitch]['type']))

# for i,(x,y) in enumerate(zip(usL,dbL)):
#     ax.text(x,y,str(i))
for i,(x,y) in enumerate(zip(usL,dbL)):
    ax.text(x,y,str(i))

f_usL,f_dbL = filter_us_db(usL,dbL)

ax.plot(f_usL,f_dbL, marker='.')

elbow_us,elbow_db = elbow.find_elbow(usL,dbL)
ax.plot([elbow_us],[elbow_db],marker='*',markersize=12,color='red',linestyle='None')

elb_idx = nearest_sample_point( dbL, usL, elbow_db, elbow_us )

if printDir:
    plt.savefig(os.path.join(printDir,printFn),format="png")
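Note: `elbow.find_elbow` comes from the project's local `elbow` module, which is not part of this diff. For readers without that module, a minimal self-contained sketch of one common elbow heuristic (the sample farthest from the chord joining the first and last points) is given below; the helper name and the method are illustrative assumptions, not the module's actual implementation.

    import numpy as np

    def find_elbow_sketch(usL, dbL):
        # Illustrative only: pick the (us, db) sample farthest from the straight
        # line joining the first and last samples. Not the project's elbow module.
        p = np.column_stack([usL, dbL]).astype(float)
        a, b = p[0], p[-1]
        ab = (b - a) / np.linalg.norm(b - a)
        # perpendicular distance of every sample from the chord a->b (2-D cross product)
        d = np.abs(ab[0] * (p[:, 1] - a[1]) - ab[1] * (p[:, 0] - a[0]))
        i = int(np.argmax(d))
        return usL[i], dbL[i]

    print(find_elbow_sketch([0, 1, 2, 3, 4], [0.0, 2.5, 3.5, 3.8, 4.0]))  # -> (1, 2.5)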
@@ -470,6 +480,22 @@ def _plot_us_db_takes( inDir, cfg, pitchL, takeIdL, printDir="", printFn="" ):
plt.legend()
plt.show()


def get_pitches_and_takes( inDir ):

    pitchD = {}
    inDirL = os.listdir( inDir )

    for pitch in inDirL:
        path = os.path.join( inDir, pitch )
        takeIdL = os.listdir( path )

        takeIdL = sorted([ int(takeId) for takeId in takeIdL ])
        takeIdL = [ str(x) for x in takeIdL ]
        pitchD[int(pitch)] = takeIdL

    return pitchD


def plot_us_db_takes( inDir, cfg, pitchL, printDir=""):

    takeIdL = None
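The new `get_pitches_and_takes` helper assumes the measurement layout `inDir/<midi_pitch>/<takeId>/` and sorts take ids numerically. A self-contained check of that behavior on a throwaway directory tree (the pitch and take numbers below are invented) might look like:

    import os, tempfile

    def get_pitches_and_takes(inDir):
        # copy of the helper added above: map each pitch folder to its
        # numerically sorted list of take-id folder names
        pitchD = {}
        for pitch in os.listdir(inDir):
            takeIdL = sorted(int(t) for t in os.listdir(os.path.join(inDir, pitch)))
            pitchD[int(pitch)] = [str(t) for t in takeIdL]
        return pitchD

    with tempfile.TemporaryDirectory() as d:
        for pitch in ("60", "61"):
            for take in ("0", "1", "10", "2"):
                os.makedirs(os.path.join(d, pitch, take))
        # e.g. {60: ['0', '1', '2', '10'], 61: ['0', '1', '2', '10']} (key order may vary)
        print(get_pitches_and_takes(d))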
@@ -480,14 +506,11 @@ def plot_us_db_takes( inDir, cfg, pitchL, printDir=""):

def plot_us_db_takes_last( inDir, cfg, pitchL, printDir ):

    pitchD = get_pitches_and_takes(inDir)

    takeIdL = []
    for pitch in pitchL:

        inDirL = os.listdir( os.path.join(inDir,str(pitch)))

        inDirL = sorted(inDirL)

        takeIdL.append( int(inDirL[-1]) )
        takeIdL.append( int(pitchD[pitch][-1]) )

    return _plot_us_db_takes( inDir, cfg, pitchL, takeIdL, printDir, "us_db_takes_last.png")
@@ -535,17 +558,21 @@ def plot_all_noise_curves( inDir, cfg, pitchL=None ):
plt.legend()
plt.show()


def nearest_sample_point( dbL, usL, db0, us0 ):
    xL = np.array([ abs(us-us0) for db,us in zip(dbL,usL) ])

    return np.argmin(xL)


def plot_min_max_2_db( inDir, cfg, pitchL=None, takeId=2, printDir=None ):

    takeIdArg = takeId
    pitchTakeD = get_pitches_and_takes(inDir)

    pitchFolderL = os.listdir(inDir)

    print(pitchL)

    if pitchL is None:
        pitchL = [ int( int(pitchFolder) ) for pitchFolder in pitchFolderL ]

    print(pitchL)

    okL = []
    outPitchL = []
    minDbL = []
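As written, `nearest_sample_point` measures distance along the us axis only; the db arguments are accepted but do not affect the result. A quick illustration with invented numbers:

    import numpy as np

    def nearest_sample_point(dbL, usL, db0, us0):
        # copy of the helper above: index of the sample whose us value is
        # closest to us0 (dbL and db0 are currently unused by the metric)
        xL = np.array([abs(us - us0) for db, us in zip(dbL, usL)])
        return int(np.argmin(xL))

    print(nearest_sample_point([5.0, 9.0, 14.0], [1000, 2000, 3000], 10.0, 2200))  # -> 1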
@@ -553,9 +580,11 @@ def plot_min_max_2_db( inDir, cfg, pitchL=None, takeId=2, printDir=None ):

for midi_pitch in pitchL:

    print(midi_pitch)
    takeId = None
    if takeIdArg == -1:
        takeId = pitchTakeD[midi_pitch][-1]

    usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'] )
    usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'], takeId )

    okL.append(False)
@@ -571,6 +600,9 @@ def plot_min_max_2_db( inDir, cfg, pitchL=None, takeId=2, printDir=None ):
minDbL.append(elbow_db)
outPitchL.append(midi_pitch)

smp_idx = nearest_sample_point( dbL, usL, elbow_db, elbow_us )

print(" %i:[-1,%i], " % (midi_pitch,smp_idx))

p_dL = sorted( zip(outPitchL,minDbL,maxDbL,okL), key=lambda x: x[0] )
@@ -593,7 +625,9 @@ def plot_min_max_2_db( inDir, cfg, pitchL=None, takeId=2, printDir=None ):

plt.show()

def plot_min_db_manual( inDir, cfg, printDir=None ):
def plot_min_db_manual( inDir, cfg, printDir=None, absMaxDb=27, absMinDb=3 ):

    pitchTakeD = get_pitches_and_takes(inDir)

    pitchL = list(cfg.manualMinD.keys())
@@ -606,29 +640,39 @@ def plot_min_db_manual( inDir, cfg, printDir=None ):

for midi_pitch in pitchL:

    if cfg.manualLastFl:
        manual_take_id = pitchTakeD[midi_pitch][-1]
        takeId = manual_take_id
    else:
        manual_take_id = cfg.manualMinD[midi_pitch][0]
        takeId = None

    manual_sample_idx = cfg.manualMinD[midi_pitch][1]

    usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'] )
    usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'], takeId )

    okL.append(False)

    if takeId is None:
        takeId = len(set(takeIdL))-1

    # most pitches have 3 sample takes that do not
    if len(set(takeIdL)) == 3 and manual_take_id == takeId:
        okL[-1] = True
    else:
        okL[-1] = True

    # maxDb is computed on all takes (not just the specified take)
    db_maxL = sorted(dbL)
    max_db = np.mean(db_maxL[-4:])
    max_db = min(absMaxDb,np.mean(db_maxL[-4:]))
    maxDbL.append( max_db )

    # get the us,db values for the specified take
    usL,dbL = zip(*[(usL[i],dbL[i]) for i in range(len(usL)) if takeIdL[i]==manual_take_id ])

    # most pitches have 3 sample takes that do not
    if len(set(takeIdL)) == 3 and manual_take_id == takeId:
        okL[-1] = True

    # min db from the sample index manually specified in cfg
    manualMinDb = dbL[ manual_sample_idx ]
    manualMinDb = max(absMinDb,dbL[ manual_sample_idx ])

    minDbL.append( manualMinDb )
    outPitchL.append(midi_pitch)
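The new `absMaxDb`/`absMinDb` parameters clamp the per-pitch range: the maximum is the mean of the four loudest samples capped at `absMaxDb`, and the manually selected minimum is floored at `absMinDb`. A small numeric sketch of that clamping with invented values (the real code takes the minimum from `dbL[manual_sample_idx]` of the selected take):

    import numpy as np

    absMaxDb, absMinDb = 27, 3                     # defaults added in this commit
    dbL = [2.0, 11.0, 25.0, 26.0, 28.0, 30.0]      # invented per-pitch measurements
    manual_sample_idx = 0                          # invented manual selection

    db_maxL = sorted(dbL)
    max_db = min(absMaxDb, np.mean(db_maxL[-4:]))         # mean(25,26,28,30) = 27.25 -> capped to 27
    manualMinDb = max(absMinDb, dbL[manual_sample_idx])   # 2.0 -> floored to 3
    print(max_db, manualMinDb)                            # -> 27 3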
@@ -642,8 +686,6 @@ def plot_min_db_manual( inDir, cfg, printDir=None ):

# Form the complete set of min/max db levels for each pitch by interpolating the
# db values between the manually selected anchor points.
interpMinDbL = np.interp( pitchL, cfg.manualAnchorPitchMinDbL, anchorMinDbL )
@@ -861,27 +903,67 @@ def report_take_ids( inDir ):
if len(takeDirL) == 0:
    print(pitch," directory empty")
else:
    with open( os.path.join(pitchDir,'0','seq.json'), "rb") as f:

    fn = os.path.join(pitchDir,'0','seq.json')

    if not os.path.isfile(fn):
        print("Missing sequence file:",fn)
    else:
        with open( fn, "rb") as f:
            r = json.load(f)

        if len(r['eventTimeL']) != 81:
            print(pitch," ",len(r['eventTimeL']))

if len(takeDirL) != 3:
    print("***",pitch,len(takeDirL))


def filter_us_db( us0L, db0L ):

    us1L = [us0L[-1]]
    db1L = [db0L[-1]]
    dDb = 0
    lastIdx = 0
    for i,(us,db) in enumerate(zip( us0L[::-1],db0L[::-1])):
        db1 = db1L[-1]
        if db < db1 and db1-db >= dDb/2:
            dDb = db1 - db
            us1L.append(us)
            db1L.append(db)
            lastIdx = i

    lastIdx = len(us0L) - lastIdx - 1

    usL = [ us0L[lastIdx] ]
    dbL = [ db0L[lastIdx] ]
    dDb = 0
    for us,db in zip(us0L[lastIdx::],db0L[lastIdx::]):
        db1 = dbL[-1]
        if db > db1:
            dDb = db-db1
            usL.append(us)
            dbL.append(db)

    return usL,dbL


def cache_us_db( inDir, cfg, outFn ):

    pitchTakeD = get_pitches_and_takes(inDir)

    pitch_usDbD = {}
    pitchDirL = os.listdir(inDir)

    for pitch in pitchDirL:
    for pitch,takeIdL in pitchTakeD.items():

        pitch = int(pitch)
        takeId = takeIdL[-1]

        print(pitch)

        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, pitch, cfg.analysisArgs['rmsAnalysisArgs'] )
        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, pitch, cfg.analysisArgs['rmsAnalysisArgs'], takeId )

        pitch_usDbD[pitch] = { 'usL':usL, 'dbL':dbL, 'durMsL':durMsL, 'takeIdL':takeIdL, 'holdDutyPctL': holdDutyPctL }
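`filter_us_db` makes two passes over the measurements: a reverse pass from the last sample back toward a stable low point (keeping drops that are at least half the previous drop), then a forward pass from that point keeping only samples where db increases, so the returned curve rises monotonically. A self-contained run on invented data, with the function logic reproduced from the hunk above (lightly condensed):

    def filter_us_db(us0L, db0L):
        # reverse pass: walk back from the last sample, accepting successively
        # quieter points and remembering the index of the quietest accepted one
        us1L, db1L, dDb, lastIdx = [us0L[-1]], [db0L[-1]], 0, 0
        for i, (us, db) in enumerate(zip(us0L[::-1], db0L[::-1])):
            db1 = db1L[-1]
            if db < db1 and db1 - db >= dDb / 2:
                dDb = db1 - db
                us1L.append(us)
                db1L.append(db)
                lastIdx = i

        lastIdx = len(us0L) - lastIdx - 1

        # forward pass: from that point on, keep only samples where db increases
        usL, dbL, dDb = [us0L[lastIdx]], [db0L[lastIdx]], 0
        for us, db in zip(us0L[lastIdx:], db0L[lastIdx:]):
            db1 = dbL[-1]
            if db > db1:
                dDb = db - db1
                usL.append(us)
                dbL.append(db)
        return usL, dbL

    print(filter_us_db([100, 200, 300, 400, 500, 600], [5, 9, 7, 12, 15, 14]))
    # -> ([300, 400, 500], [7, 12, 15])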
@@ -897,10 +979,12 @@ def gen_vel_map( inDir, cfg, minMaxDbFn, dynLevelN, cacheFn ):

pitchDirL = os.listdir(inDir)

# pitchUsDbD = { pitch:
with open(cacheFn,"r") as f:
    pitchUsDbD = json.load(f)

# form minMaxDb = { pitch:(minDb,maxDb) }
with open("minInterpDb.json","r") as f:
    r = json.load(f)
    minMaxDbD = { pitch:(minDb,maxDb) for pitch,minDb,maxDb in zip(r['pitchL'],r['minDbL'],r['maxDbL']) }
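`gen_vel_map` now reads the per-pitch measurements from the JSON cache written by `cache_us_db` rather than recomputing them; note that JSON turns the integer pitch keys into strings, which is why the later loop indexes with `pitchUsDbD[str(pitch)]`. A minimal sketch of the round-trip with an invented single-pitch record:

    import json, os, tempfile

    # invented single-pitch record using the same field names cache_us_db stores
    record = {60: {'usL': [1000, 2000], 'dbL': [6.0, 12.0],
                   'durMsL': [500, 500], 'takeIdL': [2, 2], 'holdDutyPctL': [50, 50]}}

    cacheFn = os.path.join(tempfile.mkdtemp(), "cache_us_db.json")
    with open(cacheFn, "w") as f:
        json.dump(record, f)              # integer keys are serialized as "60"

    with open(cacheFn, "r") as f:         # same pattern as the new gen_vel_map code
        pitchUsDbD = json.load(f)

    pitchL = sorted(int(pitch) for pitch in pitchUsDbD.keys())
    print(pitchL, pitchUsDbD[str(pitchL[0])]['dbL'])   # -> [60] [6.0, 12.0]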
@@ -908,17 +992,31 @@ def gen_vel_map( inDir, cfg, minMaxDbFn, dynLevelN, cacheFn ):

pitchL = sorted( [ int(pitch) for pitch in pitchUsDbD.keys()] )

# for each pitch
for pitch in pitchL:

    # get the us/db map for this
    d = pitchUsDbD[str(pitch)]

    usL = d['usL']
    dbL = np.array(d['dbL'])

    usL, dbL = filter_us_db( d['usL'], d['dbL'] )
    #usL = d['usL']
    #dbL = np.array(d['dbL'])
    dbL = np.array(dbL)

    velMapD[pitch] = []

    for i in range(dynLevelN+1):
        maxDb = minMaxDbD[pitch][1]
        minDb = minMaxDbD[pitch][0]

        db = minMaxDbD[pitch][0] + (i * (minMaxDbD[pitch][1] - minMaxDbD[pitch][0])/ dynLevelN)
    dynLevelN = len(cfg.velTableDbL)

    # for each dynamic level
    for i in range(dynLevelN):

        #db = minDb + (i * (maxDb - minDb)/ dynLevelN)
        db = cfg.velTableDbL[i]

        usIdx = np.argmin( np.abs(dbL - db) )
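The dynamic-level loop now takes its per-level dB targets from `cfg.velTableDbL` instead of interpolating between the per-pitch min and max, and maps each target onto the nearest measured sample with `np.argmin(np.abs(dbL - db))`. A minimal numeric sketch of that lookup (all values invented):

    import numpy as np

    velTableDbL = [6.0, 12.0, 18.0, 24.0]          # invented stand-in for cfg.velTableDbL
    usL = [1000, 2000, 3000, 4000, 5000]           # invented pulse widths (us)
    dbL = np.array([5.0, 10.0, 16.0, 21.0, 26.0])  # invented measured levels (dB)

    velMapL = []
    for db in velTableDbL:
        usIdx = int(np.argmin(np.abs(dbL - db)))   # nearest measured sample to the target
        velMapL.append((usL[usIdx], float(dbL[usIdx])))

    print(velMapL)  # -> [(1000, 5.0), (2000, 10.0), (3000, 16.0), (5000, 26.0)]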
@@ -946,7 +1044,7 @@ def gen_vel_map( inDir, cfg, minMaxDbFn, dynLevelN, cacheFn ):

if __name__ == "__main__":

    printDir = None #os.path.expanduser( "~/src/picadae_ac_3/doc")
    printDir = os.path.expanduser("~/temp") # os.path.expanduser( "~/src/picadae_ac_3/doc")
    cfgFn = sys.argv[1]
    inDir = sys.argv[2]
    mode = sys.argv[3]
@@ -981,7 +1079,7 @@ if __name__ == "__main__":
elif mode == 'manual_db':
    plot_min_db_manual( inDir, cfg, printDir=printDir )
elif mode == 'gen_vel_map':
    gen_vel_map( inDir, cfg, "minInterpDb.json", 9, "cache_us_db.json" )
    gen_vel_map( inDir, cfg, "minInterpDb.json", 12, "cache_us_db.json" )
elif mode == 'cache_us_db':
    cache_us_db( inDir, cfg, "cache_us_db.json")
else: