2020-10-26 18:57:43 +00:00
|
|
|
##| Copyright: (C) 2019-2020 Kevin Larke <contact AT larke DOT org>
|
|
|
|
##| License: GNU GPL version 3.0 or above. See the accompanying LICENSE file.
|
2019-09-01 14:54:09 +00:00
|
|
|
import os, sys
import json

import matplotlib.pyplot as plt
import numpy as np
from scipy.io import wavfile

from common import parse_yaml_cfg
from rms_analysis import rms_analysis_main
from rms_analysis import select_first_stable_note_by_delta_db
from rms_analysis import select_first_stable_note_by_dur
from rms_analysis import samples_to_linear_residual
|
2019-08-20 01:18:57 +00:00
|
|
|
|
|
|
|
def is_nanV( xV ):
    """Return True if any element of the array 'xV' is NaN.

    Args:
      xV: a numpy array of floats.

    Returns:
      bool: True when at least one element is NaN, otherwise False
      (an empty array yields False).
    """
    # np.isnan() tests every element in one vectorized pass; this replaces
    # the original Python-level loop over xV.shape[0] with identical results.
    return bool( np.isnan(xV).any() )
|
2019-08-21 00:45:53 +00:00
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
def _find_max_take_id( inDir ):
|
2019-08-21 00:45:53 +00:00
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
id = 0
|
|
|
|
while os.path.isdir( os.path.join(inDir, "%i" % id) ):
|
|
|
|
id += 1
|
2019-08-21 00:45:53 +00:00
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
if id > 0:
|
|
|
|
id -= 1
|
|
|
|
|
|
|
|
return id
|
2019-12-09 22:37:24 +00:00
|
|
|
|
2019-08-21 00:45:53 +00:00
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
def form_final_pulse_list( inDir, midi_pitch, analysisArgsD, take_id=None ):
    """Build a 128 entry velocity table for one pitch by merging all takes.

    Args:
      inDir:         directory holding one numbered sub-directory per take.
      midi_pitch:    MIDI pitch of the note under analysis.
      analysisArgsD: analysis parameter dict; uses 'rmsAnalysisArgs',
                     'minSampleDistUs', 'minAttkDb' and 'maxDbOffset'.
      take_id:       currently unused.

    Returns:
      (pulseUsL, pulseDbL, holdDutyPctL): for MIDI velocity i, pulseUsL[i]
      is the interpolated attack pulse length (usec) and pulseDbL[i] the
      expected peak level (dB); holdDutyPctL comes from the last analyzed take.
    """
    # append the midi pitch to the input directory
    #inDir = os.path.join( inDir, "%i" % (midi_pitch))

    dirL = os.listdir(inDir)

    pkL = []

    # for each take in this directory
    for idir in dirL:

        take_number = int(idir)

        # skip takes that have no sequence meta-data file
        if not os.path.isfile(os.path.join( inDir,idir, "seq.json")):
            continue

        # analyze this takes audio and locate the note peaks
        r = rms_analysis_main( os.path.join(inDir,idir), midi_pitch, **analysisArgsD['rmsAnalysisArgs'] )

        # store the peaks in pkL[ (db,us) ]
        for db,us in zip(r.pkDbL,r.pkUsL):
            pkL.append( (db,us) )

    # sort the peaks on increasing attack pulse microseconds
    pkL = sorted( pkL, key= lambda x: x[1] )

    # merge sample points separated by less than 'minSampleDistUs' microseconds
    pkL = merge_close_sample_points( pkL, analysisArgsD['minSampleDistUs'] )

    # split pkL into parallel level/time lists
    pkDbL,pkUsL = tuple(zip(*pkL))

    #-------------------------------------------

    # locate the first and last note
    min_pk_idx, max_pk_idx = find_min_max_peak_index( pkDbL, analysisArgsD['minAttkDb'], analysisArgsD['maxDbOffset'] )

    # dB range spanned by the usable notes
    db1 = pkDbL[ max_pk_idx ]
    db0 = pkDbL[ min_pk_idx ]

    pulseUsL = []
    pulseDbL = []
    multValL = []

    # linearly map each of the 128 MIDI velocities onto the [db0,db1] range
    for out_idx in range(128):

        # calc the target volume
        db = db0 + (out_idx * (db1-db0)/127.0)

        multi_value_count = 0

        # look for the target between each of the sampled points
        for i in range(1,len(pkDbL)):

            # if the target volume is between these two sample points
            if pkDbL[i-1] <= db and db < pkDbL[i]:

                # if the target has not already been located
                if len(pulseUsL) == out_idx:

                    # interpolate the pulse time from between the sampled points
                    frac = (db - pkDbL[i-1]) / (pkDbL[i] - pkDbL[i-1])
                    us = pkUsL[i-1] + frac * (pkUsL[i] - pkUsL[i-1])
                    db = pkDbL[i-1] + frac * (pkDbL[i] - pkDbL[i-1])
                    pulseUsL.append(us)
                    pulseDbL.append(db)

                else:
                    # this target db value was found between multiple sampled points
                    # therefore the sampled volume function is not monotonic
                    multi_value_count += 1

        if multi_value_count > 0:
            multValL.append((out_idx,multi_value_count))

    if len(multValL) > 0:
        # print("Multi-value pulse locations were found during velocity table formation: ",multValL)
        pass

    # NOTE(review): 'r' is the record of whichever take was analyzed last;
    # if no take contained a seq.json this raises NameError - confirm that
    # callers guarantee at least one complete take.
    return pulseUsL,pulseDbL,r.holdDutyPctL
|
2019-08-20 01:18:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
def merge_close_sample_points( pkDbUsL, minSampleDistanceUs ):
    """Average-merge adjacent (db,us) points closer than 'minSampleDistanceUs'.

    The list is modified in place and also returned.  Each scan merges at
    most the first close pair found (replacing the pair with its mean and
    deleting the second point) and then restarts; scanning continues until a
    full pass reaches the end of the list.

    Args:
      pkDbUsL:             list of (db, usec) tuples sorted on usec.
      minSampleDistanceUs: minimum allowed spacing between points (usec).

    Returns:
      The same (mutated) list.
    """
    # mean inter-sample spacing before merging, for the report printed below
    avg0Us = np.mean(np.diff([ x[1] for x in pkDbUsL ]))
    n0 = len(pkDbUsL)

    while True and n0>0:
        # (us0,db0) track the previous point during the scan
        us0 = None
        db0 = None
        for i,(db,us) in enumerate(pkDbUsL):
            if i > 0 and us - us0 < minSampleDistanceUs:
                # too close: replace the pair with its average and rescan
                us1 = (us0 + us)/2
                db1 = (db0 + db)/2
                pkDbUsL[i-1] = (db1,us1)
                del pkDbUsL[i]
                break
            else:
                us0 = us
                db0 = db

        # the scan reached the last element - nothing left to merge
        # NOTE(review): after a merge this test can also be true (when the
        # merge happened at the second-to-last element), ending the outer
        # loop early - confirm this early exit is intended.
        if i+1 == len(pkDbUsL):
            break

    # mean inter-sample spacing after merging
    avg1Us = np.mean(np.diff([ x[1] for x in pkDbUsL ]))

    print("%i sample points deleted by merging close points." % (n0 - len(pkDbUsL)))
    print("Mean time between samples - before:%f after:%f " % (avg0Us,avg1Us))
    print("Min time between samples: %i " % (np.min(np.diff([x[1] for x in pkDbUsL]))))

    return pkDbUsL
|
2019-08-20 01:18:57 +00:00
|
|
|
|
|
|
|
|
2019-11-09 16:13:34 +00:00
|
|
|
def _calc_resample_points( dPkDb, pkUs0, pkUs1, samplePerDb, minSampleDistUs ):
|
2019-08-20 01:18:57 +00:00
|
|
|
|
2019-11-09 16:13:34 +00:00
|
|
|
dPkUs = pkUs1 - pkUs0
|
|
|
|
sampleCnt = max(int(round(abs(dPkDb) * samplePerDb)),samplePerDb)
|
|
|
|
dUs = max(int(round(dPkUs/sampleCnt)),minSampleDistUs)
|
|
|
|
sampleCnt = int(round(dPkUs/dUs))
|
|
|
|
dUs = int(round(dPkUs/sampleCnt))
|
|
|
|
usL = [ pkUs0 + dUs*j for j in range(sampleCnt+1)]
|
|
|
|
|
|
|
|
return usL
|
|
|
|
|
|
|
|
def calc_resample_ranges( pkDbL, pkUsL, min_pk_idx, max_pk_idx, maxDeltaDb, samplePerDb, minSampleDistUs ):
    """Determine the set of attack pulse times (usec) that must be resampled.

    The range between two adjacent peaks is marked for resampling when the
    dB step is too large (> maxDeltaDb), non-increasing (<= 0), or when the
    current peak sits below the loudest peak seen so far.

    Args:
      pkDbL, pkUsL:           parallel peak level (dB) / pulse time (usec) lists.
      min_pk_idx, max_pk_idx: index range of usable peaks (inclusive).
      maxDeltaDb:             largest acceptable dB step between peaks.
      samplePerDb:            resample density (points per dB).
      minSampleDistUs:        minimum spacing between resample points (usec).

    Returns:
      A set of microsecond sample times, or None when min_pk_idx==0
      (no silent leading notes were found).
    """
    if min_pk_idx == 0:
        print("No silent notes were generated. Decrease the minimum peak level or the hold voltage.")
        return None

    resampleUsSet = set()
    refPkDb = pkDbL[min_pk_idx]   # loudest peak level seen so far

    #pkDbL = pkDbL[ pkIdxL ]

    for i in range( min_pk_idx, max_pk_idx+1 ):

        # dB step from the previous peak
        d = pkDbL[i] - pkDbL[i-1]

        usL = []

        # resample this range when the step is too large, non-increasing,
        # or the peak is below the running maximum
        if d > maxDeltaDb or d <= 0 or pkDbL[i] < refPkDb:

            usL = _calc_resample_points( d, pkUsL[i-1], pkUsL[i], samplePerDb, minSampleDistUs )

            # on a falling step also resample using the slope to the next peak
            if d <= 0 and i + 1 < len(pkDbL):
                d = pkDbL[i+1] - pkDbL[i]
                usL += _calc_resample_points( d, pkUsL[i-1], pkUsL[i], samplePerDb, minSampleDistUs )

        # track the running maximum peak level
        if pkDbL[i] > refPkDb:
            refPkDb = pkDbL[i]

        if usL:
            resampleUsSet = resampleUsSet.union( usL )

    return resampleUsSet
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def form_resample_pulse_time_list( inDir, analysisArgsD ):
    """Merge all available data from previous takes to form a new list of
    pulse times to sample.

    Args:
      inDir:         directory <...>/<midi_pitch> holding one numbered
                     sub-directory per take.
      analysisArgsD: analysis parameter dict; uses 'rmsAnalysisArgs',
                     'minSampleDistUs', 'minAttkDb', 'maxDbOffset',
                     'maxDeltaDb' and 'samplesPerDb'.

    Returns:
      (resampleUsL, pkDbL, pkUsL): sorted resample times (usec) plus the
      merged peak level/time lists from all takes.
    """
    # the last folder is always the midi pitch of the note under analysis
    midi_pitch = int( inDir.split("/")[-1] )

    dirL = os.listdir(inDir)

    pkL = []

    # for each take in this directory
    for idir in dirL:

        take_number = int(idir)

        # analyze this takes audio and locate the note peaks
        r = rms_analysis_main( os.path.join(inDir,idir), midi_pitch, **analysisArgsD['rmsAnalysisArgs'] )

        # store the peaks in pkL[ (db,us) ]
        for db,us in zip(r.pkDbL,r.pkUsL):
            pkL.append( (db,us) )

    # sort the peaks on increasing attack pulse microseconds
    pkL = sorted( pkL, key= lambda x: x[1] )

    # merge sample points separated by less than 'minSampleDistUs' microseconds
    pkL = merge_close_sample_points( pkL, analysisArgsD['minSampleDistUs'] )

    # split pkL into parallel level/time lists
    pkDbL,pkUsL = tuple(zip(*pkL))

    # locate the first and last note
    min_pk_idx, max_pk_idx = find_min_max_peak_index( pkDbL, analysisArgsD['minAttkDb'], analysisArgsD['maxDbOffset'] )

    # estimate the microsecond locations to resample
    # NOTE(review): calc_resample_ranges() returns None when min_pk_idx==0,
    # which would make the sorted() below raise - confirm callers avoid this.
    resampleUsSet = calc_resample_ranges( pkDbL, pkUsL, min_pk_idx, max_pk_idx, analysisArgsD['maxDeltaDb'], analysisArgsD['samplesPerDb'], analysisArgsD['minSampleDistUs'] )

    resampleUsL = sorted( list(resampleUsSet) )

    #print(resampleUsL)

    return resampleUsL, pkDbL, pkUsL
|
2019-08-20 01:18:57 +00:00
|
|
|
|
2019-12-09 22:37:24 +00:00
|
|
|
def plot_curve( ax, pulseUsL, rmsDbV ):
    """Overlay a 5th order polynomial fit of (pulseUsL, rmsDbV) on 'ax' in red."""
    # least-squares fit, then evaluate the polynomial at the original x values
    polyF = np.poly1d( np.polyfit( pulseUsL, rmsDbV, 5 ) )
    ax.plot( pulseUsL, polyF(pulseUsL), color='red')
|
|
|
|
|
|
|
|
def plot_resample_pulse_times_0( inDir, analysisArgsD ):
    """Plot the merged pulse/dB curve with proposed resample times and the
    velocity-table points for the pitch referred to by 'inDir'.

    A vertical line marks each proposed resample time; the velocity table
    entries are overlaid as unconnected dots.
    """
    newPulseUsL, rmsDbV, pulseUsL = form_resample_pulse_time_list( inDir, analysisArgsD )

    # the last folder name is the midi pitch of the note under analysis
    midi_pitch = int( inDir.split("/")[-1] )

    velTblUsL,velTblDbL,_ = form_final_pulse_list( inDir, midi_pitch, analysisArgsD, take_id=None )

    fig,ax = plt.subplots()

    # measured pulse-length vs. level curve
    ax.plot(pulseUsL,rmsDbV,marker='.' )

    # one vertical line per proposed resample time
    for us in newPulseUsL:
        ax.axvline( x = us )

    # velocity table entries as isolated points
    ax.plot(velTblUsL,velTblDbL,marker='.',linestyle='None')

    plt.show()
|
2019-12-09 22:37:24 +00:00
|
|
|
|
|
|
|
def plot_resample_pulse_times( inDir, analysisArgsD ):
    """Plot the merged take data together with a linear-fit residual score.

    Top axis: the measured pulse/dB curve, the curve plus the raw residual,
    and the curve +/- the squared residual.  Bottom axis: the residual as a
    percentage of the measured level.
    """
    newPulseUsL, rmsDbV, pulseUsL = form_resample_pulse_time_list( inDir, analysisArgsD )

    # the last folder name is the midi pitch of the note under analysis
    midi_pitch = int( inDir.split("/")[-1] )

    # NOTE(review): the velocity table is computed but only used by the
    # commented-out plot_curve() call below.
    velTblUsL,velTblDbL,_ = form_final_pulse_list( inDir, midi_pitch, analysisArgsD, take_id=None )

    fig,axL = plt.subplots(2,1,gridspec_kw={'height_ratios': [2, 1]})

    # measured pulse-length vs. level curve
    axL[0].plot(pulseUsL,rmsDbV,marker='.' )

    #plot_curve( ax, velTblUsL,velTblDbL)

    # residual of each sample against a local linear fit
    scoreV = samples_to_linear_residual( pulseUsL, rmsDbV)

    axL[0].plot(pulseUsL,rmsDbV + scoreV)
    axL[0].plot(pulseUsL,rmsDbV + np.power(scoreV,2.0))
    axL[0].plot(pulseUsL,rmsDbV - np.power(scoreV,2.0))

    # residual expressed as a percentage of the measured level
    axL[1].axhline(0.0,color='black')
    axL[1].axhline(1.0,color='black')
    axL[1].plot(pulseUsL,np.abs(scoreV * 100.0 / rmsDbV))
    axL[1].set_ylim((0.0,50))

    plt.show()
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
def find_min_max_peak_index( pkDbL, minDb, maxDbOffs ):
    """Locate the quietest usable peak and the loudest peak in pkDbL.

    Starting from the loudest peak the scan walks backward and stops at the
    first peak whose level drops below 'minDb'; the peak just after it is
    taken as the minimum.  ('maxDbOffs' is currently unused - the
    offset-based max selection is disabled.)

    Returns:
      (min_i, max_i): indexes of the min and max peaks in pkDbL.
    """
    max_i = np.argmax( pkDbL )
    min_i = max_i

    # walk backward from the loudest peak toward the first peak
    i = max_i
    while i > 0:
        # a peak below minDb ends the scan; the previous one is the minimum
        if pkDbL[i] < minDb:
            break
        min_i = i
        i -= 1

    # degenerate scan - fall back to the full peak range
    if min_i >= max_i:
        min_i = 0
        max_i = len(pkDbL)-1

    if min_i == 0:
        print("No silent notes were generated. Decrease the minimum peak level or the hold voltage.")

    return min_i, max_i
|
|
|
|
|
|
|
|
def find_skip_peaks( rmsV, pkIdxL, min_pk_idx, max_pk_idx ):
    """Find peaks from longer attack pulses that are quieter than an earlier peak.

    Walks the peak levels rmsV[pkIdxL] from min_pk_idx+1 to max_pk_idx and
    collects every index whose level fails to exceed the running maximum.
    These mark non-monotonic (degenerate) portions of the pulse/db curve
    which must be skipped during velocity table formation.
    """
    pkDbV = rmsV[pkIdxL]
    runningMaxDb = pkDbV[min_pk_idx]
    skipIdxL = []

    for idx in range( min_pk_idx+1, max_pk_idx+1 ):
        db = pkDbV[idx]
        if db > runningMaxDb:
            runningMaxDb = db        # curve still rising - keep this peak
        else:
            skipIdxL.append(idx)     # fell below an earlier peak - skip it

    return skipIdxL
|
|
|
|
|
|
|
|
def find_out_of_range_peaks( rmsV, pkIdxL, min_pk_idx, max_pk_idx, maxDeltaDb ):
    """Locate peaks separated from the previous peak by more than maxDeltaDb
    (or by any decrease).

    Such gaps mean the pulse/db curve jumped too far between samples and the
    range between the two peaks must be resampled.  Returns the offending
    indexes into rmsV[pkIdxL].
    """
    pkDbV = rmsV[pkIdxL]

    def _out_of_range(i):
        # delta from the preceding peak; flag jumps above maxDeltaDb and
        # any drop in level
        delta = pkDbV[i] - pkDbV[i-1]
        return delta > maxDeltaDb or delta < 0

    return [ i for i in range( min_pk_idx, max_pk_idx+1 ) if i > 0 and _out_of_range(i) ]
|
2019-08-20 01:18:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def plot_spectrum( ax, srate, binHz, specV, midiPitch, harmN ):
    """ Plot a single spectrum, 'specV' and the harmonic peak location boundaries."""

    binN = specV.shape[0]

    # harmonic search band edges (lower/middle/upper bin per harmonic);
    # calc_harm_bins() is defined elsewhere in this project
    harmLBinL,harmMBinL,harmUBinL = calc_harm_bins( srate, binHz, midiPitch, harmN )

    # plot out to one band above the highest requested harmonic
    fundHz = harmMBinL[0] * binHz
    maxPlotHz = fundHz * (harmN+1)
    maxPlotBinN = int(round(maxPlotHz/binHz))

    # frequency (Hz) for each spectrum bin
    hzV = np.arange(binN) * (srate/(binN*2))

    # convert magnitude to dB
    specV = 20.0 * np.log10(specV)

    ax.plot(hzV[0:maxPlotBinN], specV[0:maxPlotBinN] )

    # mark each harmonic band: blue = band edges, black = band center
    for h0,h1,h2 in zip(harmLBinL,harmMBinL,harmUBinL):
        ax.axvline( x=h0 * binHz, color="blue")
        ax.axvline( x=h1 * binHz, color="black")
        ax.axvline( x=h2 * binHz, color="blue")

    ax.set_ylabel(str(midiPitch))
|
|
|
|
|
2019-12-09 22:37:24 +00:00
|
|
|
def plot_spectral_ranges( inDir, pitchL, rmsWndMs=300, rmsHopMs=30, harmN=5, dbLinRef=0.001 ):
    """ Plot the spectrum from one note (7th from last) in each attack pulse length sequence referred to by pitchL."""

    # NOTE(review): this function requires 'json' and 'wavfile'
    # (scipy.io.wavfile) in scope, and calls audio_rms(),
    # locate_peak_indexes() and audio_stft_rms() defined elsewhere in this
    # project - confirm the imports exist at module level.

    # one subplot row per pitch
    plotN = len(pitchL)
    fig,axL = plt.subplots(plotN,1)

    for plot_idx,midiPitch in enumerate(pitchL):

        # get the audio and meta-data file names
        seqFn   = os.path.join( inDir, str(midiPitch), "seq.json")
        audioFn = os.path.join( inDir, str(midiPitch), "audio.wav")

        # read the meta data object
        with open( seqFn, "rb") as f:
            r = json.load(f)

        # read the audio file and normalize 16-bit samples to [-1,1]
        srate, signalM = wavfile.read(audioFn)
        sigV = signalM / float(0x7fff)

        # calc. the RMS envelope in the time domain
        rms0DbV, rms0_srate = audio_rms( srate, sigV, rmsWndMs, rmsHopMs, dbLinRef )

        # locate the sample index of the peak of each note attack
        pkIdx0L = locate_peak_indexes( rms0DbV, rms0_srate, r['eventTimeL'] )

        # select the 7th to last note for spectrum measurement
        #
        # TODO: come up with a better way to select the note to measure
        #
        spectrumSmpIdx = pkIdx0L[ len(pkIdx0L) - 7 ]

        # calc. the RMS envelope by taking the max spectral peak in each STFT window
        rmsDbV, rms_srate, specV, specHopIdx, binHz = audio_stft_rms( srate, sigV, rmsWndMs, rmsHopMs, dbLinRef, spectrumSmpIdx)

        # specV[] is the spectrum of the note at spectrumSmpIdx

        # plot the spectrum and the harmonic selection ranges
        plot_spectrum( axL[plot_idx], srate, binHz, specV, midiPitch, harmN )

    plt.show()
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
def td_plot( ax, inDir, midi_pitch, id, analysisArgsD ):
    """Plot the time-domain RMS envelope analysis for one take onto 'ax'.

    Draws the RMS envelope, the note begin/end/peak/duration markers, and
    flags skipped (red) and out-of-range (blue) peaks.  'id' is currently
    unused (and shadows the builtin of the same name).

    Returns:
      The analysis record produced by rms_analysis_main().
    """
    r = rms_analysis_main( inDir, midi_pitch, **analysisArgsD['rmsAnalysisArgs'] )

    # locate the usable min/max peak range
    min_pk_idx, max_pk_idx = find_min_max_peak_index( r.pkDbL, analysisArgsD['minAttkDb'], analysisArgsD['maxDbOffset'] )

    # peaks that break the curve's monotonicity
    skipPkIdxL = find_skip_peaks( r.rmsDbV, r.pkIdxL, min_pk_idx, max_pk_idx )

    # peaks that jump too far from their predecessor
    jmpPkIdxL = find_out_of_range_peaks( r.rmsDbV, r.pkIdxL, min_pk_idx, max_pk_idx, analysisArgsD['maxDeltaDb'] )

    # time axis in seconds
    secV = np.arange(0,len(r.rmsDbV)) / r.rms_srate

    ax.plot( secV, r.rmsDbV )
    ax.plot( np.arange(0,len(r.tdRmsDbV)) / r.rms_srate, r.tdRmsDbV, color="black" )

    # print beg/end boundaries
    for i,(begMs, endMs) in enumerate(r.eventTimeL):

        # peak time and note-duration end time in seconds
        pkSec  = r.pkIdxL[i] / r.rms_srate
        endSec = pkSec + r.statsL[i].durMs / 1000.0

        ax.axvline( x=begMs/1000.0, color="green")
        ax.axvline( x=endMs/1000.0, color="red")
        ax.axvline( x=pkSec,  color="black")
        ax.axvline( x=endSec, color="black")
        ax.text(begMs/1000.0, 20.0, str(i) )

    # plot peak markers
    for i,pki in enumerate(r.pkIdxL):

        # caret marker for the min/max peaks; red for skipped peaks
        marker = 4 if i==min_pk_idx or i==max_pk_idx else 5
        color  = "red" if i in skipPkIdxL else "black"
        ax.plot( [pki / r.rms_srate], [ r.rmsDbV[pki] ], marker=marker, color=color)

        # blue marker for peaks out of range of their predecessor
        if i in jmpPkIdxL:
            ax.plot( [pki / r.rms_srate], [ r.rmsDbV[pki] ], marker=6, color="blue")

    return r
|
2019-11-18 16:44:47 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
2019-09-01 14:54:09 +00:00
|
|
|
def do_td_plot( inDir, analysisArgs ):
    """Full single-take report: envelope plot plus level/quality/delta curves.

    'inDir' must end in <midi_pitch>/<take_id>.  Axis 0 shows the RMS
    envelope (via td_plot), axis 1 the per-peak level/quality/average-dB
    curves, axis 2 the relative dB delta between consecutive peaks.
    """
    fig,axL = plt.subplots(3,1)
    fig.set_size_inches(18.5, 10.5, forward=True)

    # the path encodes <midi_pitch>/<take_id>
    id = int(inDir.split("/")[-1])
    midi_pitch = int(inDir.split("/")[-2])

    # envelope plot; 'r' is the take's analysis record
    r = td_plot(axL[0],inDir,midi_pitch,id,analysisArgs)

    # per-note statistics scaled for overlay with the peak levels
    qualityV = np.array([ x.quality for x in r.statsL ]) * np.max(r.pkDbL)
    durMsV = np.array([ x.durMs for x in r.statsL ])
    avgV = np.array([ x.durAvgDb for x in r.statsL ])

    #durMsV[ durMsV < 400 ] = 0
    #durMsV = durMsV * np.max(r.pkDbL)/np.max(durMsV)
    #durMsV = durMsV / 100.0

    # relative dB change between consecutive peaks
    dV = np.diff(r.pkDbL) / r.pkDbL[1:]

    axL[1].plot( r.pkUsL, r.pkDbL, marker='.',label="pkDb" )
    axL[1].plot( r.pkUsL, qualityV, marker='.',label="quality" )
    axL[1].plot( r.pkUsL, avgV, marker='.',label="avgDb" )
    #axL[2].plot( r.pkUsL, durMsV, marker='.' )
    axL[2].plot( r.pkUsL[1:], dV, marker='.',label='d')
    axL[2].set_ylim([-1,1])
    axL[1].legend()

    # mark the first stable note chosen by duration
    sni = select_first_stable_note_by_dur( durMsV )
    if sni is not None:
        axL[1].plot( r.pkUsL[sni], r.pkDbL[sni], marker='*', color='red')

    # mark the first stable note chosen by dB delta
    sni = select_first_stable_note_by_delta_db( r.pkDbL )
    if sni is not None:
        axL[2].plot( r.pkUsL[sni], dV[sni-1], marker='*', color='red')

    # annotate each peak with its index
    for i,s in enumerate(r.statsL):
        axL[1].text( r.pkUsL[i], r.pkDbL[i] + 1, "%i" % (i))

    for i in range(1,len(r.pkUsL)):
        axL[2].text( r.pkUsL[i], dV[i-1], "%i" % (i))

    plt.show()
|
|
|
|
|
|
|
|
def do_td_multi_plot( inDir, analysisArgs ):
    """Render one time-domain analysis subplot per take found beneath 'inDir'.

    The last element of 'inDir' is the MIDI pitch; each numbered child
    folder holds one take.
    """
    # the directory name is the midi pitch of the note under analysis
    midi_pitch = int( inDir.split("/")[-1] )

    takeDirL = os.listdir(inDir)

    # one subplot row per take
    fig, axL = plt.subplots( len(takeDirL), 1 )

    # takes are stored in folders named 0..N-1
    for take_id, (takeDir, ax) in enumerate( zip(takeDirL, axL) ):
        td_plot( ax, os.path.join( inDir, str(take_id) ), midi_pitch, take_id, analysisArgs )

    plt.show()
|
|
|
|
|
|
|
|
|
2019-08-20 01:18:57 +00:00
|
|
|
if __name__ == "__main__":

    # Usage: <prog> <inDir> <cfgFn> [<take_id>]
    inDir = sys.argv[1]
    cfgFn = sys.argv[2]
    take_id = None if len(sys.argv)<4 else sys.argv[3]

    # parse the YAML analysis configuration
    cfg = parse_yaml_cfg( cfgFn )

    if take_id is not None:
        # a specific take was requested: plot that take only
        inDir = os.path.join(inDir,take_id)
        do_td_plot(inDir,cfg.analysisArgs)
    else:
        #do_td_multi_plot(inDir,cfg.analysisArgs)

        #plot_spectral_ranges( inDir, [ 24, 36, 48, 60, 72, 84, 96, 104] )

        # default action: plot the merged takes and proposed resample times
        plot_resample_pulse_times( inDir, cfg.analysisArgs )
|
2019-09-01 14:54:09 +00:00
|
|
|
|