picadae calibration programs

plot_seq_1.py 26KB

import os, sys, json

import matplotlib.pyplot as plt
import numpy as np

from common import parse_yaml_cfg
import rms_analysis
import elbow


def fit_to_reference( pkL, refTakeId ):

    # Map each take's db values onto the db scale of the reference take so that
    # the takes can be merged into a single (db,us,durMs,takeId) sample set.

    us_outL  = []
    db_outL  = []
    dur_outL = []
    tid_outL = []

    dbL,usL,durMsL,takeIdL = tuple(zip(*pkL))

    # (us,db,dur) samples belonging to the reference take
    us_refL,db_refL,dur_refL = zip(*[(usL[i],dbL[i],durMsL[i]) for i in range(len(usL)) if takeIdL[i]==refTakeId])

    for takeId in set(takeIdL):

        # (us,db,dur) samples belonging to this take
        us0L,db0L,dur0L = zip(*[(usL[i],dbL[i],durMsL[i]) for i in range(len(usL)) if takeIdL[i]==takeId ])

        if takeId == refTakeId:
            db_outL += db0L
        else:
            db1V = elbow.fit_points_to_reference(us0L,db0L,us_refL,db_refL)
            db_outL += db1V.tolist()

        us_outL  += us0L
        dur_outL += dur0L
        tid_outL += [takeId] * len(us0L)

    return zip(db_outL,us_outL,dur_outL,tid_outL)


def get_merged_pulse_db_measurements( inDir, midi_pitch, analysisArgsD ):

    # Analyze every take for this pitch and merge the per-note (us,db,durMs,takeId) peaks.

    inDir = os.path.join(inDir,"%i" % (midi_pitch))

    takeDirL = os.listdir(inDir)

    pkL = []

    usRefL = None
    dbRefL = None

    # for each take in this directory
    for take_number in range(len(takeDirL)):

        # analyze this take's audio and locate the note peaks
        r = rms_analysis.rms_analysis_main( os.path.join(inDir,str(take_number)), midi_pitch, **analysisArgsD )

        # store the peaks in pkL[ (db,us) ]
        for db,us,stats in zip(r.pkDbL,r.pkUsL,r.statsL):
            pkL.append( (db,us,stats.durMs,take_number) )

    pkL = fit_to_reference( pkL, 0 )

    # sort the peaks on increasing attack pulse microseconds
    pkL = sorted( pkL, key= lambda x: x[1] )

    # merge sample points that are separated by less than 'minSampleDistUs' microseconds
    #pkL = merge_close_sample_points( pkL, analysisArgsD['minSampleDistUs'] )

    # split pkL
    pkDbL,pkUsL,durMsL,takeIdL = tuple(zip(*pkL))

    return pkUsL,pkDbL,durMsL,takeIdL,r.holdDutyPctL


def select_resample_reference_indexes( noiseIdxL ):

    resampleIdxS = set()

    # for each noisy sample index store that index and the index
    # before and after it
    for i in noiseIdxL:

        resampleIdxS.add( i )

        if i+1 < len(noiseIdxL):
            resampleIdxS.add( i+1 )

        if i-1 >= 0:
            resampleIdxS.add( i-1 )

    resampleIdxL = list(resampleIdxS)

    # if a single sample point is left out of a region of
    # contiguous sample points then include this as a resample point also
    for i in resampleIdxL:
        if i + 1 not in resampleIdxL and i + 2 in resampleIdxL: # BUG BUG BUG: Hardcoded constant
            if i+1 < len(noiseIdxL):
                resampleIdxL.append(i+1)

    return resampleIdxL


def locate_resample_regions( usL, dbL, resampleIdxL ):

    # locate regions of points to resample
    regionL    = [] # (bi,ei)
    inRegionFl = False
    bi         = None

    for i in range(len(usL)):

        if inRegionFl:
            if i not in resampleIdxL:
                regionL.append((bi,i-1))
                inRegionFl = False
                bi         = None
        else:
            if i in resampleIdxL:
                inRegionFl = True
                bi         = i

    if bi is not None:
        regionL.append((bi,len(usL)-1))

    # select points around and within the resample regions
    # to resample
    reUsL = []
    reDbL = []

    for bi,ei in regionL:
        for i in range(bi,ei+2):

            if i == 0:
                us = usL[i]
                db = dbL[i]
            elif i >= len(usL):
                us = usL[i-1]
                db = dbL[i-1]
            else:
                us = usL[i-1] + (usL[i]-usL[i-1])/2
                db = dbL[i-1] + (dbL[i]-dbL[i-1])/2

            reUsL.append(us)
            reDbL.append(db)

    return reUsL,reDbL


def get_dur_skip_indexes( durMsL, dbL, takeIdL, scoreL, minDurMs, minDb, noiseLimitPct ):

    firstAudibleIdx = None
    firstNonSkipIdx = None

    # get the indexes of samples which do not meet the duration, db level, or noise criteria
    skipIdxL = [ i for i,(ms,db,score) in enumerate(zip(durMsL,dbL,scoreL)) if ms < minDurMs or db < minDb or score > noiseLimitPct ]

    # if a single sample point is left out of a region of
    # contiguous skipped points then skip this point also
    for i in range(len(durMsL)):
        if i not in skipIdxL and i-1 in skipIdxL and i+1 in skipIdxL:
            skipIdxL.append(i)

    # find the first set of 3 contiguous samples that
    # are greater than minDurMs - all samples prior
    # to these will be skipped
    xL = []
    for i in range(len(durMsL)):

        if i in skipIdxL:
            xL = []
        else:
            xL.append(i)
            if len(xL) == 3: # BUG BUG BUG: Hardcoded constant
                firstAudibleIdx = xL[0]
                break

    # step back until the level drops by at least one decibel to locate the first non-skip index
    # TODO: what if no note exists that is one decibel less?
    # The recordings of very quiet notes do not give reliable decibel measures
    # so this may not be the best backup criterion.
    if firstAudibleIdx is not None:

        i = firstAudibleIdx-1
        while abs(dbL[i] - dbL[firstAudibleIdx]) < 1.0: # BUG BUG BUG: Hardcoded constant
            i -= 1

        firstNonSkipIdx = i

    return skipIdxL, firstAudibleIdx, firstNonSkipIdx


def get_resample_points( usL, dbL, durMsL, takeIdL, minDurMs, minDb, noiseLimitPct ):

    scoreV = np.abs( rms_analysis.samples_to_linear_residual( usL, dbL) * 100.0 / dbL )

    skipIdxL, firstAudibleIdx, firstNonSkipIdx = get_dur_skip_indexes( durMsL, dbL, takeIdL, scoreV.tolist(), minDurMs, minDb, noiseLimitPct )
    skipL        = [ (usL[i],dbL[i]) for i in skipIdxL ]

    noiseIdxL    = [ i for i in range(scoreV.shape[0]) if scoreV[i] > noiseLimitPct ]
    noiseL       = [ (usL[i],dbL[i]) for i in noiseIdxL ]

    resampleIdxL = select_resample_reference_indexes( noiseIdxL )

    if firstNonSkipIdx is not None:
        resampleIdxL = [ i for i in resampleIdxL if i >= firstNonSkipIdx ]

    resampleL    = [ (usL[i],dbL[i]) for i in resampleIdxL ]

    reUsL,reDbL  = locate_resample_regions( usL, dbL, resampleIdxL )

    return reUsL, reDbL, noiseL, resampleL, skipL, firstAudibleIdx, firstNonSkipIdx


def get_resample_points_wrap( inDir, midi_pitch, analysisArgsD ):

    usL, dbL, durMsL, takeIdL, _ = get_merged_pulse_db_measurements( inDir, midi_pitch, analysisArgsD['rmsAnalysisArgs'] )

    reUsL,_,_,_,_,_,_ = get_resample_points( usL, dbL, durMsL, takeIdL, analysisArgsD['resampleMinDurMs'], analysisArgsD['resampleMinDb'], analysisArgsD['resampleNoiseLimitPct'] )

    return reUsL


def plot_us_db_curves( ax, inDir, keyMapD, midi_pitch, analysisArgsD, plotResamplePointsFl=False, plotTakesFl=True, usMax=None ):

    usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, analysisArgsD['rmsAnalysisArgs'] )

    reUsL, reDbL, noiseL, resampleL, skipL, firstAudibleIdx, firstNonSkipIdx = get_resample_points( usL, dbL, durMsL, takeIdL, analysisArgsD['resampleMinDurMs'], analysisArgsD['resampleMinDb'], analysisArgsD['resampleNoiseLimitPct'] )

    # plot first audible and non-skip position
    if False:
        if firstNonSkipIdx is not None:
            ax.plot( usL[firstNonSkipIdx], dbL[firstNonSkipIdx], markersize=15, marker='+', linestyle='None', color='red')

        if firstAudibleIdx is not None:
            ax.plot( usL[firstAudibleIdx], dbL[firstAudibleIdx], markersize=15, marker='*', linestyle='None', color='red')

    # plot the resample points
    if plotResamplePointsFl:
        ax.plot( reUsL, reDbL, markersize=13, marker='x', linestyle='None', color='green')

    # plot the noisy sample positions
    if noiseL:
        nUsL,nDbL = zip(*noiseL)
        ax.plot( nUsL, nDbL, marker='o', markersize=9, linestyle='None', color='black')

    # plot the noisy sample positions and the neighbors included in the noisy region
    if resampleL:
        nUsL,nDbL = zip(*resampleL)
        ax.plot( nUsL, nDbL, marker='+', markersize=8, linestyle='None', color='red')

    # plot actual sample points
    elbow_us  = None
    elbow_db  = None
    elbow_len = None

    usL,dbL,takeIdL = zip(*[(us,dbL[i],takeIdL[i]) for i,us in enumerate(usL) if usMax is None or us <= usMax])

    if plotTakesFl:

        for takeId in list(set(takeIdL)):

            # get the us,db points included in this take
            xL,yL = zip(*[(usL[i],dbL[i]) for i in range(len(usL)) if takeIdL[i]==takeId ])

            ax.plot(xL,yL, marker='.',label=takeId)

            for i,(x,y) in enumerate(zip(xL,yL)):
                ax.text(x,y,str(i))

            #if elbow_len is None or len(xL) > elbow_len:
            if takeId+1 == len(set(takeIdL)):
                elbow_us,elbow_db = elbow.find_elbow(xL,yL)
                elbow_len = len(xL)

    else:
        ax.plot(usL, dbL, marker='.')

    ax.plot([elbow_us],[elbow_db],marker='*',markersize=12,color='red',linestyle='None')

    # plot the skip points in yellow
    if False:
        if skipL:
            nUsL,nDbL = zip(*skipL)
            ax.plot( nUsL, nDbL, marker='.', linestyle='None', color='yellow')

    # plot the locations where the hold duty cycle changes with vertical black lines
    for us_duty in holdDutyPctL:
        us,duty = tuple(us_duty)
        if us > 0:
            ax.axvline(us,color='black')

    # plot the 'minDb' reference line
    ax.axhline(analysisArgsD['resampleMinDb'] ,color='black')

    if os.path.isfile("minInterpDb.json"):
        with open("minInterpDb.json","r") as f:
            r = json.load(f)
            if midi_pitch in r['pitchL']:
                ax.axhline( r['minDbL'][ r['pitchL'].index(midi_pitch) ], color='blue' )
                ax.axhline( r['maxDbL'][ r['pitchL'].index(midi_pitch) ], color='blue' )

    ax.set_ylabel( "%i %s %s" % (midi_pitch, keyMapD[midi_pitch]['type'],keyMapD[midi_pitch]['class']))


def plot_us_db_curves_main( inDir, cfg, pitchL, plotTakesFl=True, usMax=None ):

    analysisArgsD = cfg.analysisArgs
    keyMapD       = { d['midi']:d for d in cfg.key_mapL }
    axN           = len(pitchL)

    fig,axL = plt.subplots(axN,1,sharex=True)
    if axN == 1:
        axL = [axL]

    fig.set_size_inches(18.5, 10.5*axN)

    for ax,midi_pitch in zip(axL,pitchL):
        plot_us_db_curves( ax, inDir, keyMapD, midi_pitch, analysisArgsD, plotTakesFl=plotTakesFl, usMax=usMax )

    if plotTakesFl:
        plt.legend()

    plt.show()


def plot_all_noise_curves( inDir, cfg, pitchL=None ):

    pitchFolderL = os.listdir(inDir)

    if pitchL is None:
        pitchL = [ int(pitchFolder) for pitchFolder in pitchFolderL ]

    fig,ax = plt.subplots()

    for midi_pitch in pitchL:

        print(midi_pitch)

        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'] )

        scoreV = np.abs( rms_analysis.samples_to_linear_residual( usL, dbL) * 100.0 / dbL )

        minDurMs      = cfg.analysisArgs['resampleMinDurMs']
        minDb         = cfg.analysisArgs['resampleMinDb']
        noiseLimitPct = cfg.analysisArgs['resampleNoiseLimitPct']

        skipIdxL, firstAudibleIdx, firstNonSkipIdx = get_dur_skip_indexes( durMsL, dbL, takeIdL, scoreV.tolist(), minDurMs, minDb, noiseLimitPct )

        if False:
            ax.plot( usL[firstAudibleIdx], scoreV[firstAudibleIdx], markersize=10, marker='*', linestyle='None', color='red')
            ax.plot( usL, scoreV, label="%i"%(midi_pitch) )
            ax.set_xlabel('us')
        else:
            xL = [ (score,db,i) for i,(score,db) in enumerate(zip(scoreV,dbL)) ]
            xL = sorted(xL, key=lambda x: x[1] )
            scoreV,dbL,idxL = zip(*xL)
            ax.plot( dbL[idxL[firstAudibleIdx]], scoreV[idxL[firstAudibleIdx]], markersize=10, marker='*', linestyle='None', color='red')
            ax.plot( dbL, scoreV, label="%i"%(midi_pitch) )
            ax.set_xlabel('db')

    ax.set_ylabel("noise db %")

    plt.legend()
    plt.show()


def plot_min_max_2_db( inDir, cfg, pitchL=None, takeId=2 ):

    pitchFolderL = os.listdir(inDir)

    if pitchL is None:
        pitchL = [ int(pitchFolder) for pitchFolder in pitchFolderL ]

    okL       = []
    outPitchL = []
    minDbL    = []
    maxDbL    = []

    for midi_pitch in pitchL:

        print(midi_pitch)

        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'] )

        okL.append(False)

        takeId = len(set(takeIdL))-1

        db_maxL = sorted(dbL)
        maxDbL.append( np.mean(db_maxL[-5:]) )

        usL,dbL = zip(*[(usL[i],dbL[i]) for i in range(len(usL)) if takeIdL[i]==takeId ])

        if len(set(takeIdL)) == 3:
            okL[-1] = True

        elbow_us,elbow_db = elbow.find_elbow(usL,dbL)

        minDbL.append(elbow_db)
        outPitchL.append(midi_pitch)

    p_dL = sorted( zip(outPitchL,minDbL,maxDbL,okL), key=lambda x: x[0] )

    outPitchL,minDbL,maxDbL,okL = zip(*p_dL)

    fig,ax = plt.subplots()

    ax.plot(outPitchL,minDbL)
    ax.plot(outPitchL,maxDbL)

    keyMapD = { d['midi']:d for d in cfg.key_mapL }

    for pitch,min_db,max_db,okFl in zip(outPitchL,minDbL,maxDbL,okL):
        c = 'black' if okFl else 'red'
        ax.text( pitch, min_db, "%i %s %s" % (pitch, keyMapD[pitch]['type'],keyMapD[pitch]['class']), color=c)
        ax.text( pitch, max_db, "%i %s %s" % (pitch, keyMapD[pitch]['type'],keyMapD[pitch]['class']), color=c)

    plt.show()


def plot_min_db_manual( inDir, cfg ):

    pitchL = list(cfg.manualMinD.keys())

    outPitchL    = []
    maxDbL       = []
    minDbL       = []
    okL          = []
    anchorMinDbL = []
    anchorMaxDbL = []

    for midi_pitch in pitchL:

        manual_take_id    = cfg.manualMinD[midi_pitch][0]
        manual_sample_idx = cfg.manualMinD[midi_pitch][1]

        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'] )

        okL.append(False)

        takeId = len(set(takeIdL))-1

        # maxDb is computed on all takes (not just the specified take)
        db_maxL = sorted(dbL)
        max_db  = np.mean(db_maxL[-4:])
        maxDbL.append( max_db )

        # get the us,db values for the specified take
        usL,dbL = zip(*[(usL[i],dbL[i]) for i in range(len(usL)) if takeIdL[i]==manual_take_id ])

        # most pitches have 3 sample takes; flag those that do not, or whose
        # manually specified take is not the last take
        if len(set(takeIdL)) == 3 and manual_take_id == takeId:
            okL[-1] = True

        # min db from the sample index manually specified in cfg
        manualMinDb = dbL[ manual_sample_idx ]

        minDbL.append( manualMinDb )
        outPitchL.append(midi_pitch)

        if midi_pitch in cfg.manualAnchorPitchMinDbL:
            anchorMinDbL.append( manualMinDb )

        if midi_pitch in cfg.manualAnchorPitchMaxDbL:
            anchorMaxDbL.append( max_db )

    # Form the complete set of min/max db levels for each pitch by interpolating the
    # db values between the manually selected anchor points.
    interpMinDbL = np.interp( pitchL, cfg.manualAnchorPitchMinDbL, anchorMinDbL )
    interpMaxDbL = np.interp( pitchL, cfg.manualAnchorPitchMaxDbL, anchorMaxDbL )

    fig,ax = plt.subplots()

    ax.plot(outPitchL,minDbL) # plot the manually selected minDb values
    ax.plot(outPitchL,maxDbL) # plot the max db values

    # plot the interpolated minDb/maxDb values
    ax.plot(pitchL,interpMinDbL)
    ax.plot(pitchL,interpMaxDbL)

    keyMapD = { d['midi']:d for d in cfg.key_mapL }

    for pitch,min_db,max_db,okFl in zip(outPitchL,minDbL,maxDbL,okL):
        c = 'black' if okFl else 'red'
        ax.text( pitch, min_db, "%i %s %s" % (pitch, keyMapD[pitch]['type'],keyMapD[pitch]['class']), color=c)
        ax.text( pitch, max_db, "%i %s %s" % (pitch, keyMapD[pitch]['type'],keyMapD[pitch]['class']), color=c)

    with open("minInterpDb.json",'w') as f:
        json.dump( { "pitchL":pitchL, "minDbL":list(interpMinDbL), "maxDbL":list(interpMaxDbL) }, f )

    plt.show()


def plot_min_max_db( inDir, cfg, pitchL=None ):

    pitchFolderL = os.listdir(inDir)

    if pitchL is None:
        pitchL = [ int(pitchFolder) for pitchFolder in pitchFolderL ]

    maxDbL = []
    minDbL = []

    for midi_pitch in pitchL:

        print(midi_pitch)

        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, midi_pitch, cfg.analysisArgs['rmsAnalysisArgs'] )

        scoreV = np.abs( rms_analysis.samples_to_linear_residual( usL, dbL) * 100.0 / dbL )

        minDurMs      = cfg.analysisArgs['resampleMinDurMs']
        minDb         = cfg.analysisArgs['resampleMinDb']
        noiseLimitPct = cfg.analysisArgs['resampleNoiseLimitPct']

        skipIdxL, firstAudibleIdx, firstNonSkipIdx = get_dur_skip_indexes( durMsL, dbL, takeIdL, scoreV.tolist(), minDurMs, minDb, noiseLimitPct )

        minDbL.append( dbL[firstAudibleIdx] )

        dbL = sorted(dbL)
        x = np.mean(dbL[-3:])
        x = np.max(dbL)
        maxDbL.append( x )

    fig,ax = plt.subplots()
    fig.set_size_inches(18.5, 10.5)

    # keep the min/max db values aligned with the sorted pitch list
    p_dL = sorted( zip(pitchL,minDbL,maxDbL), key=lambda x: x[0] )
    pitchL,minDbL,maxDbL = zip(*p_dL)

    ax.plot(pitchL,maxDbL)
    ax.plot(pitchL,minDbL)

    keyMapD = { d['midi']:d for d in cfg.key_mapL }

    for pitch,db in zip(pitchL,maxDbL):
        ax.text( pitch, db, "%i %s %s" % (pitch, keyMapD[pitch]['type'],keyMapD[pitch]['class']))

    plt.show()


def estimate_us_to_db_map( inDir, cfg, minMapDb=16.0, maxMapDb=26.0, incrMapDb=0.5, pitchL=None ):

    pitchFolderL = os.listdir(inDir)

    if pitchL is None:
        pitchL = [ int(pitchFolder) for pitchFolder in pitchFolderL ]

    mapD = {} # pitch:{ loDb: { hiDb, us_avg, us_cls, us_std, us_min, us_max, db_avg, db_std, cnt }}
              # where: cnt    = count of valid sample points in this db range
              #        us_cls = us of closest point to center of db range

    dbS = set() # { (loDb,hiDb) } track the set of db ranges

    for pitch in pitchL:

        print(pitch)

        # get the sample measurements for pitch
        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, pitch, cfg.analysisArgs['rmsAnalysisArgs'] )

        # calc the fit to a local straight-line curve fit at each point
        scoreV = np.abs( rms_analysis.samples_to_linear_residual( usL, dbL) * 100.0 / dbL )

        minDurMs      = cfg.analysisArgs['resampleMinDurMs']
        minDb         = cfg.analysisArgs['resampleMinDb']
        noiseLimitPct = cfg.analysisArgs['resampleNoiseLimitPct']

        # get the set of samples that are not valid (too short, too quiet, too noisy)
        skipIdxL, firstAudibleIdx, firstNonSkipIdx = get_dur_skip_indexes( durMsL, dbL, takeIdL, scoreV.tolist(), minDurMs, minDb, noiseLimitPct )

        mapD[ pitch ] = {}

        # get the count of db ranges
        N = int(round((maxMapDb - minMapDb) / incrMapDb)) + 1

        # for each db range
        for i in range(N):

            loDb = minMapDb + (i*incrMapDb)
            hiDb = loDb + incrMapDb

            dbS.add((loDb,hiDb))

            # get the valid (pulse,db) pairs for this range
            u_dL = [ (us,db) for j,(us,db) in enumerate(zip(usL,dbL)) if j not in skipIdxL and loDb<=db and db<hiDb ]

            us_avg = 0
            us_cls = 0
            us_std = 0
            us_min = 0
            us_max = 0
            db_avg = 0
            db_std = 0

            if len(u_dL) == 0:
                print("No valid samples for pitch:",pitch," db range:",loDb,hiDb)
            else:
                us0L,db0L = zip(*u_dL)

                if len(us0L) == 1:
                    us_avg = us0L[0]
                    us_cls = us_avg
                    us_min = us_avg
                    us_max = us_avg
                    db_avg = db0L[0]

                elif len(us0L) > 1:
                    us_avg = np.mean(us0L)
                    us_cls = us0L[ np.argmin(np.abs(np.array(db0L)-(loDb - (hiDb-loDb)/2.0 ))) ]
                    us_min = np.min(us0L)
                    us_max = np.max(us0L)
                    us_std = np.std(us0L)
                    db_avg = np.mean(db0L)
                    db_std = np.std(db0L)

                us_avg = int(round(us_avg))

            mapD[pitch][loDb] = { 'hiDb':hiDb, 'us_avg':us_avg, 'us_cls':us_cls, 'us_std':us_std, 'us_min':us_min, 'us_max':us_max, 'db_avg':db_avg, 'db_std':db_std, 'cnt':len(u_dL) }

    return mapD, list(dbS)
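
# Illustrative shape of the map returned by estimate_us_to_db_map(). The numbers
# below are placeholders, not measured data; only the key layout follows the
# code above:
#
#   mapD[60][16.0] -> { 'hiDb':16.5, 'us_avg':1200, 'us_cls':1185, 'us_std':40,
#                       'us_min':1100, 'us_max':1300, 'db_avg':16.2, 'db_std':0.1, 'cnt':4 }
#
# i.e. for MIDI pitch 60 the db band [16.0,16.5) contained 4 valid samples whose
# attack pulse lengths average 1200 us.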


def plot_us_to_db_map( inDir, cfg, minMapDb=16.0, maxMapDb=26.0, incrMapDb=1.0, pitchL=None ):

    fig,ax = plt.subplots()

    mapD, dbRefL = estimate_us_to_db_map( inDir, cfg, minMapDb, maxMapDb, incrMapDb, pitchL )

    # for each pitch
    for pitch, dbD in mapD.items():

        u_dL = [ (d['us_avg'],d['us_cls'],d['db_avg'],d['us_std'],d['us_min'],d['us_max'],d['db_std']) for loDb, d in dbD.items() if d['us_avg'] != 0 ]

        # get the us/db lists for this pitch
        usL,uscL,dbL,ussL,usnL,usxL,dbsL = zip(*u_dL)

        # plot central curve and std dev's
        p = ax.plot(usL,dbL, marker='.', label=str(pitch))
        ax.plot(uscL,dbL, marker='x', label=str(pitch), color=p[0].get_color(), linestyle='None')
        ax.plot(usL,np.array(dbL)+dbsL, color=p[0].get_color(), alpha=0.3)
        ax.plot(usL,np.array(dbL)-dbsL, color=p[0].get_color(), alpha=0.3)

        # plot us error bars
        for db,us,uss,us_min,us_max in zip(dbL,usL,ussL,usnL,usxL):
            ax.plot([us_min,us_max],[db,db], color=p[0].get_color(), alpha=0.3 )
            ax.plot([us-uss,us+uss],[db,db], color=p[0].get_color(), alpha=0.3, marker='.', linestyle='None' )

    plt.legend()
    plt.show()


def report_take_ids( inDir ):

    # Report pitch directories with missing takes or unexpected event counts.

    pitchDirL = os.listdir(inDir)

    for pitch in pitchDirL:

        pitchDir = os.path.join(inDir,pitch)
        takeDirL = os.listdir(pitchDir)

        if len(takeDirL) == 0:
            print(pitch," directory empty")
        else:
            with open( os.path.join(pitchDir,'0','seq.json'), "rb") as f:
                r = json.load(f)

            if len(r['eventTimeL']) != 81:
                print(pitch," ",len(r['eventTimeL']))

        if len(takeDirL) != 3:
            print("***",pitch,len(takeDirL))


def cache_us_db( inDir, cfg, outFn ):

    # Cache the merged (us,db) measurements for every pitch to a JSON file.

    pitch_usDbD = {}

    pitchDirL = os.listdir(inDir)

    for pitch in pitchDirL:

        pitch = int(pitch)

        print(pitch)

        usL, dbL, durMsL, takeIdL, holdDutyPctL = get_merged_pulse_db_measurements( inDir, pitch, cfg.analysisArgs['rmsAnalysisArgs'] )

        pitch_usDbD[pitch] = { 'usL':usL, 'dbL':dbL, 'durMsL':durMsL, 'takeIdL':takeIdL, 'holdDutyPctL': holdDutyPctL }

    with open(outFn,"w") as f:
        json.dump(pitch_usDbD,f)


def gen_vel_map( inDir, cfg, minMaxDbFn, dynLevelN, cacheFn ):

    # For each pitch select dynLevelN+1 pulse lengths whose db levels are evenly
    # spaced between that pitch's interpolated min and max db levels.

    velMapD = {} # { pitch:[ (us,db) ] }

    pitchDirL = os.listdir(inDir)

    with open(cacheFn,"r") as f:
        pitchUsDbD = json.load(f)

    with open(minMaxDbFn,"r") as f:
        r = json.load(f)
        minMaxDbD = { pitch:(minDb,maxDb) for pitch,minDb,maxDb in zip(r['pitchL'],r['minDbL'],r['maxDbL']) }

    pitchL = sorted( [ int(pitch) for pitch in pitchUsDbD.keys() ] )

    for pitch in pitchL:

        d = pitchUsDbD[str(pitch)]

        usL = d['usL']
        dbL = np.array(d['dbL'])

        velMapD[pitch] = []

        for i in range(dynLevelN+1):

            db = minMaxDbD[pitch][0] + (i * (minMaxDbD[pitch][1] - minMaxDbD[pitch][0]) / dynLevelN)

            usIdx = np.argmin( np.abs(dbL - db) )

            velMapD[pitch].append( (usL[ usIdx ],db) )

    with open("velMapD.json","w") as f:
        json.dump(velMapD,f)

    mtx = np.zeros((len(velMapD),dynLevelN+1))
    print(mtx.shape)

    for i,(pitch,usDbL) in enumerate(velMapD.items()):
        for j in range(len(usDbL)):
            mtx[i,j] = usDbL[j][1]

    fig,ax = plt.subplots()
    ax.plot(pitchL,mtx)
    plt.show()


if __name__ == "__main__":

    inDir = sys.argv[1]
    cfgFn = sys.argv[2]
    mode  = sys.argv[3]

    if len(sys.argv) <= 4:
        pitchL = None
    else:
        pitchL = [ int(sys.argv[i]) for i in range(4,len(sys.argv)) ]

    cfg = parse_yaml_cfg( cfgFn )

    if mode == 'us_db':
        plot_us_db_curves_main( inDir, cfg, pitchL, plotTakesFl=True, usMax=None )

    elif mode == 'noise':
        plot_all_noise_curves( inDir, cfg, pitchL )

    elif mode == 'min_max':
        plot_min_max_db( inDir, cfg, pitchL )

    elif mode == 'min_max_2':
        plot_min_max_2_db( inDir, cfg, pitchL )

    elif mode == 'us_db_map':
        plot_us_to_db_map( inDir, cfg, pitchL=pitchL )

    elif mode == 'audacity':
        rms_analysis.write_audacity_label_files( inDir, cfg.analysisArgs['rmsAnalysisArgs'] )

    elif mode == 'rpt_take_ids':
        report_take_ids( inDir )

    elif mode == 'manual_db':
        plot_min_db_manual( inDir, cfg )

    elif mode == 'gen_vel_map':
        gen_vel_map( inDir, cfg, "minInterpDb.json", 9, "cache_us_db.json" )

    elif mode == 'cache_us_db':
        cache_us_db( inDir, cfg, "cache_us_db.json")

    else:
        print("Unknown mode:",mode)