Let's take data for Beethoven's "Moonlight" sonata again.
In [1]:
%run startup.ipy
This time we restrict the data to recordings that have a performer relation and a recording date, which leaves around 100 recordings.
In [2]:
moonlight_mbid = '11e7e520-f430-306c-90b8-183cbf3cc761'
recordings = sql("""
    SELECT a.name AS artist,
           -- assemble dates from the separate year/month/day columns
           to_date(to_char(l.begin_date_year, '9999') ||
                   to_char(l.begin_date_month, '99') ||
                   to_char(l.begin_date_day, '99'), 'YYYY MM DD') AS start_date,
           to_date(to_char(l.end_date_year, '9999') ||
                   to_char(l.end_date_month, '99') ||
                   to_char(l.end_date_day, '99'), 'YYYY MM DD') AS end_date,
           r.gid AS mbid,
           r.length * interval '1ms' AS duration
      FROM work AS w
      JOIN l_recording_work AS lrw ON w.id = lrw.entity1
      JOIN recording AS r ON r.id = lrw.entity0
      JOIN l_artist_recording AS lar ON r.id = lar.entity1
      JOIN artist AS a ON a.id = lar.entity0
      JOIN link AS l ON l.id = lar.link
     WHERE w.gid = %(moonlight_mbid)s
       AND l.begin_date_year > 0
     ORDER BY start_date;
""", moonlight_mbid=moonlight_mbid)
We can query AcousticBrainz for all of these recordings in a single request using their MBIDs.
First we check which recordings have information stored in AcousticBrainz:
In [3]:
import requests
resp = requests.get('https://acousticbrainz.org/api/v1/count?recording_ids='
                    + ';'.join(recordings.mbid.astype(str)))
In [4]:
resp.json()
Out[4]:
In [5]:
recordings['acousticbrainz'] = recordings.mbid.apply(
    lambda mbid: resp.json().get(str(mbid), {'count': 0})['count'])
In [6]:
recordings.sort_values('acousticbrainz', ascending=False).head(10)
Out[6]:
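As a quick sanity check, here is a small sketch (it only reuses the `acousticbrainz` column computed above) counting how many of these recordings actually have at least one AcousticBrainz submission:
In [ ]:
(recordings.acousticbrainz > 0).sum()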
In [7]:
# pick one recording (labelled 'Gilels' in the comparison below)
mbid = '0317b896-ee9a-40a0-8e94-31e13fd723f2'
resp = requests.get('https://acousticbrainz.org/api/v1/{}/low-level'.format(mbid)).json()
In [8]:
resp['tonal']['tuning_frequency']
Out[8]:
In [9]:
print(resp['tonal']['key_key'])
print(resp['tonal']['key_scale'])
print(resp['tonal']['chords_histogram'])
In [10]:
# http://essentia.upf.edu/documentation/reference/streaming_ChordsDescriptors.html
# fixed list of the 24 major/minor chord labels (see the Essentia link above)
chords = 'C, Em, G, Bm, D, F#m, A, C#m, E, G#m, B, D#m, F#, A#m, C#, Fm, G#, Cm, D#, Gm, A#, Dm, F, Am'.split(', ')
main_chord = resp['tonal']['key_key']
if resp['tonal']['key_scale'] == 'minor':
    main_chord += 'm'
# rotate the labels so that the main chord of the recording comes first
idx = chords.index(main_chord)
chords = chords[idx:] + chords[:idx]
In [11]:
iplot(go.Figure(
    data=[go.Bar(x=chords, y=resp['tonal']['chords_histogram'])],
    layout=go.Layout(title="Chords histogram")
))
df = pandas.DataFrame({'chords': chords,
                       'val': resp['tonal']['chords_histogram']}).sort_values('val', ascending=False)
iplot(go.Figure(
    data=[go.Bar(x=df.chords, y=df.val)],
    layout=go.Layout(title="Chords histogram")
))
Looks like the tonic, dominant and subdominant are the main chords, as expected.
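To make this concrete, a small sketch (it only reuses the sorted `df` built above) prints the key reported by AcousticBrainz together with the three most frequent chords, so the tonic / dominant / subdominant claim can be checked directly:
In [ ]:
print(resp['tonal']['key_key'], resp['tonal']['key_scale'])
print(df.head(3))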
In [12]:
resp['rhythm']['beats_count']
Out[12]:
In [13]:
resp['rhythm']['beats_position'][:10]
Out[13]:
In [14]:
import numpy as np
# measure rubato as the deviation of each beat from a perfectly regular pulse
positions = np.array(resp['rhythm']['beats_position'])
cnt = resp['rhythm']['beats_count']
# shift so the first beat is at t=0, then build an evenly spaced reference grid
newpos = (positions - positions[0])
ref = np.linspace(0, newpos[-1], cnt)
# relative difference (%) between the reference grid and the detected beats
rubato = 100 * (ref - newpos)/ref
rubato[0] = 0  # the first point is 0/0, force it to 0
In [15]:
%matplotlib inline
import matplotlib.pyplot as plt
plt.plot(rubato)
plt.title('Shift from mean value on each beat (%)')
Out[15]:
In [16]:
mbid1 = '0317b896-ee9a-40a0-8e94-31e13fd723f2'
mbid2 = 'b11143bf-231e-4017-afb3-b5f38e997bcb'
resp1 = requests.get('https://acousticbrainz.org/api/v1/{}/low-level'.format(mbid1)).json()
resp2 = requests.get('https://acousticbrainz.org/api/v1/{}/low-level'.format(mbid2)).json()
In [17]:
df = pandas.DataFrame({'chords': chords,
                       'Gilels': resp1['tonal']['chords_histogram'],
                       'Gulda': resp2['tonal']['chords_histogram']})
iplot(go.Figure(
    data=[go.Bar(x=df.chords, y=df.Gilels),
          go.Bar(x=df.chords, y=df.Gulda)],
    layout=go.Layout(title="Chords histogram")
))
In [18]:
def rubato(resp):
    positions = np.array(resp['rhythm']['beats_position'])
    cnt = resp['rhythm']['beats_count']
    newpos = (positions - positions[0])
    ref = np.linspace(0, newpos[-1], cnt)
    rubato = 100 * (ref - newpos)/ref
    rubato[0] = 0
    return rubato

rubato_Gilels = rubato(resp1)
rubato_Gulda = rubato(resp2)
In [19]:
plt.title('Shift from mean value on each beat (%)')
plt.plot(rubato_Gilels)
plt.plot(rubato_Gulda)
Out[19]:
Interesting: the two pianists behave quite differently at the beginning but become very close around beat 400. So... what happens around beat 200 that could explain this?
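One way to look for it (a sketch, assuming both beat sequences contain at least 250 beats) is to re-plot the two rubato curves zoomed in on the region around beat 200:
In [ ]:
plt.title('Shift from mean value, beats 150-250 (%)')
plt.plot(range(150, 250), rubato_Gilels[150:250], label='Gilels')
plt.plot(range(150, 250), rubato_Gulda[150:250], label='Gulda')
plt.legend()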
In [20]:
#http://imslp.org/wiki/Piano_Sonata_No.14,_Op.27%20No.2_(Beethoven,_Ludwig_van)