I am trying to make a midi file parser that outputs something of shape [Songs, track_data], where each track_data contains it's song name and track's beats. Each beat contaings it's notes [semitone, octave] and the temporal distance in milliseconds until the next beat. I am talking about the actual, percieved time between beats, that has nothing to do with tempo, time singatures, etc. The problem is that currently, after playing the returned file, the time doesn't scale properly with changes in bpm, it becomes slower or faster than original.
I am opened to any method you suggest
Here is what I have so far, but it still messes up the tempo changes, and i think time signature too.
def midi_to_array(src_folder):
files = [f for f in os.listdir(src_folder) if f.endswith('.mid') or f.endswith('.midi')][-5:]
songs_data = []
for file in files:
mid = mido.MidiFile(os.path.join(src_folder, file))
tracks = []
for i, track in enumerate(mid.tracks):
last_msg_time = 0
current_tempo = 500000 # MIDI default tempo in microseconds per beat
track_name = ""
track_beats = []
song_artist = ""
beat_data = {'start': 0, 'notes': []}
accumulated_ticks = 0
for j, msg in enumerate(track):
if not msg.is_meta:
accumulated_ticks += msg.time
if msg.type == 'set_tempo':
current_tempo = msg.tempo
if msg.type == 'track_name':
track_name = msg.name.strip().lower()
if msg.type == 'note_on':
elapsed_time = mido.tick2second(accumulated_ticks, mid.ticks_per_beat, current_tempo)
delta_time = elapsed_time - last_msg_time
#last_msg_time = elapsed_time
if delta_time > 0: # If a new beat has started
if beat_data['notes']: # Only append if notes are not empty
track_beats.append(beat_data)
beat_data = {'start': elapsed_time * 1000, 'notes': []} # Start a new beat
beat_data['notes'].append([msg.note % 12, msg.note // 12])
if beat_data['notes']: # Append the last beat if it's not empty
track_beats.append(beat_data)
track_data = {'track_name': track_name, 'track_beats': track_beats}
tracks.append(track_data)
songs_data.append(tracks)
return songs_data