Source code for theater.midi

"""midi.py

MIDI event processing objects common to several scripts.  Uses the mido library for MIDI file and endpoint I/O."""

import logging
import time

import mido

# initialize logging for this module
log = logging.getLogger('MIDI')

#================================================================
def decode_mpd218_key(key):
    """Interpret an MPD218 pad event note value as a row, column, and bank position.

    Row 0 is the front/bottom row (Pads 1-4), row 3 is the back/top row (Pads 13-16).
    Column 0 is the left, column 3 is the right.  Bank 0 is the A bank, bank 1 is the
    B bank, bank 2 is the C bank.

    :param key: an integer MIDI note value
    :return: (row, column, bank)
    """
    # Decode the key into coordinates on the 4x4 pad grid.
    bank = (key - 36) // 16
    pos  = (key - 36) % 16
    row  = pos // 4
    col  = pos % 4
    return row, col, bank
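# Illustrative examples (not part of the original module), following the offset of 36
# used above for the first pad of bank A:
#
#   decode_mpd218_key(36)   # -> (0, 0, 0): bank A, Pad 1  (front-left)
#   decode_mpd218_key(39)   # -> (0, 3, 0): bank A, Pad 4  (front-right)
#   decode_mpd218_key(48)   # -> (3, 0, 0): bank A, Pad 13 (back-left)
#   decode_mpd218_key(52)   # -> (0, 0, 1): bank B, Pad 1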
#================================================================
class Receiver:
    """Use the mido library to create a MIDI endpoint.  Real-time events are received
    on a background thread and passed to a user-supplied callback."""

    def __init__(self, args, midi_endpoint, event_callback):
        # Open the MIDI input port.
        names = mido.get_input_names()
        if midi_endpoint in names:
            self.midi_port = mido.open_input(midi_endpoint)
        else:
            log.warning("Requested MIDI input %s not available in %s", midi_endpoint, names)
            self.midi_port = mido.open_input(names[0])
        log.info("Opened MIDI input port %s", self.midi_port.name)

        # Note: callbacks will arrive on a different thread.  This should be fine
        # since the translation system is stateless, all MIDI messages simply
        # generate OSC output, and the underlying network code should be thread-safe.
        self.midi_port.callback = event_callback
        return
    def close(self):
        self.midi_port.close()
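# Illustrative usage sketch (not part of the original module).  The endpoint name and
# the args object are hypothetical placeholders; the callback receives mido.Message
# objects on a background thread.
#
#   def on_midi(msg):
#       if msg.type == 'note_on':
#           row, col, bank = decode_mpd218_key(msg.note)
#           print("pad hit at row", row, "column", col, "bank", bank)
#
#   receiver = Receiver(args, 'MPD218 Port A', on_midi)
#   ...
#   receiver.close()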
#================================================================
class Player:
    """MIDI file player."""

    def __init__(self, args, filename, callback):
        self.verbose = args.verbose
        self.event_callback = callback
        self.midifile = mido.MidiFile(filename)
        self.log_midifile_metadata(filename, self.midifile)

        # MIDI files express tempo as microseconds per quarter note; this sets a
        # default tempo of 120 BPM (0.5 sec per beat).
        self.midi_tempo = 500000

        # MIDI files express times in integer ticks per quarter note.
        self.ticks_per_beat = self.midifile.ticks_per_beat

        # Merge all tracks (if more than one) into a single track in time order.
        # This assumes that events are identified by channel number for routing
        # and not simply by track.
        self.playtrack = mido.merge_tracks(self.midifile.tracks)
        return
    def close(self):
        pass
#--------------------------------------------------------------------
    def log_midifile_metadata(self, filename, midifile):
        log.info("Opened MIDI file %s, MIDI format %d, %d tracks, %d ticks per beat",
                 filename, midifile.type, len(midifile.tracks), midifile.ticks_per_beat)

        for i, track in enumerate(midifile.tracks):
            log.info("MIDI file track %d: %s", i, track.name)

        # Report some diagnostics on the current midifile to the log.
        for track in midifile.tracks:
            log.info("Contents of track '%s'", track.name)
            event_messages = 0
            for msg in track:
                if msg.is_meta:
                    log.info("MIDI metadata: %s", msg)
                else:
                    event_messages += 1
            # end of track
            log.info("Track '%s' includes %d events.", track.name, event_messages)
#--------------------------------------------------------------------
    def perform_event(self, event):
        # Ignore metadata messages: track_name, instrument_name, key_signature,
        # smpte_offset, etc.
        if event.type == 'set_tempo':
            log.info("MIDI tempo change: %d usec/beat (%f BPM).", event.tempo, mido.tempo2bpm(event.tempo))
            self.midi_tempo = event.tempo

        elif event.type in ['note_on', 'note_off']:
            log.debug("Note event %s: channel %d, note %d, velocity %d",
                      event.type, event.channel, event.note, event.velocity)
            self.event_callback(event)
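    # Illustrative note (not in the original source): mido.tempo2bpm() converts
    # microseconds per quarter note into beats per minute, so the default tempo of
    # 500000 usec/beat corresponds to 120 BPM.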
#--------------------------------------------------------------------
    def run(self, skip=0):
        """Run one performance of the MIDI file and return.  Each MIDI event is issued
        in real time as OSC network messages translated via the MIDI-to-OSC bridge object."""

        # Keep track of target times in integer nanoseconds to avoid roundoff errors.
        # Following an absolute clock will maintain overall precision in the presence
        # of sleep time jitter.
        start_t = time.monotonic_ns()
        next_timepoint = start_t

        # Play through all the events in the sequence, waiting the specified number
        # of ticks before each issue.
        for event in self.playtrack:
            if self.verbose:
                log.debug("Playing event: %s", event)

            # Don't perform the end of track; it can have an unreasonable delay.
            if event.type == 'end_of_track':
                log.info("Found end event: %s", event)
                return

            if skip > 0:
                skip -= 1
            else:
                # If the next event has a preceding delay, convert from ticks at the
                # current tempo to an absolute time in nanoseconds.
                if event.time > 0:
                    event_ns = event.time * 1000 * self.midi_tempo / self.ticks_per_beat
                    next_timepoint += event_ns
                    delay_ns = next_timepoint - time.monotonic_ns()
                    if delay_ns > 0:
                        time.sleep(delay_ns * 1e-9)

                self.perform_event(event)
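    # Worked example of the tick-to-nanosecond conversion above (illustrative figures):
    # at the default tempo of 500000 usec/beat and 480 ticks per beat, a delay of
    # 480 ticks becomes 480 * 1000 * 500000 / 480 = 500,000,000 ns, i.e. 0.5 seconds.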
#================================================================
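# Illustrative usage sketch (not part of the original module).  The command-line
# interface and the print_event callback are hypothetical placeholders; the args
# object only needs a 'verbose' attribute for the Player class above.
if __name__ == '__main__':
    import argparse

    def print_event(event):
        # Stand-in for the MIDI-to-OSC bridge callback: just print each message.
        print(event)

    parser = argparse.ArgumentParser(description="Play a MIDI file through a callback.")
    parser.add_argument('--verbose', action='store_true', help="Enable more detailed logging.")
    parser.add_argument('midifile', help="Path of the MIDI file to perform.")
    args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
    player = Player(args, args.midifile, print_event)
    player.run()
    player.close()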