# cpb_arpeggios.py

# 1. Demonstrate algorithmic audio composition using the built-in speaker.
# 2. Demonstrate use of a Python class to encapsulate functionality.

# This demo is specific to the Adafruit Circuit Playground Bluefruit board.
# This program uses only onboard hardware: speaker, pushbuttons, slide switch.

#================================================================
# Import the standard Python time and math libraries.
import time
import math

# Import the board-specific input/output library.
from adafruit_circuitplayground import cp

#================================================================
# Define a class to represent the algorithmic composition task.
class Arpeggiator(object):
    
    # Define a dictionary of arpeggio patterns as a class attribute.
    patterns = { 'maj': (0, 4, 7, 12),
                 'min': (0, 3, 7, 12),
                 'maj7': (0, 4, 7, 11),
                 'min7': (0, 3, 7, 10),
                 'dim7': (0, 3, 6, 9),
                 }
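    # Each pattern is a tuple of semitone offsets from the root note.  With
    # the default root of MIDI 60 (middle C), the 'maj' pattern produces the
    # notes 60, 64, 67, 72 (C4, E4, G4, C5).  Additional tonalities could be
    # added here; e.g. a hypothetical 'sus4' entry might use (0, 5, 7, 12).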

    # Initialize an instance of the class.
    def __init__(self):
        # Current compositional state.
        self.root_note = 60     # middle-C as a MIDI note
        self.tonality = 'maj'   # key for the patterns dictionary
        self.tempo = 60         # beats per minute

        # Internal state variables.
        self._index = 0                        # arpeggio index of next note to play
        self._direction = 1                    # index step direction
        self._next_time = time.monotonic_ns()  # clock time in nsec for next note update

        return

    # Update function to be called frequently to recompute outputs.
    def poll(self):
        now = time.monotonic_ns()
        if now >= self._next_time:
            self._next_time += 60000000000 // int(self.tempo)   # add nanoseconds per beat
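            # Advancing the deadline by a fixed interval (rather than setting
            # it to now + interval) keeps the long-run beat rate steady even
            # if poll() runs slightly late.  E.g. at 60 BPM the interval is
            # 60000000000 // 60 = 1000000000 ns, i.e. one note per second.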

            # look up the current arpeggio pattern
            pattern = self.patterns[self.tonality]

            # Select the next note to play and advance the position.
            if self._index <= 0:
                # choose the root note of the arpeggio and advance up one step
                note = self.root_note                
                self._index = 1
                self._direction = 1

            elif self._index >= len(pattern)-1:
                # choose the top note of the arpeggio and advance down one step
                note = self.root_note + pattern[-1]
                self._index = len(pattern)-2
                self._direction = -1
                
            else:
                # play either a rising or falling tone within the arpeggio
                note = self.root_note + pattern[self._index]
                self._index += self._direction
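
            # The index thus bounces between the ends of the pattern, so the
            # arpeggio alternately rises and falls, e.g. for 'maj' with root
            # 60: 60, 64, 67, 72, 67, 64, 60, 64, ...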

            # Compute the tone to play and update the speaker output.
            freq = self.midi_to_freq(note)
            cp.stop_tone()
            cp.start_tone(freq)
            print(f"Updating at time {now}, note {note}, freq {freq}")
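            # start_tone() plays the speaker continuously without blocking, so
            # the main loop keeps polling the buttons; the stop_tone() call
            # above ends the previous note before the new one begins.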

    # ----------------------------------------------------------------
    # Convert MIDI note value to frequency. This applies an equal temperament scale.
    def midi_to_freq(self, midi_note):
        MIDI_A0 = 21
        freq_A0 = 27.5
        return freq_A0 * math.pow(2.0, (midi_note - MIDI_A0) / 12.0)
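
    # Sanity check: midi_to_freq(69) returns 440.0 Hz (concert A), and every
    # increase of 12 in the MIDI note number doubles the frequency.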

# ----------------------------------------------------------------
# Initialize global variables for the main loop.

# Create an Arpeggiator object, an instance of the Arpeggiator class.
arpeggiator = Arpeggiator()
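
# Its public attributes (root_note, tonality, tempo) may be changed at any
# time from the main loop; poll() reads them again on each beat.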

# ----------------------------------------------------------------
# Enter the main event loop.
while True:

    # The two pushbuttons select the arpeggio tonality (chord quality).
    if cp.button_a and cp.button_b:
        arpeggiator.tonality = 'dim7'

    elif cp.button_a:
        arpeggiator.tonality = 'min'

    elif cp.button_b:
        arpeggiator.tonality = 'min7'

    else:
        arpeggiator.tonality = 'maj'

    # The slide switch selects between a slower and a faster tempo (in beats per minute).
    if cp.switch:
        arpeggiator.tempo = 120

    else:
        arpeggiator.tempo = 480

    # Run the tone generator.
    arpeggiator.poll()
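
    # Note: this loop runs as fast as possible; poll() returns immediately
    # until the next beat is due.  A short delay such as time.sleep(0.001)
    # could be added here to reduce CPU load, at the cost of a little timing
    # jitter in the button response and note onsets.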
