Commit c265d667 authored by hackecology

add export_midi function, bpm selection and processing.py files

parent 7b9c76f1
......@@ -83,15 +83,13 @@ def fracdim(Z, threshold):
'''
SONIFICATION
============
'''
'''
Defining the starting note based on the subtraction of y_values by notes_in_key
Define the starting note based on the difference between y_values and notes_in_key
new_y.append creates values based on the sum of each y and transpose_value
'''
def make_first_number_match_key(y_values, notes_in_key):
......@@ -164,7 +162,7 @@ def get_scaled_value(old_value, old_min, old_max, new_min, new_max):
'''
set a list inside a MIDI range defined by new_min and new_max
Scale a list into the MIDI range defined by new_min and new_max
the output is based on list_to_scale and uses the get_scaled_value function
'''
def scale_list_to_range(list_to_scale, new_min, new_max):
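# A hedged usage sketch (hypothetical values): scale_list_to_range should map
# every entry of list_to_scale into the new_min..new_max MIDI range via
# get_scaled_value, e.g.
# pitches = scale_list_to_range([0.2, 3.7, 9.1], new_min=36, new_max=84)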
......@@ -236,7 +234,8 @@ def convert_to_key(data, key, number_of_octaves=4):
'''
This function is applied to JSON data (in this case for climate data)
The dataset can be found here: https://www.ncdc.noaa.gov/cag/global/time-series
You can download the JSON file or simply provide the link
'''
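# A hedged usage sketch of normalize_climate_data (defined just below); the file
# name and the {'data': {year: value}} layout follow the test script in this
# commit and are otherwise assumptions:
# import json
# with open('sample_data/1880-2019.json') as f:
#     normalized = normalize_climate_data(json.load(f))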
def normalize_climate_data(climate_json):
years = [int(year) for year in climate_json['data'].keys()]
......@@ -310,9 +309,11 @@ Export the MIDIfile
data: dictionary of x, y coordinates for pitch and timing
Optional: add a string to the start of the data list to specify instrument!
type: the type of data passed to create tracks. Either 'single' or 'multiple'
The bpm defaults to 120 and can be changed to suit the input data.
"""
def write_to_midifile(data, track_type='single'):
def write_to_midifile(data, bpm=120, track_type='single'):
if track_type not in ['single', 'multiple']:
raise ValueError('Track type must be single or multiple')
......@@ -332,7 +333,7 @@ def write_to_midifile(data, track_type='single'):
for data_list in data:
midifile.addTrackName(track, time, 'Track {}'.format(track))
midifile.addTempo(track, time, 120)
midifile.addTempo(track, time, bpm)
instrument_type = 'melodic'
if type(data_list[0]) != tuple:
......@@ -357,9 +358,60 @@ def write_to_midifile(data, track_type='single'):
return memfile
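# A minimal usage sketch of write_to_midifile with an explicit bpm; the demo data
# is a hypothetical list of (time, MIDI pitch) tuples and the helper name is only
# for illustration. play_memfile_as_midi is defined further below in this module.
def _example_write_and_play():
    demo = [(0, 60), (1, 64), (2, 67), (3, 72)]
    memfile = write_to_midifile(demo, bpm=90, track_type='single')
    play_memfile_as_midi(memfile)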
'''
Export the MIDI data to disk as output.mid
'''
def export_midi(data, key=None, number_of_octaves=4, bpm=120,
track_type='single'):
if track_type not in ['single', 'multiple']:
raise ValueError('Track type must be single or multiple')
if track_type == 'single':
data = [data]
midifile = MIDIFile(numTracks=len(data), adjust_origin=False)
track = 0
time = 0
program = 0
channel = 0
duration = 1
volume = 90
for data_list in data:
midifile.addTrackName(track, time, 'Track {}'.format(track))
midifile.addTempo(track, time, bpm)
instrument_type = 'melodic'
if type(data_list[0]) != tuple:
program, instrument_type = get_instrument(data_list.pop(0))
if instrument_type == 'percussion':
volume = 100
channel = 9
# Write the notes we want to appear in the file
for point in data_list:
time = point[0]
pitch = int(point[1]) if instrument_type == 'melodic' else program
midifile.addNote(track, channel, pitch, time, duration, volume)
midifile.addProgramChange(track, channel, time, program)
track += 1
channel = 0
expfile = open("output.mid", 'wb')
midifile.writeFile(expfile)
midifile.close()
expfile.close()  # make sure output.mid is flushed to disk
return expfile
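# A minimal usage sketch of export_midi; the demo data is a hypothetical list of
# (time, MIDI pitch) tuples, the helper name is only for illustration, and the
# call simply writes output.mid to disk.
def _example_export_midi():
    demo = [(0, 60), (1, 62), (2, 64), (3, 65)]
    export_midi(demo, bpm=100, track_type='single')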
'''
To play MIDI without having to save to a file
This is is using pygame as we can see.
This makes use of pygame.
'''
def play_memfile_as_midi(memfile):
pygame.init()
......@@ -404,4 +456,5 @@ def play_midi_from_data(input_data, key=None, number_of_octaves=4,
data = input_data
memfile = write_to_midifile(data, track_type=track_type)
play_memfile_as_midi(memfile)
\ No newline at end of file
play_memfile_as_midi(memfile)
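# A minimal usage sketch of play_midi_from_data; the (time, value) pairs are
# hypothetical, the helper name is only for illustration, and 'f_major' mirrors
# the key used in the test script of this commit.
def _example_play_from_data():
    demo = [(0, 1.5), (1, 3.2), (2, 2.4), (3, 4.1)]
    play_midi_from_data(demo, key='f_major', track_type='single')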
......@@ -25,17 +25,14 @@ multitrack_data_with_instruments = []
for index, track in enumerate(test):
multitrack_data_with_instruments.append([instruments_to_add[index]] + track)
# test a drum track with a solid beat
max_number_of_beats = multitrack_data_with_instruments[1][-2][1]
bass_drum = []
for beat in range(0, int(max_number_of_beats + 1)):
bass_drum.append((beat, 1))
beat_track = ['bass drum 1'] + bass_drum
multitrack_data_with_instruments.append(beat_track)
print(multitrack_data_with_instruments)
'''
fractal.play_midi_from_data(multitrack_data_with_instruments,
track_type='multiple', key='f_major')
\ No newline at end of file
track_type='multiple', key='f_major')
'''
#fractal.write_multi(multitrack_data_with_instruments, track_type='multiple')
fractal.export_midi(multitrack_data_with_instruments, bpm=12,
track_type='multiple')
\ No newline at end of file
......@@ -9,8 +9,7 @@ import matplotlib.pyplot as plt
import pandas as pd
import fractaL.core as fractal
#from fractaL.core import normalize_climate_data, csv_to_MIDITime_data
from miditime.miditime import MIDITime
with open('sample_data/1880-2019.json') as data_file:
......@@ -33,29 +32,6 @@ labels = ['date', 'temperature']
climatefile = pd.DataFrame.from_records(normalized_climate_data, columns=labels)
'''Export the dataframe to csv, then read it back with the csv_to_MIDITime_data function'''
climatefile.to_csv('sample_data/climatefile.csv', index=False)
climate_ex = fractal.csv_to_MIDITime_data('sample_data/climatefile.csv')
###
# Instantiate the class with a tempo (120 bpm) and the output file name:
# MIDITime(tempo=120, outfile='miditime.mid', seconds_per_year=5, base_octave=5,
#          octave_range=1, custom_epoch=None)
# * For data before 1970 the custom_epoch (UNIX time) must be set
###
mymidi = MIDITime(120, 'fractal.mid', 100, 4, 2)
# Make a beat based on days_since_epoch (maybe change to UNIX time, which is more common)
my_data_timed = [{'beat': mymidi.beat(d['days_since_epoch']),
'magnitude': d['magnitude']} for d in climate_ex]
#Setting starting time
start_time = my_data_timed[0]['beat']
fractal.export_midi(normalized_climate_data, bpm=12,
track_type='single')
File added
add_library('minim')
add_library('peasycam')
cylindrical, isPlaying, isMute = False, False, False
pts, radius, latheRadius, segments = 10, 1, 300, 500
vertices, vertices2 = [[PVector() for e in range(pts+1)] for e in range(2)]
def setup():
global song, fftLin, fftLog, cam
size(1200, 800, P3D)
frameRate(1000)
smooth(8)
cam = PeasyCam(this, 1400)
cam.setMaximumDistance(600)
cam.rotateX(-PI/2.4)
perspective(60 * DEG_TO_RAD, width/float(height), 2, 6000)
minim = Minim(this)
song = minim.loadFile("multi.mp3")
fftLin, fftLog = [FFT(song.bufferSize(), song.sampleRate()) for e in range(2)]
fftLin.linAverages(30), fftLog.logAverages(22, 3)
def draw():
background(0, 0, 0)
lights()
ambientLight(111, 20, 185)
directionalLight(255,143,192, 1, 20, 60)
pointLight(155,215,60, width, height/2, 0)
rotateZ(frameCount*PI/560)
fill(111, 11, 179)
noStroke()
song.play() if isPlaying else song.pause()
song.mute() if isMute else song.unmute()
fftLin.forward(song.mix)
fftLog.forward(song.mix)
latheAngle = 0
for s in range(segments):
angle = 0
beginShape(QUAD_STRIP)
for i, v in enumerate(vertices2):
division = 1 if s%2 == 0 else 6
step = s*2 # select 1 freq every 2 freq (up to 256 out of 512)
c_step = (s*2)+((s%2)*2) # select 1 freq every 4 freq (up to 256 freq out of 512)
if cylindrical: sscale = map(fftLin.getBand(c_step)*((s+10)/10), 0, 35, .8, 35)
else: sscale = map(fftLin.getBand(step)*((s+10)/10), 0, 30, .8, 24)
sscale = constrain(sscale, 0, 50)
vertices[i].x = latheRadius + sin(radians(angle)) * radius * sscale
vertices[i].z = cos(radians(angle)) * radius * sscale
angle+=360.0/pts
vertex(v.x, v.y, v.z)
v.x = cos(radians(latheAngle)) * vertices[i].x
v.y = sin(radians(latheAngle)) * vertices[i].x
v.z = vertices[i].z
vertex(v.x, v.y, v.z)
latheAngle += (360.0+260)/(segments*6/division) if cylindrical else 360.0/segments
endShape()
cam.beginHUD()
text("'p' = PLAY/PAUSE", 20, 30)
text("'r' = REPLAY", 20, 50)
text("'m' = MUTE", 20, 70)
text("'c' = MODE", 20, 90)
cam.endHUD()
def keyPressed():
global isPlaying, cylindrical, isMute
if key == 'p': isPlaying = not isPlaying
if key == 'm': isMute = not isMute
if key == 'c': cylindrical = not cylindrical
if key == 'r': song.rewind()
num_lines = 100
points_per_line = 100
window_buffer = 0
# noise params
noise_mag = 200
xoff = random(10000)
yoff = random(10000)
zoff = random(10000)
dxoff = 0.06 # change in xoff between vertices
dxoff_t = 0.01 # change in xoff over time (produces scrolling)
dyoff = 0.07 # change in yoff between lines
dzoff = 0.01
noiseDetail(2, .4)
def setup():
size(800,800)
noFill()
stroke(255)
strokeWeight(3)
#noLoop()
def draw():
background(0)
y_ = getY_()
x_ = getX_()
drawLines(x_,y_)
def getY_():
inc = ceil(float(height - 2 * window_buffer) / (num_lines + 1))
y_ = []
for l in range(0, num_lines):
y_.append(window_buffer + l * inc)
return y_
def getX_():
inc = ceil(float(width - 2 * window_buffer) / (points_per_line + 1))
x_ = []
for p in range(0, points_per_line):
x_.append(window_buffer + p * inc)
return x_
def drawLines(x_, y_):
global zoff, xoff
zoff += dzoff
xoff += dxoff_t
for i, y in enumerate(y_):
## POINTS
for j, x in enumerate(x_):
n = noise(xoff + j * dxoff, yoff + i * dyoff, zoff) # perlin noise
n_tot = noise_mag * (n - 0.5) # scaled & offset noise
stroke(n*255)
strokeWeight(n*7)
point(x, y - n_tot)
# ## LINES
beginShape()
for j, x in enumerate(x_):
n = noise(xoff + j * dxoff, yoff + i * dyoff, zoff) # perlin noise
n_tot = noise_mag * (n - 0.5) # scaled & offset noise
vertex(x, y - n_tot)
endShape()
add_library('peasycam')
add_library('minim')
cylindrical, isPlaying, isMute = False, False, False
n_points = 200
radius, radius2 = 1, 200  # unit lathe radius and base ring offset
step = 20
vertices, vertices2 = [[PVector() for e in range(n_points+1)] for e in range(2)]
maximum = 450
minimum = 170
factor = .006
def setup():
global song, fftLin, fftLog, cam, lines
size(900, 750, P3D)
frameRate(1000)
background(0)
smooth(8)
cam = PeasyCam(this, 1400)
cam.rotateX(-PI/2.4)
perspective(60 * DEG_TO_RAD, width/float(height), 2, 6000)
minim = Minim(this)
song = minim.loadFile("multi.mp3")
fftLin, fftLog = [FFT(song.bufferSize(), song.sampleRate()) for e in range(2)]
fftLin.linAverages(30), fftLog.logAverages(22, 3)
n_cols, n_rows = 1000, 80
angle = radians(360) / n_cols
terrain = [[0 for e in range(n_rows)] for f in range(n_cols+1)]
# Computing noise
for y in range(n_rows):
for x in range(n_cols+1):
terrain[x][y] = map(noise(cos(x * angle) * 3, sin(y * angle) * 3 ), 0, 1, 0, maximum)
lines = createShape()
for y in range(n_rows):
lines.beginShape(LINES)
lines.strokeWeight(1)
lines.stroke(250,250,250)#try to replace by lines.stroke(random(46,255))
for x in range(n_cols+1):
lx1 = cos(angle * (x-1) ) * (radius + (y*step))
ly1 = sin(angle * (x-1) ) * (radius + (y*step))
lx2 = cos(angle * x ) * (radius + (y*step))
ly2 = sin(angle * x ) * (radius + (y*step))
lines.vertex(lx1, ly1, terrain[x-1][y])
lines.vertex(lx2, ly2, terrain[x][y])
lines.endShape(CLOSE)
def draw():
background(0)
shape(lines)
noFill()
song.play() if isPlaying else song.pause()
song.mute() if isMute else song.unmute()
fftLin.forward(song.mix)
fftLog.forward(song.mix)
latheAngle = 0
for s in range(step):
angle = 0
beginShape(QUAD_STRIP)
for i, v in enumerate(vertices2):
division = 1 if s%2 == 0 else 6
steps = s*2 # select 1 freq every 2 freq (up to 256 out of 512)
c_step = (s*2)+((s%2)*2) # select 1 freq every 4 freq (up to 256 freq out of 512)
if cylindrical: sscale = map(fftLin.getBand(c_step)*((s+10)/10), 0, 35, .8, 35)
else: sscale = map(fftLin.getBand(steps)*((s+10)/10), 0, 30, .8, 24)
sscale = constrain(sscale, 0, 50)
vertices[i].x = radius2 + sin(radians(angle)) * radius * sscale
vertices[i].z = cos(radians(angle)) * radius * sscale
angle+=360.0/n_points
vertex(v.x, v.y, v.z)
v.x = cos(radians(latheAngle)) * vertices[i].x
v.y = sin(radians(latheAngle)) * vertices[i].x
v.z = vertices[i].z
vertex(v.x, v.y, v.z)
latheAngle += (360.0+260)/(step*6/division) if cylindrical else 360.0/step
endShape()
cam.beginHUD()
text("'p' = PLAY/PAUSE", 20, 30)
text("'r' = REPLAY", 20, 50)
text("'m' = MUTE", 20, 70)
text("'c' = MODE", 20, 90)
cam.endHUD()
def keyPressed():
global isPlaying, cylindrical, isMute
if key == 'p': isPlaying = not isPlaying
if key == 'm': isMute = not isMute
if key == 'c': cylindrical = not cylindrical
if key == 'r': song.rewind()
import datetime
import os
from random import shuffle, seed
################################################################################
# Global variables
################################################################################
# Get time
timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
# Set random seed value for both Python 'random' and Processing 'random'
rand_seed = 1138
print(rand_seed)
# Comment out seeds below to get new shape on every run
seed(rand_seed) # This only applies to the Python random functions
randomSeed(rand_seed) # This only applies to the Processing random functions
################################################################################
# Knobs to turn
################################################################################
filename = 'allegory'
record = True
animate = True
animate_mode = 'sinusoid'
# Canvas size
w = 1200 # width
h = 800 # height
steps = 1000
num_loops = 1
frame_rate = 20
inc = 0.01
scl = 20
cols = floor(w/scl)
rows = floor(h/scl)
step = TAU/steps
t1 = 0
# t2 = 1000
# t3 = 100000
c_points = [radians(x) for x in range(1, 360, 10)]
print(c_points)
def setup():
# Sets size of canvas in pixels (must be first line)
size(w, h) # (width, height)
# Sets resolution dynamically (affects resolution of saved image)
pixelDensity(displayDensity()) # 1 for low, 2 for high
# Sets color space to Hue Saturation Brightness with max values of HSB respectively
colorMode(HSB, 360, 255, 255, 255)
# Set the number of frames per second to display
frameRate(frame_rate)
# Stops draw() from running in an infinite loop (should be last line)
if not animate:
noLoop() # Comment to run draw() infinitely (or until 'count' hits limit)
background(0, 0, 0)
stroke(263,100,230)
noFill()
def draw():
global t1
global t2
global t3
t1 = t1 + 0.03
# t2 = t2 + 2;
# t3 = t3 + 2;
if frameCount > (steps * num_loops):
#exit()
pass
beginShape()
r = w*0.03
# First 3 points of each blob line are explicitly set because
# they are needed at the end of the shape to close the loop
a = c_points[0]
n = map(noise(t1, a), 0, 1, 1, 2)
x0, y0 = circle_point(w/2, h/2, n*(r+frameCount), a)
curveVertex(x0, y0)
a = c_points[1]
n = map(noise(t1, a), 0, 1, 1, 2)
x1, y1 = circle_point(w/2, h/2, n*(r+frameCount), a)
curveVertex(x1, y1)
a = c_points[2]
n = map(noise(t1, a), 0, 1, 1, 2)
x2, y2 = circle_point(w/2, h/2, n*(r+frameCount), a)
curveVertex(x2, y2)
for i,a in enumerate(c_points):
# Limiting which points get vertices makes the "floor"
if i>3:
n = map(noise(t1, a), 0, 1, 1, 2)
x, y = circle_point(w/2, h/2, n*(r+frameCount), a)
curveVertex(x, y)
# The three first points are laid out again to smoothly close the loop
curveVertex(x0, y0)
curveVertex(x1, y1)
curveVertex(x2, y2)
endShape()
if record:
save_frame_timestamp(filename, timestamp)
def save_frame_timestamp(filename, timestamp='', output_dir='output'):
'''Saves each frame with a structured filename to allow for tracking all output'''
filename = filename.replace('\\', '')
filename = filename.replace('/', '')
output_filename = os.path.join(output_dir, '{}_{}_{}_####.png'.format(timestamp, filename, rand_seed))
saveFrame(output_filename)
print(output_filename)
def circle_point(cx, cy, r, a):
x = cx + r * cos(a)
y = cy + r * sin(a)
return x, y
'''
fractaL images - v 0.0.1 - 16th April 2019
To work with Processing.py easily you first need to convert the data to a list.
To do so, we import the data and set it up.
This code works with datasets, particularly in dataframe format.
It is built to enable functionality for the next steps, namely:
1 - Sensor integration: RPi and Arduino
2 - Easy adaptation to dictionaries (like JSON files)
'''
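# A hedged sketch of the "convert the data to a list" step described above; the
# csv module is used so it also runs inside Processing.py, and the file name is
# assumed from the climate script elsewhere in this repository:
# import csv
# with open('sample_data/climatefile.csv') as f:
#     rows = [row for row in csv.reader(f)][1:]   # drop the header row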
size(500,500)
noStroke()
background('#004477')
dates = [ 'Jan 28', 'Feb 04']
scores = [
['Jan 28', 120000, 220],
['Feb 04', 80000, 260]
]
print(scores)
mode=Python
mode.id=jycessing.mode.PythonMode
from particle import Particle
import math
inc = 0.2
scl = 9 # ratio of cell size to canvas size
numrows = 999
numcols = 999
zoff = 0 # time dimension for noise
num_particles = 500
particles = []
flowfield = []
def setup():
global particles, num_particles, flowfield, numrows, numcols
size(600,600,P2D)
background(255,50)
numcols = int(math.ceil(width / scl)) + 1
numrows = int(math.ceil(height / scl)) + 1
print(numrows, numcols)
for y in range(numrows):
for x in range(numcols):
flowfield.append(None)
for i in range(num_particles):
particles.append(Particle())
def draw():
global zoff, particles, flowfield, numrows, numcols
# background(255)
stroke(255, 150)
strokeWeight(1.5)
yoff = 0
for y in range(numrows):
xoff = 0
for x in range(numcols):
# define flow vector at location
index = (x + y * numcols)
angle = noise(xoff, yoff, zoff) * TWO_PI * 3
v = PVector.fromAngle(angle)
v.setMag(.8)
flowfield[index] = v
# draw vector
# pushMatrix()
# translate(x * scl, y * scl)
# rotate(v.heading())
# line(0,0,scl,0)
# popMatrix()
xoff += inc
yoff += inc
zoff += 0.002
print(floor(frameRate))
for p in particles:
p.follow(flowfield, scl, numcols)
p.update()
p.show()
saveFrame("frames/flow_####.png")
class Particle(object):
def __init__(self):
# # OPTION 1
# self.pos = PVector(random(width),random(height))
# self.vel = PVector(0,0)
#OPTION 2
r = random(30)
a = random(TWO_PI)*4
self.pos = PVector(width/2 + r*cos(a), height/2 + r*sin(a))
self.vel = PVector.random2D().limit(4)
self.acc = PVector(0,0)
self.maxspeed = 4
self.prevpos = PVector.random2D()
def update(self):
self.prevpos = self.pos.copy()
self.vel.add(self.acc)
self.vel.limit(self.maxspeed)
self.pos.add(self.vel)
self.acc.mult(0)
self.edges()
def applyForce(self, force):
self.acc.add(force)
def show(self):
stroke(0, 5)
strokeWeight(.8)
line(self.prevpos.x, self.prevpos.y, self.pos.x, self.pos.y)
def updatePrevious(self):
self.prevpos = self.pos.copy()
def edges(self):
if self.pos.x > width:
self.pos.x = 0
self.updatePrevious()
if self.pos.x < 0:
self.pos.x = width
self.updatePrevious()
if self.pos.y > height:
self.pos.y = 0
self.updatePrevious()
if self.pos.y < 0:
self.pos.y = height
self.updatePrevious()
def follow(self, flowfield, scl, numcols):
x = floor(self.pos.x / scl)
y = floor(self.pos.y / scl)
idx = floor(x + y * numcols)