Chaos on the Android Device
We will now attempt to develop applications on an Android device similar to those previously built with the Arduino Micro.
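All of the following sketches iterate the logistic map y_new = r*y_old*(1.0 - y_old) as the "chaos function", starting from y_old = 0.56 and scaling the result to the range 0..100. A minimal console sketch for plain Processing in Java mode (not part of the original examples, no Android parts needed) shows how the sequence behaves for the three parameter values used below:
//minimal sketch, assuming plain Processing in Java mode:
//print the first iterations of the chaos function for the r values used below
void setup()
{
  float[] rValues = {3.5, 3.8, 3.98};
  for (int k = 0; k < rValues.length; k++)
  {
    float r = rValues[k];
    float y = 0.56;                     //same start value as in the examples
    print("r = " + r + ": ");
    for (int i = 0; i < 10; i++)
    {
      y = r * y * (1.0 - y);            //one iteration of the logistic map
      print((int)(100.0 * y) + " ");    //scaled to 0..100 as in the examples
    }
    println();
  }
}
For r = 3.5 the sequence settles into a repeating pattern, while for r = 3.98 it looks irregular, which is what the examples below exploit.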
1) Show the values of the chaos function
public void setup()
{
fullScreen(); //full screen view
frameRate(3); //draw called 3 times per second
orientation(LANDSCAPE); //on Android device displayed in landscape not portrait
}
float r = 3.5;
//float r = 3.8;
//float r = 3.98;
float y_old = 0.56;
float y_new = 0.0;
int value=0;
public void draw()
{
background(100,100,255); //background light blue
y_new = r*y_old*(1.0 - y_old);
y_old = y_new;
value = (int)(100.0*y_new);
println("Chaos value: "+value); //show value on PC terminal
fill(255,0,0); //text color red
textSize(60); //font size 60pt
text("Chaos value: "+value,50,50); //show value on the screen
}
Code 0-1: Chaos01 (show the value of the chaos function)
Chaos01.zip
1b) Show the values of the chaos function and draw a curve
//draw a curve:
int[] curve;
int index = 0;
public void setup()
{
fullScreen(); //full screen view
frameRate(30); //draw called 30 times per second
orientation(LANDSCAPE); //on Android device displayed in landscape not portrait
curve = new int[width/5];
}
float r = 3.5;
//float r = 3.8;
//float r = 3.98;
float y_old = 0.56;
float y_new = 0.0;
int value=0;
public void draw()
{
background(100,100,255); //background light blue
y_new = r*y_old*(1.0 - y_old);
y_old = y_new;
value = (int)(100.0*y_new);
println("Chaos value: "+value); //show value on PC terminal
fill(255,0,0); //text color red
textSize(60); //font size 60pt
text("Chaos value: "+value,50,50); //show value on the screen
//drawing a curve:
curve[index%curve.length] = value;
stroke(0);
int MAX = index;
if(index>curve.length)
MAX=curve.length;
int start_x = width - 5*MAX;
for(int i=1;i<MAX;i++)
{
line(start_x+(i-1)*5,height - (curve[(index+(i-1))%curve.length]*height)/100,
start_x+(i)*5,height - (curve[(index+(i))%curve.length]*height)/100);
}
index++;
}
Code 0-2: Chaos01b (show the value and draw the curve)
Chaos01b_curve.zip
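In Chaos01b the curve data is kept in a ring buffer: each frame the newest value is written at index%curve.length and the line segments are redrawn from left (oldest) to right (newest). The following simplified sketch (an illustration of the same ring-buffer idea with a noise() test signal, not taken from the ZIP) isolates that mechanism and handles the start-up phase explicitly:
//ring-buffer plot in isolation (illustration only, test signal from noise()):
int[] ring;
int index = 0;

void setup()
{
  size(600, 300);
  frameRate(30);
  ring = new int[width/5];
}

void draw()
{
  background(100, 100, 255);
  ring[index % ring.length] = (int)(100 * noise(index * 0.05)); //newest value
  int n = min(index + 1, ring.length);                          //valid entries so far
  int oldest = (index + 1 < ring.length) ? 0 : (index + 1) % ring.length;
  stroke(0);
  for (int i = 1; i < n; i++)
  {
    int a = (oldest + i - 1) % ring.length;   //older sample of the segment
    int b = (oldest + i) % ring.length;       //newer sample of the segment
    line(width - 5*n + (i-1)*5, height - ring[a]*height/100,
         width - 5*n + i*5,     height - ring[b]*height/100);
  }
  index++;
}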
2) Play a tone according to the value of the chaos function
import info.kramann.extensions.mico;
import processing.video.*;
import ketai.camera.*;
import java.util.Properties;
public void setup()
{
fullScreen(); //full screen view
frameRate(10); //draw called 10 times per second
orientation(LANDSCAPE); //on Android device displayed in landscape not portrait
mico.soundStart();
int id = 0;
int typ = 1; //sine wave
float frequency = 330.0f;
float dt_phase_left = 0.0f;
float dt_phase_right = 0.0f;
float vol_left = 0.5f;
float vol_right = 0.5f;
mico.soundAddTone(id, typ, frequency, dt_phase_left, dt_phase_right, vol_left, vol_right);
}
//float r = 3.5;
//float r = 3.8;
float r = 3.94;
float y_old = 0.56;
float y_new = 0.0;
int value=0;
public void draw()
{
background(100,100,255); //background light blue
y_new = r*y_old*(1.0 - y_old);
y_old = y_new;
value = (int)(100.0*y_new);
println("Chaos value: "+value); //show value on PC terminal
fill(255,0,0); //text color red
textSize(60); //font size 60pt
text("Chaos value: "+value,50,50); //show value on the screen
int id = 0;
float frequency = 200.0f + 4*value;
float dt_phase_left = 0.0f;
float dt_phase_right = 0.0f;
float vol_left = 0.5f;
float vol_right = 0.5f;
mico.soundChangeTone(id, frequency, dt_phase_left, dt_phase_right, vol_left, vol_right);
}
Code 0-3: Chaos02 (play a sine tone controlled by the chaos function)
Chaos02.zip
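The sound output here relies on the mico extension of the course environment (mico.soundStart, mico.soundAddTone, mico.soundChangeTone). For a quick test on a desktop PC, the same mapping of the chaos value to a pitch between 200 Hz and 600 Hz can be sketched with the standard Processing Sound library instead (assuming that library is installed; this is an alternative sketch, not part of the original example):
import processing.sound.*;   //standard Processing Sound library (desktop)

SinOsc osc;
float r = 3.94;
float y_old = 0.56;

void setup()
{
  size(400, 200);
  frameRate(10);
  osc = new SinOsc(this);
  osc.amp(0.5);
  osc.play();
}

void draw()
{
  background(100, 100, 255);
  y_old = r * y_old * (1.0 - y_old);        //iterate the chaos function
  int value = (int)(100.0 * y_old);
  osc.freq(200.0 + 4*value);                //same mapping as in Chaos02: 200..600 Hz
  fill(255, 0, 0);
  textSize(30);
  text("Chaos value: " + value, 20, 50);
}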
2b) Variant: play a registered WAV sample instead of the sine tone
import info.kramann.extensions.mico;
import processing.video.*;
import ketai.camera.*;
import java.util.Properties;
public void setup()
{
fullScreen(); //full screen view
frameRate(10); //draw called 10 times per second
orientation(LANDSCAPE); //on Android device displayed in landscape not portrait
mico.soundStart();
//a wav tone is loaded
float[] ton57 = mico.wavLoadMix("ton57.wav",this);
//the wav tone is stored in the sound system under id 100
mico.soundRegisterSample(100,44100,220.0f,ton57);
//the tone is taken from the registered wav file;
//the sample rate is changed to achieve 880Hz instead of 440Hz,
//i.e. one period of the sample is played more often.
int id=1;
int typ = 100;
float frequency = 880.0f;
float dt_phase_left = 0.0f;
float dt_phase_right = 0.0f;
float vol_left = 0.2f;
float vol_right = 0.8f;
mico.soundAddTone(id, typ, frequency, dt_phase_left, dt_phase_right, vol_left, vol_right);
}
//float r = 3.5;
//float r = 3.8;
float r = 3.94;
float y_old = 0.56;
float y_new = 0.0;
int value=0;
public void draw()
{
background(100,100,255); //background light blue
y_new = r*y_old*(1.0 - y_old);
y_old = y_new;
value = (int)(100.0*y_new);
println("Chaos value: "+value); //show value on PC terminal
fill(255,0,0); //text color red
textSize(60); //font size 60pt
text("Chaos value: "+value,50,50); //show value on the screen
int id = 1;
float frequency = 200.0f + 4*value;
float dt_phase_left = 0.0f;
float dt_phase_right = 0.0f;
float vol_left = 0.5f;
float vol_right = 0.5f;
mico.soundChangeTone(id, frequency, dt_phase_left, dt_phase_right, vol_left, vol_right);
}
Code 0-4: Chaos02b (variant with a registered WAV sample)
Chaos02b.zip
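The variant above registers a WAV sample with a base frequency and then requests a different frequency, which amounts to reading the stored period faster or slower. A small helper (hypothetical, only to make the rate factor explicit; the internals of mico are not shown here):
//hypothetical helper: factor by which a registered sample has to be read faster
//to turn its base frequency into the desired frequency
float playbackRate(float baseFrequency, float desiredFrequency)
{
  return desiredFrequency / baseFrequency;
}

void setup()
{
  //a sample registered at 220 Hz and requested at 880 Hz is read 4 times faster,
  //i.e. one period of the stored wave is played four times as often
  println(playbackRate(220.0, 880.0));   //prints 4.0
}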
5) Play a tone according to the direction of the gravitational acceleration
TAB1 Chaos05
//Look carefully at the run() method in Soundplayer!
import processing.video.*;
import ketai.camera.*;
import java.util.Properties;
import ketai.sensors.*; //using a special android library for sensors.
KetaiSensor sensor;
float accelerometerX, accelerometerY, accelerometerZ;
Soundplayer soundplayer;
public void setup()
{
sensor = new KetaiSensor(this);
sensor.start();
soundplayer = new Soundplayer();
fullScreen(); //full screen view
frameRate(10); //draw called 10 times per second
orientation(LANDSCAPE); //on Android device displayed in landscape not portrait
}
public void draw()
{
background(100,100,255); //background light blue
fill(255,0,0); //text color red
textSize(60); //font size 60pt
text("Accelerometer:
" +
"x: " + nfp(accelerometerX, 1, 3) + "
" +
"y: " + nfp(accelerometerY, 1, 3) + "
" +
"z: " + nfp(accelerometerZ, 1, 3), 0, 0, width, height);
}
void onAccelerometerEvent(float x, float y, float z)
{
accelerometerX = x;
accelerometerY = y;
accelerometerZ = z;
}
Code 0-5: Chaos05, TAB1 (main sketch with accelerometer)
TAB2 AudioTrack
import android.media.AudioTrack;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.MediaRecorder;
public class AudiotrackMicAndroid
{
private AudioRecord audioRecorder = null;
AudioTrack audioTrack;
//int sr = 11025;
//try to improve the quality:
int sr;
//int buffsize = 512;
int buffsize;
int buffsize2;
float[] targetWelle;
short[] recbuf;
public AudiotrackMicAndroid(int sr, int buffsize)
{
this.sr = sr;
this.buffsize = buffsize;
this.buffsize2 = buffsize*2;
this.recbuf = new short[this.buffsize];
targetWelle = new float[this.buffsize2];
try
{
//int pg = AudioRecord.getMinBufferSize(this.sr,
// AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
int audioSource = MediaRecorder.AudioSource.MIC;
//MediaRecorder.AudioSource.DEFAULT
audioRecorder = new AudioRecord(audioSource,
this.sr, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, buffsize*10); // bufferSize
audioRecorder.startRecording();
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
}
catch(Exception ee)
{
System.out.println("FEHLER: "+ee);
}
try
{
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sr,
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT,
buffsize*10,
AudioTrack.MODE_STREAM);
audioTrack.setStereoVolume(1.0f, 1.0f);
audioTrack.play();
}
catch(Exception eee)
{
System.out.println("FEHLER: "+eee);
}
}
public float[] write(short[] puffer)
{
audioTrack.write(puffer, 0,buffsize2);
audioRecorder.read(recbuf, 0, recbuf.length);
for(int i=0;i<recbuf.length;i++)
{
targetWelle[i*2+0] = (float)recbuf[i]/32000.0f;
targetWelle[i*2+1] = (float)recbuf[i]/32000.0f;
}
return targetWelle;
}
}
Code 0-6: Chaos05, TAB2 (AudiotrackMicAndroid)
TAB3 Soundplayer
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.ArrayList;
import processing.core.*;
public class Soundplayer implements Runnable
{
int sr = 44100;
int buffsize = 512;
int buffsize2 = buffsize*2;
public float[] mikrofon_welle = new float[buffsize2];
double dt = 1.0/(float)sr;
double dth = 0.5*dt;
protected ScheduledExecutorService schedExecService;
short[] puffer = new short[buffsize*2]; //4 bytes per sample == 16-bit stereo; factor 2 because of short
public double t=0.0;
private AudiotrackMicAndroid track;
//triangle wave with period 1 (in its argument) and values between -1 and +1
private double dreieck(double x)
{
double phase = 4.0*(x - Math.floor(x));
if(phase<=1.0)
return phase;
else if(phase<=3.0)
return 2.0 - phase;
else //if(phase<=4.0)
return -4.0 + phase;
}
public Soundplayer()
{
track = new AudiotrackMicAndroid(sr,buffsize);
schedExecService = Executors.newSingleThreadScheduledExecutor();
long period = (buffsize*1000)/sr; //duration of one audio buffer in milliseconds
schedExecService.scheduleAtFixedRate(this, 0, period, TimeUnit.MILLISECONDS);
}
double FREQx = 600.0;
boolean changex = true;
double FREQy = 600.0;
boolean changey = true;
public void run()
{
//test tone:
for(int i=0;i<puffer.length;i+=2)
{
double Tx = 1.0/FREQx;
double phasex = t - Tx*Math.floor(t/Tx);
if(phasex<=dt && changex==true)
{
FREQx = 600.0 + 40.0*accelerometerX;
changex=false;
}
if(phasex>Tx/2.0)
{
changex=true;
}
double Ty = 1.0/FREQy;
double phasey = t - Ty*Math.floor(t/Ty);
if(phasey<=dt && changey==true)
{
FREQy = 600.0 + 40.0*accelerometerY;
changey=false;
}
if(phasey>Ty/2.0)
{
changey=true;
}
// puffer[i+0] = (short)(32000.0*Math.sin(2.0*Math.PI*FREQx*t));
// puffer[i+1] = (short)(32000.0*Math.sin(2.0*Math.PI*FREQy*t));
puffer[i+0] = (short)(32000.0*dreieck(FREQx*t));
puffer[i+1] = (short)(32000.0*dreieck(FREQy*t));
t+=dt;
}
//audioTrack.write(puffer, 0,buffsize2);
mikrofon_welle = track.write(puffer);
}
}
Code 0-7: Chaos05, TAB3 (Soundplayer)
Chaos05.zip
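Two numbers are worth checking in Soundplayer: the scheduling period, which is derived from the buffer size (512 frames at 44100 Hz last about 11.6 ms, so run() has roughly 11 ms to fill the next buffer), and the frequency range of the mapping FREQ = 600 + 40*a, which for one accelerometer axis between -9.81 and +9.81 m/s^2 spans roughly 208 Hz to 992 Hz. A small console sketch to verify these values (not part of the original project; the +/-9.81 m/s^2 range is an assumption for a device at rest):
void setup()
{
  int sr = 44100;
  int buffsize = 512;
  long period = (buffsize * 1000) / sr;      //integer milliseconds, as in Soundplayer
  float exact = buffsize * 1000.0 / sr;      //exact buffer duration in milliseconds
  println("scheduling period: " + period + " ms (exact: " + exact + " ms)");

  float g = 9.81;                             //assumed range of one accelerometer axis at rest
  println("lowest frequency:  " + (600.0 - 40.0*g) + " Hz");
  println("highest frequency: " + (600.0 + 40.0*g) + " Hz");
}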