Created
February 20, 2014 04:15
-
-
Save luisdelarosa/9107008 to your computer and use it in GitHub Desktop.
Visualize sound in Android
From http://stackoverflow.com/questions/5511250/capturing-sound-for-analysis-and-visualizing-frequencies-in-android
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package com.example.frequencytest; | |
import android.app.Activity; | |
import android.graphics.Bitmap; | |
import android.graphics.Canvas; | |
import android.graphics.Color; | |
import android.graphics.Paint; | |
import android.media.AudioFormat; | |
import android.media.AudioRecord; | |
import android.media.MediaRecorder; | |
import android.os.AsyncTask; | |
import android.os.Bundle; | |
import android.util.Log; | |
import android.view.Menu; | |
import android.view.View; | |
import android.view.View.OnClickListener; | |
import android.widget.Button; | |
import android.widget.ImageView; | |
import ca.uol.aig.fftpack.RealDoubleFFT; | |
public class MainActivity extends Activity implements OnClickListener { | |
int frequency = 8000; | |
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; | |
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; | |
private RealDoubleFFT transformer; | |
int blockSize = 256; | |
Button startStopButton; | |
boolean started = false; | |
RecordAudio recordTask; | |
ImageView imageView; | |
Bitmap bitmap; | |
Canvas canvas; | |
Paint paint; | |
//AudioRecord audioRecord; | |
@Override | |
public void onCreate(Bundle savedInstanceState) { | |
super.onCreate(savedInstanceState); | |
setContentView(R.layout.activity_main); | |
startStopButton = (Button) this.findViewById(R.id.start_stop_btn); | |
startStopButton.setOnClickListener(this); | |
transformer = new RealDoubleFFT(blockSize); | |
imageView = (ImageView) this.findViewById(R.id.imageView1); | |
bitmap = Bitmap.createBitmap((int) 256, (int) 100, | |
Bitmap.Config.ARGB_8888); | |
canvas = new Canvas(bitmap); | |
paint = new Paint(); | |
paint.setColor(Color.GREEN); | |
imageView.setImageBitmap(bitmap); | |
} | |
public class RecordAudio extends AsyncTask<Void, double[], Void> { | |
@Override | |
protected Void doInBackground(Void... arg0) { | |
try { | |
// int bufferSize = AudioRecord.getMinBufferSize(frequency, | |
// AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); | |
int bufferSize = AudioRecord.getMinBufferSize(frequency, | |
channelConfiguration, audioEncoding); | |
AudioRecord audioRecord = new AudioRecord( | |
MediaRecorder.AudioSource.MIC, frequency, | |
channelConfiguration, audioEncoding, bufferSize); | |
short[] buffer = new short[blockSize]; | |
double[] toTransform = new double[blockSize]; | |
audioRecord.startRecording(); | |
// started = true; hopes this should true before calling | |
// following while loop | |
while (started) { | |
int bufferReadResult = audioRecord.read(buffer, 0, | |
blockSize); | |
for (int i = 0; i < blockSize && i < bufferReadResult; i++) { | |
toTransform[i] = (double) buffer[i] / 32768.0; // signed | |
// 16 | |
} // bit | |
transformer.ft(toTransform); | |
publishProgress(toTransform); | |
} | |
audioRecord.stop(); | |
} catch (Throwable t) { | |
t.printStackTrace(); | |
Log.e("AudioRecord", "Recording Failed"); | |
} | |
return null; | |
} | |
@Override | |
protected void onProgressUpdate(double[]... toTransform) { | |
canvas.drawColor(Color.BLACK); | |
for (int i = 0; i < toTransform[0].length; i++) { | |
int x = i; | |
int downy = (int) (100 - (toTransform[0][i] * 10)); | |
int upy = 100; | |
canvas.drawLine(x, downy, x, upy, paint); | |
} | |
imageView.invalidate(); | |
// TODO Auto-generated method stub | |
// super.onProgressUpdate(values); | |
} | |
} | |
@Override | |
public boolean onCreateOptionsMenu(Menu menu) { | |
getMenuInflater().inflate(R.menu.activity_main, menu); | |
return true; | |
} | |
public void onClick(View arg0) { | |
// TODO Auto-generated method stub | |
if (started) { | |
started = false; | |
startStopButton.setText("Start"); | |
recordTask.cancel(true); | |
} else { | |
started = true; | |
startStopButton.setText("Stop"); | |
recordTask = new RecordAudio(); | |
recordTask.execute(); | |
} | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Hey man, I was wondering if you could help me out with an adaptation I want to make to your code.
I want to alter it so that the visualization is stretched across the full screen width instead of only the 256-pixel-wide bitmap you allocated. I was able to get the screen width with the following code:
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
width = size.x;
Now I would just like to know how to stretch your visualization across this width?
Thank you for the help!