I am trying to write a simple Android app that displays the various pitches of a .wav file while it plays. I'm using TarsosDSP to do the pitch processing and AudioTrack to play the file.
Before I dive into the code here, I'm running Android Studio 3.4.2 with JRE 1.8.0 and my minSdkVersion is 23.
From my understanding of how TarsosDSP works, I wire up the wav stream to the AudioDispatcher object, attach the processors (the player itself, and the pitch evaluator), and then assign the dispatcher to a thread and start it to kick everything off. From my understanding, I'm also probably doing something dumb (somewhere...).
I'm running into problems when using the AudioTrack Builder because a good many number of examples I found utilize the now deprecated constructor that uses AudioManager.STREAM_MUSIC.
UPDATE: I managed to find someone who is more or less doing what I'm wanting to do (just had to have Google translate it from Korean): https://junyoung-jamong.github.io/signal/processing/2019/02/09/How-to-use-tarsosDSP-in-Android.html
After the refactoring I was able to move the AudioPlayer stuff I was doing inside of my AudioMethods class.
UPDATE So after updating the code, I'm getting the file to play correctly, and the pitch evaluation seems to be working, but I'm only getting two samples, and they register in the logcat before I actually hear the audio play. Can someone explain this? Also, if someone can tell me how to get it to take more than two samples I'd like to know where this is being set/calculated.
D/EBB Inside Run: Pitch:372.05637
D/EBB Inside Run: Pitch:412.30508
Main Activity
public class MainActivity extends AppCompatActivity {
private TextView local_NoteText;
private TextView local_PitchText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
local_PitchText = findViewById(R.id.pitchText);
local_NoteText = findViewById(R.id.noteText);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
public void ProcessTone(View view) throws IOException {
//get the test file
final AssetFileDescriptor afd = getResources().openRawResourceFd(R.raw.avery_test);
AudioMethods audioMethods = new AudioMethods();
TarsosDSPAudioFormat tarsosDSPAudioFormat = new TarsosDSPAudioFormat(TarsosDSPAudioFormat.Encoding.PCM_SIGNED,
16000,
16,
1,
2,
16000,
ByteOrder.BIG_ENDIAN.equals(ByteOrder.nativeOrder()));
audioMethods.getPitchFromFile(afd, MainActivity.this, tarsosDSPAudioFormat, local_NoteText,local_PitchText);
}
}
Audio Methods
public class AudioMethods {
public static AudioDispatcher dispatcher;
public float pitchInHz;
public int millSecond;
public void getPitchFromFile(final AssetFileDescriptor afd, final Activity activity, TarsosDSPAudioFormat tarsosDSPAudioFormat,final TextView pitchText,final TextView noteText) {
try {
releaseDispatcher(dispatcher);
FileInputStream fileInputStream = new FileInputStream(afd.getFileDescriptor());
fileInputStream.skip(afd.getStartOffset());
// I only need this to get the number of elapsed seconds if the dispatcher doesn't detect when the audio file is finished.
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
millSecond = Integer.parseInt(durationStr);
dispatcher = new AudioDispatcher(new UniversalAudioInputStream(fileInputStream, tarsosDSPAudioFormat), 2048, 0);
final AudioProcessor playerProcessor = new AndroidAudioPlayer(tarsosDSPAudioFormat, 16000, 0);
dispatcher.addAudioProcessor(playerProcessor);
PitchDetectionHandler pitchDetectionHandler = new PitchDetectionHandler() {
public void handlePitch(final PitchDetectionResult res, AudioEvent e) {
pitchInHz = res.getPitch();
//if(pitchInHz > 0){Log.d("EBB Outside Run","Pitch:" + pitchInHz);}
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
if(pitchInHz > 0){Log.d("EBB Inside Run","Pitch:" + pitchInHz);}
pitchText.setText(pitchInHz + "");
processPitch(pitchInHz);
}
});
}
public void processPitch(float pitchInHz) {
if(pitchInHz >= 110 && pitchInHz < 123.47) {
//A
noteText.setText("A");
}
else if(pitchInHz >= 123.47 && pitchInHz < 130.81) {
//B
noteText.setText("B");
}
else if(pitchInHz >= 130.81 && pitchInHz < 146.83) {
//C
noteText.setText("C");
}
else if(pitchInHz >= 146.83 && pitchInHz < 164.81) {
//D
noteText.setText("D");
}
else if(pitchInHz >= 164.81 && pitchInHz <= 174.61) {
//E
noteText.setText("E");
}
else if(pitchInHz >= 174.61 && pitchInHz < 185) {
//F
noteText.setText("F");
}
else if(pitchInHz >= 185 && pitchInHz < 196) {
//G
noteText.setText("G");
}
}
};
AudioProcessor pitchProcessor = new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 44100, 2048, pitchDetectionHandler);
dispatcher.addAudioProcessor(pitchProcessor);
dispatcher.run();
Thread audioThread = new Thread(dispatcher, "Audio Thread");
audioThread.start();
} catch (Exception e) {
e.printStackTrace();
}
}
public void releaseDispatcher(AudioDispatcher dispatcher)
{
if(dispatcher != null)
{
if(!dispatcher.isStopped())
dispatcher.stop();
dispatcher = null;
}
}
protected void onStop(AudioDispatcher dispatcher) {
//super.onStop();
releaseDispatcher(dispatcher);
}
//I don't need these guys yet
/*public void stopRecording()
{
releaseDispatcher();
}
@Override
protected void onStop() {
super.onStop();
releaseDispatcher();
}*/
}