안녕하세요 아즈라엘 입니다. 


Talking Tom 과 같은 어플을 구현해야 하는데 음원을 빠르게 해서

녹음한 목소리를 변조하여 출력하는 부분이 잘 안되어서 질문하게 되었습니다.


혹시 이 부분 해결하신 분 계신가 해서요^^



일단 AudioTrack 을 사용하여 출력해봤는데 속도가 마음에 들지 않아 문제입니다.

너무 빠르게 나오거든요..ㅠㅠ


찾아본 걸로는 44100, 22050, 16000, 11025, 8000 주파수만 사용할 수 있더군요..ㅠㅠ


/Users/Lowmans/Desktop/Temp/AudioRecorder/src/teja/inox/apps/media/sound/AltAudioRecorder.java







/**
 * Records raw 16-bit PCM from the microphone and plays it back at a higher
 * sample rate to produce a sped-up / pitch-shifted ("Talking Tom" style) voice.
 */
public class AltAudioRecorder extends Activity {


	// Background tasks for recording and playback.
	RecordAudio recordTask;
	PlayAudio playTask;
	// Playback speed-up factor: audio recorded at `frequency` is played back at
	// `frequency * CUSTOM_FREQ_SOAP`, i.e. twice as fast / an octave higher.
	final int CUSTOM_FREQ_SOAP = 2;
	Button startRecordingButton, stopRecordingButton, startPlaybackButton, stopPlaybackButton;
	TextView statusText;


	// Raw PCM output file on external storage (created in onCreate()).
	File recordingFile;


	// Loop flags polled by the background tasks; cleared by the stop buttons.
	boolean isRecording = false;
	boolean isPlaying = false;


//	Nexus One: {44100, 22050, 16000, 11025, 8000}, {mono, stereo}, 16bit all work
//	Droid: {44100, 22050, 16000, 11025, 8000}, mono, 16bit all work
//	Emulator: only 8000, mono, 16bit
	


// http://stackoverflow.com/questions/4781781/why-does-audiorecord-getminbuffersize-return-error-bad-value-2
// From the platform source file AudioRecord.java:
//
// static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
//    ...
//    // PCM_8BIT is not supported at the moment
//    if (audioFormat != AudioFormat.ENCODING_PCM_16BIT) {
//        loge("getMinBufferSize(): Invalid audio format.");
//        return AudioRecord.ERROR_BAD_VALUE;
//    }
//    ...
// }
// Looks like your choice is 16-bit or nothing. :\	
	
	// Recording sample rate (Hz); playback rate is doubled for the voice effect.
	int frequency = 16000;
	int outfrequency = frequency * CUSTOM_FREQ_SOAP;


	// NOTE(review): CHANNEL_CONFIGURATION_MONO is deprecated on newer APIs —
	// CHANNEL_IN_MONO / CHANNEL_OUT_MONO are the modern constants.
	int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
	int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;

	/**
	 * Wires the four buttons to record/playback actions and creates the
	 * temporary PCM file on external storage.
	 *
	 * @param savedInstanceState standard Activity restore bundle (unused here)
	 */
	@Override
	public void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.main);

		startRecordingButton = (Button) findViewById(R.id.StartRecordingButton);
		stopRecordingButton = (Button) findViewById(R.id.StopRecordingButton);
		startPlaybackButton = (Button) findViewById(R.id.StartPlaybackButton);
		stopPlaybackButton = (Button) findViewById(R.id.StopPlaybackButton);

		startRecordingButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				record();
			}
		});
		stopRecordingButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				stopRecording();
			}
		});
		startPlaybackButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				play();
			}
		});
		stopPlaybackButton.setOnClickListener(new OnClickListener() {
			@Override
			public void onClick(View v) {
				stopPlaying();
			}
		});

		// Nothing to stop or play until a recording has been started.
		stopRecordingButton.setEnabled(false);
		startPlaybackButton.setEnabled(false);
		stopPlaybackButton.setEnabled(false);

		// BUG FIX: the old code appended "/sdcard/meditest/" to the external
		// storage root, producing a doubled ".../sdcard/sdcard/meditest/" path.
		File path = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/meditest/");
		path.mkdirs();
		try {
			recordingFile = File.createTempFile("recording", ".pcm", path);
		} catch (IOException e) {
			throw new RuntimeException("Couldn't create file on SD card", e);
		}
	}


	/**
	 * Launches a background {@link PlayAudio} task that streams the recorded
	 * file through an AudioTrack at the raised sample rate.
	 */
	public void play() {
		// BUG FIX: the original enabled the start button here, which allowed a
		// second overlapping PlayAudio task to be launched mid-playback.
		startPlaybackButton.setEnabled(false);

		playTask = new PlayAudio();
		playTask.execute();

		stopPlaybackButton.setEnabled(true);
	}


	/** Asks the playback loop to exit and restores the playback buttons. */
	public void stopPlaying() {
		isPlaying = false;
		startPlaybackButton.setEnabled(true);
		stopPlaybackButton.setEnabled(false);
	}


	/** Launches a background {@link RecordAudio} task and toggles the buttons. */
	public void record() {
		stopRecordingButton.setEnabled(true);
		startRecordingButton.setEnabled(false);

		// Deliberately allow playback while still recording ("for fun").
		startPlaybackButton.setEnabled(true);

		recordTask = new RecordAudio();
		recordTask.execute();
	}


	/** Signals the recording loop in {@link RecordAudio} to finish. */
	public void stopRecording() {
		isRecording = false;
	}


	/**
	 * Streams the recorded PCM file into an AudioTrack opened at
	 * {@code outfrequency} (recording rate * CUSTOM_FREQ_SOAP), so the voice
	 * plays back sped up — the "Talking Tom" effect.
	 */
	private class PlayAudio extends AsyncTask<Void, Integer, Void> {
		@Override
		protected Void doInBackground(Void... params) {
			isPlaying = true;

			// getMinBufferSize() returns a size in BYTES; a short[] of half that
			// length holds the same amount of 16-bit audio.
			int bufferSize = AudioTrack.getMinBufferSize(outfrequency, channelConfiguration, audioEncoding);
			short[] audiodata = new short[bufferSize / 2];

			AudioTrack audioTrack = null;
			DataInputStream dis = null;
			try {
				dis = new DataInputStream(new BufferedInputStream(new FileInputStream(recordingFile)));
				audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, outfrequency, channelConfiguration, audioEncoding,
						bufferSize, AudioTrack.MODE_STREAM);
				audioTrack.play();

				while (isPlaying && dis.available() > 0) {
					int i = 0;
					while (dis.available() > 0 && i < audiodata.length) {
						audiodata[i] = dis.readShort();
						i++;
					}
					// BUG FIX: write only the samples actually read — the final
					// partial chunk used to replay stale data from the buffer tail.
					audioTrack.write(audiodata, 0, i);
				}
			} catch (Throwable t) {
				Log.e("AudioTrack", "Playback Failed", t); // keep the cause
			} finally {
				// BUG FIX: the AudioTrack was never stopped/released and the
				// stream leaked when an exception was thrown.
				if (audioTrack != null) {
					audioTrack.stop();
					audioTrack.release();
				}
				if (dis != null) {
					try {
						dis.close();
					} catch (IOException ignored) {
						// best-effort close; nothing useful to do here
					}
				}
			}

			return null;
		}

		@Override
		protected void onPostExecute(Void result) {
			// BUG FIX: the original toggled the buttons from doInBackground(),
			// which touches the UI off the main thread (illegal on Android),
			// and also left them inverted (start disabled, stop enabled).
			startPlaybackButton.setEnabled(true);
			stopPlaybackButton.setEnabled(false);
		}
	}


	/**
	 * Records raw 16-bit mono PCM from the microphone at {@code frequency} Hz
	 * into {@code recordingFile} until {@link #stopRecording()} clears
	 * {@code isRecording}. Samples are written big-endian via writeShort(),
	 * matching PlayAudio's readShort().
	 */
	private class RecordAudio extends AsyncTask<Void, Integer, Void> {
		@Override
		protected Void doInBackground(Void... params) {
			isRecording = true;

			AudioRecord audioRecord = null;
			DataOutputStream dos = null;
			try {
				dos = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(recordingFile)));
				// getMinBufferSize() returns BYTES; the short[] needs half as
				// many elements for the same span of 16-bit audio.
				int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
				audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, frequency, channelConfiguration, audioEncoding,
						bufferSize);
				short[] buffer = new short[bufferSize / 2];
				audioRecord.startRecording();
				while (isRecording) {
					// read(short[], ...) counts in shorts, not bytes.
					int bufferReadResult = audioRecord.read(buffer, 0, buffer.length);
					for (int i = 0; i < bufferReadResult; i++) {
						dos.writeShort(buffer[i]);
					}
				}
				audioRecord.stop();
			} catch (Throwable t) {
				Log.e("AudioRecord", "Recording Failed", t); // keep the cause
			} finally {
				// BUG FIX: the AudioRecord was never released and the output
				// stream leaked when an exception was thrown.
				if (audioRecord != null) {
					audioRecord.release();
				}
				if (dos != null) {
					try {
						dos.close();
					} catch (IOException ignored) {
						// best-effort close
					}
				}
			}

			return null;
		}

		@Override
		protected void onPostExecute(Void result) {
			startRecordingButton.setEnabled(true);
			stopRecordingButton.setEnabled(false);
			startPlaybackButton.setEnabled(true);
		}
	}