안드로이드 개발 질문/답변
(글 수 45,052)
ffmpeg에서 동영상을 디코딩 후 자바 단으로 데이터를 넘겨주어 음원을 플레이시켜주고 싶은데 생각보다 잘 안되네요.
public class AudioPlayerActivity extends Activity { private AudioTrack track; private FileOutputStream os; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); createEngine(); int bufSize = AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT); Log.d("test", "aaaaaaaaaaaaaaaaaaaaa "+bufSize); track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufSize, AudioTrack.MODE_STREAM); byte[] bytes = new byte[bufSize*2]; try { os = new FileOutputStream("/mnt/sdcard/test.out",false); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } //loadFile("/mnt/sdcard/test.mp3", bytes); // String file = "/mnt/sdcard/Wildlife.wmv"; String file = "/mnt/sdcard/VIDEO0002.3gp"; loadFile(file, bytes); try { os.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } void playSound(byte[] buf, int size) { if(track.getPlayState()!=AudioTrack.PLAYSTATE_PLAYING) { track.play(); } Log.d("test", "aaaaaaaaaaaaaaaaaaaaa"); track.write(buf, 0, size); Log.d("test", "aaaaaaaaaaaaaaaaaaaaa"); try { os.write(buf,0,size); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private native void createEngine(); private native int loadFile(String file, byte[] array); /** Load jni .so on initialization*/ static { System.loadLibrary("audioplayer"); } }
자바 쪽 소스 코드입니다.
jint Java_team_tj_audio_AudioPlayerActivity_loadFile(JNIEnv* env, jobject obj, jstring file, jbyteArray array) { AVFormatContext *gFormatCtx = NULL; AVCodecContext *gAudioCodecCtx = NULL; AVCodec *gAudioCodec = NULL; int gAudioStreamIdx = -1; char *gAudioBuffer = NULL; int i, outsize = 0; AVPacket packet; const char *str; str = (*env)->GetStringUTFChars(env, file, NULL); jclass cls = (*env)->GetObjectClass(env, obj); jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V");
if (gFormatCtx != NULL) return -1;
if (av_open_input_file(&gFormatCtx, str, NULL, 0, NULL) != 0) return -2;
if (av_find_stream_info(gFormatCtx) < 0) return -3;
for (i = 0; i < gFormatCtx->nb_streams; i++) { if (gFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) { gAudioStreamIdx = i; break; } }
if (gAudioStreamIdx == -1) return -4;
gAudioCodecCtx = gFormatCtx->streams[gAudioStreamIdx]->codec;
gAudioCodec = avcodec_find_decoder(gAudioCodecCtx->codec_id);
if (gAudioCodec == NULL) return -5;
if (avcodec_open(gAudioCodecCtx, gAudioCodec) < 0) return -6;
gAudioBuffer = (char *) av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE * 2);
int decode = 0;
while (av_read_frame(gFormatCtx, &packet) >= 0) { if (gFormatCtx-> streams[packet.stream_index]->codec->codec_type == AVMEDIA_TYPE_AUDIO) { int data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 2; int size=packet.size; while(size > 0) { int len = avcodec_decode_audio3(gAudioCodecCtx, (int16_t *) gAudioBuffer, &data_size, &packet);
if (data_size > 0) { jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL); memcpy(bytes + decode, (int16_t *)gAudioBuffer, size); (*env)->ReleaseByteArrayElements(env, array, bytes, 0); (*env)->CallVoidMethod(env, obj, play, array, data_size); size -= len; decode += len; } } } av_free_packet(&packet); } av_close_input_file(gFormatCtx);
return 0; }
native 쪽 소스 코드입니다.
어느 부분이 잘못 되었는지 알려주시면 감사하겠습니다.