How to capture the audio stream from Android's MediaRecorder


Source: http://blog.csdn.net/zgyulongfei/article/details/7753163

On Android there are two common ways to handle voice audio: (1) capture raw PCM with AudioRecord and play it back with AudioTrack. The raw data is large, so for network use it has to be compressed with a software codec such as iLBC or Speex; an iLBC example can be found at http://blog.csdn.net/column/details/media.html. (2) Record with MediaRecorder, which uses the device's built-in encoder and produces AMR directly. MediaRecorder normally writes its output only to a file, so here a LocalSocket file descriptor is handed to it in order to intercept the encoded stream from memory; on the playback side MediaPlayer is used, which likewise can only play from a file. The first approach gives direct access to the samples but requires extra encoding work; the second keeps both the code and the bit stream small. This article takes the second approach: the encoder below captures AMR and sends it to a server over UDP, while the player receives the stream back from the server over TCP and plays it.
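
Both classes below reference a small cn.edu.xmu.zgy.config.CommonConfig class that is not included in this post. A minimal sketch of what it presumably contains is shown here; the field names are taken from the code, while the address and port values are placeholders:

package cn.edu.xmu.zgy.config;

// Hypothetical reconstruction: only the three constants referenced by the
// encoder and player are defined; the actual values depend on your server.
public class CommonConfig {
	// address of the relay server (placeholder value)
	public static final String SERVER_IP_ADDRESS = "192.168.1.100";
	// UDP port the encoder sends AMR frames to (placeholder value)
	public static final int AUDIO_SERVER_UP_PORT = 8000;
	// TCP port the player reads the AMR stream from (placeholder value)
	public static final int AUDIO_SERVER_DOWN_PORT = 8001;
}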

Encoder:
package cn.edu.xmu.zgy.audio.encoder;

import java.io.DataInputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;

import cn.edu.xmu.zgy.config.CommonConfig;

import android.app.Activity;
import android.media.MediaRecorder;
import android.net.LocalServerSocket;
import android.net.LocalSocket;
import android.net.LocalSocketAddress;
import android.util.Log;
import android.widget.Toast;

//blog.csdn.net/zgyulongfei
//Email: [email protected]

public class AmrAudioEncoder {
	private static final String TAG = "ArmAudioEncoder";

	private static AmrAudioEncoder amrAudioEncoder = null;

	private Activity activity;

	private MediaRecorder audioRecorder;

	private boolean isAudioRecording;

	private LocalServerSocket lss;
	private LocalSocket sender, receiver;

	private AmrAudioEncoder() {
	}

	public static AmrAudioEncoder getArmAudioEncoderInstance() {
		if (amrAudioEncoder == null) {
			synchronized (AmrAudioEncoder.class) {
				if (amrAudioEncoder == null) {
					amrAudioEncoder = new AmrAudioEncoder();
				}
			}
		}
		return amrAudioEncoder;
	}

	public void initArmAudioEncoder(Activity activity) {
		this.activity = activity;
		isAudioRecording = false;
	}

	public void start() {
		if (activity == null) {
			showToastText("activity is null, please call initArmAudioEncoder() first");
			return;
		}

		if (isAudioRecording) {
			showToastText("already recording, no need to start again");
			return;
		}

		if (!initLocalSocket()) {
			showToastText("failed to initialize the LocalSocket");
			releaseAll();
			return;
		}

		if (!initAudioRecorder()) {
			showToastText("failed to initialize the MediaRecorder");
			releaseAll();
			return;
		}

		this.isAudioRecording = true;
		startAudioRecording();
	}

	private boolean initLocalSocket() {
		boolean ret = true;
		try {
			releaseLocalSocket();

			String serverName = "armAudioServer";
			final int bufSize = 1024;

			lss = new LocalServerSocket(serverName);

			receiver = new LocalSocket();
			receiver.connect(new LocalSocketAddress(serverName));
			receiver.setReceiveBufferSize(bufSize);
			receiver.setSendBufferSize(bufSize);

			sender = lss.accept();
			sender.setReceiveBufferSize(bufSize);
			sender.setSendBufferSize(bufSize);
		} catch (IOException e) {
			ret = false;
		}
		return ret;
	}

	private boolean initAudioRecorder() {
		if (audioRecorder != null) {
			audioRecorder.reset();
			audioRecorder.release();
		}
		audioRecorder = new MediaRecorder();
		audioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
		audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.RAW_AMR);
		final int mono = 1;
		audioRecorder.setAudioChannels(mono);
		audioRecorder.setAudioSamplingRate(8000);
		audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
		audioRecorder.setOutputFile(sender.getFileDescriptor());

		boolean ret = true;
		try {
			audioRecorder.prepare();
			audioRecorder.start();
		} catch (Exception e) {
			releaseMediaRecorder();
			showToastText("MediaRecorder prepare() or start() failed");
			ret = false;
		}
		return ret;
	}

	private void startAudioRecording() {
		new Thread(new AudioCaptureAndSendThread()).start();
	}

	public void stop() {
		if (isAudioRecording) {
			isAudioRecording = false;
		}
		releaseAll();
	}

	private void releaseAll() {
		releaseMediaRecorder();
		releaseLocalSocket();
		amrAudioEncoder = null;
	}

	private void releaseMediaRecorder() {
		try {
			if (audioRecorder == null) {
				return;
			}
			if (isAudioRecording) {
				audioRecorder.stop();
				isAudioRecording = false;
			}
			audioRecorder.reset();
			audioRecorder.release();
			audioRecorder = null;
		} catch (Exception err) {
			Log.d(TAG, err.toString());
		}
	}

	private void releaseLocalSocket() {
		try {
			if (sender != null) {
				sender.close();
			}
			if (receiver != null) {
				receiver.close();
			}
			if (lss != null) {
				lss.close();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		sender = null;
		receiver = null;
		lss = null;
	}

	private boolean isAudioRecording() {
		return isAudioRecording;
	}

	private void showToastText(String msg) {
		Toast.makeText(activity, msg, Toast.LENGTH_SHORT).show();
	}

	private class AudioCaptureAndSendThread implements Runnable {
		public void run() {
			try {
				sendAmrAudio();
			} catch (Exception e) {
				Log.e(TAG, "sendAmrAudio() error");
			}
		}

		private void sendAmrAudio() throws Exception {
			DatagramSocket udpSocket = new DatagramSocket();
			DataInputStream dataInput = new DataInputStream(receiver.getInputStream());

			skipAmrHead(dataInput);

			final int SEND_FRAME_COUNT_ONE_TIME = 10; // send 10 AMR frames per UDP packet; one AMR-NB frame is at most 32 bytes
			// AMR frame format reference: http://blog.csdn.net/dinggo/article/details/1966444
			final int BLOCK_SIZE[] = { 12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0 };

			byte[] sendBuffer = new byte[1024];
			while (isAudioRecording()) {
				int offset = 0;
				for (int index = 0; index < SEND_FRAME_COUNT_ONE_TIME; ++index) {
					if (!isAudioRecording()) {
						break;
					}
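					// read the 1-byte AMR frame header; bits 3..6 hold the frame-type index,
					// which determines the payload length via BLOCK_SIZE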
					dataInput.read(sendBuffer, offset, 1);
					int blockIndex = (int) (sendBuffer[offset] >> 3) & 0x0F;
					int frameLength = BLOCK_SIZE[blockIndex];
					readSomeData(sendBuffer, offset + 1, frameLength, dataInput);
					offset += frameLength + 1;
				}
				udpSend(udpSocket, sendBuffer, offset);
			}
			udpSocket.close();
			dataInput.close();
			releaseAll();
		}

		private void skipAmrHead(DataInputStream dataInput) {
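			// the bytes below are the ASCII string "#!AMR\n", the magic header that
			// MediaRecorder writes once at the start of the stream; it must be skipped
			// before the individual frames can be parsed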
			final byte[] AMR_HEAD = new byte[] { 0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A };
			int result = -1;
			int state = 0;
			try {
				while (-1 != (result = dataInput.readByte())) {
					if (AMR_HEAD[0] == result) {
						state = (0 == state) ? 1 : 0;
					} else if (AMR_HEAD[1] == result) {
						state = (1 == state) ? 2 : 0;
					} else if (AMR_HEAD[2] == result) {
						state = (2 == state) ? 3 : 0;
					} else if (AMR_HEAD[3] == result) {
						state = (3 == state) ? 4 : 0;
					} else if (AMR_HEAD[4] == result) {
						state = (4 == state) ? 5 : 0;
					} else if (AMR_HEAD[5] == result) {
						state = (5 == state) ? 6 : 0;
					}

					if (6 == state) {
						break;
					}
				}
			} catch (Exception e) {
				Log.e(TAG, "error while skipping the AMR file header...");
			}
		}

		private void readSomeData(byte[] buffer, int offset, int length, DataInputStream dataInput) {
			int numOfRead = -1;
			while (true) {
				try {
					numOfRead = dataInput.read(buffer, offset, length);
					if (numOfRead == -1) {
						Log.d(TAG, "amr...no data get wait for data coming.....");
						Thread.sleep(100);
					} else {
						offset += numOfRead;
						length -= numOfRead;
						if (length <= 0) {
							break;
						}
					}
				} catch (Exception e) {
					Log.e(TAG, "amr..error readSomeData");
					break;
				}
			}
		}

		private void udpSend(DatagramSocket udpSocket, byte[] buffer, int sendLength) {
			try {
				InetAddress ip = InetAddress.getByName(CommonConfig.SERVER_IP_ADDRESS.trim());
				int port = CommonConfig.AUDIO_SERVER_UP_PORT;

				byte[] sendBuffer = new byte[sendLength];
				System.arraycopy(buffer, 0, sendBuffer, 0, sendLength);

				DatagramPacket packet = new DatagramPacket(sendBuffer, sendLength);
				packet.setAddress(ip);
				packet.setPort(port);
				udpSocket.send(packet);
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}
}
About the encoder: as mentioned above, MediaRecorder's hardware encoding can normally only write the bit stream to a file. Here a LocalSocket is used instead, so the stream stays in memory and the encoded data is read back from the socket buffer. Because the output format is RAW_AMR, the data read from the socket has to be parsed to recover the actual audio frames; if you want to know the AMR frame format, see the link included in the code. Since the compressed stream is small, the implementation collects SEND_FRAME_COUNT_ONE_TIME = 10 frames before sending them out, which makes the latency noticeably worse; adjust this value to your own needs. Another source of latency is the LocalSocket buffer size, which is set here with final int bufSize = 1024. The code is commented in detail; feel free to raise questions if anything is unclear.
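
To make the frame parsing described above concrete, here is a small stand-alone sketch of how the encoder derives the length of one AMR-NB frame from its first byte. The class name and the sample header byte are made up for illustration; the BLOCK_SIZE table is the one used in sendAmrAudio():

// Minimal illustration of the AMR-NB frame header parsing used in sendAmrAudio().
// The first byte of every frame is a header; bits 3..6 are the frame-type index.
public class AmrFrameLengthDemo {
	// payload sizes (in bytes, excluding the 1-byte header) for frame types 0..15
	private static final int[] BLOCK_SIZE = { 12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0 };

	public static void main(String[] args) {
		byte header = 0x3C;                 // example header byte: 0011 1100 -> frame type 7 (12.2 kbps)
		int frameType = (header >> 3) & 0x0F;
		int payloadLength = BLOCK_SIZE[frameType];
		// one complete frame on the wire = 1 header byte + payload
		System.out.println("frame type=" + frameType + ", total frame size=" + (1 + payloadLength) + " bytes");
	}
}
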
Player:
package cn.edu.xmu.zgy.audio.player;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.Socket;

import cn.edu.xmu.zgy.config.CommonConfig;

import android.app.Activity;
import android.media.MediaPlayer;
import android.os.Handler;
import android.util.Log;

//blog.csdn.net/zgyulongfei
//Email: [email protected]

public class AmrAudioPlayer {
	private static final String TAG = "AmrAudioPlayer";

	private static AmrAudioPlayer playerInstance = null;

	private long alreadyReadByteCount = 0;

	private MediaPlayer audioPlayer;
	private Handler handler = new Handler();

	private final String cacheFileName = "audioCacheFile";
	private File cacheFile;
	private int cacheFileCount = 0;

	// flag: has the data written so far to cacheFile already been moved into a numbered cache file?
	private boolean hasMovedTheCacheFlag;

	private boolean isPlaying;
	private Activity activity;

	private boolean isChaingCacheToAnother;

	private AmrAudioPlayer() {
	}

	public static AmrAudioPlayer getAmrAudioPlayerInstance() {
		if (playerInstance == null) {
			synchronized (AmrAudioPlayer.class) {
				if (playerInstance == null) {
					playerInstance = new AmrAudioPlayer();
				}
			}
		}
		return playerInstance;
	}

	public void initAmrAudioPlayer(Activity activity) {
		this.activity = activity;
		deleteExistCacheFile();
		initCacheFile();
	}

	private void deleteExistCacheFile() {
		File cacheDir = activity.getCacheDir();
		File[] needDeleteCacheFiles = cacheDir.listFiles();
		for (int index = 0; index < needDeleteCacheFiles.length; ++index) {
			File cache = needDeleteCacheFiles[index];
			if (cache.isFile()) {
				if (cache.getName().contains(cacheFileName.trim())) {
					Log.e(TAG, "delete cache file: " + cache.getName());
					cache.delete();
				}
			}
		}
		needDeleteCacheFiles = null;
	}

	private void initCacheFile() {
		cacheFile = null;
		cacheFile = new File(activity.getCacheDir(), cacheFileName);
	}

	public void start() {
		isPlaying = true;
		isChaingCacheToAnother = false;
		setHasMovedTheCacheToAnotherCache(false);
		new Thread(new NetAudioPlayerThread()).start();
	}

	public void stop() {
		isPlaying = false;
		isChaingCacheToAnother = false;
		setHasMovedTheCacheToAnotherCache(false);
		releaseAudioPlayer();
		deleteExistCacheFile();
		cacheFile = null;
		handler = null;
	}

	private void releaseAudioPlayer() {
		playerInstance = null;
		if (audioPlayer != null) {
			try {
				if (audioPlayer.isPlaying()) {
					audioPlayer.pause();
				}
				audioPlayer.release();
				audioPlayer = null;
			} catch (Exception e) {
			}
		}
	}

	private boolean hasMovedTheCacheToAnotherCache() {
		return hasMovedTheCacheFlag;
	}

	private void setHasMovedTheCacheToAnotherCache(boolean result) {
		hasMovedTheCacheFlag = result;
	}

	private class NetAudioPlayerThread implements Runnable {
		// start playback only after at least INIT_AUDIO_BUFFER bytes have been received
		private final int INIT_AUDIO_BUFFER = 2 * 1024;
		// switch to the next cache file when less than 1 second of the current one remains
		private final int CHANGE_CACHE_TIME = 1000;

		public void run() {
			try {
				Socket socket = createSocketConnectToServer();
				receiveNetAudioThenPlay(socket);
			} catch (Exception e) {
				Log.e(TAG, e.getMessage() + " : failed to receive or play the network audio stream...");
			}
		}

		private Socket createSocketConnectToServer() throws Exception {
			String hostName = CommonConfig.SERVER_IP_ADDRESS;
			InetAddress ipAddress = InetAddress.getByName(hostName);
			int port = CommonConfig.AUDIO_SERVER_DOWN_PORT;
			Socket socket = new Socket(ipAddress, port);
			return socket;
		}

		private void receiveNetAudioThenPlay(Socket socket) throws Exception {
			InputStream inputStream = socket.getInputStream();
			FileOutputStream outputStream = new FileOutputStream(cacheFile);

			final int BUFFER_SIZE = 100 * 1024;// 100kb buffer size
			byte[] buffer = new byte[BUFFER_SIZE];

			// check periodically (after about every 10 reads) whether the cache file should be switched
			int testTime = 10;
			try {
				alreadyReadByteCount = 0;
				while (isPlaying) {
					int numOfRead = inputStream.read(buffer);
					if (numOfRead <= 0) {
						break;
					}
					alreadyReadByteCount += numOfRead;
					outputStream.write(buffer, 0, numOfRead);
					outputStream.flush();
					try {
						if (testTime++ >= 10) {
							Log.e(TAG, "cacheFile=" + cacheFile.length());
							testWhetherToChangeCache();
							testTime = 0;
						}
					} catch (Exception e) {
						// TODO: handle exception
					}

					// the data written so far has been moved into a numbered cache file, so start writing into a fresh cacheFile
					if (hasMovedTheCacheToAnotherCache() && !isChaingCacheToAnother) {
						if (outputStream != null) {
							outputStream.close();
							outputStream = null;
						}
						// reuse the same cacheFile: reopening the FileOutputStream below truncates it to zero length
						// initCacheFile();
						outputStream = new FileOutputStream(cacheFile);
						setHasMovedTheCacheToAnotherCache(false);
						alreadyReadByteCount = 0;
					}

				}
			} catch (Exception e) {
				errorOperator();
				e.printStackTrace();
				Log.e(TAG, "socket disconnect...:" + e.getMessage());
				throw new Exception("socket disconnect....");
			} finally {
				buffer = null;
				if (socket != null) {
					socket.close();
				}
				if (inputStream != null) {
					inputStream.close();
					inputStream = null;
				}
				if (outputStream != null) {
					outputStream.close();
					outputStream = null;
				}
				stop();
			}
		}

		private void testWhetherToChangeCache() throws Exception {
			if (audioPlayer == null) {
				firstTimeStartPlayer();
			} else {
				changeAnotherCacheWhenEndOfCurrentCache();
			}
		}

		private void firstTimeStartPlayer() throws Exception {
			// start the player once at least INIT_AUDIO_BUFFER bytes have been buffered
			if (alreadyReadByteCount >= INIT_AUDIO_BUFFER) {
				Runnable r = new Runnable() {
					public void run() {
						try {
							File firstCacheFile = createFirstCacheFile();
							// the data has been moved into the first cache file, so tell the receiver thread to write into a fresh cacheFile
							setHasMovedTheCacheToAnotherCache(true);
							audioPlayer = createAudioPlayer(firstCacheFile);
							audioPlayer.start();
						} catch (Exception e) {
							Log.e(TAG, e.getMessage() + " :in firstTimeStartPlayer() fun");
						} finally {
						}
					}
				};
				handler.post(r);
			}
		}

		private File createFirstCacheFile() throws Exception {
			String firstCacheFileName = cacheFileName + (cacheFileCount++);
			File firstCacheFile = new File(activity.getCacheDir(), firstCacheFileName);
			// Why copy cacheFile into a separate file instead of playing cacheFile directly?
			// Because the receiver thread keeps appending to cacheFile while it would be
			// played, and MediaPlayer does not cope well with a file that is still growing.
			moveFile(cacheFile, firstCacheFile);
			return firstCacheFile;

		}

		private void moveFile(File oldFile, File newFile) throws IOException {
			if (!oldFile.exists()) {
				throw new IOException("oldFile is not exists. in moveFile() fun");
			}
			if (oldFile.length() <= 0) {
				throw new IOException("oldFile size = 0. in moveFile() fun");
			}
			BufferedInputStream reader = new BufferedInputStream(new FileInputStream(oldFile));
			BufferedOutputStream writer = new BufferedOutputStream(new FileOutputStream(newFile,
					false));

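			// MediaPlayer expects a valid AMR file, so each cache file starts with the
			// "#!AMR\n" magic header before the raw frames are copied in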
			final byte[] AMR_HEAD = new byte[] { 0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A };
			writer.write(AMR_HEAD, 0, AMR_HEAD.length);
			writer.flush();

			try {
				byte[] buffer = new byte[1024];
				int numOfRead = 0;
				Log.d(TAG, "POS...newFile.length=" + newFile.length() + "  old=" + oldFile.length());
				while ((numOfRead = reader.read(buffer, 0, buffer.length)) != -1) {
					writer.write(buffer, 0, numOfRead);
					writer.flush();
				}
				Log.d(TAG, "POS..AFTER...newFile.length=" + newFile.length());
			} catch (IOException e) {
				Log.e(TAG, "moveFile error.. in moveFile() fun." + e.getMessage());
				throw new IOException("moveFile error.. in moveFile() fun.");
			} finally {
				if (reader != null) {
					reader.close();
					reader = null;
				}
				if (writer != null) {
					writer.close();
					writer = null;
				}
			}
		}

		private MediaPlayer createAudioPlayer(File audioFile) throws IOException {
			MediaPlayer mPlayer = new MediaPlayer();

			// It appears that for security/permission reasons, it is better to
			// pass
			// a FileDescriptor rather than a direct path to the File.
			// Also I have seen errors such as "PVMFErrNotSupported" and
			// "Prepare failed.: status=0x1" if a file path String is passed to
			// setDataSource(). So unless otherwise noted, we use a
			// FileDescriptor here.
			FileInputStream fis = new FileInputStream(audioFile);
			mPlayer.reset();
			mPlayer.setDataSource(fis.getFD());
			mPlayer.prepare();
			return mPlayer;
		}

		private void changeAnotherCacheWhenEndOfCurrentCache() throws IOException {
			// remaining playback time of the current cache, in milliseconds
			long theRestTime = audioPlayer.getDuration() - audioPlayer.getCurrentPosition();
			Log.e(TAG, "theRestTime=" + theRestTime + "  isChaingCacheToAnother="
					+ isChaingCacheToAnother);
			if (!isChaingCacheToAnother && theRestTime <= CHANGE_CACHE_TIME) {
				isChaingCacheToAnother = true;

				Runnable r = new Runnable() {
					public void run() {
						try {
							File newCacheFile = createNewCache();
							// the data has been moved into the new cache, so tell the receiver thread to write into a fresh cacheFile
							setHasMovedTheCacheToAnotherCache(true);
							transferNewCacheToAudioPlayer(newCacheFile);
						} catch (Exception e) {
							Log.e(TAG, e.getMessage()
									+ ":changeAnotherCacheWhenEndOfCurrentCache() fun");
						} finally {
							deleteOldCache();
							isChaingCacheToAnother = false;
						}
					}
				};
				handler.post(r);
			}
		}

		private File createNewCache() throws Exception {
			// move everything received so far in cacheFile into the new numbered cache file
			String newCacheFileName = cacheFileName + (cacheFileCount++);
			File newCacheFile = new File(activity.getCacheDir(), newCacheFileName);
			Log.e(TAG, "before moveFile............the size=" + cacheFile.length());
			moveFile(cacheFile, newCacheFile);
			return newCacheFile;
		}

		private void transferNewCacheToAudioPlayer(File newCacheFile) throws Exception {
			MediaPlayer oldPlayer = audioPlayer;

			try {
				audioPlayer = createAudioPlayer(newCacheFile);
				audioPlayer.start();
			} catch (Exception e) {
				Log.e(TAG, "filename=" + newCacheFile.getName() + " size=" + newCacheFile.length());
				Log.e(TAG, e.getMessage() + " " + e.getCause() + " error starting the new player in transferNewCacheToAudioPlayer()");
			}
			try {
				oldPlayer.pause();
				oldPlayer.reset();
				oldPlayer.release();
			} catch (Exception e) {
				Log.e(TAG, "ERROR release oldPlayer.");
			} finally {
				oldPlayer = null;
			}
		}

		private void deleteOldCache() {
			int oldCacheFileCount = cacheFileCount - 1;
			String oldCacheFileName = cacheFileName + oldCacheFileCount;
			File oldCacheFile = new File(activity.getCacheDir(), oldCacheFileName);
			if (oldCacheFile.exists()) {
				oldCacheFile.delete();
			}
		}

		private void errorOperator() {
		}
	}

}
About the player: because of MediaPlayer's limitations, near real-time playback is implemented with cache files. The received audio stream is first written to a file, and once enough data has accumulated it is played back, much like the buffering of the QQ player, although the handling here is rather crude. The code is commented in fair detail; feel free to raise questions if anything is unclear.
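
For completeness, here is a minimal sketch of how the encoder and player singletons might be driven from an Activity. The Activity itself is an assumption and not part of the original post; only the AmrAudioEncoder and AmrAudioPlayer calls come from the classes above:

package cn.edu.xmu.zgy.audio;

import android.app.Activity;
import android.os.Bundle;

import cn.edu.xmu.zgy.audio.encoder.AmrAudioEncoder;
import cn.edu.xmu.zgy.audio.player.AmrAudioPlayer;

// Hypothetical example Activity; not part of the original post.
public class VoiceDemoActivity extends Activity {
	private AmrAudioEncoder encoder;
	private AmrAudioPlayer player;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// capture from the microphone and stream AMR frames to the server over UDP
		encoder = AmrAudioEncoder.getArmAudioEncoderInstance();
		encoder.initArmAudioEncoder(this);
		encoder.start();

		// receive the AMR stream from the server over TCP and play it
		player = AmrAudioPlayer.getAmrAudioPlayerInstance();
		player.initAmrAudioPlayer(this);
		player.start();
	}

	@Override
	protected void onDestroy() {
		// stop streaming and release the sockets, recorder and player
		encoder.stop();
		player.stop();
		super.onDestroy();
	}
}

Note that the app also needs the RECORD_AUDIO and INTERNET permissions in its manifest for the recording and for the socket traffic.
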
Note: the encoder and player were built by standing on the shoulders of giants; other material was consulted in the process.
The next article will include the complete server and client code.
Comments and suggestions from readers are very welcome.