Android Video Communication: A Low-Latency Solution
Source: cnblogs  Author: Supper_litt  Date: 2019/1/14 9:35:07

Background:

  The project calls for video communication: device A's camera feed has to be streamed to device B.

Deployment:

  APP1: encodes the frames captured by the camera and sends them to the server

  APP2: pulls the data from the server, decodes it, and displays it

  Server: receives the data submitted by APP1 and forwards it to APP2
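
  All three parts share one wire format, which you can see in the encoder code further below: a 13-byte header (1-byte frame type, 8-byte capture timestamp, 4-byte payload length) followed by the H.264 payload. A minimal sketch of those constants, with illustrative names that are not taken from the original code:

  // Illustrative constants for the packet layout used between APP1, the server and APP2.
  public final class FramePacket {
      public static final byte TYPE_I_FRAME = 0x01; // key frame with SPS/PPS prepended
      public static final byte TYPE_P_FRAME = 0x02; // delta frame
      public static final int HEADER_SIZE = 13;     // 1 type + 8 pts + 4 length
  }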

Application notes:

  APP1: open the front camera and register a preview callback that always keeps only the latest frame:

  camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);

  Camera.Parameters parameters = camera.getParameters();
  parameters.setPreviewFormat(ImageFormat.NV21);
  parameters.setPreviewSize(width, height);
  // Set exposure compensation to mid-range
  parameters.setExposureCompensation(parameters.getMaxExposureCompensation() / 2);
  camera.setParameters(parameters);
  camera.setDisplayOrientation(90);
  camera.setPreviewCallback(new Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(byte[] data, Camera camera) {
          // Record the frame and its capture timestamp; the decoder needs the
          // timestamp to keep playback continuous and smooth without corruption.
          stamptime = System.nanoTime();
          yuv_data = data;
      }
  });
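
  The snippet above writes straight into stamptime and yuv_data, which are public fields of the encoder class below. A minimal sketch of the wiring when the encoder lives in its own object, under that same assumption:

  // Hypothetical wiring: the preview callback publishes only the newest frame;
  // the encoder thread picks it up asynchronously and older frames are dropped.
  final AvcKeyFrameEncoder encoder = new AvcKeyFrameEncoder(width, height, 25);
  camera.setPreviewCallback(new Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(byte[] data, Camera camera) {
          encoder.stamptime = System.nanoTime();
          encoder.yuv_data = data;
      }
  });
  encoder.StartEncoderThread(saveVideo, callback); // saveVideo/callback sketched further below
  camera.startPreview();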
  public class AvcKeyFrameEncoder {
      private final static String TAG = "MediaCodec";
      private int TIMEOUT_USEC = 12000;

      private MediaCodec mediaCodec;
      int m_width;
      int m_height;
      int m_framerate;

      public byte[] configbyte;

      // Latest captured frame waiting to be encoded (written by the preview callback).
      public byte[] yuv_data = null;
      public long stamptime = 0;

      public AvcKeyFrameEncoder(int width, int height, int framerate) {
          m_width = width;
          m_height = height;
          m_framerate = framerate;

          // The camera delivers landscape frames by default, so the encoded video is landscape too.
          // If you rotate the image by 90 or 270 degrees, swap width and height here, otherwise
          // the picture is corrupted: 320 x 240 becomes 240 x 320 after a 90-degree rotation.
          MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
          mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
          mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
          mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate); // e.g. 30
          mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
          try {
              mediaCodec = MediaCodec.createEncoderByType("video/avc");
          } catch (IOException e) {
              e.printStackTrace();
          }

          // Configure the encoder.
          mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

          // Start the encoder.
          mediaCodec.start();
      }

      public void StopEncoder() {
          try {
              mediaCodec.stop();
              mediaCodec.release();
          } catch (Exception e) {
              e.printStackTrace();
          }
      }

      public boolean isRuning = false;

      public void StartEncoderThread(final ISaveVideo saveVideo, final ICall callback) {
          isRuning = true;
          new Thread(new Runnable() {
              @Override
              public void run() {
                  byte[] input = null;
                  long pts = 0;
                  while (isRuning) {
                      // Wait until the preview callback has published a frame.
                      if (yuv_data == null) {
                          continue;
                      }

                      // Take the latest frame together with its capture timestamp.
                      input = yuv_data;
                      pts = stamptime;
                      yuv_data = null;

                      // The encoder expects NV12 (COLOR_FormatYUV420SemiPlanar); the camera delivers NV21.
                      byte[] yuv420sp = new byte[m_width * m_height * 3 / 2];
                      NV21ToNV12(input, yuv420sp, m_width, m_height);
                      input = yuv420sp;

                      try {
                          // Encoder input buffers.
                          ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();

                          // Encoder output buffers.
                          ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                          int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                          if (inputBufferIndex >= 0) {
                              ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                              inputBuffer.clear();
                              // Put the converted NV12 frame into the encoder input buffer,
                              // passing the capture timestamp as the presentation time.
                              inputBuffer.put(input);
                              mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                          }

                          MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                          int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                          while (outputBufferIndex >= 0) {
                              // Copy the encoded frame out of the encoder output buffer.
                              ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                              byte[] outData = new byte[bufferInfo.size];
                              outputBuffer.get(outData);
                              // flags is a bitmask, so test bits instead of comparing with ==.
                              if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                  // SPS/PPS; keep it so it can be prepended to every key frame.
                                  configbyte = outData;
                              } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                                  // Prepend SPS/PPS so a receiver can start decoding at this frame.
                                  byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                  System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                  System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);

                                  Logs.i("Uploading I-frame " + keyframe.length);
                                  // Packet layout: 1-byte frame type, 8-byte pts, 4-byte length, payload.
                                  byte[] send_data = new byte[13 + keyframe.length];
                                  send_data[0] = 0x01; // I-frame
                                  System.arraycopy(IntBytes.longToBytes(pts), 0, send_data, 1, 8);
                                  System.arraycopy(IntBytes.intToByteArray(keyframe.length), 0, send_data, 9, 4);
                                  System.arraycopy(keyframe, 0, send_data, 13, keyframe.length);
                                  if (saveVideo != null) {
                                      saveVideo.SaveVideoData(send_data);
                                  }

                                  if (callback != null) {
                                      callback.callback(keyframe, pts);
                                  }
                              } else {
                                  byte[] send_data = new byte[13 + outData.length];
                                  send_data[0] = 0x02; // P-frame
                                  System.arraycopy(IntBytes.longToBytes(pts), 0, send_data, 1, 8);
                                  System.arraycopy(IntBytes.intToByteArray(outData.length), 0, send_data, 9, 4);
                                  System.arraycopy(outData, 0, send_data, 13, outData.length);
                                  if (saveVideo != null) {
                                      saveVideo.SaveVideoData(send_data);
                                  }

                                  if (callback != null) {
                                      callback.callback(outData, pts);
                                  }
                              }

                              mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                              outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                          }

                      } catch (Throwable t) {
                          t.printStackTrace();
                          break;
                      }
                  }
              }
          }).start();
      }

      // NV21 stores chroma as interleaved V,U pairs; NV12 stores them as U,V.
      // Copy the Y plane as-is and swap every chroma byte pair in place.
      private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
          if (nv21 == null || nv12 == null) return;
          int framesize = width * height;
          System.arraycopy(nv21, 0, nv12, 0, framesize);
          for (int j = 0; j < framesize / 2; j += 2) {
              nv12[framesize + j] = nv21[framesize + j + 1];     // U
              nv12[framesize + j + 1] = nv21[framesize + j];     // V
          }
      }
  }
Video encoder class: AvcKeyFrameEncoder
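
The IntBytes helper used above is not shown in the original; a minimal sketch, assuming big-endian byte order on both the sending and the receiving side:

  import java.nio.ByteBuffer;

  // Minimal sketch of the IntBytes helper (assumed big-endian on both ends).
  public final class IntBytes {
      public static byte[] longToBytes(long v) {
          return ByteBuffer.allocate(8).putLong(v).array();
      }
      public static byte[] intToByteArray(int v) {
          return ByteBuffer.allocate(4).putInt(v).array();
      }
      public static long bytesToLong(byte[] b) {
          return ByteBuffer.wrap(b).getLong();
      }
  }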

The encoder hands its output outward through two callback interfaces, so the captured and encoded data can either be submitted to the server on a background thread or decoded and displayed locally, which makes it easy to check the encode-to-decode time difference.

An ArrayBlockingQueue<byte[]> H264Queue = new ArrayBlockingQueue<byte[]>(10) temporarily holds the data handed over through the callback; a background thread then drains the queue and either decodes the data or submits it to the server. A sketch of the interfaces and the sender thread follows.
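
The ISaveVideo and ICall interfaces are not shown in the original either; a plausible minimal shape for them, plus the queue-draining sender thread (sendToServer is a placeholder for the actual socket write):

  import java.util.concurrent.ArrayBlockingQueue;

  // Hypothetical shapes for the callbacks taken by StartEncoderThread.
  public interface ISaveVideo {
      void SaveVideoData(byte[] data);       // receives the 13-byte-header packet
  }
  public interface ICall {
      void callback(byte[] frame, long pts); // receives the raw encoded frame
  }

  // The buffering pattern described above: the callback offers packets into a
  // bounded queue; a background thread drains it toward the server.
  final ArrayBlockingQueue<byte[]> H264Queue = new ArrayBlockingQueue<byte[]>(10);

  ISaveVideo saveVideo = new ISaveVideo() {
      @Override
      public void SaveVideoData(byte[] data) {
          H264Queue.offer(data); // drops the packet if the queue is full
      }
  };

  new Thread(new Runnable() {
      @Override
      public void run() {
          while (true) {
              try {
                  byte[] packet = H264Queue.take();
                  sendToServer(packet); // placeholder for the actual network send
              } catch (InterruptedException e) {
                  break;
              }
          }
      }
  }).start();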

  APP2: connects to the server and starts consuming from I-frame data (the server's buffer always begins at the most recent I-frame). The capture timestamp recorded on APP1 must be passed along as the presentation-time argument, i.e. the fourth parameter of MediaCodec's queueInputBuffer.

 Server: receives APP1's data frame by frame. It starts recording at an I-frame and discards any data that does not begin with one, holding only a single frame at a time; in a loop it reads the stored frame, removes it, and sends it to APP2.
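
  The server code is not included in the original; below is a simplified, single-client sketch of that forwarding rule under the packet format shown earlier (DataInputStream/DataOutputStream are big-endian, matching the assumed IntBytes helper; connection handling and errors are omitted):

  import java.io.DataInputStream;
  import java.io.DataOutputStream;

  // Hypothetical relay loop: read framed packets from APP1, discard everything
  // until the first I-frame (type 0x01), then forward each packet to APP2.
  void relay(DataInputStream fromApp1, DataOutputStream toApp2) throws Exception {
      boolean sawKeyFrame = false;
      while (true) {
          byte type = fromApp1.readByte();   // 0x01 = I-frame, 0x02 = P-frame
          long pts = fromApp1.readLong();    // capture timestamp
          int length = fromApp1.readInt();   // payload length
          byte[] payload = new byte[length];
          fromApp1.readFully(payload);

          if (!sawKeyFrame) {
              if (type != 0x01) continue;    // decoding can only start at an I-frame
              sawKeyFrame = true;
          }
          // Forward the packet unchanged; APP2's ReceiveVideoThread (not shown) is
          // assumed to strip the type and length and queue "8-byte pts + H.264 data".
          toApp2.writeByte(type);
          toApp2.writeLong(pts);
          toApp2.writeInt(length);
          toApp2.write(payload);
          toApp2.flush();
      }
  }

  The decoder class used on APP2 follows.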

  public class VideoDecoder {
      private Thread mDecodeThread;
      private MediaCodec mCodec;
      private boolean mStopFlag = false;
      private int Video_Width = 640;
      private int Video_Height = 480;
      private int FrameRate = 25;
      private Boolean isUsePpsAndSps = false;
      private ReceiveVideoThread runThread = null;

      public VideoDecoder(String ip, int port, byte type, int roomId) {
          runThread = new ReceiveVideoThread(ip, port, type, roomId);
          new Thread(runThread).start();
      }

      public void InitReadData(Surface surface) {
          try {
              // Create a decoder from the MIME type.
              mCodec = MediaCodec.createDecoderByType("video/avc");
          } catch (IOException e) {
              e.printStackTrace();
          }
          // Initialize the decoder format.
          final MediaFormat mediaformat = MediaFormat.createVideoFormat("video/avc", Video_Width, Video_Height);
          // Set the frame rate.
          mediaformat.setInteger(MediaFormat.KEY_FRAME_RATE, FrameRate);
          // https://developer.android.com/reference/android/media/MediaFormat.html#KEY_MAX_INPUT_SIZE
          // configure() parameters:
          //   format  - input data format for a decoder (output format for an encoder)
          //   surface - a Surface the decoded output is rendered to
          //   crypto  - a MediaCrypto object if the media data is encrypted
          //   flags   - pass CONFIGURE_FLAG_ENCODE when the object is used as an encoder
          mCodec.configure(mediaformat, surface, null, 0);
          startDecodingThread();
      }

      private void startDecodingThread() {
          mCodec.start();
          mDecodeThread = new Thread(new decodeH264Thread());
          mDecodeThread.start();
      }

      @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
      private class decodeH264Thread implements Runnable {
          @Override
          public void run() {
              try {
                  decodeLoop_New();
              } catch (Exception e) {
                  e.printStackTrace();
              }
          }

          private void decodeLoop_New() {
              // Decoder input buffers.
              ByteBuffer[] inputBuffers = mCodec.getInputBuffers();
              // Metadata of the decoded output, e.g. offset and size of the valid data.
              MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
              long timeoutUs = 1000;
              byte[] marker0 = new byte[]{0, 0, 0, 1};
              byte[] dummyFrame = new byte[]{0x00, 0x00, 0x01, 0x20};
              byte[] streamBuffer = null;
              while (true) {
                  if (runThread.H264Queue.size() > 0) {
                      streamBuffer = runThread.H264Queue.poll();
                  } else {
                      try {
                          Thread.sleep(20);
                      } catch (Exception ex) {
                      }
                      continue;
                  }

                  // The first 8 bytes of each queued packet carry the capture
                  // timestamp; the rest is H.264 data.
                  byte[] time_data = new byte[8];
                  System.arraycopy(streamBuffer, 0, time_data, 0, 8);
                  long pts = IntBytes.bytesToLong(time_data);
                  byte[] video_data = new byte[streamBuffer.length - 8];
                  System.arraycopy(streamBuffer, 8, video_data, 0, video_data.length);
                  streamBuffer = video_data;
                  Logs.i("Got streamBuffer " + streamBuffer.length + " pts " + pts);

                  int bytes_cnt = 0;
                  mStopFlag = false;
                  while (!mStopFlag) {
                      bytes_cnt = streamBuffer.length;
                      if (bytes_cnt == 0) {
                          streamBuffer = dummyFrame;
                      }
                      int startIndex = 0;
                      int remaining = bytes_cnt;
                      while (true) {
                          if (remaining == 0 || startIndex >= remaining) {
                              break;
                          }
                          // Find the next NAL start code so the decoder is fed one NAL unit at a time.
                          int nextFrameStart = KMPMatch(marker0, streamBuffer, startIndex + 2, remaining);
                          if (nextFrameStart == -1) {
                              nextFrameStart = remaining;
                          }
                          int inIndex = mCodec.dequeueInputBuffer(timeoutUs);
                          if (inIndex >= 0) {
                              ByteBuffer byteBuffer = inputBuffers[inIndex];
                              byteBuffer.clear();
                              byteBuffer.put(streamBuffer, startIndex, nextFrameStart - startIndex);
                              // After filling the input buffer, hand it to the decoder, passing
                              // the capture timestamp through as the presentation time.
                              mCodec.queueInputBuffer(inIndex, 0, nextFrameStart - startIndex, pts, 0);
                              startIndex = nextFrameStart;
                          } else {
                              continue;
                          }
                          int outIndex = mCodec.dequeueOutputBuffer(info, timeoutUs);
                          if (outIndex >= 0) {
                              // No pacing against the PTS here; for low latency every frame
                              // is rendered as soon as it has been decoded.
                              /*
                              while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                                  try {
                                      Thread.sleep(100);
                                  } catch (InterruptedException e) {
                                      e.printStackTrace();
                                  }
                              }
                              */
                              boolean doRender = (info.size != 0);
                              // TODO: optionally save the raw decoded frame here.
                              if (doRender) {
                                  Image image = mCodec.getOutputImage(outIndex);
                                  if (image != null) {
                                      // Extract the raw frame and hand it to the consumer.
                                      byte[] data = getDataFromImage(image, COLOR_FormatNV21);
                                  }
                              }
                              // Once the output buffer has been processed, return it to the
                              // codec (rendering to the Surface when doRender is true).
                              mCodec.releaseOutputBuffer(outIndex, doRender);
                          }
                      }
                      mStopFlag = true;
                  }
              }
          }
      }

      private static final boolean VERBOSE = false;
      private static final long DEFAULT_TIMEOUT_US = 10000;
      private static final int COLOR_FormatI420 = 1;
      private static final int COLOR_FormatNV21 = 2;

      private static boolean isImageFormatSupported(Image image) {
          int format = image.getFormat();
          switch (format) {
              case ImageFormat.YUV_420_888:
              case ImageFormat.NV21:
              case ImageFormat.YV12:
                  return true;
          }
          return false;
      }

      @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
      private static byte[] getDataFromImage(Image image, int colorFormat) {
          if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
              throw new IllegalArgumentException("only support COLOR_FormatI420 and COLOR_FormatNV21");
          }
          if (!isImageFormatSupported(image)) {
              throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
          }
          Rect crop = image.getCropRect();
          int format = image.getFormat();
          int width = crop.width();
          int height = crop.height();
          Image.Plane[] planes = image.getPlanes();
          byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
          byte[] rowData = new byte[planes[0].getRowStride()];
          if (VERBOSE) Logs.i("get data from " + planes.length + " planes");
          int channelOffset = 0;
          int outputStride = 1;
          for (int i = 0; i < planes.length; i++) {
              switch (i) {
                  case 0:
                      channelOffset = 0;
                      outputStride = 1;
                      break;
                  case 1:
                      if (colorFormat == COLOR_FormatI420) {
                          channelOffset = width * height;
                          outputStride = 1;
                      } else if (colorFormat == COLOR_FormatNV21) {
                          channelOffset = width * height + 1;
                          outputStride = 2;
                      }
                      break;
                  case 2:
                      if (colorFormat == COLOR_FormatI420) {
                          channelOffset = (int) (width * height * 1.25);
                          outputStride = 1;
                      } else if (colorFormat == COLOR_FormatNV21) {
                          channelOffset = width * height;
                          outputStride = 2;
                      }
                      break;
              }
              ByteBuffer buffer = planes[i].getBuffer();
              int rowStride = planes[i].getRowStride();
              int pixelStride = planes[i].getPixelStride();
              if (VERBOSE) {
                  Logs.i("pixelStride " + pixelStride);
                  Logs.i("rowStride " + rowStride);
                  Logs.i("width " + width);
                  Logs.i("height " + height);
                  Logs.i("buffer size " + buffer.remaining());
              }
              int shift = (i == 0) ? 0 : 1;
              int w = width >> shift;
              int h = height >> shift;
              buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
              for (int row = 0; row < h; row++) {
                  int length;
                  if (pixelStride == 1 && outputStride == 1) {
                      length = w;
                      buffer.get(data, channelOffset, length);
                      channelOffset += length;
                  } else {
                      length = (w - 1) * pixelStride + 1;
                      buffer.get(rowData, 0, length);
                      for (int col = 0; col < w; col++) {
                          data[channelOffset] = rowData[col * pixelStride];
                          channelOffset += outputStride;
                      }
                  }
                  if (row < h - 1) {
                      buffer.position(buffer.position() + rowStride - length);
                  }
              }
              if (VERBOSE) Logs.i("Finished reading data from plane " + i);
          }
          return data;
      }

      private int KMPMatch(byte[] pattern, byte[] bytes, int start, int remain) {
          int[] lsp = computeLspTable(pattern);
          int j = 0; // Number of bytes matched in pattern
          for (int i = start; i < remain; i++) {
              while (j > 0 && bytes[i] != pattern[j]) {
                  // Fall back in the pattern
                  j = lsp[j - 1]; // Strictly decreasing
              }
              if (bytes[i] == pattern[j]) {
                  // Next byte matched, increment position
                  j++;
                  if (j == pattern.length)
                      return i - (j - 1);
              }
          }
          return -1; // Not found
      }

      private int[] computeLspTable(byte[] pattern) {
          int[] lsp = new int[pattern.length];
          lsp[0] = 0; // Base case
          for (int i = 1; i < pattern.length; i++) {
              // Start by assuming we're extending the previous LSP
              int j = lsp[i - 1];
              while (j > 0 && pattern[i] != pattern[j])
                  j = lsp[j - 1];
              if (pattern[i] == pattern[j])
                  j++;
              lsp[i] = j;
          }
          return lsp;
      }

      public void StopDecode() {
          if (runThread != null) {
              runThread.StopReceive();
          }
      }
  }
Video decoder class: VideoDecoder
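
A minimal usage sketch on the APP2 side, assuming a SurfaceView in the layout; the ip/port/type/roomId values are placeholders:

  // Hypothetical wiring: start the receive thread immediately, attach the decoder
  // to the Surface once it exists, and stop receiving when the Surface goes away.
  SurfaceView surfaceView = findViewById(R.id.video_view);
  final VideoDecoder decoder = new VideoDecoder("192.168.1.100", 9000, (byte) 0x02, 1);
  surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
      @Override
      public void surfaceCreated(SurfaceHolder holder) {
          decoder.InitReadData(holder.getSurface()); // decoded frames render to this Surface
      }
      @Override
      public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }
      @Override
      public void surfaceDestroyed(SurfaceHolder holder) {
          decoder.StopDecode();
      }
  });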

Summary:

  Working through this video pipeline taught me a number of the finer points of video handling on Android, and deepened my understanding of how these media dependencies get used in a real project.

 
