Android WebRTC: recording the video stream from the other peer

I am developing a WebRTC video-call Android application, and it works well. I need to record the video of the other peer (remoteVideoStream) as well as my own stream (localVideoStream) and convert it to some saveable format such as mp4 (or any other format). I have searched hard for this but could not figure out how to do it.

I have read about VideoFileRenderer and tried adding it to my code to save the video, but I could not make use of it: it has no method such as record() or save(), although it does have a method called release(), which is apparently what finishes saving the video. In case anyone has an idea, here is the class:

// Decompiled org.webrtc.VideoFileRenderer. It buffers every incoming frame as raw I420 in
// memory and, on release(), writes them out as an uncompressed YUV4MPEG2 (.y4m) file.
@JNINamespace("webrtc::jni")
public class VideoFileRenderer implements Callbacks, VideoSink {
    private static final String TAG = "VideoFileRenderer";
    private final HandlerThread renderThread;
    private final Handler renderThreadHandler;
    private final FileOutputStream videoOutFile;
    private final String outputFileName;
    private final int outputFileWidth;
    private final int outputFileHeight;
    private final int outputFrameSize;
    private final ByteBuffer outputFrameBuffer;
    private EglBase eglBase;
    private YuvConverter yuvConverter;
    private ArrayList<ByteBuffer> rawFrames = new ArrayList();

    public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
            final EglBase.Context sharedContext) throws IOException {
        if (outputFileWidth % 2 != 1 && outputFileHeight % 2 != 1) {
            this.outputFileName = outputFile;
            this.outputFileWidth = outputFileWidth;
            this.outputFileHeight = outputFileHeight;
            // I420: one full-resolution Y plane plus quarter-resolution U and V planes.
            this.outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
            this.outputFrameBuffer = ByteBuffer.allocateDirect(this.outputFrameSize);
            this.videoOutFile = new FileOutputStream(outputFile);
            // Y4M header: the output is raw frames, not an encoded/containerized video.
            this.videoOutFile.write(("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight
                    + " Ip F30:1 A1:1\n").getBytes(Charset.forName("US-ASCII")));
            this.renderThread = new HandlerThread("VideoFileRenderer");
            this.renderThread.start();
            this.renderThreadHandler = new Handler(this.renderThread.getLooper());
            ThreadUtils.invokeAtFrontUninterruptibly(this.renderThreadHandler, new Runnable() {
                public void run() {
                    VideoFileRenderer.this.eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
                    VideoFileRenderer.this.eglBase.createDummyPbufferSurface();
                    VideoFileRenderer.this.eglBase.makeCurrent();
                    VideoFileRenderer.this.yuvConverter = new YuvConverter();
                }
            });
        } else {
            throw new IllegalArgumentException("Does not support uneven width or height");
        }
    }

    // Legacy VideoRenderer.Callbacks entry point; converts and forwards to onFrame().
    public void renderFrame(I420Frame i420Frame) {
        VideoFrame frame = i420Frame.toVideoFrame();
        this.onFrame(frame);
        frame.release();
    }

    // VideoSink entry point; the frame is processed on the dedicated render thread.
    public void onFrame(VideoFrame frame) {
        frame.retain();
        this.renderThreadHandler.post(() -> {
            this.renderFrameOnRenderThread(frame);
        });
    }

    private void renderFrameOnRenderThread(VideoFrame frame) {
        Buffer buffer = frame.getBuffer();
        // Crop and scale the frame to the requested output size, keeping the aspect ratio.
        int targetWidth = frame.getRotation() % 180 == 0 ? this.outputFileWidth : this.outputFileHeight;
        int targetHeight = frame.getRotation() % 180 == 0 ? this.outputFileHeight : this.outputFileWidth;
        float frameAspectRatio = (float) buffer.getWidth() / (float) buffer.getHeight();
        float fileAspectRatio = (float) targetWidth / (float) targetHeight;
        int cropWidth = buffer.getWidth();
        int cropHeight = buffer.getHeight();
        if (fileAspectRatio > frameAspectRatio) {
            cropHeight = (int) ((float) cropHeight * (frameAspectRatio / fileAspectRatio));
        } else {
            cropWidth = (int) ((float) cropWidth * (fileAspectRatio / frameAspectRatio));
        }

        int cropX = (buffer.getWidth() - cropWidth) / 2;
        int cropY = (buffer.getHeight() - cropHeight) / 2;
        Buffer scaledBuffer = buffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, targetWidth, targetHeight);
        frame.release();
        I420Buffer i420 = scaledBuffer.toI420();
        scaledBuffer.release();
        // Keep the rotated raw I420 bytes in memory; nothing is written to disk yet.
        ByteBuffer byteBuffer = JniCommon.nativeAllocateByteBuffer(this.outputFrameSize);
        YuvHelper.I420Rotate(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
                i420.getDataV(), i420.getStrideV(), byteBuffer, i420.getWidth(), i420.getHeight(),
                frame.getRotation());
        i420.release();
        byteBuffer.rewind();
        this.rawFrames.add(byteBuffer);
    }

    // The only "save" hook: tears down GL and dumps all buffered frames into the .y4m file.
    public void release() {
        CountDownLatch cleanupBarrier = new CountDownLatch(1);
        this.renderThreadHandler.post(() -> {
            this.yuvConverter.release();
            this.eglBase.release();
            this.renderThread.quit();
            cleanupBarrier.countDown();
        });
        ThreadUtils.awaitUninterruptibly(cleanupBarrier);

        try {
            Iterator var2 = this.rawFrames.iterator();

            while (var2.hasNext()) {
                ByteBuffer buffer = (ByteBuffer) var2.next();
                this.videoOutFile.write("FRAME\n".getBytes(Charset.forName("US-ASCII")));
                byte[] data = new byte[this.outputFrameSize];
                buffer.get(data);
                this.videoOutFile.write(data);
                JniCommon.nativeFreeByteBuffer(buffer);
            }

            this.videoOutFile.close();
            Logging.d("VideoFileRenderer", "Video written to disk as " + this.outputFileName
                    + ". Number frames are " + this.rawFrames.size() + " and the dimension of the frames are "
                    + this.outputFileWidth + "x" + this.outputFileHeight + ".");
        } catch (IOException var5) {
            Logging.e("VideoFileRenderer", "Error writing video to disk", var5);
        }
    }
}

I could not find any method in it that would help.
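For reference, a VideoFileRenderer is simply attached to a track as a VideoSink, and release() is what flushes the buffered frames to disk. A minimal wiring sketch, with assumed names (remoteVideoTrack from the PeerConnection callbacks, rootEglBase as the shared EglBase, outputPath as the target file):

// Wiring sketch with assumed names; the constructor throws IOException.
// Width and height must be even, and the result is a raw YUV4MPEG2 (.y4m) file, not an mp4.
VideoFileRenderer fileRenderer = new VideoFileRenderer(
        outputPath, 480, 640, rootEglBase.getEglBaseContext());
remoteVideoTrack.addSink(fileRenderer);   // VideoFileRenderer implements VideoSink

// ... frames are buffered in memory while the call runs ...

remoteVideoTrack.removeSink(fileRenderer);
fileRenderer.release();                   // release() is what actually writes the file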

Solution

The VideoFileRenderer class only demonstrates how to access the decoded raw video frames of the remote/local peer.
It does not record a valid, playable video file.
You have to implement the logic of encoding the raw video frames and muxing them into a container such as mp4 yourself.
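A minimal sketch of the frame-capture side, modeled on the ProxyVideoSink pattern from apprtc/CallActivity.java (the class layout and field names here are illustrative, not part of the SDK):

import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;

// One sink that forwards each decoded frame both to the on-screen renderer
// and to a recording sink.
public class ProxyVideoSink implements VideoSink {
    private VideoSink target;    // e.g. the SurfaceViewRenderer that displays the remote video
    private VideoSink recorder;  // e.g. a sink that hands frames to the MediaCodec encoder

    @Override
    public synchronized void onFrame(VideoFrame frame) {
        if (target != null) {
            target.onFrame(frame);
        }
        if (recorder != null) {
            // If the recorder processes frames asynchronously it must retain()/release() them.
            recorder.onFrame(frame);
        }
    }

    public synchronized void setTarget(VideoSink target) { this.target = target; }
    public synchronized void setRecorder(VideoSink recorder) { this.recorder = recorder; }
}

The proxy is registered once with remoteVideoTrack.addSink(proxy), so the same decoded frames reach both the display and the recording path.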

The main flow is as follows:

1. Switch to the latest WebRTC version (v.0.0.25331 at the time of writing).
2. Create the video container; see, for example, MediaMuxer in the Android SDK.
3. Implement the VideoSink interface to obtain raw frames from the desired video source; see, for example, the ProxyVideoSink class in apprtc/CallActivity.java.
4. Encode each frame with MediaCodec and write it into the video container (see the sketch after this list).
5. Finalize the muxer.
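A hedged sketch of steps 2, 4 and 5, assuming each raw frame has already been repacked into the byte layout the device's H.264 encoder expects (converting from an org.webrtc I420Buffer is device-dependent and omitted here); the class name Mp4Recorder and all parameter values are illustrative:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import java.io.IOException;
import java.nio.ByteBuffer;

// Illustrative sketch: encodes raw YUV frames to H.264 with MediaCodec and muxes them
// into an .mp4 with MediaMuxer. Error handling, threading and audio are omitted.
public class Mp4Recorder {
    private static final String MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final long TIMEOUT_US = 10_000;

    private final MediaCodec encoder;
    private final MediaMuxer muxer;
    private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    private int trackIndex = -1;
    private boolean muxerStarted;

    public Mp4Recorder(String outputPath, int width, int height) throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat(MIME, width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);

        encoder = MediaCodec.createEncoderByType(MIME);
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();

        // Step 2: the container. It can only be started once the encoder reports its format.
        muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }

    // Step 4: feed one raw frame; yuvData must already match the encoder's input layout.
    public void encodeFrame(byte[] yuvData, long presentationTimeUs) {
        int inIndex = encoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer in = encoder.getInputBuffer(inIndex);
            in.clear();
            in.put(yuvData);
            encoder.queueInputBuffer(inIndex, 0, yuvData.length, presentationTimeUs, 0);
        }
        drainEncoder(false);
    }

    // Step 5: finalize the muxer so the file gets a valid mp4 index.
    public void release() {
        int inIndex = encoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            // Signal end of stream with an empty input buffer (simplified; a robust
            // implementation would retry until the buffer is accepted).
            encoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
        drainEncoder(true);
        encoder.stop();
        encoder.release();
        if (muxerStarted) {
            muxer.stop();
        }
        muxer.release();
    }

    private void drainEncoder(boolean endOfStream) {
        while (true) {
            int outIndex = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
            if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream) {
                    break; // no output yet; keep waiting only when draining the final frames
                }
            } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The muxer track must be added from the encoder's actual output format.
                trackIndex = muxer.addTrack(encoder.getOutputFormat());
                muxer.start();
                muxerStarted = true;
            } else if (outIndex >= 0) {
                ByteBuffer encoded = encoder.getOutputBuffer(outIndex);
                boolean isConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
                if (muxerStarted && bufferInfo.size > 0 && !isConfig) {
                    muxer.writeSampleData(trackIndex, encoded, bufferInfo);
                }
                encoder.releaseOutputBuffer(outIndex, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break; // the encoder has emitted everything
                }
            }
        }
    }
}

The recording sink from the previous sketch would call encodeFrame() for every frame it receives and release() when the call ends, which drains the encoder and stops the muxer so the resulting mp4 is playable.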
