How to live stream from Android


诸葛神侯
2025-02-25 01:06:24
I want to do live broadcasting, the way Periscope does it. A quick search on the web turned up a bunch of libraries like ffmpeg that work through native code, as well as results pointing at encoding with MediaCodec.
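For reference, the MediaCodec route looks roughly like the sketch below. This is a minimal sketch, not from this thread: the class name is made up, and the resolution, bitrate, and keyframe-interval values are placeholder assumptions; everything downstream of the encoder (muxing and network transport) is omitted.

    import android.media.MediaCodec;
    import android.media.MediaCodecInfo;
    import android.media.MediaFormat;
    import android.view.Surface;

    import java.io.IOException;

    public final class H264EncoderSketch {

        // Configure a hardware H.264 encoder fed through an input Surface.
        // All numeric parameters below are placeholders, not recommendations.
        public static MediaCodec createEncoder() throws IOException {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000);  // ~2 Mbit/s
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);  // keyframe every 2 s

            MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            Surface input = encoder.createInputSurface(); // render camera frames into this
            encoder.start();
            // Compressed NAL units are then drained with dequeueOutputBuffer() and
            // handed to whatever muxer/transport does the actual broadcast.
            return encoder;
        }
    }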

3 replies
  1. 0# 岁爵 | 2019-08-31 10:32



    The MediaCodec API tends to bog down on high-resolution streams. I'd suggest using FFmpeg instead; there is a nice Java wrapper for it called JavaCV (which also works on Android). Here is a short sample to get you started:




        // This sample targets the older JavaCV API (com.googlecode.javacv packages);
        // newer JavaCV releases live under org.bytedeco.javacv and record Frame
        // objects instead of IplImage.
        import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;

        import java.io.IOException;
        import java.nio.ShortBuffer;

        import android.app.Activity;
        import android.content.Context;
        import android.content.pm.ActivityInfo;
        import android.graphics.Bitmap;
        import android.graphics.Canvas;
        import android.hardware.Camera;
        import android.hardware.Camera.PreviewCallback;
        import android.media.AudioFormat;
        import android.media.AudioRecord;
        import android.media.MediaRecorder;
        import android.os.Bundle;
        import android.os.PowerManager;
        import android.util.Log;
        import android.view.KeyEvent;
        import android.view.SurfaceHolder;
        import android.view.SurfaceView;
        import android.view.View;
        import android.view.View.OnClickListener;
        import android.widget.Button;
        import android.widget.LinearLayout;

        import com.googlecode.javacv.FFmpegFrameRecorder;
        import com.googlecode.javacv.cpp.opencv_core.IplImage;

        public class MainActivity extends Activity implements OnClickListener {

            private static final String LOG_TAG = "MainActivity";

            private PowerManager.WakeLock mWakeLock;
            private String ffmpeg_link = "rtmp://live:live@128.122.151.108:1935/live/test.flv";
            private volatile FFmpegFrameRecorder recorder;
            boolean recording = false;
            long startTime = 0;

            private int sampleAudioRateInHz = 44100;
            private int imageWidth = 320;
            private int imageHeight = 240;
            private int frameRate = 30;

            private Thread audioThread;
            volatile boolean runAudioThread = true;
            private AudioRecord audioRecord;
            private AudioRecordRunnable audioRecordRunnable;

            private CameraView cameraView;
            private IplImage yuvIplimage = null;

            private Button recordButton;
            private LinearLayout mainLayout;

            @Override
            public void onCreate(Bundle savedInstanceState) {
                super.onCreate(savedInstanceState);
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
                setContentView(R.layout.activity_main);
                initLayout();
                initRecorder();
            }

            @Override
            protected void onResume() {
                super.onResume();
                if (mWakeLock == null) {
                    PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
                    mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, LOG_TAG);
                    mWakeLock.acquire();
                }
            }

            @Override
            protected void onPause() {
                super.onPause();
                if (mWakeLock != null) {
                    mWakeLock.release();
                    mWakeLock = null;
                }
            }

            @Override
            protected void onDestroy() {
                super.onDestroy();
                recording = false;
            }

            private void initLayout() {
                mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);
                recordButton = (Button) findViewById(R.id.recorder_control);
                recordButton.setText("Start");
                recordButton.setOnClickListener(this);
                cameraView = new CameraView(this);
                LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(imageWidth, imageHeight);
                mainLayout.addView(cameraView, layoutParam);
                Log.v(LOG_TAG, "added cameraView to mainLayout");
            }

            private void initRecorder() {
                Log.w(LOG_TAG, "initRecorder");
                if (yuvIplimage == null) {
                    // Recreated after the frame size is set in surfaceChanged()
                    yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
                    //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
                    Log.v(LOG_TAG, "IplImage.create");
                }
                recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
                Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);
                recorder.setFormat("flv");
                Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");
                recorder.setSampleRate(sampleAudioRateInHz);
                Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
                // Re-set in surfaceChanged() as well
                recorder.setFrameRate(frameRate);
                Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");
                // Create the audio recording thread
                audioRecordRunnable = new AudioRecordRunnable();
                audioThread = new Thread(audioRecordRunnable);
            }

            // Start the capture
            public void startRecording() {
                try {
                    recorder.start();
                    startTime = System.currentTimeMillis();
                    recording = true;
                    audioThread.start();
                } catch (FFmpegFrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }

            public void stopRecording() {
                // This should stop the audio thread from running
                runAudioThread = false;
                if (recorder != null && recording) {
                    recording = false;
                    Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
                    try {
                        recorder.stop();
                        recorder.release();
                    } catch (FFmpegFrameRecorder.Exception e) {
                        e.printStackTrace();
                    }
                    recorder = null;
                }
            }

            @Override
            public boolean onKeyDown(int keyCode, KeyEvent event) {
                // Quit when the back button is pressed
                if (keyCode == KeyEvent.KEYCODE_BACK) {
                    if (recording) {
                        stopRecording();
                    }
                    finish();
                    return true;
                }
                return super.onKeyDown(keyCode, event);
            }

            @Override
            public void onClick(View v) {
                if (!recording) {
                    startRecording();
                    Log.w(LOG_TAG, "Start Button Pushed");
                    recordButton.setText("Stop");
                } else {
                    stopRecording();
                    Log.w(LOG_TAG, "Stop Button Pushed");
                    recordButton.setText("Start");
                }
            }

            //---------------------------------------------
            // audio thread: captures and encodes audio data
            //---------------------------------------------
            class AudioRecordRunnable implements Runnable {

                @Override
                public void run() {
                    // Set the thread priority
                    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

                    // Audio
                    int bufferSize;
                    short[] audioData;
                    int bufferReadResult;

                    bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
                    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
                    audioData = new short[bufferSize];

                    Log.d(LOG_TAG, "audioRecord.startRecording()");
                    audioRecord.startRecording();

                    // Audio capture/encoding loop
                    while (runAudioThread) {
                        // Read from audioRecord
                        bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                        if (bufferReadResult > 0) {
                            //Log.v(LOG_TAG, "audioRecord bufferReadResult: " + bufferReadResult);
                            // Changes in this variable may not be picked up despite it being "volatile"
                            if (recording) {
                                try {
                                    // Write to FFmpegFrameRecorder
                                    recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                                } catch (FFmpegFrameRecorder.Exception e) {
                                    Log.v(LOG_TAG, e.getMessage());
                                    e.printStackTrace();
                                }
                            }
                        }
                    }
                    Log.v(LOG_TAG, "AudioThread Finished");

                    /* Capture/encoding finished, release the recorder */
                    if (audioRecord != null) {
                        audioRecord.stop();
                        audioRecord.release();
                        audioRecord = null;
                        Log.v(LOG_TAG, "audioRecord released");
                    }
                }
            }

            class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

                private boolean previewRunning = false;
                private SurfaceHolder holder;
                private Camera camera;
                private byte[] previewBuffer;
                long videoTimestamp = 0;
                Bitmap bitmap;
                Canvas canvas;

                public CameraView(Context _context) {
                    super(_context);
                    holder = this.getHolder();
                    holder.addCallback(this);
                    // Deprecated, but required on pre-3.0 devices
                    holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
                }

                @Override
                public void surfaceCreated(SurfaceHolder holder) {
                    camera = Camera.open();
                    try {
                        camera.setPreviewDisplay(holder);
                        camera.setPreviewCallback(this);
                        Camera.Parameters currentParams = camera.getParameters();
                        Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
                        Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width + " imageHeight: " + currentParams.getPreviewSize().height);

                        // Use these values
                        imageWidth = currentParams.getPreviewSize().width;
                        imageHeight = currentParams.getPreviewSize().height;
                        frameRate = currentParams.getPreviewFrameRate();

                        bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ALPHA_8);

                        /*
                        Log.v(LOG_TAG, "Creating previewBuffer size: " + imageWidth * imageHeight * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8);
                        previewBuffer = new byte[imageWidth * imageHeight * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8];
                        camera.addCallbackBuffer(previewBuffer);
                        camera.setPreviewCallbackWithBuffer(this);
                        */

                        camera.startPreview();
                        previewRunning = true;
                    } catch (IOException e) {
                        Log.v(LOG_TAG, e.getMessage());
                        e.printStackTrace();
                    }
                }

                @Override
                public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                    Log.v(LOG_TAG, "Surface Changed: width " + width + " height: " + height);

                    // We would do this if we wanted to reset the camera parameters
                    /*
                    if (!recording) {
                        if (previewRunning) {
                            camera.stopPreview();
                        }
                        try {
                            //Camera.Parameters cameraParameters = camera.getParameters();
                            //p.setPreviewSize(imageWidth, imageHeight);
                            //p.setPreviewFrameRate(frameRate);
                            //camera.setParameters(cameraParameters);
                            camera.setPreviewDisplay(holder);
                            camera.startPreview();
                            previewRunning = true;
                        } catch (IOException e) {
                            Log.e(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                    */

                    // Get the current parameters
                    Camera.Parameters currentParams = camera.getParameters();
                    Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
                    Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width + " imageHeight: " + currentParams.getPreviewSize().height);

                    // Use these values
                    imageWidth = currentParams.getPreviewSize().width;
                    imageHeight = currentParams.getPreviewSize().height;
                    frameRate = currentParams.getPreviewFrameRate();

                    // Create the yuvIplimage if needed
                    yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
                    //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
                }

                @Override
                public void surfaceDestroyed(SurfaceHolder holder) {
                    try {
                        camera.setPreviewCallback(null);
                        previewRunning = false;
                        camera.release();
                    } catch (RuntimeException e) {
                        Log.v(LOG_TAG, e.getMessage());
                        e.printStackTrace();
                    }
                }

                @Override
                public void onPreviewFrame(byte[] data, Camera camera) {
                    if (yuvIplimage != null && recording) {
                        videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

                        // Put the camera preview frame right into the yuvIplimage object
                        yuvIplimage.getByteBuffer().put(data);

                        // FAQ about IplImage:
                        // - For custom raw processing of data, getByteBuffer() returns an NIO direct
                        //   buffer wrapped around the memory pointed to by imageData, and under Android we can
                        //   also use that Buffer with Bitmap.copyPixelsFromBuffer() and copyPixelsToBuffer().
                        // - To get a BufferedImage from an IplImage, we may call getBufferedImage().
                        // - The createFrom() factory method can construct an IplImage from a BufferedImage.
                        // - There are also a few copy*() methods for BufferedImage<->IplImage data transfers.

                        // This works, but only on transparency;
                        // need to find the right matching Bitmap and IplImage types
                        /*
                        bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
                        //bitmap.setPixel(10, 10, Color.MAGENTA);
                        canvas = new Canvas(bitmap);
                        Paint paint = new Paint();
                        paint.setColor(Color.GREEN);
                        float leftx = 20;
                        float topy = 20;
                        float rightx = 50;
                        float bottomy = 100;
                        RectF rectangle = new RectF(leftx, topy, rightx, bottomy);
                        canvas.drawRect(rectangle, paint);
                        bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
                        */

                        //Log.v(LOG_TAG, "Writing Frame");
                        try {
                            // Set the timestamp, then record the image into FFmpegFrameRecorder
                            recorder.setTimestamp(videoTimestamp);
                            recorder.record(yuvIplimage);
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
        }

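    One thing the sample above glosses over: it assumes the CAMERA and RECORD_AUDIO permissions are already granted (the manifest also needs INTERNET and WAKE_LOCK). On Android 6.0+ you additionally have to request the dangerous permissions at runtime. A minimal sketch, assuming the AndroidX compat helpers; the method name and request-code constant are arbitrary placeholders you could drop into the Activity above:

        import android.Manifest;
        import android.content.pm.PackageManager;

        import androidx.core.app.ActivityCompat;
        import androidx.core.content.ContextCompat;

        // Placeholder request code; any int that is unique within the Activity works.
        private static final int REQUEST_AV_PERMISSIONS = 1;

        private void ensureAvPermissions() {
            boolean cameraOk = ContextCompat.checkSelfPermission(this,
                    Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
            boolean micOk = ContextCompat.checkSelfPermission(this,
                    Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED;
            if (!cameraOk || !micOk) {
                // Results arrive in onRequestPermissionsResult(); don't start
                // the camera or AudioRecord until both are granted.
                ActivityCompat.requestPermissions(this,
                        new String[] { Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO },
                        REQUEST_AV_PERMISSIONS);
            }
        }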

  2. 1# 那月静好 | 2019-08-31 10:32



    You're right. You'll have to look at the Android NDK for the processing-intensive parts.

    Research and build these two projects:

    yt-watchme (https://github.com/youtube/yt-watchme)
    minicap (https://github.com/openstf/minicap)

    Also go through WebRTC (a minimal bootstrap sketch follows below), and another one to look at: rticonnextdds videodemo android.
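    To make the WebRTC pointer concrete, here is a minimal sketch of bootstrapping the org.webrtc Android library (API as of recent google-webrtc releases) and creating a camera-backed video track. The class name, track id, capture parameters, and the choice of the first Camera2 device are assumptions; all signaling and peer-connection wiring is omitted:

        import android.content.Context;

        import org.webrtc.Camera2Enumerator;
        import org.webrtc.EglBase;
        import org.webrtc.PeerConnectionFactory;
        import org.webrtc.SurfaceTextureHelper;
        import org.webrtc.VideoCapturer;
        import org.webrtc.VideoSource;
        import org.webrtc.VideoTrack;

        public final class WebRtcSketch {

            // Bootstrap the WebRTC stack and return a local camera video track.
            public static VideoTrack createCameraTrack(Context appContext) {
                PeerConnectionFactory.initialize(
                        PeerConnectionFactory.InitializationOptions.builder(appContext)
                                .createInitializationOptions());
                PeerConnectionFactory factory = PeerConnectionFactory.builder()
                        .createPeerConnectionFactory();

                // Pick the first camera the Camera2 API reports (an assumption;
                // a real app would let the user choose front/back).
                Camera2Enumerator enumerator = new Camera2Enumerator(appContext);
                VideoCapturer capturer =
                        enumerator.createCapturer(enumerator.getDeviceNames()[0], null);

                EglBase eglBase = EglBase.create();
                SurfaceTextureHelper helper =
                        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
                VideoSource source = factory.createVideoSource(capturer.isScreencast());
                capturer.initialize(helper, appContext, source.getCapturerObserver());
                capturer.startCapture(1280, 720, 30); // placeholder capture parameters

                // Attach this track to a PeerConnection (not shown) to broadcast it.
                return factory.createVideoTrack("video0", source);
            }
        }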

