Sample code for wireless screen casting between Android devices

This article shows how to implement wireless screen casting between Android devices, using MediaProjection for screen capture, MediaCodec for video encoding and decoding, and WebSocket for transport.

Preface

Android provides MediaProjection for screen recording: MediaProjection captures the current screen as a video stream, and that stream must be compressed by a codec before it can be transmitted; MediaCodec handles the video encoding and decoding. The stream can be pushed and received over a Socket or WebSocket: the server pushes the encoded video stream, and the client receives it, decodes it, and renders it onto a SurfaceView to display the server's screen.

Implementing the screen casting server

The server's main entry point requests screen-capture permission and starts the recording service. It is implemented as follows:

package com.example.screenprojection;

import android.content.Intent;

import android.media.projection.MediaProjectionManager;

import android.os.Build;

import android.os.Bundle;

import android.view.View;

import androidx.annotation.Nullable;

import androidx.appcompat.app.AppCompatActivity;

public class MainActivity extends AppCompatActivity {

  private static final int PROJECTION_REQUEST_CODE = 1;

  private MediaProjectionManager mediaProjectionManager;

  @Override

  protected void onCreate(Bundle savedInstanceState) {

    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    init();

  }

  private void init() {

    mediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);

  }

  public void onClick(View view) {

    if (view.getId() == R.id.btn_start) {

      startProjection();

    }

  }

  // Request permission to start screen capture

  private void startProjection() {

    Intent intent = mediaProjectionManager.createScreenCaptureIntent();

    startActivityForResult(intent, PROJECTION_REQUEST_CODE);

  }

  @Override

  protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {

    super.onActivityResult(requestCode, resultCode, data);

    if (resultCode != RESULT_OK) {

      return;

    }

    if (requestCode == PROJECTION_REQUEST_CODE) {

      Intent service = new Intent(this, ScreenService.class);

      service.putExtra("code", resultCode);

      service.putExtra("data", data);

      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {

        startForegroundService(service);

      } else {

        startService(service);

      }

    }

  }

  @Override

  protected void onDestroy() {

    super.onDestroy();

  }

}

Since Android 8.0, screen recording must run in a foreground service with a notification. When the recording service starts, it also starts the WebSocketServer. The foreground service code is as follows:

package com.example.screenprojection;

import android.app.Notification;

import android.app.NotificationChannel;

import android.app.NotificationManager;

import android.app.PendingIntent;

import android.app.Service;

import android.content.Intent;

import android.graphics.BitmapFactory;

import android.media.projection.MediaProjection;

import android.media.projection.MediaProjectionManager;

import android.os.Build;

import android.os.IBinder;

public class ScreenService extends Service {

  private MediaProjectionManager mMediaProjectionManager;

  private SocketManager mSocketManager;

  @Override

  public void onCreate() {

    super.onCreate();

    mMediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);

    createNotificationChannel();

  }

  @Override

  public int onStartCommand(Intent intent, int flags, int startId) {

    int resultCode = intent.getIntExtra("code", -1);

    Intent resultData = intent.getParcelableExtra("data");

    startProject(resultCode, resultData);

    return super.onStartCommand(intent, flags, startId);

  }

  // Once screen capture is granted, encode and push the stream

  private void startProject(int resultCode, Intent data) {

    MediaProjection mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data);

    if (mediaProjection == null) {

      return;

    }

    // Initialize the server side

    mSocketManager = new SocketManager();

    mSocketManager.start(mediaProjection);

  }

  private void createNotificationChannel() {

    Notification.Builder builder = new Notification.Builder(this.getApplicationContext());

    Intent nfIntent = new Intent(this, MainActivity.class);

    builder

        .setContentIntent(PendingIntent.getActivity(this, 0, nfIntent, 0))

        .setLargeIcon(

            BitmapFactory.decodeResource(

                this.getResources(), R.mipmap.ic_launcher)) // Large icon shown in the notification shade

        .setSmallIcon(R.mipmap.ic_launcher) // Small icon shown in the status bar

        .setContentText("is running......") // Notification content text

        .setWhen(System.currentTimeMillis()); // Timestamp of the notification

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {

      builder.setChannelId("notification_id");

      // Notification channel required for foreground services on Android 8.0+

      NotificationManager notificationManager =

          (NotificationManager) getSystemService(NOTIFICATION_SERVICE);

      NotificationChannel channel =

          new NotificationChannel(

              "notification_id", "notification_name", NotificationManager.IMPORTANCE_LOW);

      notificationManager.createNotificationChannel(channel);

    }

    Notification notification = builder.build();

    notification.defaults = Notification.DEFAULT_SOUND; // Use the default notification sound

    startForeground(110, notification);

  }

  @Override

  public IBinder onBind(Intent intent) {

    return null;

  }

  @Override

  public void onDestroy() {

    mSocketManager.close();

    super.onDestroy();

  }

}

SocketManager is the controller for the WebSocketServer; it also binds the MediaProjection screen-capture object to the video encoder ScreenEncoder.

package com.example.screenprojection;

import android.media.projection.MediaProjection;

import java.net.InetSocketAddress;

public class SocketManager {

  private static final String TAG = SocketManager.class.getSimpleName();

  private static final int SOCKET_PORT = 50000;

  private final ScreenSocketServer mScreenSocketServer;

  private ScreenEncoder mScreenEncoder;

  public SocketManager() {

    mScreenSocketServer = new ScreenSocketServer(new InetSocketAddress(SOCKET_PORT));

  }

  public void start(MediaProjection mediaProjection) {

    mScreenSocketServer.start();

    mScreenEncoder = new ScreenEncoder(this, mediaProjection);

    mScreenEncoder.startEncode();

  }

  public void close() {

    try {

      mScreenSocketServer.stop();

      mScreenSocketServer.close();

    } catch (InterruptedException e) {

      e.printStackTrace();

    }

    if (mScreenEncoder != null) {

      mScreenEncoder.stopEncode();

    }

  }

  public void sendData(byte[] bytes) {

    mScreenSocketServer.sendData(bytes);

  }

}

The code of the streaming server ScreenSocketServer is as follows:

package com.example.screenprojection;

import android.util.Log;

import org.java_websocket.WebSocket;

import org.java_websocket.handshake.ClientHandshake;

import org.java_websocket.server.WebSocketServer;

import java.net.InetSocketAddress;

public class ScreenSocketServer extends WebSocketServer {

  private final String TAG = ScreenSocketServer.class.getSimpleName();

  private WebSocket mWebSocket;

  public ScreenSocketServer(InetSocketAddress inetSocketAddress) {

    super(inetSocketAddress);

  }

  @Override

  public void onOpen(WebSocket conn, ClientHandshake handshake) {

    Log.d(TAG, "onOpen");

    mWebSocket = conn;

  }

  @Override

  public void onClose(WebSocket conn, int code, String reason, boolean remote) {

    Log.d(TAG, "onClose:" + reason);

  }

  @Override

  public void onMessage(WebSocket conn, String message) {}

  @Override

  public void onError(WebSocket conn, Exception ex) {

    Log.d(TAG, "onError:" + ex.toString());

  }

  @Override

  public void onStart() {

    Log.d(TAG, "onStart");

  }

  public void sendData(byte[] bytes) {

    if (mWebSocket != null && mWebSocket.isOpen()) {

      // Send the data over the WebSocket

      Log.d(TAG, "sendData:");

      mWebSocket.send(bytes);

    }

  }

  public void close() {

    if (mWebSocket != null) {

      mWebSocket.close();

    }

  }

}

When the WebSocketServer has started successfully, the onStart method is called back.

@Override

public void onStart() {

     Log.d(TAG, "onStart");

}

When a client connects, the onOpen method is called back; the WebSocket object passed to the callback can then be used to send data to that client.

@Override

public void onOpen(WebSocket conn, ClientHandshake handshake) {

    Log.d(TAG, "onOpen");

    mWebSocket = conn;

}

The video encoder ScreenEncoder uses H.265 (H.265/HEVC) encoding. Note that the maximum encoding resolution supported differs between phones.

package com.example.screenprojection;

import android.hardware.display.DisplayManager;

import android.media.MediaCodec;

import android.media.MediaCodecInfo;

import android.media.MediaFormat;

import android.media.projection.MediaProjection;

import android.view.Surface;

import java.io.IOException;

import java.nio.ByteBuffer;

/**

 * Uses H.265 encoding. The H.265/HEVC encoding architecture is broadly similar to H.264/AVC; H.265 is also known as HEVC (High Efficiency Video Coding).

 */

public class ScreenEncoder extends Thread {

  // The maximum encoding resolution supported differs between phones

  private static final int VIDEO_WIDTH = 2160;

  private static final int VIDEO_HEIGHT = 3840;

  private static final int SCREEN_FRAME_RATE = 20;

  private static final int SCREEN_FRAME_INTERVAL = 1;

  private static final long SOCKET_TIME_OUT = 10000;

  // HEVC NAL unit type for an IDR (key) frame

  private static final int TYPE_FRAME_INTERVAL = 19;

  // HEVC NAL unit type for a VPS (video parameter set)

  private static final int TYPE_FRAME_VPS = 32;

  private final MediaProjection mMediaProjection;

  private final SocketManager mSocketManager;

  private MediaCodec mMediaCodec;

  private boolean mPlaying = true;

  // Cached VPS/PPS/SPS configuration data

  private byte[] vps_pps_sps;

  public ScreenEncoder(SocketManager socketManager, MediaProjection mediaProjection) {

    mSocketManager = socketManager;

    mMediaProjection = mediaProjection;

  }

  public void startEncode() {

    MediaFormat mediaFormat =

        MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, VIDEO_WIDTH, VIDEO_HEIGHT);

    mediaFormat.setInteger(

        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);

    // Bit rate (bits per second)

    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_WIDTH * VIDEO_HEIGHT);

    // Frame rate

    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, SCREEN_FRAME_RATE);

    // Key frame (I-frame) interval

    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, SCREEN_FRAME_INTERVAL);

    try {

      mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);

      mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

      Surface surface = mMediaCodec.createInputSurface();
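
      // Mirror the captured screen onto the encoder's input Surface
      // (arguments: name, width, height, dpi, flags, surface, callback, handler)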

      mMediaProjection.createVirtualDisplay(

          "screen",

          VIDEO_WIDTH,

          VIDEO_HEIGHT,

          1,

          DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,

          surface,

          null,

          null);

    } catch (IOException e) {

      e.printStackTrace();

    }

    start();

  }

  @Override

  public void run() {

    mMediaCodec.start();

    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

    while (mPlaying) {

      int outPutBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, SOCKET_TIME_OUT);

      if (outPutBufferId >= 0) {

        ByteBuffer byteBuffer = mMediaCodec.getOutputBuffer(outPutBufferId);

        encodeData(byteBuffer, bufferInfo);

        mMediaCodec.releaseOutputBuffer(outPutBufferId, false);

      }

    }

  }

  private void encodeData(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
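
    // The encoder outputs Annex-B NAL units: skip the 4-byte (or 3-byte) start code first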

    int offSet = 4;

    if (byteBuffer.get(2) == 0x01) {

      offSet = 3;

    }
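
    // The HEVC NAL unit type is stored in bits 1-6 of the first byte after the start code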

    int type = (byteBuffer.get(offSet) & 0x7E) >> 1;

    if (type == TYPE_FRAME_VPS) {

      vps_pps_sps = new byte[bufferInfo.size];

      byteBuffer.get(vps_pps_sps);

    } else if (type == TYPE_FRAME_INTERVAL) {

      final byte[] bytes = new byte[bufferInfo.size];

      byteBuffer.get(bytes);

      byte[] newBytes = new byte[vps_pps_sps.length + bytes.length];

      System.arraycopy(vps_pps_sps, 0, newBytes, 0, vps_pps_sps.length);

      System.arraycopy(bytes, 0, newBytes, vps_pps_sps.length, bytes.length);

      mSocketManager.sendData(newBytes);

    } else {

      byte[] bytes = new byte[bufferInfo.size];

      byteBuffer.get(bytes);

      mSocketManager.sendData(bytes);

    }

  }

  public void stopEncode() {

    mPlaying = false;

    if (mMediaCodec != null) {

      mMediaCodec.release();

    }

    if (mMediaProjection != null) {

      mMediaProjection.stop();

    }

  }

}

Notes:
1. The project depends on the Java-WebSocket library:

implementation "org.java-websocket:Java-WebSocket:1.5.3"

2. Declare the network and foreground service permissions in AndroidManifest.

<uses-permission android:name="android.permission.INTERNET"/>

<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />

3. Set the foregroundServiceType attribute on the screen-recording foreground service.

<service

        android:name=".ScreenService"

        android:enabled="true"

        android:exported="true"

        android:foregroundServiceType="mediaProjection" />

Implementing the screen casting client

The client entry point creates a SurfaceView and initializes the WebSocket client manager SocketClientManager.

package com.example.screenplayer;

import android.os.Bundle;

import android.view.Surface;

import android.view.SurfaceHolder;

import android.view.SurfaceView;

import androidx.annotation.NonNull;

import androidx.appcompat.app.AppCompatActivity;

public class MainActivity extends AppCompatActivity {

  private SocketClientManager mSocketClientManager;

  @Override

  protected void onCreate(Bundle savedInstanceState) {

    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);

    SurfaceView surfaceView = findViewById(R.id.sv_screen);

    surfaceView

        .getHolder()

        .addCallback(

            new SurfaceHolder.Callback() {

              @Override

              public void surfaceCreated(@NonNull SurfaceHolder holder) {

                // Connect to the server

                initSocketManager(holder.getSurface());

              }

              @Override

              public void surfaceChanged(

                  @NonNull SurfaceHolder holder, int format, int width, int height) {}

              @Override

              public void surfaceDestroyed(@NonNull SurfaceHolder holder) {}

            });

  }

  private void initSocketManager(Surface surface) {

    mSocketClientManager = new SocketClientManager();

    mSocketClientManager.start(surface);

  }

  @Override

  protected void onDestroy() {

    if (mSocketClientManager != null) {

      mSocketClientManager.stop();

    }

    super.onDestroy();

  }

}

SocketClientManager binds the Surface to the decoder, creates the WebSocket client, and receives the video stream pushed by the server. The URI must be changed to the server's IP address and port.

package com.example.screenplayer;

import android.view.Surface;

import java.net.URI;

import java.net.URISyntaxException;

public class SocketClientManager implements ScreenSocketClient.SocketCallback {

  private static final int SOCKET_PORT = 50000;

  private ScreenDecoder mScreenDecoder;

  private ScreenSocketClient mSocketClient;

  public void start(Surface surface) {

    mScreenDecoder = new ScreenDecoder();

    mScreenDecoder.startDecode(surface);

    try {

      // Change this to the server's IP address and port

      URI uri = new URI("ws://192.168.1.3:" + SOCKET_PORT);

      mSocketClient = new ScreenSocketClient(this, uri);

      mSocketClient.connect();

    } catch (URISyntaxException e) {

      e.printStackTrace();

    }

  }

  public void stop() {

    if (mSocketClient != null) {

      mSocketClient.close();

    }

    if (mScreenDecoder != null) {

      mScreenDecoder.stopDecode();

    }

  }

  @Override

  public void onReceiveData(byte[] data) {

    if (mScreenDecoder != null) {

      mScreenDecoder.decodeData(data);

    }

  }

}
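
The hard-coded address above must match the server device's LAN IP. It can be read from the server's Wi-Fi settings, or looked up in code; the following is a minimal sketch (a hypothetical helper, not part of the original project) that returns the device's first non-loopback IPv4 address:

package com.example.screenprojection;

import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Enumeration;

// Hypothetical helper for the server app: finds the LAN IPv4 address to put into the client's URI.
public final class IpUtils {

  public static String getLocalIpAddress() {
    try {
      Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
      while (interfaces.hasMoreElements()) {
        NetworkInterface nif = interfaces.nextElement();
        Enumeration<InetAddress> addresses = nif.getInetAddresses();
        while (addresses.hasMoreElements()) {
          InetAddress address = addresses.nextElement();
          // Skip loopback; return the first IPv4 address (typically the Wi-Fi address)
          if (!address.isLoopbackAddress() && address instanceof Inet4Address) {
            return address.getHostAddress();
          }
        }
      }
    } catch (SocketException e) {
      e.printStackTrace();
    }
    return null;
  }
}

On the client, the address returned on the server would then replace 192.168.1.3 in the ws:// URI.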

The ScreenSocketClient code is as follows:

package com.example.screenplayer;

import android.util.Log;

import org.java_websocket.client.WebSocketClient;

import org.java_websocket.handshake.ServerHandshake;

import java.net.URI;

import java.nio.ByteBuffer;

public class ScreenSocketClient extends WebSocketClient {

  private static final String TAG = ScreenSocketClient.class.getSimpleName();

  private final SocketCallback mSocketCallback;

  public ScreenSocketClient(SocketCallback socketCallback, URI serverUri) {

    super(serverUri);

    mSocketCallback = socketCallback;

  }

  @Override

  public void onOpen(ServerHandshake serverHandshake) {

    Log.d(TAG, "onOpen");

  }

  @Override

  public void onMessage(String message) {}

  @Override

  public void onMessage(ByteBuffer bytes) {

    byte[] buf = new byte[bytes.remaining()];

    bytes.get(buf);

    if (mSocketCallback != null) {

      mSocketCallback.onReceiveData(buf);

    }

  }

  @Override

  public void onClose(int code, String reason, boolean remote) {

    Log.d(TAG, "onClose =" + reason);

  }

  @Override

  public void onError(Exception ex) {

    Log.d(TAG, "onError =" + ex.toString());

  }

  public interface SocketCallback {

    void onReceiveData(byte[] data);

  }

}

The maximum decoding resolution supported also differs between phones, and the resolution must be set correctly to avoid errors. The screen casting decoder ScreenDecoder is implemented as follows:

package com.example.screenplayer;

import android.media.MediaCodec;

import android.media.MediaFormat;

import android.view.Surface;

import java.io.IOException;

import java.nio.ByteBuffer;

public class ScreenDecoder {

  private static final int VIDEO_WIDTH = 2160;

  private static final int VIDEO_HEIGHT = 3840;

  private static final long DECODE_TIME_OUT = 10000;

  private static final int SCREEN_FRAME_RATE = 20;

  private static final int SCREEN_FRAME_INTERVAL = 1;

  private MediaCodec mMediaCodec;

  public ScreenDecoder() {}

  public void startDecode(Surface surface) {

    try {

      // Configure the MediaCodec decoder

      mMediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);

      MediaFormat mediaFormat =

          MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, VIDEO_WIDTH, VIDEO_HEIGHT);

      mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_WIDTH * VIDEO_HEIGHT);

      mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, SCREEN_FRAME_RATE);

      mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, SCREEN_FRAME_INTERVAL);

      mMediaCodec.configure(mediaFormat, surface, null, 0);

      mMediaCodec.start();

    } catch (IOException e) {

      e.printStackTrace();

    }

  }

  public void decodeData(byte[] data) {

    int index = mMediaCodec.dequeueInputBuffer(DECODE_TIME_OUT);

    if (index >= 0) {

      ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(index);

      inputBuffer.clear();

      inputBuffer.put(data, 0, data.length);

      mMediaCodec.queueInputBuffer(index, 0, data.length, System.currentTimeMillis(), 0);

    }
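
    // Drain all available output buffers and render them directly onto the Surface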

    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, DECODE_TIME_OUT);

    while (outputBufferIndex >= 0) {

      mMediaCodec.releaseOutputBuffer(outputBufferIndex, true);

      outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);

    }

  }

  public void stopDecode() {

    if (mMediaCodec != null) {

      mMediaCodec.release();

    }

  }

}

Notes:
1. The client also depends on the Java-WebSocket library:

implementation "org.java-websocket:Java-WebSocket:1.5.3"

2. Declare the network permission in AndroidManifest.

<uses-permission android:name="android.permission.INTERNET"/>

Result

With the server on the left and the client on the right, the server's screen is mirrored onto the client.

Errors encountered

1. WebSocket reports a Permission denied error:

onerror =java.net.SocketException: socket failed: EACCES (Permission denied)

Cause: the network permission is missing.

<uses-permission android:name="android.permission.INTERNET"/>

2. WebSocket reports a Host unreachable error:

onerror =java.net.NoRouteToHostException: Host unreachable

Cause: the two devices are not on the same LAN, or the server did not start properly.

3. WebSocket reports a failed to connect error:

failed to connect to /192.168.1.3 (port 50000) from /:: (port 38262)

Cause: the two devices are not on the same LAN, or the server did not start properly.

When the client connects successfully, the onOpen method is called back:

@Override

public void onOpen(ServerHandshake handshakedata) {

     Log.d(TAG,"SocketClient onOpen");

}

4. An exception is thrown when the MediaCodec is configured with the MediaFormat:

Caused by: java.lang.IllegalArgumentException
at android.media.MediaCodec.native_configure(Native Method)
at android.media.MediaCodec.configure(MediaCodec.java:2023)
at android.media.MediaCodec.configure(MediaCodec.java:1951)
at com.example.screenprojection.ScreenEncoder.startEncode(ScreenEncoder.java:56)

Cause: the maximum codec resolution supported differs between phones; the resolutions a device supports can be checked with the adb command adb shell cat /system/etc/media_codecs.xml. Omitting configuration keys that the encoder requires can also throw an IllegalStateException.

<MediaCodec name="OMX.hisi.video.decoder.hevc.secure" type="video/hevc" >

     <Quirk name="needs-flush-on-all-ports" />

     <Limit name="size" min="128x128" max="4096x2304" />

     <Limit name="alignment" value="2x2" />

     <Limit name="block-size" value="16x16" />

     <Limit name="block-count" range="64-36896" />

     <Limit name="blocks-per-second" range="99-1106880" />

     <Limit name="bitrate" range="1-52428800" />

     <Feature name="adaptive-playback" />

     <Feature name="secure-playback" required="true" />

     <Quirk name="requires-allocate-on-input-ports" />

     <Quirk name="requires-allocate-on-output-ports" />

     <Limit name="concurrent-instances" max="1" />

 </MediaCodec>

 <!--config suggestion for hevc -->

 <!--VT:1280x720@1Mbps@30fps,640x360@450kpbps@30fps,640x360@250kbps@30fps,640x360@170kbps@15fps-->

 <!--DestkTop Share: 1280x720@500kbps@3-7fps,720x450@200kbps@3-7fps-->

 <!--Wifi Display:1920x1080@8Mbps@30fps 4Kx2k@27M@30fps-->

 <!--Recoding:1920x1080@12Mbps@30fps,4k2k@30Mbps@30fps,1920x1080@40M@120fps,1280X720@40M@120fps-->

 <MediaCodec name="OMX.hisi.video.encoder.hevc" type="video/hevc" >

     <Limit name="size" min="176x144" max="3840x2160" />

     <Limit name="alignment" value="4x4" />

     <Limit name="block-size" value="64x64" />

     <Limit name="blocks-per-second" range="99-244800" />

     <Limit name="bitrate" range="200000-70000000" />

     <Quirk name="requires-allocate-on-output-ports" />

     <Limit name="concurrent-instances" max="3" />

 </MediaCodec>
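
Besides pulling media_codecs.xml over adb, the supported sizes can also be queried at runtime through MediaCodecList. The following is a minimal sketch (not part of the original project) that logs, for each HEVC encoder on the device, its supported width/height ranges and whether it accepts the 2160x3840 size used above:

package com.example.screenprojection;

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
import android.util.Range;

// Hypothetical helper: logs HEVC encoder capabilities so a safe resolution can be chosen.
public final class CodecCapabilityChecker {

  private static final String TAG = "CodecCapability";

  public static void logHevcEncoderSupport(int width, int height) {
    MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    for (MediaCodecInfo info : codecList.getCodecInfos()) {
      if (!info.isEncoder()) {
        continue;
      }
      for (String type : info.getSupportedTypes()) {
        if (!MediaFormat.MIMETYPE_VIDEO_HEVC.equalsIgnoreCase(type)) {
          continue;
        }
        MediaCodecInfo.VideoCapabilities caps =
            info.getCapabilitiesForType(type).getVideoCapabilities();
        Range<Integer> widths = caps.getSupportedWidths();
        Range<Integer> heights = caps.getSupportedHeights();
        Log.d(TAG, info.getName()
            + " widths=" + widths
            + " heights=" + heights
            + " supports " + width + "x" + height + ": "
            + caps.isSizeSupported(width, height));
      }
    }
  }
}

Calling logHevcEncoderSupport(2160, 3840) before configuring the encoder makes it easy to see whether the hard-coded size needs to be lowered on a given device.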

Reprinted from: https://www.jb51.net/article/251671.htm
