关于CMakeLists.txt的配置,这里直接给出代码:
cmake_minimum_required(VERSION 3.4.1)
# Pull in the CMakeLists.txt of the bundled librtmp sources so the
# "rtmp" static-library target is available for target_link_libraries below.
add_subdirectory(src/main/cpp/librtmp)
# One shared library holds both the JNI bridge and the video encoding channel.
add_library(
native-lib
SHARED
src/main/cpp/native-lib.cpp
src/main/cpp/VideoChannel.cpp)
# Header search path (x264 / rtmp headers live here).
include_directories(src/main/cpp/include)
# NOTE(review): appending -L via CMAKE_CXX_FLAGS is a workaround to put the
# per-ABI prebuilt libx264 directory on the linker search path; a cleaner
# alternative would be an IMPORTED library target — confirm before changing.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${CMAKE_SOURCE_DIR}/src/main/cpp/libs/${ANDROID_ABI}")
# rtmp comes from add_subdirectory, x264 from the -L path, log is the NDK logger.
target_link_libraries(
native-lib
rtmp
x264
log)
在视频推流中,java层主要实现初始化、设置画布、转摄像头、开始直播和停止直播操作:
MainActivity:
import androidx.appcompat.app.AppCompatActivity;
import android.hardware.Camera;
import android.os.Bundle;
import android.view.SurfaceView;
import android.view.View;
import android.widget.TextView;
import com.example.rtmp.databinding.ActivityMainBinding;
import com.example.rtmp.live.LivePusher;
public class MainActivity extends AppCompatActivity {

    private LivePusher livePusher;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // BUG FIX: the layout must be inflated before findViewById, otherwise
        // surfaceView is null and setPreviewDisplay throws an NPE.
        setContentView(R.layout.activity_main);
        SurfaceView surfaceView = findViewById(R.id.surfaceView);
        // 800x480 preview, 800 kbps, 10 fps, starting with the back camera.
        livePusher = new LivePusher(this, 800, 480, 800_000, 10,
                Camera.CameraInfo.CAMERA_FACING_BACK);
        // Attach the camera preview to the SurfaceView's holder.
        livePusher.setPreviewDisplay(surfaceView.getHolder());
    }

    /** onClick handler: toggle between front and back camera. */
    public void switchCamera(View view) {
        // BUG FIX: the stub body never delegated to LivePusher.
        livePusher.switchCamera();
    }

    /** onClick handler: connect to the RTMP server and start pushing. */
    public void startLive(View view) {
        livePusher.startLive("rtmp://47.75.90.219/myapp/mystream");
    }

    /** onClick handler: stop both channels and tear down the connection. */
    public void stopLive(View view) {
        // BUG FIX: the stub body never delegated to LivePusher.
        livePusher.stopLive();
    }
}
LivePusher代码:
package com.example.rtmp.live;
import android.app.Activity;
import android.view.SurfaceHolder;
import com.example.rtmp.live.channel.AudioChannel;
import com.example.rtmp.live.channel.VideoChannel;
public class LivePusher {

    static {
        // Load the JNI library that implements the native_* methods below.
        System.loadLibrary("native-lib");
    }

    private VideoChannel videoChannel;
    private AudioChannel audioChannel;

    public LivePusher(Activity activity, int width, int height, int bitrate,
                      int fps, int cameraId) {
        // Prepare the native side (packet queue + encoder wrapper) before
        // either channel can deliver data into it.
        native_init();
        videoChannel = new VideoChannel(this, activity, width, height, bitrate, fps, cameraId);
        audioChannel = new AudioChannel();
    }

    /** Bind the camera preview to the given surface. */
    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        videoChannel.setPreviewDisplay(surfaceHolder);
    }

    /** Toggle between front and back camera. */
    public void switchCamera() {
        videoChannel.switchCamera();
    }

    /**
     * Start streaming to {@code path}: native_start spawns a thread that
     * connects to the RTMP server, then both channels begin producing packets.
     */
    public void startLive(String path) {
        native_start(path);
        videoChannel.startLive();
        audioChannel.startLive();
    }

    /** Stop both channels from producing, then shut down the native link. */
    public void stopLive() {
        videoChannel.stopLive();
        audioChannel.stopLive();
        native_stop();
    }

    public native void native_init();

    public native void native_start(String path);

    public native void native_setVideoEncInfo(int width, int height, int fps, int bitrate);

    public native void native_pushVideo(byte[] data);

    public native void native_stop();

    public native void native_release();
}
CameraHelper代码:
package com.example.rtmp.live.channel;
import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import java.util.Iterator;
import java.util.List;
public class CameraHelper implements SurfaceHolder.Callback, Camera.PreviewCallback {

    private static final String TAG = "CameraHelper";

    private Activity mActivity;
    private int mHeight;
    private int mWidth;
    private int mCameraId;
    private Camera mCamera;
    // Reusable preview callback buffer (one NV21 frame: width * height * 3 / 2).
    private byte[] buffer;
    private SurfaceHolder mSurfaceHolder;
    private Camera.PreviewCallback mPreviewCallback;
    private int mRotation;
    private OnChangedSizeListener mOnChangedSizeListener;

    public CameraHelper(Activity activity, int cameraId, int width, int height) {
        mActivity = activity;
        mCameraId = cameraId;
        mWidth = width;
        mHeight = height;
    }

    /** Toggle between front and back camera, restarting the preview. */
    public void switchCamera() {
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        } else {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        }
        stopPreview();
        startPreview();
    }

    /** Stop preview and fully release the camera so the other facing can open. */
    private void stopPreview() {
        if (mCamera != null) {
            // Detach the data callback before stopping.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    /** Open the camera, configure it, and begin delivering NV21 frames. */
    private void startPreview() {
        try {
            mCamera = Camera.open(mCameraId);
            Camera.Parameters parameters = mCamera.getParameters();
            // NV21 is the preview format every legacy Camera must support.
            parameters.setPreviewFormat(ImageFormat.NV21);
            // Snap mWidth/mHeight to the closest supported preview size.
            setPreviewSize(parameters);
            // Rotate the preview to match the current display rotation.
            setPreviewOrientation(parameters);
            mCamera.setParameters(parameters);
            // Allocate after setPreviewSize so the buffer matches the real size.
            buffer = new byte[mWidth * mHeight * 3 / 2];
            mCamera.addCallbackBuffer(buffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            mCamera.setPreviewDisplay(mSurfaceHolder);
            // BUG FIX: guard against a missing listener — previously an NPE if
            // startPreview ran before setOnChangedSizeListener was called.
            if (mOnChangedSizeListener != null) {
                mOnChangedSizeListener.onChanged(mWidth, mHeight);
            }
            mCamera.startPreview();
        } catch (Exception ex) {
            // Log with context instead of printStackTrace.
            Log.e(TAG, "startPreview failed", ex);
        }
    }

    /** Compute and apply the display rotation for the current camera facing. */
    private void setPreviewOrientation(Camera.Parameters parameters) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(mCameraId, info);
        mRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (mRotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90: // landscape, home button on the right
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270: // landscape, home button on the left
                degrees = 270;
                break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate the mirror
        } else { // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        mCamera.setDisplayOrientation(result);
    }

    /**
     * Pick the supported preview size whose pixel count is closest to the
     * requested mWidth x mHeight, and update mWidth/mHeight to match it.
     */
    private void setPreviewSize(Camera.Parameters parameters) {
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size size = supportedPreviewSizes.get(0);
        Log.d(TAG, "支持 " + size.width + "x" + size.height);
        int m = Math.abs(size.height * size.width - mWidth * mHeight);
        supportedPreviewSizes.remove(0);
        for (Camera.Size next : supportedPreviewSizes) {
            Log.d(TAG, "支持 " + next.width + "x" + next.height);
            int n = Math.abs(next.height * next.width - mWidth * mHeight);
            if (n < m) {
                m = n;
                size = next;
            }
        }
        mWidth = size.width;
        mHeight = size.height;
        parameters.setPreviewSize(mWidth, mHeight);
        Log.d(TAG, "设置预览分辨率 width:" + size.width + " height:" + size.height);
    }

    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        mSurfaceHolder = surfaceHolder;
        // Preview is (re)started from the surface lifecycle callbacks below.
        mSurfaceHolder.addCallback(this);
    }

    public void setPreviewCallback(Camera.PreviewCallback previewCallback) {
        mPreviewCallback = previewCallback;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // Surface dimensions changed: restart the preview from scratch.
        stopPreview();
        startPreview();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        stopPreview();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // NOTE(review): data is still in sensor orientation here (the display
        // rotation above only affects the on-screen preview).
        // BUG FIX: guard against a missing downstream callback.
        if (mPreviewCallback != null) {
            mPreviewCallback.onPreviewFrame(data, camera);
        }
        // Re-queue the buffer, otherwise preview callbacks stop after one frame.
        camera.addCallbackBuffer(buffer);
    }

    public void setOnChangedSizeListener(OnChangedSizeListener listener) {
        mOnChangedSizeListener = listener;
    }

    /** Notifies the consumer of the actual preview size the camera settled on. */
    public interface OnChangedSizeListener {
        void onChanged(int w, int h);
    }
}
VideoChannel:
package com.example.rtmp.live.channel;
import android.app.Activity;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import com.example.rtmp.live.LivePusher;
public class VideoChannel implements Camera.PreviewCallback, CameraHelper.OnChangedSizeListener {

    private LivePusher mLivePusher;
    private CameraHelper cameraHelper;
    private int mBitrate;
    private int mFps;
    private boolean isLiving;

    public VideoChannel(LivePusher livePusher, Activity activity, int width, int height,
                       int bitrate, int fps, int cameraId) {
        mLivePusher = livePusher;
        mBitrate = bitrate;
        mFps = fps;
        cameraHelper = new CameraHelper(activity, cameraId, width, height);
        // Receive raw NV21 preview frames from the camera.
        cameraHelper.setPreviewCallback(this);
        // Be told the actual preview size the camera settled on.
        cameraHelper.setOnChangedSizeListener(this);
    }

    /** Forward the surface to the camera helper for preview rendering. */
    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        cameraHelper.setPreviewDisplay(surfaceHolder);
    }

    /**
     * One NV21 preview frame from the camera. Frames are only handed to the
     * native encoder while a live session is active; otherwise they are dropped.
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (!isLiving) {
            return;
        }
        mLivePusher.native_pushVideo(data);
    }

    public void switchCamera() {
        cameraHelper.switchCamera();
    }

    /** Actual camera size known: (re)configure the native x264 encoder. */
    @Override
    public void onChanged(int w, int h) {
        mLivePusher.native_setVideoEncInfo(w, h, mFps, mBitrate);
    }

    /** From now on onPreviewFrame forwards frames to the native layer. */
    public void startLive() {
        isLiving = true;
    }

    public void stopLive() {
        isLiving = false;
    }
}
可以使用debug看一看操作的流程
下面进行jni层的开发:
首先要初始化设置:
// Queue of packed RTMP packets: the encoder thread pushes, the network
// thread started in native_start pops and sends.
SafeQueue<RTMPPacket *> packets;
VideoChannel *videoChannel = 0;
// BUG FIX: was declared "isstart" but every use site spells it "isStart"
// (native_1start, the worker loop), which would not compile.
int isStart = 0;
pthread_t pid;
// NOTE(review): readyPushing and start_time are used by the start() worker
// below but were never declared in the original snippets; declared here —
// remove if they already exist elsewhere in the file.
int readyPushing = 0;
uint32_t start_time = 0;

// Queue release callback: frees a packet when the queue is cleared.
void releasePackets(RTMPPacket *&packet) {
    DELETE(packet);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1init(JNIEnv *env, jobject instance) {
    // Create the video encoder wrapper used by the other native_* entry points.
    videoChannel=new VideoChannel;
    // Packets produced by the encoder are queued and drained by the network
    // thread; releasePackets frees entries whenever the queue is cleared.
    packets.setReleaseCallback(releasePackets);
}
初始化编码器:
extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1setVideoEncInfo(JNIEnv *env, jobject instance, jint width,
                                                              jint height, jint fps, jint bitrate) {
    // Nothing to configure until native_init has created the channel.
    if (!videoChannel) {
        return;
    }
    videoChannel->setVideoEncInfo(width, height, fps, bitrate);
}
setVideoEncInfo代码如下:
// (Re)open the x264 encoder for the given geometry and rate. Called again on
// camera switch, so any previously opened encoder is closed first. The mutex
// keeps this safe against encodeData running on the preview thread.
void VideoChannel::setVideoEncInfo(jint width, jint height, jint fps, jint bitrate) {
    pthread_mutex_lock(&mutex);
    // NOTE(review): member is spelled "mWidtd" — presumably matches a typo in
    // the (unseen) header; kept as-is so the file still compiles. TODO confirm.
    mWidtd = width;
    mHeight = height;
    mFps = fps;
    mBitrate = bitrate;
    // Camera switch re-enters here: release the previous encoder instance.
    if (videoCodec) {
        x264_encoder_close(videoCodec);
        videoCodec = 0;
    }
    x264_param_t param;
    // "ultrafast": fastest preset; "zerolatency": no encoder frame delay.
    // BUG FIX: the original text contained the HTML entity garble "¶m"
    // (mojibake of "&para;m") where "&param" belongs — it would not compile.
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    // H.264 level 3.2.
    param.i_level_idc = 32;
    // Encoder input is I420 (encodeData converts NV21 to I420).
    param.i_csp = X264_CSP_I420;
    param.i_width = width;
    param.i_height = height;
    // No B-frames: lower latency for live streaming.
    param.i_bframe = 0;
    // ABR (average bitrate) rate control; x264 takes kbps, Java passes bps.
    param.rc.i_rc_method = X264_RC_ABR;
    param.rc.i_bitrate = bitrate / 1000;
    // Transient ceiling; vbv_buffer_size is mandatory once vbv_max is set.
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
    param.rc.i_vbv_buffer_size = bitrate / 1000;
    // Frame rate and matching timebase.
    param.i_fps_num = fps;
    param.i_fps_den = 1;
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    // Use fps rather than timestamps to compute frame distance.
    param.b_vfr_input = 0;
    // One keyframe every 2 seconds.
    param.i_keyint_max = fps * 2;
    // Repeat SPS/PPS before every keyframe so late joiners can decode.
    param.b_repeat_headers = 1;
    param.i_threads = 1;
    x264_param_apply_profile(&param, "baseline");
    videoCodec = x264_encoder_open(&param);
    pthread_mutex_unlock(&mutex);
}
加一个互斥锁 多线程的时候为了保护线程安全,一般加互斥锁。
开始直播:
// Worker thread: connect to the RTMP server, then drain the packet queue and
// send until stopped or a send fails. `args` is a heap-allocated URL string
// this thread owns and must free.
void *start(void *args) {
    char *url = static_cast<char *>(args);
    RTMP *rtmp = 0;
    do {
        rtmp = RTMP_Alloc();
        if (!rtmp) {
            LOGE("alloc rtmp失败");
            break;
        }
        RTMP_Init(rtmp);
        int ret = RTMP_SetupURL(rtmp, url);
        if (!ret) {
            LOGE("设置地址失败:%s", url);
            break;
        }
        // 5 second timeout.
        rtmp->Link.timeout = 5;
        // Must be enabled before connecting, otherwise publishing is rejected.
        RTMP_EnableWrite(rtmp);
        ret = RTMP_Connect(rtmp, 0);
        if (!ret) {
            LOGE("连接服务器:%s", url);
            break;
        }
        ret = RTMP_ConnectStream(rtmp, 0);
        if (!ret) {
            LOGE("连接流:%s", url);
            break;
        }
        // Timestamp origin for all outgoing packets.
        start_time = RTMP_GetTime();
        // Signal the producers that streaming may begin.
        readyPushing = 1;
        packets.setWork(1);
        RTMPPacket *packet = 0;
        while (readyPushing) {
            // Blocks until a packet is available or the queue stops working.
            packets.pop(packet);
            if (!isStart) {
                break;
            }
            if (!packet) {
                continue;
            }
            packet->m_nInfoField2 = rtmp->m_stream_id;
            // queue=1: buffered send.
            // NOTE(review): on sudden network loss RTMP_SendPacket ->
            // RTMP_Close -> RTMP_SendPacket can recurse inside rtmpdump; the
            // author's workaround is commenting out RTMP_Close in WriteN
            // (rtmp.c) — confirm that patch is applied.
            ret = RTMP_SendPacket(rtmp, packet, 1);
            releasePackets(packet);
            if (!ret) {
                LOGE("发送失败");
                break;
            }
        }
        // Safe even after the loop released it: releasePackets nulls the ptr.
        releasePackets(packet);
    } while (0);
    isStart = 0;
    readyPushing = 0;
    packets.setWork(0);
    packets.clear();
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    // BUG FIX: url was allocated with new char[]; plain `delete (url)` is
    // undefined behaviour for array allocations — must be delete[].
    delete[] url;
    return 0;
}
extern "C"
JNIEXPORT void JNICALL
// BUG FIX: the exported symbol said Java_com_example_push_..., but the Java
// class lives in com.example.rtmp.live (see native_1init above), so the JVM
// would throw UnsatisfiedLinkError; renamed to match the actual package.
Java_com_example_rtmp_live_LivePusher_native_1start(JNIEnv *env, jobject thiz, jstring path_) {
    // Ignore repeated start requests while a session is already running.
    if (isStart) {
        return;
    }
    const char *path = env->GetStringUTFChars(path_, 0);
    // Copy the URL: the worker thread outlives this JNI call, and the JVM
    // chars are released below.
    char *url = new char[strlen(path) + 1];
    strcpy(url, path);
    isStart = 1;
    // start() connects to the server and drains the packet queue.
    pthread_create(&pid, 0, start, url);
    env->ReleaseStringUTFChars(path_, path);
}
编码并发送sps和pps以及I帧:
extern "C"
JNIEXPORT void JNICALL
// BUG FIX: symbol renamed from Java_com_example_push_... to match the Java
// package com.example.rtmp.live (consistent with native_1init above).
Java_com_example_rtmp_live_LivePusher_native_1pushVideo(JNIEnv *env, jobject thiz, jbyteArray data_) {
    // Drop frames until the encoder exists and the RTMP link is up.
    if (!videoChannel || !readyPushing) {
        return;
    }
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    videoChannel->encodeData(data);
    env->ReleaseByteArrayElements(data_, data, 0);
}
// Encode one NV21 camera frame and queue the resulting NAL units as RTMP
// packets. The mutex guards against a concurrent encoder re-open in
// setVideoEncInfo.
void VideoChannel::encodeData(int8_t *data) {
    pthread_mutex_lock(&mutex);
    // Y plane copies straight across (NV21 and I420 share the Y layout).
    memcpy(pic_in->img.plane[0], data, ySize);
    // NV21 stores interleaved V/U pairs after the Y plane; de-interleave into
    // the separate U (plane[1]) and V (plane[2]) planes x264 expects.
    for (int i = 0; i < uvSize; ++i) {
        *(pic_in->img.plane[1] + i) = *(data + ySize + i * 2 + 1); // U
        *(pic_in->img.plane[2] + i) = *(data + ySize + i * 2);     // V
    }
    x264_nal_t *pp_nal; // encoded NAL units
    int pi_nal;         // how many NAL units were produced
    x264_picture_t pic_out;
    x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out);
    // BUG FIX: the lengths were uninitialized; a PPS arriving without a
    // preceding SPS in the same batch would memcpy with a garbage length.
    int sps_len = 0;
    int pps_len = 0;
    uint8_t sps[100];
    uint8_t pps[100];
    for (int i = 0; i < pi_nal; ++i) {
        if (pp_nal[i].i_type == NAL_SPS) {
            // Strip the 4-byte Annex-B start code 00 00 00 01.
            sps_len = pp_nal[i].i_payload - 4;
            memcpy(sps, pp_nal[i].p_payload + 4, sps_len);
        } else if (pp_nal[i].i_type == NAL_PPS) {
            pps_len = pp_nal[i].i_payload - 4;
            memcpy(pps, pp_nal[i].p_payload + 4, pps_len);
            // x264 emits PPS right after SPS, so both are ready here; the
            // guard only protects against a pathological lone PPS.
            if (sps_len > 0) {
                sendSpsPps(sps, pps, sps_len, pps_len);
            }
        } else {
            sendFrame(pp_nal[i].i_type, pp_nal[i].p_payload, pp_nal[i].i_payload);
        }
    }
    pthread_mutex_unlock(&mutex);
}
发送sps和pps:
// Build and queue the AVC "sequence header" RTMP packet: a 5-byte FLV video
// tag header followed by an AVCDecoderConfigurationRecord carrying the
// SPS and PPS. sps/pps are the raw NAL bytes with start codes stripped.
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
    // Layout (per the FLV/AVC tables): 13 fixed bytes before the sps data,
    // then 3 fixed bytes before the pps data.
    int bodySize = 13 + sps_len + 3 + pps_len;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);
    int i = 0;
    // 0x17 = keyframe (1) | AVC codec id (7)
    packet->m_body[i++] = 0x17;
    // AVC packet type 0 = sequence header (sps/pps)
    packet->m_body[i++] = 0x00;
    // composition time 0x000000
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    // configurationVersion = 1
    packet->m_body[i++] = 0x01;
    // profile / compatibility / level, copied from the sps itself
    packet->m_body[i++] = sps[1];
    packet->m_body[i++] = sps[2];
    packet->m_body[i++] = sps[3];
    // lengthSizeMinusOne: NAL lengths are 4 bytes
    packet->m_body[i++] = 0xFF;
    // 0xE1: exactly one sps follows
    packet->m_body[i++] = 0xE1;
    // 16-bit big-endian sps length, then the sps bytes
    packet->m_body[i++] = (sps_len >> 8) & 0xff;
    packet->m_body[i++] = sps_len & 0xff;
    memcpy(&packet->m_body[i], sps, sps_len);
    i += sps_len;
    // one pps: 16-bit big-endian length, then the pps bytes
    packet->m_body[i++] = 0x01;
    packet->m_body[i++] = (pps_len >> 8) & 0xff;
    packet->m_body[i++] = (pps_len) & 0xff;
    memcpy(&packet->m_body[i], pps, pps_len);
    // Video packet, on a channel rtmp.c does not use internally.
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = bodySize;
    packet->m_nChannel = 10;
    // The sequence header carries no timestamp.
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    // Hand off to the queue; the network thread sends it.
    videoCallback(packet);
}
发送帧信息:
// Build and queue one RTMP video packet for a single NAL unit (IDR or
// non-IDR slice). payload points at the Annex-B NAL including its start code.
void VideoChannel::sendFrame(int type, uint8_t *payload, int i_payload) {
    // Strip the Annex-B start code: 00 00 00 01 (4 bytes) or 00 00 01 (3 bytes).
    if (payload[2] == 0x00) {
        i_payload -= 4;
        payload += 4;
    } else {
        i_payload -= 3;
        payload += 3;
    }
    // 9 header bytes (FLV video tag + AVC NALU header) + the raw NAL payload.
    int bodySize = 9 + i_payload;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);
    // frame type | codec id: 0x27 = inter frame / AVC, 0x17 = keyframe / AVC
    packet->m_body[0] = 0x27;
    if(type == NAL_SLICE_IDR){
        packet->m_body[0] = 0x17;
        LOGE("关键帧");
    }
    // AVC packet type 1 = NALU
    packet->m_body[1] = 0x01;
    // composition time 0x000000
    packet->m_body[2] = 0x00;
    packet->m_body[3] = 0x00;
    packet->m_body[4] = 0x00;
    // 4-byte big-endian NAL length
    packet->m_body[5] = (i_payload >> 24) & 0xff;
    packet->m_body[6] = (i_payload >> 16) & 0xff;
    packet->m_body[7] = (i_payload >> 8) & 0xff;
    packet->m_body[8] = (i_payload) & 0xff;
    // the NAL unit itself
    memcpy(&packet->m_body[9], payload, i_payload);
    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = bodySize;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nChannel = 0x10;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    // Hand off to the queue; the network thread sends it.
    videoCallback(packet);
}
-
链接:https://pan.baidu.com/s/101sPDxMVJd9XeC7JWsmANw
提取码:bra3
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)