Android faac + x264 + rtmp + Camera2 streaming to a Bilibili live room

Submitted by 浪子不回头ぞ on 2021-01-30 01:43:39
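The pipeline the code below implements: Camera2 delivers YUV_420_888 preview frames, which are repacked into I420, rotated, and encoded with libx264; AudioRecord delivers 16-bit PCM, which is encoded with libfaac; both encoders wrap their output into RTMPPackets on a shared SafeQueue, and a worker thread drains the queue with RTMP_SendPacket toward the Bilibili ingest URL. The sources follow in order: the Activity, the Camera2 wrapper, the audio recorder, the native JNI/RTMP layer, and the CMake build scripts.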
package com.zzm.play.x264;

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.TextureView;
import android.view.View;

import com.zzm.play.R;
import com.zzm.play.utils.PermissionUtil;

public class MyActivity extends AppCompatActivity {


    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);


        setContentView(R.layout.x264_camera2_layout);

        init();

        doSomeThing();

    }

    private Camera2Tool camera2Tool;

    private TextureView textureView;

    private void init() {

        textureView = findViewById(R.id.texture_view);

        camera2Tool = new Camera2Tool(this);


    }

    private void doSomeThing() {

        PermissionUtil.checkPermission(this);

        camera2Tool.setDisplay(textureView);

    }

    @Override
    protected void onDestroy() {
        if (camera2Tool != null) {
            camera2Tool.release();
            camera2Tool = null;
        }
        super.onDestroy();
    }
}
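Camera2Tool is where most of the work happens: it finds the back camera, picks a matching preview size, mirrors the preview into an ImageReader, converts each YUV_420_888 frame to I420 and hands it to the native x264 encoder.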
package com.zzm.play.x264;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;

import com.zzm.play.utils.FileUtil;
import com.zzm.play.utils.YUVUtil;
import com.zzm.play.utils.l;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import static android.hardware.camera2.CameraDevice.*;

public class Camera2Tool {

    static {

        System.loadLibrary("x264_camera2");

    }

    private Context c;

    private AudioRecorderTool audioRecorderTool;

    public Camera2Tool(Context c) {

        this.c = c;

        audioRecorderTool = new AudioRecorderTool(44100, 1);

    }


    private TextureView textureView;

    public void setDisplay(TextureView textureView) {
        this.textureView = textureView;
        textureView.setSurfaceTextureListener(new MySurfaceTextureListener());
    }

    class MySurfaceTextureListener implements TextureView.SurfaceTextureListener {
        @Override
        public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {

            l.i("onSurfaceTextureAvailable  width  height:" + "(" + width + " , " + height + ")");

            surfaceTexture = surface;

            previewViewSize = new Size(width, height);

            try {
                openCamera();
            } catch (Exception e) {
                l.i(e.toString());
                e.printStackTrace();
            }
        }

        @Override
        public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {

            l.i("onSurfaceTextureSizeChanged");

        }

        @Override
        public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {

            l.i("onSurfaceTextureDestroyed");

            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {

            //l.i("onSurfaceTextureUpdated");

        }
    }

    private Handler camera2Handle;
    private HandlerThread camera2HandleThread;
    private CameraManager cameraManager;
    private SurfaceTexture surfaceTexture;
    // size of the TextureView
    private Size previewViewSize;
    // the best matching preview size the camera supports
    private Size suitablePreviewSize;
    private int sensorOrientation;

    @SuppressLint("MissingPermission")
    public void openCamera() throws CameraAccessException {

        //testX264("hello x264");

        cameraManager = (CameraManager) c.getSystemService(Context.CAMERA_SERVICE);

        String[] cameraIds = cameraManager.getCameraIdList();
        for (String cameraId : cameraIds) {
            l.i("have camera id : " + cameraId);
        }

        String backMainCameraId = "-1";
        CameraCharacteristics characteristics = null;
        // find the id of the main back-facing camera
        for (String cameraId : cameraIds) {
            characteristics = cameraManager.getCameraCharacteristics(cameraId);
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) {
                backMainCameraId = cameraId;
                l.i("backMainCameraId : " + backMainCameraId);
                break;
            }
        }
        if (null == backMainCameraId || backMainCameraId.equals("-1"))
            return;

        // the camera sensor is usually rotated 90°, so swap width and height
        sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        l.i("sensorOrientation : " + sensorOrientation);
        if (sensorOrientation == 90) {
            previewViewSize = new Size(previewViewSize.getHeight(), previewViewSize.getWidth());
        }

        // query the sizes the camera supports
        StreamConfigurationMap streamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size[] outputSizes = streamConfigurationMap.getOutputSizes(SurfaceTexture.class);
        for (Size outputSize : outputSizes) {
            l.i("StreamConfigurationMap获取的支持的尺寸 : " + outputSize.toString());
        }

        // pick the best matching supported size
        suitablePreviewSize = getSuitablePreviewSize(outputSizes, previewViewSize);

        // The follow-up camera work is heavy, so it runs on a worker thread. camera2Handle's
        // only job is to hand the camera an initialized Looper; that Looper is created inside
        // camera2HandleThread's run(), so the camera callbacks execute on that thread.
        camera2HandleThread = new HandlerThread("camera2-preview");
        camera2HandleThread.start();
        camera2Handle = new Handler(camera2HandleThread.getLooper(), null);
        cameraManager.openCamera(backMainCameraId, new MyStateCallBack(), camera2Handle);


        // initialize x264
        nativeX264EncodeInit();

        // connect to the server so we're ready to push
        nativeRtmpstart(url);

        // start recording audio
        audioRecorderTool.startRecording();


    }

    private Size getSuitablePreviewSize(Size[] outputSizes, Size previewViewSize) {

        int outWidth = 0;
        int outHeight = 0;

        int width = previewViewSize.getWidth();
        int height = previewViewSize.getHeight();

        // outputSizes is ordered from large to small; walk down until one fits
        for (Size outputSize : outputSizes) {

            outWidth = outputSize.getWidth();
            outHeight = outputSize.getHeight();

            if (outWidth == width && outHeight == height) {
                // exact match
                break;
            } else if (outWidth * height == outHeight * width && outWidth < width && outHeight < height) {
                // same aspect ratio (compared by cross-multiplication to avoid integer division)
                break;
            } else if (outWidth < width && outHeight < height) {
                // first size smaller than the view, i.e. the closest one below it
                break;
            }

        }

        Size size = new Size(outWidth, outHeight);
        l.i("best matching preview size : " + size.toString());
        return size;
    }

    private CameraDevice cameraDevice;

    class MyStateCallBack extends StateCallback {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {

            l.i("MyStateCallBack onOpened ");

            cameraDevice = camera;

            // open the x264 encoder; the sensor is rotated 90°, so feed it the swapped
            // dimensions (note: nothing opens the encoder for other orientations)
            if (sensorOrientation == 90) {
                nativeX264EncodeOpen(suitablePreviewSize.getHeight(),
                        suitablePreviewSize.getWidth(),
                        25, suitablePreviewSize.getWidth() * suitablePreviewSize.getHeight());
            }

            // create the preview session with the camera
            createCamera2PreviewSession();

        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            l.i("CameraDevice StateCallBack  onDisconnected");
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            l.i("CameraDevice StateCallBack  onError");
        }
    }


    private void createCamera2PreviewSession() {

        try {
            cameraDevice.createCaptureSession(getSurfaceList(), new MyStateCallBack1(), camera2Handle);
        } catch (CameraAccessException e) {
            l.i(e.toString());
            e.printStackTrace();
        }

    }

    private Surface previewSurface;
    private ImageReader imageReader;

    private List<Surface> getSurfaceList() {

        // the surfaces the preview data should be delivered to
        List<Surface> list = new ArrayList<>();

        // the preview view; match its buffer to the chosen preview size
        surfaceTexture.setDefaultBufferSize(suitablePreviewSize.getWidth(), suitablePreviewSize.getHeight());
        list.add(previewSurface = new Surface(surfaceTexture));

        // image reader
        imageReader = createImageReader();

        // the ImageReader surface that yields each frame's YUV data
        list.add(imageReader.getSurface());

        return list;

    }

    private ImageReader createImageReader() {

        // maxImages: the maximum number of images the user will want to
        // access simultaneously. This should be as small as possible to
        // limit memory use; once maxImages images are acquired, one of
        // them has to be closed before a new image becomes available.
        ImageReader imageReader = ImageReader.newInstance(suitablePreviewSize.getWidth(),
                suitablePreviewSize.getHeight(),
                ImageFormat.YUV_420_888,
                1);

        imageReader.setOnImageAvailableListener(new MyImageAvailableListener(), camera2Handle);

        return imageReader;
    }

    private CameraCaptureSession previewSession;

    class MyStateCallBack1 extends CameraCaptureSession.StateCallback {

        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {

            l.i("CameraCaptureSession.StateCallback  onConfigured");

            previewSession = session;

            // the session is configured; start previewing
            startPreview();

        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
            l.i("CameraCaptureSession.StateCallback  onConfigureFailed");
        }
    }

    private void startPreview() {

        try {

            createAndConfigurePreviewSessionRequest();

            configurePreviewSession();

        } catch (CameraAccessException e) {
            l.i(e.toString());
            e.printStackTrace();
        }


    }


    private CaptureRequest.Builder previewRequest;

    private void createAndConfigurePreviewSessionRequest() throws CameraAccessException {

        // TEMPLATE_PREVIEW: a request template meant for preview
        previewRequest = cameraDevice.createCaptureRequest(TEMPLATE_PREVIEW);
        // auto control mode (auto-focus, auto-exposure, ...)
        previewRequest.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        // AF_MODE_CONTINUOUS_PICTURE: fast continuous focus, meant for ZSL still capture;
        // once a scan converges, a trigger locks focus immediately; cancelling resumes the scan.
        //previewRequest.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        previewRequest.addTarget(previewSurface);
        previewRequest.addTarget(imageReader.getSurface());

    }

    private void configurePreviewSession() throws CameraAccessException {

        // repeating request: keep previewing rather than capturing a single frame
        previewSession.setRepeatingRequest(previewRequest.build(), null, camera2Handle);

    }


    class MyImageAvailableListener implements ImageReader.OnImageAvailableListener {

        @Override
        public void onImageAvailable(ImageReader reader) {

            Image image = reader.acquireNextImage();

            Image.Plane[] planes = image.getPlanes();

            // process the data in the planes
            dealPlanes(planes);

            image.close();
        }
    }

    private void dealPlanes(Image.Plane[] planes) {

        // planes.length is 3: Y, U and V
        //l.i("camera preview Image.Plane[] planes number : "+planes.length);

        Image.Plane y = planes[0];
        Image.Plane u = planes[1];
        Image.Plane v = planes[2];
        ByteBuffer yBuffer = y.getBuffer();
        ByteBuffer uBuffer = u.getBuffer();
        ByteBuffer vBuffer = v.getBuffer();

        //l.i("Y planes[0] length : "+yBuffer.remaining());
        //l.i("U planes[1] length : "+uBuffer.remaining());
        //l.i("V planes[2] length : "+vBuffer.remaining());


        int yLength = yBuffer.remaining();
        // convert YUV_420_888 to YU12 (planar I420); note this assumes
        // rowStride == width, otherwise row padding would also need skipping
        byte[] temp = new byte[yLength * 3 / 2];
        // fill Y
        yBuffer.get(temp, 0, yBuffer.remaining());
        // fill U
        // pixelStride 1: samples are packed; 2: one padding byte between samples
        int pixelStride = u.getPixelStride();
        int index = yLength;
        for (int i = 0; i < uBuffer.remaining(); i += pixelStride) {
            temp[index++] = uBuffer.get(i);
        }
        // fill V
        pixelStride = v.getPixelStride();
        for (int i = 0; i < vBuffer.remaining(); i += pixelStride) {
            temp[index++] = vBuffer.get(i);
        }

        if (sensorOrientation == 90) {
            temp = YUVUtil.YUV420BytesClockwise90Rotate(temp, suitablePreviewSize.getWidth(),
                    suitablePreviewSize.getHeight(), YUVUtil.YUV420p);
            //FileUtil.writeEncodeBytes(temp, System.currentTimeMillis() + ".yuv");
        }

        // send the YU12 data to x264
        nativeSendPreviewData(temp, temp.length);

    }

    public void release() {

        if (null != previewSession) {
            previewSession.close();
            previewSession = null;
        }

        if (null != cameraDevice) {
            cameraDevice.close();
            cameraDevice = null;
        }

        if (null != previewRequest) {
            previewRequest = null;
        }

        if (null != previewSurface) {
            previewSurface.release();
            previewSurface = null;
        }

        if (null != camera2Handle) {
            camera2Handle = null;
        }

        if (null != camera2HandleThread) {
            camera2HandleThread.quitSafely();
            camera2HandleThread = null;
        }

        if (null != imageReader) {
            imageReader.close();
            imageReader = null;
        }

        if (null != audioRecorderTool) {
            audioRecorderTool.releaseMe();
            audioRecorderTool = null;
        }

        nativeStop();

        nativeRelease();

    }

    // called back from JNI with the encoded H.264 data
    private void getDataFromJni(byte[] data) {

        l.i("getDataFromJni data length : " + data.length);
        //FileUtil.writeBytesTo16Chars(data, "xh264.h264");

    }

    private native void testX264(String a);

    // initialize
    private native void nativeX264EncodeInit();


    // open x264 and get ready to encode; needs the frame dimensions
    private native void nativeX264EncodeOpen(int width, int height, int fps, int bitRate);

    // send preview YUV data to x264 for encoding
    private native void nativeSendPreviewData(byte[] data, int dataLength);

    private final String url = "rtmp://live-push.bilivideo.com/live-bvc/?streamname=" +
            "live_479017059_73139358&key=99634cb1724d24da7f87777efe169339&schedule=rtmp";

    // start a worker thread that connects to the server, then drains the packet queue and sends
    private native void nativeRtmpstart(String url);

    // stop
    private native void nativeStop();

    // release
    private native void nativeRelease();

}
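AudioRecorderTool records 16-bit PCM on its own HandlerThread and pushes every buffer to the native FAAC encoder.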
package com.zzm.play.x264;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.HandlerThread;

import com.zzm.play.utils.l;

public class AudioRecorderTool {

    // sample rate
    private int sampleRate;
    // channel configuration
    private int channelConfig;
    // minimum buffer size needed to hold the PCM data
    private int minBufferSize;
    private AudioRecord audioRecord;
    // recording runs on a worker thread
    private HandlerThread handlerThread;
    private Handler handler;

    public AudioRecorderTool(int sampleRate, int channelCount) {

        this.sampleRate = sampleRate;

        channelConfig = channelCount == 2 ? AudioFormat.CHANNEL_IN_STEREO :
                AudioFormat.CHANNEL_IN_MONO;

        // returns -2 (ERROR_BAD_VALUE) if the hardware doesn't support these parameters
        minBufferSize = AudioRecord.getMinBufferSize(sampleRate,
                channelConfig,
                AudioFormat.ENCODING_PCM_16BIT);
        l.i("AudioRecord getMinBufferSize minBufferSize  " + minBufferSize);

        handlerThread = new HandlerThread("audio-record");
        handlerThread.start();
        handler = new Handler(handlerThread.getLooper());

        //init faac
        nativeFaacEncodeInit();

        // open faac; it reports how many bytes of PCM it wants per encode call
        int faacEncodeInputBufferSize = nativeFaacEncodeOpen(sampleRate, channelCount);
        pcmData = new byte[faacEncodeInputBufferSize];
        l.i("faac  get input BufferSize  " + faacEncodeInputBufferSize);
        // one buffer size comes from the hardware's minimum, the other from the
        // encoder's expected input size; use the larger one to be safe
        minBufferSize = Math.max(minBufferSize, faacEncodeInputBufferSize);

    }

    private byte[] pcmData;

    public void startRecording() {

        handler.post(() -> {

            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleRate,
                    channelConfig,
                    AudioFormat.ENCODING_PCM_16BIT,
                    minBufferSize);

            int readDataSize = -1;

            audioRecord.startRecording();

            while (null != audioRecord
                    && audioRecord.getRecordingState() ==
                    AudioRecord.RECORDSTATE_RECORDING) {

                // read PCM and hand it to faac for encoding
                readDataSize = audioRecord.read(pcmData, 0, pcmData.length);
                l.i("audio recorder record pcm data size : " + readDataSize);

                if (readDataSize > 0) {

                    sendAudioPcmData(pcmData, readDataSize);

                }
            }

        });

    }


    public void releaseMe() {

        if (null != handlerThread) {
            handlerThread.quitSafely();
            handlerThread = null;
        }

        if (null != handler) {
            handler = null;
        }

        if (null != audioRecord) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
        }

    }

    // initialize
    private native void nativeFaacEncodeInit();

    private native int nativeFaacEncodeOpen(int sampleRate, int channelCount);

    private native void sendAudioPcmData(byte[] pcmData, int dataLength);
}
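Now the native layer (file names below are inferred from the #include lines). First the JNI entry points and the RTMP sender thread: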
//
// Created by Admin on 2021/1/27.
//

#include "x264_video_encode.h"
#include <jni.h>
#include "pthread.h"
#include "string"
#include "safe_queue.h"
#include "faac_audio_encode.h"
// rtmp.h is a plain C header, so give it C linkage
extern "C" {
#include "rtmp.h"
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_testX264(JNIEnv *env, jobject thiz, jstring a) {

    const char *hello = env->GetStringUTFChars(a, 0);

    log("x264 test : %s", hello);
//    x264_param_t x264Param;
//    x264_t *x264 = x264_encoder_open(&x264Param);
//    if (x264) {
//        LOGI("x264_encoder_open success");
//    } else {
//        LOGI("x264_encoder_open failed");
//    }

    env->ReleaseStringUTFChars(a, hello);
}


void *threadRun(void *url);

void releasePacket(RTMPPacket *pPacket);

// the global x264 encoder
X264VideoEncode *x264VideoEncode = nullptr;

FAACAudioEncode *faacAudioEncode = nullptr;

RTMP *rtmp = nullptr;

SafeQueue<RTMPPacket *> packetQueue;

RTMPPacket *videoPacket = nullptr;

// whether a connection attempt has already been made
bool alreadyTryConnect = false;

// whether streaming may proceed
bool readyPushing = false;

// worker thread handle
pthread_t pThread;

// stream start time
uint32_t startTime;

// JNI_OnLoad below is called by System.loadLibrary()
CallbackFromJniUtil *callbackFromJniUtil = nullptr;
JavaVM *vm = nullptr;

JNIEXPORT jint JNI_OnLoad(JavaVM *vm_, void *reserved) {
    vm = vm_;
    return JNI_VERSION_1_6;
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_nativeX264EncodeInit(JNIEnv *env, jobject thiz) {

    x264VideoEncode = new X264VideoEncode;

    callbackFromJniUtil = new CallbackFromJniUtil(vm, env);

    callbackFromJniUtil->setClazz(thiz);

    x264VideoEncode->setCallbackFromJniUtil(callbackFromJniUtil);

}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_AudioRecorderTool_nativeFaacEncodeInit(JNIEnv *env, jobject thiz) {

    faacAudioEncode = new FAACAudioEncode;
//
//    char *id = (char *) calloc(1, 100);
//    char *copyRight = (char *) calloc(1, 100);
//    faacAudioEncode->getFaacVersion(id, copyRight);
//    log("FAAC version id : %s  copyRight : %s", id, copyRight);
//    free(id);
//    free(copyRight);
//    id = nullptr;
//    copyRight = nullptr;

}

extern "C"
JNIEXPORT jint JNICALL
Java_com_zzm_play_x264_AudioRecorderTool_nativeFaacEncodeOpen(JNIEnv *env, jobject thiz,
                                                              jint sample_rate,
                                                              jint channel_count) {
    if (!faacAudioEncode)
        return -1;

    faacAudioEncode->openFaacEncode(sample_rate, channel_count);

    return faacAudioEncode->getInputDataSize();
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_nativeX264EncodeOpen(JNIEnv *env, jobject thiz, jint width,
                                                        jint height, jint fps, jint bit_rate) {

    if (x264VideoEncode) {

        int ret = x264VideoEncode->x264initAndOpen(width, height, fps, bit_rate);

        log("x264initAndOpen ret : %d", ret);
        log("x264initAndOpen width height fps bitRate : %d,%d,%d,%d", width, height, fps, bit_rate);
    }

}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_nativeRtmpstart(JNIEnv *env, jobject thiz, jstring url_) {

    // if a connection attempt was already made, don't connect again
    if (alreadyTryConnect) {
        return;
    }

    const char *url = env->GetStringUTFChars(url_, 0);
    // the url is used later on the worker thread, so copy it
    // (allocate strlen + 1 so the trailing '\0' fits)
    char *url__ = new char[strlen(url) + 1];
    strcpy(url__, url);

    alreadyTryConnect = true;
    // spawn the worker thread
    pthread_create(&pThread, 0, threadRun, url__);
    //threadRun(url__);
    env->ReleaseStringUTFChars(url_, url);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_nativeSendPreviewData(JNIEnv *env, jobject thiz,
                                                         jbyteArray data_,
                                                         jint data_length) {
    log("java层接受到的推流原始数据length : %d", data_length);


    // hand the frame to the x264 encoder
    if (!readyPushing || !x264VideoEncode) {
        return;
    }

    jbyte *data = env->GetByteArrayElements(data_, 0);


    x264VideoEncode->encodeData(data);

    env->ReleaseByteArrayElements(data_, data, 0);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_AudioRecorderTool_sendAudioPcmData(JNIEnv *env, jobject thiz,
                                                          jbyteArray pcm_data, jint data_length) {

    jbyte *pcmData = env->GetByteArrayElements(pcm_data, 0);

    faacAudioEncode->encode((int32_t *) pcmData, data_length);

    env->ReleaseByteArrayElements(pcm_data, pcmData, 0);

}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_nativeStop(JNIEnv *env, jobject thiz) {

    alreadyTryConnect = false;

    readyPushing = false;

    startTime = 0;

}

extern "C"
JNIEXPORT void JNICALL
Java_com_zzm_play_x264_Camera2Tool_nativeRelease(JNIEnv *env, jobject thiz) {

    if (rtmp) {
        log("rtmp close release");
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
        rtmp = nullptr;
    }

    if (x264VideoEncode) {
        delete x264VideoEncode;
        //析构函数里面自己释放x264编码器
        x264VideoEncode = nullptr;
    }

    if (faacAudioEncode) {
        delete faacAudioEncode;
        faacAudioEncode = nullptr;
    }

    if (callbackFromJniUtil) {
        delete callbackFromJniUtil;
        callbackFromJniUtil = nullptr;
    }


    alreadyTryConnect = false;

    readyPushing = false;

    startTime = 0;


}


// connect to the server, then loop pulling encoded packets off the queue
void *threadRun(void *url_) {

    log("threadRun");

    char *url = (char *) url_;
    int ret = 0;
    do {

        rtmp = RTMP_Alloc();

        if (!rtmp) {
            log("创建rtmp失败");
            break;
        }
        RTMP_Init(rtmp);

        // connection timeout in seconds
        rtmp->Link.timeout = 10;

        // set the url
        ret = RTMP_SetupURL(rtmp, url);
        log("rtmp url: %s", url);
        if (!ret) {
            log("RTMP_SetupURL failed");
            break;
        }

        // enable write (publish) mode, then connect
        RTMP_EnableWrite(rtmp);
        ret = RTMP_Connect(rtmp, 0);
        if (!ret) {
            log("RTMP_Connect failed");
            break;
        }
        // connect the data stream
        ret = RTMP_ConnectStream(rtmp, 0);
        if (!ret) {
            log("RTMP_ConnectStream failed");
            break;
        }

        log("rtmp connected!");

        x264VideoEncode->setRtmp(rtmp);
        faacAudioEncode->setRtmp(rtmp);

        // encoded packets may now be pulled from the queue and pushed
        readyPushing = true;

        // time when streaming started
        startTime = RTMP_GetTime();

        // put the queue to work so the encoders can feed it
        packetQueue.setWork(1);

        x264VideoEncode->setQueue(&packetQueue);
        faacAudioEncode->setQueue(&packetQueue);
        faacAudioEncode->queueAccDataHeader();

        while (alreadyTryConnect) {

            packetQueue.pop(videoPacket);
            if (!alreadyTryConnect) {
                break;
            }
            if (!videoPacket) {
                break;
            }

            // send it
            log("pulled a packet from the queue and sending, body size:%d packet type:%d", videoPacket->m_nBodySize,
                videoPacket->m_packetType);
            ret = RTMP_SendPacket(rtmp, videoPacket, 1);

            log("RTMP_SendPacket(rtmp, videoPacket, 1) ret :%d  ", ret);
            // release the packet once it has been sent
            releasePacket(videoPacket);
            videoPacket = nullptr;
        }

        releasePacket(videoPacket);

    } while (0);

    if (rtmp) {
        log("rtmp close release");
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
        rtmp = nullptr;
    }

    // free the url copy made in nativeRtmpstart
    delete[] url;

    return nullptr;

}

void releasePacket(RTMPPacket *pPacket) {

    if (pPacket) {
        RTMPPacket_Free(pPacket);
        delete pPacket;
        pPacket = nullptr;
    }

}
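call_back_from_jni_util.cpp attaches the calling native thread to the JVM when necessary and invokes getDataFromJni on the Java object: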
//
// Created by Admin on 2021/1/28.
//
#include "call_back_from_jni_util.h"

CallbackFromJniUtil::CallbackFromJniUtil(JavaVM *javaVm_, JNIEnv *jniEnv_) {
    javaVm = javaVm_;
    jniEnv = jniEnv_;
}

void CallbackFromJniUtil::setClazz(jobject &clazz_) {

    clazz = jniEnv->NewGlobalRef(clazz_);

}

void CallbackFromJniUtil::x264EncodeDataCallback(char *javaMethodName,
                                                 char *methodSig,
                                                 char *data,
                                                 int dataLength) {

    int attachedHere = 0; // know if detaching at the end is necessary
    JNIEnv *env_;
    jint ret = javaVm->GetEnv((void **) &env_, JNI_VERSION_1_6);

    if (JNI_EDETACHED == ret) {
        // Supported but not attached yet, needs to call AttachCurrentThread
        ret = javaVm->AttachCurrentThread(&env_, 0);
        if (JNI_OK == ret) {
            attachedHere = 1;
        } else {
            // Failed to attach, cancel
            return;
        }
    } else if (JNI_OK == ret) {
        // Current thread already attached, do not attach 'again' (just to save the attachedHere flag)
        // We make sure to keep attachedHere = 0
    } else {
        // JNI_EVERSION, specified version is not supported cancel this..
        return;
    }

    // Run the callback through the attached env
    jclass clazz1 = env_->GetObjectClass(clazz);
    jmethodID methodId = env_->GetMethodID(clazz1, javaMethodName, methodSig);
    jbyteArray array = env_->NewByteArray(dataLength);
    env_->SetByteArrayRegion(array, 0, dataLength, (jbyte *) data);
    env_->CallVoidMethod(clazz, methodId, array);
    // drop the local refs; this fires once per encoded frame
    env_->DeleteLocalRef(array);
    env_->DeleteLocalRef(clazz1);

    if (attachedHere) { // Key check
        javaVm->DetachCurrentThread(); // Done only when attachment was done here
    }

}
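faac_audio_encode.cpp wraps libfaac: it encodes PCM into raw AAC and queues the RTMP audio packets, including the AAC sequence header: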
//
// Created by Admin on 2021/1/29.
//
#include <malloc.h>
#include <jni.h>
#include "faac_audio_encode.h"


FAACAudioEncode::FAACAudioEncode() {

}

void FAACAudioEncode::getFaacVersion(char *id, char *copyRight) {

    int ret = faacEncGetVersion(&id, &copyRight);

    log("faacEncGetVersion ret : %d", ret);

}


void FAACAudioEncode::openFaacEncode(int sampleRate, int channelCount) {

    // how many input samples the encoder wants per call (determined by channel count, sample rate and sample size)
    unsigned long inputSamples;

    // open the encoder
    audioEncoder = faacEncOpen(sampleRate, channelCount,
                               &inputSamples, &encodedDataMaxBytes);
    log("openFaacEncode inputSamples : %d encodedDataMaxBytes : %d ", inputSamples,
        encodedDataMaxBytes);

    // allocate the buffer that will receive the encoded output
    encodedData = (unsigned char *) calloc(1, encodedDataMaxBytes);

    // inputSamples counts samples; at 16 bits each, the Java side needs
    // inputSamples * 2 bytes of PCM per encode call
    inputDataSize = inputSamples * 2;

    // fetch the current encoder configuration
    faacEncConfigurationPtr config = faacEncGetCurrentConfiguration(audioEncoder);

    // object type (profile): AAC-LC
    config->aacObjectType = LOW;
    // MPEG2 is mostly used on DVDs; MPEG4 is what mobile video uses
    config->mpegVersion = MPEG4;

    // Bitstream output format (0 = raw; 1 = ADTS with per-frame headers). RTMP wants raw.
    config->outputFormat = 0;

    // 16-bit input samples
    config->inputFormat = FAAC_INPUT_16BIT;

    // apply the configuration
    faacEncSetConfiguration(audioEncoder, config);

}


int FAACAudioEncode::getInputDataSize() {

    return inputDataSize;

}

void FAACAudioEncode::encode(int32_t *pcmData, jint dataLength) {

    log("audio encoder state :%d ", audioEncoder);

    if (!audioEncoder) {
        return;
    }

    // faacEncEncode expects the input length in samples, not bytes (16-bit PCM = 2 bytes/sample)
    int aacDataLength = faacEncEncode(audioEncoder, pcmData, dataLength / 2, encodedData,
                                      encodedDataMaxBytes);

    log("faac encoded data length : %d", aacDataLength);
    if (aacDataLength > 0) {

        if (startTime == 0) {
            startTime = RTMP_GetTime();
        }

        //queue aac packet
        queueAacData(encodedData, aacDataLength);
    }
}

void FAACAudioEncode::queueAacData(unsigned char *data, int dataLength) {

    if (rtmp && packetQueue) {

        log("queueAacData dataLength:%d", dataLength);

        // two extra leading bytes: 0xaf 0x00 marks the AAC sequence header,
        // 0xaf 0x01 marks a raw AAC data frame
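        // The leading byte is the FLV AudioTagHeader:
        //   0xaf = soundFormat 10 (AAC) << 4 | soundRate 3 (44 kHz) << 2
        //        | soundSize 1 (16-bit) << 1 | soundType 1
        // (for AAC, FLV fixes the rate/type bits at 3 and 1 regardless of the actual stream)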
        int packetBodySize = dataLength + 2;

        RTMPPacket *aacPacket = new RTMPPacket;
        RTMPPacket_Alloc(aacPacket, packetBodySize);
        RTMPPacket_Reset(aacPacket);
        // fill in the seven basic packet fields

        aacPacket->m_packetType = RTMP_PACKET_TYPE_AUDIO;
        aacPacket->m_nBodySize = packetBodySize;
        // channel id; audio and video packets must not share a channel
        aacPacket->m_nChannel = 11;
        aacPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
        aacPacket->m_hasAbsTimestamp = 0;
        aacPacket->m_nTimeStamp = RTMP_GetTime() - startTime;
        aacPacket->m_nInfoField2 = rtmp->m_stream_id;

        aacPacket->m_body[0] = 0xaf;
        aacPacket->m_body[1] = 0x01;

        memcpy(&aacPacket->m_body[2], data, dataLength);


        packetQueue->push(aacPacket);

    }

}


void FAACAudioEncode::setQueue(SafeQueue<RTMPPacket *> *packetQueue) {
    this->packetQueue = packetQueue;
}

void FAACAudioEncode::setRtmp(RTMP *rtmp) {
    this->rtmp = rtmp;
}

void FAACAudioEncode::queueAccDataHeader() {

    if (rtmp && packetQueue && audioEncoder) {

        if (startTime == 0) {
            startTime = RTMP_GetTime();
        }

//        u_long aacHeaderDataLength = 2;
//        u_char aacHeaderData[2] = {0x12, 0x08};
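        // (Those hardcoded bytes decode as an AudioSpecificConfig: 0x12 0x08 ->
        //  audioObjectType 2 (AAC-LC), samplingFrequencyIndex 4 (44100 Hz),
        //  channelConfiguration 1 (mono) - the same values the call below
        //  produces for the parameters used in this post.)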

        u_long aacHeaderDataLength;
        u_char *aacHeaderData;
        int ret = faacEncGetDecoderSpecificInfo(audioEncoder, &aacHeaderData, &aacHeaderDataLength);
        log("queueAccDataHeader get header data ret : %d  aac header data length: %lu", ret,
            aacHeaderDataLength);


        int packetBodySize = aacHeaderDataLength + 2;
        RTMPPacket *aacHeaderPacket = new RTMPPacket;
        RTMPPacket_Alloc(aacHeaderPacket, packetBodySize);
        RTMPPacket_Reset(aacHeaderPacket);

        // fill in the seven basic packet fields

        aacHeaderPacket->m_packetType = RTMP_PACKET_TYPE_AUDIO;
        aacHeaderPacket->m_nBodySize = packetBodySize;
        // channel id; audio and video packets must not share a channel
        aacHeaderPacket->m_nChannel = 11;
        aacHeaderPacket->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
        aacHeaderPacket->m_hasAbsTimestamp = 0;
        aacHeaderPacket->m_nTimeStamp = RTMP_GetTime() - startTime;
        aacHeaderPacket->m_nInfoField2 = rtmp->m_stream_id;

        aacHeaderPacket->m_body[0] = 0xaf;
        aacHeaderPacket->m_body[1] = 0x00;

        memcpy(&aacHeaderPacket->m_body[2], aacHeaderData, aacHeaderDataLength);


        packetQueue->push(aacHeaderPacket);

    }

}

FAACAudioEncode::~FAACAudioEncode() {

    if (packetQueue) {
        packetQueue = nullptr;
    }

    if (rtmp) {
        rtmp = nullptr;
    }

    if (audioEncoder) {
        faacEncClose(audioEncoder);
        audioEncoder = nullptr;
    }

}
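x264_video_encode.cpp wraps libx264: it configures the encoder and packs the SPS/PPS pair and each frame NALU into RTMP packets: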
//
// Created by Admin on 2021/1/27.
//

#include "x264_video_encode.h"

int X264VideoEncode::x264initAndOpen(int w, int h, int fps, int bitRate) {

    // initialize the x264 params and open the encoder

    width = w;
    height = h;
    ySize = w * h;
    uvSize = ySize / 4;
    this->fps = fps;
    this->bitRate = bitRate;

    if (x264Encode) {
        x264_encoder_close(x264Encode);
        x264Encode = nullptr;
    }

    // the encoder's parameters
    x264_param_t param;
    // faster presets trade quality for speed, which suits live streaming; tune is similar
    // (x264_preset_names[3] is "faster", x264_tune_names[4] is "psnr")
    x264_param_default_preset(&param, x264_preset_names[3], x264_tune_names[4]);
    param.i_width = width;
    param.i_height = height;
    // encoding level
    /**
     *  10     (supports only QCIF format and below with 380160 samples/sec)
        11     (CIF and below. 768000 samples/sec)
        12     (CIF and below. 1536000 samples/sec)
        13     (CIF and below. 3041280 samples/sec)
        20     (CIF and below. 3041280 samples/sec)
        21     (Supports HHR formats. Enables Interlace support. 5068800 samples/sec)
        22     (Supports SD/4CIF formats. Enables Interlace support. 5184000 samples/sec)
        30     (Supports SD/4CIF formats. Enables Interlace support. 10368000 samples/sec)
        31     (Supports 720p HD format. Enables Interlace support. 27648000 samples/sec)
        32     (Supports SXGA format. Enables Interlace support. 55296000 samples/sec)
        40     (Supports 2Kx1K format. Enables Interlace support. 62914560 samples/sec)
        41      (Supports 2Kx1K format. Enables Interlace support. 62914560 samples/sec)
        42     (Supports 2Kx1K format. Frame coding only. 125829120 samples/sec)
        50     (Supports 3672x1536 format. Frame coding only. 150994944 samples/sec)
        51     (Supports 4096x2304 format. Frame coding only. 251658240 samples/sec)
     */
    param.i_level_idc = 32;
    // input data format: YU12 (planar: all Y, then U, then V)
    param.i_csp = X264_CSP_I420;
    // no B-frames (they add latency)
    param.i_bframe = 0;
    // average bitrate mode: a reasonable compromise for rate control
    param.rc.i_rc_method = X264_RC_ABR;
    // bitrate in kbit/s
    param.rc.i_bitrate = bitRate / 1024;
    // frame rate numerator
    param.i_fps_num = fps;
    // frame rate denominator
    param.i_fps_den = 1;
    // the timebase is the reciprocal of the frame rate
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    // don't use timestamps to derive frame spacing
    // VFR input.  If 1, use timebase and timestamps for ratecontrol purposes
    param.b_vfr_input = 0;
    // keyframe interval: one IDR every two seconds
    // Force an IDR keyframe at this interval
    param.i_keyint_max = fps * 2;
    // whether to put SPS/PPS in front of every keyframe
    // put SPS/PPS before each keyframe
    param.b_repeat_headers = 1;
    // encode multiple frames in parallel; single-threaded here
    param.i_threads = 1;

    // apply the profile
    // x264_profile_names selects the coding profile:
    /**
     *
     *      66 Baseline
            77 Main  x264_profile_names[1]
            88 Extended
            100    High (FRExt)
            110    High 10 (FRExt)
            122    High 4:2:2 (FRExt)
            144    High 4:4:4 (FRExt)
     *
     */
    x264_param_apply_profile(&param, x264_profile_names[1]);

    int ret = 0;
    // open the encoder
    x264Encode = x264_encoder_open(&param);

    // allocate the picture that wraps each raw input frame
    inputData = new x264_picture_t;
    ret = x264_picture_alloc(inputData, X264_CSP_I420, width, height);

    if (x264Encode && ret == 0) {
        ret = 1;
    }

    return ret;
}

void X264VideoEncode::encodeData(int8_t *data) {

    //copy y data
    memcpy(inputData->img.plane[0], data, ySize);

    //copy u data

    memcpy(inputData->img.plane[1], data + ySize, uvSize);

    //copy v data
    memcpy(inputData->img.plane[2], data + ySize + uvSize, uvSize);

    // number of NALUs produced (we feed one frame at a time)
    int naluNumber;

    // info about the encoded output frame
    x264_picture_t outputDataInfo;

    // the compressed output data
    x264_nal_t *outputData;

    int ret = x264_encoder_encode(x264Encode, &outputData, &naluNumber, inputData, &outputDataInfo);

    log("264_encoder_encode ret : %d ", ret);
    log("x264_encoder_encode naluNumber : %d  ", naluNumber);

    for (int i = 0; i < naluNumber; ++i) {
        log("x264_encoder_encode  : %d   nalu , nalu type : %d", i, outputData[i].i_type);
        // outputData[i].p_payload : the encoded data
        // outputData[i].i_payload : length of the encoded data
        // naluNumber: number of NALUs produced; the first call should yield 4 (SPS, PPS, SEI, I)
        //        callbackFromJniUtil->x264EncodeDataCallback(
        //                "getDataFromJni", "([B)V",
        //                (char *) outputData[i].p_payload,
        //                outputData[i].i_payload);
    }

    // wrap the output into RTMPPackets and push them onto the packet queue
    uint8_t spsData[100];
    uint8_t ppsData[100];
    int spsDataLength, ppsDataLength;

    if (naluNumber > 0) {

        if (startTime == 0) {
            startTime = RTMP_GetTime();
            log("开始推流的相对的时间 :%d", startTime);
        }

        int dataLength;
        uint8_t *data;
        int dataType;

        for (int i = 0; i < naluNumber; i++) {

            dataLength = outputData[i].i_payload;
            data = outputData[i].p_payload;
            dataType = outputData[i].i_type;

            // for an I-frame x264 first emits SPS and PPS; stash them and queue the pair
            if (NAL_SPS == dataType) {

                // strip the 4-byte Annex-B start code (00 00 00 01)
                dataLength -= 4;
                spsDataLength = dataLength;
                memcpy(spsData, data + 4, spsDataLength);
                continue;

            } else if (NAL_PPS == dataType) {

                dataLength -= 4;
                ppsDataLength = dataLength;
                memcpy(ppsData, data + 4, ppsDataLength);
                // the PPS arrives right after its SPS; queue the SPS/PPS config packet now
                queueSPSAndPPSPacket(spsData, ppsData, spsDataLength, ppsDataLength);
                continue;

            }


            queueFramePacket(data, dataLength);

        }
    }

}


void X264VideoEncode::setCallbackFromJniUtil(CallbackFromJniUtil *callbackFromJniUtil_) {
    callbackFromJniUtil = callbackFromJniUtil_;
}

void X264VideoEncode::setQueue(SafeQueue<RTMPPacket *> *packetQueue) {
    this->packetQueue = packetQueue;
}

void X264VideoEncode::setRtmp(RTMP *rtmp) {
    this->rtmp = rtmp;
}

void X264VideoEncode::queueSPSAndPPSPacket(uint8_t *spsData,
                                           uint8_t *ppsData,
                                           int spsDataLength,
                                           int ppsDataLength) {

    if (packetQueue && rtmp) {

        log("queueSPSAndPPSPacket spsDataLength:%d  ppsDataLength:%d",
            spsDataLength,
            ppsDataLength);

        //16->rtmp协议固定内容加上描述sps pps长度等信息共有16字节
        int packetBodySize = spsDataLength + ppsDataLength + 16;

        RTMPPacket *SPSAndPPSPacket = new RTMPPacket;
        // allocate the packet body
        RTMPPacket_Alloc(SPSAndPPSPacket, packetBodySize);
        RTMPPacket_Reset(SPSAndPPSPacket);

        // fill in the seven basic packet fields
        SPSAndPPSPacket->m_packetType = RTMP_PACKET_TYPE_VIDEO;
        SPSAndPPSPacket->m_nBodySize = packetBodySize;
        // channel id; audio and video packets must not share a channel
        SPSAndPPSPacket->m_nChannel = 10;
        SPSAndPPSPacket->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
        SPSAndPPSPacket->m_hasAbsTimestamp = 0;
        SPSAndPPSPacket->m_nTimeStamp = 0;
        SPSAndPPSPacket->m_nInfoField2 = rtmp->m_stream_id;

        // fill the body
        int i = 0;
        // five fixed bytes: FLV VideoTagHeader (keyframe + AVC) and the sequence header marker
        SPSAndPPSPacket->m_body[i++] = 0x17;
        SPSAndPPSPacket->m_body[i++] = 0x00;
        SPSAndPPSPacket->m_body[i++] = 0x00;
        SPSAndPPSPacket->m_body[i++] = 0x00;
        SPSAndPPSPacket->m_body[i++] = 0x00;
        // configurationVersion
        SPSAndPPSPacket->m_body[i++] = 0x01;
        // three bytes taken from the SPS: profile, compatibility, level
        SPSAndPPSPacket->m_body[i++] = spsData[1];
        SPSAndPPSPacket->m_body[i++] = spsData[2];
        SPSAndPPSPacket->m_body[i++] = spsData[3];
        // two fixed bytes: NALU length size (0xff -> 4 bytes) and SPS count (0xe1 -> 1)
        SPSAndPPSPacket->m_body[i++] = 0xff;
        SPSAndPPSPacket->m_body[i++] = 0xe1;
        // SPS length in two bytes, high byte first, no start code
        SPSAndPPSPacket->m_body[i++] = (spsDataLength >> 8) & 0xff;
        SPSAndPPSPacket->m_body[i++] = spsDataLength & 0xff;
        // copy the SPS payload
        memcpy(&SPSAndPPSPacket->m_body[i], spsData, spsDataLength);
        i += spsDataLength;
        // PPS count: 1
        SPSAndPPSPacket->m_body[i++] = 0x01;
        // PPS length in two bytes, high byte first, no start code
        SPSAndPPSPacket->m_body[i++] = (ppsDataLength >> 8) & 0xff;
        SPSAndPPSPacket->m_body[i++] = ppsDataLength & 0xff;
        // copy the PPS payload
        memcpy(&SPSAndPPSPacket->m_body[i], ppsData, ppsDataLength);


        packetQueue->push(SPSAndPPSPacket);

    }
}

void X264VideoEncode::queueFramePacket(uint8_t *data, int dataLength) {

    if (packetQueue && rtmp) {

        log("queueFramePacket dataLength:%d", dataLength);

        // strip the Annex-B start code
        if (data[2] == 0x00) {
            // 00 00 00 01 65 ...
            data += 4;
            dataLength -= 4;
        } else {
            // 00 00 01 65 ...
            data += 3;
            dataLength -= 3;
        }


        //9-> 5个固定字节内容 4个字节表示数据长度 从高位到低位
        int packetBodySize = dataLength + 9;

        RTMPPacket *framePacket = new RTMPPacket;
        RTMPPacket_Alloc(framePacket, packetBodySize);
        RTMPPacket_Reset(framePacket);

        // fill in the seven basic packet fields

        framePacket->m_packetType = RTMP_PACKET_TYPE_VIDEO;
        framePacket->m_nBodySize = packetBodySize;
        // channel id; must differ from audio's and must not be too large (e.g. 0xaa crashes)
        framePacket->m_nChannel = 10;
        framePacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
        framePacket->m_hasAbsTimestamp = 0;
        uint32_t time = RTMP_GetTime() - startTime;
        log("queueFramePacket m_nTimeStamp : %d", time);
        framePacket->m_nTimeStamp = time;
        framePacket->m_nInfoField2 = rtmp->m_stream_id;

        // fill the packet body

        // five fixed bytes; the first is the FLV VideoTagHeader:
        // 0x27 = inter frame (2 << 4) | AVC (7), 0x17 = keyframe (1 << 4) | AVC (7)
        framePacket->m_body[0] = 0x27;
        if (data[0] == 0x65) {
            // NALU type 0x65 is an IDR slice, i.e. a keyframe
            framePacket->m_body[0] = 0x17;
        }
        // AVCPacketType 1 = NALU data (0 would be the sequence header)
        framePacket->m_body[1] = 0x01;
        framePacket->m_body[2] = 0x00;
        framePacket->m_body[3] = 0x00;
        framePacket->m_body[4] = 0x00;
        // four bytes for the data length, big-endian, no start code
        framePacket->m_body[5] = (dataLength >> 24) & 0xff;
        framePacket->m_body[6] = (dataLength >> 16) & 0xff;
        framePacket->m_body[7] = (dataLength >> 8) & 0xff;
        framePacket->m_body[8] = dataLength & 0xff;
        // copy the frame data
        memcpy(&framePacket->m_body[9], data, dataLength);

        packetQueue->push(framePacket);
    }

}

X264VideoEncode::~X264VideoEncode() {

    if (x264Encode) {
        log("~X264VideoEncode");
        x264_encoder_close(x264Encode);
        x264Encode = nullptr;
    }
    if (inputData) {
        // free the picture allocated in x264initAndOpen
        x264_picture_clean(inputData);
        delete inputData;
        inputData = nullptr;
    }
    if (rtmp) {
        rtmp = nullptr;
    }
    if (packetQueue) {
        packetQueue->setWork(0);
        packetQueue->clear();
        packetQueue = nullptr;
    }
    if (callbackFromJniUtil) {
        callbackFromJniUtil = nullptr;
    }

}
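A small logging macro (presumably log.h) shared by all the native sources: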
#ifndef PLAY_ANDROID_LOG_H
#define PLAY_ANDROID_LOG_H

#include <android/log.h>

#define log(...) __android_log_print(ANDROID_LOG_INFO,"22m jni",__VA_ARGS__)
#endif //PLAY_ANDROID_LOG_H
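The CMakeLists.txt that builds the bundled librtmp sources: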
# disable SSL
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DNO_CRYPTO")
# collect the rtmp .c sources into a variable
file(GLOB rtmp_source *.c)
add_library(
        rtmp
        SHARED
        ${rtmp_source}
)
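Finally, the top-level CMakeLists.txt that imports the prebuilt libx264/libfaac, builds librtmp, and links everything into the x264_camera2 JNI library (the commented-out blocks are left over from earlier jpeg and hardware-encode experiments):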
cmake_minimum_required(VERSION 3.4.1)

######## jpeg compression
#include_directories( ${CMAKE_SOURCE_DIR}/head/jpeg)
#
## import turbojpeg
#set(LIB_DIR ../../../../libs)
#add_library( jpeg
#        SHARED
#        IMPORTED )
#set_target_properties( jpeg
#        PROPERTIES IMPORTED_LOCATION
#        ${LIB_DIR}/${ANDROID_ABI}/libjpeg.so )
#
## bitmap manipulation library
#find_library( lib-jnigraphics
#        jnigraphics)
#
#find_library(lib-log log)
##message("zemingzeng")
#
#add_library(play SHARED  ${CMAKE_SOURCE_DIR}/cpp/jpeg/Play.cpp)
#
#target_link_libraries(
#        play
#        jpeg
#        ${lib-jnigraphics}
#        ${lib-log}
#        )


################## hardware-encoded rtmp
# build sources in other directories through their own CMakeLists
#include_directories(${CMAKE_SOURCE_DIR}/head/rtmp)
#add_subdirectory(./cpp/rtmp)
#find_library(lib-log log)
#add_library(rtmp_live SHARED  ${CMAKE_SOURCE_DIR}/cpp/rtmp/RtmpLive.cpp)
#target_link_libraries(
#        rtmp_live
#        ${lib-log}
#        rtmp
#)


############################### software x264 encoding
include_directories(${CMAKE_SOURCE_DIR}/head/rtmp)
include_directories(${CMAKE_SOURCE_DIR}/head/x264)
include_directories(${CMAKE_SOURCE_DIR}/head/x264/faac)

add_subdirectory(./cpp/rtmp)

find_library(lib-log log)

add_library(x264
        SHARED
        IMPORTED)

set_target_properties(x264
        PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI}/libx264.so)

message(WARNING "zemingzeng")

add_library(faac
        SHARED
        IMPORTED)

set_target_properties(faac
        PROPERTIES IMPORTED_LOCATION
        ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI}/libfaac.so)

file(GLOB source_files ${CMAKE_SOURCE_DIR}/cpp/x264/*.cpp)

message(WARNING ${source_files})

add_library(x264_camera2 SHARED ${source_files})

target_link_libraries(
        x264_camera2
        x264
        faac
        rtmp
        ${lib-log}
)