Skip to content

Latest commit

 

History

History
1837 lines (1556 loc) · 47.9 KB

音视频框架.md

File metadata and controls

1837 lines (1556 loc) · 47.9 KB

音视频框架

Windows编译

#编译动态库添加宏定义LIBYUV_BUILDING_SHARED_LIBRARY和修改生成类型
# win32
cmake -S . -B ../build/win32 -G "Visual Studio 16 2019" -A win32
cmake --build ../build/win32 --clean-first --config release --target ALL_BUILD
# x64
cmake -S . -B ../build/x64 -G "Visual Studio 16 2019" -A x64
cmake --build ../build/x64 --clean-first --config release --target ALL_BUILD
libyuv使用例子
/*
 * @Author: gongluck 
 * @Date: 2021-06-08 10:34:48 
 * @Last Modified by: gongluck
 * @Last Modified time: 2022-06-28 11:32:10
 */

#include <iostream>
#include <fstream>
#include "libyuv.h"

int main(int argc, char *argv[])
{
	// libyuv demo: convert a raw I420 file to ABGR, frame by frame.
	// argv: [1]=input yuv file, [2]=width, [3]=height, [4]=output file.
	std::cout << "libyuv demo" << std::endl;
	std::cout << "Usage : "
			  << "thisfilename yuvfile width height outfile" << std::endl;
	if (argc < 5)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream yuvin(argv[1], std::ios::binary);
	if (yuvin.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	const int width = atoi(argv[2]);
	const int height = atoi(argv[3]);
	// I420 requires positive, even dimensions (chroma planes are w/2 x h/2).
	if (width <= 0 || height <= 0)
	{
		std::cerr << "invalid width or height." << std::endl;
		return -1;
	}
	std::ofstream outfile(argv[4], std::ios::binary);
	// The original never checked the output stream; a bad path silently
	// produced no output.
	if (outfile.fail())
	{
		std::cerr << "can not open file " << argv[4] << std::endl;
		return -1;
	}

	// One I420 frame: Y (w*h) + U (w*h/4) + V (w*h/4) bytes.
	const auto framesize = width * height * 3 / 2;
	uint8_t *indata = static_cast<uint8_t *>(malloc(framesize));
	// ABGR output is 4 bytes per pixel.
	uint8_t *outdata = static_cast<uint8_t *>(malloc(width * height * 4));
	if (indata == nullptr || outdata == nullptr)
	{
		std::cerr << "out of memory." << std::endl;
		free(indata);
		free(outdata);
		return -1;
	}
	// A trailing partial frame (short read) is intentionally ignored.
	while (yuvin.read(reinterpret_cast<char *>(indata), framesize))
	{
		// I420 plane layout: Y at offset 0, U at w*h, V at w*h*5/4;
		// luma stride = width, chroma stride = width/2.
		libyuv::I420ToABGR(indata, width,
			indata + width * height, width / 2,
			indata + width * height * 5 / 4, width / 2,
			outdata, width * 4,
			width, height);
		outfile.write(reinterpret_cast<char *>(outdata), width * height * 4);
	}
	free(indata);
	free(outdata);
	outfile.close();
	yuvin.close();

	return 0;
}

Windows编译

# 打开类Linux环境
# 切换到源码目录
#调试源码加上--enable-debug
#32位
CC=cl ./configure --prefix=../build/win32 --enable-shared --enable-static --host=mingw32
#64位
CC=cl ./configure --prefix=../build/x64 --enable-shared --enable-static --host=mingw64
make -j 16
make install
x264使用例子
/*
 * @Author: gongluck
 * @Date: 2020-09-02 23:40:40
 * @Last Modified by: gongluck
 * @Last Modified time: 2021-06-04 16:56:36
 */

#include <iostream>
#include <fstream>
#include "x264.h"

int main(int argc, char *argv[])
{
	// x264 demo: encode a raw I420 file to an Annex-B H.264 elementary stream.
	// argv: [1]=input yuv file, [2]=width, [3]=height, [4]=output .h264 file.
	std::cout << "x264 demo" << std::endl;
	std::cout << "Usage : "
			  << "thisfilename yuvfile width height outfile" << std::endl;
	if (argc < 5)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream yuvin(argv[1], std::ios::binary);
	if (yuvin.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	const int width = atoi(argv[2]);
	const int height = atoi(argv[3]);
	std::ofstream outfile(argv[4], std::ios::binary);
	if (outfile.fail())
	{
		std::cerr << "can not open file " << argv[4] << std::endl;
		return -1;
	}

	x264_param_t param = {0};
	x264_param_default(&param);
	// Start from the preset/tune defaults, then override individual fields.
	auto ret = x264_param_default_preset(&param, "ultrafast", "zerolatency");
	param.i_threads = 5;
	param.i_width = width;
	param.i_height = height;
	param.i_bframe = 3;
	param.i_fps_num = 23;
	param.i_fps_den = 1;
	// b_vfr_input = 0: frame spacing is derived from fps instead of
	// timebase/timestamps. b_vfr_input = 1 is the x264 default; the
	// zerolatency tune already sets it to 0.
	param.b_vfr_input = 0;
	param.i_keyint_max = 250;
	param.rc.i_rc_method = X264_RC_ABR;
	param.rc.i_bitrate = 1500;
	param.rc.i_vbv_max_bitrate = 2500;
	param.i_scenecut_threshold = 40;
	param.i_level_idc = 51;
	param.b_repeat_headers = 1;
	// Apply the profile LAST so it can clamp incompatible settings (baseline
	// forbids B-frames). The original applied it before the overrides above,
	// so i_bframe = 3 silently produced a non-baseline stream.
	ret = x264_param_apply_profile(&param, "baseline");
	x264_t *handle = x264_encoder_open(&param);
	if (handle == nullptr)
	{
		std::cerr << "x264_encoder_open failed." << std::endl;
		return -1;
	}

	x264_picture_t pic_in = {0};
	x264_picture_init(&pic_in);
	x264_picture_alloc(&pic_in, X264_CSP_I420, width, height);
	pic_in.img.i_csp = X264_CSP_I420;
	pic_in.img.i_plane = 3;

	x264_picture_t pic_out = {0};
	x264_picture_init(&pic_out);

	x264_nal_t *nal = nullptr;
	int i_nal = 0;

	// One I420 frame: Y (w*h) + U (w*h/4) + V (w*h/4) bytes.
	auto framesize = width * height * 3 / 2;
	uint8_t *data = static_cast<uint8_t *>(malloc(framesize));
	while (yuvin.read(reinterpret_cast<char *>(data), framesize))
	{
		memcpy(pic_in.img.plane[0], data, width * height);
		memcpy(pic_in.img.plane[1], data + width * height, width * height / 4);
		memcpy(pic_in.img.plane[2], data + width * height * 5 / 4, width * height / 4);

		pic_in.i_type = X264_TYPE_AUTO;

		std::cout << "++++++++++ipts : " << pic_in.i_pts++ << std::endl;
		// Returns the total encoded payload size in bytes (0 if the encoder
		// is still buffering, <0 on error).
		ret = x264_encoder_encode(handle, &nal, &i_nal, &pic_in, &pic_out);
		if (ret > 0)
		{
			std::cout << "----------opts : " << pic_out.i_pts << std::endl;
			// NAL payloads are contiguous, so one write from nal[0].p_payload
			// emits them all.
			outfile.write(reinterpret_cast<char *>(nal[0].p_payload), ret);
		}
	}

	// Flush delayed frames. The return value must be captured per iteration:
	// the original wrote the stale `ret` left over from the main loop,
	// emitting the wrong byte count for every flushed frame.
	while ((ret = x264_encoder_encode(handle, &nal, &i_nal, nullptr, &pic_out)) > 0)
	{
		std::cout << "----------opts : " << pic_out.i_pts << std::endl;
		outfile.write(reinterpret_cast<char *>(nal[0].p_payload), ret);
	}

	if (handle != nullptr)
	{
		x264_encoder_close(handle);
		handle = nullptr;
	}
	x264_picture_clean(&pic_in);
	free(data);

	outfile.close();
	yuvin.close();

	return 0;
}

Windows编译

# 打开类Linux环境
# 切换到源码目录
#调试源码加上DEBUGSYMBOLS=True
#32位
make OS=msvc ARCH=i386 clean
make OS=msvc ARCH=i386
#64位
make OS=msvc ARCH=x86_64 clean
make OS=msvc ARCH=x86_64
openh264使用例子
/*
 * @Author: gongluck 
 * @Date: 2021-05-26 11:51:14 
 * @Last Modified by: gongluck
 * @Last Modified time: 2021-05-26 21:18:54
 */

#include <iostream>
#include <fstream>
#include "codec_api.h"

int main(int argc, char *argv[])
{
	// openh264 demo: encodes a raw I420 file to ./out.h264, then reads that
	// file back, splits it on Annex-B start codes and decodes it to ./out.yuv.
	// argv: [1]=input yuv file, [2]=width, [3]=height.
	std::cout << "openh264 demo" << std::endl;
	std::cout << "Usage : "
			  << "thisfilename yuvfile width height" << std::endl;
	if (argc < 4)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream in(argv[1], std::ios::binary);
	if (in.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	int width = atoi(argv[2]);
	int height = atoi(argv[3]);
	std::ofstream out264("./out.h264", std::ios::binary);

	// ---- Encoding pass -------------------------------------------------
	// NOTE(review): return codes of WelsCreateSVCEncoder/InitializeExt are
	// stored but never checked; a failed create would crash on GetDefaultParams.
	ISVCEncoder *encoder = nullptr;
	int ret = WelsCreateSVCEncoder(&encoder);
	SEncParamExt eparam;
	encoder->GetDefaultParams(&eparam);
	eparam.iUsageType = CAMERA_VIDEO_REAL_TIME;
	eparam.fMaxFrameRate = 150;
	eparam.iPicWidth = width;
	eparam.iPicHeight = height;
	eparam.iTargetBitrate = 5000;
	eparam.iRCMode = RC_BITRATE_MODE;
	eparam.iTemporalLayerNum = 1;
	eparam.iSpatialLayerNum = 1;
	eparam.bEnableDenoise = false;
	eparam.bEnableBackgroundDetection = true;
	eparam.bEnableAdaptiveQuant = false;
	eparam.bEnableFrameSkip = false;
	eparam.bEnableLongTermReference = false;
	eparam.uiIntraPeriod = 15u;
	eparam.eSpsPpsIdStrategy = CONSTANT_ID;
	eparam.bPrefixNalAddingCtrl = false;
	// Single spatial layer mirroring the full picture size.
	eparam.sSpatialLayers[0].iVideoWidth = width;
	eparam.sSpatialLayers[0].iVideoHeight = height;
	eparam.sSpatialLayers[0].fFrameRate = 64;
	eparam.sSpatialLayers[0].iSpatialBitrate = 5000;
	eparam.sSpatialLayers[0].iMaxSpatialBitrate = eparam.iMaxBitrate;
	encoder->InitializeExt(&eparam);

	SFrameBSInfo einfo = {0};
	SSourcePicture pic = {0};
	pic.iPicWidth = eparam.iPicWidth;
	pic.iPicHeight = eparam.iPicHeight;
	pic.iColorFormat = videoFormatI420;
	// I420: luma stride = width, both chroma strides = width/2.
	pic.iStride[0] = pic.iPicWidth;
	pic.iStride[1] = pic.iStride[2] = pic.iPicWidth / 2;

	// One I420 frame: Y (w*h) + U (w*h/4) + V (w*h/4) bytes.
	auto framesize = width * height * 3 / 2;
	uint8_t *data = static_cast<uint8_t *>(malloc(framesize));
	while (in.read(reinterpret_cast<char *>(data), framesize))
	{
		// Point the picture planes into the single frame buffer.
		pic.pData[0] = data;
		pic.pData[1] = data + width * height;
		pic.pData[2] = data + width * height * 5 / 4;
		static int index = 0;
		// 41.667 ms per frame — presumably targeting ~24 fps; truncated to
		// the integer uiTimeStamp.
		pic.uiTimeStamp = index++ * 41.667;
		std::cout << "++++++++++ipts : " << pic.uiTimeStamp << std::endl;
		ret = encoder->EncodeFrame(&pic, &einfo);
		if (ret >= 0)
		{
			std::cout << "----------opts : " << einfo.uiTimeStamp << std::endl;
			// NOTE(review): writes only from sLayerInfo[0].pBsBuf for
			// iFrameSizeInBytes — assumes all layers' output is contiguous
			// from that pointer; confirm against codec_api.h.
			out264.write(reinterpret_cast<char *>(einfo.sLayerInfo[0].pBsBuf), einfo.iFrameSizeInBytes);
		}
	}
	free(data);

	WelsDestroySVCEncoder(encoder);
	encoder = nullptr;

	out264.close();
	in.close();

	///////////////////////////////////////////////////////////////

	// ---- Decoding pass -------------------------------------------------
	// Reopen the just-written .h264 and determine its total size.
	// NOTE(review): `in` hit EOF above; C++11 open() clears the stream state,
	// but an explicit in.clear() would be safer on older toolchains.
	in.open("./out.h264", std::ios::binary);
	in.seekg(0, std::ios_base::end);
	const int datalen = in.tellg();
	in.seekg(0, std::ios_base::beg);
	std::ofstream outyuv("./out.yuv", std::ios::binary);

	ISVCDecoder *decoder = nullptr;
	ret = WelsCreateDecoder(&decoder);
	SDecodingParam dparam = {0};
	dparam.sVideoProperty.size = sizeof(dparam.sVideoProperty);
	dparam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
	dparam.eEcActiveIdc = ERROR_CON_SLICE_COPY;
	dparam.uiTargetDqLayer = UCHAR_MAX;
	dparam.bParseOnly = false;
	ret = decoder->Initialize(&dparam);

	uint8_t *dst[3] = {0};
	data = static_cast<uint8_t *>(malloc(datalen));
	SBufferInfo dinfo = {0};
	// Load the whole stream into memory and carve it into NAL units by
	// scanning for the next 00 00 00 01 / 00 00 01 start code.
	in.read(reinterpret_cast<char *>(data), datalen);
	int curpos = 0;
	int slicesize = 0;
	while (curpos < datalen)
	{
		int i = 0;
		for (i = 0; i + curpos < datalen; i++)
		{
			// NOTE(review): the guard only bounds data[curpos + i]; the
			// comparisons below read up to 3 bytes further and may read past
			// the buffer end on the final NAL — confirm/fix upstream.
			if ((data[curpos + i] == 0 && data[curpos + i + 1] == 0 && data[curpos + i + 2] == 0 && data[curpos + i + 3] == 1 && i > 0) ||
				(data[curpos + i] == 0 && data[curpos + i + 1] == 0 && data[curpos + i + 2] == 1 && i > 0))
			{
				break;
			}
		}
		slicesize = i;

		// Feed one NAL (start code included); iBufferStatus == 1 means a
		// decoded picture is available in dst.
		ret = decoder->DecodeFrame2(data + curpos, slicesize, dst, &dinfo);
		if (ret >= 0 && dinfo.iBufferStatus == 1)
		{
			// Planes are written stride-wide, so the output may carry padding
			// if iStride[0] != display width.
			outyuv.write(reinterpret_cast<char *>(dst[0]), dinfo.UsrData.sSystemBuffer.iStride[0] * dinfo.UsrData.sSystemBuffer.iHeight);
			outyuv.write(reinterpret_cast<char *>(dst[1]), dinfo.UsrData.sSystemBuffer.iStride[1] * dinfo.UsrData.sSystemBuffer.iHeight / 2);
			outyuv.write(reinterpret_cast<char *>(dst[2]), dinfo.UsrData.sSystemBuffer.iStride[1] * dinfo.UsrData.sSystemBuffer.iHeight / 2);
			outyuv.flush();
		}
		curpos += slicesize;
	}

	//flush
	// Drain any frames still buffered inside the decoder.
	auto left = decoder->GetOption(DECODER_OPTION_NUM_OF_FRAMES_REMAINING_IN_BUFFER, nullptr);
	while (left-- > 0)
	{
		if (decoder->FlushFrame(dst, &dinfo) >= 0 && dinfo.iBufferStatus == 1)
		{
			outyuv.write(reinterpret_cast<char *>(dst[0]), dinfo.UsrData.sSystemBuffer.iStride[0] * dinfo.UsrData.sSystemBuffer.iHeight);
			outyuv.write(reinterpret_cast<char *>(dst[1]), dinfo.UsrData.sSystemBuffer.iStride[1] * dinfo.UsrData.sSystemBuffer.iHeight / 2);
			outyuv.write(reinterpret_cast<char *>(dst[2]), dinfo.UsrData.sSystemBuffer.iStride[1] * dinfo.UsrData.sSystemBuffer.iHeight / 2);
			outyuv.flush();
		}
	}

	// NOTE(review): the decode-pass `data` buffer is never freed (leak at exit).
	WelsDestroyDecoder(decoder);
	decoder = nullptr;

	outyuv.close();
	in.close();

	return 0;
}

Windows编译

# 打开源码目录下的project/msvc/faac.sln编译
faac使用例子
/*
 * @Author: gongluck 
 * @Date: 2021-05-27 20:53:35 
 * @Last Modified by: gongluck
 * @Last Modified time: 2021-05-27 21:38:05
 */

#include <iostream>
#include <fstream>
#include "faac.h"

int main(int argc, char *argv[])
{
	// faac demo: encode raw PCM into an ADTS AAC stream.
	// argv: [1]=pcm file, [2]=sample rate, [3]=sample width in bits,
	//       [4]=channel count, [5]=output file.
	std::cout << "faac demo" << std::endl;
	std::cout << "Usage : "
			  << "thisfilename pcmfile samplerate bytepersample channels outfile" << std::endl;
	if (argc < 6)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream pcmin(argv[1], std::ios::binary);
	if (pcmin.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	const unsigned long isamplerate = atol(argv[2]);
	const int ibytepersample = atoi(argv[3]);
	const unsigned int ichannels = atoi(argv[4]);
	std::ofstream outfile(argv[5], std::ios::binary);

	// Opening the encoder reports how many input samples it consumes per
	// call and the worst-case size of one encoded chunk.
	unsigned long inputsamples = 0;
	unsigned long maxoutputbytes = 0;
	faacEncHandle hencoder = faacEncOpen(isamplerate, ichannels, &inputsamples, &maxoutputbytes);

	// Map the sample width (bits) onto faac's input-format enum.
	faacEncConfiguration *config = faacEncGetCurrentConfiguration(hencoder);
	if (ibytepersample == 16)
	{
		config->inputFormat = FAAC_INPUT_16BIT;
	}
	else if (ibytepersample == 24)
	{
		config->inputFormat = FAAC_INPUT_24BIT;
	}
	else if (ibytepersample == 32)
	{
		config->inputFormat = FAAC_INPUT_32BIT;
	}
	else
	{
		config->inputFormat = FAAC_INPUT_NULL;
	}
	config->outputFormat = ADTS_STREAM;
	config->bitRate = 1000;
	// Push the modified configuration back into the encoder.
	auto ret = faacEncSetConfiguration(hencoder, config);

	// Bytes per encode call: samples * bits-per-sample / 8.
	auto framesize = inputsamples * ibytepersample / 8;
	uint8_t *pcmbuf = static_cast<uint8_t *>(malloc(framesize));
	unsigned char *aacbuf = static_cast<unsigned char *>(malloc(maxoutputbytes));
	// Feed whole PCM frames; append every produced ADTS chunk to the output.
	while (pcmin.read(reinterpret_cast<char *>(pcmbuf), framesize))
	{
		ret = faacEncEncode(hencoder, reinterpret_cast<int32_t *>(pcmbuf), inputsamples, aacbuf, maxoutputbytes);
		if (ret > 0)
		{
			outfile.write(reinterpret_cast<char *>(aacbuf), ret);
		}
	}
	// Drain the encoder until it has nothing left to emit.
	for (;;)
	{
		ret = faacEncEncode(hencoder, nullptr, 0, aacbuf, maxoutputbytes);
		if (ret <= 0)
		{
			break;
		}
		outfile.write(reinterpret_cast<char *>(aacbuf), ret);
	}

	ret = faacEncClose(hencoder);
	hencoder = nullptr;

	free(pcmbuf);
	pcmbuf = nullptr;
	free(aacbuf);
	aacbuf = nullptr;

	outfile.flush();
	outfile.close();
	pcmin.close();

	return 0;
}

Windows编译

# 打开源码目录下的project/msvc/faad2.sln编译
faad2使用例子
/*
 * @Author: gongluck 
 * @Date: 2021-05-28 17:06:35 
 * @Last Modified by:   gongluck 
 * @Last Modified time: 2021-05-28 17:06:35 
 */

#include <iostream>
#include <fstream>
#include "faad.h"
#undef ADTS
#include "aac.h"

int main(int argc, char *argv[])
{
	// faad2 demo: decode an ADTS .aac file to raw PCM, one frame at a time.
	// argv: [1]=aac file, [2]=output pcm file.
	// NOTE(review): the banner says "faac" but this is the faad2 decoder demo.
	std::cout << "faac demo" << std::endl;
	std::cout << "Usage : "
			  << "thisfilename aacfile outfile" << std::endl;
	if (argc < 3)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream aacin(argv[1], std::ios::binary);
	if (aacin.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	std::ofstream outfile(argv[2], std::ios::binary);

	faacDecHandle hdecoder = faacDecOpen();
	// Get the current config
	faacDecConfiguration *config = faacDecGetCurrentConfiguration(hdecoder);
	// Set the new configuration
	auto ret = faacDecSetConfiguration(hdecoder, config);

	// Peek the first ADTS header to size the first frame, then rewind and
	// hand that complete frame to faacDecInit so it can probe the stream.
	ADTS adts = { 0 };
	unsigned long samplerate = 0;
	unsigned char channels = 0;
	aacin.read(reinterpret_cast<char*>(&adts), sizeof(adts));
	uint16_t framelength = get_aac_frame_length(adts);
	// NOTE(review): -sizeof(adts) negates an unsigned value; this works on
	// common 64-bit targets after conversion to streamoff, but an explicit
	// static_cast<std::streamoff> would be safer — confirm.
	aacin.seekg(-sizeof(adts), std::ios::cur);
	unsigned char* indata = static_cast<unsigned char*>(malloc(framelength));
	aacin.read(reinterpret_cast<char*>(indata), framelength);
	ret = faacDecInit(hdecoder, indata, framelength, &samplerate, &channels);
	std::cout << "samplerate : " << samplerate << " channels : " << static_cast<int>(channels) << " format : " << static_cast<int>(config->outputFormat) << std::endl;
	free(indata);
	indata = nullptr;

	// Decode loop: read one header, rewind, read the whole frame (header
	// included) and decode it.
	faacDecFrameInfo info = { 0 };
	while (aacin.read(reinterpret_cast<char*>(&adts), sizeof(adts)))
	{
		framelength = get_aac_frame_length(adts);
		aacin.seekg(-sizeof(adts), std::ios::cur);
		
		unsigned char* indata = static_cast<unsigned char*>(malloc(framelength));
		aacin.read(reinterpret_cast<char*>(indata), framelength);
		auto outdata = faacDecDecode(hdecoder, &info, indata, framelength);
		if (info.error == 0)
		{
			// NOTE(review): faad2's info.samples is normally the TOTAL sample
			// count across all channels, and the default output is 16-bit PCM,
			// so the byte count should be info.samples * 2; samples * channels
			// only coincides with that for 2-channel input — confirm against
			// faad.h before reusing this for mono/multichannel streams.
			auto outsize = info.samples * info.channels;
			outfile.write(reinterpret_cast<char*>(outdata), outsize);
		}
		free(indata);
		indata = nullptr;
	}

	faacDecClose(hdecoder);
	hdecoder = nullptr;

	// The outer indata was already freed and nulled above, so this second
	// free is a harmless no-op.
	free(indata);
	indata = nullptr;

	outfile.flush();
	outfile.close();
	aacin.close();

	return 0;
}

Windows编译

# win32
cmake -S . -B ../build/win32 -G "Visual Studio 16 2019" -A win32 -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded$<$<CONFIG:Debug>:Debug> -DBUILD_PROGRAMS=ON -DBUILD_SHARED_LIBS=OFF
cmake --build ../build/win32 --clean-first --config release --target ALL_BUILD
# x64
cmake -S . -B ../build/x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded$<$<CONFIG:Debug>:Debug> -DBUILD_PROGRAMS=ON -DBUILD_SHARED_LIBS=OFF
cmake --build ../build/x64 --clean-first --config release --target ALL_BUILD
fdk-aac使用例子
/*
 * @Author: gongluck 
 * @Date: 2021-05-29 19:58:23 
 * @Last Modified by:   gongluck 
 * @Last Modified time: 2021-05-29 19:58:23 
 */

#include <iostream>
#include <fstream>
#include "aac.h"
#include "aacenc_lib.h"
#include "aacdecoder_lib.h"

int main(int argc, char *argv[])
{
	// fdk-aac demo: encode raw PCM to ADTS AAC, then decode that file back
	// to ./out.pcm.
	// argv: [1]=pcm file, [2]=sample rate, [3]=sample width in BITS
	//       (16/24/32, same convention as the faac example), [4]=channels,
	//       [5]=output aac file.
	std::cout << "fdk-aac demo" << std::endl;
	std::cout << "Usage : "
		<< "thisfilename pcmfile samplerate bytepersample channels outfile" << std::endl;
	if (argc < 6)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream pcmin(argv[1], std::ios::binary);
	if (pcmin.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	const unsigned long isamplerate = atol(argv[2]);
	const int ibytepersample = atoi(argv[3]);
	const unsigned int ichannels = atoi(argv[4]);
	std::ofstream outfile(argv[5], std::ios::binary);
	if (outfile.fail())
	{
		std::cerr << "can not open file " << argv[5] << std::endl;
		return -1;
	}

	AACENCODER* hencoder = nullptr;
	AACENC_ERROR ret = aacEncOpen(&hencoder, 0, static_cast<CHANNEL_MODE>(ichannels));
	// Encoder profile: AAC-LC.
	ret = aacEncoder_SetParam(hencoder, AACENC_AOT, AOT_AAC_LC);
	// Sample rate.
	ret = aacEncoder_SetParam(hencoder, AACENC_SAMPLERATE, isamplerate);
	// Channel mode.
	ret = aacEncoder_SetParam(hencoder, AACENC_CHANNELMODE, static_cast<CHANNEL_MODE>(ichannels));
	// Bitrate.
	ret = aacEncoder_SetParam(hencoder, AACENC_BITRATE, 1000);
	// Wrap every frame in an ADTS header.
	ret = aacEncoder_SetParam(hencoder, AACENC_TRANSMUX, TT_MP4_ADTS);

	// An all-null call finalizes the configuration.
	ret = aacEncEncode(hencoder, nullptr, nullptr, nullptr, nullptr);
	// Query the resulting parameters (frameLength, maxOutBufBytes).
	AACENC_InfoStruct einfo = { 0 };
	ret = aacEncInfo(hencoder, &einfo);

	AACENC_BufDesc inbuf = { 0 }, outbuf = { 0 };
	AACENC_InArgs inargs = { 0 };
	AACENC_OutArgs outargs = { 0 };
	// Bytes of PCM per call: frameLength samples * channels * bytes/sample.
	// ibytepersample is in bits (as in the faac example), hence the /8 — the
	// original omitted it and read 8x too much PCM per frame, so most of the
	// audio was silently discarded.
	int framesize = einfo.frameLength * ichannels * ibytepersample / 8;
	void* indata = malloc(framesize);
	void* outdata = malloc(einfo.maxOutBufBytes);
	int inidentifier = IN_AUDIO_DATA;
	// bufElSizes is the element size in BYTES, not bits.
	int isize = ibytepersample / 8;
	int outidentifier = OUT_BITSTREAM_DATA;
	int osize = 1;
	while (pcmin.read(reinterpret_cast<char*>(indata), framesize))
	{
		inargs.numInSamples = einfo.frameLength * ichannels;
		inbuf.numBufs = 1;
		inbuf.bufs = &indata;
		inbuf.bufferIdentifiers = &inidentifier;
		inbuf.bufSizes = &framesize;
		inbuf.bufElSizes = &isize;

		outbuf.numBufs = 1;
		outbuf.bufs = &outdata;
		outbuf.bufferIdentifiers = &outidentifier;
		// NOTE(review): maxOutBufBytes is UINT; the cast assumes
		// sizeof(UINT) == sizeof(int).
		outbuf.bufSizes = reinterpret_cast<int*>(&einfo.maxOutBufBytes);
		outbuf.bufElSizes = &osize;

		ret = aacEncEncode(hencoder, &inbuf, &outbuf, &inargs, &outargs);
		if (ret == AACENC_OK && outargs.numOutBytes > 0)
		{
			outfile.write(reinterpret_cast<char*>(outdata), outargs.numOutBytes);
		}
	}
	ret = aacEncClose(&hencoder);
	free(outdata);
	free(indata);
	outfile.close();
	pcmin.close();

	///////////////////////////////////////////////////////////////

	// Decode the file we just wrote back to PCM.
	std::ifstream aacin(argv[5], std::ios::binary);
	std::ofstream outpcm("./out.pcm", std::ios::binary);

	AAC_DECODER_ERROR dret = AAC_DEC_OK;
	HANDLE_AACDECODER hdecoder = aacDecoder_Open(TT_MP4_ADTS, 1);
	CStreamInfo* dinfo = nullptr;
	const int outbufsize = 4096;
	outdata = malloc(outbufsize);
	UINT valid = 0;
	ADTS adts = { 0 };
	// Walk the ADTS stream header by header; each header carries the full
	// frame length (header included).
	while (aacin.read(reinterpret_cast<char*>(&adts), sizeof(adts)))
	{
		UINT framelength = get_aac_frame_length(adts);
		// Rewind so the frame is read including its header.
		aacin.seekg(-static_cast<std::streamoff>(sizeof(adts)), std::ios::cur);

		UCHAR* indata = static_cast<UCHAR*>(malloc(framelength));
		aacin.read(reinterpret_cast<char*>(indata), framelength);
		valid = framelength;
		// Feed one complete ADTS frame, then pull the decoded PCM.
		dret = aacDecoder_Fill(hdecoder, reinterpret_cast<UCHAR**>(&indata), &framelength, &valid);
		dret = aacDecoder_DecodeFrame(hdecoder, reinterpret_cast<INT_PCM*>(outdata), outbufsize / sizeof(INT_PCM), 0);
		dinfo = aacDecoder_GetStreamInfo(hdecoder);
		if (dret == AAC_DEC_OK)
		{
			outpcm.write(reinterpret_cast<char*>(outdata), dinfo->numChannels * dinfo->frameSize * sizeof(INT_PCM));
			std::cout << "samplerate : " << dinfo->sampleRate << " channels : " << dinfo->numChannels << std::endl;
		}
		free(indata);
		indata = nullptr;
	}
	aacDecoder_Close(hdecoder);
	hdecoder = nullptr;
	free(outdata);
	// The original closed the already-closed encoder output (`outfile`) here
	// and never flushed/closed the decoder output — close outpcm instead.
	outpcm.close();
	aacin.close();

	return 0;
}

RTMPdump源码结构

# 使用https://github.com/gongluck/3rdparty/tree/main/rtmpdump/cmake的cmake脚本编译调试

# rtmpdump.c修改
# #ifdef _MSC_VER	/* MSVC */
# #if snprintf
# #define snprintf _snprintf
# #endif
# #define strcasecmp stricmp
# #define strncasecmp strnicmp
# #if vsnprintf
# #define vsnprintf _vsnprintf
# #endif
# #endif

# librtmp/rtmp_sys.h修改
# #ifdef _MSC_VER	/* MSVC */
# #if snprintf
# #define snprintf _snprintf
# #endif
# #define strcasecmp stricmp
# #define strncasecmp strnicmp
# #if vsnprintf
# #define vsnprintf _vsnprintf
# #endif
# #endif

# win32
cmake -S . -B ../build/win32 -G "Visual Studio 16 2019" -A win32
cmake --build ../build/win32 --clean-first --config release --target ALL_BUILD
# x64
cmake -S . -B ../build/x64 -G "Visual Studio 16 2019" -A x64
cmake --build ../build/x64 --clean-first --config release --target ALL_BUILD
librtmp拉流1
/*
 * @Author: gongluck
 * @Date: 2020-10-03 15:36:42
 * @Last Modified by: gongluck
 * @Last Modified time: 2021-07-18 15:24:49
 */

#include <iostream>
#include <fstream>
#include <thread>

#ifdef _WIN32
#include <Windows.h>
#endif

extern "C"
{
#include "rtmp.h"
}

int main(int argc, char *argv[])
{
	// librtmp pull demo: reads raw FLV data from an RTMP URL on a worker
	// thread and appends it to a local .flv file until the user presses a key.
	std::cout << "librtmp pull example" << std::endl;
	std::cout << "Usage : "
			  << "thisfile rtmpurl flvfile." << std::endl;
	if (argc < 3)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ofstream out(argv[2], std::ios::binary | std::ios::trunc);
	if (out.fail())
	{
		std::cerr << "can not open file " << argv[2] << std::endl;
		return -1;
	}
	std::cout << "pulling : " << argv[1] << std::endl;

#ifdef _WIN32
	// Winsock must be initialized before librtmp opens any socket.
	WSADATA wsaData;
	WORD version = MAKEWORD(1, 1);
	WSAStartup(version, &wsaData);
#endif

	RTMP *rtmp = RTMP_Alloc();
	RTMP_Init(rtmp);
	auto rtmpres = RTMP_SetupURL(rtmp, argv[1]);
	rtmpres = RTMP_Connect(rtmp, nullptr);
	rtmpres = RTMP_ConnectStream(rtmp, 0);

	bool stop = false;
	std::thread reader([&]
				   {
					   const int bufsize = 1024;
					   auto buffer = new char[bufsize];
					   for (;;)
					   {
						   if (stop)
						   {
							   break;
						   }
						   // RTMP_Read hands back FLV-formatted bytes;
						   // <= 0 means the stream ended or failed.
						   const auto got = RTMP_Read(rtmp, buffer, bufsize);
						   if (got <= 0)
						   {
							   break;
						   }
						   out.write(buffer, got);
					   }
					   delete[] buffer;
				   });

	std::cout << "input char to stop" << std::endl;
	std::cin.get();

	stop = true;
	if (reader.joinable())
	{
		reader.join();
	}

	RTMP_Close(rtmp);
	RTMP_Free(rtmp);
	out.close();

#ifdef _WIN32
	WSACleanup();
#endif

	return 0;
}
librtmp拉流2
/*
 * @Author: gongluck
 * @Date: 2020-10-03 15:36:42
 * @Last Modified by: gongluck
 * @Last Modified time: 2021-07-18 15:26:31
 */

#include <iostream>
#include <fstream>
#include <thread>

#ifdef _WIN32
#include <Windows.h>
#endif

#include "../../../analysis/flv/flv.h"
#include "../../../analysis/aac/aac.h"

#include "rtmp.h"

int main(int argc, char *argv[])
{
	// librtmp pull demo #2: reads RTMP packets directly and demuxes the FLV
	// tag payloads into a raw H.264 (Annex-B) file and an ADTS .aac file.
	std::cout << "librtmp example" << std::endl;
	std::cout << "Usage : "
			  << "thisfile rtmpurl h264file aacfile." << std::endl;
	if (argc < 4)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ofstream h264(argv[2], std::ios::binary | std::ios::trunc);
	if (h264.fail())
	{
		std::cerr << "can not open file " << argv[2] << std::endl;
		return -1;
	}
	std::ofstream aac(argv[3], std::ios::binary | std::ios::trunc);
	if (aac.fail())
	{
		std::cerr << "can not open file " << argv[3] << std::endl;
		return -1;
	}
	std::cout << "pulling : " << argv[1] << std::endl;

#ifdef _WIN32
	// Winsock must be initialized before librtmp opens any socket.
	WORD version;
	WSADATA wsaData;
	version = MAKEWORD(1, 1);
	WSAStartup(version, &wsaData);
#endif

	RTMP *rtmp = RTMP_Alloc();
	RTMP_Init(rtmp);
	auto rtmpres = RTMP_SetupURL(rtmp, argv[1]);
	rtmpres = RTMP_Connect(rtmp, nullptr);
	rtmpres = RTMP_ConnectStream(rtmp, 0);

	RTMPPacket packet = {0};

	// Annex-B start code prepended to every NAL unit written to the .h264 file.
	char nalu[] = {0x00, 0x00, 0x00, 0x01};
	FLVVIDEOTAG *video = nullptr;

	// Template ADTS header reused for every AAC frame; the stream-dependent
	// fields are filled in when the AAC sequence header arrives.
	ADTS adts = {0};
	set_syncword(adts, 0xFFF);
	adts.protection_absent = 1;
	adts.ID = ADTS_ID_MPEG4;
	set_adts_buffer_fullness(adts, 0x7FF);
	FLVAUDIOTAG *audio = nullptr;

	// NOTE(review): plain bool shared between threads without synchronization;
	// std::atomic<bool> would give a well-defined shutdown signal.
	bool stop = false;
	std::thread th([&]
				   {
					   auto rent = 0;
					   while (!stop)
					   {
						   rent = RTMP_ReadPacket(rtmp, &packet);
						   if (rent <= 0)
						   {
							   break;
						   }

						   if (packet.m_body != nullptr)
						   {
							   // RTMP media packet types line up with FLV tag types.
							   switch (packet.m_packetType)
							   {
							   case FLV_TAG_TYPE_AUDIO:
								   audio = reinterpret_cast<FLVAUDIOTAG *>(packet.m_body);
								   if (audio->soundFormat == FLV_SOUND_FORMAT_AAC)
								   {
									   switch (audio->audiopacket.aacaudiopacket.aacpackettype)
									   {
									   case AAC_PACKET_TYPE_HEAD:
									   {
										   // Sequence header: copy channel/rate/profile info
										   // from the AudioSpecificConfig into the ADTS template.
										   auto config = reinterpret_cast<AudioSpecificConfig *>(audio->audiopacket.aacaudiopacket.data);
										   set_channel_configuration(adts, config->ChannelConfiguration);
										   adts.sampling_frequency_index = FVLSAMPLEFREQUENCYINDEX((*config));
										   // ADTS profile field is AudioObjectType - 1.
										   adts.profile = config->AudioObjectType - 1;
									   }
									   break;
									   case AAC_PACKET_TYPE_RAW:
									   {
										   // Raw AAC frame: prefix it with the prepared ADTS
										   // header (frame length includes the header itself).
										   auto datasize = packet.m_nBodySize - offsetof(FLVAUDIOTAG, audiopacket.aacaudiopacket.data);
										   set_aac_frame_length(adts, datasize + sizeof(adts));
										   aac.write(reinterpret_cast<char *>(&adts), sizeof(adts));
										   aac.write(reinterpret_cast<char *>(audio->audiopacket.aacaudiopacket.data), datasize);
									   }
									   break;
									   default:
										   break;
									   }
								   }
								   break;
							   case FLV_TAG_TYPE_VIDEO:
								   video = reinterpret_cast<FLVVIDEOTAG *>(packet.m_body);
								   if (video->codecid == FLV_VIDEO_CODECID_AVC)
								   {
									   switch (video->videopacket.avcvideopacket.avcpacketype)
									   {
									   case AVC_PACKET_HEADER:
									   {
										   // AVCDecoderConfigurationRecord: extract SPS and PPS
										   // and emit each as a start-code-prefixed NAL unit.
										   // NOTE(review): assumes exactly one SPS followed by
										   // one PPS — confirm against flv.h's layout.
										   auto configheader = reinterpret_cast<AVCDecoderConfigurationRecordHeader *>(video->videopacket.avcvideopacket.avcpacketdata);

										   auto sps = reinterpret_cast<SequenceParameterSet *>(configheader->data);
										   auto datasize = FLVINT16TOINT((sps->sequenceParameterSetLength));
										   h264.write(nalu, sizeof(nalu));
										   h264.write(reinterpret_cast<char *>(sps->sequenceParameterSetNALUnit), datasize);

										   auto pps = reinterpret_cast<PictureParameterSet *>(sps->sequenceParameterSetNALUnit + FLVINT16TOINT(sps->sequenceParameterSetLength));
										   datasize = FLVINT16TOINT((pps->pictureParameterSetLength));
										   h264.write(nalu, sizeof(nalu));
										   h264.write(reinterpret_cast<char *>(pps->pictureParameterSetNALUnit), datasize);
									   }
									   break;
									   case AVC_PACKET_NALU:
									   {
										   // AVCC payload: a sequence of [4-byte length][NAL]
										   // records; rewrite each as [start code][NAL].
										   auto alldatasize = packet.m_nBodySize - offsetof(FLVVIDEOTAG, videopacket.avcvideopacket.avcpacketdata);
										   auto size = 0;
										   while (size < alldatasize)
										   {
											   auto nalsize = reinterpret_cast<FLVINT32 *>(&video->videopacket.avcvideopacket.avcpacketdata[size]);
											   auto datasize = FLVINT32TOINT((*nalsize));
											   h264.write(nalu, sizeof(nalu));
											   h264.write(reinterpret_cast<char *>(&video->videopacket.avcvideopacket.avcpacketdata[size + sizeof(FLVINT32)]), datasize);
											   size += sizeof(FLVINT32) + datasize;
										   }
									   }
									   break;
									   case AVC_PACKET_END:
									   {
										   h264.flush();
									   }
									   break;
									   default:
										   break;
									   }
								   }
							   }
						   }
						   // Release the packet body allocated by RTMP_ReadPacket.
						   RTMPPacket_Free(&packet);
						   continue;
					   }
				   });

	std::cout << "input char to stop" << std::endl;
	std::cin.get();

	stop = true;
	if (th.joinable())
	{
		th.join();
	}

	RTMP_Close(rtmp);
	RTMP_Free(rtmp);
	h264.close();
	aac.close();

#ifdef _WIN32
	WSACleanup();
#endif

	return 0;
}
librtmp推流
/*
 * @Author: gongluck
 * @Date: 2020-10-03 15:36:42
 * @Last Modified by: gongluck
 * @Last Modified time: 2021-07-18 18:26:45
 */

#include <iostream>
#include <fstream>
#include <thread>

#ifdef _WIN32
#include <Windows.h>
#endif

#include "../../../analysis/flv/flv.h"

extern "C"
{
#include "rtmp.h"
}

#define DODELAY 0
const int presentime = 100;

#define PUSHPACKET 1

int main(int argc, char *argv[])
{
	// librtmp push demo: reads a local FLV file tag by tag and pushes it to
	// an RTMP URL, either via RTMP_Write (raw FLV) or RTMP_SendPacket
	// (per-tag packets), selected by the PUSHPACKET macro.
	std::cout << "librtmp example" << std::endl;
	std::cout << "Usage : "
			  << "thisfile flvfile pushurl." << std::endl;
	if (argc < 3)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream in(argv[1], std::ios::binary);
	if (in.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	// (fixed message typo: was "pusing")
	std::cout << "pushing : " << argv[1] << std::endl;

#ifdef _WIN32
	// Winsock must be initialized before librtmp opens any socket.
	WORD version;
	WSADATA wsaData;
	version = MAKEWORD(1, 1);
	WSAStartup(version, &wsaData);
#endif

	RTMP *rtmp = RTMP_Alloc();
	RTMP_Init(rtmp);
	auto rtmpres = RTMP_SetupURL(rtmp, argv[2]);
	RTMP_EnableWrite(rtmp); // pushing requires write mode
	rtmpres = RTMP_Connect(rtmp, nullptr);
	rtmpres = RTMP_ConnectStream(rtmp, 0);

	// Skip the FLV file header and the first previous-tag-size field; only
	// the tags that follow are pushed.
	FLVHEADER flvheader = {0};
	in.read(reinterpret_cast<char *>(&flvheader), sizeof(flvheader));
	std::cout << flvheader << std::endl;

	FLVINT32 presize = {0};
	in.read(reinterpret_cast<char *>(&presize), 4);

	RTMPPacket packet = {0};
	auto begintime = RTMP_GetTime();
	uint32_t timestamp = 0, now = 0;
	while (true)
	{
		FLVTAGHEADER tagheader = {0};
		if (!in.read(reinterpret_cast<char *>(&tagheader), sizeof(tagheader)))
		{
			break;
		}

		// Buffer layout: tag header + tag payload + trailing previous-tag-size.
		auto datalen = FLVINT24TOINT(tagheader.datalen);
		auto data = new char[sizeof(FLVTAGHEADER) + datalen + sizeof(presize)];
		memcpy(data, &tagheader, sizeof(FLVTAGHEADER));
		if (!in.read(data + sizeof(FLVTAGHEADER), static_cast<uint64_t>(datalen) + sizeof(presize)))
		{
			delete[] data; // fixed leak: buffer was lost on truncated input
			break;
		}

		timestamp = FLVINT32TOINT(tagheader.timestamp);
#if DODELAY
	CALCTIME:
		// Pace the push so tags are sent roughly at their FLV timestamps
		// (plus a small presentation offset).
		now = RTMP_GetTime() - begintime;
		if (now < timestamp + presentime)
		{
			std::this_thread::sleep_for(std::chrono::milliseconds((timestamp + presentime - now) / 2));
			goto CALCTIME;
		}
#endif

#ifndef PUSHPACKET
		auto len = sizeof(FLVTAGHEADER) + datalen + sizeof(presize);
		rtmpres = RTMP_Write(rtmp, data, len); //tagheader + data + presize
		if (rtmpres < len)
		{
			std::cout << rtmpres << "\t" << len << std::endl;
			delete[] data; // fixed leak: buffer was lost on short write
			break;
		}
#else
		rtmpres = RTMPPacket_Alloc(&packet, datalen); // allocate the packet body buffer
		packet.m_nChannel = 0x03;
		packet.m_headerType = RTMP_PACKET_SIZE_MEDIUM;
		packet.m_packetType = tagheader.flvtagtype;
		packet.m_nTimeStamp = timestamp;
		packet.m_nInfoField2 = 0;
		packet.m_hasAbsTimestamp = 0;
		memcpy(packet.m_body, data + sizeof(FLVTAGHEADER), datalen);
		packet.m_nBodySize = datalen;
		rtmpres = RTMP_SendPacket(rtmp, &packet, 0);
		RTMPPacket_Free(&packet);
		if (rtmpres <= 0)
		{
			delete[] data; // fixed leak: buffer was lost on send failure
			break;
		}
#endif
		std::cout << "timestamp " << timestamp << "ms" << std::endl;
		delete[] data;
	}

	RTMP_Close(rtmp);
	RTMP_Free(rtmp);
	in.close();

#ifdef _WIN32
	WSACleanup();
#endif

	return 0;
}

ffmpeg命令行

  • 查看帮助文档
ffmpeg -h
ffmpeg -h long
ffmpeg -h full

# 显示版本
ffmpeg -version
# 显示编译配置
ffmpeg -buildconf
# 显示可用格式(muxers+demuxers)
ffmpeg -formats
# 显示可用复用器
ffmpeg -muxers
# 显示可用解复用器
ffmpeg -demuxers
# 显示可用设备
ffmpeg -devices
# 显示可用编解码器(decoders+encoders)
ffmpeg -codecs
# 显示可用解码器
ffmpeg -decoders
# 显示可用编码器
ffmpeg -encoders
# 显示可用比特率过滤器
ffmpeg -bsfs
# 显示可用协议
ffmpeg -protocols
# 显示可用过滤器
ffmpeg -filters
# 显示可用过滤格式
ffmpeg -pix_fmts
# 显示可用声道布局
ffmpeg -layouts
# 显示可用音频采样格式
ffmpeg -sample_fmts
# 显示可用颜色
ffmpeg -colors

ffplay -h
ffprobe -h
  • ffmpeg命令
# 保留封装格式
ffmpeg -i test.mp4 -acodec copy -vn audio.mp4
ffmpeg -i test.mp4 -vcodec copy -an video.mp4

# 提取视频
#保留编码格式
ffmpeg -i test.mp4 -vcodec copy -an test_copy.h264
#强制格式
ffmpeg -i test.mp4 -vcodec libx264 -an test.h264

# 提取音频
#保留编码格式
ffmpeg -i test.mp4 -acodec copy -vn test.aac
#强制格式
ffmpeg -i test.mp4 -acodec libmp3lame -vn test.mp3

# 提取YUV
#提取3秒数据,分辨率和源视频一致
ffmpeg -i test_1280x720.mp4 -t 3 -pix_fmt yuv420p yuv420p_orig.yuv
#提取3秒数据,分辨率转为320x240
ffmpeg -i test_1280x720.mp4 -t 3 -pix_fmt yuv420p -s 320x240 yuv420p_320x240.yuv

# 提取RGB
#提取3秒数据,分辨率转为320x240
ffmpeg -i test.mp4 -t 3 -pix_fmt rgb24 -s 320x240 rgb24_320x240.rgb

# RGB和YUV之间的转换
ffmpeg -s 320x240 -pix_fmt yuv420p -i yuv420p_320x240.yuv -pix_fmt rgb24 rgb24_320x240_2.rgb

# 提取PCM
ffmpeg -i buweishui.mp3 -ar 48000 -ac 2 -f s16le 48000_2_s16le.pcm
ffmpeg -i buweishui.mp3 -ar 48000 -ac 2 -sample_fmt s16 out_s16.wav
ffmpeg -i buweishui.mp3 -ar 48000 -ac 2 -codec:a pcm_s16le out2_s16le.wav
ffmpeg -i buweishui.mp3 -ar 48000 -ac 2 -f f32le 48000_2_f32le.pcm
ffmpeg -i test.mp4 -t 10 -vn -ar 48000 -ac 2 -f f32le 48000_2_f32le_2.pcm

# 转封装
#保持编码格式
ffmpeg -i test.mp4 -vcodec copy -acodec copy test_copy.ts
ffmpeg -i test.mp4 -codec copy test_copy2.ts
#改变编码格式
ffmpeg -i test.mp4 -vcodec libx265 -acodec libmp3lame out_h265_mp3.mkv
#修改帧率
ffmpeg -i test.mp4 -r 15 output.mp4
#修改视频码率
ffmpeg -i test.mp4 -b:v 400k output_bv.mkv
#修改音频码率
ffmpeg -i test.mp4 -b:a 192k output_ba.mp4
#修改音视频码率
ffmpeg -i test.mp4 -b:v 400k -b:a 192k output_bva.mp4
#修改视频分辨率
ffmpeg -i test.mp4 -s 480x270 output_480x270.mp4
#修改音频采样率
ffmpeg -i test.mp4 -ar 44100 output_44100hz.mp3

# 推流
#h264推流
ffmpeg -re -i gx.flv -vcodec h264 -acodec aac -f rtsp -rtsp_transport tcp rtsp://127.0.0.1/live/test
#h265推流
ffmpeg -re -i gx.flv -vcodec hevc -acodec aac -f rtsp -rtsp_transport tcp rtsp://127.0.0.1/live/test
#copy
ffmpeg -re -i gx.flv -vcodec copy -acodec copy -f flv -y rtmp://127.0.0.1/live/test

# 录屏
ffmpeg -y -f gdigrab -i desktop -r 30 -vcodec libx264 -s 1920x1080 -b:v 10000 -crf 24 -pix_fmt yuv420p -preset:v veryfast -tune:v zerolatency test.mp4
  • ffplay命令
# 播放YUV数据
ffplay -pixel_format yuv420p -video_size 320x240 -framerate 5 -i yuv420p_320x240.yuv

# 播放RGB数据
ffplay -pixel_format rgb24 -video_size 320x240 -i rgb24_320x240.rgb
ffplay -pixel_format rgb24 -video_size 320x240 -framerate 5 -i rgb24_320x240.rgb

# 播放PCM数据
ffplay -ar 48000 -ac 2 -f f32le 48000_2_f32le.pcm

ffmpeg解封装流程

FFMPEG解封装流程

ffmpeg封装流程

FFMPEG封装流程

ffmpeg解码流程

FFMPEG解码视频流程

FFMPEG解码音频流程

ffmpeg编码流程

FFMPEG编码视频流程

FFMPEG编码音频流程

SDL架构

Linux编译

./configure
make
sudo make install
#如果出现Could not initialize SDL - No available video device(Did you set the DISPLAY variable?)错误,说明系统中没有安装x11的库文件,因此编译出来的SDL库实际上不能用。下载安装
sudo apt-get install libx11-dev
sudo apt-get install xorg-dev

Windows编译

# win32
cmake -S . -B ../build/win32 -G "Visual Studio 16 2019" -A win32
cmake --build ../build/win32 --clean-first --config release --target ALL_BUILD
# x64
cmake -S . -B ../build/x64 -G "Visual Studio 16 2019" -A x64
cmake --build ../build/x64 --clean-first --config release --target ALL_BUILD
SDL使用例子
/*
 * @Author: gongluck 
 * @Date: 2021-01-23 14:12:40 
 * @Last Modified by:   gongluck 
 * @Last Modified time: 2021-01-23 14:12:40 
 */

#define SDL_MAIN_HANDLED

#include <iostream>
#include <fstream>
#include "SDL.h"

#define MY_QUIT		SDL_USEREVENT+1
#define MY_REFRESH	SDL_USEREVENT+2

const int WIDTH = 500;
const int HEIGHT = 300;
const int width = 10;
const int height = 10;

bool exitflag = false;

int mythread(void* param)
{
	while (!exitflag)
	{
		SDL_Event event;
		event.type = MY_REFRESH;
		SDL_PushEvent(&event);
		SDL_Delay(100);
	}

	SDL_Event event;
	event.type = MY_QUIT;
	SDL_PushEvent(&event);

	return 0;
}
// SDL audio callback: must fill 'stream' with exactly 'len' bytes of PCM.
// 'userdata' is the address of the std::ifstream opened in main() and
// passed via SDL_AudioSpec::userdata.
void SDLCALL mypcm(void* userdata, Uint8* stream, int len)
{
	// Bug fix: the object behind userdata is a std::ifstream, which derives
	// from std::istream, NOT std::iostream — the old cast to std::iostream*
	// was undefined behavior. Cast to the actual base class instead.
	auto pcm = static_cast<std::istream*>(userdata);
	// Read straight into the SDL buffer — no per-callback malloc/free needed.
	pcm->read(reinterpret_cast<char*>(stream), len);
	// On EOF/short read, pad the remainder with silence instead of returning
	// with the buffer untouched (the old code left stale data in 'stream',
	// which SDL would then play).
	const auto got = static_cast<int>(pcm->gcount());
	if (got < len)
	{
		SDL_memset(stream + got, 0, static_cast<size_t>(len - got));
	}
}

//SDL2_example ../../../../media/gx_yuv420p_320x240.yuv 320 240 ../../../../media/gx_44100_2_s16le.pcm
// Demo entry point: shows ~2 s of random rectangles, then plays a raw I420
// YUV file (rotated 90 degrees, scaled to the window) while streaming a
// 44.1 kHz stereo s16 PCM file through the SDL audio callback.
// argv[1] = YUV file, argv[2] = width, argv[3] = height, argv[4] = PCM file.
int main(int argc, char* argv[])
{
	std::cout << "SDL2 demo" << std::endl;

	std::cout << "Usage : " << "thisfilename YUVfile width height PCMfile" << std::endl;

	if (argc < 5)
	{
		std::cerr << "please see the usage message." << std::endl;
		return -1;
	}
	std::ifstream yuv(argv[1], std::ios::binary);
	if (yuv.fail())
	{
		std::cerr << "can not open file " << argv[1] << std::endl;
		return -1;
	}
	auto yuvwidth = atoi(argv[2]);
	auto yuvheight = atoi(argv[3]);
	std::ifstream pcm(argv[4], std::ios::binary);
	if (pcm.fail())
	{
		std::cerr << "can not open file " << argv[4] << std::endl;
		return -1;
	}

	// NOTE(review): return codes stored in 'ret' are mostly ignored below.
	auto ret = SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO);
	SDL_Window* window = SDL_CreateWindow("SDL2", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, WIDTH, HEIGHT, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
	auto renderer = SDL_CreateRenderer(window, -1, 0);
	// Off-screen render target used only for the rectangle warm-up below.
	auto texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGBA8888, SDL_TEXTUREACCESS_TARGET, WIDTH, HEIGHT);

	// Warm-up animation: 20 random red rectangles, one every 100 ms.
	int i = 0;
	while (i++ < 20)
	{
		// Draw into the off-screen texture first...
		ret = SDL_SetRenderTarget(renderer, texture);
		ret = SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
		ret = SDL_RenderClear(renderer);
		SDL_Rect rect = { rand() % (WIDTH - width), rand() % (HEIGHT - height), width, height };
		ret = SDL_RenderDrawRect(renderer, &rect);
		ret = SDL_SetRenderDrawColor(renderer, 255, 0, 0, 255);
		ret = SDL_RenderFillRect(renderer, &rect);

		// ...then switch back to the window and blit the texture onto it.
		ret = SDL_SetRenderTarget(renderer, nullptr);
		ret = SDL_RenderCopy(renderer, texture, nullptr, nullptr);

		SDL_RenderPresent(renderer);
		SDL_Delay(100);
	}

	// Streaming texture for planar I420 ("IYUV") frames.
	auto yuvtexture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, yuvwidth, yuvheight);
	// One I420 frame is width*height*3/2 bytes (Y + quarter-size U and V).
	auto datasize = yuvwidth * yuvheight * 3 / 2;
	// NOTE(review): malloc result is not checked for nullptr.
	auto yuvdata = static_cast<char*>(malloc(datasize));
	// Timer thread posts MY_REFRESH every 100 ms (~10 fps playback).
	auto th = SDL_CreateThread(mythread, nullptr, nullptr);

	// Audio: request 44.1 kHz stereo signed-16-bit; mypcm pulls data from
	// 'pcm' each time SDL needs 'samples' frames. Assumes the PCM file
	// matches this format — confirm against how the file was produced.
	SDL_AudioSpec spec = { 0 };
	spec.freq = 44100;
	spec.format = AUDIO_S16SYS;
	spec.channels = 2;
	spec.silence = 0;
	spec.samples = 1024;
	spec.callback = mypcm;
	spec.userdata = &pcm;
	ret = SDL_OpenAudio(&spec, nullptr);
	// Unpause: SDL starts invoking the callback on its audio thread.
	SDL_PauseAudio(0);

	// Main event loop: exits when exitflag is set (EOF, ESC, or MY_QUIT).
	SDL_Event event = { 0 };
	while (!exitflag)
	{
		ret = SDL_WaitEvent(&event);
		switch (event.type)
		{
		case SDL_KEYDOWN:
			if (event.key.keysym.sym >= SDLK_a && event.key.keysym.sym <= SDLK_z)
			{
				std::cout << char('a' + event.key.keysym.sym - SDLK_a) << " down" << std::endl;
			}
			else if (event.key.keysym.sym == SDLK_ESCAPE)
			{
				// ESC requests shutdown by posting the custom quit event.
				SDL_Event event_q;
				event_q.type = MY_QUIT;
				ret = SDL_PushEvent(&event_q);
				break;
			}
			break;
		case SDL_MOUSEBUTTONDOWN:
			if (event.button.button == SDL_BUTTON_LEFT)
			{
				std::cout << "mouse left down" << std::endl;
			}
			else if (event.button.button == SDL_BUTTON_RIGHT)
			{
				std::cout << "mouse right down" << std::endl;
			}
			else
			{
				std::cout << "mouse down" << std::endl;
			}
			break;
		case SDL_MOUSEMOTION:
			std::cout << "mouse move " << event.button.x << ", " << event.button.y << std::endl;
			break;
		case MY_REFRESH:
		{
			// Read the next frame; on EOF/short read, stop the loop.
			yuv.read(yuvdata, datasize);
			if (!yuv)
			{
				exitflag = true;
				break;
			}

			SDL_UpdateTexture(yuvtexture, nullptr, yuvdata, yuvwidth);
			SDL_RenderClear(renderer);
			//SDL_RenderCopy(renderer, yuvtexture, nullptr, nullptr);

			// Rotate 90 degrees and stretch to fill the render area.
			SDL_Point center = { yuvwidth, 0 };// pivot, in src coordinates
			SDL_Rect  dstrect;// destination rect in the rotated src coordinate system
			dstrect.x = -yuvwidth;
			dstrect.y = 0;
			dstrect.w = yuvwidth;
			dstrect.h = yuvheight;
			// Scale so the rotated frame fills the window (note WIDTH/HEIGHT
			// are swapped because of the 90-degree rotation).
			SDL_RenderSetScale(renderer, (float)HEIGHT / dstrect.w, (float)WIDTH / dstrect.h);// scale proportionally
			SDL_RenderCopyEx(renderer, yuvtexture, nullptr, &dstrect, -90, &center, SDL_FLIP_NONE);

			SDL_RenderPresent(renderer);
		}
		break;
		case MY_QUIT:
			std::cout << "my quit envent." << std::endl;
			exitflag = true;
			break;
		}
	}

	// Teardown. The timer thread observes exitflag and exits on its own.
	SDL_WaitThread(th, nullptr);
	SDL_DestroyTexture(yuvtexture);
	SDL_DestroyTexture(texture);
	// NOTE(review): SDL documentation recommends destroying the renderer
	// before its window; the order here (window first) looks suspect — confirm.
	SDL_DestroyWindow(window);
	SDL_DestroyRenderer(renderer);

	SDL_PauseAudio(1);
	SDL_CloseAudio();

	SDL_Quit();

	yuv.close();
	free(yuvdata);
	pcm.close();

	return 0;
}

Linux编译

cd srs/src/trunk/
sudo ./configure 
sudo make -j 8

ZLMediaKit

coturn

Nginx

  • 编译nginx

    # 安装依赖
    sudo apt-get update
    sudo apt-get install build-essential libtool -y
    sudo apt-get install libpcre3 libpcre3-dev -y
    sudo apt-get install zlib1g-dev -y
    sudo apt-get install openssl -y
    #下载nginx
    wget http://nginx.org/download/nginx-1.19.0.tar.gz
    tar zxvf nginx-1.19.0.tar.gz
    cd nginx-1.19.0/
    # 配置,支持https
    ./configure --with-http_ssl_module
    # 编译
    make -j 8
    # 安装
    sudo make install
    # 启动
    sudo /usr/local/nginx/sbin/nginx
    # 停止
    sudo /usr/local/nginx/sbin/nginx -s stop
    # 重新加载配置文件
    sudo /usr/local/nginx/sbin/nginx -s reload
  • 生成证书

    sudo mkdir -p /root/cert
    cd /root/cert # 注意:cd 是 shell 内建命令,sudo cd 不会生效;如需 root 权限可先执行 sudo -i
    # 生成私钥(供下面的自签名证书使用)
    sudo openssl genrsa -out key.pem 2048
    # 自签名证书
    sudo openssl req -new -x509 -key key.pem -out cert.pem -days 1095
  • 配置Web服务

    • 创建webrtc-­https.conf文件:

      server{
      	listen 443 ssl;
      	ssl_certificate /root/cert/cert.pem;
      	ssl_certificate_key /root/cert/key.pem;
      	charset utf-8;
      	# ip地址或者域名
      	server_name www.gongluck.icu;
      	location / {
      		add_header 'Access-Control-Allow-Origin' '*';
      		add_header 'Access-Control-Allow-Credentials' 'true';
      		add_header 'Access-Control-Allow-Methods' '*';
      		add_header 'Access-Control-Allow-Headers' 'Origin, X-Requested-With, Content-Type,Accept';
      	# web页面所在目录
      	root /code/AnalysisAVP/example/WebRTC/demo/client/;
      	index index.php index.html index.htm;
      	}
      }
    • 创建webrtc-websocket-proxy.conf文件:

      map $http_upgrade $connection_upgrade {
      	default upgrade;
      	'' close;
      }
      upstream websocket {
      		server www.gongluck.icu:8099;
      	}
      	server {
      		listen 8098 ssl;
      		#ssl on;
      		ssl_certificate /root/cert/cert.pem;
      		ssl_certificate_key /root/cert/key.pem;
      		server_name www.gongluck.icu;
      	location /ws {
      		proxy_pass http://websocket;
      		proxy_http_version 1.1;
      		proxy_connect_timeout 4s; #配置点1
      		proxy_read_timeout 6000s; #配置点2,如果没效,可以考虑这个时间配置长一点
      		proxy_send_timeout 6000s; #配置点3
      		proxy_set_header Upgrade $http_upgrade;
      		proxy_set_header Connection $connection_upgrade;
      	}
      }
    • 编辑nginx.conf文件,在末尾}之前添加包含文件:

      include /code/AnalysisAVP/example/WebRTC/demo/client/webrtc-https.conf;
      include /code/AnalysisAVP/example/WebRTC/demo/client/webrtc-websocket-proxy.conf;

nodejs

  • 安装nodejs

    # 下载
    wget https://nodejs.org/dist/v15.0.0/node-v15.0.0-linux-x64.tar.xz
    # 解压
    tar -xvf node-v15.0.0-linux-x64.tar.xz
    # 进入目录
    cd node-v15.0.0-linux-x64
    # 执行软连接
    sudo ln -s /mnt/e/ubuntu/node-v15.0.0-linux-x64/bin/npm /usr/local/bin/
    sudo ln -s /mnt/e/ubuntu/node-v15.0.0-linux-x64/bin/node /usr/local/bin/