liuzhiyuan1225/VideoCamera

FFmpegCamera.cpp
#include <iostream>
#include "FFmpegCamera.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
}
#include "SDL.h"
#include "SDL_thread.h"
#ifdef __linux__
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/Xos.h>
#endif
// Number of live FFmpegCamera instances: the first Init() initializes the shared
// FFmpeg/SDL libraries, the last destructor uninitializes them.
static int FFmpegCameraObj = 0;
FFmpegCamera::FFmpegCamera()
{
logger = log4cxx::Logger::getLogger("FFmpegCamera");
type = CAMERA_TYPE::FFMPEG_CAMERA;
hwnd = 0;
callback = 0;
context = 0;
FFmpegCameraObj++;
running = false;
#ifdef _WIN32
reconnect = false;
hEvent = 0;
#endif
}
FFmpegCamera::~FFmpegCamera()
{
FFmpegCameraObj--;
if (FFmpegCameraObj == 0)
FFmpegCamera::UninitLibrary();
}
bool FFmpegCamera::InitLibrary()
{
// Register demuxers, network protocols and capture devices (required on older FFmpeg versions)
av_register_all();
avformat_network_init();
avdevice_register_all();
// SDL is only needed when a window handle was supplied for rendering
if (hwnd)
SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER);
else
LOG4CXX_INFO(logger, "FFmpegCamera::InitLibrary(): SDL not used");
return true;
}
bool FFmpegCamera::UninitLibrary()
{
return true;
}
int FFmpegCamera::Init(HWND hwnd, const char *path, bool device)
{
this->hwnd = hwnd;
this->path = path;
this->device = device;
if (FFmpegCameraObj == 1)
FFmpegCamera::InitLibrary();
return this->handle;
}
void FFmpegCamera::SetCallback(VideoDataCallback callback, void *context)
{
this->callback = callback;
this->context = context;
}
int FFmpegCamera::Connect()
{
if (running) return -1;
running = true;
#ifdef _WIN32
hDecodeThread = CreateThread(NULL, 0, DecodeThread, this, 0, NULL);
if (hDecodeThread == 0)
{
running = false;
return -1;
}
#else
pthread_t decodeThread;
int ret = pthread_create(&decodeThread, nullptr, DecodeThread, this);
if (ret != 0)
{
running = false;
return -1;
}
// The thread id is not stored anywhere, so detach it to avoid leaking thread resources
pthread_detach(decodeThread);
#endif
#ifdef _WIN32
if (!reconnect)
{
reconnect = true;
hEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
hReconnectThread = CreateThread(NULL, 0, ReconnectThread, this, 0, NULL);
if (hReconnectThread == 0)
{
reconnect = false;
return -1;
}
}
prevTime = GetCurrentTime();
#endif
return 0;
}
int FFmpegCamera::Disconnect()
{
if (!running) return -1;
#ifdef _WIN32
reconnect = false;
SetEvent(hEvent);
WaitForSingleObject(hReconnectThread, INFINITE);
CloseHandle(hReconnectThread);
#endif
// The decode thread observes running == false and exits on its own (it is only joined on Windows)
running = false;
#ifdef _WIN32
WaitForSingleObject(hDecodeThread, INFINITE);
CloseHandle(hDecodeThread);
#endif
return 0;
}
#ifdef _WIN32
LONGLONG FFmpegCamera::GetCurrentTime()
{
SYSTEMTIME systemTime;
FILETIME fileTime;
LARGE_INTEGER time;
GetLocalTime(&systemTime);
SystemTimeToFileTime(&systemTime, &fileTime); // FILETIME is in 100-nanosecond units
time.HighPart = fileTime.dwHighDateTime;
time.LowPart = fileTime.dwLowDateTime;
return time.QuadPart / 10000; // convert to milliseconds
}
#endif
#ifdef _WIN32
DWORD WINAPI FFmpegCamera::DecodeThread(LPVOID context)
#else
void* FFmpegCamera::DecodeThread(void* context)
#endif
{
FFmpegCamera *camera = (FFmpegCamera *)context;
int ret = 0;
AVFormatContext *pFormatCtx = NULL;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL;
AVFrame *pFrameRGB = NULL;
AVFrame *pFrameYUV = NULL;
AVPacket packet;
AVDictionary *optionsDict = NULL;
struct SwsContext *sws_ctxRGB = NULL;
struct SwsContext *sws_ctxYUV = NULL;
pFormatCtx = avformat_alloc_context();
pFormatCtx->interrupt_callback.callback = DecodeInterruptCallback;
pFormatCtx->interrupt_callback.opaque = camera;
AVInputFormat *inputFormat = NULL;
AVDictionary *options = NULL;
if (camera->device)
{
// Local capture device: use the Video4Linux2 input format
LOG4CXX_INFO(camera->logger, "FFmpegCamera: device mode");
inputFormat = av_find_input_format("v4l2");
}
// Options mainly for RTSP sources: 2-second socket timeout (in microseconds),
// larger receive buffer, and TCP transport
av_dict_set(&options, "stimeout", std::to_string(2 * 1000000).c_str(), 0);
av_dict_set(&options, "buffer_size", "1024000", 0);
av_dict_set(&options, "rtsp_transport", "tcp", 0);
LOG4CXX_INFO(camera->logger, "FFmpegCamera::path: " << camera->path);
ret = avformat_open_input(&pFormatCtx, camera->path.c_str(), inputFormat, &options);
if (ret != 0)
{
LOG4CXX_ERROR(camera->logger, "avformat_open_input() error: " << ret);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
ret = avformat_find_stream_info(pFormatCtx, NULL);
if (ret < 0)
{
LOG4CXX_ERROR(camera->logger, "avformat_find_stream_info() error: " << ret);
avformat_close_input(&pFormatCtx);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
av_dump_format(pFormatCtx, 0, camera->path.c_str(), 0);
// Find the first video stream
int videoStream = -1;
for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++)
{
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoStream = (int)i;
break;
}
}
if (videoStream == -1)
{
LOG4CXX_ERROR(camera->logger, "No video stream found");
avformat_close_input(&pFormatCtx);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
if (pFormatCtx->streams[videoStream]->codec->width < 1 || pFormatCtx->streams[videoStream]->codec->height < 1)
{
avformat_close_input(&pFormatCtx);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL)
{
LOG4CXX_ERROR(camera->logger, "Unsupported codec");
avformat_close_input(&pFormatCtx);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
if (avcodec_open2(pCodecCtx, pCodec, &optionsDict) < 0)
{
LOG4CXX_ERROR(camera->logger, "avcodec_open2() failed");
avformat_close_input(&pFormatCtx);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
pFrameYUV = av_frame_alloc();
if (pFrame == NULL || pFrameRGB == NULL || pFrameYUV == NULL)
{
LOG4CXX_ERROR(camera->logger, "av_frame_alloc() failed");
av_frame_free(&pFrame);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrameYUV);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
// Despite the "RGB" name, sws_ctxRGB below converts to BGR24, so size and fill the buffer for that format
uint8_t *bufferRGB = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameRGB, bufferRGB, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
uint8_t *bufferYUV = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
avpicture_fill((AVPicture *)pFrameYUV, bufferYUV, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
sws_ctxRGB = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_BGR24,
SWS_BILINEAR,
NULL,
NULL,
NULL);
sws_ctxYUV = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_YUV420P,
SWS_BILINEAR,
NULL,
NULL,
NULL);
SDL_Window *screen = 0;
SDL_Renderer *renderer = 0;
SDL_Texture *texture = 0;
SDL_Rect srcRect, dstRect;
#ifdef _WIN32
RECT window_rect;
#else
Display* display = 0;
#endif
if (camera->hwnd)
{
#ifdef __linux__
display = XOpenDisplay(NULL);
if (display == NULL)
LOG4CXX_ERROR(camera->logger, "Cannot open display");
#endif
srcRect.x = 0;
srcRect.y = 0;
srcRect.w = pCodecCtx->width;
srcRect.h = pCodecCtx->height;
screen = SDL_CreateWindowFrom((void *)camera->hwnd);
if (!screen)
{
fprintf(stderr, "SDL: could not create window - %s\n", SDL_GetError());
camera->running = false;
#ifdef _WIN32
return -1;
#else
return (void *)-1;
#endif
}
if (camera->hardwareRender)
{
LOG4CXX_INFO(camera->logger, "Hardware accelerated render");
renderer = SDL_CreateRenderer(screen, -1, SDL_RENDERER_ACCELERATED);
}
else
{
LOG4CXX_INFO(camera->logger, "Software render");
renderer = SDL_CreateRenderer(screen, -1, SDL_RENDERER_SOFTWARE);
}
texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
pCodecCtx->width, pCodecCtx->height);
}
int frameFinished;
while (camera->running && (av_read_frame(pFormatCtx, &packet) >= 0))
{
if(packet.stream_index == videoStream)
{
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished)
{
#ifdef _WIN32
camera->prevTime = camera->GetCurrentTime();
#endif
sws_scale(sws_ctxRGB,
(uint8_t const * const *)pFrame->data,
pFrame->linesize,
0,
pCodecCtx->height,
pFrameRGB->data,
pFrameRGB->linesize);
if (camera->callback)
{
// Hand the converted BGR24 frame (3 bytes per pixel) to the registered callback
camera->callback(camera->handle, pFrameRGB->data[0], pCodecCtx->width * pCodecCtx->height * 3,
pCodecCtx->width, pCodecCtx->height, 0, camera->context);
}
if (camera->hwnd)
{
// Convert the image into YUV format that SDL uses
sws_scale(sws_ctxYUV,
(uint8_t const * const *)pFrame->data,
pFrame->linesize,
0,
pCodecCtx->height,
pFrameYUV->data,
pFrameYUV->linesize);
#ifdef _WIN32
GetClientRect(camera->hwnd, &window_rect);
dstRect.x = 0;
dstRect.y = 0;
dstRect.w = window_rect.right - window_rect.left;
dstRect.h = window_rect.bottom - window_rect.top;
#else
XWindowAttributes wa;
XGetWindowAttributes(display, camera->hwnd, &wa);
dstRect.x = 0;
dstRect.y = 0;
dstRect.w = wa.width;
dstRect.h = wa.height;
#endif
SDL_UpdateYUVTexture(texture, &srcRect,
pFrameYUV->data[0], pFrameYUV->linesize[0],
pFrameYUV->data[1], pFrameYUV->linesize[1],
pFrameYUV->data[2], pFrameYUV->linesize[2]);
SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, texture, &srcRect, &dstRect);
SDL_RenderPresent(renderer);
}
}
}
av_free_packet(&packet);
}
av_frame_free(&pFrame);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrameYUV);
av_free(bufferRGB);
av_free(bufferYUV);
sws_freeContext(sws_ctxRGB);
sws_freeContext(sws_ctxYUV);
if (camera->hwnd)
{
SDL_DestroyTexture(texture);
SDL_DestroyRenderer(renderer);
#ifdef __linux__
if (display)
XCloseDisplay(display);
#endif
}
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
camera->running = false;
return 0;
}
int FFmpegCamera::DecodeInterruptCallback(void *context)
{
FFmpegCamera *camera = (FFmpegCamera *)context;
#ifdef _WIN32
// Abort blocking FFmpeg I/O when no frame has been decoded for more than 5 seconds
LONGLONG timeDiff = camera->GetCurrentTime() - camera->prevTime;
if (timeDiff > 5000)
{
return 1; // non-zero aborts the blocking call
}
#endif
return 0; // keep going
}
#ifdef _WIN32
// Wakes up every 10 seconds (or when hEvent is signalled by Disconnect) and
// restarts the decode thread via Connect() if it has stopped unexpectedly.
DWORD WINAPI FFmpegCamera::ReconnectThread(LPVOID context)
{
FFmpegCamera *camera = (FFmpegCamera *)context;
while (camera->reconnect)
{
WaitForSingleObject(camera->hEvent, 10000);
if (!camera->running)
{
camera->Connect();
}
}
return 0;
}
#endif
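
For reference, below is a minimal sketch of how a host application might drive this class. It assumes the declarations in FFmpegCamera.h match the calls made in this file; the callback signature is only inferred from the invocation in DecodeThread (handle, BGR24 pixel data, byte count, width, height, a reserved field, user context) and may differ from the real VideoDataCallback typedef, and the RTSP URL is a placeholder.

#include <chrono>
#include <thread>
#include "FFmpegCamera.h"

// Hypothetical callback; the real VideoDataCallback typedef lives in the project headers.
static void OnFrame(int handle, unsigned char *data, int size,
                    int width, int height, int reserved, void *context)
{
    // data points to a width x height BGR24 frame (size == width * height * 3)
}

int main()
{
    FFmpegCamera camera;
    // A null window handle runs headless: SDL rendering is skipped and frames only reach the callback
    camera.Init(0, "rtsp://192.168.1.10/stream1", false);
    camera.SetCallback(OnFrame, nullptr);
    if (camera.Connect() != 0)
        return 1;
    std::this_thread::sleep_for(std::chrono::seconds(30));
    camera.Disconnect();
    return 0;
}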