You'll need the NDI SDK library and header files. Place the NDI libraries (e.g. the armhf 4.9 build of libndi.a) in a folder named lib next to the project files, and place the Processing.NDI.*.h header files in a folder named include in the same location.
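For reference, the CMakeLists.txt below expects a layout roughly like this (inferred from its paths, so treat it as a sketch):

project/
  CMakeLists.txt
  main.cpp
  include/   <- Processing.NDI.*.h headers
  lib/       <- libndi.a

Configuring with cmake and running make then builds the Main executable.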
Using V4L2 to broadcast the Logitech C920's compressed MJPEG stream over NewTek's Network Device Interface (NDI) from a Raspberry Pi
CMakeLists.txt
cmake_minimum_required (VERSION 2.8)
project (Main)
SET(CMAKE_BUILD_TYPE DEBUG)
SET(CMAKE_VERBOSE_MAKEFILE ON)
SET(CMAKE_C_FLAGS "-Wall")
SET(CMAKE_CXX_FLAGS_RELEASE ${CMAKE_C_FLAGS_RELEASE})
SET(CMAKE_CXX_FLAGS "-g -O2 -ldl -Wall -std=c++11 -D__STDC_CONSTANT_MACROS")
SET(CMAKE_FIND_LIBRARY_SUFFIXES .a)
include_directories("inc")
#Find ffmpeg directories
find_package(PkgConfig)
if (PKG_CONFIG_FOUND)
pkg_check_modules(_FFMPEG_AVFORMAT libavformat)
pkg_check_modules(_FFMPEG_AVCODEC libavcodec)
pkg_check_modules(_FFMPEG_AVUTIL libavutil)
pkg_check_modules(_FFMPEG_SWSCALE libswscale)
pkg_check_modules(_PC_X264 x264)
endif (PKG_CONFIG_FOUND)
#find_package(OpenCV)
#include_directories(${OpenCV_INCLUDE_DIRS})
#Find ndi directories
include_directories(${PROJECT_SOURCE_DIR}/include ${PROJECT_SOURCE_DIR})
SET(HEADER_FILES
${PROJECT_SOURCE_DIR}/include/Processing.NDI.Lib.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.compat.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.DynamicLoad.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.Find.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.Recv.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.Routing.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.Send.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.structs.h
${PROJECT_SOURCE_DIR}/include/Processing.NDI.utilities.h
)
find_library(LIBNDI_LIBRARY libndi.a PATHS "${PROJECT_SOURCE_DIR}/lib")
if (LIBNDI_LIBRARY)
message("Found libndi.a library")
else ()
message("Could not find libndi.a library. Add them") | |
endif(LIBNDI_LIBRARY)
link_directories(${PROJECT_SOURCE_DIR}/lib)
#add ndi and other libraries, headers together
add_executable (Main main.cpp ${HEADER_FILES})
add_library(ndi STATIC IMPORTED)
set_target_properties(ndi PROPERTIES IMPORTED_LOCATION ${PROJECT_SOURCE_DIR}/lib/libndi.a)
target_link_libraries (Main "-lswscale" "-lx264" ndi "-lm" "-lpthread" "-lz" "-lavcodec" "-lavutil")
#${OpenCV_LIBS})
#testing
enable_testing()
add_test(help ./Main -h)
#package
install (TARGETS Main DESTINATION lib)
SET(CPACK_GENERATOR "DEB")
SET(CPACK_DEBIAN_PACKAGE_MAINTAINER "Andrew Tan")
INCLUDE(CPack)
main.cpp
#include <iostream>
#include <iomanip>
#include <stdio.h>
#include <cstdlib>
#include <string.h>
#include <assert.h>
#include <fcntl.h>
#include <sstream>
#include <cerrno>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <stdint.h>
#include <math.h>
#include <linux/videodev2.h>
/*-------------------FFMPEG--------------------*/
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/avutil.h>
}
/*----------------NDI library------------------*/
#include <cstddef>
#include <cstdbool>
#include <signal.h>
#include <atomic>
#include <cstdint>
#include <Processing.NDI.Lib.h>
#ifndef V4L2_PIX_FMT_H264
#define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4')
#endif // V4L2_PIX_FMT_H264
#define CLEAR(x) memset(&(x), 0, sizeof(x))
using namespace std;
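//One memory-mapped V4L2 capture buffer, filled in via VIDIOC_QUERYBUF + mmap() in init_device()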
struct buffer
{
uint8_t *start;
size_t length;
};
struct buffer *buffers;
static unsigned int n_buffers;
static int fd = -1;
static int out_width = 1920;
static int out_height = 1080;
static AVCodecContext *decoder_ctx;
static AVCodec *pCodec;
static AVPacket packet_in;
static AVFrame *decoded_frame;
/*-----------------------------------NDI interrupt handler---------------------------------*/
volatile sig_atomic_t stop = 0;
void sig_handler(int signum)
{
stop = 1;
}
static void err_fai(const char *s)
{
perror(s);
exit(EXIT_FAILURE);
}
static void init_device(void)
{
unsigned int min;
fd = open("/dev/video0",O_RDWR | O_NONBLOCK, 0);
if(fd < 0)
{
err_fai("open");
}
//getting device capabilities
struct v4l2_capability cap;
struct v4l2_format format;
if(ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
{
if(EINVAL == errno)
{
fprintf(stderr, "This device is no v4l2 device\n"); | |
exit(EXIT_FAILURE);
}
else
{
err_fai("VIDIOC_QUERYCAP");
}
}
if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
fprintf(stderr, "The device does not support video capture.\nMight want to check device capability.\nv4l2-ctl -d 0 --all"); | |
exit(EXIT_FAILURE);
}
if(!(cap.capabilities & V4L2_CAP_STREAMING))
{
fprintf(stderr, "The device does not support streaming.\nMight want to check device capability.\nv4l2-ctl -d 0 --all"); | |
exit(EXIT_FAILURE);
}
CLEAR(format);
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fprintf(stderr, "Setting H264 format\n"); | |
format.fmt.pix.width = out_width;
format.fmt.pix.height = out_height;
format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
format.fmt.pix.field = V4L2_FIELD_NONE;
if(ioctl(fd,VIDIOC_S_FMT, &format) < 0)
{
err_fai("VIDIOC_S_FMT");
}
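//Buggy-driver paranoia (as in the V4L2 capture.c example): clamp bytesperline/sizeimage to sane minimums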
min = format.fmt.pix.width * 2;
if(format.fmt.pix.bytesperline < min)
format.fmt.pix.bytesperline = min;
min = format.fmt.pix.bytesperline * format.fmt.pix.height;
if (format.fmt.pix.sizeimage < min)
format.fmt.pix.sizeimage = min;
//Call mmap buffers
struct v4l2_requestbuffers buf_req;
CLEAR(buf_req);
buf_req.count = 4;
buf_req.memory = V4L2_MEMORY_MMAP;
buf_req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd,VIDIOC_REQBUFS, &buf_req) < 0)
{
if(EINVAL == errno)
{
fprintf(stderr, "This device does not support user pointer i/o\n"); | |
exit(EXIT_FAILURE);
}
else
{
err_fai("VIDIOC_REQBUFS");
}
}
if(buf_req.count < 2)
{
fprintf(stderr, "Insufficient buffer memory\n");
exit(EXIT_FAILURE);
}
buffers = (struct buffer*)av_calloc(buf_req.count, sizeof(*buffers));
if(!buffers)
{
fprintf(stderr, "Out of memory.\n");
exit(EXIT_FAILURE);
}
//Allocate buffers
for(n_buffers = 0; n_buffers < buf_req.count; ++n_buffers)
{
struct v4l2_buffer buf_;
CLEAR(buf_);
buf_.index = n_buffers;
buf_.memory = V4L2_MEMORY_MMAP;
buf_.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_QUERYBUF, &buf_) < 0)
{
err_fai("VIDIOC_QUERYBUF");
}
buffers[n_buffers].length = buf_.length;
buffers[n_buffers].start = (uint8_t *)mmap(NULL, buf_.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf_.m.offset);
if(MAP_FAILED == buffers[n_buffers].start)
{
err_fai("mmap");
}
}
printf("V4L2 device initialised\n\n");
}
static void uninit_device(void)
{
unsigned int i;
for (i = 0; i < n_buffers; ++i)
{
if(munmap(buffers[i].start, buffers[i].length) < 0)
{
err_fai("munmap");
}
}
av_free(buffers); //allocated with av_calloc, so release with av_free
}
static void start_capturing(void)
{
unsigned int i;
struct v4l2_fmtdesc type_index;
for(i = 0; i < n_buffers; ++i)
{
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if(ioctl(fd, VIDIOC_QBUF, &buf) < 0)
{
err_fai("VIDIOC_QBUF");
}
}
CLEAR(type_index);
type_index.index = 2; //Check with v4l2-ctl -d 0 --all for the MJPEG format index.
type_index.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd, VIDIOC_ENUM_FMT, &type_index) < 0)
{
err_fai("VIDIOC_ENUM_FMT");
}
if(ioctl(fd, VIDIOC_STREAMON, &type_index.type) < 0)
{
err_fai("VIDIOC_STREAMON");
}
}
static void close_device(void)
{
if(close(fd) < 0)
{
err_fai("close");
}
fd = -1;
}
//Free callback for the (commented-out) av_buffer_create in init_decoder.
//The wrapped memory is an mmap'd V4L2 buffer that is released in uninit_device,
//so nothing is freed here.
static void free_buffers(void *opaque, uint8_t *data)
{
(void)opaque;
(void)data;
}
static void init_decoder(void)
{
pCodec = avcodec_find_decoder(AV_CODEC_ID_MJPEG);
if(pCodec == NULL)
{
fprintf(stderr, "Unsupported codec\n");
exit(EXIT_FAILURE);
}
decoder_ctx = avcodec_alloc_context3(pCodec);
decoder_ctx->codec_id = AV_CODEC_ID_MJPEG;
decoder_ctx->flags = AV_CODEC_FLAG_LOW_DELAY;
decoder_ctx->width = out_width;
decoder_ctx->coded_width = out_width;
decoder_ctx->height = out_height;
decoder_ctx->coded_height = out_height;
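//Note: the MJPEG decoder chooses its own output pixel format (typically AV_PIX_FMT_YUVJ422P for the C920); pix_fmt below is only a hint.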
decoder_ctx->pix_fmt = AV_PIX_FMT_BGRA;
decoder_ctx->framerate = AVRational{30,1}; //30 fps
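//thread_type 2 corresponds to FF_THREAD_SLICE (slice-based decoding threads)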
decoder_ctx->thread_type = 2;
av_init_packet(&packet_in);
//packet_in.buf = av_buffer_create(buffers->start, buffers->length, free_buffers, decoded_frame->opaque, AV_BUFFER_FLAG_READONLY);
//This check only applies if the av_buffer_create call above is used; with it commented out,
//packet_in.buf is always NULL here, so the check is disabled as well.
//if(packet_in.buf == NULL)
//{
//printf("Error starting AVBufferRef\n");
//av_packet_unref(&packet_in);
//}
if(avcodec_open2(decoder_ctx, pCodec, NULL) < 0)
{
fprintf(stderr, "Error opening codec\n");
exit(EXIT_FAILURE);
}
decoded_frame = av_frame_alloc();
if(decoded_frame == NULL)
{
fprintf(stderr, "Error allocating decoded data\n");
exit(EXIT_FAILURE);
}
decoded_frame->width = out_width;
decoded_frame->height = out_height;
decoded_frame->format = decoder_ctx->pix_fmt;
}
int main(int argc, char *argv[])
{
printf("Designed for Logitech C920\n\n");
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
/*---------------------NDI------------------------------*/
if(!NDIlib_initialize())
{
fprintf(stderr,"Error! Couldn' initialise NDI\n"); | |
exit(EXIT_FAILURE);
}
NDIlib_send_create_t send_create_para = {"Raspberry", NULL, true, false};
NDIlib_send_instance_t pSend = NDIlib_send_create(&send_create_para);
if(!pSend)
{
err_fai("pSend");
}
static const char* p_connection_string = "<ndi_product long_name=\"NDI Logitech\" "
" short_name=\"NDI Send\" "
" manufacturer=\"Dunno.\" "
" version=\"1.000.000\" "
" session=\"default\" "
" model_name=\"S1\" "
" serial=\"ABCDEFG\"/>";
NDIlib_metadata_frame_t NDI_connection_type;
NDI_connection_type.length = (int)::strlen(p_connection_string);
NDI_connection_type.timecode = NDIlib_send_timecode_synthesize;
NDI_connection_type.p_data = (char*)p_connection_string;
NDIlib_send_add_connection_metadata(pSend, &NDI_connection_type);
const NDIlib_video_frame_t frame_para =
{
out_width, out_height, //Resolution
NDIlib_FourCC_type_BGRA, //BGRA colourspace
30000,1001, //29.97fps
16.0f/9.0f, //16:9 ratio
NDIlib_frame_format_type_progressive, //Progressive frames
NDIlib_send_timecode_synthesize,
(uint8_t*)av_malloc(out_width * out_height * 4), //TODO: Need to fix transmitting all planes at once
out_width * 4
};
printf("Assigned NDI frame parameters\n");
init_decoder();
printf("Initialsed decoder\n"); | |
init_device();
printf("Assigned V4L2 frame parameters\n");
start_capturing();
printf("Initialised buffers and started device capturing.\n");
/*-------------------------------------Main process-------------------------------------*/
signal(SIGINT, sig_handler);
for(int idx = 0; !stop; idx++)
{
fd_set fds;
struct timeval tv;
int r;
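//Wait (up to the 1 s timeout below) until the capture device has a filled buffer ready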
FD_ZERO(&fds);
FD_SET(fd, &fds);
//Timeout
tv.tv_sec = 1;
tv.tv_usec = 0;
r = select(fd + 1, &fds, NULL, NULL, &tv);
if (-1 == r) {
if (EINTR == errno)
continue;
err_fai("select");
}
if (0 == r) {
fprintf(stderr, "select timeout\n");
exit(EXIT_FAILURE);
}
if(ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
{
switch(errno) {
case EAGAIN:
return 0;
case EIO:
default:
err_fai("VIDIOC_DQBUF");
}
}
assert(buf.index < n_buffers);
packet_in.data = (uint8_t*)buffers[buf.index].start;
packet_in.size = buf.bytesused;
if(AVERROR(EAGAIN) == (avcodec_send_packet(decoder_ctx, &packet_in)))
{
fprintf(stderr,"Flushing input raw frames\n");
avcodec_flush_buffers(decoder_ctx);
}
if(ioctl(fd, VIDIOC_QBUF, &buf) < 0)
{
err_fai("VIDIOC_QBUF");
}
if((avcodec_receive_frame(decoder_ctx, decoded_frame)) == AVERROR(EAGAIN))
{
//No frame available yet; decoded_frame has been unreferenced, so skip the copy and NDI send.
fprintf(stderr,"No decoded frame available yet\n");
continue;
}
//Naive copy: the decoder outputs YUV, so only the luma plane is copied here (greyscale image).
//A proper YUV->BGRA conversion is sketched below.
for(int y = 0; y < out_height; y++)
{
uint8_t* p_image = (uint8_t*)frame_para.p_data + frame_para.line_stride_in_bytes*y;
uint8_t* p_luma = decoded_frame->data[0] + decoded_frame->linesize[0]*y;
for(int x = 0; x < out_width; x++, p_image += 4)
{
p_image[0] = p_luma[x]; //B
p_image[1] = p_luma[x]; //G
p_image[2] = p_luma[x]; //R
p_image[3] = 255; //A
}
}
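//Possible improvement (not in the original gist): convert the decoder's YUV frame to BGRA with
//libswscale, which CMakeLists.txt already links. Sketch only; it assumes an extra
//#include <libswscale/swscale.h> at the top of this file and a context reused across frames:
//
//  static SwsContext *sws = sws_getContext(out_width, out_height, (AVPixelFormat)decoded_frame->format,
//                                          out_width, out_height, AV_PIX_FMT_BGRA,
//                                          SWS_BILINEAR, NULL, NULL, NULL);
//  uint8_t *dst_data[4] = { (uint8_t*)frame_para.p_data, NULL, NULL, NULL };
//  int dst_linesize[4] = { frame_para.line_stride_in_bytes, 0, 0, 0 };
//  sws_scale(sws, decoded_frame->data, decoded_frame->linesize, 0, out_height, dst_data, dst_linesize);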
::NDIlib_send_send_video(pSend, &frame_para);
cout << "NDI data: " << (int)frame_para.p_data<< endl; | |
cout << "NDI datasize:" << frame_para.line_stride_in_bytes<< endl; | |
}
/*---------------------------------Closing device----------------------------------------*/
printf("Stopping NDI\n");
NDIlib_send_destroy(pSend);
NDIlib_destroy();
avcodec_send_packet(decoder_ctx,NULL);
while(AVERROR_EOF != (avcodec_receive_frame(decoder_ctx, decoded_frame)));
avcodec_flush_buffers(decoder_ctx);
av_free(frame_para.p_data);
av_frame_free(&decoded_frame);
avcodec_close(decoder_ctx);
printf("Flushed decoder\nClosing device\n");
uninit_device();
close_device();
fprintf(stderr,"\nEnded all processes\n");
return 0;
}