diff --git a/CMakeLists.txt b/CMakeLists.txt index bf22e7c..6be4842 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,8 +3,6 @@ project(nvmpi VERSION 1.0.0 DESCRIPTION "nvidia multimedia api") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC") -#set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/lib") -#set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/usr/lib/aarch64-linux-gnu") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/usr/lib/aarch64-linux-gnu/tegra") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,-rpath-link=/usr/local/cuda/lib64") @@ -12,10 +10,8 @@ find_library(LIB_NVBUF nvbuf_utils PATHS /usr/lib/aarch64-linux-gnu/tegra) find_library(LIB_V4L2 nvv4l2 PATHS /usr/lib/aarch64-linux-gnu/tegra) find_library(LIB_NVJPEG nvjpeg PATHS /usr/lib/aarch64-linux-gnu/tegra) find_package (Threads) -#find_library(LIB_DRM drm PATHS /usr/lib/aarch64-linux-gnu/tegra) -#find_library(LIB_EGL EGL PATHS /usr/lib/aarch64-linux-gnu/tegra) -add_library(nvmpi SHARED +set(NVMPI_SRC nvmpi_dec.cpp nvmpi_enc.cpp /usr/src/jetson_multimedia_api/samples/common/classes/NvBuffer.cpp @@ -25,36 +21,29 @@ add_library(nvmpi SHARED /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2Element.cpp /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2ElementPlane.cpp /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoDecoder.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoEncoder.cpp - #common/NvVideoConverter.cpp - #common/NvApplicationProfiler.cpp - #common/NvEglRenderer.cpp - #common/NvUtils.cpp -) + /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoEncoder.cpp) + +set(NVMPI_DEP_LIBS ${CMAKE_THREAD_LIBS_INIT} ${LIB_NVBUF} ${LIB_V4L2} ${LIB_NVJPEG}) -add_library(nvmpi_static STATIC - nvmpi_dec.cpp - nvmpi_enc.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvBuffer.cpp - 
/usr/src/jetson_multimedia_api/samples/common/classes/NvElement.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvElementProfiler.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvLogging.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2Element.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvV4l2ElementPlane.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoDecoder.cpp - /usr/src/jetson_multimedia_api/samples/common/classes/NvVideoEncoder.cpp - #common/NvVideoConverter.cpp - #common/NvApplicationProfiler.cpp - #common/NvEglRenderer.cpp - #common/NvUtils.cpp -) +#if NvUtils API is present prefer it to nvbuf_utils +if(EXISTS "/usr/src/jetson_multimedia_api/include/nvbufsurface.h") + add_definitions(-DWITH_NVUTILS) + find_library(LIB_NVBUFSURFACE nvbufsurface PATHS /usr/lib/aarch64-linux-gnu/tegra) + find_library(LIB_NVBUFSURFTRANSFORM nvbufsurftransform PATHS /usr/lib/aarch64-linux-gnu/tegra) + set(NVMPI_SRC ${NVMPI_SRC} + /usr/src/jetson_multimedia_api/samples/common/classes/NvBufSurface.cpp) + set(NVMPI_DEP_LIBS ${NVMPI_DEP_LIBS} ${LIB_NVBUFSURFACE} ${LIB_NVBUFSURFTRANSFORM}) +endif() + +add_library(nvmpi SHARED ${NVMPI_SRC}) +add_library(nvmpi_static STATIC ${NVMPI_SRC}) set_target_properties(nvmpi_static PROPERTIES OUTPUT_NAME nvmpi) set_target_properties(nvmpi PROPERTIES VERSION ${PROJECT_VERSION}) set_target_properties(nvmpi PROPERTIES SOVERSION 1) set_target_properties(nvmpi nvmpi_static PROPERTIES PUBLIC_HEADER nvmpi.h) set_target_properties(nvmpi PROPERTIES LINK_FLAGS "-Wl,--no-as-needed") -target_link_libraries(nvmpi PRIVATE ${CMAKE_THREAD_LIBS_INIT} ${LIB_NVBUF} ${LIB_V4L2} ${LIB_NVJPEG}) +target_link_libraries(nvmpi PRIVATE ${NVMPI_DEP_LIBS}) target_include_directories(nvmpi PRIVATE /usr/src/jetson_multimedia_api/include) target_include_directories(nvmpi PRIVATE /usr/local/cuda/include) target_include_directories(nvmpi_static PRIVATE /usr/src/jetson_multimedia_api/include) diff --git 
a/nvUtils2NvBuf.h b/nvUtils2NvBuf.h new file mode 100644 index 0000000..1d6226c --- /dev/null +++ b/nvUtils2NvBuf.h @@ -0,0 +1,27 @@ +#if defined(WITH_NVUTILS) +#include "nvbufsurface.h" +#include "nvbufsurftransform.h" +#include "NvBufSurface.h" +#define MAX_NUM_PLANES NVBUF_MAX_PLANES +#define NvBufferDestroy NvBufSurf::NvDestroy +#define NvBufferCreateParams NvBufSurf::NvCommonAllocateParams +#define NvBufferColorFormat_NV12 NVBUF_COLOR_FORMAT_NV12 +#define NvBufferColorFormat_NV12_ER NVBUF_COLOR_FORMAT_NV12_ER +#define NvBufferColorFormat_NV12_709 NVBUF_COLOR_FORMAT_NV12_709 +#define NvBufferColorFormat_NV12_709_ER NVBUF_COLOR_FORMAT_NV12_709_ER +#define NvBufferColorFormat_NV12_2020 NVBUF_COLOR_FORMAT_NV12_2020 +#define NvBufferColorFormat_YUV420 NVBUF_COLOR_FORMAT_YUV420 +#define NvBufferLayout_Pitch NVBUF_LAYOUT_PITCH +#define NvBufferLayout_BlockLinear NVBUF_LAYOUT_BLOCK_LINEAR +#define NvBufferTransformParams NvBufSurfTransformParams +#define NvBufferRect NvBufSurfTransformRect +#define NVBUFFER_TRANSFORM_FILTER NVBUFSURF_TRANSFORM_FILTER +#define NvBufferTransform_None NvBufSurfTransform_None +#define NvBufferTransform_Filter_Smart NvBufSurfTransformInter_Algo3 +#define NvBufferTransform_Filter_Nearest NvBufSurfTransformInter_Nearest +#define NvBufferParams NvBufSurfTransform + +//#define NvBuffer2Raw(dmabuf, plane, out_width, out_height, ptr) +#else +#include "nvbuf_utils.h" +#endif diff --git a/nvmpi_dec.cpp b/nvmpi_dec.cpp index 5db3300..20c7cbf 100644 --- a/nvmpi_dec.cpp +++ b/nvmpi_dec.cpp @@ -1,7 +1,7 @@ #include "nvmpi.h" #include "NvVideoDecoder.h" -#include "nvbuf_utils.h" +#include "nvUtils2NvBuf.h" #include #include #include @@ -46,16 +46,21 @@ struct nvmpictx unsigned long long timestamp[MAX_BUFFERS]; }; -void respondToResolutionEvent(v4l2_format &format, v4l2_crop &crop,nvmpictx* ctx){ - +void respondToResolutionEvent(v4l2_format &format, v4l2_crop &crop,nvmpictx* ctx) +{ int32_t minimumDecoderCaptureBuffers; int ret=0; NvBufferCreateParams 
input_params = {0}; NvBufferCreateParams cParams = {0}; + /* Get capture plane format from the decoder. + This may change after resolution change event. + Refer ioctl VIDIOC_G_FMT */ ret = ctx->dec->capture_plane.getFormat(format); TEST_ERROR(ret < 0, "Error: Could not get format from decoder capture plane", ret); + /* Get the display resolution from the decoder. + Refer ioctl VIDIOC_G_CROP */ ret = ctx->dec->capture_plane.getCrop(crop); TEST_ERROR(ret < 0, "Error: Could not get crop from decoder capture plane", ret); @@ -67,37 +72,44 @@ void respondToResolutionEvent(v4l2_format &format, v4l2_crop &crop,nvmpictx* ctx NvBufferDestroy(ctx->dst_dma_fd); ctx->dst_dma_fd = -1; } - - input_params.payloadType = NvBufferPayload_SurfArray; + + /* Create PitchLinear output buffer for transform. */ input_params.width = crop.c.width; input_params.height = crop.c.height; input_params.layout = NvBufferLayout_Pitch; input_params.colorFormat = ctx->out_pixfmt==NV_PIX_NV12?NvBufferColorFormat_NV12: NvBufferColorFormat_YUV420; +#ifdef WITH_NVUTILS + input_params.memType = NVBUF_MEM_SURFACE_ARRAY; + input_params.memtag = NvBufSurfaceTag_VIDEO_CONVERT; + + ret = NvBufSurf::NvAllocate(&input_params, 1, &ctx->dst_dma_fd); +#else + input_params.payloadType = NvBufferPayload_SurfArray; input_params.nvbuf_tag = NvBufferTag_VIDEO_DEC; + + ret = NvBufferCreateEx (&ctx->dst_dma_fd, &input_params); +#endif + TEST_ERROR(ret == -1, "create dst_dmabuf failed", error); + /* deinitPlane unmaps the buffers and calls REQBUFS with count 0 */ ctx->dec->capture_plane.deinitPlane(); - - for (int index = 0; index < ctx->numberCaptureBuffers; index++) + for (int index = 0; index < ctx->numberCaptureBuffers; index++) //V4L2_MEMORY_DMABUF { if (ctx->dmaBufferFileDescriptor[index] != 0) { ret = NvBufferDestroy(ctx->dmaBufferFileDescriptor[index]); TEST_ERROR(ret < 0, "Failed to Destroy NvBuffer", ret); } - } - 
ret=ctx->dec->setCapturePlaneFormat(format.fmt.pix_mp.pixelformat,format.fmt.pix_mp.width,format.fmt.pix_mp.height); TEST_ERROR(ret < 0, "Error in setting decoder capture plane format", ret); ctx->dec->getMinimumCapturePlaneBuffers(minimumDecoderCaptureBuffers); TEST_ERROR(ret < 0, "Error while getting value of minimum capture plane buffers",ret); + /* Request (min + extra) buffers, export and map buffers. */ ctx->numberCaptureBuffers = minimumDecoderCaptureBuffers + 5; - - - switch (format.fmt.pix_mp.colorspace) { case V4L2_COLORSPACE_SMPTE170M: @@ -144,31 +156,35 @@ void respondToResolutionEvent(v4l2_format &format, v4l2_crop &crop,nvmpictx* ctx break; } - - - ret = NvBufferCreateEx (&ctx->dst_dma_fd, &input_params); - TEST_ERROR(ret == -1, "create dst_dmabuf failed", error); - + cParams.width = crop.c.width; + cParams.height = crop.c.height; + cParams.layout = NvBufferLayout_BlockLinear; +#ifdef WITH_NVUTILS + cParams.memType = NVBUF_MEM_SURFACE_ARRAY; + cParams.memtag = NvBufSurfaceTag_VIDEO_DEC; + + ret = NvBufSurf::NvAllocate(&cParams, ctx->numberCaptureBuffers, ctx->dmaBufferFileDescriptor); + TEST_ERROR(ret < 0, "Failed to create buffers", error); +#else + cParams.payloadType = NvBufferPayload_SurfArray; + cParams.nvbuf_tag = NvBufferTag_VIDEO_DEC; + for (int index = 0; index < ctx->numberCaptureBuffers; index++) { - cParams.width = crop.c.width; - cParams.height = crop.c.height; - cParams.layout = NvBufferLayout_BlockLinear; - cParams.payloadType = NvBufferPayload_SurfArray; - cParams.nvbuf_tag = NvBufferTag_VIDEO_DEC; - ret = NvBufferCreateEx(&ctx->dmaBufferFileDescriptor[index], &cParams); TEST_ERROR(ret < 0, "Failed to create buffers", ret); - - } - + } +#endif + + /* Request buffers on decoder capture plane. Refer ioctl VIDIOC_REQBUFS */ ctx->dec->capture_plane.reqbufs(V4L2_MEMORY_DMABUF, ctx->numberCaptureBuffers); TEST_ERROR(ret < 0, "Error in decoder capture plane streamon", ret); + /* Decoder capture plane STREAMON. 
Refer ioctl VIDIOC_STREAMON */ ctx->dec->capture_plane.setStreamStatus(true); TEST_ERROR(ret < 0, "Error in decoder capture plane streamon", ret); - + /* Enqueue all the empty decoder capture plane buffers. */ for (uint32_t i = 0; i < ctx->dec->capture_plane.getNumBuffers(); i++) { struct v4l2_buffer v4l2_buf; @@ -190,55 +206,112 @@ void respondToResolutionEvent(v4l2_format &format, v4l2_crop &crop,nvmpictx* ctx ctx->got_res_event = true; } -void *dec_capture_loop_fcn(void *arg){ +void dec_capture_loop_fcn(void *arg) +{ nvmpictx* ctx=(nvmpictx*)arg; + NvVideoDecoder *dec = ctx->dec; struct v4l2_format v4l2Format; struct v4l2_crop v4l2Crop; struct v4l2_event v4l2Event; int ret,buf_index=0; + + /* override default session. Without overriding session we will + get seg. fault if decoding in forked process*/ +#ifdef WITH_NVUTILS + NvBufSurfTransformConfigParams session; + session.compute_mode = NvBufSurfTransformCompute_VIC; + session.gpu_id = 0; + session.cuda_stream = 0; + NvBufSurfTransformSetSessionParams(&session); +#else NvBufferSession session; session = NvBufferSessionCreate(); - - while (!(ctx->dec->isInError()||ctx->eos)){ +#endif + + /* Need to wait for the first Resolution change event, so that + the decoder knows the stream resolution and can allocate appropriate + buffers when we call REQBUFS. */ + do + { + /* Refer ioctl VIDIOC_DQEVENT */ + ret = dec->dqEvent(v4l2Event, 50000); + if (ret < 0) + { + ctx->eos=true; + if (errno == EAGAIN) + { + ERROR_MSG("Timed out waiting for first V4L2_EVENT_RESOLUTION_CHANGE"); + } + else + { + ERROR_MSG("Error in dequeueing decoder event"); + } + //abort(ctx); + break; + } + } + while ((v4l2Event.type != V4L2_EVENT_RESOLUTION_CHANGE) && !ctx->eos); + + /* Received the resolution change event, now can do respondToResolutionEvent. 
*/ + if (!ctx->eos) + respondToResolutionEvent(v4l2Format, v4l2Crop, ctx); + + +#ifdef WITH_NVUTILS + NvBufSurface *dst_dma_surface=0; + NvBufSurface *dec_buffer_surface=0; + ret = NvBufSurfaceFromFd(ctx->dst_dma_fd, (void**)(&dst_dma_surface)); + NvBufSurfaceParams dst_dma_surface_params = dst_dma_surface->surfaceList[0]; + NvBufSurfacePlaneParams parm = dst_dma_surface_params.planeParams; +#else + NvBufferParams parm; + ret = NvBufferGetParams(ctx->dst_dma_fd, &parm); +#endif + + while (!(ctx->eos || dec->isInError())) + { NvBuffer *dec_buffer; - - ret = ctx->dec->dqEvent(v4l2Event, ctx->got_res_event ? 0 : 500); + + // Check for Resolution change again. + ret = dec->dqEvent(v4l2Event, false); if (ret == 0) { switch (v4l2Event.type) { case V4L2_EVENT_RESOLUTION_CHANGE: - respondToResolutionEvent(v4l2Format, v4l2Crop,ctx); + respondToResolutionEvent(v4l2Format, v4l2Crop, ctx); continue; } - } - - if (!ctx->got_res_event) { - continue; } - - while(!ctx->eos){ + + /* Decoder capture loop */ + while(!ctx->eos) + { struct v4l2_buffer v4l2_buf; struct v4l2_plane planes[MAX_PLANES]; v4l2_buf.m.planes = planes; - - if (ctx->dec->capture_plane.dqBuffer(v4l2_buf, &dec_buffer, NULL, 0)){ + + /* Dequeue a filled buffer. 
*/ + if (dec->capture_plane.dqBuffer(v4l2_buf, &dec_buffer, NULL, 0)) + { if (errno == EAGAIN) { + if (v4l2_buf.flags & V4L2_BUF_FLAG_LAST) + { + ERROR_MSG("Got EoS at capture plane"); + ctx->eos=true; + } usleep(1000); } else { - ERROR_MSG("Error while calling dequeue at capture plane"); ctx->eos=true; } break; - } - - dec_buffer->planes[0].fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index]; + NvBufferRect src_rect, dest_rect; src_rect.top = 0; src_rect.left = 0; @@ -254,30 +327,38 @@ void *dec_capture_loop_fcn(void *arg){ transform_params.transform_flag = NVBUFFER_TRANSFORM_FILTER; transform_params.transform_flip = NvBufferTransform_None; transform_params.transform_filter = NvBufferTransform_Filter_Smart; - transform_params.session = session; +#ifdef WITH_NVUTILS + transform_params.src_rect = &src_rect; + transform_params.dst_rect = &dest_rect; +#else transform_params.src_rect = src_rect; transform_params.dst_rect = dest_rect; + transform_params.session = session; +#endif + + dec_buffer->planes[0].fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index]; ctx->mutex->lock(); - - if(!ctx->eos){ - + if(!ctx->eos) + { +#ifdef WITH_NVUTILS + ret = NvBufSurfaceFromFd(dec_buffer->planes[0].fd, (void**)(&dec_buffer_surface)); + ret = NvBufSurfTransform(dec_buffer_surface, dst_dma_surface, &transform_params); +#else ret = NvBufferTransform(dec_buffer->planes[0].fd, ctx->dst_dma_fd, &transform_params); +#endif TEST_ERROR(ret==-1, "Transform failed",ret); - NvBufferParams parm; - ret = NvBufferGetParams(ctx->dst_dma_fd, &parm); - - if(!ctx->frame_size[0]){ - - for(int index=0;indexframe_size[0]) + { + for(int index=0;indexbufptr_0[index]=new unsigned char[parm.psize[0]];//Y ctx->bufptr_1[index]=new unsigned char[parm.psize[1]];//UV or UU ctx->bufptr_2[index]=new unsigned char[parm.psize[2]];//VV } } - ctx->frame_linesize[0]=parm.width[0]; ctx->frame_size[0]=parm.psize[0]; @@ -286,11 +367,17 @@ void *dec_capture_loop_fcn(void *arg){ ctx->frame_linesize[2]=parm.width[2]; 
ctx->frame_size[2]=parm.psize[2]; - +#ifdef WITH_NVUTILS + ret=NvBufSurface2Raw(dst_dma_surface,0,0,parm.width[0],parm.height[0],ctx->bufptr_0[buf_index]); + ret=NvBufSurface2Raw(dst_dma_surface,0,1,parm.width[1],parm.height[1],ctx->bufptr_1[buf_index]); + if(ctx->out_pixfmt==NV_PIX_YUV420) + ret=NvBufSurface2Raw(dst_dma_surface,0,2,parm.width[2],parm.height[2],ctx->bufptr_2[buf_index]); +#else ret=NvBuffer2Raw(ctx->dst_dma_fd,0,parm.width[0],parm.height[0],ctx->bufptr_0[buf_index]); ret=NvBuffer2Raw(ctx->dst_dma_fd,1,parm.width[1],parm.height[1],ctx->bufptr_1[buf_index]); if(ctx->out_pixfmt==NV_PIX_YUV420) - ret=NvBuffer2Raw(ctx->dst_dma_fd,2,parm.width[2],parm.height[2],ctx->bufptr_2[buf_index]); + ret=NvBuffer2Raw(ctx->dst_dma_fd,2,parm.width[2],parm.height[2],ctx->bufptr_2[buf_index]); +#endif ctx->frame_pools->push(buf_index); ctx->timestamp[buf_index]= (v4l2_buf.timestamp.tv_usec % 1000000) + (v4l2_buf.timestamp.tv_sec * 1000000UL); @@ -298,24 +385,30 @@ void *dec_capture_loop_fcn(void *arg){ buf_index=(buf_index+1)%MAX_BUFFERS; } - ctx->mutex->unlock(); - if (ctx->eos) { + if (ctx->eos) + { break; } ctx->has_frame_cv->notify_one(); v4l2_buf.m.planes[0].m.fd = ctx->dmaBufferFileDescriptor[v4l2_buf.index]; - if (ctx->dec->capture_plane.qBuffer(v4l2_buf, NULL) < 0){ + if (dec->capture_plane.qBuffer(v4l2_buf, NULL) < 0) + { ERROR_MSG("Error while queueing buffer at decoder capture plane"); } } } + +#ifndef WITH_NVUTILS NvBufferSessionDestroy(session); +#endif // Wake all waiting threads at EOS or decoder error ctx->has_frame_cv->notify_all(); + + return; } nvmpictx* nvmpi_create_decoder(nvCodingType codingType,nvPixFormat pixFormat){ @@ -435,7 +528,8 @@ int nvmpi_decoder_put_packet(nvmpictx* ctx,nvPacket* packet){ ret = ctx->dec->output_plane.qBuffer(v4l2_buf, NULL); - if (ret < 0) { + if (ret < 0) + { std::cout << "Error Qing buffer at output plane" << std::endl; return false; } @@ -443,7 +537,8 @@ int nvmpi_decoder_put_packet(nvmpictx* ctx,nvPacket* packet){ 
if (ctx->index < ctx->dec->output_plane.getNumBuffers()) ctx->index++; - if (v4l2_buf.m.planes[0].bytesused == 0) { + if (v4l2_buf.m.planes[0].bytesused == 0) + { ctx->eos=true; std::cout << "Input file read complete" << std::endl; } @@ -454,18 +549,21 @@ int nvmpi_decoder_put_packet(nvmpictx* ctx,nvPacket* packet){ } -int nvmpi_decoder_get_frame(nvmpictx* ctx,nvFrame* frame,bool wait){ - +int nvmpi_decoder_get_frame(nvmpictx* ctx,nvFrame* frame,bool wait) +{ int ret,picture_index; std::unique_lock lock(*ctx->mutex); - if (wait) { - while (ctx->frame_pools->empty() && !ctx->eos && !ctx->dec->isInError()) { + if (wait) + { + while (ctx->frame_pools->empty() && !ctx->eos && !ctx->dec->isInError()) + { ctx->has_frame_cv->wait(lock); } } - if (ctx->frame_pools->empty()) { + if (ctx->frame_pools->empty()) + { return -1; } @@ -492,8 +590,8 @@ int nvmpi_decoder_get_frame(nvmpictx* ctx,nvFrame* frame,bool wait){ } -int nvmpi_decoder_close(nvmpictx* ctx){ - +int nvmpi_decoder_close(nvmpictx* ctx) +{ ctx->mutex->lock(); ctx->eos=true; ctx->mutex->unlock(); diff --git a/nvmpi_enc.cpp b/nvmpi_enc.cpp index 43f267d..191a748 100644 --- a/nvmpi_enc.cpp +++ b/nvmpi_enc.cpp @@ -1,6 +1,6 @@ #include "nvmpi.h" #include "NvVideoEncoder.h" -#include "nvbuf_utils.h" +#include "nvUtils2NvBuf.h" #include #include #include @@ -149,7 +149,6 @@ nvmpictx* nvmpi_create_encoder(nvCodingType codingType,nvEncParam * param){ default: ctx->profile=V4L2_MPEG_VIDEO_H264_PROFILE_MAIN; break; - } switch(param->level){ @@ -219,11 +218,8 @@ nvmpictx* nvmpi_create_encoder(nvCodingType codingType,nvEncParam * param){ default: ctx->hw_preset_type = V4L2_ENC_HW_PRESET_MEDIUM; break; - } - - if(param->enableLossless) ctx->enableLossless=true; @@ -374,9 +370,9 @@ nvmpictx* nvmpi_create_encoder(nvCodingType codingType,nvEncParam * param){ } -int nvmpi_encoder_put_frame(nvmpictx* ctx,nvFrame* frame){ +int nvmpi_encoder_put_frame(nvmpictx* ctx,nvFrame* frame) +{ int ret; - struct v4l2_buffer v4l2_buf; struct 
v4l2_plane planes[MAX_PLANES]; NvBuffer *nvBuffer; @@ -389,27 +385,28 @@ int nvmpi_encoder_put_frame(nvmpictx* ctx,nvFrame* frame){ if(ctx->enc->isInError()) return -1; - if(ctx->index < ctx->enc->output_plane.getNumBuffers()){ - + if(ctx->index < ctx->enc->output_plane.getNumBuffers()) + { nvBuffer=ctx->enc->output_plane.getNthBuffer(ctx->index); v4l2_buf.index = ctx->index ; ctx->index++; - - }else{ + } + else + { ret = ctx->enc->output_plane.dqBuffer(v4l2_buf, &nvBuffer, NULL, -1); - if (ret < 0) { + if (ret < 0) + { cout << "Error DQing buffer at output plane" << std::endl; return false; } - } - - memcpy(nvBuffer->planes[0].data,frame->payload[0],frame->payload_size[0]); - memcpy(nvBuffer->planes[1].data,frame->payload[1],frame->payload_size[1]); - memcpy(nvBuffer->planes[2].data,frame->payload[2],frame->payload_size[2]); - nvBuffer->planes[0].bytesused=frame->payload_size[0]; - nvBuffer->planes[1].bytesused=frame->payload_size[1]; - nvBuffer->planes[2].bytesused=frame->payload_size[2]; + + nvBuffer->planes[0].bytesused=nvBuffer->planes[0].fmt.stride * nvBuffer->planes[0].fmt.height; + nvBuffer->planes[1].bytesused=nvBuffer->planes[1].fmt.stride * nvBuffer->planes[1].fmt.height; + nvBuffer->planes[2].bytesused=nvBuffer->planes[2].fmt.stride * nvBuffer->planes[2].fmt.height; + memcpy(nvBuffer->planes[0].data, frame->payload[0], nvBuffer->planes[0].bytesused); + memcpy(nvBuffer->planes[1].data, frame->payload[1], nvBuffer->planes[1].bytesused); + memcpy(nvBuffer->planes[2].data, frame->payload[2], nvBuffer->planes[2].bytesused); v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY; v4l2_buf.timestamp.tv_usec = frame->timestamp % 1000000; @@ -421,8 +418,8 @@ int nvmpi_encoder_put_frame(nvmpictx* ctx,nvFrame* frame){ return 0; } -int nvmpi_encoder_get_packet(nvmpictx* ctx,nvPacket* packet){ - +int nvmpi_encoder_get_packet(nvmpictx* ctx,nvPacket* packet) +{ int ret,packet_index; if(ctx->packet_pools->empty()) @@ -448,12 +445,13 @@ int nvmpi_encoder_get_packet(nvmpictx* 
ctx,nvPacket* packet){ return 0; } -int nvmpi_encoder_close(nvmpictx* ctx){ - +int nvmpi_encoder_close(nvmpictx* ctx) +{ ctx->enc->capture_plane.stopDQThread(); ctx->enc->capture_plane.waitForDQThread(1000); delete ctx->enc; delete ctx->packet_pools; delete ctx; + return 0; }