From ddcc149d211db26ef59fadeeda04102c2b22fd8b Mon Sep 17 00:00:00 2001 From: Aaron Kling Date: Wed, 10 Apr 2024 17:52:19 -0500 Subject: [PATCH 1/3] Write jpegs using avcodec --- src/zm_event.cpp | 60 +++++++++++++++++++++++++++++++++------ src/zm_event.h | 3 ++ src/zm_image.cpp | 73 +++++++++++++++++++++++++++++++++++++++++++++++- src/zm_image.h | 5 +++- 4 files changed, 131 insertions(+), 10 deletions(-) diff --git a/src/zm_event.cpp b/src/zm_event.cpp index 6ee690090..d771c5b4d 100644 --- a/src/zm_event.cpp +++ b/src/zm_event.cpp @@ -143,6 +143,46 @@ Event::Event( id = zmDbDoInsert(sql); } while (!id and !zm_terminate); + const AVCodec* mJpegCodec = avcodec_find_encoder(AV_CODEC_ID_MJPEG); + if (!mJpegCodec) { + Error("MJPEG codec not found"); + return; + } + + mJpegCodecContext = avcodec_alloc_context3(mJpegCodec); + if (!mJpegCodecContext) { + Error("Could not allocate jpeg codec context"); + return; + } + + mJpegCodecContext->bit_rate = 400000; + mJpegCodecContext->width = monitor->Width(); + mJpegCodecContext->height = monitor->Height(); + mJpegCodecContext->time_base= (AVRational) {1,25}; + mJpegCodecContext->pix_fmt = AV_PIX_FMT_YUVJ420P; + + if (avcodec_open2(mJpegCodecContext, mJpegCodec, NULL) < 0) { + Error("Could not open mjpeg codec"); + return; + } + + AVPixelFormat format; + switch (monitor->Colours()) { + case ZM_COLOUR_RGB24: + format = (monitor->SubpixelOrder() == ZM_SUBPIX_ORDER_BGR ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_RGB24); + break; + case ZM_COLOUR_GRAY8: + format = AV_PIX_FMT_GRAY8; + break; + default: + format = AV_PIX_FMT_RGBA; + break; + }; + mJpegSwsContext = sws_getContext( + mJpegCodecContext->width, mJpegCodecContext->height, format, + mJpegCodecContext->width, mJpegCodecContext->height, AV_PIX_FMT_YUV420P, + SWS_BICUBIC, nullptr, nullptr, nullptr); + thread_ = std::thread(&Event::Run, this); } @@ -220,6 +260,16 @@ Event::~Event() { id); zmDbDoUpdate(sql); } // end if no changed rows due to Name change during recording + + if (mJpegCodecContext) { + avcodec_close(mJpegCodecContext); + avcodec_free_context(&mJpegCodecContext); + mJpegCodecContext = nullptr; + } + + if (mJpegSwsContext) { + sws_freeContext(mJpegSwsContext); + } } // Event::~Event() void Event::createNotes(std::string ¬es) { @@ -241,20 +291,14 @@ void Event::addNote(const char *cause, const std::string ¬e) { } bool Event::WriteFrameImage(Image *image, SystemTimePoint timestamp, const char *event_file, bool alarm_frame) const { - int thisquality = - (alarm_frame && (config.jpeg_alarm_file_quality > config.jpeg_file_quality)) ? - config.jpeg_alarm_file_quality : 0; // quality to use, zero is default - - SystemTimePoint jpeg_timestamp = monitor->Exif() ? timestamp : SystemTimePoint(); - if (!config.timestamp_on_capture) { // stash the image we plan to use in another pointer regardless if timestamped. 
// exif is only timestamp at present this switches on or off for write Image ts_image(*image); monitor->TimestampImage(&ts_image, timestamp); - return ts_image.WriteJpeg(event_file, thisquality, jpeg_timestamp); + return ts_image.WriteJpeg(event_file, mJpegCodecContext, mJpegSwsContext); } - return image->WriteJpeg(event_file, thisquality, jpeg_timestamp); + return image->WriteJpeg(event_file, mJpegCodecContext, mJpegSwsContext); } bool Event::WritePacket(const std::shared_ptrpacket) { diff --git a/src/zm_event.h b/src/zm_event.h index 0a7a868aa..f0c1c7580 100644 --- a/src/zm_event.h +++ b/src/zm_event.h @@ -95,6 +95,9 @@ class Event { std::string alarm_file; VideoStore *videoStore; + AVCodecContext *mJpegCodecContext; + SwsContext *mJpegSwsContext; + std::string container; std::string codec; std::string video_file; diff --git a/src/zm_image.cpp b/src/zm_image.cpp index 26e69644c..0023d55d7 100644 --- a/src/zm_image.cpp +++ b/src/zm_image.cpp @@ -258,7 +258,7 @@ Image::Image(const AVFrame *frame, int p_width, int p_height) : static void dont_free(void *opaque, uint8_t *data) { } -int Image::PopulateFrame(AVFrame *frame) { +int Image::PopulateFrame(AVFrame *frame) const { Debug(1, "PopulateFrame: width %d height %d linesize %d colours %d imagesize %d %s", width, height, linesize, colours, size, av_get_pix_fmt_name(imagePixFormat) @@ -1321,6 +1321,77 @@ bool Image::WriteJpeg(const std::string &filename, return true; } +bool Image::WriteJpeg(const std::string &filename, + AVCodecContext *p_jpegcodeccontext, + SwsContext *p_jpegswscontext) const { + + if (config.colour_jpeg_files && (colours == ZM_COLOUR_GRAY8)) { + Image temp_image(*this); + temp_image.Colourise(ZM_COLOUR_RGB24, ZM_SUBPIX_ORDER_RGB); + return temp_image.WriteJpeg(filename, p_jpegcodeccontext, p_jpegswscontext); + } + + if (p_jpegcodeccontext == NULL) { + Error("Jpeg codec context is not initialized"); + return false; + } + + FILE *outfile = nullptr; + int raw_fd = 0; + av_frame_ptr frame = av_frame_ptr{zm_av_frame_alloc()}; + AVPacket *pkt; + + raw_fd = open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH); + if (raw_fd < 0) + return false; + outfile = fdopen(raw_fd, "wb"); + if (outfile == nullptr) { + close(raw_fd); + return false; + } + + struct flock fl = { F_WRLCK, SEEK_SET, 0, 0, 0 }; + if (fcntl(raw_fd, F_SETLKW, &fl) == -1) { + Error("Couldn't get lock on %s, continuing", filename.c_str()); + } + + if ( p_jpegswscontext ) { + av_frame_ptr temp_frame = av_frame_ptr{zm_av_frame_alloc()}; + PopulateFrame(temp_frame.get()); + + frame.get()->width = width; + frame.get()->height = height; + frame.get()->format = AV_PIX_FMT_YUV420P; + av_image_fill_linesizes(frame.get()->linesize, AV_PIX_FMT_YUV420P, width); + av_frame_get_buffer(frame.get(), 32); + + sws_scale(p_jpegswscontext, temp_frame.get()->data, temp_frame.get()->linesize, 0, height, frame.get()->data, frame.get()->linesize); + + av_frame_unref(temp_frame.get()); + } else { + PopulateFrame(frame.get()); + } + + pkt = av_packet_alloc(); + + avcodec_send_frame(p_jpegcodeccontext, frame.get()); + if (avcodec_receive_packet(p_jpegcodeccontext, pkt) == 0) { + fwrite(pkt->data, 1, pkt->size, outfile); + av_packet_free(&pkt); + } + + av_frame_unref(frame.get()); + + fl.l_type = F_UNLCK; /* set to unlock same region */ + if (fcntl(raw_fd, F_SETLK, &fl) == -1) { + Error("Failed to unlock %s", filename.c_str()); + } + + fclose(outfile); + + return true; +} + bool Image::DecodeJpeg(const JOCTET *inbuffer, int inbuffer_size, unsigned int 
p_colours, unsigned int p_subpixelorder) { unsigned int new_width, new_height, new_colours, new_subpixelorder; diff --git a/src/zm_image.h b/src/zm_image.h index 566e2477b..dd3681a44 100644 --- a/src/zm_image.h +++ b/src/zm_image.h @@ -207,7 +207,7 @@ class Image { const size_t buffer_size, const int p_buffertype); - int PopulateFrame(AVFrame *frame); + int PopulateFrame(AVFrame *frame) const; inline void CopyBuffer(const Image &image) { Assign(image); @@ -235,6 +235,9 @@ class Image { const int &quality_override, SystemTimePoint timestamp, bool on_blocking_abort) const; + bool WriteJpeg(const std::string &filename, + AVCodecContext *p_jpegcodeccontext, + SwsContext *p_jpegswscontext) const; bool DecodeJpeg(const JOCTET *inbuffer, int inbuffer_size, unsigned int p_colours, unsigned int p_subpixelorder); bool EncodeJpeg(JOCTET *outbuffer, int *outbuffer_size, int quality_override=0) const; From 958c2ca7cbe8d4f6c23ee3eb51eec8d672f9c140 Mon Sep 17 00:00:00 2001 From: Aaron Kling Date: Sat, 20 Apr 2024 02:00:11 -0500 Subject: [PATCH 2/3] Stream mjpeg using avcodec and yuv420p --- src/zm_eventstream.cpp | 9 +++-- src/zm_image.cpp | 53 ++++++++++++++++++++++++++++++ src/zm_image.h | 1 + src/zm_monitorstream.cpp | 20 +++++++---- src/zm_stream.cpp | 71 +++++++++++++++++++++++++++++++++------- src/zm_stream.h | 4 +++ 6 files changed, 137 insertions(+), 21 deletions(-) diff --git a/src/zm_eventstream.cpp b/src/zm_eventstream.cpp index 2d413d0c8..519297598 100644 --- a/src/zm_eventstream.cpp +++ b/src/zm_eventstream.cpp @@ -870,14 +870,19 @@ bool EventStream::sendFrame(Microseconds delta_us) { } Image *send_image = prepareImage(image); - reserveTempImgBuffer(send_image->Size()); + int l_width = floor(send_image->Width() * scale / ZM_SCALE_BASE); + int l_height = floor(send_image->Height() * scale / ZM_SCALE_BASE); + reserveTempImgBuffer(av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, l_width, l_height, 32)); int img_buffer_size = 0; uint8_t *img_buffer = temp_img_buffer; fprintf(stdout, "--" BOUNDARY "\r\n"); switch ( type ) { case STREAM_JPEG : - send_image->EncodeJpeg(img_buffer, &img_buffer_size); + if (mJpegCodecContext->width != l_width || mJpegCodecContext->height != l_height) { + initContexts(l_width, l_height); + } + send_image->EncodeJpeg(img_buffer, &img_buffer_size, mJpegCodecContext, mJpegSwsContext); fputs("Content-Type: image/jpeg\r\n", stdout); break; case STREAM_ZIP : diff --git a/src/zm_image.cpp b/src/zm_image.cpp index 0023d55d7..bb23bbee2 100644 --- a/src/zm_image.cpp +++ b/src/zm_image.cpp @@ -1600,6 +1600,59 @@ bool Image::EncodeJpeg(JOCTET *outbuffer, int *outbuffer_size, int quality_overr return true; } +bool Image::EncodeJpeg(JOCTET *outbuffer, int *outbuffer_size, AVCodecContext *p_jpegcodeccontext, SwsContext *p_jpegswscontext) const { + if ( config.colour_jpeg_files && (colours == ZM_COLOUR_GRAY8) ) { + Image temp_image(*this); + temp_image.Colourise(ZM_COLOUR_RGB24, ZM_SUBPIX_ORDER_RGB); + return temp_image.EncodeJpeg(outbuffer, outbuffer_size, p_jpegcodeccontext, p_jpegswscontext); + } + + if (p_jpegcodeccontext == NULL) { + Error("Jpeg codec context is not initialized"); + return false; + } + + std::unique_lock lck(jpeg_mutex); + + av_frame_ptr frame = av_frame_ptr{zm_av_frame_alloc()}; + AVPacket *pkt; + + if (av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32) > static_cast(Size())) { + Error("Output buffer not large enough"); + return false; + } + + if ( p_jpegswscontext ) { + av_frame_ptr temp_frame = av_frame_ptr{zm_av_frame_alloc()}; + 
PopulateFrame(temp_frame.get()); + + frame.get()->width = width; + frame.get()->height = height; + frame.get()->format = AV_PIX_FMT_YUV420P; + av_image_fill_linesizes(frame.get()->linesize, AV_PIX_FMT_YUV420P, width); + av_frame_get_buffer(frame.get(), 32); + + sws_scale(p_jpegswscontext, temp_frame.get()->data, temp_frame.get()->linesize, 0, height, frame.get()->data, frame.get()->linesize); + + av_frame_unref(temp_frame.get()); + } else { + PopulateFrame(frame.get()); + } + + pkt = av_packet_alloc(); + + avcodec_send_frame(p_jpegcodeccontext, frame.get()); + if (avcodec_receive_packet(p_jpegcodeccontext, pkt) == 0) { + memcpy(outbuffer, pkt->data, pkt->size); + *outbuffer_size = pkt->size; + } + + av_packet_free(&pkt); + av_frame_unref(frame.get()); + + return true; +} + #if HAVE_ZLIB_H bool Image::Unzip( const Bytef *inbuffer, unsigned long inbuffer_size ) { unsigned long zip_size = size; diff --git a/src/zm_image.h b/src/zm_image.h index dd3681a44..3f391cf1f 100644 --- a/src/zm_image.h +++ b/src/zm_image.h @@ -241,6 +241,7 @@ class Image { bool DecodeJpeg(const JOCTET *inbuffer, int inbuffer_size, unsigned int p_colours, unsigned int p_subpixelorder); bool EncodeJpeg(JOCTET *outbuffer, int *outbuffer_size, int quality_override=0) const; + bool EncodeJpeg(JOCTET *outbuffer, int *outbuffer_size, AVCodecContext *p_jpegcodeccontext, SwsContext *p_jpegswscontext) const; #if HAVE_ZLIB_H bool Unzip(const Bytef *inbuffer, unsigned long inbuffer_size); diff --git a/src/zm_monitorstream.cpp b/src/zm_monitorstream.cpp index c114ca72e..450e47263 100644 --- a/src/zm_monitorstream.cpp +++ b/src/zm_monitorstream.cpp @@ -393,14 +393,20 @@ bool MonitorStream::sendFrame(Image *image, SystemTimePoint timestamp) { /* double pts = */ vid_stream->EncodeFrame(send_image->Buffer(), send_image->Size(), config.mpeg_timed_frames, delta_time.count()); } else { - reserveTempImgBuffer(send_image->Size()); + int l_width = floor(send_image->Width() * scale / ZM_SCALE_BASE); + int l_height = floor(send_image->Height() * scale / ZM_SCALE_BASE); + + reserveTempImgBuffer(av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, l_width, l_height, 32)); int img_buffer_size = 0; unsigned char *img_buffer = temp_img_buffer; switch (type) { case STREAM_JPEG : - send_image->EncodeJpeg(img_buffer, &img_buffer_size); + if (mJpegCodecContext->width != l_width || mJpegCodecContext->height != l_height) { + initContexts(l_width, l_height); + } + send_image->EncodeJpeg(img_buffer, &img_buffer_size, mJpegCodecContext, mJpegSwsContext); fputs("Content-Type: image/jpeg\r\n", stdout); break; case STREAM_RAW : @@ -937,12 +943,12 @@ void MonitorStream::SingleImage(int scale) { SystemTimePoint(zm::chrono::duration_cast(monitor->shared_timestamps[index]))); } - if ( scale != ZM_SCALE_BASE ) { - scaled_image.Assign(*snap_image); - scaled_image.Scale(scale); - snap_image = &scaled_image; + int l_width = floor(snap_image->Width() * scale / ZM_SCALE_BASE); + int l_height = floor(snap_image->Height() * scale / ZM_SCALE_BASE); + if (mJpegCodecContext->width != l_width || mJpegCodecContext->height != l_height) { + initContexts(l_width, l_height); } - snap_image->EncodeJpeg(img_buffer, &img_buffer_size); + snap_image->EncodeJpeg(img_buffer, &img_buffer_size, mJpegCodecContext, mJpegSwsContext); fprintf(stdout, "Content-Length: %d\r\n" diff --git a/src/zm_stream.cpp b/src/zm_stream.cpp index 1f55ad350..4cebec894 100644 --- a/src/zm_stream.cpp +++ b/src/zm_stream.cpp @@ -36,6 +36,61 @@ StreamBase::~StreamBase() { delete vid_stream; delete[] temp_img_buffer; 
closeComms(); + + if (mJpegCodecContext) { + avcodec_free_context(&mJpegCodecContext); + } + + if (mJpegSwsContext) { + sws_freeContext(mJpegSwsContext); + } +} + +bool StreamBase::initContexts(int p_width, int p_height) { + if (mJpegCodecContext) avcodec_free_context(&mJpegCodecContext); + if (mJpegSwsContext) sws_freeContext(mJpegSwsContext); + + const AVCodec* mJpegCodec = avcodec_find_encoder(AV_CODEC_ID_MJPEG); + if (!mJpegCodec) { + Error("MJPEG codec not found"); + return false; + } + + mJpegCodecContext = avcodec_alloc_context3(mJpegCodec); + if (!mJpegCodecContext) { + Error("Could not allocate jpeg codec context"); + return false; + } + + mJpegCodecContext->bit_rate = 400000; + mJpegCodecContext->width = p_width; + mJpegCodecContext->height = p_height; + mJpegCodecContext->time_base= (AVRational) {1,25}; + mJpegCodecContext->pix_fmt = AV_PIX_FMT_YUVJ420P; + + if (avcodec_open2(mJpegCodecContext, mJpegCodec, NULL) < 0) { + Error("Could not open mjpeg codec"); + return false; + } + + AVPixelFormat format; + switch (monitor->Colours()) { + case ZM_COLOUR_RGB24: + format = (monitor->SubpixelOrder() == ZM_SUBPIX_ORDER_BGR ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_RGB24); + break; + case ZM_COLOUR_GRAY8: + format = AV_PIX_FMT_GRAY8; + break; + default: + format = AV_PIX_FMT_RGBA; + break; + }; + mJpegSwsContext = sws_getContext( + monitor->Width(), monitor->Height(), format, + p_width, p_height, AV_PIX_FMT_YUV420P, + SWS_BICUBIC, nullptr, nullptr, nullptr); + + return true; } bool StreamBase::loadMonitor(int p_monitor_id) { @@ -64,7 +119,9 @@ bool StreamBase::loadMonitor(int p_monitor_id) { return false; } - return true; + mJpegCodecContext = nullptr; + mJpegSwsContext = nullptr; + return initContexts(monitor->Width(), monitor->Height()); } bool StreamBase::checkInitialised() { @@ -150,9 +207,7 @@ Image *StreamBase::prepareImage(Image *image) { if (zoom != 100) { int base_image_width = image->Width(), - base_image_height = image->Height(), - disp_image_width = image->Width() * scale/ZM_SCALE_BASE, - disp_image_height = image->Height() * scale / ZM_SCALE_BASE; + base_image_height = image->Height(); /* x and y are scaled by web UI to base dimensions units. * When zooming, we blow up the image by the amount 150 for first zoom, right? 
150%, then cut out a base sized chunk * However if we have zoomed before, then we are zooming into the previous cutout @@ -229,14 +284,6 @@ Image *StreamBase::prepareImage(Image *image) { image_copied = true; } image->Crop(last_crop); - image->Scale(disp_image_width, disp_image_height); - } else if (scale != ZM_SCALE_BASE) { - Debug(3, "scaling by %d from %dx%d", scale, image->Width(), image->Height()); - static Image copy_image; - copy_image.Assign(*image); - image = ©_image; - image_copied = true; - image->Scale(scale); } Debug(3, "Sending %dx%d", image->Width(), image->Height()); diff --git a/src/zm_stream.h b/src/zm_stream.h index c668c0bcb..59393b004 100644 --- a/src/zm_stream.h +++ b/src/zm_stream.h @@ -153,6 +153,9 @@ class StreamBase { uint8_t *temp_img_buffer; // Used when encoding or sending file data size_t temp_img_buffer_size; + AVCodecContext *mJpegCodecContext; + SwsContext *mJpegSwsContext; + protected: bool loadMonitor(int monitor_id); bool checkInitialised(); @@ -161,6 +164,7 @@ class StreamBase { void checkCommandQueue(); virtual void processCommand(const CmdMsg *msg)=0; void reserveTempImgBuffer(size_t size); + bool initContexts(int p_width, int p_height); public: StreamBase(): From 236336db77c8cf5db136422d9d295b0bbc461aff Mon Sep 17 00:00:00 2001 From: Aaron Kling Date: Thu, 28 Mar 2024 16:36:39 -0500 Subject: [PATCH 3/3] WIP: Use native camera image format for all processing --- src/zm_camera.cpp | 8 +++---- src/zm_image.cpp | 55 ++++++++++++++++++++++------------------------ src/zm_monitor.cpp | 4 +++- 3 files changed, 33 insertions(+), 34 deletions(-) diff --git a/src/zm_camera.cpp b/src/zm_camera.cpp index ddece6000..d1d2d0d49 100644 --- a/src/zm_camera.cpp +++ b/src/zm_camera.cpp @@ -39,8 +39,8 @@ Camera::Camera( type(p_type), width(p_width), height(p_height), - colours(p_colours), - subpixelorder(p_subpixelorder), + colours(ZM_COLOUR_RGB24), + subpixelorder(ZM_SUBPIX_ORDER_YUV420P), brightness(p_brightness), hue(p_hue), colour(p_colour), @@ -61,9 +61,9 @@ Camera::Camera( mLastAudioPTS(0), bytes(0), mIsPrimed(false) { - linesize = width * colours; + linesize = FFALIGN(av_image_get_linesize(AV_PIX_FMT_YUVJ420P, width, 0), 32); // hardcoded hack pixels = width * height; - imagesize = static_cast(height) * linesize; + imagesize = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32); // hardcoded hack Debug(2, "New camera id: %d width: %d line size: %d height: %d colours: %d subpixelorder: %d capture: %d, size: %llu", monitor->Id(), width, linesize, height, colours, subpixelorder, capture, imagesize); diff --git a/src/zm_image.cpp b/src/zm_image.cpp index bb23bbee2..4ade753fe 100644 --- a/src/zm_image.cpp +++ b/src/zm_image.cpp @@ -174,7 +174,6 @@ Image::Image(int p_width, int p_height, int p_colours, int p_subpixelorder, uint if (!initialised) Initialise(); pixels = width * height; - linesize = p_width * p_colours; if (!subpixelorder and (colours>1)) { // Default to RGBA when no subpixelorder is specified. 
@@ -182,15 +181,14 @@ Image::Image(int p_width, int p_height, int p_colours, int p_subpixelorder, uint } imagePixFormat = AVPixFormat(); + linesize = FFALIGN(av_image_get_linesize(imagePixFormat, width, 0), 32); + size = av_image_get_buffer_size(imagePixFormat, width, height, 32); if (p_buffer) { - size = linesize * height + padding; allocation = size; buffertype = ZM_BUFTYPE_DONTFREE; buffer = p_buffer; } else { - size = av_image_get_buffer_size(imagePixFormat, width, height, 32); - linesize = FFALIGN(av_image_get_linesize(imagePixFormat, width, 0), 32); Debug(4, "line size: %d =? %d width %d Size %d ?= %d", linesize, av_image_get_linesize(imagePixFormat, width, 0), @@ -233,10 +231,10 @@ Image::Image(int p_width, int p_linesize, int p_height, int p_colours, int p_sub } Image::Image(const AVFrame *frame, int p_width, int p_height) : - colours(ZM_COLOUR_RGB32), + colours(ZM_COLOUR_RGB24), padding(0), - subpixelorder(ZM_SUBPIX_ORDER_RGBA), - imagePixFormat(AV_PIX_FMT_RGBA), + subpixelorder(ZM_SUBPIX_ORDER_YUV420P), + imagePixFormat(AV_PIX_FMT_YUVJ420P), buffer(0), holdbuffer(0) { width = (p_width == -1 ? frame->width : p_width); @@ -247,9 +245,9 @@ Image::Image(const AVFrame *frame, int p_width, int p_height) : // FIXME //(AVPixelFormat)frame->format; - size = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 32); + size = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32); // av_image_get_linesize isn't aligned, so we have to do that. - linesize = FFALIGN(av_image_get_linesize(AV_PIX_FMT_RGBA, width, 0), 32); + linesize = FFALIGN(av_image_get_linesize(AV_PIX_FMT_YUVJ420P, width, 0), 32); AllocImgBuffer(size); this->Assign(frame); @@ -676,7 +674,7 @@ void Image::AssignDirect( return; } - size_t new_buffer_size = static_cast(p_width) * p_height * p_colours; + size_t new_buffer_size = static_cast(av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32)); // hardcoded hack if ( buffer_size < new_buffer_size ) { Error("Attempt to directly assign buffer from an undersized buffer of size: %zu, needed %dx%d*%d colours = %zu", @@ -707,8 +705,9 @@ void Image::AssignDirect( width = p_width; height = p_height; colours = p_colours; - linesize = width * colours; subpixelorder = p_subpixelorder; + imagePixFormat = AVPixFormat(); + linesize = FFALIGN(av_image_get_linesize(imagePixFormat, width, 0), 32); pixels = width * height; size = new_buffer_size; update_function_pointers(); @@ -727,7 +726,7 @@ void Image::Assign( return; } - unsigned int new_size = p_width * p_height * p_colours; + unsigned int new_size = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32); // hardcoded hack if ( buffer_size < new_size ) { Error("Attempt to assign buffer from an undersized buffer of size: %zu", buffer_size); return; @@ -762,6 +761,8 @@ void Image::Assign( pixels = width*height; colours = p_colours; subpixelorder = p_subpixelorder; + imagePixFormat = AVPixFormat(); + linesize = FFALIGN(av_image_get_linesize(imagePixFormat, width, 0), 32); size = new_size; } @@ -771,7 +772,7 @@ void Image::Assign( } void Image::Assign(const Image &image) { - unsigned int new_size = image.height * image.linesize; + unsigned int new_size = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32); // hardcoded hack if ( image.buffer == nullptr ) { Error("Attempt to assign image with an empty buffer"); @@ -809,26 +810,14 @@ void Image::Assign(const Image &image) { pixels = width*height; colours = image.colours; subpixelorder = image.subpixelorder; + imagePixFormat = image.imagePixFormat; 
size = new_size; linesize = image.linesize; update_function_pointers(); } - if ( image.buffer != buffer ) { - if (image.linesize > linesize) { - Debug(1, "Must copy line by line due to different line size %d != %d", image.linesize, linesize); - uint8_t *src_ptr = image.buffer; - uint8_t *dst_ptr = buffer; - for (unsigned int i=0; i< image.height; i++) { - (*fptr_imgbufcpy)(dst_ptr, src_ptr, image.linesize); - src_ptr += image.linesize; - dst_ptr += linesize; - } - } else { - Debug(4, "Doing full copy line size %d != %d", image.linesize, linesize); - (*fptr_imgbufcpy)(buffer, image.buffer, size); - } - } + if ( image.buffer != buffer ) + (*fptr_imgbufcpy)(buffer, image.buffer, size); } Image *Image::HighlightEdges( @@ -1639,6 +1628,12 @@ bool Image::EncodeJpeg(JOCTET *outbuffer, int *outbuffer_size, AVCodecContext *p PopulateFrame(frame.get()); } + if (frame.get()->format != AV_PIX_FMT_YUV420P) { + Error("Jpeg frame format incorrect, got %d", frame.get()->format); + av_frame_unref(frame.get()); + return false; + } + pkt = av_packet_alloc(); avcodec_send_frame(p_jpegcodeccontext, frame.get()); @@ -5460,7 +5455,9 @@ __attribute__((noinline)) void std_deinterlace_4field_abgr(uint8_t* col1, uint8_ } AVPixelFormat Image::AVPixFormat() const { - if ( colours == ZM_COLOUR_RGB32 ) { + if ( subpixelorder == ZM_SUBPIX_ORDER_YUV420P) { + return AV_PIX_FMT_YUV420P; + } else if ( colours == ZM_COLOUR_RGB32 ) { return AV_PIX_FMT_RGBA; } else if ( colours == ZM_COLOUR_RGB24 ) { if ( subpixelorder == ZM_SUBPIX_ORDER_BGR) { diff --git a/src/zm_monitor.cpp b/src/zm_monitor.cpp index 6232796ab..a63ed795f 100644 --- a/src/zm_monitor.cpp +++ b/src/zm_monitor.cpp @@ -2933,7 +2933,9 @@ bool Monitor::Decode() { int ret = packet->decode(camera->getVideoCodecContext()); if (ret > 0 and !zm_terminate) { if (packet->in_frame and !packet->image) { - packet->image = new Image(camera_width, camera_height, camera->Colours(), camera->SubpixelOrder()); + unsigned int subpix = packet->in_frame->format == AV_PIX_FMT_YUV420P ? ZM_SUBPIX_ORDER_YUV420P : camera->SubpixelOrder(); + unsigned int colours = packet->in_frame->format == AV_PIX_FMT_YUV420P ? ZM_COLOUR_RGB24 : camera->Colours(); + packet->image = new Image(camera_width, camera_height, colours, subpix); if (convert_context || this->setupConvertContext(packet->in_frame.get(), packet->image)) { if (!packet->image->Assign(packet->in_frame.get(), convert_context, dest_frame.get())) {
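
Not part of the patches themselves: a minimal, self-contained sketch of the avcodec JPEG write path that patch 1's Image::WriteJpeg() follows, assuming FFmpeg 4.x or newer headers. The function name encode_rgba_to_jpeg and the packed-RGBA input are illustrative only, not ZoneMinder symbols; unlike the patch, the sketch keeps the frame format equal to the codec context's AV_PIX_FMT_YUVJ420P and releases the packet on every path.

extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}
#include <cstdint>
#include <cstdio>

static bool encode_rgba_to_jpeg(const uint8_t *rgba, int width, int height,
                                const char *path) {
  const AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
  if (!codec) return false;

  AVCodecContext *ctx = avcodec_alloc_context3(codec);
  if (!ctx) return false;
  ctx->width = width;
  ctx->height = height;
  ctx->time_base = AVRational{1, 25};
  ctx->pix_fmt = AV_PIX_FMT_YUVJ420P;  // full-range planar YUV accepted by the MJPEG encoder
  if (avcodec_open2(ctx, codec, nullptr) < 0) {
    avcodec_free_context(&ctx);
    return false;
  }

  // One swscale context converts the packed RGBA input to the planar format
  // the encoder wants; the patch builds the equivalent context once per event.
  SwsContext *sws = sws_getContext(width, height, AV_PIX_FMT_RGBA,
                                   width, height, AV_PIX_FMT_YUVJ420P,
                                   SWS_BICUBIC, nullptr, nullptr, nullptr);

  AVFrame *frame = av_frame_alloc();
  frame->width = width;
  frame->height = height;
  frame->format = AV_PIX_FMT_YUVJ420P;
  av_frame_get_buffer(frame, 32);  // allocates data[] with 32-byte aligned linesize[]

  const uint8_t *src_data[4] = {rgba, nullptr, nullptr, nullptr};
  int src_linesize[4] = {4 * width, 0, 0, 0};
  sws_scale(sws, src_data, src_linesize, 0, height, frame->data, frame->linesize);

  AVPacket *pkt = av_packet_alloc();
  bool ok = false;
  if (avcodec_send_frame(ctx, frame) == 0 &&
      avcodec_receive_packet(ctx, pkt) == 0) {
    if (FILE *out = std::fopen(path, "wb")) {
      ok = std::fwrite(pkt->data, 1, pkt->size, out) == static_cast<size_t>(pkt->size);
      std::fclose(out);
    }
  }

  av_packet_free(&pkt);   // freed unconditionally
  av_frame_free(&frame);
  sws_freeContext(sws);
  avcodec_free_context(&ctx);
  return ok;
}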
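
Patch 2 replaces the Image::Scale() plus separate colour-conversion path with a single SwsContext that rescales and converts in one pass, rebuilt by StreamBase::initContexts() whenever the requested stream size changes. Below is a hedged sketch of that pattern with placeholder names (rescale_to_yuv420p, src, dst), not ZoneMinder symbols; the patch keeps one long-lived context across frames rather than creating one per call as this sketch does.

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>
#include <libswscale/swscale.h>
}

// Produce a dst_w x dst_h YUV420P copy of an arbitrary decoded frame.
static AVFrame *rescale_to_yuv420p(const AVFrame *src, int dst_w, int dst_h) {
  SwsContext *sws = sws_getContext(src->width, src->height,
                                   static_cast<AVPixelFormat>(src->format),
                                   dst_w, dst_h, AV_PIX_FMT_YUV420P,
                                   SWS_BICUBIC, nullptr, nullptr, nullptr);
  if (!sws) return nullptr;

  AVFrame *dst = av_frame_alloc();
  dst->width = dst_w;
  dst->height = dst_h;
  dst->format = AV_PIX_FMT_YUV420P;
  av_frame_get_buffer(dst, 32);

  // Scaling and pixel-format conversion both happen in this one call.
  sws_scale(sws, src->data, src->linesize, 0, src->height,
            dst->data, dst->linesize);

  sws_freeContext(sws);
  return dst;  // caller releases with av_frame_free(&dst)
}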
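
Patch 3 stops deriving linesize and imagesize from width*colours and instead asks libavutil for format-aware, 32-byte aligned values. A small standalone illustration of that arithmetic follows; the 1920x1080 dimensions are arbitrary and chosen only for the example.

extern "C" {
#include <libavutil/common.h>    // FFALIGN
#include <libavutil/imgutils.h>
}
#include <cstdio>

int main() {
  const int width = 1920, height = 1080;

  // Luma-plane stride for planar YUV, padded up to a 32-byte boundary,
  // matching linesize = FFALIGN(av_image_get_linesize(...), 32) in the patch.
  int linesize = FFALIGN(av_image_get_linesize(AV_PIX_FMT_YUVJ420P, width, 0), 32);

  // Total buffer for all three planes at the same alignment, roughly
  // width*height*3/2, versus width*height*4 for a packed RGBA image.
  int size = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, width, height, 32);

  std::printf("linesize=%d size=%d rgba_size=%d\n", linesize, size, width * height * 4);
  return 0;
}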