Add allcam module

Mr-Dave
2024-10-25 12:41:08 -06:00
parent 9f2eb87b4c
commit 8328ef1343
30 changed files with 1610 additions and 1169 deletions

View File

@@ -33,6 +33,7 @@ motionplus_SOURCES = \
libcam.hpp libcam.cpp \
logger.hpp logger.cpp \
motionplus.hpp motionplus.cpp \
allcam.hpp allcam.cpp \
camera.hpp camera.cpp \
movie.hpp movie.cpp \
netcam.hpp netcam.cpp \
@@ -49,7 +50,6 @@ motionplus_SOURCES = \
webu_html.hpp webu_html.cpp \
webu_json.hpp webu_json.cpp \
webu_post.hpp webu_post.cpp \
webu_common.hpp webu_common.cpp \
webu_stream.hpp webu_stream.cpp \
webu_getimg.hpp webu_getimg.cpp \
webu_mpegts.hpp webu_mpegts.cpp

View File

@@ -598,7 +598,7 @@ void cls_algsec::handler()
MOTPLS_LOG(INF, TYPE_ALL, NO_ERRNO,_("Secondary detection starting."));
handler_finished = false;
handler_running = true;
handler_stop = false;
load_params();
@@ -621,7 +621,7 @@ void cls_algsec::handler()
}
is_started = false;
handler_stop = false;
handler_finished = true;
handler_running = false;
MOTPLS_LOG(INF, TYPE_ALL, NO_ERRNO,_("Secondary detection stopped."));
pthread_exit(nullptr);
@@ -636,15 +636,15 @@ void cls_algsec::handler_startup()
return;
}
if (handler_finished == true) {
handler_finished = false;
if (handler_running == false) {
handler_running = true;
handler_stop = false;
pthread_attr_init(&thread_attr);
pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
retcd = pthread_create(&handler_thread, &thread_attr, &algsec_handler, this);
if (retcd != 0) {
MOTPLS_LOG(WRN, TYPE_ALL, NO_ERRNO,_("Unable to start secondary detection"));
handler_finished = true;
handler_running = false;
handler_stop = true;
}
pthread_attr_destroy(&thread_attr);
@@ -655,10 +655,10 @@ void cls_algsec::handler_shutdown()
{
int waitcnt;
if (handler_finished == false) {
if (handler_running == true) {
handler_stop = true;
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cam->cfg->watchdog_tmo)){
while ((handler_running == true) && (waitcnt < cam->cfg->watchdog_tmo)){
SLEEP(1,0)
waitcnt++;
}
@@ -670,7 +670,7 @@ void cls_algsec::handler_shutdown()
,_("Waiting additional %d seconds (watchdog_kill).")
,cam->cfg->watchdog_kill);
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cam->cfg->watchdog_kill)){
while ((handler_running == true) && (waitcnt < cam->cfg->watchdog_kill)){
SLEEP(1,0)
waitcnt++;
}
@@ -687,7 +687,7 @@ void cls_algsec::handler_shutdown()
exit(1);
}
}
handler_finished = true;
handler_running = false;
}
myfree(image_norm);
@@ -752,7 +752,7 @@ cls_algsec::cls_algsec(cls_camera *p_cam)
{
#ifdef HAVE_OPENCV
cam = p_cam;
handler_finished = true;
handler_running = false;
handler_stop = true;
image_norm = nullptr;
params = nullptr;

View File

@@ -45,7 +45,7 @@ class cls_algsec {
pthread_mutex_t mutex;
bool handler_stop;
bool handler_finished;
bool handler_running;
pthread_t handler_thread;
void handler();

src/allcam.cpp (new file, 830 lines)
View File

@@ -0,0 +1,830 @@
/*
* This file is part of MotionPlus.
*
* MotionPlus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MotionPlus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MotionPlus. If not, see <https://www.gnu.org/licenses/>.
*
*
*/
#include "motionplus.hpp"
#include "conf.hpp"
#include "logger.hpp"
#include "util.hpp"
#include "allcam.hpp"
#include "camera.hpp"
#include "jpegutils.hpp"
static void *allcam_handler(void *arg)
{
((cls_allcam *)arg)->handler();
return nullptr;
}
void cls_allcam::getsizes_img(cls_camera *p_cam, int &img_w, int &img_h)
{
int src_w, src_h;
src_w = p_cam->all_sizes.width;
src_h = p_cam->all_sizes.height;
img_w = ((p_cam->all_loc.scale * src_w) / 100);
if ((img_w % 16) != 0) {
img_w = img_w - (img_w % 16) + 16;
}
img_h = ((p_cam->all_loc.scale * src_h) / 100);
if ((img_h % 16) != 0) {
img_h = img_h - (img_h % 16) + 16;
}
if (img_w < 64){
img_w = 64;
}
if (img_h < 64){
img_h = 64;
}
}
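/* Hedged illustration of the sizing math above; the dimensions and the
 * helper name below are hypothetical, not part of this commit. A 50 percent
 * scale of a 1920x1080 source gives 960x540, the height is then rounded up
 * to the next multiple of 16 (544), and anything under 64 pixels is clamped
 * to 64.
 */
static int scale_and_round(int src_dim, int scale_pct)
{
    int out = (scale_pct * src_dim) / 100;
    if ((out % 16) != 0) {
        out = out - (out % 16) + 16;    /* round up to a multiple of 16 */
    }
    if (out < 64) {
        out = 64;                       /* enforce the minimum dimension */
    }
    return out;
}
/* scale_and_round(1920, 50) -> 960, scale_and_round(1080, 50) -> 544 */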
void cls_allcam::getimg(ctx_stream_data *strm_a, std::string imgtyp)
{
int a_y, a_u, a_v; /* all img y,u,v */
int c_y, c_u, c_v; /* camera img y,u,v */
int dst_h, dst_w, dst_sz, src_sz, img_orow, img_ocol;
int indx, row, indx1;
int src_w, src_h;
unsigned char *dst_img, *src_img;
ctx_stream_data *strm_c;
ctx_all_sizes *all_sz;
cls_camera *p_cam;
getsizes();
all_sz = &all_sizes;
a_y = 0;
a_u = (all_sz->width * all_sz->height);
a_v = a_u + (a_u / 4);
memset(strm_a->img_data , 0x80, (size_t)a_u);
memset(strm_a->img_data + a_u, 0x80, (size_t)(a_u/2));
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
src_h = p_cam->imgs.height;
src_w = p_cam->imgs.width;
getsizes_img(p_cam, dst_w, dst_h);
dst_sz = (dst_h * dst_w * 3)/2;
src_sz = (src_h * src_w * 3)/2;
img_orow = p_cam->all_loc.offset_row;
img_ocol = p_cam->all_loc.offset_col;
if (imgtyp == "norm") {
strm_c = &p_cam->stream.norm;
} else if (imgtyp == "motion") {
strm_c = &p_cam->stream.motion;
} else if (imgtyp == "source") {
strm_c = &p_cam->stream.source;
} else if (imgtyp == "secondary") {
strm_c = &p_cam->stream.secondary;
} else {
return;
}
dst_img = (unsigned char*) mymalloc((uint)dst_sz);
src_img = (unsigned char*) mymalloc((uint)src_sz);
pthread_mutex_lock(&p_cam->stream.mutex);
indx1=0;
while (indx1 < 1000) {
if (strm_c->img_data == NULL) {
if (strm_c->all_cnct == 0){
strm_c->all_cnct++;
}
pthread_mutex_unlock(&p_cam->stream.mutex);
SLEEP(0, 1000);
pthread_mutex_lock(&p_cam->stream.mutex);
} else {
break;
}
indx1++;
}
if ((p_cam->all_sizes.width != src_w) ||
(p_cam->all_sizes.height != src_h)) {
MOTPLS_LOG(NTC, TYPE_STREAM, NO_ERRNO
, "Image has changed. Device: %d"
, p_cam->cfg->device_id);
memset(src_img, 0x00, (uint)src_sz);
p_cam->all_sizes.reset = true;
} else if (strm_c->img_data == NULL) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Could not get image for device %d"
, p_cam->cfg->device_id);
memset(src_img, 0x00, (uint)src_sz);
} else {
memcpy(src_img, strm_c->img_data, (uint)src_sz);
}
pthread_mutex_unlock(&p_cam->stream.mutex);
util_resize(src_img, src_w, src_h, dst_img, dst_w, dst_h);
/*
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "src w %d h %d dst w %d h %d all w %d h %d "
, p_cam->imgs.width, p_cam->imgs.height
, dst_w, dst_h
, all_sz->width,all_sz->height);
*/
a_y = (img_orow * all_sz->width) + img_ocol;
a_u =(all_sz->height * all_sz->width) +
((img_orow / 4) * all_sz->width) + (img_ocol / 2) ;
a_v = a_u + ((all_sz->height * all_sz->width) / 4);
c_y = 0;
c_u = (dst_w * dst_h);
c_v = c_u + (c_u / 4);
/*
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "r %d c %d a %d %d %d h %d w %d"
, img_orow, img_ocol
, a_y, a_u, a_v
, all_sz->height, all_sz->width);
*/
for (row=0; row<dst_h; row++) {
memcpy(strm_a->img_data + a_y, dst_img + c_y, (uint)dst_w);
a_y += all_sz->width;
c_y += dst_w;
if (row % 2) {
memcpy(strm_a->img_data + a_u, dst_img + c_u, (uint)dst_w / 2);
//mymemset(strm_a->img_data + a_u, 0xFA, dst_w/2);
a_u += (all_sz->width / 2);
c_u += (dst_w / 2);
memcpy(strm_a->img_data + a_v, dst_img + c_v, (uint)dst_w / 2);
a_v += (all_sz->width / 2);
c_v += (dst_w / 2);
}
}
myfree(dst_img);
myfree(src_img);
}
strm_a->jpg_sz = jpgutl_put_yuv420p(
strm_a->jpg_data, all_sz->img_sz
,strm_a->img_data, all_sz->width, all_sz->height
, 70, NULL,NULL,NULL);
strm_a->consumed = false;
}
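/* Hedged worked example of the plane offsets computed in getimg() above.
 * In planar YUV420p the Y plane is width*height bytes, followed by U and V
 * planes of a quarter size each, so (orow/4)*width equals (orow/2) chroma
 * rows times a (width/2) chroma stride. The composite size and camera
 * placement below are made-up example values.
 */
struct plane_offsets { int y; int u; int v; };
static plane_offsets composite_offsets(int all_w, int all_h, int orow, int ocol)
{
    plane_offsets p;
    p.y = (orow * all_w) + ocol;
    p.u = (all_w * all_h) + ((orow / 4) * all_w) + (ocol / 2);
    p.v = p.u + ((all_w * all_h) / 4);
    return p;
}
/* composite_offsets(640, 480, 0, 320) -> y=320, u=307360, v=384160 */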
void cls_allcam::stream_free()
{
int indx;
ctx_stream_data *strm;
for (indx=0;indx<5;indx++) {
if (indx == 0) {
strm = &stream.norm;
} else if (indx == 1) {
strm = &stream.motion;
} else if (indx == 2) {
strm = &stream.secondary;
} else if (indx == 3) {
strm = &stream.source;
} else if (indx == 4) {
strm = &stream.sub;
}
myfree(strm->img_data);
myfree(strm->jpg_data);
strm->img_data = (unsigned char*)
mymalloc((size_t)all_sizes.img_sz);
}
}
void cls_allcam::stream_alloc()
{
int indx;
ctx_stream_data *strm;
for (indx=0;indx<5;indx++) {
if (indx == 0) {
strm = &stream.norm;
} else if (indx == 1) {
strm = &stream.motion;
} else if (indx == 2) {
strm = &stream.secondary;
} else if (indx == 3) {
strm = &stream.source;
} else if (indx == 4) {
strm = &stream.sub;
}
strm->img_data = (unsigned char*)
mymalloc((size_t)all_sizes.img_sz);
strm->jpg_data = (unsigned char*)
mymalloc((size_t)all_sizes.img_sz);
strm->consumed = true;
}
}
void cls_allcam::getsizes_scale(int mx_row)
{
int indx, row;
int mx_h, img_h, img_w;
bool dflt_scale;
cls_camera *p_cam;
dflt_scale = false;
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
if (active_cam[indx]->all_loc.scale == -1) {
dflt_scale = true;
}
}
if (dflt_scale) {
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
active_cam[indx]->all_loc.scale = 100;
}
for (row=1; row<=mx_row; row++) {
mx_h = 0;
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
if (row == p_cam->all_loc.row) {
getsizes_img(p_cam, img_w, img_h);
if (mx_h < img_h) {
mx_h = img_h;
}
}
}
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
if (row == p_cam->all_loc.row) {
getsizes_img(p_cam, img_w, img_h);
p_cam->all_loc.scale = (int)((float)(mx_h*100 / img_h ));
}
}
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
getsizes_img(p_cam, img_w, img_h);
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d Original Size %dx%d Scale %d New Size %dx%d"
, p_cam->cfg->device_id
, p_cam->imgs.width, p_cam->imgs.height
, p_cam->all_loc.scale, img_w, img_h);
}
}
}
}
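/* Hedged worked example of the row-equalizing scale above: if one row holds
 * two cameras whose scaled heights come out to 960 and 480 (hypothetical
 * values), mx_h is 960 and the shorter camera is assigned
 * scale = (960 * 100) / 480 = 200, so both render at the same height.
 */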
void cls_allcam::getsizes_alignv(int mx_row, int mx_col)
{
int indx, row, col;
int chk_sz;
int mx_h, img_h, img_w;
cls_camera *p_cam;
for (row=1; row<=mx_row; row++) {
chk_sz = 0;
mx_h = 0;
for (col=1; col<=mx_col; col++) {
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
getsizes_img(p_cam, img_w, img_h);
if ((row == p_cam->all_loc.row) &&
(col == p_cam->all_loc.col)) {
p_cam->all_loc.offset_col = chk_sz;
chk_sz += img_w;
if (mx_h < img_h) {
mx_h = img_h;
}
}
}
}
/* Align/center vert. the images in each row*/
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
getsizes_img(p_cam, img_w, img_h);
if (p_cam->all_loc.row == row) {
p_cam->all_loc.offset_row =
all_sizes.height +
((mx_h - img_h)/2) ;
}
}
all_sizes.height += mx_h;
if (all_sizes.width < chk_sz) {
all_sizes.width = chk_sz;
}
}
}
void cls_allcam::getsizes_alignh(int mx_col)
{
int indx, col;
int chk_sz, chk_w;
int mx_w, img_h, img_w;
cls_camera *p_cam;
/* Align/center horiz. the images within each column area */
chk_w = 0;
for (col=1; col<=mx_col; col++) {
chk_sz = 0;
mx_w = 0;
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
getsizes_img(p_cam, img_w, img_h);
if (p_cam->all_loc.col == col) {
if (p_cam->all_loc.offset_col < chk_w) {
p_cam->all_loc.offset_col = chk_w;
}
if (chk_sz < p_cam->all_loc.offset_col) {
chk_sz = p_cam->all_loc.offset_col;
}
if (mx_w < img_w) {
mx_w = img_w;
}
}
}
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
getsizes_img(p_cam, img_w, img_h);
if (p_cam->all_loc.col == col) {
p_cam->all_loc.offset_col =
chk_sz + ((mx_w - img_w) /2) ;
}
}
chk_w = mx_w + chk_sz;
if (all_sizes.width < chk_w) {
all_sizes.width = chk_w;
}
}
}
void cls_allcam::getsizes()
{
int indx;
int chk_sz, mx_col, mx_row;
int img_h, img_w;
bool chk;
cls_camera *p_cam;
if (all_sizes.reset == true) {
chk = true;
} else {
chk = false;
}
active_cam.clear();
active_cnt = 0;
for (indx=0;indx<app->cam_cnt;indx++) {
p_cam = app->cam_list[indx];
if (p_cam->device_status == STATUS_OPENED) {
if (p_cam->all_sizes.reset == true) {
chk = true;
}
active_cnt++;
active_cam.push_back(p_cam);
p_cam->all_sizes.width = p_cam->imgs.width;
p_cam->all_sizes.height = p_cam->imgs.height;
p_cam->all_sizes.img_sz = ((p_cam->imgs.height * p_cam->imgs.width * 3)/2);
p_cam->all_sizes.reset = false;
}
}
if (chk == false) {
return;
}
init_cams();
all_sizes.width = 0;
all_sizes.height = 0;
mx_row = 0;
mx_col = 0;
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
if (mx_row < p_cam->all_loc.row) {
mx_row = p_cam->all_loc.row;
}
if (mx_col < p_cam->all_loc.col) {
mx_col = p_cam->all_loc.col;
}
}
getsizes_scale(mx_row);
getsizes_alignv(mx_row, mx_col);
getsizes_alignh(mx_col);
for (indx=0; indx<active_cnt; indx++) {
p_cam = active_cam[indx];
getsizes_img(p_cam, img_w, img_h);
chk_sz = p_cam->all_loc.offset_col + p_cam->all_loc.offset_user_col;
if (chk_sz < 0) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image column offset. (%d + %d) less than zero "
, p_cam->cfg->device_id
, p_cam->all_loc.offset_col
, p_cam->all_loc.offset_user_col);
} else if ((chk_sz + img_w) > all_sizes.width) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image column offset. (%d + %d) over image size"
, p_cam->cfg->device_id
, p_cam->all_loc.offset_col
, p_cam->all_loc.offset_user_col);
} else {
p_cam->all_loc.offset_col = chk_sz;
}
chk_sz = p_cam->all_loc.offset_row + p_cam->all_loc.offset_user_row;
if (chk_sz < 0 ) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image row offset. (%d + %d) less than zero "
, p_cam->cfg->device_id
, p_cam->all_loc.offset_row
, p_cam->all_loc.offset_user_row);
} else if ((chk_sz + img_h) > all_sizes.height) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image row offset. (%d + %d) over image size"
, p_cam->cfg->device_id
, p_cam->all_loc.offset_row
, p_cam->all_loc.offset_user_row);
} else {
p_cam->all_loc.offset_row = chk_sz;
}
}
if ((all_sizes.height ==0) ||
(all_sizes.width == 0)) {
all_sizes.width = 320;
all_sizes.height = 240;
}
all_sizes.img_sz =((
all_sizes.height *
all_sizes.width * 3)/2);
all_sizes.reset = false;
stream_free();
stream_alloc();
/*
for (indx=0; indx<active_cnt; indx++) {
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO
, "row %d col %d offset row %d offset col %d"
, active_cam[indx]->all_loc.row
, active_cam[indx]->all_loc.col
, active_cam[indx]->all_loc.offset_row
, active_cam[indx]->all_loc.offset_col);
}
*/
}
void cls_allcam::init_params()
{
int indx, indx1;
ctx_params *params_loc;
ctx_params_item *itm;
memset(&all_sizes, 0, sizeof(ctx_all_sizes));
params_loc = new ctx_params;
for (indx=0; indx<app->cam_cnt; indx++) {
app->cam_list[indx]->all_loc.row = -1;
app->cam_list[indx]->all_loc.col = -1;
app->cam_list[indx]->all_loc.offset_user_col = 0;
app->cam_list[indx]->all_loc.offset_user_row = 0;
app->cam_list[indx]->all_loc.scale =
app->cam_list[indx]->cfg->stream_preview_scale;
util_parms_parse(params_loc
, "stream_preview_location"
, app->cam_list[indx]->cfg->stream_preview_location);
for (indx1=0;indx1<params_loc->params_cnt;indx1++) {
itm = &params_loc->params_array[indx1];
if (itm->param_name == "row") {
app->cam_list[indx]->all_loc.row = mtoi(itm->param_value);
}
if (itm->param_name == "col") {
app->cam_list[indx]->all_loc.col = mtoi(itm->param_value);
}
if (itm->param_name == "offset_col") {
app->cam_list[indx]->all_loc.offset_user_col =
mtoi(itm->param_value);
}
if (itm->param_name == "offset_row") {
app->cam_list[indx]->all_loc.offset_user_row =
mtoi(itm->param_value);
}
}
params_loc->params_array.clear();
}
mydelete(params_loc);
}
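/* Hedged example of the per-camera configuration consumed above; the values
 * are hypothetical and the key=value list follows whatever syntax
 * util_parms_parse accepts for parameter strings:
 *
 *   stream_preview_scale    50
 *   stream_preview_location row=1,col=2,offset_row=0,offset_col=16
 */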
bool cls_allcam::init_validate()
{
int indx, indx1;
int row, col, mx_row, mx_col, col_chk;
bool cfg_valid, chk;
std::string cfg_row, cfg_col;
mx_row = 0;
mx_col = 0;
for (indx=0; indx<app->cam_cnt; indx++) {
if (mx_col < app->cam_list[indx]->all_loc.col) {
mx_col = app->cam_list[indx]->all_loc.col;
}
if (mx_row < app->cam_list[indx]->all_loc.row) {
mx_row = app->cam_list[indx]->all_loc.row;
}
}
cfg_valid = true;
for (indx=0; indx<app->cam_cnt; indx++) {
if ((app->cam_list[indx]->all_loc.col == -1) ||
(app->cam_list[indx]->all_loc.row == -1)) {
cfg_valid = false;
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "No stream_preview_location for cam %d"
, app->cam_list[indx]->cfg->device_id);
} else {
for (indx1=0; indx1<app->cam_cnt; indx1++) {
if ((app->cam_list[indx]->all_loc.col ==
app->cam_list[indx1]->all_loc.col) &&
(app->cam_list[indx]->all_loc.row ==
app->cam_list[indx1]->all_loc.row) &&
(indx != indx1)) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Duplicate stream_preview_location "
" cam %d, cam %d row %d col %d"
, app->cam_list[indx]->cfg->device_id
, app->cam_list[indx1]->cfg->device_id
, app->cam_list[indx]->all_loc.row
, app->cam_list[indx]->all_loc.col);
cfg_valid = false;
}
}
}
if (app->cam_list[indx]->all_loc.row == 0) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location row cam %d, row %d"
, app->cam_list[indx]->cfg->device_id
, app->cam_list[indx]->all_loc.row);
cfg_valid = false;
}
if (app->cam_list[indx]->all_loc.col == 0) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location col cam %d, col %d"
, app->cam_list[indx]->cfg->device_id
, app->cam_list[indx]->all_loc.col);
cfg_valid = false;
}
}
for (row=1; row<=mx_row; row++) {
chk = false;
for (indx=0; indx<app->cam_cnt; indx++) {
if (row == app->cam_list[indx]->all_loc.row) {
chk = true;
}
}
if (chk == false) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location combination. "
" Missing row %d", row);
cfg_valid = false;
}
col_chk = 0;
for (col=1; col<=mx_col; col++) {
for (indx=0; indx<app->cam_cnt; indx++) {
if ((row == app->cam_list[indx]->all_loc.row) &&
(col == app->cam_list[indx]->all_loc.col)) {
if ((col_chk+1) == col) {
col_chk = col;
} else {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location combination. "
" Missing row %d column %d", row, col_chk+1);
cfg_valid = false;
}
}
}
}
}
return cfg_valid;
}
void cls_allcam::init_cams()
{
int row, col, indx;
if (app->cam_cnt < 1) {
return;
}
init_params();
if (init_validate() == false) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
,"Creating default stream preview values");
row = 0;
col = 0;
for (indx=0; indx<app->cam_cnt; indx++) {
if (col == 1) {
col++;
} else {
row++;
col = 1;
}
app->cam_list[indx]->all_loc.col = col;
app->cam_list[indx]->all_loc.row = row;
app->cam_list[indx]->all_loc.scale = -1;
}
}
for (indx=0; indx<app->cam_cnt; indx++) {
MOTPLS_LOG(DBG, TYPE_ALL, NO_ERRNO
,"stream_preview_location values. Device %d row %d col %d"
, app->cam_list[indx]->cfg->device_id
, app->cam_list[indx]->all_loc.row
, app->cam_list[indx]->all_loc.col);
}
}
void cls_allcam::timing()
{
struct timespec ts2;
int64_t sleeptm;
if ((restart == true) || (handler_stop == true)) {
return;
}
clock_gettime(CLOCK_MONOTONIC, &ts2);
sleeptm = ((1000000L / app->cfg->stream_maxrate) -
(1000000L * (ts2.tv_sec - curr_ts.tv_sec)) -
((ts2.tv_nsec - curr_ts.tv_nsec)/1000))*1000;
/* If over 1 second, just sleep one second*/
if (sleeptm > 999999999L) {
SLEEP(1, 0);
} else if (sleeptm > 0) {
SLEEP(0, sleeptm);
}
clock_gettime(CLOCK_MONOTONIC, &curr_ts);
}
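/* Hedged sketch of the pacing math in timing() above: the target frame
 * period in microseconds minus the time already spent, converted to
 * nanoseconds. The rate and elapsed time below are hypothetical values.
 */
static long pacing_sleep_ns(long maxrate_fps, long elapsed_us)
{
    return ((1000000L / maxrate_fps) - elapsed_us) * 1000L;
}
/* pacing_sleep_ns(10, 20000) -> 80000000, i.e. sleep 80 ms to hold 10 fps */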
void cls_allcam::handler()
{
mythreadname_set("ac", 0, "allcam");
while (handler_stop == false) {
if ((stream.norm.all_cnct > 0) &&
(stream.norm.consumed == true)) {
getimg(&stream.norm,"norm");
}
if ((stream.sub.all_cnct > 0) &&
(stream.sub.consumed == true)) {
getimg(&stream.sub,"norm");
}
if ((stream.motion.all_cnct > 0) &&
(stream.motion.consumed == true)) {
getimg(&stream.motion,"motion");
}
if ((stream.source.all_cnct > 0) &&
(stream.source.consumed == true)) {
getimg(&stream.source,"source");
}
if ((stream.secondary.all_cnct > 0) &&
(stream.secondary.consumed == true)) {
getimg(&stream.secondary,"secondary");
}
timing();
}
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO, _("All camera closed"));
handler_running = false;
pthread_exit(NULL);
}
void cls_allcam::handler_startup()
{
int retcd;
pthread_attr_t thread_attr;
if (handler_running == false) {
handler_running = true;
handler_stop = false;
restart = false;
pthread_attr_init(&thread_attr);
pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
retcd = pthread_create(&handler_thread, &thread_attr, &allcam_handler, this);
if (retcd != 0) {
MOTPLS_LOG(WRN, TYPE_ALL, NO_ERRNO,_("Unable to start all camera thread."));
handler_running = false;
handler_stop = true;
}
pthread_attr_destroy(&thread_attr);
}
}
void cls_allcam::handler_shutdown()
{
int waitcnt;
if (handler_running == true) {
handler_stop = true;
waitcnt = 0;
while ((handler_running == true) && (waitcnt < app->cfg->watchdog_tmo)){
SLEEP(1,0)
waitcnt++;
}
if (waitcnt == app->cfg->watchdog_tmo) {
MOTPLS_LOG(ERR, TYPE_ALL, NO_ERRNO
, _("Normal shutdown of all camera failed"));
if (app->cfg->watchdog_kill > 0) {
MOTPLS_LOG(ERR, TYPE_ALL, NO_ERRNO
,_("Waiting additional %d seconds (watchdog_kill).")
,app->cfg->watchdog_kill);
waitcnt = 0;
while ((handler_running == true) && (waitcnt < app->cfg->watchdog_kill)){
SLEEP(1,0)
waitcnt++;
}
if (waitcnt == app->cfg->watchdog_kill) {
MOTPLS_LOG(ERR, TYPE_ALL, NO_ERRNO
, _("No response to shutdown. Killing it."));
MOTPLS_LOG(ERR, TYPE_ALL, NO_ERRNO
, _("Memory leaks will occur."));
pthread_kill(handler_thread, SIGVTALRM);
}
} else {
MOTPLS_LOG(ERR, TYPE_ALL, NO_ERRNO
, _("watchdog_kill set to terminate application."));
exit(1);
}
}
handler_running = false;
watchdog = app->cfg->watchdog_tmo;
}
}
cls_allcam::cls_allcam(cls_motapp *p_app)
{
app = p_app;
watchdog = app->cfg->watchdog_tmo;
handler_running = false;
handler_stop = true;
finish = false;
memset(&all_sizes, 0, sizeof(ctx_all_sizes));
memset(&stream, 0, sizeof(ctx_stream));
all_sizes.reset = true;
pthread_mutex_init(&stream.mutex, NULL);
stream.motion.consumed = true;
stream.norm.consumed = true;
stream.secondary.consumed = true;
stream.source.consumed = true;
stream.sub.consumed = true;
clock_gettime(CLOCK_MONOTONIC, &curr_ts);
active_cnt = 0;
active_cam.clear();
handler_startup();
}
cls_allcam::~cls_allcam()
{
finish = true;
handler_shutdown();
pthread_mutex_destroy(&stream.mutex);
stream_free();
}

src/allcam.hpp (new file, 63 lines)
View File

@@ -0,0 +1,63 @@
/*
* This file is part of MotionPlus.
*
* MotionPlus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MotionPlus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MotionPlus. If not, see <https://www.gnu.org/licenses/>.
*
*/
#ifndef _INCLUDE_ALLCAM_HPP_
#define _INCLUDE_ALLCAM_HPP_
class cls_allcam {
public:
cls_allcam(cls_motapp *p_app);
~cls_allcam();
bool handler_stop;
bool handler_running;
pthread_t handler_thread;
void handler();
ctx_stream stream;
ctx_all_sizes all_sizes;
bool restart;
bool finish;
private:
cls_motapp *app;
std::vector<cls_camera*> active_cam;
int active_cnt;
int watchdog;
struct timespec curr_ts;
void handler_startup();
void handler_shutdown();
void timing();
void stream_free();
void stream_alloc();
void getsizes_img(cls_camera *p_cam, int &img_w, int &img_h);
void getsizes_scale(int mx_row);
void getsizes_alignv(int mx_row, int mx_col);
void getsizes_alignh(int mx_col);
void getsizes();
void init_params();
bool init_validate();
void init_cams();
void getimg(ctx_stream_data *strm_a, std::string imgtyp);
};
#endif /*_INCLUDE_ALLCAM_HPP_*/

View File

@@ -607,7 +607,6 @@ void cls_camera::init_values()
noise = cfg->noise_level;
passflag = false;
app->all_sizes->reset= true;
threshold = cfg->threshold;
device_status = STATUS_CLOSED;
startup_frames = (cfg->framerate * 2) + cfg->pre_capture + cfg->minimum_motion_frames;
@@ -753,7 +752,7 @@ void cls_camera::cleanup()
/* initialize everything for the loop */
void cls_camera::init()
{
if (((device_status != STATUS_INIT) && (restart != true)) ||
if (((device_status != STATUS_INIT) && (restart == false)) ||
(handler_stop == true)) {
return;
}
@@ -769,8 +768,6 @@ void cls_camera::init()
MOTPLS_LOG(INF, TYPE_ALL, NO_ERRNO,_("Initialize Camera"));
cfg->parms_copy(conf_src);
init_camera_type();
init_values();
@@ -1466,7 +1463,7 @@ void cls_camera::handler()
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO, _("Camera closed"));
handler_finished = true;
handler_running = false;
pthread_exit(NULL);
}
@@ -1475,15 +1472,16 @@ void cls_camera::handler_startup()
int retcd;
pthread_attr_t thread_attr;
if (handler_finished == true) {
handler_finished = false;
if (handler_running == false) {
handler_running = true;
handler_stop = false;
restart = false;
pthread_attr_init(&thread_attr);
pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
retcd = pthread_create(&handler_thread, &thread_attr, &camera_handler, this);
if (retcd != 0) {
MOTPLS_LOG(WRN, TYPE_ALL, NO_ERRNO,_("Unable to start camera thread."));
handler_finished = true;
handler_running = false;
handler_stop = true;
}
pthread_attr_destroy(&thread_attr);
@@ -1494,10 +1492,10 @@ void cls_camera::handler_shutdown()
{
int waitcnt;
if (handler_finished == false) {
if (handler_running == true) {
handler_stop = true;
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cfg->watchdog_tmo)){
while ((handler_running == true) && (waitcnt < cfg->watchdog_tmo)){
SLEEP(1,0)
waitcnt++;
}
@@ -1509,7 +1507,7 @@ void cls_camera::handler_shutdown()
,_("Waiting additional %d seconds (watchdog_kill).")
,cfg->watchdog_kill);
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cfg->watchdog_kill)){
while ((handler_running == true) && (waitcnt < cfg->watchdog_kill)){
SLEEP(1,0)
waitcnt++;
}
@@ -1526,21 +1524,75 @@ void cls_camera::handler_shutdown()
exit(1);
}
}
handler_finished = true;
handler_running = false;
watchdog = cfg->watchdog_tmo;
}
}
cls_camera::cls_camera(cls_motapp *p_app)
{
app = p_app;
handler_finished = true;
cfg = nullptr;
conf_src = nullptr;
current_image = nullptr;
alg = nullptr;
algsec = nullptr;
rotate = nullptr;
netcam = nullptr;
netcam_high = nullptr;
draw = nullptr;
picture = nullptr;
threadnr = -1;
noise = -1;
detecting_motion = false;
event_curr_nbr = -1;
event_prev_nbr = -1;
threshold = -1;
lastrate = -1;
frame_skip = -1;
lost_connection = false;
text_scale = 1;
movie_passthrough = false;
memset(&eventid, 0, sizeof(eventid));
memset(&text_event_string, 0, sizeof(text_event_string));
memset(hostname, 0, sizeof(hostname));
memset(action_user, 0, sizeof(action_user));
movie_fps = -1;
pipe = -1;
mpipe = -1;
pause = false;
missing_frame_counter = -1;
info_diff_tot = 0;
info_diff_cnt = 0;
info_sdev_min = 0;
info_sdev_max = 0;
info_sdev_tot = 0;
action_snapshot = false;
event_stop = false;
event_user = false;
camera_type = CAMERA_TYPE_UNKNOWN;
connectionlosttime.tv_sec = 0;
connectionlosttime.tv_nsec = 0;
handler_running = false;
handler_stop = true;
restart = false;
action_snapshot = false;
finish = false;
watchdog = 90;
passflag = false;
pthread_mutex_init(&stream.mutex, NULL);
device_status = STATUS_CLOSED;
memset(&imgs, 0, sizeof(ctx_images));
memset(&stream, 0, sizeof(ctx_stream));
memset(&all_loc, 0, sizeof(ctx_all_loc));
memset(&all_sizes, 0, sizeof(ctx_all_sizes));
all_sizes.reset = true;
}
cls_camera::~cls_camera()
@@ -1548,4 +1600,5 @@ cls_camera::~cls_camera()
mydelete(conf_src);
mydelete(cfg);
pthread_mutex_destroy(&stream.mutex);
device_status = STATUS_CLOSED;
}

View File

@@ -17,7 +17,6 @@
*
*/
#ifndef _INCLUDE_CAMERA_HPP_
#define _INCLUDE_CAMERA_HPP_
@@ -34,13 +33,13 @@ enum CAMERA_TYPE {
CAMERA_TYPE_LIBCAM,
CAMERA_TYPE_NETCAM
};
enum CAPTURE_RESULT {
CAPTURE_SUCCESS,
CAPTURE_FAILURE,
CAPTURE_ATTEMPTED
};
struct ctx_coord {
int x;
int y;
@@ -115,25 +114,6 @@ struct ctx_images {
};
struct ctx_stream_data {
u_char *jpg_data; /* Image compressed as JPG */
int jpg_sz; /* The number of bytes for jpg */
int consumed; /* Bool for whether the jpeg data was consumed*/
u_char *img_data; /* The base data used for image */
int jpg_cnct; /* Counter of the number of jpg connections*/
int ts_cnct; /* Counter of the number of mpegts connections */
int all_cnct; /* Counter of the number of all camera connections */
};
struct ctx_stream {
pthread_mutex_t mutex;
ctx_stream_data norm; /* Copy of the image to use for web stream*/
ctx_stream_data sub; /* Copy of the image to use for web stream*/
ctx_stream_data motion; /* Copy of the image to use for web stream*/
ctx_stream_data source; /* Copy of the image to use for web stream*/
ctx_stream_data secondary; /* Copy of the image to use for web stream*/
};
class cls_camera {
public:
cls_camera(cls_motapp *p_app);
@@ -151,17 +131,19 @@ class cls_camera {
cls_netcam *netcam;
cls_netcam *netcam_high;
ctx_all_loc all_loc;
ctx_all_sizes all_sizes;
cls_draw *draw;
cls_picture *picture;
bool handler_stop;
bool handler_finished;
bool handler_running;
pthread_t handler_thread;
void handler();
void handler_startup();
void handler_shutdown();
bool restart;
bool finish;
int threadnr;
int noise;
bool detecting_motion;
@@ -255,7 +237,6 @@ class cls_camera {
void areadetect();
void prepare();
void resetimages();
void retry();
int capture();
void detection();
void tuning();

View File

@@ -630,6 +630,9 @@ void cls_config::edit_device_id(std::string &parm, enum PARM_ACT pact)
device_id = 0;
} else {
parm_in = atoi(parm.c_str());
if (device_id == parm_in) {
return;
}
if (parm_in < 1) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO, _("Invalid device_id %d"),parm_in);
} else if (parm_in > 32000) {

View File

@@ -20,6 +20,7 @@
#include "conf.hpp"
#include "logger.hpp"
#include "util.hpp"
#include "allcam.hpp"
#include "camera.hpp"
#include "sound.hpp"
#include "dbse.hpp"
@@ -124,7 +125,7 @@ void cls_motapp::signal_process()
break;
case MOTPLS_SIGNAL_SIGHUP: /* Reload the parameters and restart*/
reload_all = true;
webu->wb_finish = true;
webu->finish = true;
for (indx=0; indx<cam_cnt; indx++) {
cam_list[indx]->event_stop = true;
cam_list[indx]->handler_stop = true;
@@ -140,7 +141,7 @@ void cls_motapp::signal_process()
}
break;
case MOTPLS_SIGNAL_SIGTERM: /* Quit application */
webu->wb_finish = true;
webu->finish = true;
for (indx=0; indx<cam_cnt; indx++) {
cam_list[indx]->event_stop = true;
cam_list[indx]->restart = false;
@@ -152,9 +153,11 @@ void cls_motapp::signal_process()
}
for (indx=0; indx<cam_cnt; indx++) {
cam_list[indx]->handler_shutdown();
cam_list[indx]->finish = true;
}
for (indx=0; indx<snd_cnt; indx++) {
snd_list[indx]->handler_shutdown();
snd_list[indx]->finish = true;
}
default:
@@ -278,173 +281,6 @@ void cls_motapp::av_deinit()
avformat_network_deinit();
}
/* Validate or set the position on the all cameras image*/
void cls_motapp::allcams_init()
{
int indx, indx1;
int row, col, mx_row, mx_col, col_chk;
bool cfg_valid, chk;
std::string cfg_row, cfg_col;
ctx_params *params_loc;
ctx_params_item *itm;
all_sizes = new ctx_all_sizes;
all_sizes->height = 0;
all_sizes->width = 0;
all_sizes->img_sz = 0;
all_sizes->reset = true;
if (cam_cnt < 1) {
return;
}
params_loc = new ctx_params;
for (indx=0; indx<cam_cnt; indx++) {
cam_list[indx]->all_loc.row = -1;
cam_list[indx]->all_loc.col = -1;
cam_list[indx]->all_loc.offset_user_col = 0;
cam_list[indx]->all_loc.offset_user_row = 0;
cam_list[indx]->all_loc.scale =
cam_list[indx]->cfg->stream_preview_scale;
util_parms_parse(params_loc
, "stream_preview_location"
, cam_list[indx]->cfg->stream_preview_location);
for (indx1=0;indx1<params_loc->params_cnt;indx1++) {
itm = &params_loc->params_array[indx1];
if (itm->param_name == "row") {
cam_list[indx]->all_loc.row = mtoi(itm->param_value);
}
if (itm->param_name == "col") {
cam_list[indx]->all_loc.col = mtoi(itm->param_value);
}
if (itm->param_name == "offset_col") {
cam_list[indx]->all_loc.offset_user_col =
mtoi(itm->param_value);
}
if (itm->param_name == "offset_row") {
cam_list[indx]->all_loc.offset_user_row =
mtoi(itm->param_value);
}
}
params_loc->params_array.clear();
}
mydelete(params_loc);
mx_row = 0;
mx_col = 0;
for (indx=0; indx<cam_cnt; indx++) {
if (mx_col < cam_list[indx]->all_loc.col) {
mx_col = cam_list[indx]->all_loc.col;
}
if (mx_row < cam_list[indx]->all_loc.row) {
mx_row = cam_list[indx]->all_loc.row;
}
}
cfg_valid = true;
for (indx=0; indx<cam_cnt; indx++) {
if ((cam_list[indx]->all_loc.col == -1) ||
(cam_list[indx]->all_loc.row == -1)) {
cfg_valid = false;
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "No stream_preview_location for cam %d"
, cam_list[indx]->cfg->device_id);
} else {
for (indx1=0; indx1<cam_cnt; indx1++) {
if ((cam_list[indx]->all_loc.col ==
cam_list[indx1]->all_loc.col) &&
(cam_list[indx]->all_loc.row ==
cam_list[indx1]->all_loc.row) &&
(indx != indx1)) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Duplicate stream_preview_location "
" cam %d, cam %d row %d col %d"
, cam_list[indx]->cfg->device_id
, cam_list[indx1]->cfg->device_id
, cam_list[indx]->all_loc.row
, cam_list[indx]->all_loc.col);
cfg_valid = false;
}
}
}
if (cam_list[indx]->all_loc.row == 0) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location row cam %d, row %d"
, cam_list[indx]->cfg->device_id
, cam_list[indx]->all_loc.row);
cfg_valid = false;
}
if (cam_list[indx]->all_loc.col == 0) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location col cam %d, col %d"
, cam_list[indx]->cfg->device_id
, cam_list[indx]->all_loc.col);
cfg_valid = false;
}
}
for (row=1; row<=mx_row; row++) {
chk = false;
for (indx=0; indx<cam_cnt; indx++) {
if (row == cam_list[indx]->all_loc.row) {
chk = true;
}
}
if (chk == false) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location combination. "
" Missing row %d", row);
cfg_valid = false;
}
col_chk = 0;
for (col=1; col<=mx_col; col++) {
for (indx=0; indx<cam_cnt; indx++) {
if ((row == cam_list[indx]->all_loc.row) &&
(col == cam_list[indx]->all_loc.col)) {
if ((col_chk+1) == col) {
col_chk = col;
} else {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
, "Invalid stream_preview_location combination. "
" Missing row %d column %d", row, col_chk+1);
cfg_valid = false;
}
}
}
}
}
if (cfg_valid == false) {
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO
,"Creating default stream preview values");
row = 0;
col = 0;
for (indx=0; indx<cam_cnt; indx++) {
if (col == 1) {
col++;
} else {
row++;
col = 1;
}
cam_list[indx]->all_loc.col = col;
cam_list[indx]->all_loc.row = row;
cam_list[indx]->all_loc.scale = -1;
}
}
for (indx=0; indx<cam_cnt; indx++) {
MOTPLS_LOG(DBG, TYPE_ALL, NO_ERRNO
,"stream_preview_location values. Device %d row %d col %d"
, cam_list[indx]->cfg->device_id
, cam_list[indx]->all_loc.row
, cam_list[indx]->all_loc.col);
}
}
void cls_motapp::ntc()
{
#ifdef HAVE_V4L2
@@ -514,7 +350,7 @@ void cls_motapp::watchdog(uint camindx)
{
int indx;
if (cam_list[camindx]->handler_finished == true) {
if (cam_list[camindx]->handler_running == false) {
return;
}
@@ -614,23 +450,27 @@ bool cls_motapp::check_devices()
retcd = false;
for (indx=0; indx<cam_cnt; indx++) {
if (cam_list[indx]->handler_finished == false) {
if (cam_list[indx]->finish == false) {
retcd = true;
} else if (cam_list[indx]->handler_stop == false) {
}
if ((cam_list[indx]->handler_stop == false) &&
(cam_list[indx]->handler_running == false)) {
cam_list[indx]->handler_startup();
retcd = true;
}
}
for (indx=0; indx<snd_cnt; indx++) {
if (snd_list[indx]->handler_finished == false) {
if (snd_list[indx]->finish == false) {
retcd = true;
} else if (snd_list[indx]->handler_stop == false) {
}
if ((snd_list[indx]->handler_stop == false) &&
(snd_list[indx]->handler_running == false)) {
snd_list[indx]->handler_startup();
retcd = true;
}
}
if ((webu->wb_finish == false) &&
if ((webu->finish == false) &&
(webu->wb_daemon != NULL)) {
retcd = true;
}
@@ -656,6 +496,7 @@ void cls_motapp::init(int p_argc, char *p_argv[])
cfg = nullptr;
dbse = nullptr;
webu = nullptr;
allcam = nullptr;
pthread_mutex_init(&mutex_camlst, NULL);
pthread_mutex_init(&mutex_post, NULL);
@@ -683,12 +524,12 @@ void cls_motapp::init(int p_argc, char *p_argv[])
ntc();
dbse = new cls_dbse(this);
allcams_init();
av_init();
dbse = new cls_dbse(this);
webu = new cls_webu(this);
allcam = new cls_allcam(this);
if ((cam_cnt > 0) || (snd_cnt > 0)) {
for (indx=0; indx<cam_cnt; indx++) {
cam_list[indx]->handler_startup();
@@ -703,9 +544,6 @@ void cls_motapp::init(int p_argc, char *p_argv[])
, _("Waiting for camera or sound configuration to be added via web control."));
}
/* Start web control last */
webu = new cls_webu(this);
}
void cls_motapp::deinit()
@@ -717,9 +555,9 @@ void cls_motapp::deinit()
mydelete(webu);
mydelete(dbse);
mydelete(allcam);
mydelete(conf_src);
mydelete(cfg);
mydelete(all_sizes);
for (indx = 0; indx < cam_cnt;indx++) {
mydelete(cam_list[indx]);
@@ -773,7 +611,7 @@ void cls_motapp::camera_delete()
cam->handler_shutdown();
if (cam->handler_finished == false) {
if (cam->handler_running == true) {
MOTPLS_LOG(ERR, TYPE_ALL, NO_ERRNO, "Error stopping camera. Timed out shutting down");
cam_delete = -1;
return;
@@ -787,6 +625,7 @@ void cls_motapp::camera_delete()
pthread_mutex_unlock(&mutex_camlst);
cam_delete = -1;
allcam->all_sizes.reset = true;
}

View File

@@ -90,6 +90,7 @@
class cls_motapp;
class cls_camera;
class cls_allcam;
class cls_sound;
class cls_algsec;
class cls_alg;
@@ -141,10 +142,29 @@ struct ctx_all_loc {
struct ctx_all_sizes {
int width;
int height;
int img_sz; /* Image size*/
int img_sz;
bool reset;
};
struct ctx_stream_data {
u_char *jpg_data; /* Image compressed as JPG */
int jpg_sz; /* The number of bytes for jpg */
int consumed; /* Bool for whether the jpeg data was consumed*/
u_char *img_data; /* The base data used for image */
int jpg_cnct; /* Counter of the number of jpg connections*/
int ts_cnct; /* Counter of the number of mpegts connections */
int all_cnct; /* Counter of the number of all camera connections */
};
struct ctx_stream {
pthread_mutex_t mutex;
ctx_stream_data norm; /* Copy of the image to use for web stream*/
ctx_stream_data sub; /* Copy of the image to use for web stream*/
ctx_stream_data motion; /* Copy of the image to use for web stream*/
ctx_stream_data source; /* Copy of the image to use for web stream*/
ctx_stream_data secondary; /* Copy of the image to use for web stream*/
};
class cls_motapp {
public:
cls_motapp();
@@ -165,9 +185,9 @@ class cls_motapp {
cls_config *conf_src;
cls_config *cfg;
ctx_all_sizes *all_sizes;
cls_webu *webu;
cls_dbse *dbse;
cls_allcam *allcam;
pthread_mutex_t mutex_camlst; /* Lock the list of cams while adding/removing */
pthread_mutex_t mutex_post; /* mutex to allow for processing of post actions*/
@@ -186,10 +206,8 @@ class cls_motapp {
void daemon();
void av_init();
void av_deinit();
void allcams_init();
void ntc();
void watchdog(uint camindx);
};
#endif /* _INCLUDE_MOTIONPLUS_HPP_ */

View File

@@ -2075,7 +2075,7 @@ void cls_netcam::handler_reconnect()
void cls_netcam::handler()
{
handler_finished = false;
handler_running = true;
mythreadname_set("nc", threadnbr, camera_name.c_str());
@@ -2107,7 +2107,7 @@ void cls_netcam::handler()
MOTPLS_LOG(INF, TYPE_NETCAM, NO_ERRNO
,_("%s:Camera handler stopped"),cameratype.c_str());
handler_finished = true;
handler_running = false;
pthread_exit(nullptr);
}
@@ -2117,15 +2117,15 @@ void cls_netcam::handler_startup()
int wait_counter, retcd;
pthread_attr_t thread_attr;
if (handler_finished == true) {
handler_finished = false;
if (handler_running == false) {
handler_running = true;
handler_stop = false;
pthread_attr_init(&thread_attr);
pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
retcd = pthread_create(&handler_thread, &thread_attr, &netcam_handler, this);
if (retcd != 0) {
MOTPLS_LOG(WRN, TYPE_ALL, NO_ERRNO,_("Unable to start camera thread."));
handler_finished = true;
handler_running = false;
handler_stop = true;
return;
}
@@ -2165,9 +2165,9 @@ void cls_netcam::handler_shutdown()
idur = 0;
handler_stop = true;
if (handler_finished == false) {
if (handler_running == true) {
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cam->cfg->watchdog_tmo)){
while ((handler_running == true) && (waitcnt < cam->cfg->watchdog_tmo)){
SLEEP(1,0)
waitcnt++;
}
@@ -2179,7 +2179,7 @@ void cls_netcam::handler_shutdown()
,_("Waiting additional %d seconds (watchdog_kill).")
,cam->cfg->watchdog_kill);
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cam->cfg->watchdog_kill)){
while ((handler_running == true) && (waitcnt < cam->cfg->watchdog_kill)){
SLEEP(1,0)
waitcnt++;
}
@@ -2196,7 +2196,7 @@ void cls_netcam::handler_shutdown()
exit(1);
}
}
handler_finished = true;
handler_running = false;
}
context_close();
@@ -2231,7 +2231,7 @@ void cls_netcam::netcam_start()
return;
}
handler_finished = true;
handler_running = false;
handler_stop = false;
if (high_resolution == false) {
@@ -2291,7 +2291,7 @@ void cls_netcam::noimage()
{
int slp_dur;
if (handler_finished == true ) { /* handler is not running */
if (handler_running == false ) {
if (reconnect_count >= 500) {
MOTPLS_LOG(NTC, TYPE_NETCAM, NO_ERRNO,_("Camera did not reconnect."));
MOTPLS_LOG(NTC, TYPE_NETCAM, NO_ERRNO,_("Checking for camera every 2 hours."));

View File

@@ -96,7 +96,7 @@ class cls_netcam {
int audio_stream_index; /* Stream index associated with video from camera */
bool handler_stop;
bool handler_finished;
bool handler_running;
pthread_t handler_thread;
void handler();

View File

@@ -873,7 +873,7 @@ void cls_sound::handler()
MOTPLS_LOG(NTC, TYPE_ALL, NO_ERRNO, _("Sound exiting"));
handler_finished = true;
handler_running = false;
pthread_exit(nullptr);
}
@@ -888,15 +888,15 @@ void cls_sound::handler_startup()
return;
#endif
if (handler_finished == true) {
handler_finished = false;
if (handler_running == false) {
handler_running = true;
handler_stop = false;
pthread_attr_init(&thread_attr);
pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
retcd = pthread_create(&handler_thread, &thread_attr, &sound_handler, this);
if (retcd != 0) {
MOTPLS_LOG(WRN, TYPE_ALL, NO_ERRNO,_("Unable to start sound frequency detection loop."));
handler_finished = true;
handler_running = false;
handler_stop = true;
}
pthread_attr_destroy(&thread_attr);
@@ -907,10 +907,10 @@ void cls_sound::handler_shutdown()
{
int waitcnt;
if (handler_finished == false) {
if (handler_running == true) {
handler_stop = true;
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cfg->watchdog_tmo)){
while ((handler_running == true) && (waitcnt < cfg->watchdog_tmo)){
SLEEP(1,0)
waitcnt++;
}
@@ -922,7 +922,7 @@ void cls_sound::handler_shutdown()
,_("Waiting additional %d seconds (watchdog_kill).")
,cfg->watchdog_kill);
waitcnt = 0;
while ((handler_finished == false) && (waitcnt < cfg->watchdog_kill)){
while ((handler_running == true) && (waitcnt < cfg->watchdog_kill)){
SLEEP(1,0)
waitcnt++;
}
@@ -939,7 +939,7 @@ void cls_sound::handler_shutdown()
exit(1);
}
}
handler_finished = true;
handler_running = false;
watchdog = cfg->watchdog_tmo;
}
@@ -948,7 +948,7 @@ void cls_sound::handler_shutdown()
cls_sound::cls_sound(cls_motapp *p_app)
{
app = p_app;
handler_finished = true;
handler_running = false;
handler_stop = true;
restart = false;
watchdog = 30;

View File

@@ -121,9 +121,10 @@ class cls_sound {
ctx_snd_info *snd_info;
int threadnr;
bool restart;
bool finish;
bool handler_stop;
bool handler_finished;
bool handler_running;
pthread_t handler_thread;
void handler();
void handler_startup();

View File

@@ -1147,3 +1147,109 @@ std::string mtok(std::string &parm, std::string tok)
return tmp;
}
void util_resize(uint8_t *src, int src_w, int src_h
, uint8_t *dst, int dst_w, int dst_h)
{
int retcd, img_sz;
char errstr[128];
uint8_t *buf;
AVFrame *frm_in, *frm_out;
struct SwsContext *swsctx;
img_sz = (dst_h * dst_w * 3)/2;
memset(dst, 0x00, (size_t)img_sz);
frm_in = av_frame_alloc();
if (frm_in == NULL) {
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, _("Unable to allocate frm_in."));
return;
}
frm_out = av_frame_alloc();
if (frm_out == NULL) {
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, _("Unable to allocate frm_out."));
av_frame_free(&frm_in);
return;
}
retcd = av_image_fill_arrays(
frm_in->data, frm_in->linesize
, src, AV_PIX_FMT_YUV420P
, src_w, src_h, 1);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, "Error filling arrays: %s", errstr);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
return;
}
buf = (uint8_t *)mymalloc((size_t)img_sz);
retcd = av_image_fill_arrays(
frm_out->data, frm_out->linesize
, buf, AV_PIX_FMT_YUV420P
, dst_w, dst_h, 1);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, "Error Filling array 2: %s", errstr);
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
return;
}
swsctx = sws_getContext(
src_w, src_h, AV_PIX_FMT_YUV420P
,dst_w, dst_h, AV_PIX_FMT_YUV420P
,SWS_BICUBIC, NULL, NULL, NULL);
if (swsctx == NULL) {
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, _("Unable to allocate scaling context."));
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
return;
}
retcd = sws_scale(swsctx
, (const uint8_t* const *)frm_in->data, frm_in->linesize
, 0, src_h, frm_out->data, frm_out->linesize);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
,_("Error resizing/reformatting: %s"), errstr);
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
sws_freeContext(swsctx);
return;
}
retcd = av_image_copy_to_buffer(
(uint8_t *)dst, img_sz
, (const uint8_t * const*)frm_out
, frm_out->linesize
, AV_PIX_FMT_YUV420P, dst_w, dst_h, 1);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
,_("Error putting frame into output buffer: %s"), errstr);
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
sws_freeContext(swsctx);
return;
}
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
sws_freeContext(swsctx);
}
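/* Hedged usage sketch for util_resize(); the dimensions are example values
 * and the buffers are sized (w*h*3)/2 for YUV420p, mirroring how allcam.cpp
 * calls this helper.
 */
static void util_resize_example()
{
    int src_w = 1280, src_h = 720;
    int dst_w = 640,  dst_h = 368;   /* destination already rounded to /16 */
    uint8_t *src = (uint8_t *)mymalloc((size_t)((src_w * src_h * 3) / 2));
    uint8_t *dst = (uint8_t *)mymalloc((size_t)((dst_w * dst_h * 3) / 2));
    /* ... fill src with a YUV420p frame ... */
    util_resize(src, src_w, src_h, dst, dst_w, dst_h);
    myfree(src);
    myfree(dst);
}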

View File

@@ -118,4 +118,7 @@ struct ctx_params {
long mtol(char *parm);
std::string mtok(std::string &parm, std::string tok);
void util_resize(uint8_t *src, int src_w, int src_h
, uint8_t *dst, int dst_w, int dst_h);
#endif /* _INCLUDE_UTIL_HPP_ */

View File

@@ -27,7 +27,6 @@
#include "webu_json.hpp"
#include "webu_post.hpp"
#include "webu_file.hpp"
#include "webu_common.hpp"
#include "webu_stream.hpp"
#include "webu_mpegts.hpp"
#include "video_v4l2.hpp"
@@ -496,7 +495,7 @@ void cls_webu::startup()
unsigned int randnbr;
wb_daemon = nullptr;
wb_daemon2 = nullptr;
wb_finish = false;
finish = false;
wb_clients.clear();
memset(wb_digest_rand, 0, sizeof(wb_digest_rand));
@@ -529,7 +528,7 @@ void cls_webu::shutdown()
{
int chkcnt;
wb_finish = true;
finish = true;
MOTPLS_LOG(NTC, TYPE_STREAM, NO_ERRNO, _("Closing webcontrol"));

View File

@@ -89,7 +89,7 @@
public:
cls_webu(cls_motapp *p_app);
~cls_webu();
bool wb_finish;
bool finish;
ctx_params *wb_headers;
ctx_params *wb_actions;
char wb_digest_rand[12];

View File

@@ -18,13 +18,13 @@
#include "motionplus.hpp"
#include "camera.hpp"
#include "allcam.hpp"
#include "conf.hpp"
#include "logger.hpp"
#include "util.hpp"
#include "webu.hpp"
#include "webu_ans.hpp"
#include "webu_html.hpp"
#include "webu_common.hpp"
#include "webu_stream.hpp"
#include "webu_mpegts.hpp"
#include "webu_json.hpp"
@@ -714,8 +714,8 @@ void cls_webu_ans::bad_request()
/* Answer the get request from the user */
void cls_webu_ans::answer_get()
{
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO ,"processing get");
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
,"processing get: %s",uri_cmd1.c_str());
if ((uri_cmd1 == "mjpg") || (uri_cmd1 == "mpegts") ||
(uri_cmd1 == "static")) {
@@ -765,7 +765,7 @@ mhdrslt cls_webu_ans::answer_main(struct MHD_Connection *p_connection
}
if (cam != NULL) {
if (cam->handler_stop) {
if (cam->finish) {
MOTPLS_LOG(NTC, TYPE_STREAM, NO_ERRNO ,_("Shutting down camera"));
return MHD_NO;
}
@@ -805,7 +805,6 @@ mhdrslt cls_webu_ans::answer_main(struct MHD_Connection *p_connection
cnct_method = WEBUI_METHOD_GET;
retcd = MHD_YES;
}
return retcd;
}
@@ -821,60 +820,6 @@ mhdrslt cls_webu_ans::answer_main(struct MHD_Connection *p_connection
}
cls_webu_ans::cls_webu_ans(cls_motapp *p_app, const char *uri)
{
app = p_app;
webu = p_app->webu;
char *tmplang;
url = "";
uri_camid = "";
uri_cmd1 = "";
uri_cmd2 = "";
uri_cmd3 = "";
clientip = "";
lang = ""; /* Two digit lang code */
auth_opaque = (char*)mymalloc(WEBUI_LEN_PARM);
auth_realm = (char*)mymalloc(WEBUI_LEN_PARM);
auth_user = nullptr; /* Buffer to hold the user name*/
auth_pass = nullptr; /* Buffer to hold the password */
authenticated = false; /* boolean for whether we are authenticated*/
resp_page = ""; /* The response being constructed */
req_file = nullptr;
gzip_resp = nullptr;
gzip_size = 0;
gzip_encode = false;
cnct_type = WEBUI_CNCT_UNKNOWN;
resp_type = WEBUI_RESP_HTML; /* Default to html response */
cnct_method = WEBUI_METHOD_GET;
camindx = -1;
device_id = -1;
tmplang = setlocale(LC_ALL, NULL);
if (tmplang == nullptr) {
lang = "en";
} else {
lang.assign(tmplang, 2);
}
mhd_first = true;
cam = nullptr;
webu_file = nullptr;
webu_html = nullptr;
webu_json = nullptr;
webu_post = nullptr;
webu_stream = nullptr;
url.assign(uri);
parms_edit();
webu->cnct_cnt++;
}
void cls_webu_ans::deinit_counter()
{
ctx_stream_data *strm;
@@ -942,27 +887,98 @@ void cls_webu_ans::deinit_counter()
}
pthread_mutex_unlock(&p_cam->stream.mutex);
}
if (device_id == 0) {
pthread_mutex_lock(&app->allcam->stream.mutex);
if ((cnct_type == WEBUI_CNCT_JPG_FULL) ||
(cnct_type == WEBUI_CNCT_TS_FULL)) {
strm = &app->allcam->stream.norm;
} else if ( (cnct_type == WEBUI_CNCT_JPG_SUB) ||
(cnct_type == WEBUI_CNCT_TS_SUB)) {
strm = &app->allcam->stream.sub;
} else if ( (cnct_type == WEBUI_CNCT_JPG_MOTION) ||
(cnct_type == WEBUI_CNCT_TS_MOTION )) {
strm = &app->allcam->stream.motion;
} else if ( (cnct_type == WEBUI_CNCT_JPG_SOURCE) ||
(cnct_type == WEBUI_CNCT_TS_SOURCE)) {
strm = &app->allcam->stream.source;
} else if ( (cnct_type == WEBUI_CNCT_JPG_SECONDARY) ||
(cnct_type == WEBUI_CNCT_TS_SECONDARY)) {
strm = &app->allcam->stream.secondary;
} else {
strm = &app->allcam->stream.norm;
}
if (strm->all_cnct > 0) {
strm->all_cnct--;
}
pthread_mutex_unlock(&app->allcam->stream.mutex);
}
}
cls_webu_ans::cls_webu_ans(cls_motapp *p_app, const char *uri)
{
app = p_app;
webu = p_app->webu;
char *tmplang;
url = "";
uri_camid = "";
uri_cmd1 = "";
uri_cmd2 = "";
uri_cmd3 = "";
clientip = "";
lang = ""; /* Two digit lang code */
auth_opaque = (char*)mymalloc(WEBUI_LEN_PARM);
auth_realm = (char*)mymalloc(WEBUI_LEN_PARM);
auth_user = nullptr; /* Buffer to hold the user name*/
auth_pass = nullptr; /* Buffer to hold the password */
authenticated = false; /* boolean for whether we are authenticated*/
resp_page = ""; /* The response being constructed */
req_file = nullptr;
gzip_resp = nullptr;
gzip_size = 0;
gzip_encode = false;
cnct_type = WEBUI_CNCT_UNKNOWN;
resp_type = WEBUI_RESP_HTML; /* Default to html response */
cnct_method = WEBUI_METHOD_GET;
camindx = -1;
device_id = -1;
tmplang = setlocale(LC_ALL, NULL);
if (tmplang == nullptr) {
lang = "en";
} else {
lang.assign(tmplang, 2);
}
mhd_first = true;
cam = nullptr;
webu_file = nullptr;
webu_html = nullptr;
webu_json = nullptr;
webu_post = nullptr;
webu_stream = nullptr;
url.assign(uri);
parms_edit();
webu->cnct_cnt++;
}
cls_webu_ans::~cls_webu_ans()
{
deinit_counter();
if (webu_file != nullptr) {
delete webu_file;
}
if (webu_html != nullptr) {
delete webu_html;
}
if (webu_json != nullptr) {
delete webu_json;
}
if (webu_post != nullptr) {
delete webu_post;
}
if (webu_stream != nullptr) {
delete webu_stream;
}
mydelete(webu_file);
mydelete(webu_html);
mydelete(webu_json);
mydelete(webu_post);
mydelete(webu_stream);
myfree(auth_user);
myfree(auth_pass);

View File

@@ -23,12 +23,6 @@
cls_webu_ans(cls_motapp *p_motapp, const char *uri);
~cls_webu_ans();
mhdrslt answer_main(struct MHD_Connection *connection, const char *method
, const char *upload_data, size_t *upload_data_size);
void mhd_send();
void bad_request();
cls_motapp *app;
cls_webu *webu;
cls_camera *cam;
@@ -46,6 +40,7 @@
enum WEBUI_RESP resp_type; /* indicator for the type of response to provide. */
std::string resp_page; /* The response that will be sent */
int camindx; /* Index number of the cam */
int device_id; /* Device id number requested */
enum WEBUI_CNCT cnct_type; /* Type of connection we are processing */
@@ -53,6 +48,12 @@
std::string hostfull; /* Full http name for host with port number */
bool gzip_encode; /* Bool for whether to gzip response */
void mhd_send();
void bad_request();
mhdrslt answer_main(struct MHD_Connection *connection, const char *method
, const char *upload_data, size_t *upload_data_size);
private:
cls_webu_file *webu_file;
cls_webu_html *webu_html;

View File

@@ -1,609 +0,0 @@
/*
* This file is part of MotionPlus.
*
* MotionPlus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MotionPlus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MotionPlus. If not, see <https://www.gnu.org/licenses/>.
*
*/
#include "motionplus.hpp"
#include "camera.hpp"
#include "conf.hpp"
#include "logger.hpp"
#include "util.hpp"
#include "webu.hpp"
#include "webu_common.hpp"
#include "webu_ans.hpp"
#include "webu_html.hpp"
#include "webu_stream.hpp"
#include "webu_mpegts.hpp"
#include "webu_json.hpp"
#include "webu_post.hpp"
#include "webu_file.hpp"
#include "video_v4l2.hpp"
bool cls_webu_common::check_finish()
{
if (webu->wb_finish){
resp_used = 0;
return true;
}
if (webua->cam != NULL) {
if ((webua->cam->handler_stop == true) ||
(webua->cam->passflag == false)) {
resp_used = 0;
return true;
}
}
return false;
}
void cls_webu_common::set_fps()
{
if ((webua->cam->detecting_motion == false) &&
(app->cam_list[webua->camindx]->cfg->stream_motion)) {
stream_fps = 1;
} else {
stream_fps = app->cam_list[webua->camindx]->cfg->stream_maxrate;
}
}
/* Sleep required time to get to the user requested framerate for the stream */
void cls_webu_common::delay()
{
long stream_rate;
struct timespec time_curr;
long stream_delay;
if (check_finish()) {
return;
}
clock_gettime(CLOCK_MONOTONIC, &time_curr);
/* The stream rate MUST be less than 1000000000 otherwise undefined behaviour
* will occur with the SLEEP function.
*/
stream_delay = ((time_curr.tv_nsec - time_last.tv_nsec)) +
((time_curr.tv_sec - time_last.tv_sec)*1000000000);
if (stream_delay < 0) {
stream_delay = 0;
}
if (stream_delay > 1000000000 ) {
stream_delay = 1000000000;
}
if (stream_fps >= 1) {
stream_rate = ( (1000000000 / stream_fps) - stream_delay);
if ((stream_rate > 0) && (stream_rate < 1000000000)) {
SLEEP(0,stream_rate);
} else if (stream_rate == 1000000000) {
SLEEP(1,0);
}
}
clock_gettime(CLOCK_MONOTONIC, &time_last);
}
void cls_webu_common::img_sizes(cls_camera *p_cam, int &img_w, int &img_h)
{
if (((webua->cnct_type == WEBUI_CNCT_JPG_SUB) ||
(webua->cnct_type == WEBUI_CNCT_TS_SUB)) &&
(((p_cam->imgs.width % 16) == 0) &&
((p_cam->imgs.height % 16) == 0))) {
img_w = (p_cam->imgs.width/2);
img_h = (p_cam->imgs.height/2);
} else {
img_w = p_cam->imgs.width;
img_h = p_cam->imgs.height;
}
img_w = ((p_cam->all_loc.scale * img_w) / 100);
if ((img_w % 16) != 0) {
img_w = img_w - (img_w % 16) + 16;
}
img_h = ((p_cam->all_loc.scale * img_h) / 100);
if ((img_h % 16) != 0) {
img_h = img_h - (img_h % 16) + 16;
}
if (img_w < 64){
img_w = 64;
}
if (img_h < 64){
img_h = 64;
}
}
void cls_webu_common::img_resize(cls_camera *p_cam
, uint8_t *src, uint8_t *dst, int dst_w, int dst_h)
{
int retcd, img_sz, src_h, src_w;
char errstr[128];
uint8_t *buf;
AVFrame *frm_in, *frm_out;
struct SwsContext *swsctx;
src_h = p_cam->imgs.height;
src_w = p_cam->imgs.width;
img_sz = (dst_h * dst_w * 3)/2;
memset(dst, 0x00, (size_t)img_sz);
frm_in = av_frame_alloc();
if (frm_in == NULL) {
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, _("Unable to allocate frm_in."));
return;
}
frm_out = av_frame_alloc();
if (frm_out == NULL) {
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, _("Unable to allocate frm_out."));
av_frame_free(&frm_in);
return;
}
retcd = av_image_fill_arrays(
frm_in->data, frm_in->linesize
, src, AV_PIX_FMT_YUV420P
, src_w, src_h, 1);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, "Error filling arrays: %s", errstr);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
return;
}
buf = (uint8_t *)mymalloc((size_t)img_sz);
retcd = av_image_fill_arrays(
frm_out->data, frm_out->linesize
, buf, AV_PIX_FMT_YUV420P
, dst_w, dst_h, 1);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, "Error Filling array 2: %s", errstr);
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
return;
}
swsctx = sws_getContext(
src_w, src_h, AV_PIX_FMT_YUV420P
,dst_w, dst_h, AV_PIX_FMT_YUV420P
,SWS_BICUBIC, NULL, NULL, NULL);
if (swsctx == NULL) {
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
, _("Unable to allocate scaling context."));
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
return;
}
retcd = sws_scale(swsctx
, (const uint8_t* const *)frm_in->data, frm_in->linesize
, 0, src_h, frm_out->data, frm_out->linesize);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
,_("Error resizing/reformatting: %s"), errstr);
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
sws_freeContext(swsctx);
return;
}
retcd = av_image_copy_to_buffer(
(uint8_t *)dst, img_sz
, (const uint8_t * const *)frm_out->data
, frm_out->linesize
, AV_PIX_FMT_YUV420P, dst_w, dst_h, 1);
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_NETCAM, NO_ERRNO
,_("Error putting frame into output buffer: %s"), errstr);
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
sws_freeContext(swsctx);
return;
}
free(buf);
av_frame_free(&frm_in);
av_frame_free(&frm_out);
sws_freeContext(swsctx);
}
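/* Usage sketch for img_resize() (assumed names and sizes): downscale one
 * camera's 1280x720 YUV420P frame into the tile size computed by img_sizes();
 * src_frame stands in for the camera's current YUV420P image buffer.
 *   int tile_w, tile_h;
 *   img_sizes(p_cam, tile_w, tile_h);
 *   uint8_t *tile = (uint8_t *)mymalloc((size_t)((tile_w * tile_h * 3) / 2));
 *   img_resize(p_cam, src_frame, tile, tile_w, tile_h);
 *   ...
 *   myfree(tile);
 * The destination buffer must hold tile_w * tile_h * 3 / 2 bytes, matching the
 * img_sz calculation at the top of img_resize().
 */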
void cls_webu_common::all_getimg()
{
int a_y, a_u, a_v; /* all img y,u,v */
int c_y, c_u, c_v; /* camera img y,u,v */
int dst_h, dst_w, dst_sz, src_sz, img_orow, img_ocol;
int indx, row, indx1;
unsigned char *dst_img, *src_img;
ctx_stream_data *strm;
ctx_all_sizes *all_sz;
cls_camera *p_cam;
memset(resp_image, '\0', resp_size);
all_sz = app->all_sizes;
a_y = 0;
a_u = (all_sz->width * all_sz->height);
a_v = a_u + (a_u / 4);
memset(all_img_data , 0x80, (size_t)a_u);
memset(all_img_data + a_u, 0x80, (size_t)(a_u/2));
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, dst_w, dst_h);
dst_sz = (dst_h * dst_w * 3)/2;
src_sz = (p_cam->imgs.width * p_cam->imgs.height * 3)/2;
img_orow = p_cam->all_loc.offset_row;
img_ocol = p_cam->all_loc.offset_col;
if ((webua->cnct_type == WEBUI_CNCT_JPG_FULL) ||
(webua->cnct_type == WEBUI_CNCT_TS_FULL)) {
strm = &p_cam->stream.norm;
} else if ((webua->cnct_type == WEBUI_CNCT_JPG_SUB) ||
(webua->cnct_type == WEBUI_CNCT_TS_SUB)) {
/* The use of the full size image is not an error here.
For the all_img, we use a different scaling/resizing method,
so we need to start with the full size image and then resize
it to the substream size and stream_preview_scale. */
strm = &p_cam->stream.norm; /* <<==Normal size is correct here*/
} else if ((webua->cnct_type == WEBUI_CNCT_JPG_MOTION) ||
(webua->cnct_type == WEBUI_CNCT_TS_MOTION)) {
strm = &p_cam->stream.motion;
} else if ((webua->cnct_type == WEBUI_CNCT_JPG_SOURCE) ||
(webua->cnct_type == WEBUI_CNCT_TS_SOURCE )) {
strm = &p_cam->stream.source;
} else if ((webua->cnct_type == WEBUI_CNCT_JPG_SECONDARY) ||
(webua->cnct_type == WEBUI_CNCT_TS_SECONDARY)) {
strm = &p_cam->stream.secondary;
} else { /* Should not be possible*/
return;
}
dst_img = (unsigned char*) mymalloc((uint)dst_sz);
src_img = (unsigned char*) mymalloc((uint)src_sz);
pthread_mutex_lock(&p_cam->stream.mutex);
indx1=0;
while (indx1 < 1000) {
if (strm->img_data == NULL) {
if (strm->all_cnct == 0){
strm->all_cnct++;
}
pthread_mutex_unlock(&p_cam->stream.mutex);
SLEEP(0, 1000);
pthread_mutex_lock(&p_cam->stream.mutex);
} else {
break;
}
indx1++;
}
if (strm->img_data == NULL) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Could not get image for device %d", p_cam->cfg->device_id);
memset(src_img, 0x00, (uint)src_sz);
} else {
memcpy(src_img, strm->img_data, (uint)src_sz);
}
pthread_mutex_unlock(&p_cam->stream.mutex);
img_resize(p_cam, src_img, dst_img, dst_w, dst_h);
/*
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "src w %d h %d dst w %d h %d all w %d h %d "
, p_cam->imgs.width, p_cam->imgs.height
, dst_w, dst_h
, all_sz->width,all_sz->height);
*/
a_y = (img_orow * all_sz->width) + img_ocol;
a_u =(all_sz->height * all_sz->width) +
((img_orow / 4) * all_sz->width) + (img_ocol / 2) ;
a_v = a_u + ((all_sz->height * all_sz->width) / 4);
c_y = 0;
c_u = (dst_w * dst_h);
c_v = c_u + (c_u / 4);
/*
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "r %d c %d a %d %d %d h %d w %d"
, img_orow, img_ocol
, a_y, a_u, a_v
, all_sz->height, all_sz->width);
*/
for (row=0; row<dst_h; row++) {
memcpy(all_img_data + a_y, dst_img + c_y, (uint)dst_w);
a_y += all_sz->width;
c_y += dst_w;
if (row % 2) {
memcpy(all_img_data + a_u, dst_img + c_u, (uint)dst_w / 2);
//mymemset(webui->all_img_data + a_u, 0xFA, dst_w/2);
a_u += (all_sz->width / 2);
c_u += (dst_w / 2);
memcpy(all_img_data + a_v, dst_img + c_v, (uint)dst_w / 2);
a_v += (all_sz->width / 2);
c_v += (dst_w / 2);
}
}
myfree(dst_img);
myfree(src_img);
}
}
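/* Worked example of the plane offsets used above (illustrative, assumed
 * sizes): for a 1280x720 composite and a tile placed at offset_row 480,
 * offset_col 640,
 *   a_y = 480*1280 + 640                  = 615040
 *   a_u = 1280*720 + (480/4)*1280 + 640/2 = 1075520
 *   a_v = a_u + (1280*720)/4              = 1305920
 * Each copied luma row then advances a_y by the composite width and c_y by the
 * tile width, while the half-resolution U and V rows advance on every other
 * source row by half of those widths.
 */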
void cls_webu_common::all_sizes()
{
int indx, row, col;
int chk_sz, chk_w, mx_col, mx_row;
int mx_h, mx_w, img_h, img_w;
bool dflt_scale;
cls_camera *p_cam;
if (app->all_sizes->reset == false) {
return;
}
mx_row = 0;
mx_col = 0;
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
if (mx_row < p_cam->all_loc.row) {
mx_row = p_cam->all_loc.row;
}
if (mx_col < p_cam->all_loc.col) {
mx_col = p_cam->all_loc.col;
}
}
dflt_scale = false;
for (indx=0; indx<app->cam_cnt; indx++) {
if (app->cam_list[indx]->all_loc.scale == -1) {
dflt_scale = true;
}
}
if (dflt_scale) {
for (indx=0; indx<app->cam_cnt; indx++) {
app->cam_list[indx]->all_loc.scale = 100;
}
for (row=1; row<=mx_row; row++) {
mx_h = 0;
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
if (row == p_cam->all_loc.row) {
img_sizes(p_cam, img_w, img_h);
if (mx_h < img_h) {
mx_h = img_h;
}
}
}
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
if (row == p_cam->all_loc.row) {
img_sizes(p_cam, img_w, img_h);
p_cam->all_loc.scale = (mx_h * 100) / img_h;
}
}
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, img_w, img_h);
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d Original Size %dx%d Scale %d New Size %dx%d"
, p_cam->cfg->device_id
, p_cam->imgs.width, p_cam->imgs.height
, p_cam->all_loc.scale, img_w, img_h);
}
}
}
app->all_sizes->width = 0;
app->all_sizes->height = 0;
for (row=1; row<=mx_row; row++) {
chk_sz = 0;
mx_h = 0;
for (col=1; col<=mx_col; col++) {
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, img_w, img_h);
if ((row == p_cam->all_loc.row) &&
(col == p_cam->all_loc.col)) {
p_cam->all_loc.offset_col = chk_sz;
chk_sz += img_w;
if (mx_h < img_h) {
mx_h = img_h;
}
}
}
}
/* Align/center vert. the images in each row*/
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, img_w, img_h);
if (p_cam->all_loc.row == row) {
p_cam->all_loc.offset_row =
app->all_sizes->height +
((mx_h - img_h)/2) ;
}
}
app->all_sizes->height += mx_h;
if (app->all_sizes->width < chk_sz) {
app->all_sizes->width = chk_sz;
}
}
/* Align/center horiz. the images within each column area */
chk_w = 0;
for (col=1; col<=mx_col; col++) {
chk_sz = 0;
mx_w = 0;
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, img_w, img_h);
if (p_cam->all_loc.col == col) {
if (p_cam->all_loc.offset_col < chk_w) {
p_cam->all_loc.offset_col = chk_w;
}
if (chk_sz < p_cam->all_loc.offset_col) {
chk_sz = p_cam->all_loc.offset_col;
}
if (mx_w < img_w) {
mx_w = img_w;
}
}
}
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, img_w, img_h);
if (p_cam->all_loc.col == col) {
p_cam->all_loc.offset_col =
chk_sz + ((mx_w - img_w) /2) ;
}
}
chk_w = mx_w + chk_sz;
if (app->all_sizes->width < chk_w) {
app->all_sizes->width = chk_w;
}
}
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
img_sizes(p_cam, img_w, img_h);
chk_sz = p_cam->all_loc.offset_col + p_cam->all_loc.offset_user_col;
if (chk_sz < 0) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image column offset. (%d + %d) less than zero "
, p_cam->cfg->device_id
, p_cam->all_loc.offset_col
, p_cam->all_loc.offset_user_col);
} else if ((chk_sz + img_w) > app->all_sizes->width) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image column offset. (%d + %d) over image size"
, p_cam->cfg->device_id
, p_cam->all_loc.offset_col
, p_cam->all_loc.offset_user_col);
} else {
p_cam->all_loc.offset_col = chk_sz;
}
chk_sz = p_cam->all_loc.offset_row + p_cam->all_loc.offset_user_row;
if (chk_sz < 0 ) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image row offset. (%d + %d) less than zero "
, p_cam->cfg->device_id
, p_cam->all_loc.offset_row
, p_cam->all_loc.offset_user_row);
} else if ((chk_sz + img_h) > app->all_sizes->height) {
MOTPLS_LOG(DBG, TYPE_STREAM, NO_ERRNO
, "Device %d invalid image row offset. (%d + %d) over image size"
, p_cam->cfg->device_id
, p_cam->all_loc.offset_row
, p_cam->all_loc.offset_user_row);
} else {
p_cam->all_loc.offset_row = chk_sz;
}
}
app->all_sizes->img_sz =((
app->all_sizes->height *
app->all_sizes->width * 3)/2);
/*
for (indx=0; indx<app->cam_cnt; indx++) {
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO
, "row %d col %d offset row %d offset col %d"
, app->cam_list[indx]->all_loc.row
, app->cam_list[indx]->all_loc.col
, app->cam_list[indx]->all_loc.offset_row
, app->cam_list[indx]->all_loc.offset_col);
}
*/
}
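/* Worked example of the layout pass above (illustrative, assumed cameras):
 * two cameras share row 1, a 640x480 device in column 1 and a 320x240 device
 * in column 2, both with all_loc.scale left at the default.
 *   - default scaling: mx_h is 480, so the second camera gets scale
 *     (480*100)/240 = 200 and both tiles become 640x480;
 *   - the row pass assigns offset_col 0 and 640 and a composite height of 480;
 *   - the column pass leaves those offsets unchanged, giving a 1280x480
 *     composite and img_sz = (1280*480*3)/2 bytes.
 */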
/* Allocate buffers if needed. */
void cls_webu_common::one_buffer()
{
if (webua->cam == NULL) {
return;
}
if (resp_size < (size_t)webua->cam->imgs.size_norm) {
if (resp_image != NULL) {
myfree(resp_image);
}
resp_image = (unsigned char*) mymalloc((uint)webua->cam->imgs.size_norm);
memset(resp_image,'\0', (uint)webua->cam->imgs.size_norm);
resp_size = (uint)webua->cam->imgs.size_norm;
resp_used = 0;
}
}
void cls_webu_common::all_buffer()
{
if (resp_size < (size_t)app->all_sizes->img_sz) {
if (resp_image != nullptr) {
myfree(resp_image);
}
resp_size = (uint)app->all_sizes->img_sz;
resp_image = (unsigned char*) mymalloc(resp_size);
memset(resp_image, '\0', resp_size);
resp_used = 0;
}
if ((all_img_data == nullptr) &&
(app->all_sizes->img_sz >0)) {
all_img_data = (unsigned char*)
mymalloc((size_t)app->all_sizes->img_sz);
}
}
cls_webu_common::cls_webu_common(cls_webu_ans *p_webua)
{
app = p_webua->app;
webu = p_webua->webu;
webua = p_webua;
all_img_data = nullptr;
resp_image = nullptr;
resp_size = 0; /* The size of the resp_image buffer. May get adjusted */
resp_used = 0; /* How many bytes used so far in resp_image*/
stream_fps = 1; /* Stream rate */
}
cls_webu_common::~cls_webu_common()
{
app = nullptr;
webu = nullptr;
webua = nullptr;
myfree(resp_image);
myfree(all_img_data);
}

View File

@@ -1,52 +0,0 @@
/*
* This file is part of MotionPlus.
*
* MotionPlus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MotionPlus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MotionPlus. If not, see <https://www.gnu.org/licenses/>.
*
*/
#ifndef _INCLUDE_WEBU_COMMON_HPP_
#define _INCLUDE_WEBU_COMMON_HPP_
class cls_webu_common {
public:
cls_webu_common(cls_webu_ans *p_webua);
~cls_webu_common();
bool check_finish();
void delay();
void set_fps();
void all_getimg();
void all_sizes();
void all_buffer();
void one_buffer();
struct timespec time_last; /* Keep track of processing time for stream thread*/
size_t resp_size; /* The allocated size of the response */
size_t resp_used; /* The amount of the response page used */
unsigned char *resp_image; /* Response image to provide to user */
unsigned char *all_img_data; /* Image for all cameras */
int stream_fps; /* Stream rate per second */
private:
cls_motapp *app;
cls_webu *webu;
cls_webu_ans *webua;
void img_sizes(cls_camera *cam, int &img_w, int &img_h);
void img_resize(cls_camera *cam
, uint8_t *src, uint8_t *dst, int dst_w, int dst_h);
};
#endif

View File

@@ -24,7 +24,6 @@
#include "picture.hpp"
#include "webu.hpp"
#include "webu_ans.hpp"
#include "webu_common.hpp"
#include "webu_file.hpp"
#include "dbse.hpp"

View File

@@ -22,7 +22,6 @@
#include "logger.hpp"
#include "util.hpp"
#include "webu.hpp"
#include "webu_common.hpp"
#include "webu_ans.hpp"
#include "webu_html.hpp"

View File

@@ -23,7 +23,6 @@
#include "util.hpp"
#include "webu.hpp"
#include "webu_ans.hpp"
#include "webu_common.hpp"
#include "webu_json.hpp"
#include "dbse.hpp"

View File

@@ -18,12 +18,12 @@
#include "motionplus.hpp"
#include "camera.hpp"
#include "allcam.hpp"
#include "conf.hpp"
#include "logger.hpp"
#include "util.hpp"
#include "picture.hpp"
#include "webu.hpp"
#include "webu_common.hpp"
#include "webu_ans.hpp"
#include "webu_stream.hpp"
#include "webu_mpegts.hpp"
@@ -136,7 +136,7 @@ int cls_webu_mpegts::pic_get()
void cls_webu_mpegts::resetpos()
{
stream_pos = 0;
webuc->resp_used = 0;
webus->resp_used = 0;
}
int cls_webu_mpegts::getimg()
@@ -146,18 +146,18 @@ int cls_webu_mpegts::getimg()
unsigned char *img_data;
int img_sz;
if (webuc->check_finish()) {
if (webus->check_finish() == true) {
resetpos();
return 0;
}
clock_gettime(CLOCK_REALTIME, &curr_ts);
memset(webuc->resp_image, '\0', webuc->resp_size);
webuc->resp_used = 0;
memset(webus->resp_image, '\0', webus->resp_size);
webus->resp_used = 0;
if (webua->device_id > 0) {
webuc->set_fps();
webus->set_fps();
/* Assign to a local pointer the stream we want */
if (webua->cnct_type == WEBUI_CNCT_TS_FULL) {
strm = &webua->cam->stream.norm;
@@ -183,11 +183,30 @@ int cls_webu_mpegts::getimg()
}
pthread_mutex_unlock(&webua->cam->stream.mutex);
} else {
webuc->all_getimg();
if (webua->cnct_type == WEBUI_CNCT_TS_FULL) {
strm = &webua->app->allcam->stream.norm;
} else if (webua->cnct_type == WEBUI_CNCT_TS_SUB) {
strm = &webua->app->allcam->stream.sub;
} else if (webua->cnct_type == WEBUI_CNCT_TS_MOTION) {
strm = &webua->app->allcam->stream.motion;
} else if (webua->cnct_type == WEBUI_CNCT_TS_SOURCE) {
strm = &webua->app->allcam->stream.source;
} else if (webua->cnct_type == WEBUI_CNCT_TS_SECONDARY) {
strm = &webua->app->allcam->stream.secondary;
} else {
return 0;
}
img_sz = app->allcam->all_sizes.img_sz;
img_data = (unsigned char*) mymalloc((uint)img_sz);
pthread_mutex_lock(&webua->app->allcam->stream.mutex);
if (strm->img_data == nullptr) {
memset(img_data, 0x00, (uint)img_sz);
} else {
memcpy(img_data, strm->img_data, (uint)img_sz);
strm->consumed = true;
}
pthread_mutex_unlock(&webua->app->allcam->stream.mutex);
img_data = (unsigned char*) mymalloc((uint)app->all_sizes->img_sz);
memcpy(img_data, webuc->all_img_data, (uint)app->all_sizes->img_sz);
}
if (pic_send(img_data) < 0) {
@@ -205,19 +224,19 @@ int cls_webu_mpegts::getimg()
int cls_webu_mpegts::avio_buf(myuint *buf, int buf_size)
{
if (webuc->resp_size < (size_t)buf_size + webuc->resp_used) {
webuc->resp_size = (size_t)buf_size + webuc->resp_used;
webuc->resp_image = (unsigned char*)realloc(
webuc->resp_image, webuc->resp_size);
if (webus->resp_size < (size_t)buf_size + webus->resp_used) {
webus->resp_size = (size_t)buf_size + webus->resp_used;
webus->resp_image = (unsigned char*)realloc(
webus->resp_image, webus->resp_size);
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO
,_("resp_image reallocated %d %d %d")
,webuc->resp_size
,webuc->resp_used
,webus->resp_size
,webus->resp_used
,buf_size);
}
memcpy(webuc->resp_image + webuc->resp_used, buf, (uint)buf_size);
webuc->resp_used += (uint)buf_size;
memcpy(webus->resp_image + webus->resp_used, buf, (uint)buf_size);
webus->resp_used += (uint)buf_size;
return buf_size;
}
@@ -226,12 +245,19 @@ ssize_t cls_webu_mpegts::response(char *buf, size_t max)
{
size_t sent_bytes;
if (webuc->check_finish()) {
if (webus->check_finish()) {
return -1;
}
if (ctx_codec != nullptr) {
if ((webua->app->allcam->all_sizes.height != ctx_codec->height ) ||
(webua->app->allcam->all_sizes.width != ctx_codec->width)) {
return -1;
}
}
if (stream_pos == 0) {
webuc->delay();
webus->delay();
resetpos();
if (getimg() < 0) {
return 0;
@@ -239,21 +265,21 @@ ssize_t cls_webu_mpegts::response(char *buf, size_t max)
}
/* If we don't have anything in the avio buffer at this point bail out */
if (webuc->resp_used == 0) {
if (webus->resp_used == 0) {
resetpos();
return 0;
}
if ((webuc->resp_used - stream_pos) > max) {
if ((webus->resp_used - stream_pos) > max) {
sent_bytes = max;
} else {
sent_bytes = webuc->resp_used - stream_pos;
sent_bytes = webus->resp_used - stream_pos;
}
memcpy(buf, webuc->resp_image + stream_pos, (uint)sent_bytes);
memcpy(buf, webus->resp_image + stream_pos, (uint)sent_bytes);
stream_pos = stream_pos + sent_bytes;
if (stream_pos >= webuc->resp_used) {
if (stream_pos >= webus->resp_used) {
stream_pos = 0;
}
@@ -271,7 +297,7 @@ int cls_webu_mpegts::open_mpegts()
size_t aviobuf_sz;
opts = NULL;
webuc->stream_fps = 10000; /* For quick start up*/
webus->stream_fps = 10000; /* For quick start up*/
aviobuf_sz = 4096;
clock_gettime(CLOCK_REALTIME, &start_time);
@@ -293,9 +319,11 @@ int cls_webu_mpegts::open_mpegts()
img_h = webua->cam->imgs.height;
}
} else {
webuc->all_sizes();
img_w = app->all_sizes->width;
img_h = app->all_sizes->height;
if (webus->all_ready() == false) {
return -1;
}
img_w = app->allcam->all_sizes.width;
img_h = app->allcam->all_sizes.height;
}
ctx_codec = avcodec_alloc_context3(codec);
@@ -322,7 +350,8 @@ int cls_webu_mpegts::open_mpegts()
if (retcd < 0) {
av_strerror(retcd, errstr, sizeof(errstr));
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO
,_("Failed to copy decoder parameters!: %s"), errstr);
,_("Failed to open codec context for %dx%d transport stream: %s")
, img_w, img_h, errstr);
av_dict_free(&opts);
return -1;
}
@@ -336,10 +365,10 @@ int cls_webu_mpegts::open_mpegts()
return -1;
}
if (webua->device_id > 0) {
webuc->one_buffer();
if (webua->device_id == 0) {
webus->all_buffer();
} else {
webuc->all_buffer();
webus->one_buffer();
}
@@ -359,7 +388,7 @@ int cls_webu_mpegts::open_mpegts()
}
stream_pos = 0;
webuc->resp_used = 0;
webus->resp_used = 0;
av_dict_free(&opts);
@@ -377,7 +406,7 @@ mhdrslt cls_webu_mpegts::main()
return MHD_NO;
}
clock_gettime(CLOCK_MONOTONIC, &webuc->time_last);
clock_gettime(CLOCK_MONOTONIC, &webus->time_last);
response = MHD_create_response_from_callback (MHD_SIZE_UNKNOWN, 4096
,&webu_mpegts_response, this, NULL);
@@ -403,12 +432,13 @@ mhdrslt cls_webu_mpegts::main()
return retcd;
}
cls_webu_mpegts::cls_webu_mpegts(cls_webu_ans *p_webua)
cls_webu_mpegts::cls_webu_mpegts(cls_webu_ans *p_webua, cls_webu_stream *p_webus)
{
app = p_webua->app;
webu = p_webua->webu;
webua = p_webua;
webuc = new cls_webu_common(p_webua);
webus = p_webus;
stream_pos = 0;
picture = nullptr;
ctx_codec = nullptr;
@@ -420,7 +450,6 @@ cls_webu_mpegts::~cls_webu_mpegts()
app = nullptr;
webu = nullptr;
webua = nullptr;
delete webuc;
if (picture != nullptr) {
av_frame_free(&picture);
picture = nullptr;

View File

@@ -21,7 +21,7 @@
class cls_webu_mpegts {
public:
cls_webu_mpegts(cls_webu_ans *p_webua);
cls_webu_mpegts(cls_webu_ans *p_webua, cls_webu_stream *p_webus);
~cls_webu_mpegts();
int avio_buf(myuint *buf, int buf_size);
ssize_t response(char *buf, size_t max);
@@ -31,7 +31,7 @@
cls_motapp *app;
cls_webu *webu;
cls_webu_ans *webua;
cls_webu_common *webuc;
cls_webu_stream *webus;
AVFrame *picture;
AVFormatContext *fmtctx;

View File

@@ -26,7 +26,6 @@
#include "webu.hpp"
#include "webu_ans.hpp"
#include "webu_html.hpp"
#include "webu_common.hpp"
#include "webu_post.hpp"
/**************Callback functions for MHD **********************/
@@ -320,12 +319,14 @@ void cls_webu_post::action_restart()
if (webua->device_id == 0) {
MOTPLS_LOG(NTC, TYPE_STREAM, NO_ERRNO, _("Restarting all cameras"));
for (indx=0; indx<app->cam_cnt; indx++) {
app->cam_list[indx]->handler_stop = false;
app->cam_list[indx]->restart = true;
}
} else {
MOTPLS_LOG(NTC, TYPE_STREAM, NO_ERRNO
, _("Restarting camera %d")
, app->cam_list[webua->camindx]->cfg->device_id);
app->cam_list[webua->camindx]->handler_stop = false;
app->cam_list[webua->camindx]->restart = true;
}
}
@@ -479,25 +480,29 @@ void cls_webu_post::config_set(int indx_parm, std::string parm_vl)
app->conf_src->edit_set(parm_nm, parm_vl);
config_restart_set("dbse",0);
} else {
for (indx=0;indx<app->cam_cnt;indx++){
app->cam_list[indx]->conf_src->edit_get(
parm_nm, parm_vl_dev, parm_ct);
if (parm_vl_dev == parm_vl_dflt) {
app->cam_list[indx]->conf_src->edit_set(
parm_nm, parm_vl);
config_restart_set("cam",indx);
}
}
for (indx=0;indx<app->snd_cnt;indx++) {
app->snd_list[indx]->conf_src->edit_get(
parm_nm, parm_vl_dev, parm_ct);
if (parm_vl_dev == parm_vl_dflt) {
app->snd_list[indx]->conf_src->edit_set(
parm_nm, parm_vl);
config_restart_set("snd",indx);
}
}
app->conf_src->edit_set(parm_nm, parm_vl);
if (app->cam_list[webua->camindx]->handler_running == false) {
app->cfg->edit_set(parm_nm, parm_vl);
} else {
for (indx=0;indx<app->cam_cnt;indx++){
app->cam_list[indx]->conf_src->edit_get(
parm_nm, parm_vl_dev, parm_ct);
if (parm_vl_dev == parm_vl_dflt) {
app->cam_list[indx]->conf_src->edit_set(
parm_nm, parm_vl);
config_restart_set("cam",indx);
}
}
for (indx=0;indx<app->snd_cnt;indx++) {
app->snd_list[indx]->conf_src->edit_get(
parm_nm, parm_vl_dev, parm_ct);
if (parm_vl_dev == parm_vl_dflt) {
app->snd_list[indx]->conf_src->edit_set(
parm_nm, parm_vl);
config_restart_set("snd",indx);
}
}
}
}
} else {
if ((parm_ct == PARM_CAT_00) ||
@@ -505,9 +510,16 @@ void cls_webu_post::config_set(int indx_parm, std::string parm_vl)
(parm_ct == PARM_CAT_15)) {
return;
}
MOTPLS_LOG(INF, TYPE_ALL, NO_ERRNO, "Config edit set. %s:%s"
,parm_nm.c_str(), parm_vl.c_str());
app->cam_list[webua->camindx]->conf_src->edit_set(
parm_nm, parm_vl);
config_restart_set("cam", webua->camindx);
if (app->cam_list[webua->camindx]->handler_running == true) {
config_restart_set("cam", webua->camindx);
} else {
app->cam_list[webua->camindx]->cfg->edit_set(
parm_nm, parm_vl);
}
}
}

View File

@@ -18,13 +18,13 @@
#include "motionplus.hpp"
#include "camera.hpp"
#include "allcam.hpp"
#include "conf.hpp"
#include "logger.hpp"
#include "util.hpp"
#include "picture.hpp"
#include "webu.hpp"
#include "webu_ans.hpp"
#include "webu_common.hpp"
#include "webu_stream.hpp"
#include "webu_mpegts.hpp"
#include "alg_sec.hpp"
@@ -37,6 +37,99 @@ static ssize_t webu_mjpeg_response (void *cls, uint64_t pos, char *buf, size_t m
return webu_stream->mjpeg_response(buf, max);
}
void cls_webu_stream::set_fps()
{
if (webua->camindx >= app->cam_list.size()) {
stream_fps = 1;
} else if ((webua->cam->detecting_motion == false) &&
(app->cam_list[webua->camindx]->cfg->stream_motion)) {
stream_fps = 1;
} else {
stream_fps = app->cam_list[webua->camindx]->cfg->stream_maxrate;
}
}
/* Sleep the time required to reach the user requested framerate for the stream */
void cls_webu_stream::delay()
{
long stream_rate;
struct timespec time_curr;
long stream_delay;
if (check_finish()) {
return;
}
clock_gettime(CLOCK_MONOTONIC, &time_curr);
/* The stream rate MUST be less than 1000000000 otherwise undefined behaviour
* will occur with the SLEEP function.
*/
stream_delay = ((time_curr.tv_nsec - time_last.tv_nsec)) +
((time_curr.tv_sec - time_last.tv_sec)*1000000000);
if (stream_delay < 0) {
stream_delay = 0;
}
if (stream_delay > 1000000000 ) {
stream_delay = 1000000000;
}
if (stream_fps >= 1) {
stream_rate = ( (1000000000 / stream_fps) - stream_delay);
if ((stream_rate > 0) && (stream_rate < 1000000000)) {
SLEEP(0,stream_rate);
} else if (stream_rate == 1000000000) {
SLEEP(1,0);
}
}
clock_gettime(CLOCK_MONOTONIC, &time_last);
}
void cls_webu_stream::one_buffer()
{
if (webua->cam == NULL) {
return;
}
if (resp_size < (size_t)webua->cam->imgs.size_norm) {
if (resp_image != NULL) {
myfree(resp_image);
}
resp_image = (unsigned char*) mymalloc((uint)webua->cam->imgs.size_norm);
memset(resp_image,'\0', (uint)webua->cam->imgs.size_norm);
resp_size = (uint)webua->cam->imgs.size_norm;
resp_used = 0;
}
}
void cls_webu_stream::all_buffer()
{
if (resp_size < (size_t)app->allcam->all_sizes.img_sz) {
if (resp_image != nullptr) {
myfree(resp_image);
}
resp_size = (uint)app->allcam->all_sizes.img_sz;
resp_image = (unsigned char*) mymalloc(resp_size);
memset(resp_image, '\0', resp_size);
resp_used = 0;
}
}
bool cls_webu_stream::check_finish()
{
if (webu->finish){
resp_used = 0;
return true;
}
if (webua->cam != NULL) {
if ((webua->cam->finish == true) ||
(webua->cam->passflag == false)) {
resp_used = 0;
return true;
}
}
return false;
}
bool cls_webu_stream::all_ready()
{
int indx, indx1;
@@ -44,8 +137,8 @@ bool cls_webu_stream::all_ready()
for (indx=0; indx<app->cam_cnt; indx++) {
p_cam = app->cam_list[indx];
if (p_cam->passflag == false) {
app->all_sizes->reset = true;
if ((p_cam->device_status == STATUS_OPENED) &&
(p_cam->passflag == false)) {
indx1 = 0;
while (indx1 < 1000) {
SLEEP(0, 1000);
@@ -64,12 +157,11 @@ bool cls_webu_stream::all_ready()
void cls_webu_stream::mjpeg_all_img()
{
int header_len, jpg_sz;
char resp_head[80];
ctx_all_sizes *all_sz;
unsigned char *jpg_data;
int header_len;
ctx_stream_data *strm;
if (webuc->check_finish()) {
if (check_finish()) {
return;
}
@@ -77,29 +169,48 @@ void cls_webu_stream::mjpeg_all_img()
return;
}
webuc->all_sizes();
webuc->all_buffer();
webuc->all_getimg();
all_buffer();
all_sz = app->all_sizes;
memset(resp_image, '\0', resp_size);
jpg_data = (unsigned char*) mymalloc((uint)all_sz->img_sz);
/* Assign to a local pointer the stream we want */
if (webua->app == NULL) {
return;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_FULL) {
strm = &webua->app->allcam->stream.norm;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SUB) {
strm = &webua->app->allcam->stream.sub;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_MOTION) {
strm = &webua->app->allcam->stream.motion;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SOURCE) {
strm = &webua->app->allcam->stream.source;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SECONDARY) {
strm = &webua->app->allcam->stream.secondary;
} else {
return;
}
jpg_sz = jpgutl_put_yuv420p(jpg_data, all_sz->img_sz
, webuc->all_img_data, all_sz->width, all_sz->height
, 70, NULL,NULL,NULL);
webuc->stream_fps = 1;
header_len = snprintf(resp_head, 80
,"--BoundaryString\r\n"
"Content-type: image/jpeg\r\n"
"Content-Length: %9d\r\n\r\n"
,jpg_sz);
memcpy(webuc->resp_image, resp_head, (uint)header_len);
memcpy(webuc->resp_image + header_len, jpg_data, (uint)jpg_sz);
memcpy(webuc->resp_image + header_len + jpg_sz,"\r\n",(uint)2);
webuc->resp_used =(uint)(header_len + jpg_sz + 2);
myfree(jpg_data);
/* Copy jpg from the motion loop thread */
pthread_mutex_lock(&webua->app->allcam->stream.mutex);
set_fps();
if (strm->jpg_data == NULL) {
pthread_mutex_unlock(&webua->app->allcam->stream.mutex);
return;
}
header_len = snprintf(resp_head, 80
,"--BoundaryString\r\n"
"Content-type: image/jpeg\r\n"
"Content-Length: %9d\r\n\r\n"
,strm->jpg_sz);
memcpy(resp_image, resp_head, (uint)header_len);
memcpy(resp_image + header_len
, strm->jpg_data
, (uint)strm->jpg_sz);
/* Copy in the terminator after the jpg data at the end*/
memcpy(resp_image + header_len + strm->jpg_sz,"\r\n",2);
resp_used =(uint)(header_len + strm->jpg_sz + 2);
strm->consumed = true;
pthread_mutex_unlock(&webua->app->allcam->stream.mutex);
}
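/* Illustrative sketch (assumed sizes) of the multipart frame assembled above:
 * each part handed back through mjpeg_response() is laid out as
 *   --BoundaryString\r\n
 *   Content-type: image/jpeg\r\n
 *   Content-Length:     45123\r\n
 *   \r\n
 *   <45123 bytes of JPEG data>\r\n
 * so an MJPEG client scanning the stream can delimit images by the boundary
 * string and the padded Content-Length value.
 */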
@@ -109,11 +220,11 @@ void cls_webu_stream::mjpeg_one_img()
int header_len;
ctx_stream_data *strm;
if (webuc->check_finish()) {
if (check_finish()) {
return;
}
memset(webuc->resp_image, '\0', webuc->resp_size);
memset(resp_image, '\0', resp_size);
/* Assign to a local pointer the stream we want */
if (webua->cam == NULL) {
@@ -134,7 +245,7 @@ void cls_webu_stream::mjpeg_one_img()
/* Copy jpg from the motion loop thread */
pthread_mutex_lock(&webua->cam->stream.mutex);
webuc->set_fps();
set_fps();
if (strm->jpg_data == NULL) {
pthread_mutex_unlock(&webua->cam->stream.mutex);
return;
@@ -144,13 +255,13 @@ void cls_webu_stream::mjpeg_one_img()
"Content-type: image/jpeg\r\n"
"Content-Length: %9d\r\n\r\n"
,strm->jpg_sz);
memcpy(webuc->resp_image, resp_head, (uint)header_len);
memcpy(webuc->resp_image + header_len
memcpy(resp_image, resp_head, (uint)header_len);
memcpy(resp_image + header_len
, strm->jpg_data
, (uint)strm->jpg_sz);
/* Copy in the terminator after the jpg data at the end*/
memcpy(webuc->resp_image + header_len + strm->jpg_sz,"\r\n",2);
webuc->resp_used =(uint)(header_len + strm->jpg_sz + 2);
memcpy(resp_image + header_len + strm->jpg_sz,"\r\n",2);
resp_used =(uint)(header_len + strm->jpg_sz + 2);
strm->consumed = true;
pthread_mutex_unlock(&webua->cam->stream.mutex);
@@ -160,16 +271,16 @@ ssize_t cls_webu_stream::mjpeg_response (char *buf, size_t max)
{
size_t sent_bytes;
if (webuc->check_finish()) {
if (check_finish()) {
return -1;
}
if ((stream_pos == 0) || (webuc->resp_used == 0)) {
if ((stream_pos == 0) || (resp_used == 0)) {
webuc->delay();
delay();
stream_pos = 0;
webuc->resp_used = 0;
resp_used = 0;
if (webua->device_id == 0) {
mjpeg_all_img();
@@ -177,21 +288,21 @@ ssize_t cls_webu_stream::mjpeg_response (char *buf, size_t max)
mjpeg_one_img();
}
if (webuc->resp_used == 0) {
if (resp_used == 0) {
return 0;
}
}
if ((webuc->resp_used - stream_pos) > max) {
if ((resp_used - stream_pos) > max) {
sent_bytes = max;
} else {
sent_bytes = webuc->resp_used - stream_pos;
sent_bytes = resp_used - stream_pos;
}
memcpy(buf, webuc->resp_image + stream_pos, sent_bytes);
memcpy(buf, resp_image + stream_pos, sent_bytes);
stream_pos = stream_pos + sent_bytes;
if (stream_pos >= webuc->resp_used) {
if (stream_pos >= resp_used) {
stream_pos = 0;
}
@@ -220,33 +331,68 @@ void cls_webu_stream::all_cnct()
strm->all_cnct++;
pthread_mutex_unlock(&app->cam_list[indx_cam]->stream.mutex);
}
if (webua->cnct_type == WEBUI_CNCT_JPG_SUB) {
strm = &app->allcam->stream.sub;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_MOTION) {
strm = &app->allcam->stream.motion;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SOURCE) {
strm = &app->allcam->stream.source;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SECONDARY) {
strm = &app->allcam->stream.secondary;
} else {
strm = &app->allcam->stream.norm;
}
pthread_mutex_lock(&app->allcam->stream.mutex);
strm->all_cnct++;
pthread_mutex_unlock(&app->allcam->stream.mutex);
}
/* Obtain the current image for the camera.*/
void cls_webu_stream::static_all_img()
{
ctx_all_sizes *all_sz;
unsigned char *jpg_data;
ctx_stream_data *strm;
if (webuc->check_finish()) {
if (check_finish()) {
return;
}
if (all_ready() == false) {
return;
}
webuc->all_sizes();
webuc->all_buffer();
webuc->all_getimg();
all_sz = app->all_sizes;
jpg_data = (unsigned char*)mymalloc((uint)all_sz->img_sz);
all_buffer();
resp_used = 0;
memset(resp_image, '\0', resp_size);
/* Assign to a local pointer the stream we want */
if (webua->cnct_type == WEBUI_CNCT_JPG_FULL) {
strm = &webua->app->allcam->stream.norm;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SUB) {
strm = &webua->app->allcam->stream.sub;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_MOTION) {
strm = &webua->app->allcam->stream.motion;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SOURCE) {
strm = &webua->app->allcam->stream.source;
} else if (webua->cnct_type == WEBUI_CNCT_JPG_SECONDARY) {
strm = &webua->app->allcam->stream.secondary;
} else {
return;
}
pthread_mutex_lock(&webua->app->allcam->stream.mutex);
if (strm->jpg_data == NULL) {
pthread_mutex_unlock(&webua->app->allcam->stream.mutex);
return;
}
memcpy(resp_image
, strm->jpg_data
, (uint)strm->jpg_sz);
resp_used =(uint)strm->jpg_sz;
strm->consumed = true;
pthread_mutex_unlock(&webua->app->allcam->stream.mutex);
webuc->resp_used = (uint)jpgutl_put_yuv420p(jpg_data
, all_sz->img_sz, webuc->all_img_data, all_sz->width
, all_sz->height, 70, NULL,NULL,NULL);
memcpy(webuc->resp_image, jpg_data, webuc->resp_used);
myfree(jpg_data);
}
/* Increment the jpg stream counters */
@@ -287,10 +433,10 @@ void cls_webu_stream::static_one_img()
{
ctx_stream_data *strm;
webuc->one_buffer();
one_buffer();
webuc->resp_used = 0;
memset(webuc->resp_image, '\0', webuc->resp_size);
resp_used = 0;
memset(resp_image, '\0', resp_size);
/* Assign to a local pointer the stream we want */
if (webua->cam == NULL) {
@@ -314,37 +460,37 @@ void cls_webu_stream::static_one_img()
pthread_mutex_unlock(&webua->cam->stream.mutex);
return;
}
memcpy(webuc->resp_image
memcpy(resp_image
, strm->jpg_data
, (uint)strm->jpg_sz);
webuc->resp_used =(uint)strm->jpg_sz;
resp_used =(uint)strm->jpg_sz;
strm->consumed = true;
pthread_mutex_unlock(&webua->cam->stream.mutex);
}
/* Determine whether the user specified a valid URL for the particular port */
int cls_webu_stream::checks()
bool cls_webu_stream::valid_request()
{
if (check_finish()) {
return false;
}
pthread_mutex_lock(&app->mutex_camlst);
if (webua->device_id < 0) {
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO
, _("Invalid camera specified: %s"), webua->url.c_str());
pthread_mutex_unlock(&app->mutex_camlst);
return -1;
return false;
}
if ((webua->device_id > 0) && (webua->cam == NULL)) {
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO
, _("Invalid camera specified: %s"), webua->url.c_str());
pthread_mutex_unlock(&app->mutex_camlst);
return -1;
}
if (webuc->check_finish()) {
return -1;
return false;
}
pthread_mutex_unlock(&app->mutex_camlst);
return 0;
return true;
}
/* Increment the transport stream counters */
@@ -437,7 +583,7 @@ mhdrslt cls_webu_stream::stream_mjpeg()
struct MHD_Response *response;
int indx;
clock_gettime(CLOCK_MONOTONIC, &webuc->time_last);
clock_gettime(CLOCK_MONOTONIC, &time_last);
response = MHD_create_response_from_callback (MHD_SIZE_UNKNOWN, 1024
, &webu_mjpeg_response, (void *)this, NULL);
@@ -468,16 +614,16 @@ mhdrslt cls_webu_stream::stream_static()
{
mhdrslt retcd;
struct MHD_Response *response;
char resp_used[20];
char resp_head[20];
int indx;
if (webuc->resp_used == 0) {
if (resp_used == 0) {
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO, _("Could not get image to stream."));
return MHD_NO;
}
response = MHD_create_response_from_buffer (
webuc->resp_size,(void *)webuc->resp_image
resp_size,(void *)resp_image
, MHD_RESPMEM_MUST_COPY);
if (response == NULL) {
MOTPLS_LOG(ERR, TYPE_STREAM, NO_ERRNO, _("Invalid response"));
@@ -493,8 +639,8 @@ mhdrslt cls_webu_stream::stream_static()
}
MHD_add_response_header (response, MHD_HTTP_HEADER_CONTENT_TYPE, "image/jpeg");
snprintf(resp_used, 20, "%9ld\r\n\r\n",(long)webuc->resp_used);
MHD_add_response_header (response, MHD_HTTP_HEADER_CONTENT_LENGTH, resp_used);
snprintf(resp_head, 20, "%9ld\r\n\r\n",(long)resp_used);
MHD_add_response_header (response, MHD_HTTP_HEADER_CONTENT_LENGTH, resp_head);
retcd = MHD_queue_response (webua->connection, MHD_HTTP_OK, response);
MHD_destroy_response (response);
@@ -507,23 +653,13 @@ mhdrslt cls_webu_stream::main()
{
mhdrslt retcd;
if (webuc->check_finish()) {
return MHD_NO;
}
if (webua->cam != NULL) {
if ((webua->cam->passflag == false) || (webua->cam->handler_stop)) {
return MHD_NO;
}
}
set_cnct_type();
if (checks() == -1) {
if (valid_request() == false) {
webua->bad_request();
return MHD_NO;
}
set_cnct_type();
if (webua->uri_cmd1 == "static") {
if (webua->device_id > 0) {
jpg_cnct();
@@ -536,10 +672,10 @@ mhdrslt cls_webu_stream::main()
} else if (webua->uri_cmd1 == "mjpg") {
if (webua->device_id > 0) {
jpg_cnct();
webuc->one_buffer();
one_buffer();
} else {
all_cnct();
webuc->all_buffer();
all_buffer();
}
retcd = stream_mjpeg();
} else if (webua->uri_cmd1 == "mpegts") {
@@ -549,7 +685,7 @@ mhdrslt cls_webu_stream::main()
all_cnct();
}
if (webu_mpegts == nullptr){
webu_mpegts = new cls_webu_mpegts(webua);
webu_mpegts = new cls_webu_mpegts(webua, this);
}
retcd = webu_mpegts->main();
if (retcd == MHD_NO) {
@@ -558,6 +694,7 @@ mhdrslt cls_webu_stream::main()
}
} else {
webua->bad_request();
retcd = MHD_NO;
}
@@ -568,18 +705,22 @@ cls_webu_stream::cls_webu_stream(cls_webu_ans *p_webua)
{
app = p_webua->app;
webu = p_webua->webu;
webuc = new cls_webu_common(p_webua);
webua = p_webua;
webu_mpegts = nullptr;
resp_image = nullptr;
resp_size = 0;
resp_used = 0;
stream_pos = 0;
stream_fps = 1;
}
cls_webu_stream::~cls_webu_stream()
{
if (webu_mpegts != nullptr){
delete webu_mpegts;
}
if (webuc != nullptr){
delete webuc;
}
mydelete(webu_mpegts);
myfree(resp_image);
}

View File

@@ -23,14 +23,25 @@
cls_webu_stream(cls_webu_ans *webua);
~cls_webu_stream();
int stream_fps;
size_t resp_size; /* The allocated size of the response */
size_t resp_used; /* The amount of the response page used */
u_char *resp_image; /* Response image to provide to user */
mhdrslt main();
ssize_t mjpeg_response (char *buf, size_t max);
bool check_finish();
void delay();
void set_fps();
void one_buffer();
void all_buffer();
bool all_ready();
struct timespec time_last; /* Keep track of processing time for stream thread*/
private:
cls_motapp *app;
cls_webu *webu;
cls_webu_ans *webua;
cls_webu_common *webuc;
cls_webu_mpegts *webu_mpegts;
size_t stream_pos;
@@ -42,8 +53,7 @@
mhdrslt stream_static();
mhdrslt stream_mjpeg();
int checks();
bool all_ready();
bool valid_request();
void all_cnct();
void jpg_cnct();
void ts_cnct();