Merge branch 'master' into webgeek1234-avcodec-jpeg

This commit is contained in:
Isaac Connor
2025-01-31 15:18:00 -05:00
537 changed files with 228079 additions and 183164 deletions


@@ -7,7 +7,7 @@ web/js/videojs.zoomrotate.js
web/js/fontfaceobserver.standalone.js
web/skins/classic/js/bootstrap-4.5.0.js
web/skins/classic/js/bootstrap.bundle.min.js
web/skins/classic/js/bootstrap-table-1.22.3
web/skins/classic/js/bootstrap-table-1.23.5
web/skins/classic/js/chosen
web/skins/classic/js/dateTimePicker
web/skins/classic/js/jquery-*.js
@@ -16,7 +16,6 @@ web/skins/classic/js/jquery.js
web/skins/classic/js/moment.js
web/skins/classic/js/video.js
web/skins/classic/assets
web/tools/mootools
web/js/janus.js
web/js/ajaxQueue.js
web/js/hls.js

4
.gitignore vendored

@@ -199,10 +199,6 @@ web/cmake_install.cmake
web/events/
web/images/
web/includes/config.php
web/tools/mootools/CMakeFiles/
web/tools/mootools/cmake_install.cmake
web/tools/mootools/mootools-core.js
web/tools/mootools/mootools-more.js
web/undef.log
zm.conf
zmconfgen.pl


@@ -7,6 +7,7 @@ build:
sphinx:
fail_on_warning: true
configuration: docs/conf.py
python:
install:


@@ -0,0 +1 @@
https://zoneminder.com/funding.json


@@ -445,7 +445,7 @@ if(NOT ZM_NO_MQTT)
find_package(MOSQUITTO)
if(MOSQUITTO_FOUND)
include_directories(${MOSQUITTO_INCLUDE_DIRS})
list(APPEND ZM_BIN_LIBS "${MOSQUITTO_LIBRARIES}")
list(APPEND ZM_BIN_LIBS "${MOSQUITTO_LIBRARY}")
set(optlibsfound "${optlibsfound} Mosquitto")
else()
set(optlibsnotfound "${optlibsnotfound} Mosquitto")
@@ -454,7 +454,7 @@ if(NOT ZM_NO_MQTT)
find_package(MOSQUITTOPP)
if(MOSQUITTOPP_FOUND)
include_directories(${MOSQUITTOPP_INCLUDE_DIRS})
list(APPEND ZM_BIN_LIBS "${MOSQUITTOPP_LIBRARIES}")
list(APPEND ZM_BIN_LIBS "${MOSQUITTOPP_LIBRARY}")
set(optlibsfound "${optlibsfound} Mosquittopp")
else()
set(optlibsnotfound "${optlibsnotfound} Mosquittopp")


@@ -47,7 +47,7 @@ MACRO(POD2MAN PODFILE MANFILE SECTION MANPAGE_DEST_PREFIX)
ADD_CUSTOM_COMMAND(
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${MANFILE}.${SECTION}.gz
COMMAND ${GZIP} --best -c ${CMAKE_CURRENT_BINARY_DIR}/${MANFILE}.${SECTION} > ${CMAKE_CURRENT_BINARY_DIR}/${MANFILE}.${SECTION}.gz
COMMAND ${GZIP} -n --best -c ${CMAKE_CURRENT_BINARY_DIR}/${MANFILE}.${SECTION} > ${CMAKE_CURRENT_BINARY_DIR}/${MANFILE}.${SECTION}.gz
DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${MANFILE}.${SECTION}
)


@@ -112,13 +112,34 @@ SET @s = (SELECT IF(
PREPARE stmt FROM @s;
EXECUTE stmt;
REPLACE INTO Monitors_Permissions (UserId,Permission, MonitorId)
SET @s = (SELECT IF(
(SELECT COUNT(*)
FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'MonitorIds'
) > 0,
"REPLACE INTO Monitors_Permissions (UserId,Permission, MonitorId)
SELECT Id, 'Edit', SUBSTRING_INDEX(SUBSTRING_INDEX(Users.MonitorIds, ',', n.n), ',', -1) value FROM Users CROSS JOIN (
SELECT a.N + b.N * 10 + 1 n FROM (SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) a ,(SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) b ORDER BY n ) n WHERE Users.Monitors='Edit' and Users.MonitorIds != '' AND n.n <= 1 + (LENGTH(Users.MonitorIds) - LENGTH(REPLACE(Users.MonitorIds, ',', ''))) ORDER BY value;
SELECT a.N + b.N * 10 + 1 n FROM (SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) a ,(SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) b ORDER BY n ) n WHERE Users.Monitors='Edit' and Users.MonitorIds != '' AND n.n <= 1 + (LENGTH(Users.MonitorIds) - LENGTH(REPLACE(Users.MonitorIds, ',', ''))) ORDER BY value",
"SELECT 'No MonitorIds in Users'"
));
PREPARE stmt FROM @s;
EXECUTE stmt;
REPLACE INTO Monitors_Permissions (UserId,Permission, MonitorId)
SET @s = (SELECT IF(
(SELECT COUNT(*)
FROM INFORMATION_SCHEMA.STATISTICS
WHERE table_name = 'Users'
AND table_schema = DATABASE()
AND column_name = 'MonitorIds'
) > 0,
"REPLACE INTO Monitors_Permissions (UserId,Permission, MonitorId)
SELECT Id, 'View', SUBSTRING_INDEX(SUBSTRING_INDEX(Users.MonitorIds, ',', n.n), ',', -1) value FROM Users CROSS JOIN (
SELECT a.N + b.N * 10 + 1 n FROM (SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) a ,(SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) b ORDER BY n ) n WHERE Users.Monitors!='Edit' and Users.MonitorIds != '' AND n.n <= 1 + (LENGTH(Users.MonitorIds) - LENGTH(REPLACE(Users.MonitorIds, ',', ''))) ORDER BY value;
SELECT a.N + b.N * 10 + 1 n FROM (SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) a ,(SELECT 0 AS N UNION ALL SELECT 1 UNION ALL SELECT 2 UNION ALL SELECT 3 UNION ALL SELECT 4 UNION ALL SELECT 5 UNION ALL SELECT 6 UNION ALL SELECT 7 UNION ALL SELECT 8 UNION ALL SELECT 9) b ORDER BY n ) n WHERE Users.Monitors!='Edit' and Users.MonitorIds != '' AND n.n <= 1 + (LENGTH(Users.MonitorIds) - LENGTH(REPLACE(Users.MonitorIds, ',', ''))) ORDER BY value",
"SELECT 'No MonitorIds in Users'"
));
PREPARE stmt FROM @s;
EXECUTE stmt;
DELETE FROM Monitors_Permissions WHERE MonitorID NOT IN (SELECT Id FROM Monitors);
ALTER TABLE Monitors_Permissions ADD CONSTRAINT Monitors_Permissions_ibfk_1 FOREIGN KEY (`MonitorId`) REFERENCES `Monitors` (`Id`) ON DELETE CASCADE;
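The two prepared statements above use a small tally table (the cross join of two digit sets, yielding the numbers 1-100) together with SUBSTRING_INDEX to split the legacy comma-separated Users.MonitorIds column into one Monitors_Permissions row per monitor, assigning 'Edit' or 'View' from the legacy Users.Monitors value. Purely as an illustration of that transformation (not part of the migration, and with hypothetical field names), the same logic expressed in C++ looks like this:

```c++
// Illustration only: what the tally-table SQL above computes, written as plain C++.
// The User fields mirror the legacy columns; all names here are hypothetical.
#include <iostream>
#include <sstream>
#include <string>
#include <tuple>
#include <vector>

struct User {
    int id;
    std::string monitors;     // legacy Users.Monitors value ("Edit", "View", ...)
    std::string monitor_ids;  // legacy Users.MonitorIds value, e.g. "1,3,7"
};

// One (UserId, Permission, MonitorId) row per comma-separated id:
// 'Edit' when the legacy Monitors value was 'Edit', otherwise 'View'.
std::vector<std::tuple<int, std::string, int>> to_permissions(const std::vector<User>& users) {
    std::vector<std::tuple<int, std::string, int>> rows;
    for (const auto& user : users) {
        if (user.monitor_ids.empty()) continue;
        const std::string permission = user.monitors == "Edit" ? "Edit" : "View";
        std::stringstream ids(user.monitor_ids);
        std::string id;
        while (std::getline(ids, id, ',')) {
            if (!id.empty()) rows.emplace_back(user.id, permission, std::stoi(id));
        }
    }
    return rows;
}

int main() {
    for (const auto& [user, permission, monitor] : to_permissions({{1, "Edit", "1,3,7"}, {2, "View", "2"}})) {
        std::cout << user << ' ' << permission << ' ' << monitor << '\n';
    }
}
```

The digit cross join in the SQL exists only to number the list elements; a tally table is the usual workaround for splitting a delimited string in plain SQL, and the loop above plays the same role.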

22
dep/jwt-cpp/.clang-format Normal file

@@ -0,0 +1,22 @@
BasedOnStyle: LLVM
BreakBeforeBraces: Attach
ColumnLimit: 120 # Match GitHub UI
UseTab: Always
TabWidth: 4
IndentWidth: 4
AccessModifierOffset: -4
ContinuationIndentWidth: 4
NamespaceIndentation: All
IndentCaseLabels: false
PointerAlignment: Left
AlwaysBreakTemplateDeclarations: Yes
SpaceAfterTemplateKeyword: false
AllowShortCaseLabelsOnASingleLine: true
AllowShortIfStatementsOnASingleLine: WithoutElse
AllowShortBlocksOnASingleLine: Always
FixNamespaceComments: true
ReflowComments: false

41
dep/jwt-cpp/.clang-tidy Normal file

@@ -0,0 +1,41 @@
Checks: '-*,
bugprone-*,
cert-*,
clang-analyzer-*,
clang-diagnostic-*,
-clang-diagnostic-c++17-extensions,
google-*,
-google-runtime-references,
-google-readability-braces-around-statements,
hicpp-*,
-hicpp-braces-around-statements,
-hicpp-signed-bitwise,
misc-*,
-misc-non-private-member-variables-in-classes,
llvm-*,
modernize-*,
-modernize-use-trailing-return-type,
performance-*,
portability-*,
readability-*,
-readability-magic-numbers,
-readability-braces-around-statements,
-readability-uppercase-literal-suffix'
CheckOptions:
- key: readability-identifier-naming.TypedefCase
value: lower_case
- key: readability-identifier-naming.StructCase
value: lower_case
- key: readability-identifier-naming.ClassCase
value: lower_case
- key: readability-identifier-naming.VariableCase
value: lower_case
- key: readability-identifier-naming.ParameterCase
value: lower_case
- key: readability-identifier-naming.FunctionCase
value: lower_case
- key: readability-identifier-naming.NamespaceCase
value: lower_case
- key: readability-identifier-naming.GlobalConstantCase
value: lower_case

208
dep/jwt-cpp/.cmake-format Normal file

@@ -0,0 +1,208 @@
# ----------------------------------
# Options affecting listfile parsing
# ----------------------------------
with section("parse"):
# Specify structure for custom cmake functions
additional_commands = { 'foo': { 'flags': ['BAR', 'BAZ'],
'kwargs': {'DEPENDS': '*', 'HEADERS': '*', 'SOURCES': '*'}}}
# -----------------------------
# Options affecting formatting.
# -----------------------------
with section("format"):
# How wide to allow formatted cmake files
line_width = 120
# How many spaces to tab for indent
tab_size = 2
# If an argument group contains more than this many sub-groups (parg or kwarg
# groups) then force it to a vertical layout.
max_subgroups_hwrap = 12
# If a positional argument group contains more than this many arguments, then
# force it to a vertical layout.
max_pargs_hwrap = 24
# If true, separate flow control names from their parentheses with a space
separate_ctrl_name_with_space = False
# If true, separate function names from parentheses with a space
separate_fn_name_with_space = False
# If a statement is wrapped to more than one line, then dangle the closing
# parenthesis on its own line.
dangle_parens = False
# If the trailing parenthesis must be 'dangled' on its own line, then align it
# to this reference: `prefix`: the start of the statement, `prefix-indent`:
# the start of the statement, plus one indentation level, `child`: align to
# the column of the arguments
dangle_align = 'prefix'
# If the statement spelling length (including space and parenthesis) is
# smaller than this amount, then force reject nested layouts.
min_prefix_chars = 4
# If the statement spelling length (including space and parenthesis) is larger
# than the tab width by more than this amount, then force reject un-nested
# layouts.
max_prefix_chars = 10
# If a candidate layout is wrapped horizontally but it exceeds this many
# lines, then reject the layout.
max_lines_hwrap = 12
# What style line endings to use in the output.
line_ending = 'unix'
# Format command names consistently as 'lower' or 'upper' case
command_case = 'lower'
# Format keywords consistently as 'lower' or 'upper' case
keyword_case = 'upper'
# A list of command names which should always be wrapped
always_wrap = []
# If true, the argument lists which are known to be sortable will be sorted
# lexicographically
enable_sort = True
# If true, the parsers may infer whether or not an argument list is sortable
# (without annotation).
autosort = False
# By default, if cmake-format cannot successfully fit everything into the
# desired linewidth it will apply the last, most aggressive attempt that it
# made. If this flag is True, however, cmake-format will print an error, exit
# with a non-zero status code, and write out nothing
require_valid_layout = False
# A dictionary mapping layout nodes to a list of wrap decisions. See the
# documentation for more information.
layout_passes = {}
# ------------------------------------------------
# Options affecting comment reflow and formatting.
# ------------------------------------------------
with section("markup"):
# What character to use for bulleted lists
bullet_char = '*'
# What character to use as punctuation after numerals in an enumerated list
enum_char = '.'
# If comment markup is enabled, don't reflow the first comment block in each
# listfile. Use this to preserve formatting of your copyright/license
# statements.
first_comment_is_literal = False
# If comment markup is enabled, don't reflow any comment block which matches
# this (regex) pattern. Default is `None` (disabled).
literal_comment_pattern = None
# Regular expression to match preformat fences in comments
# default=r'^\s*([`~]{3}[`~]*)(.*)$'
fence_pattern = '^\\s*([`~]{3}[`~]*)(.*)$'
# Regular expression to match rulers in comments
# default=r'^\s*[^\w\s]{3}.*[^\w\s]{3}$'
ruler_pattern = '^\\s*[^\\w\\s]{3}.*[^\\w\\s]{3}$'
# If a comment line starts with this pattern then it is explicitly a
# trailing comment for the preceding argument. Default is '#<'
explicit_trailing_pattern = '#<'
# If a comment line starts with at least this many consecutive hash
# characters, then don't lstrip() them off. This allows for lazy hash rulers
# where the first hash char is not separated by space
hashruler_min_length = 10
# If true, then insert a space between the first hash char and remaining hash
# chars in a hash ruler, and normalize its length to fill the column
canonicalize_hashrulers = True
# enable comment markup parsing and reflow
enable_markup = True
# ----------------------------
# Options affecting the linter
# ----------------------------
with section("lint"):
# a list of lint codes to disable
disabled_codes = []
# regular expression pattern describing valid function names
function_pattern = '[0-9a-z_]+'
# regular expression pattern describing valid macro names
macro_pattern = '[0-9A-Z_]+'
# regular expression pattern describing valid names for variables with global
# scope
global_var_pattern = '[0-9A-Z][0-9A-Z_]+'
# regular expression pattern describing valid names for variables with global
# scope (but internal semantic)
internal_var_pattern = '_[0-9A-Z][0-9A-Z_]+'
# regular expression pattern describing valid names for variables with local
# scope
local_var_pattern = '[0-9a-z_]+'
# regular expression pattern describing valid names for private directory
# variables
private_var_pattern = '_[0-9a-z_]+'
# regular expression pattern describing valid names for public directory
# variables
public_var_pattern = '[0-9A-Z][0-9A-Z_]+'
# regular expression pattern describing valid names for keywords used in
# functions or macros
keyword_pattern = '[0-9A-Z_]+'
# In the heuristic for C0201, how many conditionals to match within a loop
# before considering the loop a parser.
max_conditionals_custom_parser = 2
# Require at least this many newlines between statements
min_statement_spacing = 1
# Require no more than this many newlines between statements
max_statement_spacing = 1
max_returns = 6
max_branches = 12
max_arguments = 5
max_localvars = 15
max_statements = 50
# -------------------------------
# Options affecting file encoding
# -------------------------------
with section("encode"):
# If true, emit the unicode byte-order mark (BOM) at the start of the file
emit_byteorder_mark = False
# Specify the encoding of the input file. Defaults to utf-8
input_encoding = 'utf-8'
# Specify the encoding of the output file. Defaults to utf-8. Note that cmake
# only claims to support utf-8 so be careful when using anything else
output_encoding = 'utf-8'
# -------------------------------------
# Miscellaneous configurations options.
# -------------------------------------
with section("misc"):
# A dictionary containing any per-command configuration overrides. Currently
# only `command_case` is supported.
per_command = {}

17
dep/jwt-cpp/.editorconfig Normal file

@@ -0,0 +1,17 @@
root = true
[!*.{h,cpp}]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.{h,cpp}]
indent_style = tab
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false

17
dep/jwt-cpp/.gitattributes vendored Normal file

@@ -0,0 +1,17 @@
# Auto detect text files and perform LF normalization
* text=auto
# Custom for Visual Studio
*.cs diff=csharp
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain

2
dep/jwt-cpp/.github/FUNDING.yml vendored Normal file

@@ -0,0 +1,2 @@
github: [Thalhammer,prince-chrismc]
patreon: Thalhammer


@@ -0,0 +1,83 @@
name: Bug Report 🐛
description: File a bug report
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
validations:
required: false
- type: textarea
id: what-happened
attributes:
label: What happened?
description: Also tell us, what did you expect to happen? Feel free to include some screenshots
placeholder: Tell us what you see!
value: "A bug happened!"
validations:
required: true
- type: textarea
id: reproduce
attributes:
label: How To Reproduce?
description: Please provide a small snippet to reproduce the issue
placeholder: Some C++ code or Shell code to recreate the problem
value: |
```c++
#include "jwt-cpp/jwt.h"
int main() {
return 0;
}
```
- type: dropdown
id: version
attributes:
label: Version
description: What version of our software are you running?
options:
- 0.7.0
- 0.6.0
- 0.5.2
- Older (please let us know in the "What happened" box)
validations:
required: true
- type: dropdown
id: operating-system
attributes:
label: What OS are you seeing the problem on?
multiple: true
options:
- Windows
- Linux
- MacOS
- Other (please let us know in the "What happened" box)
validations:
required: true
- type: dropdown
id: compiler
attributes:
label: What compiler are you seeing the problem on?
multiple: true
options:
- GCC
- Clang
- MSVC
- Other (please let us know in the "What happened" box)
validations:
required: true
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell
- type: checkboxes
id: terms
attributes:
label: Code of Conduct
description: By submitting this issue, you agree to follow our [Code of Conduct](https://example.com)
options:
- label: I agree to follow this project's Code of Conduct
required: true


@@ -0,0 +1,20 @@
name: Feature Request 🧪
description: Have a great idea? Find something missing?
labels: ["enhancement"]
body:
- type: markdown
attributes:
value: |
We'd love to hear your idea(s)!
- type: input
id: question
attributes:
label: "What would you like to see added?"
validations:
required: true
- type: textarea
id: context
attributes:
label: Additional Context
validations:
required: true


@@ -0,0 +1,20 @@
name: Support Question 🤹
description: Have some questions? We can offer help.
labels: ["question"]
body:
- type: markdown
attributes:
value: |
Don't hesitate to ask any question you might have!
- type: input
id: question
attributes:
label: "What's your question?"
validations:
required: true
- type: textarea
id: context
attributes:
label: Additional Context
validations:
required: true


@@ -0,0 +1,31 @@
name: Regular badging sequence
description: Publishes a badge based on the job status
inputs:
category:
description: The subfolder where to group the badges
required: true
label:
description: The label to use in the badge (this should be unique for each badge in a category)
required: true
github_token:
description: The token to use to publish the changes
required: false
default: ${{ github.token }}
runs:
using: composite
steps:
- if: job.status == 'success'
uses: ./.github/actions/badge/write
with:
category: ${{ inputs.category }}
label: ${{ inputs.label }}
- if: job.status == 'failure'
uses: ./.github/actions/badge/write
with:
category: ${{ inputs.category }}
label: ${{ inputs.label }}
message: Failing
color: red
- uses: ./.github/actions/badge/publish
with:
github_token: ${{ inputs.github_token }}


@@ -0,0 +1,24 @@
name: Make a Badge
description: Creates a JSON file for use with Shields.io. The default is a "brightgreen" "Passing" badge
inputs:
category:
description: The subfolder where to group the badges
required: true
label:
description: The label to use in the badge (this should be unique for each badge in a category)
required: true
message:
description: The message you wish to have in the badge
required: false
default: "Passing"
color:
description: The color you wish the badge to be
required: false
default: "brightgreen"
runs:
using: composite
steps:
- run: |
mkdir -p badges/${{ inputs.category }}/${{ inputs.label }}
echo '{ "schemaVersion": 1, "label": "${{ inputs.label }}", "message": "${{ inputs.message }}", "color": "${{ inputs.color }}" }' > badges/${{ inputs.category }}/${{ inputs.label }}/shields.json
shell: bash


@@ -0,0 +1,17 @@
name: Install Boost.JSON
description: Install Boost.JSON for building test application
inputs:
version:
description: The desired Boost.JSON version to install
required: false
default: "1.78.0"
runs:
using: composite
steps:
- run: |
cd /tmp
wget https://github.com/boostorg/json/archive/boost-${{ inputs.version }}.tar.gz
tar -zxf /tmp/boost-${{ inputs.version }}.tar.gz
cd json-boost-${{ inputs.version }}
sudo cp -vR include/boost /usr/local/include
shell: bash


@@ -0,0 +1,39 @@
name: Install CMake
description: Download, Build and Cache CMake
inputs:
version:
description: The desired CMake version to install
required: true
url:
description: "The corresponding URL to download the source code from"
required: true
runs:
using: composite
steps:
- name: Cache CMake
id: cache-cmake
uses: actions/cache@v3
with:
path: cmake-${{ inputs.version }}
key: ${{ runner.name }}-${{ runner.os }}-${{ runner.arch }}-cmake-${{ inputs.version }}
- name: Build cmake
if: steps.cache-cmake.outputs.cache-hit != 'true'
run: |
wget ${{ inputs.url }}
tar -zxf cmake-${{ inputs.version }}.tar.gz
cd cmake-${{ inputs.version }}
./bootstrap
make -j $(nproc)
shell: bash
- name: Install cmake
run: |
cd cmake-${{ inputs.version }}
# Depending on whether we run on GitHub Actions or from within a Docker image we have different permissions
if [[ $EUID > 0 ]]; then
# If we are not root then we need to sudo
sudo make install
else
# The default Docker image does not have users set up, so we are root and cannot sudo
make install
fi
shell: bash


@@ -0,0 +1,18 @@
name: Install jsoncons
description: Install jsoncons for building test application
inputs:
version:
description: The desired jsoncons version to install
required: false
default: "0.168.7"
runs:
using: composite
steps:
- run: |
cd /tmp
wget https://github.com/danielaparker/jsoncons/archive/v${{ inputs.version }}.tar.gz
tar -zxf /tmp/v${{ inputs.version }}.tar.gz
cd jsoncons-${{ inputs.version }}
cmake .
sudo cmake --install .
shell: bash


@@ -0,0 +1,15 @@
name: Install GTest
description: Install and setup GTest for linking and building test application
runs:
using: composite
steps:
- run: sudo apt-get install libgtest-dev lcov
shell: bash
- run: (cd /usr/src/gtest && sudo `which cmake` .)
shell: bash
- run: sudo make -j $(nproc) -C /usr/src/gtest
shell: bash
- run: sudo ln -s /usr/src/gtest/libgtest.a /usr/lib/libgtest.a
shell: bash
- run: sudo ln -s /usr/src/gtest/libgtest_main.a /usr/lib/libgtest_main.a
shell: bash


@@ -0,0 +1,17 @@
name: Install PicoJSON
description: Install PicoJSON for building test application
inputs:
version:
description: The desired PicoJSON version to install
required: false
default: "v1.3.0"
runs:
using: composite
steps:
- run: |
cd /tmp
wget https://github.com/kazuho/picojson/archive/${{ inputs.version }}.tar.gz
tar -zxf /tmp/${{ inputs.version }}.tar.gz
cd picojson-${{ inputs.version }}
sudo cp -v picojson.h /usr/local/include/picojson
shell: bash


@@ -0,0 +1,19 @@
name: Install LibreSSL
description: Install and setup LibreSSL for linking and building test application
inputs:
version:
description: The desired LibreSSL version to install
required: false
default: "3.3.0"
runs:
using: composite
steps:
- run: |
wget https://raw.githubusercontent.com/libressl-portable/portable/v${{ inputs.version }}/FindLibreSSL.cmake -P cmake/
cd /tmp
wget https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-${{ inputs.version }}.tar.gz
tar -zvxf /tmp/libressl-${{ inputs.version }}.tar.gz
cd libressl-${{ inputs.version }}
./configure
sudo make -j $(nproc) install
shell: bash


@@ -0,0 +1,18 @@
name: Install nlohmann-json
description: Install nlohmann-json for building test application
inputs:
version:
description: The desired nlohmann-json version to install
required: false
default: "3.11.2"
runs:
using: composite
steps:
- run: |
cd /tmp
wget https://github.com/nlohmann/json/archive/v${{ inputs.version }}.tar.gz
tar -zxf /tmp/v${{ inputs.version }}.tar.gz
cd json-${{ inputs.version }}
cmake .
sudo cmake --install .
shell: bash


@@ -0,0 +1,19 @@
name: Install OpenSSL
description: Install and setup OpenSSL for linking and building test application
inputs:
version:
description: The desired OpenSSL version to install
required: false
default: "openssl-3.0.0"
runs:
using: composite
steps:
- run: |
cd /tmp
wget https://github.com/openssl/openssl/archive/refs/tags/${{ inputs.version }}.tar.gz
tar -zxf /tmp/${{ inputs.version }}.tar.gz
cd openssl-${{ inputs.version }}
./config --prefix=/tmp --libdir=lib
make -j $(nproc)
sudo make -j $(nproc) install_sw
shell: bash


@@ -0,0 +1,22 @@
name: Install wolfSSL
description: Install and setup wolfSSL for linking and building test application
inputs:
version:
description: The desired stable wolfSSL version to install
required: false
default: "v4.8.1-stable"
runs:
using: composite
steps:
- run: |
cd /tmp
wget -O wolfssl.tar.gz https://github.com/wolfSSL/wolfssl/archive/${{ inputs.version }}.tar.gz
tar -zxf /tmp/wolfssl.tar.gz
cd wolfssl-*
autoreconf -fiv
./configure --enable-opensslall --enable-opensslextra --disable-examples --disable-crypttests --enable-harden --enable-all --enable-all-crypto
make
sudo make install
shell: bash
- run: sudo rm -rf /usr/include/openssl
shell: bash


@@ -0,0 +1,26 @@
name: Process Linting Results
description: Add a comment to a pull request when `git diff` shows changes, and save the changes as an artifact so they can be applied manually
inputs:
linter_name:
description: The name of the tool to credit in the comment
required: true
runs:
using: "composite"
steps:
- run: git add --update
shell: bash
- id: stage
#continue-on-error: true
uses: Thalhammer/patch-generator-action@v2
# Unfortunately the previous action reports a failure so nothing else can run
# partially a limitation on composite actions since `continue-on-error` is not
# yet supported
- if: steps.stage.outputs.result == 'dirty'
uses: actions-ecosystem/action-create-comment@v1
with:
github_token: ${{ github.token }}
body: |
Hello, @${{ github.actor }}! `${{ inputs.linter_name }}` had some concerns :scream:
- run: exit $(git status -uno -s | wc -l)
shell: bash


@@ -0,0 +1,52 @@
name: "Render `defaults.h` Template"
description: "Generate the `defaults.h` header file for a JSON library"
inputs:
traits_name:
description: "Name of the traits structure to be used. Typically in the format `author_repository` or equivilant"
required: true
library_name:
description: "Name of the JSON library."
required: true
library_url:
description: "URL to the JSON library."
required: true
disable_default_traits:
description: "Set the macro to disable the default traits"
required: false
default: "true"
runs:
using: composite
steps:
- uses: actions/setup-node@v3
with:
node-version: 14
- run: npm install mustache
shell: bash
- uses: actions/github-script@v6
env:
TRAITS_NAME: ${{ inputs.traits_name }}
LIBRARY_NAME: ${{ inputs.library_name }}
LIBRARY_URL: ${{ inputs.library_url }}
DISABLE_DEFAULT_TRAITS: ${{ inputs.disable_default_traits }}
with:
script: |
const mustache = require('mustache')
const path = require('path')
const fs = require('fs')
const { TRAITS_NAME, LIBRARY_NAME, LIBRARY_URL, DISABLE_DEFAULT_TRAITS } = process.env
console.log(`Rendering ${TRAITS_NAME}!`)
const disableDefault = DISABLE_DEFAULT_TRAITS === 'true'
const template = fs.readFileSync(path.join('include', 'jwt-cpp', 'traits', 'defaults.h.mustache'), 'utf8')
const content = mustache.render(template, {
traits_name: TRAITS_NAME,
traits_name_upper: TRAITS_NAME.toUpperCase(),
library_name: LIBRARY_NAME,
library_url: LIBRARY_URL,
disable_default_traits: disableDefault,
})
const outputDir = path.join('include', 'jwt-cpp', 'traits', TRAITS_NAME.replace('_', '-'))
fs.mkdirSync(outputDir, { recursive: true })
fs.writeFileSync(path.join(outputDir, 'defaults.h'), content)


@@ -0,0 +1,39 @@
name: "Render `TraitsTests.cpp` Template"
description: "Generate the `TraitsTests.cpp` header file for a JSON library"
inputs:
traits_name:
description: "Name of the traits structure to be used. Typically in the format `author_repository` or equivilant"
required: true
test_suite_name:
description: "Name of the JSON library."
required: true
runs:
using: composite
steps:
- uses: actions/setup-node@v3
with:
node-version: 14
- run: npm install mustache
shell: bash
- uses: actions/github-script@v6
env:
TRAITS_NAME: ${{ inputs.traits_name }}
SUITE_NAME: ${{ inputs.test_suite_name }}
with:
script: |
const mustache = require('mustache')
const path = require('path')
const fs = require('fs')
const { TRAITS_NAME, SUITE_NAME } = process.env
console.log(`Rendering ${TRAITS_NAME}!`)
const template = fs.readFileSync(path.join('tests', 'traits', 'TraitsTest.cpp.mustache'), 'utf8')
const content = mustache.render(template, {
traits_name: TRAITS_NAME,
traits_dir: TRAITS_NAME.replace('_', '-'),
test_suite_name: SUITE_NAME,
})
const outputDir = path.join('tests', 'traits')
fs.mkdirSync(outputDir, { recursive: true })
fs.writeFileSync(path.join(outputDir, `${SUITE_NAME}.cpp`), content)

16
dep/jwt-cpp/.github/logo.svg vendored Normal file

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>
<svg viewBox="0 0 2450 750" width="2450" height="750" xmlns="http://www.w3.org/2000/svg">
<g style="" transform="matrix(1.01, 0, 0, 1, 550, 140)">
<path d="M -551.881 -175.95 L 1948.119 -175.95 L 1948.119 609.368 L -551.881 609.368 Z" clip-rule="evenodd" fill-rule="evenodd" style=""/>
<path d="M -108.039 81.534 L -108.639 -86.566 L -202.439 -86.566 L -201.839 81.534 L -154.939 145.934 Z M -201.839 369.634 L -201.839 538.334 L -108.039 538.334 L -108.039 369.634 L -154.939 305.234 Z" fill="#fff" clip-rule="evenodd" fill-rule="evenodd"/>
<path d="M -108.039 369.634 L -9.239 505.934 L 66.361 450.934 L -32.439 314.634 L -108.039 290.234 Z M -201.839 81.534 L -301.239 -54.766 L -376.839 0.234 L -278.039 136.434 L -201.839 160.834 Z" clip-rule="evenodd" fill-rule="evenodd" style="fill: rgb(101, 154, 210);"/>
<path d="M -278.039 136.534 L -438.039 84.634 L -466.839 173.334 L -306.839 225.834 L -231.239 200.834 Z M -79.339 250.234 L -32.439 314.634 L 127.561 366.534 L 156.361 277.834 L -3.639 225.934 Z" fill="#00b9f1" clip-rule="evenodd" fill-rule="evenodd"/>
<path d="M -3.739 225.934 L 156.261 173.434 L 127.461 84.734 L -32.539 136.634 L -79.439 201.034 Z M -306.839 225.934 L -466.839 277.834 L -438.039 366.534 L -278.039 314.634 L -231.139 250.234 Z" clip-rule="evenodd" fill-rule="evenodd" style="fill: rgb(0, 89, 156);"/>
<path d="M -278.039 314.634 L -376.839 450.934 L -301.239 505.934 L -201.839 369.634 L -201.839 290.234 Z M -32.439 136.534 L 66.361 0.234 L -9.239 -54.766 L -108.039 81.434 L -108.039 160.834 Z" clip-rule="evenodd" fill-rule="evenodd" style="fill: rgb(0, 68, 130);"/>
<path d="M 343.623 72.734 L 343.623 291.434 C 343.623 339.534 304.223 378.934 256.123 378.934 L 256.123 335.134 C 280.523 335.134 299.923 315.734 299.923 291.334 L 299.923 72.734 Z M 1142.423 116.534 L 1241.123 116.534 L 1241.123 72.834 L 1000.523 72.834 L 1000.523 116.534 L 1098.623 116.534 L 1098.623 379.034 L 1142.423 379.034 Z M 868.623 72.734 L 868.623 291.434 C 868.623 315.834 849.223 335.234 824.823 335.234 C 800.423 335.234 781.023 315.834 781.023 291.434 L 781.023 160.234 C 781.023 112.134 741.623 72.734 693.523 72.734 C 645.423 72.734 606.023 112.134 606.023 160.234 L 606.023 291.534 C 606.023 315.934 586.623 335.334 562.223 335.334 C 537.823 335.334 518.423 315.934 518.423 291.534 L 518.423 72.734 L 474.623 72.734 L 474.623 291.434 C 474.623 339.534 514.023 378.934 562.123 378.934 C 610.223 378.934 649.623 339.534 649.623 291.434 L 649.623 160.234 C 649.623 135.834 669.023 116.434 693.423 116.434 C 717.823 116.434 737.223 135.834 737.223 160.234 L 737.223 291.534 C 737.223 339.634 776.623 379.034 824.723 379.034 C 872.823 379.034 912.223 339.634 912.223 291.534 L 912.223 72.734 Z" fill="#fff" clip-rule="evenodd" fill-rule="evenodd"/>
<g transform="matrix(6.824428, -0.018276, 0.018773, 7.009964, -218.175049, -971.69281)" style="">
<polygon fill="#FFFFFF" points="255 168.508 241.666 168.508 241.666 155.175 234.334 155.175 234.334 168.508 221 168.508 221 175.841 234.334 175.841 234.334 189.175 241.666 189.175 241.666 175.841 255 175.841"/>
<polygon fill="#FFFFFF" points="297.5 168.508 284.166 168.508 284.166 155.175 276.834 155.175 276.834 168.508 263.5 168.508 263.5 175.841 276.834 175.841 276.834 189.175 284.166 189.175 284.166 175.841 297.5 175.841"/>
</g>
</g>
</svg>


19
dep/jwt-cpp/.github/overrides.css vendored Normal file

@@ -0,0 +1,19 @@
@media (prefers-color-scheme: dark) {
table.markdownTable, table.fieldtable {
width: auto;
}
html:not(.light-mode) {
/* define dark-mode variable overrides here if you DON'T use doxygen-awesome-darkmode-toggle.js */
}
html:not(.light-mode) div.contents table img {
filter: none;
}
html:not(.light-mode) div.qindex,
html:not(.light-mode) div.navtab {
background-color: var(--side-nav-background);
border: 1px solid var(--separator-color);
}
}

21
dep/jwt-cpp/.github/security.md vendored Normal file

@@ -0,0 +1,21 @@
# Reporting Security Issues
If you believe you have found a security vulnerability in JWT-CPP, we encourage you to let us know right away.
We will investigate all legitimate reports and do our best to quickly fix the problem.
Please refer to the section below for our responsible disclosure policy:
## Disclosure Policy
Please contact one or more of the maintainers using the email advertised on our GitHub profiles:
- [@Thalhammer](https://github.com/Thalhammer)
- [@prince-chrismc](https://github.com/prince-chrismc)
Please provide as many details as possible about the version, platform, and workflow.
Having steps and reproducible code is better and is always greatly appreciated.
## Supported Version
Typically, fixes will be immediately released as a new patch release. However, older affected versions may receive
a new patch upon request.

233
dep/jwt-cpp/.github/workflows/cmake.yml vendored Normal file

@@ -0,0 +1,233 @@
name: CMake CI
on:
push:
branches: [master]
pull_request:
branches: [master]
paths:
- "CMakeLists.txt"
- "cmake/**"
- "include/jwt-cpp/**"
- "tests/cmake/**"
- ".github/actions/**"
- ".github/workflows/cmake.yml"
jobs:
default-linux:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_EXAMPLES=OFF
sudo make install
- name: test
run: |
cd tests/cmake
cmake . -DTEST:STRING="defaults-enabled" -DCMAKE_FIND_DEBUG_MODE=1
cmake --build .
default-linux-with-examples:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake ..
sudo cmake --install .
- name: test
run: |
cd tests/cmake
cmake . -DTEST:STRING="defaults-enabled"
cmake --build .
default-win:
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- run: choco install openssl
- name: setup
run: |
cmake -E make_directory build
cd build
cmake .. -DJWT_BUILD_EXAMPLES=OFF
cmake --install .
- name: test
run: |
cd tests/cmake
cmake . -DTEST:STRING="defaults-enabled" -DCMAKE_FIND_DEBUG_MODE=1
cmake --build .
min-req:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/install/cmake
with:
version: "3.14.7"
url: "https://cmake.org/files/v3.14/cmake-3.14.7.tar.gz"
- uses: ./.github/actions/install/gtest
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_EXAMPLES=ON -DJWT_BUILD_TESTS=ON
sudo make install
- name: test
run: |
cd tests/cmake
cmake . -DTEST:STRING="defaults-enabled"
cmake --build .
custom-install-linux:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake .. -DCMAKE_INSTALL_PREFIX:STRING="/opt/jwt-cpp" -DJWT_BUILD_EXAMPLES=OFF
make install
- name: test
run: |
cd tests/cmake
cmake . -DCMAKE_PREFIX_PATH="/opt/jwt-cpp" -DTEST:STRING="defaults-enabled" -DCMAKE_FIND_DEBUG_MODE=1
cmake --build .
custom-install-win:
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- run: choco install openssl
- name: setup
run: |
cmake -E make_directory build
cd build
cmake .. -DCMAKE_INSTALL_PREFIX:STRING="C:/jwt-cpp" -DJWT_BUILD_EXAMPLES=OFF
cmake --install .
- name: test
run: |
cd tests/cmake
cmake . -DCMAKE_PREFIX_PATH="C:/jwt-cpp" -DTEST:STRING="defaults-enabled" -DCMAKE_FIND_DEBUG_MODE=1
cmake --build .
no-pico:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_DISABLE_PICOJSON=ON -DJWT_BUILD_EXAMPLES=OFF
sudo make install
- name: test
run: |
cd tests/cmake
cmake . -DCMAKE_PREFIX_PATH=/usr/local/cmake -DTEST:STRING="picojson-is-disabled"
cmake --build .
no-base64:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_DISABLE_BASE64=ON -DJWT_BUILD_EXAMPLES=OFF
sudo make install
- name: test
run: |
cd tests/cmake
cmake . -DCMAKE_PREFIX_PATH=/usr/local/cmake -DTEST:STRING="base64-is-disabled"
cmake --build .
with-libressl:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/libressl
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_SSL_LIBRARY:STRING="LibreSSL" -DJWT_BUILD_EXAMPLES=OFF
sudo make install
- name: test
run: |
cd tests/cmake
cmake . -DCMAKE_PREFIX_PATH=/usr/local/cmake -DCMAKE_MODULE_PATH=../../cmake -DTEST:STRING="libressl-is-used"
cmake --build .
with-wolfssl:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/wolfssl
with:
version: ab3bbf11e9d39b52e24bf42bbc6babc16d4a669b
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_SSL_LIBRARY:STRING="wolfSSL" -DJWT_BUILD_EXAMPLES=OFF
sudo make install
- name: test
run: |
cd tests/cmake
cmake . -DTEST:STRING="wolfssl-is-used"
cmake --build .
with-hunter:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_TESTS=ON -DJWT_BUILD_EXAMPLES=ON -DJWT_ENABLE_COVERAGE=OFF -DHUNTER_ENABLED=ON
make
- name: test
run: |
cd build
./tests/jwt-cpp-test


@@ -0,0 +1,54 @@
name: Cross-Platform CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- if: matrix.os == 'windows-latest'
run: choco install openssl
- if: matrix.os == 'macos-latest'
run: sudo cp /usr/local/opt/openssl@1.1/lib/pkgconfig/*.pc /usr/local/lib/pkgconfig/
- uses: actions/checkout@v3
- run: cmake -E make_directory ${{ github.workspace }}/build
- name: configure
shell: bash # access regardless of the host operating system
working-directory: ${{ github.workspace }}/build
run: cmake $GITHUB_WORKSPACE -DJWT_BUILD_EXAMPLES=ON
- name: build
working-directory: ${{ github.workspace }}/build
shell: bash
run: cmake --build .
- if: matrix.os != 'windows-latest'
name: test
working-directory: ${{ github.workspace }}/build
shell: bash
run: |
./example/rsa-create
./example/rsa-verify
- if: matrix.os == 'windows-latest'
name: test
working-directory: ${{ github.workspace }}/build
run: |
example\Debug\rsa-create.exe
example\Debug\rsa-verify.exe
- if: github.event_name == 'push' && always()
uses: ./.github/actions/badge
with:
category: cross-platform
label: ${{ matrix.os }}


@@ -0,0 +1,21 @@
name: Documentation CI
on:
push:
branches: [master]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: sudo apt install graphviz
- run: wget https://raw.githubusercontent.com/jothepro/doxygen-awesome-css/v1.6.1/doxygen-awesome.css
- run: wget https://raw.githubusercontent.com/jothepro/doxygen-awesome-css/v1.6.1/doxygen-awesome-sidebar-only.css
- uses: mattnotmitt/doxygen-action@v1
- name: deploy
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./doxy/html
force_orphan: true

104
dep/jwt-cpp/.github/workflows/jwt.yml vendored Normal file

@@ -0,0 +1,104 @@
name: JWT CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
coverage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- uses: ./.github/actions/install/danielaparker-jsoncons
- uses: ./.github/actions/install/boost-json
- name: configure
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_EXAMPLES=OFF -DJWT_BUILD_TESTS=ON -DJWT_ENABLE_COVERAGE=ON -DCMAKE_BUILD_TYPE=Debug
- name: run
working-directory: build
run: make jwt-cpp-test coverage
- uses: coverallsapp/github-action@1.1.3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: build/coverage.info
fuzzing:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- name: configure
run: |
mkdir build
cd build
cmake .. -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DJWT_ENABLE_FUZZING=ON
- name: run
working-directory: build
run: |
make jwt-cpp-fuzz-BaseEncodeFuzz jwt-cpp-fuzz-BaseDecodeFuzz jwt-cpp-fuzz-TokenDecodeFuzz
./tests/fuzz/jwt-cpp-fuzz-BaseEncodeFuzz -runs=100000
./tests/fuzz/jwt-cpp-fuzz-BaseDecodeFuzz -runs=100000 ../tests/fuzz/decode-corpus
./tests/fuzz/jwt-cpp-fuzz-TokenDecodeFuzz -runs=100000 ../tests/fuzz/token-corpus
asan: ## Based on https://gist.github.com/jlblancoc/44be9d4d466f0a973b1f3808a8e56782
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- name: configure
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_TESTS=ON -DCMAKE_CXX_FLAGS="-fsanitize=address -fsanitize=leak -g" \
-DCMAKE_C_FLAGS="-fsanitize=address -fsanitize=leak -g" \
-DCMAKE_EXE_LINKER_FLAGS="-fsanitize=address -fsanitize=leak" \
-DCMAKE_MODULE_LINKER_FLAGS="-fsanitize=address -fsanitize=leak"
- name: run
working-directory: build
run: |
make
export ASAN_OPTIONS=check_initialization_order=true:detect_stack_use_after_return=true:fast_unwind_on_malloc=0
./example/rsa-create
./example/rsa-verify
./tests/jwt-cpp-test
ubsan:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- name: configure
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_TESTS=ON -DCMAKE_CXX_STANDARD=20 \
-DCMAKE_CXX_FLAGS="-fsanitize=undefined -fsanitize=return -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize-recover=all -g" \
-DCMAKE_C_FLAGS="-fsanitize=undefined -fsanitize=return -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize-recover=all -g" \
-DCMAKE_EXE_LINKER_FLAGS="-fsanitize=undefined -fsanitize=return -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize-recover=all" \
-DCMAKE_MODULE_LINKER_FLAGS="-fsanitize=undefined -fsanitize=return -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize-recover=all"
- name: run
working-directory: build
run: |
make
export UBSAN_OPTIONS=print_stacktrace=1
./example/rsa-create
./example/rsa-verify
./tests/jwt-cpp-test

123
dep/jwt-cpp/.github/workflows/lint.yml vendored Normal file

@@ -0,0 +1,123 @@
name: Lint CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
clang-format:
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
files:
- "include/jwt-cpp/*.h"
- "include/jwt-cpp/traits/**/*.h"
- "tests/*.cpp"
- "tests/**/*.cpp"
- "example/*.cpp"
- "example/**/*.cpp"
steps:
- run: |
sudo apt-get install clang-format-14
shopt -s globstar
- uses: actions/checkout@v3
- run: clang-format-14 -i ${{ matrix.files }}
- uses: ./.github/actions/process-linting-results
with:
linter_name: clang-format
cmake-format:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
files: ["**/CMakeLists.txt", "cmake/code-coverage.cmake"]
steps:
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install cmakelang
- run: shopt -s globstar
- uses: actions/checkout@v3
- run: cmake-format -i ${{ matrix.files }}
- uses: ./.github/actions/process-linting-results
with:
linter_name: cmake-format
clang-tidy:
runs-on: ubuntu-20.04
steps:
- run: sudo apt-get install clang-tidy
- uses: lukka/get-cmake@latest
- uses: actions/checkout@v3
- name: configure
run: |
mkdir build
cd build
cmake .. -DCMAKE_CXX_CLANG_TIDY="clang-tidy;-fix"
- name: run
working-directory: build
run: make
- uses: ./.github/actions/process-linting-results
with:
linter_name: clang-tidy
render-defaults:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
traits:
- { name: "boost_json", library: "Boost.JSON", url: "https://github.com/boostorg/json", disable_pico: true }
- { name: "danielaparker_jsoncons", library: "jsoncons", url: "https://github.com/danielaparker/jsoncons", disable_pico: true }
- { name: "kazuho_picojson", library: "picojson", url: "https://github.com/kazuho/picojson", disable_pico: false }
- { name: "nlohmann_json", library: "JSON for Modern C++", url: "https://github.com/nlohmann/json", disable_pico: true }
name: render-defaults (${{ matrix.traits.name }})
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/render/defaults
with:
traits_name: ${{ matrix.traits.name }}
library_name: ${{ matrix.traits.library }}
library_url: ${{ matrix.traits.url }}
disable_default_traits: ${{ matrix.traits.disable_pico }}
- run: git add include/jwt-cpp/traits/*
- uses: ./.github/actions/process-linting-results
with:
linter_name: render-defaults
render-tests:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
traits:
# - { name: "boost_json", suite: "BoostJsonTest" } # Currently needs work arounds for API limitations
- { name: "danielaparker_jsoncons", suite: "JsonconsTest" }
# - { name: "kazuho_picojson", suite: "PicoJsonTest" } # Currently the default everything tests against this!
- { name: "nlohmann_json", suite: "NlohmannTest" }
name: render-tests (${{ matrix.traits.name }})
steps:
- uses: actions/checkout@v3
- run: shopt -s globstar
- uses: ./.github/actions/render/tests
with:
traits_name: ${{ matrix.traits.name }}
test_suite_name: ${{ matrix.traits.suite }}
- run: clang-format -i tests/**/*.cpp
- run: git add tests/traits/*
- uses: ./.github/actions/process-linting-results
with:
linter_name: render-tests
line-ending:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- run: git add --renormalize .
- uses: ./.github/actions/process-linting-results
with:
linter_name: line-ending

27
dep/jwt-cpp/.github/workflows/nuget.yml vendored Normal file

@@ -0,0 +1,27 @@
name: Nuget CD
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release
release:
types: [published]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup NuGet
uses: NuGet/setup-nuget@v1
with:
nuget-api-key: ${{ secrets.nuget_api_key }}
- name: Create NuGet pkg
working-directory: ./nuget
run: nuget pack jwt-cpp.nuspec
- name: Publish NuGet pkg
working-directory: ./nuget
run: nuget push *.nupkg -Source 'https://api.nuget.org/v3/index.json'

110
dep/jwt-cpp/.github/workflows/ssl.yml vendored Normal file

@@ -0,0 +1,110 @@
name: SSL Compatibility CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
openssl:
runs-on: ubuntu-latest
strategy:
matrix:
openssl:
- { tag: "openssl-3.0.5", name: "3.0.5" }
- { tag: "OpenSSL_1_1_1q", name: "1.1.1q" }
- { tag: "OpenSSL_1_1_0i", name: "1.1.0i" } # Do not bump, there's a broken in the autoconfig script and it's not maintained
- { tag: "OpenSSL_1_0_2u", name: "1.0.2u" }
- { tag: "OpenSSL_1_0_1u", name: "1.0.1u" }
name: OpenSSL ${{ matrix.openssl.name }}
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- uses: ./.github/actions/install/openssl
with:
version: ${{ matrix.openssl.tag }}
- name: configure
run: cmake . -DJWT_BUILD_TESTS=ON -DOPENSSL_ROOT_DIR=/tmp
- run: make
- name: test
run: ./tests/jwt-cpp-test
- if: github.event_name == 'push' && always()
uses: ./.github/actions/badge
with:
category: openssl
label: ${{ matrix.openssl.name }}
openssl-no-deprecated:
runs-on: ubuntu-latest
name: OpenSSL 3.0 No Deprecated
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- uses: ./.github/actions/install/openssl
with:
version: "openssl-3.0.5"
- name: configure
run: cmake . -DJWT_BUILD_TESTS=ON -DOPENSSL_ROOT_DIR=/tmp -DCMAKE_CXX_FLAGS="-DOPENSSL_NO_DEPRECATED=1" -DCMAKE_C_FLAGS="-DOPENSSL_NO_DEPRECATED=1"
- run: make
libressl:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
libressl: ["3.5.3", "3.4.3", "3.3.6"]
name: LibreSSL ${{ matrix.libressl }}
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- uses: ./.github/actions/install/libressl
with:
version: ${{ matrix.libressl }}
- name: configure
run: cmake . -DJWT_BUILD_TESTS=ON -DJWT_SSL_LIBRARY:STRING=LibreSSL
- run: make
- name: test
run: ./tests/jwt-cpp-test
- if: github.event_name == 'push' && always()
uses: ./.github/actions/badge
with:
category: libressl
label: ${{ matrix.libressl }}
wolfssl:
runs-on: ubuntu-latest
strategy:
matrix:
wolfssl:
- { ref: "v5.1.1-stable", name: "5.1.1"}
- { ref: "v5.2.0-stable", name: "5.2.0" }
- { ref: "v5.3.0-stable", name: "5.3.0"}
name: wolfSSL ${{ matrix.wolfssl.name }}
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- uses: ./.github/actions/install/gtest
- uses: ./.github/actions/install/wolfssl
with:
version: ${{ matrix.wolfssl.ref }}
- name: configure
run: cmake . -DJWT_BUILD_TESTS=ON -DJWT_SSL_LIBRARY:STRING=wolfSSL
- run: make
- name: test
run: ./tests/jwt-cpp-test
- if: github.event_name == 'push' && always()
uses: ./.github/actions/badge
with:
category: wolfssl
label: ${{ matrix.wolfssl.name }}


@@ -0,0 +1,75 @@
name: Specific Targets CI
on:
push:
branches: [master]
pull_request:
branches: [master]
paths:
- "CMakeLists.txt"
- "cmake/**"
- "include/jwt-cpp/**"
- "tests/cmake/**"
- ".github/actions/**"
- ".github/workflows/targets.yml"
jobs:
gcc-4-8:
name: GCC 4.8
runs-on: ubuntu-latest
container:
image: ubuntu:bionic-20230530 # 18.04
env:
CC: /usr/bin/gcc-4.8
CXX: /usr/bin/g++-4.8
steps:
- run: |
apt-get update
apt-get install -y g++-4.8 wget make libssl-dev
- uses: actions/checkout@v3
- uses: ./.github/actions/install/cmake
with:
version: "3.26.3"
url: "https://cmake.org/files/v3.26/cmake-3.26.3.tar.gz"
- name: setup
run: |
mkdir build
cd build
cmake ..
cmake --install .
- name: test
working-directory: tests/cmake
run: |
CC=gcc-4.8 CXX=g++-4.8 cmake . -DTEST:STRING="defaults-enabled"
cmake --build .
gcc-12:
runs-on: ubuntu-latest
container:
image: ubuntu:jammy-20231004 # 22.04
env:
CC: /usr/bin/gcc-12
CXX: /usr/bin/g++-12
steps:
- run: |
apt-get update
apt-get install -y g++-12 wget make libssl-dev
- uses: actions/checkout@v3
- uses: ./.github/actions/install/cmake
with:
version: "3.26.3"
url: "https://cmake.org/files/v3.26/cmake-3.26.3.tar.gz"
- name: setup
run: |
mkdir build
cd build
cmake ..
cmake --install .
- name: test
working-directory: tests/cmake
run: |
CC=gcc-12 CXX=g++-12 cmake . -DCMAKE_CXX_STANDARD=20 -DTEST:STRING="defaults-enabled"
cmake --build .


@@ -0,0 +1,79 @@
name: Traits CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
traits:
name: Traits (${{ matrix.target.name }})
runs-on: ubuntu-latest
strategy:
matrix:
target:
- { name: "danielaparker-jsoncons", tag: "0.168.7", version: "v0.168.7" }
- { name: "boost-json", tag: "1.78.0", version: "v1.80.0" }
- { name: "nlohmann-json", tag: "3.11.2", version: "v3.11.2" }
- { name: "kazuho-picojson", tag: "111c9be5188f7350c2eac9ddaedd8cca3d7bf394", version: "111c9be" }
steps:
- uses: actions/checkout@v3
- uses: lukka/get-cmake@latest
- name: setup
run: |
mkdir build
cd build
cmake .. -DJWT_BUILD_EXAMPLES=OFF
sudo cmake --install .
# Install the JSON library
- if: matrix.target.name == 'danielaparker-jsoncons'
uses: ./.github/actions/install/danielaparker-jsoncons
with:
version: ${{matrix.target.tag}}
- if: matrix.target.name == 'boost-json'
uses: ./.github/actions/install/boost-json
with:
version: ${{matrix.target.tag}}
- if: matrix.target.name == 'nlohmann-json'
uses: ./.github/actions/install/nlohmann-json
with:
version: ${{matrix.target.tag}}
- if: matrix.target.name == 'kazuho-picojson'
run: rm -rf include/picojson
- if: matrix.target.name == 'kazuho-picojson'
uses: ./.github/actions/install/kazuho-picojson
with:
version: ${{matrix.target.tag}}
- name: test
working-directory: example/traits
run: |
cmake . -DCMAKE_FIND_DEBUG_MODE=1
cmake --build . --target ${{ matrix.target.name }}
./${{ matrix.target.name }}
- name: badge success
if: github.event_name == 'push' && success()
uses: ./.github/actions/badge/write
with:
category: traits
label: ${{ matrix.target.name }}
message: ${{ matrix.target.version }}
color: lightblue # turquoise
- name: badge failure
if: github.event_name == 'push' && !success()
uses: ./.github/actions/badge/write
with:
category: traits
label: ${{ matrix.target.name }}
message: ${{ matrix.target.version }}
color: orange
- if: github.event_name == 'push' && always()
uses: ./.github/actions/badge/publish
with:
github_token: ${{ secrets.GITHUB_TOKEN }}

317
dep/jwt-cpp/.gitignore vendored Normal file

@@ -0,0 +1,317 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
# Visual Studio 2015 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# DNX
project.lock.json
artifacts/
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment the next line if you want to checkin your web deploy settings
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# NuGet v3's project.json files produce more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
*.publishsettings
node_modules/
orleans.codegen.cs
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# =========================
# Operating System Files
# =========================
# OSX
# =========================
.DS_Store
.AppleDouble
.LSOverride
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# Windows
# =========================
# Windows image file caches
Thumbs.db
ehthumbs.db
# Folder config file
Desktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msm
*.msp
# Windows shortcuts
*.lnk
# Linux files
test
*.o
*.o.d
.vscode
# ClangD cache files
.cache
doxy/
doxygen-awesome*.css
build/*
package-lock.json

View File

@@ -131,7 +131,7 @@ endif()
configure_package_config_file(
${CMAKE_CURRENT_LIST_DIR}/cmake/jwt-cpp-config.cmake.in ${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config.cmake
INSTALL_DESTINATION ${JWT_CMAKE_FILES_INSTALL_DIR} PATH_VARS JWT_EXTERNAL_PICOJSON JWT_SSL_LIBRARY)
write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config-version.cmake VERSION 0.6.0
write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/jwt-cpp-config-version.cmake VERSION 0.7.0
COMPATIBILITY ExactVersion)
install(TARGETS jwt-cpp EXPORT jwt-cpp-targets PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})

View File

@@ -4,7 +4,7 @@
### The generated JWT token can be decoded, is this correct and secure?
This is the expected behaviour. While the integrity of tokens is ensured by the generated/verified hash,
This is the expected behavior. While the integrity of tokens is ensured by the generated/verified hash,
the contents of the token are only **encoded and not encrypted**. This means you can be sure the token
has not been modified by an unauthorized party, but you should not store confidential information in it.
Anyone with access to the token can read all the claims you put into it. They can however not modify
@@ -26,7 +26,7 @@ Here are a few links for your convenience:
### Can this library encrypt/decrypt claims?
No it does not, see [#115](https://github.com/Thalhammer/jwt-cpp/issues/115) for more details.
More importantly you probably dont want to be using JWTs for anything sensitive. Read [this](https://stackoverflow.com/a/43497242/8480874)
More importantly you probably don't want to be using JWTs for anything sensitive. Read [this](https://stackoverflow.com/a/43497242/8480874)
for more.
### Why are my tokens immediately expired?
@@ -53,8 +53,8 @@ That should result in the token being rejected. For more details checkout [#194]
This was brought up in [#212](https://github.com/Thalhammer/jwt-cpp/issues/212#issuecomment-1054344192) and
[#101](https://github.com/Thalhammer/jwt-cpp/issues/101) as it's an excellent question.
It simply was not required to handle the required keys in JWTs for signing or verification. All the the mandatory keys are numeric,
string or array types which required type definitions and access.
It simply was not required to handle the required keys in JWTs for signing or verification. All the mandatory keys
are numeric, string or array types which required type definitions and access.
The alternative is to use the `to_json()` method and use the library's own APIs to pick the data type you need.
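A rough sketch of that alternative (assuming the default picojson traits, where `to_json()` hands back a `picojson::value`):
```cpp
#include <chrono>
#include <iostream>
#include <jwt-cpp/jwt.h>

int main() {
	auto token = jwt::create()
	                 .set_issued_at(std::chrono::system_clock::now())
	                 .sign(jwt::algorithm::hs256{"secret"});

	auto decoded = jwt::decode(token);
	// to_json() exposes the underlying JSON value, so the numeric "iat" claim
	// can be inspected however you like instead of through a typed getter
	auto iat = decoded.get_payload_claim("iat").to_json();
	std::cout << iat.serialize() << '\n';
}
```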
@@ -63,13 +63,16 @@ The alternative is to use the `to_json()` method and use the libraries own APIs
### Missing \_HMAC and \_EVP_sha256 symbols on Mac
There seems to exist a problem with the openssl library included with macOS. Make sure you link to one provided by brew.
See [here](https://github.com/Thalhammer/jwt-cpp/issues/6) for more details.
See [#6](https://github.com/Thalhammer/jwt-cpp/issues/6) for more details.
### Building on windows fails with syntax errors
The header `<Windows.h>`, which is often included in windowsprojects, defines macros for MIN and MAX which screw up std::numeric_limits.
See [here](https://github.com/Thalhammer/jwt-cpp/issues/5) for more details. To fix this do one of the following things:
The header `<Windows.h>`, which is often included in Windows projects, defines macros for MIN and MAX which conflict
with `std::numeric_limits`. See [#5](https://github.com/Thalhammer/jwt-cpp/issues/5) for more details or
[this StackOverflow thread](https://stackoverflow.com/questions/13416418/define-nominmax-using-stdmin-max).
* define NOMINMAX, which suppresses this behaviour
* include this library before you include windows.h
To fix this do one of the following things:
* define `NOMINMAX`, which suppresses this behavior
* include this library before you `#include <Windows.h>`
* place `#undef max` and `#undef min` before you include this library
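For example, a minimal sketch of the first two options combined:
```cpp
// Keep <Windows.h> from defining min/max macros that break std::numeric_limits
#define NOMINMAX
#include <jwt-cpp/jwt.h> // or simply include jwt-cpp before <Windows.h>
#include <Windows.h>
```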

View File

@@ -0,0 +1,34 @@
# Signing Tokens
## Custom Signature Algorithms
The library's design is open so you can implement your own algorithms; see [existing examples](https://github.com/Thalhammer/jwt-cpp/blob/73f23419235661e89a304ba5ab09d6714fb8dd94/include/jwt-cpp/jwt.h#L874) for ideas.
```cpp
struct your_algorithm{
std::string sign(const std::string& /*unused*/, std::error_code& ec) const {
ec.clear();
// CALL YOUR METHOD HERE
return {};
}
void verify(const std::string& /*unused*/, const std::string& signature, std::error_code& ec) const {
ec.clear();
if (!signature.empty()) { ec = error::signature_verification_error::invalid_signature; }
// CALL YOUR METHOD HERE
}
std::string name() const { return "your_algorithm"; }
};
```
Then everything else is the same; just pass in your implementation, such as:
```cpp
auto token = jwt::create()
.set_id("custom-algo-example")
.set_issued_at(std::chrono::system_clock::now())
.set_expires_at(std::chrono::system_clock::now() + std::chrono::seconds{36000})
.set_payload_claim("sample", jwt::claim(std::string{"test"}))
.sign(your_algorithm{/* what ever you want */});
```
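Verification follows the same pattern; a short sketch reusing the hypothetical `your_algorithm` and the `token` created above:
```cpp
auto decoded = jwt::decode(token);
jwt::verify()
    .allow_algorithm(your_algorithm{/* what ever you want */})
    .verify(decoded); // throws if the signature (or any registered claim check) fails
```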

View File

@@ -39,7 +39,7 @@ namespace jwt {
return data;
}
static const std::string& fill() {
static std::string fill{"="};
static const std::string fill{"="};
return fill;
}
};
@@ -62,7 +62,7 @@ namespace jwt {
return data;
}
static const std::string& fill() {
static std::string fill{"%3d"};
static const std::string fill{"%3d"};
return fill;
}
};
@@ -82,8 +82,8 @@ namespace jwt {
'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-', '_'}};
return data;
}
static const std::initializer_list<std::string>& fill() {
static std::initializer_list<std::string> fill{"%3D", "%3d"};
static const std::vector<std::string>& fill() {
static const std::vector<std::string> fill{"%3D", "%3d"};
return fill;
}
};

View File

File diff suppressed because it is too large

View File

@@ -2,7 +2,7 @@
<package>
<metadata>
<id>jwt-cpp</id>
<version>0.6.0-nuget.3</version>
<version>0.7.0</version>
<authors>Thalhammer; prince-chrismc</authors>
<owners>Thalhammer; prince-chrismc</owners>
<projectUrl>https://github.com/Thalhammer/jwt-cpp</projectUrl>

View File

@@ -3,7 +3,7 @@ project(jwt-cpp-installation-tests)
set(TEST CACHE STRING "The test source file to be used")
find_package(jwt-cpp 0.6.0 EXACT REQUIRED CONFIG)
find_package(jwt-cpp 0.7.0 EXACT REQUIRED CONFIG)
add_executable(test-project ${TEST}.cpp)
target_link_libraries(test-project jwt-cpp::jwt-cpp)

View File

@@ -64,8 +64,8 @@ Apache can be configured as folder "/zm" using sample .conf:
Alternatively Apache web site configuration template can be used to setup
zoneminder as "http://zoneminder":
sudo cp -v /usr/share/doc/zoneminder/examples/apache.conf /etc/apache2/sites-available/
sudo a2ensite zoneminder.conf
sudo cp -v /usr/share/doc/zoneminder/examples/apache.conf /etc/apache2/sites-available/zoneminder.conf
sudo a2ensite zoneminder
Common configuration steps for Apache2:

View File

@@ -18,7 +18,7 @@
%global zmtargetdistro %{?rhel:el%{rhel}}%{!?rhel:fc%{fedora}}
Name: zoneminder
Version: 1.36.34
Version: 1.37.65
Release: 1%{?dist}
Summary: A camera monitoring and analysis tool
Group: System Environment/Daemons
@@ -76,6 +76,9 @@ BuildRequires: desktop-file-utils
BuildRequires: gzip
BuildRequires: zlib-devel
# jwt-cpp looks for nlohmann_json which is part of json-devel
BuildRequires: json-devel
# ZoneMinder looks for and records the location of the ffmpeg binary during build
BuildRequires: ffmpeg
BuildRequires: ffmpeg-devel
@@ -105,7 +108,6 @@ Requires: php-gd
Requires: php-intl
Requires: php-process
Requires: php-json
Requires: cambozola
Requires: php-pecl-apcu
Requires: net-tools
Requires: psmisc
@@ -196,7 +198,6 @@ rm -rf ./dep/RtspServer
mv -f RtspServer-%{rtspserver_commit} ./dep/RtspServer
# Change the following default values
./utils/zmeditconfigdata.sh ZM_OPT_CAMBOZOLA yes
./utils/zmeditconfigdata.sh ZM_OPT_CONTROL yes
./utils/zmeditconfigdata.sh ZM_CHECK_FOR_UPDATES no
@@ -225,6 +226,10 @@ desktop-file-install \
# Remove unwanted files and folders
find %{buildroot} \( -name .htaccess -or -name .editorconfig -or -name .packlist -or -name .git -or -name .gitignore -or -name .gitattributes -or -name .travis.yml \) -type f -delete > /dev/null 2>&1 || :
# Remove third-party header and cmake files that should not have been installed
rm -rf %{buildroot}%{_prefix}/cmake
rm -rf %{buildroot}%{_includedir}
# Recursively change shebang in all relevant scripts and set execute permission
find %{buildroot}%{_datadir}/zoneminder/www/api \( -name cake -or -name cake.php \) -type f -exec sed -i 's\^#!/usr/bin/env bash$\#!%{_buildshell}\' {} \; -exec %{__chmod} 755 {} \;
@@ -337,12 +342,15 @@ ln -sf %{_sysconfdir}/zm/www/zoneminder.nginx.conf %{_sysconfdir}/zm/www/zonemin
%{_unitdir}/zoneminder.service
%{_datadir}/polkit-1/actions/com.zoneminder.systemctl.policy
%{_datadir}/polkit-1/actions/com.zoneminder.arp-scan.policy
%{_datadir}/polkit-1/rules.d/com.zoneminder.arp-scan.rules
%{_bindir}/zmsystemctl.pl
%{_bindir}/zmaudit.pl
%{_bindir}/zmc
%{_bindir}/zmcontrol.pl
%{_bindir}/zmdc.pl
%{_bindir}/zmeventtool.pl
%{_bindir}/zmfilter.pl
%{_bindir}/zmpkg.pl
%{_bindir}/zmtrack.pl

View File

@@ -64,8 +64,8 @@ Apache can be configured as folder "/zm" using sample .conf:
Alternatively Apache web site configuration template can be used to setup
zoneminder as "http://zoneminder":
sudo cp -v /usr/share/doc/zoneminder/examples/apache.conf /etc/apache2/sites-available/
sudo a2ensite zoneminder.conf
sudo cp -v /usr/share/doc/zoneminder/examples/apache.conf /etc/apache2/sites-available/zoneminder.conf
sudo a2ensite zoneminder
Common configuration steps for Apache2:

View File

@@ -200,9 +200,11 @@ To make sure zoneminder can read the configuration file, run the following comma
::
sudo a2enconf zoneminder
sudo a2enmod cgi
sudo systemctl reload apache2.service
sudo systemctl restart zoneminder.service
sudo systemctl status zoneminder.service
sudo systemctl enable zoneminder.service # start zoneminder automatically at boot
If the zoneminder.service shows as active and without any errors, you should be able to access zoneminder at ``http://yourhostname/zm``

View File

@@ -3,9 +3,9 @@ Ubuntu
.. contents::
Ubuntu 22.04 (Jammy)
--------------------
These instructions are for a brand new ubuntu 22.04 LTS system which does not have ZM installed.
Ubuntu 22.04+ (Jammy)
---------------------
These instructions are for a brand new Ubuntu 22.04 LTS system (or newer) which does not have ZM installed.
**Step 1:** Update system
@@ -27,30 +27,51 @@ To use this repository instead of the official Ubuntu repository, enter the foll
sudo add-apt-repository ppa:iconnor/zoneminder-1.36
sudo apt update
**Step 3:** Install Zoneminder
**Step 3:** Configure MySQL/MariaDB
::
apt-get install mysql-server
Alternatively
::
apt-get install mariadb-server
**Step 4:** Configure the ZoneMinder Database
This step is not required if you are using our ppa packages as they will do it for you. It is ok to do it yourself though.
::
sudo mysql --defaults-file=/etc/mysql/debian.cnf -p < /usr/share/zoneminder/db/zm_create.sql
sudo mysql --defaults-file=/etc/mysql/debian.cnf -p -e "grant lock tables,alter,drop,select,insert,update,delete,create,index,alter routine,create routine, trigger,execute,references on zm.* to 'zmuser'@localhost identified by 'zmpass';"
**Step 5:** Install Zoneminder
::
sudo apt install -y zoneminder
**Step 4:** Configure Apache correctly:
**Step 6:** Configure Apache correctly:
::
sudo a2enmod rewrite
sudo a2enmod rewrite headers cgi
sudo a2enconf zoneminder
sudo systemctl restart apache2
**Step 5:** Enable and start zoneminder
**Step 7:** Enable and start zoneminder
::
sudo systemctl enable zoneminder
sudo systemctl start zoneminder
**Step 6:** Open Zoneminder
**Step 8:** Open Zoneminder
Open up a browser and go to ``http://hostname_or_ip/zm`` to open the ZoneMinder Console.

View File

@@ -14,23 +14,17 @@ A brief description of each of the principle components follows.
Binaries
--------
**zmc**
This is the ZoneMinder Capture daemon. This binary's job is to sit on a video device and suck frames off it as fast as possible, this should run at more or less constant speed.
**zma**
This is the ZoneMinder Analysis daemon. This is the component that goes through the captured frames and checks them for motion which might generate an alarm or event. It generally keeps up with the Capture daemon but if very busy may skip some frames to prevent it falling behind.
This is the ZoneMinder Capture daemon. This binary's job is to capture video from a network camera or local video capture device. It then optionally performs motion detection and records the video to disk. There will be one zmc process per capture card/camera.
**zms**
This is the ZoneMinder Streaming server. The web interface connects with this to get real-time or historical streamed images. It runs only when a live monitor stream or event stream is actually being viewed and dies when the event finishes or the associated web page is closed. If you find you have several zms processes running when nothing is being viewed then it is likely you need a patch for apache (see the Troubleshooting section). A non-parsed header version of zms, called nph-zms, is also installed and may be used instead depending on your web server configuration.
**zmu**
This is the ZoneMinder Utility. It's basically a handy command line interface to several useful functions. It's not really meant to be used by anyone except the web page (there's only limited 'help' in it so far) but can be if necessary, especially for debugging video problems.
PHP
---
As well as this there are the web PHP files in the web directory. Currently these consist of a single skin with Classic and Flat styles.
**Classic**
Original ZoneMinder skin
**Flat**
An updated version of Classic skin, retaining the same layout with a more modern style. Originally a skin this is now just a CSS style.
As well as this there are the web PHP files in the web directory. This UI was designed to be easily themeable, allowing either entirely different UIs (skins) or merely changing colours and layout by altering CSS styles.
Perl
----
@@ -39,7 +33,7 @@ Finally some perl scripts in the scripts directory. These scripts all have some
**zmpkg.pl**
This is the ZoneMinder Package Control script. This is used by the web interface and service scripts to control the execution of the system as a whole.
**zmdc.pl**
This is the ZoneMinder Daemon Control script. This is used by the web interface and the zmpkg.pl script to control and maintain the execution of the capture and analysis daemons, amongst others. You should not need to run this script yourself, although you can use it to start/top individual ZM processes.
This is the ZoneMinder Daemon Control script. This is used by the web interface and the zmpkg.pl script to control and maintain the execution of the various capture, filters and other background daemons. You should not need to run this script yourself, although you can use it to start/stop individual ZM processes.
**zmfilter.pl**
This script controls the execution of saved filters and will be started and stopped by the web interface based on whether there are filters that have been defined to be autonomous(background). This script is also responsible for the automatic uploading of events to a 3rd party server. Prior to 1.32 there was one zmfilter.pl process. In 1.32 onwards we start a zmfilter.pl process for each background filter so that the processing time of one filter doesn't delay the processing of another filter.
**zmaudit.pl**
@@ -83,7 +77,7 @@ Finally, there are also a number of ZoneMinder perl modules included. These are
This is the base ZoneMinder perl module. It contains only simple data such as version information. It is included by all other ZoneMinder perl modules
**ZoneMinder/Config.pm**
This module imports the ZoneMinder configuration from the database.
**ZoneMinder/Debug.pm**
**ZoneMinder/Logger.pm**
This module contains the defined Debug and Error functions etc, that are used by scripts to produce diagnostic information in a standard format.
**ZoneMinder/Database.pm**
This module contains database access definitions and functions. Currently not a lot is in this module but it is included as a placeholder for future development.
@@ -91,9 +85,9 @@ Finally, there are also a number of ZoneMinder perl modules included. These are
This module contains functions to load, manipulate, delete, copy, move events.
**ZoneMinder/Filter.pm**
This module contains functions to load, execute etc filters.
**ZoneMinder/SharedMem.pm**
**ZoneMinder/Memory.pm**
This module contains standard shared memory access functions. These can be used to access the current state of monitors etc as well as issuing commands to the monitors to switch things on and off. This module effectively provides a ZoneMinder API.
**ZoneMinder/ConfigAdmin.pm**
**ZoneMinder/ConfigData.pm**
This module is a specialised module that contains the definition, and other information, about the various configuration options. It is not intended for use by 3rd parties.
**ZoneMinder/Control/\*.pm**
These modules contain implementations of the various PTZ protocols.

70
funding.json Normal file
View File

@@ -0,0 +1,70 @@
{
"version": "v1.0.0",
"entity": {
"type": "organisation",
"role": "owner",
"name": "ZoneMinder Inc",
"email": "isaac@zoneminder.com",
"phone": "16478835483",
"description": "A Canadian corporation to hold and maintain the assets relating to the ZoneMinder CCTV project.",
"webpageUrl": {
"url": "https://www.zoneminder.com",
"wellKnown": "https://www.zoneminder.com/.well-known/funding-manifest-urls"
}
},
"projects": [
{
"guid": "zoneminder",
"name": "ZoneMinder",
"description": "A full-featured, open source, state-of-the-art video surveillance software system.",
"webpageUrl": {
"url": "https://www.zoneminder.com",
"wellKnown": "https://www.zoneminder.com/.well-known/funding-manifest-urls"
},
"repositoryUrl": {
"url": "https://github.com/zoneminder/zoneminder",
"wellKnown": "https://github.com/zoneminder/zoneminder/blob/master/.well-known/funding-manifest-urls"
},
"licenses": [
"GPL-v2"
],
"tags": [
"video",
"security",
"cameras"
]
}
],
"funding": {
"channels": [
{
"guid": "bank",
"type": "bank",
"address": "Account: 5212751 Transit No: 29842 Institution: 004",
"description": ""
},
{
"guid": "paypal",
"type": "payment-provider",
"address": "paypal@zoneminder.com",
"description": ""
}
],
"plans": [
{
"guid": "myplan",
"status": "active",
"name": "Infrastructure costs, support, development",
"description": "Not really sure this plan part is about.",
"amount": 10000,
"currency": "USD",
"frequency": "yearly",
"channels": [
"bank",
"paypal"
]
}
],
"history": []
}
}

View File

@@ -287,7 +287,7 @@ our @options = (
limited by their defined permissions.
`,
type => $types{boolean},
category => 'system',
category => 'auth',
},
{
name => 'ZM_AUTH_TYPE',
@@ -311,7 +311,7 @@ our @options = (
pattern => qr|^([br])|i,
format => q( $1 =~ /^b/ ? 'builtin' : 'remote' )
},
category => 'system',
category => 'auth',
},
{
name => 'ZM_CASE_INSENSITIVE_USERNAMES',
@@ -323,7 +323,7 @@ our @options = (
`,
requires => [ { name=>'ZM_OPT_USE_AUTH', value=>'yes' } ],
type => $types{boolean},
category => 'system',
category => 'auth',
},
{
name => 'ZM_AUTH_RELAY',
@@ -348,7 +348,7 @@ our @options = (
pattern => qr|^([hpn])|i,
format => q( ($1 =~ /^h/) ? 'hashed' : ($1 =~ /^p/ ? 'plain' : 'none' ) )
},
category => 'system',
category => 'auth',
},
{
name => 'ZM_AUTH_HASH_SECRET',
@@ -367,7 +367,7 @@ our @options = (
],
type => $types{string},
private => 1,
category => 'system',
category => 'auth',
},
{
name => 'ZM_AUTH_HASH_IPS',
@@ -390,7 +390,7 @@ our @options = (
{ name=>'ZM_AUTH_RELAY', value=>'hashed' }
],
type => $types{boolean},
category => 'system',
category => 'auth',
},
{
name => 'ZM_AUTH_HASH_TTL',
@@ -405,7 +405,7 @@ our @options = (
{ name=>'ZM_AUTH_RELAY', value=>'hashed' }
],
type => $types{integer},
category => 'system',
category => 'auth',
},
{
name => 'ZM_AUTH_HASH_LOGINS',
@@ -431,7 +431,7 @@ our @options = (
{ name=>'ZM_AUTH_RELAY', value=>'hashed' }
],
type => $types{boolean},
category => 'system',
category => 'auth',
},
{
name => 'ZM_RTSP2WEB_PATH',
@@ -497,7 +497,7 @@ our @options = (
if you are exposing your ZM instance on the Internet.
`,
type => $types{boolean},
category => 'system',
category => 'api',
},
{
name => 'ZM_OPT_USE_LEGACY_API_AUTH',
@@ -508,7 +508,7 @@ our @options = (
Authentication mechanism using JWT tokens. Older versions used a less secure MD5 based auth hash. It is recommended you turn this off after you are sure you don't need it. If you are using a 3rd party app that relies on the older API auth mechanisms, you will have to update that app if you turn this off. Note that zmNinja 1.3.057 onwards supports the new token system
`,
type => $types{boolean},
category => 'system',
category => 'auth',
},
{
name => 'ZM_OPT_USE_EVENTNOTIFICATION',
@@ -545,7 +545,7 @@ our @options = (
{name=>'ZM_OPT_USE_AUTH', value=>'yes'}
],
type => $types{boolean},
category => 'system',
category => 'auth',
},
{
name => 'ZM_OPT_GOOG_RECAPTCHA_SITEKEY',
@@ -560,7 +560,7 @@ our @options = (
],
type => $types{string},
private => 1,
category => 'system',
category => 'auth',
},
{
name => 'ZM_OPT_GOOG_RECAPTCHA_SECRETKEY',
@@ -576,7 +576,7 @@ our @options = (
],
type => $types{string},
private => 1,
category => 'system',
category => 'auth',
},
{
name => 'ZM_OPT_USE_GEOLOCATION',
@@ -1697,22 +1697,6 @@ our @options = (
type => $types{boolean},
category => 'web',
},
{
name => 'ZM_WEB_RESIZE_CONSOLE',
default => 'yes',
description => 'Should the console window resize itself to fit',
help => q`
Traditionally the main ZoneMinder web console window has
resized itself to shrink to a size small enough to list only
the monitors that are actually present. This is intended to
make the window more unobtrusize but may not be to everyone's
tastes, especially if opened in a tab in browsers which support
this kind if layout. Switch this option off to have the console
window size left to the users preference
`,
type => $types{boolean},
category => 'web',
},
{
name => 'ZM_WEB_ID_ON_CONSOLE',
default => 'yes',

View File

@@ -398,7 +398,7 @@ sub get_realm {
if ( $$self{realm} ne $tokens{realm} ) {
$$self{realm} = $tokens{realm};
Debug("Changing REALM to $$self{realm}, $$self{host}:$$self{port}, $$self{realm}, $$self{username}, $$self{password}");
$self->{ua}->credentials("$$self{host}:$$self{port}", $$self{realm}, $$self{username}, $$self{password});
$self->{ua}->credentials($$self{address}?$$self{address}:"$$self{host}:$$self{port}", $$self{realm}, $$self{username}, $$self{password});
$response = $self->get($url);
if ( !$response->is_success() ) {
Debug('Authentication still failed after updating REALM' . $response->status_line);
@@ -417,11 +417,27 @@ sub get_realm {
} # end if
} # end foreach auth header
} else {
debug('No headers line');
Debug('No headers line');
} # end if headers
} # end if not authen
return undef;
} # end sub get_realm
sub ping {
my $self = shift;
my $ip = @_ ? shift : $$self{host};
return undef if ! $ip;
require Net::Ping;
Debug("Pinging $ip");
my $p = Net::Ping->new();
my $rv = $p->ping($ip);
$p->close();
Debug("Pinging $ip $rv");
return $rv;
}
1;
__END__

View File

@@ -42,10 +42,6 @@ our @ISA = qw(ZoneMinder::Control);
use ZoneMinder::Logger qw(:all);
use ZoneMinder::Config qw(:all);
our $username = '';
our $password = '';
our $realm = '';
sub new {
my $class = shift;
my $id = shift;
@@ -54,12 +50,6 @@ sub new {
return $self;
}
sub credentials {
my $self = shift;
($username, $password) = @_;
}
sub open {
my $self = shift;
@@ -69,9 +59,9 @@ sub open {
if ($self->{Monitor}->{ControlAddress}
and
$self->{Monitor}{ControlAddress} ne 'user:pass@ip'
$self->{Monitor}->{ControlAddress} ne 'user:pass@ip'
and
$self->{Monitor}{ControlAddress} ne 'user:port@ip'
$self->{Monitor}->{ControlAddress} ne 'user:port@ip'
) {
if ( $self->{Monitor}->{ControlAddress} !~ /^\w+:\/\// ) {
@@ -80,30 +70,30 @@ sub open {
}
my $uri = URI->new($self->{Monitor}->{ControlAddress});
$realm = 'Login to ' . $self->{Monitor}->{ControlDevice};
$$self{realm} = 'Login to ' . $self->{Monitor}->{ControlDevice} if $self->{Monitor}->{ControlDevice};
if ($self->{Monitor}->{ControlAddress}) {
if ( $uri->userinfo()) {
( $username, $password ) = $uri->userinfo() =~ /^(.*):(.*)$/;
@$self{'username', 'password'} = $uri->userinfo() =~ /^(.*):(.*)$/;
} else {
$username = $self->{Monitor}->{User};
$password = $self->{Monitor}->{Pass};
$$self{username} = $self->{Monitor}->{User};
$$self{password} = $self->{Monitor}->{Pass};
}
$$self{address} = $uri->host_port();
$self->{ua}->credentials($uri->host_port(), $realm, $username, $password);
$self->{ua}->credentials($uri->host_port(), @$self{'realm', 'username', 'password'});
# Testing seems to show that we need the username/password in each url as well as credentials
$$self{base_url} = $uri->canonical();
Debug('Using initial credentials for '.$uri->host_port().", $realm, $username, $password, base_url: $$self{base_url} auth:".$uri->authority());
Debug('Using initial credentials for '.$uri->host_port().join(',', '', @$self{'realm', 'username', 'password'}).", base_url: $$self{base_url} auth:".$uri->authority());
}
} elsif ( $self->{Monitor}{Path}) {
my $uri = URI->new($self->{Monitor}{Path});
} elsif ( $self->{Monitor}->{Path}) {
my $uri = URI->new($self->{Monitor}->{Path});
Debug("Using Path for credentials: $self->{Monitor}{Path} " . $uri->userinfo());
if ( $uri->userinfo()) {
( $username, $password ) = $uri->userinfo() =~ /^(.*):(.*)$/;
@$self{'username', 'password'} = $uri->userinfo() =~ /^(.*):(.*)$/;
} else {
$username = $self->{Monitor}->{User};
$password = $self->{Monitor}->{Pass};
$uri->userinfo($username.':'.$password);
$$self{username} = $self->{Monitor}->{User};
$$self{password} = $self->{Monitor}->{Pass};
$uri->userinfo($$self{username}.':'.$$self{password});
}
$uri->scheme('http');
$uri->port(80);
@@ -111,8 +101,8 @@ sub open {
$$self{base_url} = $uri->canonical();
$$self{address} = $uri->host_port();
Debug("User auth $username $password " . $uri->authority() . ' ' . $uri->host_port());
$self->{ua}->credentials($uri->host_port(), $realm, $username, $password);
Debug("User auth $$self{username} $$self{password} " . $uri->authority() . ' ' . $uri->host_port());
$self->{ua}->credentials($uri->host_port(), @$self{'realm', 'username', 'password'});
chomp $$self{base_url};
Debug("Base_url is ".$$self{base_url});
} else {
@@ -121,57 +111,13 @@ sub open {
my $url = $$self{base_url}.'cgi-bin/magicBox.cgi?action=getDeviceType';
# Detect REALM, has to be /cgi-bin/ptz.cgi because just / accepts no auth
my $res = $self->get($url);
if ( $res->is_success ) {
if ($self->get_realm($url)) {
$self->{state} = 'open';
return;
return !undef;
}
if ( $res->status_line() eq '401 Unauthorized' ) {
my $headers = $res->headers();
foreach my $k ( keys %$headers ) {
Debug("Initial Header $k => $$headers{$k}");
}
if ( $$headers{'www-authenticate'} ) {
my ( $auth, $tokens ) = $$headers{'www-authenticate'} =~ /^(\w+)\s+(.*)$/;
if ( $tokens =~ /realm="([^"]+)"/i ) {
if ( $realm ne $1 ) {
$realm = $1;
Debug("Changing REALM to ($realm)");
$self->{ua}->credentials($$self{address}, $realm, $username, $password);
$res = $self->get($url);
if ( $res->is_success() ) {
$self->{state} = 'open';
return !undef;
} elsif ( $res->status_line eq '400 Bad Request' ) {
# In testing, this second request fails with Bad Request, I assume because we didn't actually give it a command.
$self->{state} = 'open';
return !undef;
} else {
Error('Authentication still failed after updating REALM' . $res->status_line);
$headers = $res->headers();
foreach my $k ( keys %$headers ) {
Debug("Header $k => $$headers{$k}");
} # end foreach
}
} else {
Error('Authentication failed, not a REALM problem');
}
} else {
Error('Failed to match realm in tokens');
} # end if
} else {
Debug('No headers line');
} # end if headers
} else {
Error("Failed to get $$self{base_url}cgi-bin/magicBox.cgi?action=getDeviceType ".$res->status_line());
} # end if $res->status_line() eq '401 Unauthorized'
$self->{state} = 'closed';
return undef;
}
sub close {
@@ -179,15 +125,6 @@ sub close {
$self->{state} = 'closed';
}
sub get {
my $self = shift;
my $url = shift;
Debug("Getting $url");
my $response = $self->{ua}->get($url);
Debug('Response: '. $response->status_line . ' ' . $response->content);
return $response;
}
sub sendCmd {
my $self = shift;
my $cmd = shift;
@@ -195,7 +132,7 @@ sub sendCmd {
$self->printMsg($cmd, 'Tx');
my $res = $self->{ua}->get($$self{base_url}.$cmd);
my $res = $self->get($$self{base_url}.$cmd);
if ( $res->is_success ) {
$result = !undef;
@@ -203,14 +140,19 @@ sub sendCmd {
Info('Camera control: \''.$res->status_line().'\' for URL '.$$self{base_url}.$cmd);
# TODO: Add code to retrieve $res->message_decode or some such. Then we could do things like check the camera status.
} else {
# Have seen on some HikVision cams that whatever cookie LWP uses times out and it never refreshes, so we have to actually create a new LWP object.
$self->{ua} = LWP::UserAgent->new();
$self->{ua}->cookie_jar( {} );
$self->{ua}->credentials($$self{address}, $$self{realm}, $$self{username}, $$self{password});
# Try again
$res = $self->{ua}->get($$self{base_url}.$cmd);
$res = $self->get($$self{base_url}.$cmd);
if ( $res->is_success ) {
# Command to camera appears successful, write Info item to log
Info('Camera control 2: \''.$res->status_line().'\' for URL '.$$self{base_url}.$cmd);
} else {
Error('Camera control command FAILED: \''.$res->status_line().'\' for URL '.$$self{base_url}.$cmd);
$res = $self->{ua}->get('http://'.$self->{Monitor}->{ControlAddress}.'/'.$cmd);
$res = $self->get('http://'.$self->{Monitor}->{ControlAddress}.'/'.$cmd);
}
}
@@ -447,7 +389,7 @@ sub set_config {
my $url = $$self{base_url}.'/cgi-bin/configManager.cgi?action=setConfig'.
join('&', map { $_.'='.uri_encode($$diff{$_}) } keys %$diff);
my $response = $self->{ua}->get($url);
my $response = $self->get($url);
Debug($response->content);
return $response->is_success();
}

View File

@@ -39,7 +39,7 @@ our @ISA = qw(ZoneMinder::Control);
#
# Set the following:
# ControlAddress: username:password@camera_webaddress:port
# ControlDevice: IP Camera Model
# ControlDevice: IP Camera Model or Device 1
#
# ==========================================================================
@@ -51,27 +51,24 @@ use LWP::UserAgent;
use HTTP::Cookies;
use URI;
use URI::Encode qw(uri_encode);
use Data::Dumper;
#use Crypt::Mode::CBC;
#use Crypt::Cipher::AES;
my $ChannelID = 1; # Usually...
my $DefaultFocusSpeed = 50; # Should be between 1 and 100
my $DefaultIrisSpeed = 50; # Should be between 1 and 100
my $uri;
my ($user, $pass, $host, $port, $realm) = ();
sub credentials {
my $self = shift;
($user, $pass) = @_;
Debug("Setting credentials to $user/$pass");
}
sub open {
my $self = shift;
$self->loadMonitor();
$port = 80;
$$self{port} = 80;
# Create a UserAgent for the requests
$self->{UA} = LWP::UserAgent->new();
$self->{UA}->cookie_jar( {} );
$self->{ua} = LWP::UserAgent->new();
$self->{ua}->cookie_jar( {} );
# Extract the username/password host/port from ControlAddress
if ($self->{Monitor}{ControlAddress}
@@ -81,107 +78,68 @@ sub open {
$self->{Monitor}{ControlAddress} ne 'user:port@ip'
) {
Debug("Using ControlAddress for credentials: $self->{Monitor}{ControlAddress}");
if ($self->{Monitor}{ControlAddress} =~ /^([^:]+):([^@]+)@(.+)/ ) { # user:pass@host...
$user = $1 if !$user;
$pass = $2 if !$pass;
$host = $3;
} elsif ( $self->{Monitor}{ControlAddress} =~ /^([^@]+)@(.+)/ ) { # user@host...
$user = $1 if !$user;
$host = $2;
} else { # Just a host
$host = $self->{Monitor}{ControlAddress};
$uri = URI->new($self->{Monitor}->{ControlAddress});
$uri = URI->new('http://'.$self->{Monitor}->{ControlAddress}) if ref($uri) eq 'URI::_foreign';
$$self{host} = $uri->host();
if ( $uri->userinfo()) {
@$self{'username','password'} = $uri->userinfo() =~ /^(.*):(.*)$/;
} else {
$$self{username} = $self->{Monitor}->{User};
$$self{password} = $self->{Monitor}->{Pass};
}
# Check if it is a host and port or just a host
if ( $host =~ /([^:]+):(.+)/ ) {
$host = $1;
$port = $2 ? $2 : $port;
if ( $$self{host} =~ /([^:]+):(.+)/ ) {
$$self{host} = $1;
$$self{port} = $2 ? $2 : $$self{port};
}
} elsif ($self->{Monitor}{Path}) {
Debug("Using Path for credentials: $self->{Monitor}{Path}");
if (($self->{Monitor}->{Path} =~ /^(?<PROTOCOL>(https?|rtsp):\/\/)?(?<USERNAME>[^:@]+)?:?(?<PASSWORD>[^\/@]+)?@(?<ADDRESS>[^:\/]+)/)) {
Debug("Have " . $+{USERNAME});
Debug("Have " . $+{PASSWORD});
$user = $+{USERNAME} if $+{USERNAME} and !$user;
$pass = $+{PASSWORD} if $+{PASSWORD} and !$pass;
$host = $+{ADDRESS} if $+{ADDRESS};
$$self{username} = $+{USERNAME} if $+{USERNAME} and !$$self{username};
$$self{password} = $+{PASSWORD} if $+{PASSWORD} and !$$self{password};
$$self{host} = $+{ADDRESS} if $+{ADDRESS};
} elsif (($self->{Monitor}->{Path} =~ /^(?<PROTOCOL>(https?|rtsp):\/\/)?(?<ADDRESS>[^:\/]+)/)) {
$host = $+{ADDRESS} if $+{ADDRESS};
$user = $self->{Monitor}->{User} if $self->{Monitor}->{User} and !$user;
$pass = $self->{Monitor}->{Pass} if $self->{Monitor}->{Pass} and !$pass;
$$self{host} = $+{ADDRESS} if $+{ADDRESS};
$$self{username} = $self->{Monitor}->{User} if $self->{Monitor}->{User} and !$$self{username};
$$self{password} = $self->{Monitor}->{Pass} if $self->{Monitor}->{Pass} and !$$self{password};
} else {
$user = $self->{Monitor}->{User} if $self->{Monitor}->{User} and !$user;
$pass = $self->{Monitor}->{Pass} if $self->{Monitor}->{Pass} and !$pass;
$$self{username}= $self->{Monitor}->{User} if $self->{Monitor}->{User} and !$$self{username};
$$self{password} = $self->{Monitor}->{Pass} if $self->{Monitor}->{Pass} and !$$self{password};
}
$uri = URI->new($self->{Monitor}->{Path});
$uri->scheme('http');
$uri->port(80);
$uri->path('');
$host = $uri->host();
$$self{host} = $uri->host();
} else {
Debug('Not using credentials');
}
# Save the base url
$self->{BaseURL} = "http://$host:$port";
$self->{BaseURL} = "http://$$self{host}:$$self{port}";
$ChannelID = $self->{Monitor}{ControlDevice} if $self->{Monitor}{ControlDevice} =~ /^\d+$/;
$$self{realm} = defined($self->{Monitor}->{ControlDevice}) ? $self->{Monitor}->{ControlDevice} : '';
$ChannelID = $self->{Monitor}{ControlDevice} if $self->{Monitor}{ControlDevice};
$realm = '';
if (defined($user)) {
Debug("Credentials: $host:$port, realm:$realm, $user, $pass");
$self->{UA}->credentials("$host:$port", $realm, $user, $pass);
# Save and test the credentials
if (defined($$self{username})) {
Debug("Credentials: $$self{host}:$$self{port}, realm:$$self{realm}, $$self{username}, $$self{password}");
$self->{ua}->credentials("$$self{host}:$$self{port}", $$self{realm}, $$self{username}, $$self{password});
} # end if defined user
my $url = $self->{BaseURL} .'/ISAPI/Streaming/channels/101';
my $response = $self->get($url);
if ($response->status_line() eq '401 Unauthorized' and defined $user) {
my $headers = $response->headers();
foreach my $k ( keys %$headers ) {
Debug("Initial Header $k => $$headers{$k}");
}
if ( $$headers{'www-authenticate'} ) {
foreach my $auth_header ( ref $$headers{'www-authenticate'} eq 'ARRAY' ? @{$$headers{'www-authenticate'}} : ($$headers{'www-authenticate'})) {
my ( $auth, $tokens ) = $auth_header =~ /^(\w+)\s+(.*)$/;
Debug("Have tokens $auth $tokens");
my %tokens = map { /(\w+)="?([^"]+)"?/i } split(', ', $tokens );
if ( $tokens{realm} ) {
if ( $realm ne $tokens{realm} ) {
$realm = $tokens{realm};
Debug("Changing REALM to $realm");
$self->{UA}->credentials("$host:$port", $realm, $user, $pass);
$response = $self->{UA}->get($url);
if ( !$response->is_success() ) {
Debug('Authentication still failed after updating REALM' . $response->status_line);
$headers = $response->headers();
foreach my $k ( keys %$headers ) {
Debug("Initial Header $k => $$headers{$k}\n");
} # end foreach
} else {
last;
}
} else {
Error('Authentication failed, not a REALM problem');
}
} else {
Debug('Failed to match realm in tokens');
} # end if
} # end foreach auth header
} else {
debug('No headers line');
} # end if headers
} # end if not authen
if ($response->is_success()) {
my $url = '/ISAPI/System/deviceInfo';
if ($self->get_realm($url)) {
$self->{state} = 'open';
return !undef;
}
Debug('Response: '. $response->status_line . ' ' . $response->content);
return $response->is_success;
return undef;
} # end sub open
sub get {
my $self = shift;
my $url = shift;
my $url = $self->{BaseURL}.shift;
Debug("Getting $url");
my $response = $self->{UA}->get($url);
my $response = $self->{ua}->get($url);
#Debug('Response: '. $response->status_line . ' ' . $response->content);
return $response;
}
@@ -191,74 +149,52 @@ sub PutCmd {
my $cmd = shift;
my $content = shift;
if (!$cmd) {
Error("No cmd specified in PutCmd");
Error('No cmd specified in PutCmd');
return;
}
Debug("Put: $cmd to ".$self->{BaseURL}.(defined($content)?' content:'.$content:''));
my $req = HTTP::Request->new(PUT => $self->{BaseURL}.'/'.$cmd);
if ( defined($content) ) {
$req->content_type('application/x-www-form-urlencoded; charset=UTF-8');
$req->content('<?xml version="1.0" encoding="UTF-8"?>' . "\n" . $content);
}
my $res = $self->{UA}->request($req);
my $res = $self->{ua}->request($req);
if (!$res->is_success) {
#
# The camera timeouts connections at short intervals. When this
# happens the user agent connects again and uses the same auth tokens.
# The camera rejects this and asks for another token but the UserAgent
# just gives up. Because of this I try the request again and it should
# succeed the second time if the credentials are correct.
#
# Apparently it is necessary to create a new ua
if ( $res->code == 401 ) {
#
# It has failed authentication. The odds are
# that the user has set some parameter incorrectly
# so check the realm against the ControlDevice
# entry and send a message if different
#
my $headers = $res->headers();
foreach my $k ( keys %$headers ) {
Debug("Initial Header $k => $$headers{$k}");
}
$self->{ua} = LWP::UserAgent->new();
$self->{ua}->cookie_jar( {} );
$self->{ua}->credentials("$$self{host}:$$self{port}", $$self{realm}, $$self{username}, $$self{password});
if ( $$headers{'www-authenticate'} ) {
foreach my $auth ( ref $$headers{'www-authenticate'} eq 'ARRAY' ? @{$$headers{'www-authenticate'}} : ($$headers{'www-authenticate'})) {
foreach (split(/\s*,\s*/, $auth)) {
if ( $_ =~ /^realm\s*=\s*"([^"]+)"/i ) {
if ($realm ne $1) {
$realm = $1;
$self->{UA}->credentials("$host:$port", $realm, $user, $pass);
return PutCmd($self, $cmd, $content);
}
} else {
Debug('Not realm: '.$_);
}
} # end foreach auth token
} # end foreach auth token
} else {
Debug('No authenticate header');
$res = $self->{ua}->request($req);
if (!$res->is_success) {
# Check for username/password
if ( $self->{Monitor}{ControlAddress} =~ /.+:(.+)@.+/ ) {
Info('Check username/password is correct');
} elsif ( $self->{Monitor}{ControlAddress} =~ /^[^:]+@.+/ ) {
Info('No password in Control Address. Should there be one?');
} elsif ( $self->{Monitor}{ControlAddress} =~ /^:.+@.+/ ) {
Info('Password but no username in Control Address.');
} else {
Info('Missing username and password in Control Address.');
}
Error($res->status_line);
}
#
# Check for username/password
#
if ( $self->{Monitor}{ControlAddress} =~ /.+:.+@.+/ ) {
Info('Check username/password is correct');
} elsif ( $self->{Monitor}{ControlAddress} =~ /^[^:]+@.+/ ) {
Info('No password in Control Address. Should there be one?');
} elsif ( $self->{Monitor}{ControlAddress} =~ /^:.+@.+/ ) {
Info('Password but no username in Control Address.');
} else {
Info('Missing username and password in Control Address.');
}
Error($res->status_line);
} else {
Error($res->status_line);
}
} else {
Debug('Success: ' . $res->content);
Debug("Success sending $cmd: ".$res->content);
} # end unless res->is_success
Debug($res->content);
} # end sub putCmd
#
# The move continuous functions all call moveVector
# with the direction to move in. This includes zoom
#
@@ -270,15 +206,10 @@ sub moveVector {
my $params = shift;
my $command; # The ISAPI/PTZ command
# Calculate autostop time
my $autostop = $self->getParam($params, 'autostop', 0);
my $duration = $self->duration();
my $duration = $autostop * $self->{Monitor}{AutoStopTimeout};
$duration = ($duration < 1000) ? $duration * 1000 : int($duration/1000);
# Change from microseconds to milliseconds or seconds to milliseconds
Debug("Calculate duration $duration from autostop($autostop) and AutoStopTimeout ".$self->{Monitor}{AutoStopTimeout});
my $momentxml;
if ($duration) {
if( $duration ) {
$momentxml = "<Momentary><duration>$duration</duration></Momentary>";
$command = "ISAPI/PTZCtrl/channels/$ChannelID/momentary";
} else {
@@ -298,7 +229,6 @@ sub moveVector {
# Send it to the camera
$self->PutCmd($command, $xml);
}
sub zoomStop { $_[0]->moveVector( 0, 0, 0, splice(@_,1)); }
sub moveStop { $_[0]->moveVector( 0, 0, 0, splice(@_,1)); }
sub moveConUp { $_[0]->moveVector( 0, 1, 0, splice(@_,1)); }
@@ -332,6 +262,17 @@ sub presetHome {
my $params = shift;
$self->PutCmd("ISAPI/PTZCtrl/channels/$ChannelID/homeposition/goto");
}
sub duration() {
my $self = shift;
my $params = shift;
my $autostop = $self->getParam($params, 'autostop', 0);
my $duration = $autostop * $self->{Monitor}{AutoStopTimeout};
$duration = ($duration < 1000) ? $duration * 1000 : int($duration/1000);
# Change from microseconds to milliseconds or seconds to milliseconds
Debug("Calculate duration $duration from autostop($autostop) and AutoStopTimeout ".$self->{Monitor}{AutoStopTimeout});
return $duration;
}
#
# Focus controls all call Focus with a +/- speed
#
@@ -345,12 +286,11 @@ sub focusConNear {
my $self = shift;
my $params = shift;
# Calculate autostop time
my $duration = $self->getParam( $params, 'autostop', 0 ) * $self->{Monitor}{AutoStopTimeout};
my $duration = $self->duration();
# Get the focus speed
my $speed = $self->getParam( $params, 'speed', $DefaultFocusSpeed );
$self->Focus(-$speed);
if($duration) {
if ($duration) {
usleep($duration);
$self->moveStop($params);
}
@@ -379,8 +319,7 @@ sub focusConFar {
my $self = shift;
my $params = shift;
# Calculate autostop time
my $duration = $self->getParam( $params, 'autostop', 0 ) * $self->{Monitor}{AutoStopTimeout};
my $duration = $self->duration();
# Get the focus speed
my $speed = $self->getParam( $params, 'speed', $DefaultFocusSpeed );
$self->Focus($speed);
@@ -424,8 +363,7 @@ sub irisConClose {
my $self = shift;
my $params = shift;
# Calculate autostop time
my $duration = $self->getParam( $params, 'autostop', 0 ) * $self->{Monitor}{AutoStopTimeout};
my $duration = $self->duration();
# Get the iris speed
my $speed = $self->getParam( $params, 'speed', $DefaultIrisSpeed );
$self->Iris(-$speed);
@@ -460,8 +398,7 @@ sub irisConOpen {
my $self = shift;
my $params = shift;
# Calculate autostop time
my $duration = $self->getParam( $params, 'autostop', 0 ) * $self->{Monitor}{AutoStopTimeout};
my $duration = $self->duration();
# Get the iris speed
my $speed = $self->getParam( $params, 'speed', $DefaultIrisSpeed );
$self->Iris($speed);
@@ -502,10 +439,24 @@ sub reboot {
}
my %config_types = (
'ISAPI/System/deviceInfo' => {
},
'ISAPI/System/time' => {
},
'ISAPI/System/time/ntpServers' => {
},
'ISAPI/System/Network/interfaces' => {
},
'ISAPI/System/logServer' => {
},
'ISAPI/Streaming/channels/1' => {
Video => {
videoResolutionWidth => { value=>1920 },
videoResolutionHeight => { value=>1080 },
maxFrameRate => { value=>1000}, # appears to be fps * 100
keyframeInterval => {value=>5000},
}
},
'ISAPI/Streaming/channels/101' => {
Video => {
videoResolutionWidth => { value=>1920 },
@@ -514,10 +465,26 @@ my %config_types = (
keyframeInterval => {value=>5000},
}
},
'ISAPI/Streaming/channels/102' => {
Video => {
videoResolutionWidth => { value=>1920 },
videoResolutionHeight => { value=>1080 },
maxFrameRate => { value=>1000}, # appears to be fps * 100
keyframeInterval => {value=>5000},
}
},
'ISAPI/System/Video/inputs/channels/1/overlays' => {
},
'ISAPI/System/Video/inputs/channels/101/overlays' => {
},
'ISAPI/System/Video/inputs/channels/1/motionDetectionExt' => {
},
'ISAPI/System/Network/Integrate' => {
},
'ISAPI/Security/ONVIF/users' => {
},
'ISAPI/Security/users' => {
},
);
sub xml_apply_updates {
@@ -583,7 +550,7 @@ sub get_config {
my $self = shift;
my %config;
foreach my $category ( @_ ? @_ : keys %config_types ) {
my $response = $self->get($self->{BaseURL}.'/'.$category);
my $response = $self->get('/'.$category);
Debug($response->content);
my $dom = XML::LibXML->load_xml(string => $response->content);
if (!$dom) {
@@ -609,11 +576,11 @@ sub set_config {
}
Debug("Applying $category");
my $response = $self->get($self->{BaseURL}.'/'.$category);
my $response = $self->get('/'.$category);
my $dom = XML::LibXML->load_xml(string => $response->content);
if (!$dom) {
Error('No document from :'.$response->content());
return;
return undef;
}
my $xml = $dom->documentElement();
xml_apply_updates($xml, $$diff{$category});
@@ -621,69 +588,35 @@ sub set_config {
Debug($xml->toString());
$req->content($xml->toString());
$response = $self->{UA}->request($req);
Debug( 'status:'.$response->status_line );
Debug($response->content);
$response = $self->{ua}->request($req);
if (!$response->is_success()) {
Error('status:'.$response->status_line);
Debug($response->content);
return undef;
} else {
Debug('status:'.$response->status_line);
Debug($response->content);
}
}
return !undef;
}
sub ping {
return -1 if ! $host;
require Net::Ping;
my $p = Net::Ping->new();
my $rv = $p->ping($host);
$p->close();
return $rv;
}
sub probe {
my ($ip, $user, $pass) = @_;
my ($ip, $username, $password) = @_;
my $self = new ZoneMinder::Control::HikVision();
$self->set_credentials($username, $password);
# Create a UserAgent for the requests
$self->{UA} = LWP::UserAgent->new();
$self->{UA}->cookie_jar( {} );
my $realm;
$self->{ua} = LWP::UserAgent->new();
$self->{ua}->cookie_jar( {} );
foreach my $port ( '80','443' ) {
my $url = 'http://'.$user.':'.$pass.'@'.$ip.':'.$port.'/ISAPI/Streaming/channels/101';
Debug("Probing $url");
my $response = $self->get($url);
if ($response->status_line() eq '401 Unauthorized' and defined $user) {
my $headers = $response->headers();
foreach my $k ( keys %$headers ) {
Debug("Initial Header $k => $$headers{$k}");
}
if ( $$headers{'www-authenticate'} ) {
my ( $auth, $tokens ) = $$headers{'www-authenticate'} =~ /^(\w+)\s+(.*)$/;
my %tokens = map { /(\w+)="?([^"]+)"?/i } split(', ', $tokens );
if ($tokens{realm}) {
$realm = $tokens{realm};
Debug('Changing REALM to '.$tokens{realm});
$self->{UA}->credentials("$ip:$port", $tokens{realm}, $user, $pass);
$response = $self->{UA}->get($url);
if (!$response->is_success()) {
Error('Authentication still failed after updating REALM' . $response->status_line);
}
$headers = $response->headers();
foreach my $k ( keys %$headers ) {
Debug("Initial Header $k => $$headers{$k}\n");
} # end foreach
} else {
Debug('Failed to match realm in tokens');
} # end if
} else {
Debug('No headers line');
} # end if headers
} # end if not authen
Debug('Response: '. $response->status_line . ' ' . $response->content);
if ($response->is_success) {
foreach ( '80','443' ) {
$$self{port} = $_;
if ($self->get_realm('/ISAPI/Streaming/channels/101')) {
return {
url => 'http://'.$user.':'.$pass.'@'.$ip.':'.$port.'/h264',
realm => $realm,
url => 'http://'.$$self{username}.':'.$$self{password}.'@'.$ip.':'.$$self{port}.'/h264',
realm => $$self{realm},
};
}
} # end foreach port
@@ -693,5 +626,70 @@ sub probe {
sub profiles {
}
sub rtsp_url {
my ($self, $ip) = @_;
return 'rtsp://'.$ip.'/Streaming/Channels/101';
}
my %latest_firmware = (
'I918L' => {
latest_version=>'V5.7.1',
build=>20211130,
url=>'https://download.annke.com/firmware/4K_IPC/C800_5.7.1_211130.zip'
},
'DS-2CD2126G2-I' => {
'latest_version'=>'V5.7.0',
build=>240507,
url=>'https://assets.hikvision.com/prd/public/all/files/202405/1715716961127/Firmware__V5.7.0_240507_S3000573675.zip',
file=>'Firmware__V5.7.0_240507_S3000573675.zip',
},
'DS-2CD2046G2-I' => {
'latest_version'=>'V5.7.18',
build=>240826,
url=>'https://assets.hikvision.com/prd/public/all/files/202409/Firmware__V5.7.18_240826_S3000597013.zip',
file=>'Firmware__V5.7.18_240826_S3000597013.zip',
},
'DS-2CD2146G2-I' => {
'latest_version'=>'V5.7.18',
build=>240826,
url=>'https://assets.hikvision.com/prd/public/all/files/202409/Firmware__V5.7.18_240826_S3000597013.zip',
file=>'Firmware__V5.7.18_240826_S3000597013.zip',
},
'DS-2CD2142FWD-I' => {
latest_version=>'V5.5.82',
build=>190909,
file=>'IPC_R6_EN_STD_5.5.82_190909.zip',
url=>'https://www.hikvisioneurope.com/eu/portal/portal/Technical%20Materials/00%20%20Network%20Camera/00%20%20Product%20Firmware/R6%20platform%20%282X22FWD%2C%202X42FWD%2C%202X52%2C64X4FWD%2C1X31%2C1X41%29/V5.5.82_Build190909/IPC_R6_EN_STD_5.5.82_190909.zip',
},
);
sub check_firmware {
my $self = shift;
my $config = $self->get_config('ISAPI/System/deviceInfo');
print Dumper($config);
my $model = $$config{'ISAPI/System/deviceInfo'}{model};
if (!$model) {
print "No model\n";
return;
}
my $firmware = $$config{'ISAPI/System/deviceInfo'}{firmwareVersion};
if ($latest_firmware{$model}) {
my %result = %{$latest_firmware{$model}};
$result{current_version} = $firmware;
$result{current_build} = $$config{'ISAPI/System/deviceInfo'}{firmwareReleasedDate};
$result{update_available} = ($firmware lt $result{latest_version});
return %result;
} else {
Debug("We don't have a listing for latest firmware for ($model)");
}
return;
}
sub update_firmware {
my $self = shift;
my $firmware = shift;
my $response = $self->put('/ISAPI/System/updateFirmware', $firmware);
}
1;
__END__

View File

@@ -145,13 +145,22 @@ our %codes = (
);
our %priorities = (
&DEBUG => 'debug',
&INFO => 'info',
&WARNING => 'warning',
&ERROR => 'err',
&FATAL => 'err',
&PANIC => 'err'
);
&DEBUG9 => 'debug',
&DEBUG8 => 'debug',
&DEBUG7 => 'debug',
&DEBUG6 => 'debug',
&DEBUG5 => 'debug',
&DEBUG4 => 'debug',
&DEBUG3 => 'debug',
&DEBUG2 => 'debug',
&DEBUG1 => 'debug',
&DEBUG => 'debug',
&INFO => 'info',
&WARNING => 'warning',
&ERROR => 'err',
&FATAL => 'err',
&PANIC => 'err'
);
our $logger;
our $LOGFILE;
@@ -308,9 +317,6 @@ sub initialise( @ ) {
$this->{initialised} = !undef;
# this function can get called on a previously initialized log Object, so clean any sth's
$this->{sth} = undef;
Debug( 'LogOpts: level='.$codes{$this->{level}}
.'/'.$codes{$this->{effectiveLevel}}
.', screen='.$codes{$this->{termLevel}}
@@ -467,7 +473,6 @@ sub databaseLevel {
} else {
undef($this->{dbh});
}
$this->{sth} = undef;
$this->{databaseLevel} = $databaseLevel;
}
return $this->{databaseLevel};
@@ -592,7 +597,6 @@ sub logPrint {
if ( $level <= $this->{databaseLevel} ) {
if ( ! ( $ZoneMinder::Database::dbh and $ZoneMinder::Database::dbh->ping() ) ) {
$this->{sth} = undef;
# Turn this off because zDbConnect will do logging calls.
my $oldlevel = $this->{databaseLevel};
$this->{databaseLevel} = NOLOG;
@@ -603,15 +607,15 @@ sub logPrint {
$this->{databaseLevel} = $oldlevel;
}
my $sql = 'INSERT INTO Logs ( TimeKey, Component, ServerId, Pid, Level, Code, Message, File, Line ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, NULL )';
$this->{sth} = $ZoneMinder::Database::dbh->prepare_cached($sql) if ! $this->{sth};
if ( !$this->{sth} ) {
my $sql = 'INSERT INTO Logs ( TimeKey, Component, ServerId, Pid, Level, Code, Message, File, Line ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? )';
my $sth = $ZoneMinder::Database::dbh->prepare_cached($sql);
if ( !$sth ) {
$this->{databaseLevel} = NOLOG;
Error("Can't prepare log entry '$sql': ".$ZoneMinder::Database::dbh->errstr());
return;
}
my $res = $this->{sth}->execute(
my $res = $sth->execute(
$seconds+($microseconds/1000000.0),
$this->{id},
($ZoneMinder::Config::Config{ZM_SERVER_ID} ? $ZoneMinder::Config::Config{ZM_SERVER_ID} : undef),
@@ -620,6 +624,7 @@ sub logPrint {
$codes{$level},
$string,
$this->{fileName},
$line
);
if ( !$res ) {
$this->{databaseLevel} = NOLOG;
@@ -746,10 +751,6 @@ sub Fatal {
if ( $SIG{TERM} and ( $SIG{TERM} ne 'DEFAULT' ) ) {
$SIG{TERM}();
}
if ( $$this{sth} ) {
$$this{sth}->finish();
$$this{sth} = undef;
}
# I think if we don't disconnect we will leave sockets around in TIME_WAIT
ZoneMinder::Database::zmDbDisconnect();
exit(-1);

View File

@@ -153,7 +153,7 @@ our %mem_data = (
capture_fps => { type=>'double', seq=>$mem_seq++ },
analysis_fps => { type=>'double', seq=>$mem_seq++ },
latitude => { type=>'double', seq=>$mem_seq++ },
Longitude => { type=>'double', seq=>$mem_seq++ },
longitude => { type=>'double', seq=>$mem_seq++ },
last_event => { type=>'uint64', seq=>$mem_seq++ },
action => { type=>'uint32', seq=>$mem_seq++ },
brightness => { type=>'int32', seq=>$mem_seq++ },
@@ -169,7 +169,7 @@ our %mem_data = (
signal => { type=>'uint8', seq=>$mem_seq++ },
format => { type=>'uint8', seq=>$mem_seq++ },
reserved1 => { type=>'uint8', seq=>$mem_seq++ },
#reserved2 => { type=>'uint8', seq=>$mem_seq++ },
reserved2 => { type=>'uint8', seq=>$mem_seq++ },
imagesize => { type=>'uint32', seq=>$mem_seq++ },
last_frame_score => { type=>'uint32', seq=>$mem_seq++ },
audio_frequency => { type=>'uint32', seq=>$mem_seq++ },
@@ -287,9 +287,16 @@ sub zmMemVerify {
my $sd_size = zmMemRead($monitor, 'shared_data:size', 1);
if ($sd_size != $mem_data{shared_data}->{size}) {
Error('Shared data size conflict in shared_data for monitor '
.$monitor->{Name}.', expected '.$mem_data{shared_data}->{size}
.', got '.$sd_size);
if ($mem_data{shared_data}->{size}) {
Error('Shared data size conflict in shared_data for monitor '
.$monitor->{Name}.', expected '.$mem_data{shared_data}->{size}
.', got '.$sd_size);
} else {
# Means it may be starting up/reloading, not really an error
Debug('Shared data size conflict in shared_data for monitor '
.$monitor->{Name}.', expected '.$mem_data{shared_data}->{size}
.', got '.$sd_size);
}
return undef;
} else {
Debug(4, 'Shared data size match for monitor '.$monitor->{Name}

View File

@@ -292,7 +292,7 @@ $fields{model} = undef;
Longitude => undef,
RTSPStreamName => '',
RTSPServer => 0,
Importance => 'Normal',
Importance => q`'Normal'`,
MQTT_Enabled => 0,
MQTT_Subscriptions => q`''`,
);
@@ -467,7 +467,7 @@ sub Control {
}
require Module::Load::Conditional;
if (!Module::Load::Conditional::can_load(modules => {'ZoneMinder::Control::'.$Protocol => undef})) {
Error("Can't load ZoneMinder::Control::$Protocol\n$Module::Load::Conditional::ERROR");
Error("Monitor $$self{Id} $$self{Name} Can't load ZoneMinder::Control::$Protocol\n$Module::Load::Conditional::ERROR");
return undef;
}
$Control = $Control->clone(); # Because this object is not per monitor specific

View File

@@ -241,12 +241,8 @@ if ( $command =~ /^(?:start|restart)$/ ) {
} # end foreach monitor
my @filters = ZoneMinder::Filter->find(Background=>1);
if (@filters) {
foreach my $filter (@filters) {
runCommand("zmdc.pl start zmfilter.pl --filter_id=$$filter{Id} --daemon");
}
} else {
runCommand('zmdc.pl start zmfilter.pl');
foreach my $filter (@filters) {
runCommand("zmdc.pl start zmfilter.pl --filter_id=$$filter{Id} --daemon");
}
if ( $Config{ZM_RUN_AUDIT} ) {

View File

@@ -22,6 +22,7 @@
# ==========================================================================
use strict;
use warnings;
use bytes;
use utf8;
@@ -67,9 +68,11 @@ if ($help) {
pod2usage(-exitstatus => -1);
}
logInit();
my $dbh = zmDbConnect();
if ($show) {
my %telemetry;
my $dbh = zmDbConnect();
collectData($dbh, \%telemetry);
my $result = jsonEncode(\%telemetry);
print($result);
@@ -90,6 +93,13 @@ print('ZoneMinder Telemetry Agent starting at '.strftime('%y/%m/%d %H:%M:%S', lo
my $lastCheck = $Config{ZM_TELEMETRY_LAST_UPLOAD};
while (1) {
while ( ! ( $dbh and $dbh->ping() ) ) {
Info('Reconnecting to db');
if ( !($dbh = zmDbConnect()) ) {
#What we do here is not that important, so just skip this interval
sleep($interval);
}
}
my $now = time();
my $since_last_check = $now - $lastCheck;
Debug("Last Check time (now($now) - lastCheck($lastCheck)) = $since_last_check > interval($interval) or force($force)");
@@ -100,23 +110,17 @@ while (1) {
}
if ((($since_last_check) > $interval) or $force) {
print "Collecting data to send to ZoneMinder Telemetry server.\n";
my $dbh = zmDbConnect();
if ($dbh) {
# Build the telemetry hash
# We should keep *BSD systems in mind when calling system commands
# Build the telemetry hash
# We should keep *BSD systems in mind when calling system commands
my %telemetry;
collectData($dbh, \%telemetry);
my $result = jsonEncode(\%telemetry);
my %telemetry;
collectData($dbh, \%telemetry);
my $result = jsonEncode(\%telemetry);
if (sendData($result)) {
ZoneMinder::Database::zmDbDo('UPDATE Config SET Value=? WHERE Name=?',
$now, 'ZM_TELEMETRY_LAST_UPLOAD');
$Config{ZM_TELEMETRY_LAST_UPLOAD} = $now;
}
zmDbDisconnect();
} else {
Error('Failed to open database. Sleeping.');
if (sendData($result)) {
ZoneMinder::Database::zmDbDo('UPDATE Config SET Value=? WHERE Name=?',
$now, 'ZM_TELEMETRY_LAST_UPLOAD');
$Config{ZM_TELEMETRY_LAST_UPLOAD} = $now;
}
} elsif (-t STDIN) {
print "ZoneMinder Telemetry Agent sleeping for $interval seconds because ($now-$lastCheck=$since_last_check > $interval)\n";

View File

@@ -387,11 +387,6 @@ if ( $version ) {
my ( $detaint_version ) = $version =~ /^([\w.]+)$/;
$version = $detaint_version;
if ( ZM_VERSION eq $version ) {
print("\nDatabase already at version $version, update skipped.\n\n");
exit(0);
}
my $start_zm = 0;
print("\nInitiating database upgrade to version ".ZM_VERSION." from version $version\n");
if ( $interactive ) {

View File

@@ -132,7 +132,7 @@ while (!$zm_terminate) {
Debug("Monitor $monitor->{Id} $monitor->{Name}, startup time $now - $startup_time $startup_elapsed <? $Config{ZM_WATCH_MAX_DELAY}");
if ($monitor->ControlId()) {
my $control = $monitor->Control();
if ($control and $control->CanReboot() and $control->open()) {
if ($control and $control->CanReboot() and $control->ping() and $control->open()) {
$control->reboot();
}
}

View File

@@ -36,7 +36,7 @@ management of the ZM daemons based on the receipt of X10 signals.
=head1 OPTIONS
-c <command>, --command=<command> - Command to issue, one of 'on','off','dim','bright','status','shutdown'
-c <command>, --command=<command> - Command to issue, one of 'on','off','dim','bright','start', 'status','shutdown'
-u <unit code>, --unit-code=<unit code> - Unit code to act on required for all commands
except 'status' (optional) and 'shutdown'
-v, --version - Prints the currently installed version of ZoneMinder
@@ -75,9 +75,6 @@ $ENV{PATH} = '/bin:/usr/bin:/usr/local/bin';
$ENV{SHELL} = '/bin/sh' if exists $ENV{SHELL};
delete @ENV{qw(IFS CDPATH ENV BASH_ENV)};
logInit();
logSetSignal();
my $command;
my $unit_code;
my $version;
@@ -102,6 +99,16 @@ if ( $command eq 'start' ) {
exit();
}
logInit();
logSetSignal();
my $zm_terminate = 0;
sub TermHandler {
Info('Received TERM, exiting');
$zm_terminate = 1;
}
$SIG{TERM} = \&TermHandler;
$SIG{INT} = \&TermHandler;
socket(CLIENT, PF_UNIX, SOCK_STREAM, 0)
or Fatal("Can't open socket: $!");
@@ -116,10 +123,8 @@ if ( !connect(CLIENT, $saddr) ) {
# Parent process just sleep and fall through
sleep(2);
logReinit();
socket(CLIENT, PF_UNIX, SOCK_STREAM, 0)
or Fatal("Can't open socket: $!");
connect(CLIENT, $saddr)
or Fatal("Can't connect: $!");
socket(CLIENT, PF_UNIX, SOCK_STREAM, 0) or Fatal("Can't open socket: $!");
connect(CLIENT, $saddr) or Fatal("Can't connect: $!");
} elsif ( defined($cpid) ) {
setpgrp();
@@ -130,7 +135,6 @@ if ( !connect(CLIENT, $saddr) ) {
}
}
# The server is there, connect to it
#print( "Writing commands\n" );
CLIENT->autoflush();
my $message = $command;
$message .= ';'.$unit_code if $unit_code;
@@ -141,7 +145,6 @@ while ( my $line = <CLIENT> ) {
print("$line\n");
}
close(CLIENT);
#print( "Finished writing, bye\n" );
exit;
#
@@ -173,8 +176,7 @@ our %pending_tasks;
sub runServer {
Info('X10 server starting');
socket(SERVER, PF_UNIX, SOCK_STREAM, 0)
or Fatal("Can't open socket: $!");
socket(SERVER, PF_UNIX, SOCK_STREAM, 0) or Fatal("Can't open socket: $!");
unlink(main::SOCK_FILE);
my $saddr = sockaddr_un(main::SOCK_FILE);
bind(SERVER, $saddr) or Fatal("Can't bind: $!");
@@ -185,28 +187,31 @@ sub runServer {
$x10 = new X10::ActiveHome(
port=>$Config{ZM_X10_DEVICE},
house_code=>$Config{ZM_X10_HOUSE_CODE},
debug=>0
debug=>1
);
Fatal("Failed to open x10 device at $Config{ZM_X10_DEVICE}") if !$x10;
loadTasks();
$x10->register_listener(\&x10listen);
my $rin = '';
vec($rin, fileno(SERVER),1) = 1;
vec($rin, $x10->select_fds(),1) = 1;
my $timeout = 0.2;
#print( 'F:'.fileno(SERVER)."\n" );
vec($rin, fileno(SERVER), 1) = 1;
vec($rin, $x10->select_fds(), 1) = 1;
my $timeout = 1.0; # Why .2? Why not 1s?
Debug(2, 'F:'.fileno(SERVER)."\n" );
my $reload = undef;
my $reload_count = 0;
my $reload_limit = $Config{ZM_X10_DB_RELOAD_INTERVAL} / $timeout;
while( 1 ) {
while ( !$zm_terminate ) {
Debug(2, "Selecting for $timeout");
my $nfound = select(my $rout = $rin, undef, undef, $timeout);
#print( "Off select, NF:$nfound, ER:$!\n" );
#print( vec( $rout, fileno(SERVER),1)."\n" );
#print( vec( $rout, $x10->select_fds(),1)."\n" );
if ( $nfound > 0 ) {
if ( vec($rout, fileno(SERVER),1) ) {
Debug(2, "Nfound $nfound");
if ( vec($rout, fileno(SERVER), 1) ) {
my $paddr = accept(CLIENT, SERVER);
my $message = <CLIENT>;
@@ -267,7 +272,7 @@ sub runServer {
}
} # end if defined result
close(CLIENT);
} elsif ( vec($rout, $x10->select_fds(),1) ) {
} elsif ( vec($rout, $x10->select_fds(), 1) ) {
$x10->handle_input();
} else {
Fatal('Bogus descriptor');
@@ -275,9 +280,11 @@ sub runServer {
} elsif ( $nfound < 0 ) {
if ( $! != EINTR ) {
Fatal("Can't select: $!");
} else {
Debug(2, "Nfound $nfound $!");
}
} else {
#print( "Select timed out\n" );
Debug(2, "Select timed out");
# Check for state changes
foreach my $monitor_id ( sort(keys(%monitor_hash) ) ) {
my $monitor = $monitor_hash{$monitor_id};
@@ -289,15 +296,13 @@ sub runServer {
if ( defined( $monitor->{LastState} ) ) {
my $task_list;
if ( ($state == STATE_ALARM || $state == STATE_ALERT)
&& ($monitor->{LastState} == STATE_IDLE || $monitor->{LastState} == STATE_TAPE)
&& ($monitor->{LastState} == STATE_IDLE )
) # Gone into alarm state
{
Debug("Applying ON_list for $monitor_id");
$task_list = $monitor->{ON_list};
} elsif ( ($state == STATE_IDLE && $monitor->{LastState} != STATE_IDLE)
|| ($state == STATE_TAPE && $monitor->{LastState} != STATE_TAPE)
) # Come out of alarm state
{
} elsif ($state == STATE_IDLE && $monitor->{LastState} != STATE_IDLE) {
# Come out of alarm state
Debug("Applying OFF_list for $monitor_id");
$task_list = $monitor->{OFF_list};
}
@@ -656,8 +661,10 @@ sub x10listen {
processTask($task);
}
}
} else {
Debug("Not for our house code ".$event->house_code().' eq '.$Config{ZM_X10_HOUSE_CODE});
} # end if correct house code
Info('Got event - '.$event->as_string());
Debug('Got event - '.$event->as_string());
}
} # end sub x10listen

View File

@@ -165,11 +165,15 @@ target_link_libraries(zm
PRIVATE
zm-core-interface)
set(GSOAP_LIBRARIES ${GSOAP_CXX_LIBRARIES})
if(HAVE_LIBOPENSSL)
set(GSOAP_LIBRARIES ${GSOAP_SSL_CXX_LIBRARIES})
endif()
if(GSOAP_FOUND)
target_link_libraries(zm
PUBLIC
${GSOAP_CXX_LIBRARIES}
${GSOAP_SSL_CXX_LIBRARIES}
${GSOAP_LIBRARIES}
${OPENSSL_SSL_LIBRARY}
${OPENSSL_CRYPTO_LIBRARY})
endif()

View File

@@ -11,6 +11,7 @@ AnalysisThread::AnalysisThread(Monitor *monitor) :
AnalysisThread::~AnalysisThread() {
Stop();
if (thread_.joinable()) thread_.join();
}
void AnalysisThread::Start() {
@@ -22,6 +23,8 @@ void AnalysisThread::Start() {
void AnalysisThread::Stop() {
terminate_ = true;
}
void AnalysisThread::Join() {
if (thread_.joinable()) thread_.join();
}

View File

@@ -16,6 +16,7 @@ class AnalysisThread {
void Start();
void Stop();
void Join();
bool Stopped() const { return terminate_; }
private:

View File

@@ -10,6 +10,7 @@ DecoderThread::DecoderThread(Monitor *monitor) :
DecoderThread::~DecoderThread() {
Stop();
if (thread_.joinable()) thread_.join();
}
void DecoderThread::Start() {
@@ -20,6 +21,9 @@ void DecoderThread::Start() {
void DecoderThread::Stop() {
terminate_ = true;
}
void DecoderThread::Join() {
if (thread_.joinable()) thread_.join();
}

View File

@@ -16,6 +16,7 @@ class DecoderThread {
void Start();
void Stop();
void Join();
private:
void Run();
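
The Stop()/Join() split added to both worker classes lets shutdown code flag every thread first and only then block on them, so no thread waits behind another's teardown. A minimal stand-alone sketch of the pattern, using a hypothetical Worker class rather than ZoneMinder's actual thread wrappers:

    #include <atomic>
    #include <chrono>
    #include <thread>

    class Worker {
     public:
      ~Worker() { Stop(); Join(); }
      void Start() { thread_ = std::thread(&Worker::Run, this); }
      void Stop() { terminate_ = true; }                        // only sets the flag, never blocks
      void Join() { if (thread_.joinable()) thread_.join(); }   // blocks until Run() has returned
     private:
      void Run() {
        while (!terminate_) std::this_thread::sleep_for(std::chrono::milliseconds(10));
      }
      std::atomic<bool> terminate_{false};
      std::thread thread_;
    };

    // Shutdown order for several workers: Stop() them all first, then Join() them all.
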

View File

@@ -661,7 +661,7 @@ void Event::Run() {
MYSQL_RES *result = zmDbFetch(sql);
if (result) {
for (int i = 0; MYSQL_ROW dbrow = mysql_fetch_row(result); i++) {
while(MYSQL_ROW dbrow = mysql_fetch_row(result)) {
storage = new Storage(atoi(dbrow[0]));
if (SetPath(storage))
break;
@@ -680,7 +680,7 @@ void Event::Run() {
result = zmDbFetch(sql);
if (result) {
for (int i = 0; MYSQL_ROW dbrow = mysql_fetch_row(result); i++) {
while (MYSQL_ROW dbrow = mysql_fetch_row(result)) {
storage = new Storage(atoi(dbrow[0]));
if (SetPath(storage))
break;
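
Dropping the for-loop counter above leaves the usual MySQL C API idiom: loop until mysql_fetch_row() returns NULL. A sketch of that idiom in isolation, assuming an already-connected MYSQL handle and a hypothetical query, with error handling kept minimal:

    #include <mysql/mysql.h>
    #include <cstdio>

    void list_storage_ids(MYSQL *db) {
      if (mysql_query(db, "SELECT Id FROM Storage ORDER BY Id")) return;  // query failed
      MYSQL_RES *result = mysql_store_result(db);
      if (!result) return;
      while (MYSQL_ROW row = mysql_fetch_row(result)) {  // NULL row ends the loop
        printf("Storage %s\n", row[0] ? row[0] : "NULL");
      }
      mysql_free_result(result);
    }
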

View File

@@ -112,7 +112,7 @@ bool EventStream::loadInitialEventData(
bool EventStream::loadEventData(uint64_t event_id) {
std::string sql = stringtf(
"SELECT `MonitorId`, `StorageId`, `Frames`, unix_timestamp( `StartDateTime` ) AS StartTimestamp, "
"unix_timestamp( `EndDateTime` ) AS EndTimestamp, "
"unix_timestamp( `EndDateTime` ) AS EndTimestamp, Length, "
"(SELECT max(`Delta`)-min(`Delta`) FROM `Frames` WHERE `EventId`=`Events`.`Id`) AS FramesDuration, "
"`DefaultVideo`, `Scheme`, `SaveJPEGs`, `Orientation`+0 FROM `Events` WHERE `Id` = %" PRIu64, event_id);
@@ -140,11 +140,11 @@ bool EventStream::loadEventData(uint64_t event_id) {
event_data->frame_count = dbrow[2] == nullptr ? 0 : atoi(dbrow[2]);
event_data->start_time = SystemTimePoint(Seconds(atoi(dbrow[3])));
event_data->end_time = dbrow[4] ? SystemTimePoint(Seconds(atoi(dbrow[4]))) : std::chrono::system_clock::now();
event_data->duration = std::chrono::duration_cast<Microseconds>(event_data->end_time - event_data->start_time);
event_data->duration = std::chrono::duration_cast<Microseconds>(dbrow[5] ? FPSeconds(atof(dbrow[5])) : event_data->end_time - event_data->start_time);
event_data->frames_duration =
std::chrono::duration_cast<Microseconds>(dbrow[5] ? FPSeconds(atof(dbrow[5])) : FPSeconds(0.0));
event_data->video_file = std::string(dbrow[6]);
std::string scheme_str = std::string(dbrow[7]);
std::chrono::duration_cast<Microseconds>(dbrow[6] ? FPSeconds(atof(dbrow[6])) : FPSeconds(0.0));
event_data->video_file = std::string(dbrow[7]);
std::string scheme_str = std::string(dbrow[8]);
if ( scheme_str == "Deep" ) {
event_data->scheme = Storage::DEEP;
} else if ( scheme_str == "Medium" ) {
@@ -152,8 +152,8 @@ bool EventStream::loadEventData(uint64_t event_id) {
} else {
event_data->scheme = Storage::SHALLOW;
}
event_data->SaveJPEGs = dbrow[8] == nullptr ? 0 : atoi(dbrow[8]);
event_data->Orientation = (Monitor::Orientation)(dbrow[9] == nullptr ? 0 : atoi(dbrow[9]));
event_data->SaveJPEGs = dbrow[9] == nullptr ? 0 : atoi(dbrow[9]);
event_data->Orientation = (Monitor::Orientation)(dbrow[10] == nullptr ? 0 : atoi(dbrow[10]));
mysql_free_result(result);
if (!monitor) {
@@ -289,12 +289,11 @@ bool EventStream::loadEventData(uint64_t event_id) {
frame.in_db);
} // end foreach db row
if (event_data->end_time.time_since_epoch() != Seconds(0)) {
Microseconds delta = (last_frame && (last_frame->delta > Microseconds(0)))
? last_frame->delta
: Microseconds( static_cast<int>(1000000 * base_fps / FPSeconds(event_data->duration).count()) );
if (event_data->end_time.time_since_epoch() != Seconds(0) and event_data->duration != Seconds(0) and event_data->frame_count > last_id) {
Microseconds delta;
if (!last_frame) {
// There were no frames in db
delta = Microseconds( static_cast<int>(1000000 * base_fps / FPSeconds(event_data->duration).count()) );
auto frame = event_data->frames.emplace_back(
1,
event_data->start_time,
@@ -306,28 +305,39 @@ bool EventStream::loadEventData(uint64_t event_id) {
last_id ++;
last_timestamp = event_data->start_time;
event_data->frame_count ++;
} else {
Debug(1, "Either no endtime or no duration, frame_count %d, last_id %d", event_data->frame_count, last_id);
delta = std::chrono::duration_cast<Microseconds>((event_data->end_time - last_timestamp)/(event_data->frame_count-last_id));
Debug(1, "Setting delta from endtime %f - %f / %d - %d",
FPSeconds(event_data->end_time.time_since_epoch()).count(),
FPSeconds(last_timestamp.time_since_epoch()).count(),
event_data->frame_count,
last_id
);
}
while (event_data->end_time > last_timestamp and !zm_terminate) {
last_timestamp += delta;
last_id ++;
if (delta > Microseconds(0)) {
while (event_data->end_time > last_timestamp and !zm_terminate) {
last_timestamp += delta;
last_id ++;
auto frame = event_data->frames.emplace_back(
last_id,
last_timestamp,
last_frame->offset + delta,
delta,
false
);
last_frame = &frame;
Debug(3, "Trailing Frame %d timestamp (%f s), offset (%f s), delta(%f s), in_db(%d)",
last_id,
FPSeconds(frame.timestamp.time_since_epoch()).count(),
FPSeconds(frame.offset).count(),
FPSeconds(frame.delta).count(),
frame.in_db);
event_data->frame_count ++;
} // end while
auto frame = event_data->frames.emplace_back(
last_id,
last_timestamp,
last_frame->offset + delta,
delta,
false
);
last_frame = &frame;
Debug(3, "Trailing Frame %d timestamp (%f s), offset (%f s), delta(%f s), in_db(%d)",
last_id,
FPSeconds(frame.timestamp.time_since_epoch()).count(),
FPSeconds(frame.offset).count(),
FPSeconds(frame.delta).count(),
frame.in_db);
event_data->frame_count ++;
} // end while
}
} // end if have endtime
// Incomplete events might not have any frame data
@@ -339,11 +349,7 @@ bool EventStream::loadEventData(uint64_t event_id) {
}
mysql_free_result(result);
if (!event_data->video_file.empty() || (monitor->GetOptVideoWriter() > 0)) {
if (event_data->video_file.empty()) {
event_data->video_file = stringtf("%" PRIu64 "-%s", event_data->event_id, "video.mp4");
}
if (!event_data->video_file.empty()) {
std::string filepath = event_data->path + "/" + event_data->video_file;
Debug(1, "Loading video file from %s", filepath.c_str());
delete ffmpeg_input;
@@ -636,13 +642,13 @@ void EventStream::processCommand(const CmdMsg *msg) {
break;
}
struct {
uint64_t event_id;
//Microseconds duration;
double duration;
//Microseconds progress;
double progress;
double fps;
int rate;
int zoom;
int scale;
@@ -654,13 +660,25 @@ void EventStream::processCommand(const CmdMsg *msg) {
status_data.event_id = event_data->event_id;
//status_data.duration = event_data->duration;
status_data.duration = std::chrono::duration<double>(event_data->duration).count();
status_data.duration = FPSeconds(event_data->duration).count();
//status_data.progress = event_data->frames[curr_frame_id-1].offset;
status_data.progress = std::chrono::duration<double>(event_data->frames[curr_frame_id-1].offset).count();
status_data.rate = replay_rate;
status_data.zoom = zoom;
status_data.scale = scale;
status_data.paused = paused;
FPSeconds elapsed = now - last_fps_update;
if (elapsed.count() > 0) {
actual_fps = (actual_fps + (frame_count - last_frame_count) / elapsed.count())/2;
Debug(1, "actual_fps %f = old + frame_count %d - last %d / elapsed %.2f from %.2f - %.2f scale %d", actual_fps, frame_count, last_frame_count,
elapsed.count(), FPSeconds(now.time_since_epoch()).count(), FPSeconds(last_fps_update.time_since_epoch()).count(), scale);
last_frame_count = frame_count;
last_fps_update = now;
}
status_data.fps = actual_fps;
Debug(2, "Event:%" PRIu64 ", Duration %f, Paused:%d, progress:%f Rate:%d, Zoom:%d Scale:%d",
status_data.event_id,
FPSeconds(status_data.duration).count(),
@@ -1026,6 +1044,7 @@ void EventStream::runStream() {
zm_terminate = true;
break;
}
frame_count++;
}
{
@@ -1059,7 +1078,8 @@ void EventStream::runStream() {
// but what if we are skipping frames? We need the distance from the last frame sent
// Also, what about reverse? needs to be absolute value
delta = abs(next_frame_data->offset - last_frame_data->offset) /frame_mod;
delta = abs(next_frame_data->offset - last_frame_data->offset);
if (frame_mod) delta /= frame_mod;
Debug(2, "New delta: %fs from last frame offset %fs - next_frame_offset %fs",
FPSeconds(delta).count(),
FPSeconds(last_frame_data->offset).count(),

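One detail worth calling out in the EventStream changes above: the STATUS reply now reports an actual playback fps, smoothed by averaging the previous estimate with the instantaneous rate measured since the last update. A stand-alone sketch of that smoothing, with hypothetical names rather than the stream class's members:

    #include <chrono>

    // Running fps estimate: new = (old + frames_since_last / seconds_since_last) / 2.
    class FpsEstimator {
     public:
      double Update(int frame_count) {
        auto now = std::chrono::steady_clock::now();
        std::chrono::duration<double> elapsed = now - last_update_;
        if (elapsed.count() > 0) {
          double instantaneous = (frame_count - last_frame_count_) / elapsed.count();
          fps_ = (fps_ + instantaneous) / 2.0;   // same halfway blend as the diff uses
          last_frame_count_ = frame_count;
          last_update_ = now;
        }
        return fps_;
      }
     private:
      double fps_ = 0.0;
      int last_frame_count_ = 0;
      std::chrono::steady_clock::time_point last_update_ = std::chrono::steady_clock::now();
    };
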
View File

@@ -171,13 +171,16 @@ void zm_dump_codecpar(const AVCodecParameters *par);
#endif
#if LIBAVUTIL_VERSION_CHECK(58, 7, 100, 7, 0)
#define zm_dump_video_frame(frame, text) Debug(1, "%s: format %d %s %dx%d linesize:%dx%d pts: %" PRId64 " keyframe: %d", \
#define zm_dump_video_frame(frame, text) Debug(1, "%s: format %d %s %dx%d linesize:%dx%dx%dx%d data:%p,%p,%p,%p pts: %" PRId64 " keyframe: %d", \
text, \
frame->format, \
av_get_pix_fmt_name((AVPixelFormat)frame->format), \
frame->width, \
frame->height, \
frame->linesize[0], frame->linesize[1], \
frame->linesize[2], frame->linesize[3], \
frame->data[0], frame->data[1], \
frame->data[2], frame->data[3], \
frame->pts, \
frame->flags && AV_FRAME_FLAG_KEY \
);

View File

@@ -188,7 +188,7 @@ int FfmpegCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
start_read_time = std::chrono::steady_clock::now();
int ret;
AVFormatContext *formatContextPtr;
int64_t lastPTS;
int64_t lastPTS = -1;
if ( mSecondFormatContext and
(
@@ -242,12 +242,13 @@ int FfmpegCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
}
return -1;
}
if ( packet->stream_index == mAudioStreamId) {
lastPTS = mLastAudioPTS;
} else if ( packet->stream_index == mVideoStreamId) {
if ( packet->stream_index == mVideoStreamId) {
lastPTS = mLastVideoPTS;
} else if (packet->stream_index == mAudioStreamId) {
lastPTS = mLastAudioPTS;
} else {
Debug(1, "Have packet which isn't for video or audio stream.");
Debug(1, "Have packet (%d) which isn't for video (%d) or audio stream (%d).", packet->stream_index, mVideoStreamId, mAudioStreamId);
return 0;
}
}
@@ -261,11 +262,13 @@ int FfmpegCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
Info("Suspected 32bit wraparound in input pts. %" PRId64, packet->pts);
return -1;
} else if (packet->pts - lastPTS < -10*stream->time_base.den) {
// -10 is for 10 seconds. Avigilon cameras seem to jump around by about 36 constantly
double pts_time = static_cast<double>(av_rescale_q(packet->pts, stream->time_base, AV_TIME_BASE_Q)) / AV_TIME_BASE;
double last_pts_time = static_cast<double>(av_rescale_q(lastPTS, stream->time_base, AV_TIME_BASE_Q)) / AV_TIME_BASE;
logPrintf(Logger::WARNING + monitor->Importance(), "Stream pts jumped back in time too far. pts %.2f - last pts %.2f = %.2f > 10seconds",
pts_time, last_pts_time, pts_time - last_pts_time);
if (!monitor->WallClockTimestamps()) {
// -10 is for 10 seconds. Avigilon cameras seem to jump around by about 36 constantly
double pts_time = static_cast<double>(av_rescale_q(packet->pts, stream->time_base, AV_TIME_BASE_Q)) / AV_TIME_BASE;
double last_pts_time = static_cast<double>(av_rescale_q(lastPTS, stream->time_base, AV_TIME_BASE_Q)) / AV_TIME_BASE;
logPrintf(Logger::WARNING + monitor->Importance(), "Stream pts jumped back in time too far. pts %.2f - last pts %.2f = %.2f > 10seconds",
pts_time, last_pts_time, pts_time - last_pts_time);
}
if (error_count > 5)
return -1;
error_count += 1;
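
For context on the rescale calls above: av_rescale_q() converts a timestamp between two time bases, here into AV_TIME_BASE_Q (microsecond ticks) before dividing down to seconds, which is what makes the 'jumped back more than 10 seconds' comparison meaningful across cameras with different time bases. A minimal sketch, assuming only the libavutil headers:

    extern "C" {
    #include <libavutil/avutil.h>
    #include <libavutil/mathematics.h>
    }

    // Convert a pts expressed in the stream's time_base into floating-point seconds.
    static double pts_to_seconds(int64_t pts, AVRational time_base) {
      if (pts == AV_NOPTS_VALUE) return 0.0;
      int64_t in_us = av_rescale_q(pts, time_base, AV_TIME_BASE_Q);  // microsecond ticks
      return static_cast<double>(in_us) / AV_TIME_BASE;
    }

    // A backwards jump is then simply: pts_to_seconds(last_pts, tb) - pts_to_seconds(pts, tb) > 10.0
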
@@ -288,7 +291,7 @@ int FfmpegCamera::Capture(std::shared_ptr<ZMPacket> &zm_packet) {
mFirstVideoPTS = packet->pts;
mLastVideoPTS = packet->pts - mFirstVideoPTS;
} else {
} else if (stream == mAudioStream) {
if (mFirstAudioPTS == AV_NOPTS_VALUE)
mFirstAudioPTS = packet->pts;
@@ -393,7 +396,8 @@ int FfmpegCamera::OpenFfmpeg() {
mVideoStreamId = -1;
mAudioStreamId = -1;
for (unsigned int i=0; i < mFormatContext->nb_streams; i++) {
const AVStream *stream = mFormatContext->streams[i];
AVStream *stream = mFormatContext->streams[i];
zm_dump_stream_format(mFormatContext, i, 0, 0);
if (is_video_stream(stream)) {
if (!(stream->codecpar->width && stream->codecpar->height)) {
Warning("No width and height in video stream. Trying again");
@@ -401,9 +405,16 @@ int FfmpegCamera::OpenFfmpeg() {
}
if (mVideoStreamId == -1) {
mVideoStreamId = i;
mVideoStream = mFormatContext->streams[i];
mVideoStream = stream;
} else {
Debug(2, "Have another video stream.");
if (stream->codecpar->width == width and stream->codecpar->height == height) {
Debug(1, "Choosing alternate video stream because it matches our resolution.");
mVideoStreamId = i;
mVideoStream = stream;
} else {
stream->discard = AVDISCARD_ALL;
}
}
} else if (is_audio_stream(stream)) {
if (mAudioStreamId == -1) {
@@ -412,6 +423,8 @@ int FfmpegCamera::OpenFfmpeg() {
} else {
Debug(2, "Have another audio stream.");
}
} else {
Debug(1, "Unknown stream type for stream %d", i);
}
} // end foreach stream
@@ -544,6 +557,7 @@ int FfmpegCamera::OpenFfmpeg() {
ret = av_dict_parse_string(&opts, mOptions.c_str(), "=", ",", 0);
// reorder_queue is for avformat not codec
av_dict_set(&opts, "reorder_queue_size", nullptr, AV_DICT_MATCH_CASE);
av_dict_set(&opts, "probesize", nullptr, AV_DICT_MATCH_CASE);
}
ret = avcodec_open2(mVideoCodecContext, mVideoCodec, &opts);

View File

@@ -533,8 +533,6 @@ void Logger::logPrint(bool hex, const char *filepath, int line, int level, const
now_sec, static_cast<int64>(now_frac.count()), mId.c_str(), staticConfig.SERVER_ID, tid, level, classString,
escapedString.c_str(), file, line);
dbQueue.push(std::move(sql_string));
} else {
puts("Db is closed");
}
} // end if level <= mDatabaseLevel

View File

@@ -25,6 +25,7 @@
#include "zm_fifo.h"
#include "zm_file_camera.h"
#include "zm_monitorlink_expression.h"
#include "zm_mqtt.h"
#include "zm_remote_camera.h"
#include "zm_remote_camera_http.h"
#include "zm_remote_camera_nvsocket.h"
@@ -35,7 +36,6 @@
#include "zm_uri.h"
#include "zm_zone.h"
#if ZM_HAS_V4L2
#include "zm_local_camera.h"
#endif // ZM_HAS_V4L2
@@ -351,7 +351,7 @@ Monitor::Monitor() :
"AnalysisFPSLimit, AnalysisUpdateDelay, MaxFPS, AlarmMaxFPS,"
"Device, Channel, Format, V4LMultiBuffer, V4LCapturesPerFrame, " // V4L Settings
"Protocol, Method, Options, User, Pass, Host, Port, Path, SecondPath, Width, Height, Colours, Palette, Orientation+0, Deinterlacing, RTSPDescribe, "
"SaveJPEGs, VideoWriter, EncoderParameters,
"SaveJPEGs, VideoWriter, EncoderParameters,"
"OutputCodec, Encoder, OutputContainer, RecordAudio, WallClockTimestamps,"
"Brightness, Contrast, Hue, Colour, "
"EventPrefix, LabelFormat, LabelX, LabelY, LabelSize,"
@@ -716,7 +716,9 @@ void Monitor::Load(MYSQL_ROW dbrow, bool load_zones=true, Purpose p = QUERY) {
std::string mqtt_subscriptions_string = std::string(dbrow[col] ? dbrow[col] : "");
mqtt_subscriptions = Split(mqtt_subscriptions_string, ',');
col++;
Error("MQTT enabled ? %d, subs %s", mqtt_enabled, mqtt_subscriptions_string.c_str());
Debug(1, "MQTT enabled ? %d, subs %s", mqtt_enabled, mqtt_subscriptions_string.c_str());
#else
Debug(1, "Not compiled with MQTT");
#endif
startup_delay = dbrow[col] ? atoi(dbrow[col]) : 0;
col++;
@@ -999,7 +1001,7 @@ bool Monitor::connect() {
map_fd = open(mem_file.c_str(), O_RDWR);
} else {
umask(0);
map_fd = open(mem_file.c_str(), O_RDWR|O_CREAT, (mode_t)0666);
map_fd = open(mem_file.c_str(), O_RDWR|O_CREAT, (mode_t)0660);
}
if (map_fd < 0) {
@@ -1376,11 +1378,11 @@ SystemTimePoint Monitor::GetTimestamp(int index) const {
return {};
}
unsigned int Monitor::GetLastReadIndex() const {
int Monitor::GetLastReadIndex() const {
return ( shared_data->last_read_index != image_buffer_count ? shared_data->last_read_index : -1 );
}
unsigned int Monitor::GetLastWriteIndex() const {
int Monitor::GetLastWriteIndex() const {
return ( shared_data->last_write_index != image_buffer_count ? shared_data->last_write_index : -1 );
}
@@ -1853,10 +1855,16 @@ void Monitor::UpdateFPS() {
last_camera_bytes = new_camera_bytes;
last_fps_time = now;
FPSeconds db_elapsed = now - last_status_time;
if (db_elapsed > Seconds(10)) {
std::string sql = stringtf(
"UPDATE LOW_PRIORITY Monitor_Status SET Status='Connected', CaptureFPS = %.2lf, CaptureBandwidth=%u, AnalysisFPS = %.2lf, UpdatedOn=NOW() WHERE MonitorId=%u",
new_capture_fps, new_capture_bandwidth, new_analysis_fps, id);
"INSERT INTO Monitor_Status (MonitorId, Status,CaptureFPS,CaptureBandwidth, AnalysisFPS, UpdatedOn) VALUES (%u, 'Connected',%.2lf, %u, %.2lf, NOW()) ON DUPLICATE KEY "
"UPDATE Status='Connected', CaptureFPS = %.2lf, CaptureBandwidth=%u, AnalysisFPS = %.2lf, UpdatedOn=NOW()",
id, new_capture_fps, new_capture_bandwidth, new_analysis_fps,
new_capture_fps, new_capture_bandwidth, new_analysis_fps);
dbQueue.push(std::move(sql));
last_status_time = now;
}
} // now != last_fps_time
} // void Monitor::UpdateFPS()
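
The UpdateFPS() hunk above replaces a bare UPDATE (which silently does nothing when the Monitor_Status row does not exist yet) with an upsert, and throttles the write to once every 10 seconds. The SQL shape, reduced to a sketch around a hypothetical helper; it assumes Monitor_Status keeps a unique key on MonitorId so ON DUPLICATE KEY UPDATE can fire:

    #include <cstdio>
    #include <string>

    std::string upsert_status_sql(unsigned id, double capture_fps, unsigned bandwidth, double analysis_fps) {
      char buf[512];
      // INSERT covers a freshly-added monitor; the UPDATE branch runs on every later refresh.
      snprintf(buf, sizeof(buf),
        "INSERT INTO Monitor_Status (MonitorId, Status, CaptureFPS, CaptureBandwidth, AnalysisFPS, UpdatedOn)"
        " VALUES (%u, 'Connected', %.2f, %u, %.2f, NOW())"
        " ON DUPLICATE KEY UPDATE Status='Connected', CaptureFPS=%.2f, CaptureBandwidth=%u, AnalysisFPS=%.2f, UpdatedOn=NOW()",
        id, capture_fps, bandwidth, analysis_fps, capture_fps, bandwidth, analysis_fps);
      return std::string(buf);
    }
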
@@ -1952,7 +1960,7 @@ bool Monitor::Analyse() {
Debug(4, "Triggered on ONVIF");
Event::StringSet noteSet;
noteSet.insert("ONVIF");
noteSet.insert(onvif->lastTopic() + '/' + onvif->lastValue());
onvif->setNotes(noteSet);
noteSetMap[MOTION_CAUSE] = noteSet;
cause += "ONVIF";
// If the camera isn't going to send an event close, we need to close it here, but only after it has actually triggered an alarm.
@@ -2593,7 +2601,9 @@ int Monitor::Capture() {
packet->timestamp = std::chrono::system_clock::now();
shared_data->heartbeat_time = std::chrono::system_clock::to_time_t(packet->timestamp);
int captureResult = camera->Capture(packet);
Debug(4, "Back from capture result=%d image count %d", captureResult, shared_data->image_count);
Debug(4, "Back from capture result=%d image count %d timestamp %" PRId64, captureResult, shared_data->image_count,
static_cast<int64>(std::chrono::duration_cast<Microseconds>(packet->timestamp.time_since_epoch()).count())
);
if (captureResult < 0) {
// Unable to capture image
@@ -2747,11 +2757,11 @@ bool Monitor::setupConvertContext(const AVFrame *input_frame, const Image *image
bool Monitor::Decode() {
ZMLockedPacket *packet_lock = packetqueue.get_packet_and_increment_it(decoder_it);
if (!packet_lock) return false;
std::shared_ptr<ZMPacket> packet = packet_lock->packet_;
if (packet->codec_type != AVMEDIA_TYPE_VIDEO) {
packet->decoded = true;
Debug(4, "Not video");
//packetqueue.unlock(packet_lock);
delete packet_lock;
return true; // Don't need decode
}
@@ -2759,7 +2769,7 @@ bool Monitor::Decode() {
if ((!packet->image) and packet->packet->size and !packet->in_frame) {
if ((decoding == DECODING_ALWAYS)
or
((decoding == DECODING_ONDEMAND) and this->hasViewers() )
((decoding == DECODING_ONDEMAND) and (this->hasViewers() or (shared_data->last_write_index == image_buffer_count)))
or
((decoding == DECODING_KEYFRAMES) and packet->keyframe)
or
@@ -2790,7 +2800,7 @@ bool Monitor::Decode() {
Debug(1, "Ret from decode %d, zm_terminate %d", ret, zm_terminate);
}
} else {
Debug(1, "Not Decoding ? %s", Decoding_Strings[decoding].c_str());
Debug(1, "Not Decoding frame %d? %s", packet->image_index, Decoding_Strings[decoding].c_str());
} // end if doing decoding
} else {
Debug(1, "No packet.size(%d) or packet->in_frame(%p). Not decoding", packet->packet->size, packet->in_frame.get());
@@ -3012,13 +3022,17 @@ Event * Monitor::openEvent(
if (!event_start_command.empty()) {
if (fork() == 0) {
Logger *log = Logger::fetch();
std::string log_id = log->id();
logTerm();
int fdlimit = (int)sysconf(_SC_OPEN_MAX);
for (int i = 0; i < fdlimit; i++) close(i);
execlp(event_start_command.c_str(),
event_start_command.c_str(),
std::to_string(event->Id()).c_str(),
std::to_string(event->MonitorId()).c_str(),
nullptr);
Logger *log = Logger::fetch();
log->databaseLevel(Logger::NOLOG);
logInit(log_id.c_str());
Error("Error execing %s: %s", event_start_command.c_str(), strerror(errno));
std::quick_exit(0);
}
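
The fork/exec block above has to tear the logger down before the exec and rebuild it when execlp() fails, because the child closes every inherited descriptor. Stripped of the logger handling, the hook-runner pattern looks roughly like this (hypothetical helper; this sketch keeps stdin/stdout/stderr open so the failure message has somewhere to go):

    #include <unistd.h>
    #include <cstdio>
    #include <string>

    // Launch an external hook with two arguments without blocking the caller.
    void run_hook(const std::string &cmd, const std::string &arg1, const std::string &arg2) {
      if (fork() != 0) return;                               // parent (or failed fork) carries on
      int fdlimit = static_cast<int>(sysconf(_SC_OPEN_MAX));
      for (int fd = 3; fd < fdlimit; fd++) close(fd);        // drop inherited descriptors, keep stdio
      execlp(cmd.c_str(), cmd.c_str(), arg1.c_str(), arg2.c_str(), (char *)nullptr);
      perror(cmd.c_str());                                   // only reached if execlp() failed
      _exit(1);                                              // skip the parent's atexit handlers
    }
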
@@ -3048,12 +3062,16 @@ void Monitor::closeEvent() {
if (!command.empty()) {
if (fork() == 0) {
Logger *log = Logger::fetch();
std::string log_id = log->id();
logTerm();
int fdlimit = (int)sysconf(_SC_OPEN_MAX);
for (int i = 0; i < fdlimit; i++) close(i);
execlp(command.c_str(), command.c_str(),
std::to_string(event_id).c_str(),
std::to_string(monitor_id).c_str(),
nullptr);
Logger *log = Logger::fetch();
log->databaseLevel(Logger::NOLOG);
logInit(log_id.c_str());
Error("Error execing %s: %s", command.c_str(), strerror(errno));
std::quick_exit(0);
}
@@ -3363,27 +3381,34 @@ int Monitor::PreCapture() const { return camera->PreCapture(); }
int Monitor::PostCapture() const { return camera->PostCapture(); }
int Monitor::Pause() {
Debug(1, "Stopping packetqueue");
// Wake everyone up
packetqueue.stop();
// Because the stream indexes may change we have to clear out the packetqueue
if (decoder) {
Debug(1, "Decoder stopping");
decoder->Stop();
Debug(1, "Decoder stopped");
}
if (convert_context) {
sws_freeContext(convert_context);
convert_context = nullptr;
}
if (decoder) decoder->Stop();
if (analysis_thread) {
analysis_thread->Stop();
Debug(1, "Analysis stopped");
}
Debug(1, "Stopping packetqueue");
// Wake everyone up
packetqueue.stop();
if (decoder) {
Debug(1, "Joining decode");
decoder->Join();
if (convert_context) {
sws_freeContext(convert_context);
convert_context = nullptr;
}
decoding_image_count = 0;
}
if (analysis_thread) {
Debug(1, "Joining analysis");
analysis_thread->Join();
}
// Must close event before closing camera because it uses in_streams
if (close_event_thread.joinable()) {
Debug(1, "Joining event thread");

View File

@@ -181,30 +181,30 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
int32_t last_read_index; /* +8 */
int32_t image_count; /* +12 */
uint32_t state; /* +16 */
double capture_fps; // Current capturing fps
double analysis_fps; // Current analysis fps
double latitude;
double longitude;
uint64_t last_event_id; /* +48 */
uint32_t action; /* +56 */
int32_t brightness; /* +60 */
int32_t hue; /* +64 */
int32_t colour; /* +68 */
int32_t contrast; /* +72 */
int32_t alarm_x; /* +76 */
int32_t alarm_y; /* +80 */
uint8_t valid; /* +81 */
uint8_t capturing; /* +82 */
uint8_t analysing; /* +83 */
uint8_t recording; /* +84 */
uint8_t signal; /* +85 */
uint8_t format; /* +86 */
uint8_t reserved1; /* +87 */
//uint8_t reserved2; /* +0 */
uint32_t imagesize; /* +88 */
uint32_t last_frame_score; /* +72 */
uint32_t audio_frequency; /* +76 */
uint32_t audio_channels; /* +80 */
double capture_fps; /* +20 Current capturing fps */
double analysis_fps; /* +28 Current analysis fps */
double latitude; /* +36 */
double longitude; /* +44 */
uint64_t last_event_id; /* +52 */
uint32_t action; /* +60 */
int32_t brightness; /* +64 */
int32_t hue; /* +68 */
int32_t colour; /* +72 */
int32_t contrast; /* +76 */
int32_t alarm_x; /* +80 */
int32_t alarm_y; /* +84 */
uint8_t valid; /* +88 */
uint8_t capturing; /* +89 */
uint8_t analysing; /* +90 */
uint8_t recording; /* +91 */
uint8_t signal; /* +92 */
uint8_t format; /* +93 */
uint8_t reserved1; /* +94 */
uint8_t reserved2; /* +95 */
uint32_t imagesize; /* +96 */
uint32_t last_frame_score; /* +100 */
uint32_t audio_frequency; /* +104 */
uint32_t audio_channels; /* +108 */
//uint32_t reserved3; /* +0 */
/*
** This keeps 32bit time_t and 64bit time_t identical and compatible as long as time is before 2038.
@@ -212,32 +212,33 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
** Because startup_time is 64bit it may be aligned to a 64bit boundary. So it's offset SHOULD be a multiple
** of 8. Add or delete epadding's to achieve this.
*/
union { /* +84 */
union { /* +112 */
time_t startup_time; /* When the zmc process started. zmwatch uses this to see how long the process has been running without getting any images */
uint64_t extrapad1;
};
union { /* +92 */
union { /* +120 */
time_t heartbeat_time; /* Constantly updated by zmc. Used to determine if the process is alive or hung or dead */
uint64_t extrapad2;
};
union { /* +100 */
union { /* +128 */
time_t last_write_time;
uint64_t extrapad3;
};
union { /* +108 */
union { /* +136 */
time_t last_read_time;
uint64_t extrapad4;
};
union { /* +116 */
union { /* +144 */
time_t last_viewed_time;
uint64_t extrapad5;
};
uint8_t control_state[256]; /* +124 */
uint8_t control_state[256]; /* +152 */
char alarm_cause[256];
char video_fifo_path[64];
char audio_fifo_path[64];
char janus_pin[64];
char alarm_cause[256]; /* 408 */
char video_fifo_path[64]; /* 664 */
char audio_fifo_path[64]; /* 728 */
char janus_pin[64]; /* 792 */
/* 856 total? */
} SharedData;
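
Because SharedData is a memory-mapped structure read by zmu, the web console and the Perl bindings, the offset comments above are effectively part of the ABI. They can also be checked by the compiler; a sketch of the idea against a cut-down hypothetical struct (not the real SharedData), assuming a typical 64-bit ABI where double is 8-byte aligned:

    #include <cstddef>
    #include <cstdint>

    struct DemoShared {
      uint32_t size;         /* +0  */
      int32_t  last_write;   /* +4  */
      uint32_t state;        /* +8  */
      double   capture_fps;  /* not +12: alignment pads this member up to +16 */
    };

    static_assert(offsetof(DemoShared, capture_fps) == 16, "layout drifted from the documented offsets");
    static_assert(sizeof(DemoShared) == 24, "trailing padding keeps the struct 8-byte aligned");
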
enum TriggerState : uint32 {
@@ -248,12 +249,12 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
/* sizeof(TriggerData) expected to be 560 on 32bit & and 64bit */
typedef struct {
uint32_t size;
TriggerState trigger_state;
uint32_t trigger_score;
uint32_t padding;
char trigger_cause[32];
char trigger_text[256];
uint32_t size; /* 920 */
TriggerState trigger_state; /* 924 */
uint32_t trigger_score; /* 928 */
uint32_t padding; /* 936 */
char trigger_cause[32]; /* 968 */
char trigger_text[256]; /* 1224 */
char trigger_showtext[256];
} TriggerData;
@@ -328,6 +329,7 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
bool healthy;
std::string last_topic;
std::string last_value;
void SetNoteSet(Event::StringSet &noteSet);
#ifdef WITH_GSOAP
struct soap *soap = nullptr;
_tev__CreatePullPointSubscription request;
@@ -338,6 +340,8 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
_wsnt__RenewResponse wsnt__RenewResponse;
PullPointSubscriptionBindingProxy proxyEvent;
void set_credentials(struct soap *soap);
std::unordered_map<std::string, std::string> alarms;
std::mutex alarms_mutex;
#endif
public:
explicit ONVIF(Monitor *parent_);
@@ -347,8 +351,7 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
bool isAlarmed() const { return alarmed; };
void setAlarmed(bool p_alarmed) { alarmed = p_alarmed; };
bool isHealthy() const { return healthy; };
const std::string &lastTopic() const { return last_topic; };
const std::string &lastValue() const { return last_value; };
void setNotes(Event::StringSet &noteSet) { SetNoteSet(noteSet); };
};
class AmcrestAPI {
@@ -566,6 +569,7 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
State state;
SystemTimePoint start_time;
SystemTimePoint last_fps_time;
SystemTimePoint last_status_time;
SystemTimePoint last_analysis_fps_time;
SystemTimePoint auto_resume_time;
unsigned int last_motion_score;
@@ -682,7 +686,6 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
}
return false;
}
inline unsigned int Id() const { return id; }
inline const char *Name() const { return name.c_str(); }
inline bool Deleted() const { return deleted; }
@@ -735,11 +738,11 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
inline const char *EventPrefix() const { return event_prefix.c_str(); }
inline bool Ready() const {
if (!packetqueue.get_max_keyframe_interval()) {
Debug(4, "No keyframe interval.");
Debug(4, "Not ready because no keyframe interval.");
return false;
}
if (decoding_image_count >= ready_count) {
Debug(4, "Ready because image_count(%d) >= ready_count(%d)", decoding_image_count, ready_count);
if (decoding_image_count > ready_count) {
Debug(4, "Ready because decoding_image_count(%d) > ready_count(%d)", decoding_image_count, ready_count);
return true;
}
Debug(4, "Not ready because decoding_image_count(%d) <= ready_count(%d)", decoding_image_count, ready_count);
@@ -764,17 +767,9 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
bool hasViewers() {
if (shared_data && shared_data->valid) {
SystemTimePoint now = std::chrono::system_clock::now();
Debug(3, "Last viewed %" PRId64 " seconds ago",
static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count())
-
shared_data->last_viewed_time
);
return (
(
static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count())
-
shared_data->last_viewed_time
) > 10 ? false : true);
int64 intNow = static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count());
Debug(3, "Last viewed %" PRId64 " seconds ago", intNow - shared_data->last_viewed_time);
return (((!shared_data->last_viewed_time) or ((intNow - shared_data->last_viewed_time)) > 10)) ? false : true;
}
return false;
}
@@ -846,8 +841,8 @@ class Monitor : public std::enable_shared_from_this<Monitor> {
unsigned int GetCaptureMaxFPS() const { return capture_max_fps; }
Microseconds GetCaptureDelay() const { return capture_delay; }
Microseconds GetAlarmCaptureDelay() const { return alarm_capture_delay; }
unsigned int GetLastReadIndex() const;
unsigned int GetLastWriteIndex() const;
int GetLastReadIndex() const;
int GetLastWriteIndex() const;
uint64_t GetLastEventId() const;
double GetFPS() const;
void UpdateFPS();

View File

@@ -39,7 +39,6 @@ Monitor::AmcrestAPI::~AmcrestAPI() {
int Monitor::AmcrestAPI::start() {
// init the transfer and start it in multi-handle
int running_handles;
long response_code;
CURLMcode curl_error;
if (Amcrest_handle != nullptr) { // potentially clean up the old handle
curl_multi_remove_handle(curl_multi, Amcrest_handle);
@@ -49,6 +48,7 @@ int Monitor::AmcrestAPI::start() {
std::string full_url = parent->onvif_url;
if (full_url.back() != '/') full_url += '/';
full_url += "eventManager.cgi?action=attach&codes=[VideoMotion]";
Debug(1, "AMCREST url is %s", full_url.c_str());
Amcrest_handle = curl_easy_init();
if (!Amcrest_handle) {
Warning("Handle is null!");
@@ -66,15 +66,20 @@ int Monitor::AmcrestAPI::start() {
}
curl_error = curl_multi_perform(curl_multi, &running_handles);
if (curl_error == CURLM_OK) {
long response_code;
curl_easy_getinfo(Amcrest_handle, CURLINFO_RESPONSE_CODE, &response_code);
int msgq = 0;
struct CURLMsg *m = curl_multi_info_read(curl_multi, &msgq);
if (m && (m->msg == CURLMSG_DONE)) {
Warning("AMCREST Libcurl exited Early: %i", m->data.result);
} else {
Debug(1, "AMCREST response code %ld, response %s", response_code, amcrest_response.c_str());
}
curl_multi_wait(curl_multi, nullptr, 0, 300, nullptr);
curl_error = curl_multi_perform(curl_multi, &running_handles);
} else {
Debug(1, "Error code %i", curl_error);
}
if ((curl_error == CURLM_OK) && (running_handles > 0)) {
@@ -84,6 +89,7 @@ int Monitor::AmcrestAPI::start() {
Warning("AMCREST Response: %s", amcrest_response.c_str());
Warning("AMCREST Seeing %i streams, and error of %i, url: %s",
running_handles, curl_error, full_url.c_str());
long response_code;
curl_easy_getinfo(Amcrest_handle, CURLINFO_OS_ERRNO, &response_code);
Warning("AMCREST Response code: %lu", response_code);
}

View File

@@ -35,6 +35,11 @@ Monitor::ONVIF::~ONVIF() {
#ifdef WITH_GSOAP
if (soap != nullptr) {
Debug(1, "Tearing Down Onvif");
//We have lost ONVIF clear previous alarm topics
alarms.clear();
//Set alarmed to false so we don't get stuck recording
alarmed = false;
Debug(1, "ONVIF Alarms Cleared: Alarms count is %zu, alarmed is %s", alarms.size(), alarmed ? "true": "false");
_wsnt__Unsubscribe wsnt__Unsubscribe;
_wsnt__UnsubscribeResponse wsnt__UnsubscribeResponse;
const char *RequestMessageID = parent->soap_wsa_compl ? soap_wsa_rand_uuid(soap) : "RequestMessageID";
@@ -157,7 +162,7 @@ void Monitor::ONVIF::WaitForMessage() {
int result = proxyEvent.PullMessages(response.SubscriptionReference.Address, nullptr, &tev__PullMessages, tev__PullMessagesResponse);
if (result != SOAP_OK) {
const char *detail = soap_fault_detail(soap);
Debug(1, "Result of getting ONVIF messages= %d soap_fault_string=%s detail=%s",
Debug(1, "Result of getting ONVIF result=%d soap_fault_string=%s detail=%s",
result, soap_fault_string(soap), detail ? detail : "null");
if (result != SOAP_EOF) { //Ignore the timeout error
Error("Failed to get ONVIF messages! %d %s", result, soap_fault_string(soap));
@@ -176,27 +181,38 @@ void Monitor::ONVIF::WaitForMessage() {
(msg->Message.__any.elts->next->elts->atts->next != nullptr) &&
(msg->Message.__any.elts->next->elts->atts->next->text != nullptr)
) {
Info("ONVIF Got Motion Alarm! %s %s", msg->Topic->__any.text, msg->Message.__any.elts->next->elts->atts->next->text);
last_topic = msg->Topic->__any.text;
last_value = msg->Message.__any.elts->next->elts->atts->next->text;
Info("ONVIF Got Motion Alarm! %s %s", last_topic.c_str(), last_value.c_str());
// Apparently simple motion events, the value is boolean, but for people detection can be things like isMotion, isPeople
if (strcmp(msg->Message.__any.elts->next->elts->atts->next->text, "false") == 0) {
if (last_value.find("false") == 0) {
Info("Triggered off ONVIF");
alarmed = false;
{
std::unique_lock<std::mutex> lck(alarms_mutex);
alarms.erase(last_topic);
}
Debug(1, "ONVIF Alarms Empty: Alarms count is %zu, alarmed is %s, empty is %d ", alarms.size(), alarmed ? "true": "false", alarms.empty());
if (alarms.empty()) {
alarmed = false;
}
if (!parent->Event_Poller_Closes_Event) { //If we get a close event, then we know to expect them.
parent->Event_Poller_Closes_Event = true;
Info("Setting ClosesEvent");
}
} else {
// Event Start
Info("Triggered on ONVIF");
if (!alarmed) {
Info("Triggered Event");
alarmed = true;
last_topic = msg->Topic->__any.text;
last_value = msg->Message.__any.elts->next->elts->atts->next->text;
// Why sleep?
std::this_thread::sleep_for(std::chrono::seconds(1)); //thread sleep
Info("Triggered Start on ONVIF");
if (alarms.count(last_topic) == 0) {
alarms[last_topic] = last_value;
if (!alarmed) {
Info("Triggered Start Event on ONVIF");
alarmed = true;
// Why sleep?
std::this_thread::sleep_for(std::chrono::seconds(1)); //thread sleep
}
}
}
Debug(1, "ONVIF Alarms count is %zu, alarmed is %s", alarms.size(), alarmed ? "true": "false");
} else {
Debug(1, "ONVIF Got a message that we couldn't parse");
if ((msg->Topic != nullptr) && (msg->Topic->__any.text != nullptr)) {
@@ -243,7 +259,7 @@ void Monitor::ONVIF::WaitForMessage() {
//ONVIF Set Credentials
void Monitor::ONVIF::set_credentials(struct soap *soap) {
soap_wsse_delete_Security(soap);
soap_wsse_add_Timestamp(soap, nullptr, 10);
soap_wsse_add_Timestamp(soap, "Time", 10);
soap_wsse_add_UsernameTokenDigest(soap, "Auth", parent->onvif_username.c_str(), parent->onvif_password.c_str());
}
@@ -269,3 +285,18 @@ int SOAP_ENV__Fault(struct soap *soap, char *faultcode, char *faultstring, char
return soap_send_empty_response(soap, SOAP_OK);
}
#endif
void Monitor::ONVIF::SetNoteSet(Event::StringSet &noteSet) {
#ifdef WITH_GSOAP
std::unique_lock<std::mutex> lck(alarms_mutex);
if (alarms.empty()) return;
std::string note = "";
for (auto it = alarms.begin(); it != alarms.end(); ++it) {
note = it->first + "/" + it->second;
noteSet.insert(note);
}
#endif
return;
}
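
The new alarms map effectively reference-counts overlapping ONVIF topics: alarmed only drops back to false once every topic that reported motion has also reported it clear, and SetNoteSet() turns whatever is still active into event notes. A stripped-down sketch of that bookkeeping, independent of gSOAP and of the Monitor class:

    #include <mutex>
    #include <string>
    #include <unordered_map>

    class AlarmTracker {
     public:
      // Returns true while at least one topic is still reporting motion.
      bool OnEvent(const std::string &topic, const std::string &value, bool active) {
        std::unique_lock<std::mutex> lck(mutex_);
        if (active)
          alarms_[topic] = value;   // start or refresh this topic
        else
          alarms_.erase(topic);     // this topic cleared; others may still hold the alarm
        return !alarms_.empty();
      }
     private:
      std::mutex mutex_;
      std::unordered_map<std::string, std::string> alarms_;
    };
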

View File

@@ -247,6 +247,8 @@ void MonitorStream::processCommand(const CmdMsg *msg) {
bool paused;
bool enabled;
bool forced;
int score;
int analysing;
} status_data;
status_data.id = monitor->Id();
@@ -278,6 +280,8 @@ void MonitorStream::processCommand(const CmdMsg *msg) {
status_data.buffer_level = (MOD_ADD( (temp_write_index-temp_read_index), 0, temp_image_buffer_count )*100)/temp_image_buffer_count;
else
status_data.buffer_level = 0;
status_data.analysing = monitor->shared_data->analysing;
status_data.score = monitor->shared_data->last_frame_score;
}
status_data.delayed = delayed;
status_data.paused = paused;
@@ -285,7 +289,7 @@ void MonitorStream::processCommand(const CmdMsg *msg) {
status_data.delay = FPSeconds(now - last_frame_sent).count();
status_data.zoom = zoom;
status_data.scale = scale;
Debug(2, "viewing fps: %.2f capture_fps: %.2f analysis_fps: %.2f Buffer Level:%d, Delayed:%d, Paused:%d, Rate:%d, delay:%.3f, Zoom:%d, Enabled:%d Forced:%d",
Debug(2, "viewing fps: %.2f capture_fps: %.2f analysis_fps: %.2f Buffer Level:%d, Delayed:%d, Paused:%d, Rate:%d, delay:%.3f, Zoom:%d, Enabled:%d Forced:%d score: %d",
status_data.fps,
status_data.capture_fps,
status_data.analysis_fps,
@@ -296,7 +300,8 @@ void MonitorStream::processCommand(const CmdMsg *msg) {
status_data.delay,
status_data.zoom,
status_data.enabled,
status_data.forced
status_data.forced,
status_data.score
);
DataMsg status_msg;
@@ -355,7 +360,7 @@ bool MonitorStream::sendFrame(const std::string &filepath, SystemTimePoint times
TimePoint::duration frame_send_time = send_end_time - send_start_time;
if (frame_send_time > Milliseconds(lround(Milliseconds::period::den / maxfps))) {
Info("Frame send time %" PRIi64 " ms too slow, throttling maxfps to %.2f",
Debug(1, "Frame send time %" PRIi64 " ms too slow, throttling maxfps to %.2f",
static_cast<int64>(std::chrono::duration_cast<Milliseconds>(frame_send_time).count()),
maxfps);
}
@@ -486,6 +491,10 @@ void MonitorStream::runStream() {
}
openComms();
std::thread command_processor;
if (connkey) {
command_processor = std::thread(&MonitorStream::checkCommandQueue, this);
}
if (type == STREAM_JPEG)
fputs("Content-Type: multipart/x-mixed-replace; boundary=" BOUNDARY "\r\n\r\n", stdout);
@@ -499,9 +508,6 @@ void MonitorStream::runStream() {
TimePoint stream_start_time = std::chrono::steady_clock::now();
when_to_send_next_frame = stream_start_time; // initialize it to now so that we spit out a frame immediately
frame_count = 0;
temp_image_buffer = nullptr;
temp_image_buffer_count = playback_buffer;
temp_read_index = temp_image_buffer_count;
temp_write_index = temp_image_buffer_count;
@@ -554,10 +560,6 @@ void MonitorStream::runStream() {
Debug(2, "Not using playback_buffer");
} // end if connkey && playback_buffer
std::thread command_processor;
if (connkey) {
command_processor = std::thread(&MonitorStream::checkCommandQueue, this);
}
while (!zm_terminate) {
if (feof(stdout)) {

View File

@@ -1,18 +1,20 @@
#ifdef MOSQUITTOPP_FOUND
#include "zm.h"
#include "zm_logger.h"
#include "zm_mqtt.h"
#include "zm_monitor.h"
#include "zm_mqtt.h"
#ifdef MOSQUITTOPP_FOUND
#include "zm_time.h"
#include <sstream>
#include <string.h>
MQTT::MQTT(Monitor *monitor) :
mosquittopp("ZoneMinder"),
monitor_(monitor),
connected_(false) {
connected_(false)
{
std::string name="ZoneMinder"+std::to_string(monitor->Id());
mosquittopp(name.c_str());
mosqpp::lib_init();
connect();
}
@@ -51,15 +53,15 @@ void MQTT::on_connect(int rc) {
}
void MQTT::on_message(const struct mosquitto_message *message) {
Debug(1, "MQTT: Have message %s: %s", message->topic, message->payload);
Debug(1, "MQTT: Have message %s: %s", message->topic, static_cast<const char *>(message->payload));
}
void MQTT::on_subscribe(int mid, int qos_count, const int *granted_qos) {
Debug(1, "MQTT: Subscribed to topic ");
}
void MQTT::on_publish() {
Debug(1, "MQTT: on_publish ");
void MQTT::on_publish(int mid) {
Debug(1, "MQTT: on_publish %d", mid);
}
void MQTT::send(const std::string &message) {

View File

@@ -31,7 +31,7 @@ class MQTT : public mosqpp::mosquittopp {
void on_connect(int rc);
void on_message(const struct mosquitto_message *message);
void on_subscribe(int mid, int qos_count, const int *granted_qos);
void on_publish();
void on_publish(int mid);
enum sensorTypes {
NUMERIC = 0,
DIGITAL

View File

@@ -114,8 +114,8 @@ bool PacketQueue::queuePacket(std::shared_ptr<ZMPacket> add_packet) {
while (rit != pktQueue.rend()) {
std::shared_ptr<ZMPacket> prev_packet = *rit;
if (prev_packet->packet->stream_index == add_packet->packet->stream_index) {
if (prev_packet->keyframe) break;
packet_count ++;
if (prev_packet->keyframe) break;
}
++rit;
}
@@ -174,38 +174,21 @@ bool PacketQueue::queuePacket(std::shared_ptr<ZMPacket> add_packet) {
Warning("Found locked packet when trying to free up video packets. This means that decoding is not keeping up.");
}
++it;
continue;
break;
}
for (
auto iterators_it = iterators.begin();
iterators_it != iterators.end();
++iterators_it
) {
auto iterator_it = *iterators_it;
// Have to check each iterator and make sure it doesn't point to the packet we are about to delete
if (*(*iterator_it) == zm_packet) {
Debug(1, "Bumping IT because it is at the front that we are deleting");
++(*iterator_it);
if (zm_packet->packet->stream_index == video_stream_id and zm_packet->keyframe) {
for ( it = pktQueue.begin(); *it !=zm_packet; ) {
it = this->deletePacket(it);
}
} // end foreach iterator
zm_packet->decoded = true; // Have to in case analysis is waiting on it
zm_packet->notify_all();
it = pktQueue.erase(it);
packet_counts[zm_packet->packet->stream_index] -= 1;
Debug(1,
"Deleting a packet with stream index:%d image_index:%d with keyframe:%d, video frames in queue:%d max: %d, queuesize:%zu",
zm_packet->packet->stream_index,
zm_packet->image_index,
zm_packet->keyframe,
packet_counts[video_stream_id],
max_video_packet_count,
pktQueue.size());
if (zm_packet->packet->stream_index == video_stream_id)
break;
} else {
it ++;
//this->deletePacket(it);
//if (zm_packet->packet->stream_index == video_stream_id)
//break;
} // end if erasing a whole gop
} // end while
} else if (warned_count > 0) {
warned_count--;
@@ -218,6 +201,37 @@ bool PacketQueue::queuePacket(std::shared_ptr<ZMPacket> add_packet) {
return true;
} // end bool PacketQueue::queuePacket(ZMPacket* zm_packet)
packetqueue_iterator PacketQueue::deletePacket(packetqueue_iterator it) {
auto zm_packet = *it;
for (
auto iterators_it = iterators.begin();
iterators_it != iterators.end();
++iterators_it
) {
auto iterator_it = *iterators_it;
// Have to check each iterator and make sure it doesn't point to the packet we are about to delete
if (*(*iterator_it) == zm_packet) {
Debug(1, "Bumping IT because it is at the front that we are deleting");
++(*iterator_it);
} else {
Debug(1, "Not Bumping IT because it is pointing at %d and we are %d", (*(*iterator_it))->image_index, zm_packet->image_index);
}
} // end foreach iterator
zm_packet->decoded = true;
zm_packet->notify_all();
packet_counts[zm_packet->packet->stream_index] -= 1;
Debug(1,
"Deleting a packet with stream index:%d image_index:%d with keyframe:%d, video frames in queue:%d max: %d, queuesize:%zu",
zm_packet->packet->stream_index,
zm_packet->image_index,
zm_packet->keyframe,
packet_counts[video_stream_id],
max_video_packet_count,
pktQueue.size());
return pktQueue.erase(it);
}
void PacketQueue::clearPackets(const std::shared_ptr<ZMPacket> &add_packet) {
// Only do queueCleaning if we are adding a video keyframe, so that we guarantee that there is one.
// No good. Have to satisfy two conditions:
@@ -295,7 +309,7 @@ void PacketQueue::clearPackets(const std::shared_ptr<ZMPacket> &add_packet) {
return;
}
int keyframe_interval_count = 1;
int keyframe_interval_count = 0;
int video_packets_to_delete = 0; // This is a count of how many packets we will delete so we know when to stop looking
ZMLockedPacket *lp = new ZMLockedPacket(zm_packet);
@@ -428,6 +442,7 @@ void PacketQueue::clear() {
delete[] packet_counts;
packet_counts = nullptr;
max_stream_id = -1;
max_keyframe_interval_ = 0;
Debug(1, "Packetqueue is clear, notifying");
condition.notify_all();
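
The deletePacket() helper introduced above exists mainly for iterator hygiene: std::list::erase() only invalidates iterators to the erased element, so every registered consumer iterator that happens to point at the victim is advanced before the erase. The rule in isolation, as a sketch over a plain std::list<int>:

    #include <list>
    #include <vector>

    // Erase 'victim' from the queue, first bumping any observer iterator parked on it.
    std::list<int>::iterator erase_with_observers(std::list<int> &queue,
                                                  std::list<int>::iterator victim,
                                                  std::vector<std::list<int>::iterator *> &observers) {
      for (auto *observer : observers) {
        if (*observer == victim) ++(*observer);   // keep the observer valid across the erase
      }
      return queue.erase(victim);
    }
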

View File

@@ -91,6 +91,8 @@ class PacketQueue {
void unlock(ZMLockedPacket *lp);
void notify_all();
void wait();
private:
packetqueue_iterator deletePacket(packetqueue_iterator it);
};
#endif /* ZM_PACKETQUEUE_H */

View File

@@ -209,6 +209,7 @@ class StreamBase {
memset(&rem_sock_path, 0, sizeof(rem_sock_path));
memset(&rem_addr, 0, sizeof(rem_addr));
memset(&sock_path_lock, 0, sizeof(sock_path_lock));
last_fps_update = std::chrono::steady_clock::now();
vid_stream = nullptr;
msg = { 0, { 0 } };

View File

@@ -26,6 +26,7 @@
extern "C" {
#include <libavutil/time.h>
#include <libavutil/display.h>
}
#include <string>
@@ -148,7 +149,12 @@ bool VideoStore::open() {
} else {
const AVDictionaryEntry *entry = av_dict_get(opts, "reorder_queue_size", nullptr, AV_DICT_MATCH_CASE);
if (entry) {
reorder_queue_size = std::stoul(entry->value);
if (monitor->GetOptVideoWriter() == Monitor::ENCODE) {
Debug(1, "reorder_queue_size ignored for non-passthrough");
} else {
reorder_queue_size = std::stoul(entry->value);
Debug(1, "reorder_queue_size set to %zu", reorder_queue_size);
}
// remove it to prevent complaining later.
av_dict_set(&opts, "reorder_queue_size", nullptr, AV_DICT_MATCH_CASE);
} else if (monitor->has_out_of_order_packets()
@@ -157,8 +163,8 @@ bool VideoStore::open() {
and monitor->GetOptVideoWriter() == Monitor::PASSTHROUGH
) {
reorder_queue_size = 2*monitor->get_max_keyframe_interval();
Debug(1, "reorder_queue_size set to %zu", reorder_queue_size);
}
Debug(1, "reorder_queue_size set to %zu", reorder_queue_size);
}
oc->metadata = pmetadata;
@@ -186,8 +192,35 @@ bool VideoStore::open() {
video_out_stream->avg_frame_rate = video_in_stream->avg_frame_rate;
// Only set orientation if doing passthrough, otherwise the frame image will be rotated
Monitor::Orientation orientation = monitor->getOrientation();
if (orientation) {
if (orientation > 1) { // 1 is ROTATE_0
#if LIBAVCODEC_VERSION_CHECK(59, 37, 100, 37, 100)
int32_t* displaymatrix = static_cast<int32_t*>(av_malloc(sizeof(int32_t)*9));
Debug(3, "Have orientation %d", orientation);
if (orientation == Monitor::ROTATE_0) {
} else if (orientation == Monitor::ROTATE_90) {
av_display_rotation_set(displaymatrix, 90);
} else if (orientation == Monitor::ROTATE_180) {
av_display_rotation_set(displaymatrix, 180);
} else if (orientation == Monitor::ROTATE_270) {
av_display_rotation_set(displaymatrix, 270);
} else {
Warning("Unsupported Orientation(%d)", orientation);
}
#endif
#if LIBAVCODEC_VERSION_CHECK(60, 31, 102, 31, 102)
av_packet_side_data_add(
&video_out_stream->codecpar->coded_side_data,
&video_out_stream->codecpar->nb_coded_side_data,
AV_PKT_DATA_DISPLAYMATRIX,
(int32_t *)displaymatrix, sizeof(int32_t)*9, 0);
#else
#if LIBAVCODEC_VERSION_CHECK(59, 37, 100, 37, 100)
av_stream_add_side_data(video_out_stream,
AV_PKT_DATA_DISPLAYMATRIX,
(uint8_t *)displaymatrix,
sizeof(*displaymatrix));
#endif
#endif
if (orientation == Monitor::ROTATE_0) {
} else if (orientation == Monitor::ROTATE_90) {
ret = av_dict_set(&video_out_stream->metadata, "rotate", "90", 0);
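
The rotation block above writes the orientation as an FFmpeg display matrix and then attaches it to the output stream, gated on the libavcodec version because newer FFmpeg attaches stream side data through av_packet_side_data_add() rather than av_stream_add_side_data(), as the two branches show. The matrix part on its own, as a small sketch using only libavutil:

    extern "C" {
    #include <libavutil/display.h>
    #include <libavutil/mem.h>
    }

    // Allocate and fill a 3x3 display matrix describing a rotation in degrees.
    // Whoever attaches it as side data takes ownership of the allocation.
    static int32_t *make_display_matrix(double degrees) {
      int32_t *matrix = static_cast<int32_t *>(av_malloc(sizeof(int32_t) * 9));
      if (!matrix) return nullptr;
      av_display_rotation_set(matrix, degrees);  // writes all nine entries
      return matrix;
    }
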
@@ -1329,8 +1362,8 @@ int VideoStore::writeAudioFramePacket(const std::shared_ptr<ZMPacket> zm_packet)
int64_t ts = static_cast<int64>(std::chrono::duration_cast<Microseconds>(zm_packet->timestamp.time_since_epoch()).count());
ipkt->pts = ipkt->dts = av_rescale_q(ts, AV_TIME_BASE_Q, audio_in_stream->time_base);
Debug(2, "dts from timestamp, set to (%" PRId64 ") secs(%.2f)",
ts, FPSeconds(zm_packet->timestamp.time_since_epoch()).count());
Debug(2, "dts %" PRId64 " from timestamp %" PRId64 " secs(%.2f)",
ipkt->dts, ts, FPSeconds(zm_packet->timestamp.time_since_epoch()).count());
}
if (audio_first_dts == AV_NOPTS_VALUE) {
@@ -1397,7 +1430,7 @@ int VideoStore::writeAudioFramePacket(const std::shared_ptr<ZMPacket> zm_packet)
opkt->dts = ipkt->dts;
}
ZM_DUMP_STREAM_PACKET(audio_in_stream, ipkt, "after pts adjustment");
ZM_DUMP_STREAM_PACKET(audio_in_stream, opkt, "after pts adjustment");
av_packet_rescale_ts(opkt.get(), audio_in_stream->time_base, audio_out_stream->time_base);
ZM_DUMP_STREAM_PACKET(audio_out_stream, opkt, "after stream pts adjustment");
write_packet(opkt.get(), audio_out_stream);

View File

@@ -238,20 +238,23 @@ int main(int argc, char *argv[]) {
result = 0;
for (const std::shared_ptr<Monitor> &monitor : monitors) {
monitor->LoadCamera();
if (!monitor->connect()) {
Warning("Couldn't connect to monitor %d", monitor->Id());
}
SystemTimePoint now = std::chrono::system_clock::now();
monitor->SetStartupTime(now);
std::string sql = stringtf(
"INSERT INTO Monitor_Status (MonitorId,Status,CaptureFPS,AnalysisFPS,CaptureBandwidth)"
" VALUES (%u, 'Running',0,0,0) ON DUPLICATE KEY UPDATE Status='Running',CaptureFPS=0,AnalysisFPS=0,CaptureBandwidth=0",
monitor->Id());
zmDbDo(sql);
monitor->LoadCamera();
while (!monitor->connect() and !zm_terminate) {
Warning("Couldn't connect to monitor %d", monitor->Id());
sleep(1);
}
if (zm_terminate) break;
SystemTimePoint now = std::chrono::system_clock::now();
monitor->SetStartupTime(now);
if (monitor->StartupDelay() > 0) {
Debug(1, "Doing startup sleep for %ds", monitor->StartupDelay());
std::this_thread::sleep_for(Seconds(monitor->StartupDelay()));
@@ -293,9 +296,11 @@ int main(int argc, char *argv[]) {
if (monitors[i]->Capturing() == Monitor::CAPTURING_ONDEMAND) {
SystemTimePoint now = std::chrono::system_clock::now();
monitors[i]->SetHeartbeatTime(now);
int64 since_last_view = static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count()) - monitors[i]->getLastViewed();
if (since_last_view > 10 and monitors[i]->Ready()) {
time_t last_viewed = monitors[i]->getLastViewed();
int64 since_last_view = static_cast<int64>(std::chrono::duration_cast<Seconds>(now.time_since_epoch()).count()) - last_viewed;
Debug(1, "Last view %jd= %" PRId64 " seconds since last view", last_viewed, since_last_view);
if (((!last_viewed) or (since_last_view > 10)) and (monitors[i]->GetLastWriteIndex() != -1)) {
if (monitors[i]->getCamera()->isPrimed()) {
monitors[i]->Pause();
}
@@ -303,10 +308,12 @@ int main(int argc, char *argv[]) {
result = 0;
continue;
} else if (!monitors[i]->getCamera()->isPrimed()) {
if (1 > (result = monitors[i]->Play()))
if (1 > (result = monitors[i]->Play())) {
Debug(1, "Failed to play");
break;
}
}
}
} // end if ONDEMAND
if (monitors[i]->PreCapture() < 0) {
Error("Failed to pre-capture monitor %d %s (%zu/%zu)",

View File

@@ -118,7 +118,7 @@ commonprep () {
fi
fi
RTSPVER="eab32851421ffe54fec0229c3efc44c642bc8d46"
RTSPVER="055d81fe1293429e496b19104a9ed3360755a440"
if [ -e "build/RtspServer-${RTSPVER}.tar.gz" ]; then
echo "Found existing RtspServer ${RTSPVER} tarball..."
else

View File

@@ -1 +1 @@
1.37.63
1.37.65

View File

@@ -215,14 +215,11 @@ if ( canEdit('Events') ) {
ajaxResponse(array('response'=>$response));
break;
case 'removetag' :
$tagId = $_REQUEST['tid'];
$tagId = validCardinal($_REQUEST['tid']);
dbQuery('DELETE FROM Events_Tags WHERE TagId = ? AND EventId = ?', array($tagId, $_REQUEST['id']));
$sql = "SELECT * FROM Events_Tags WHERE TagId = $tagId";
$rowCount = dbNumRows($sql);
$rowCount = dbNumRows('SELECT * FROM Events_Tags WHERE TagId=?', [ $tagId ]);
if ($rowCount < 1) {
$sql = 'DELETE FROM Tags WHERE Id = ?';
$values = array($_REQUEST['tid']);
$response = dbNumRows($sql, $values);
$response = dbNumRows('DELETE FROM Tags WHERE Id=?', [$tagId]);
ajaxResponse(array('response'=>$response));
}
ajaxResponse();

View File

@@ -223,7 +223,7 @@ function queryRequest($filter, $search, $advsearch, $sort, $offset, $order, $lim
LEFT JOIN Events_Tags AS ET ON E.Id = ET.EventId
LEFT JOIN Tags AS T ON T.Id = ET.TagId
'.$where.'
GROUP BY E.Id
GROUP BY E.Id, Monitor
'.($sort?' ORDER BY '.$sort.' '.$order:'');
if ((int)($filter->limit()) and !$has_post_sql_conditions) {

View File

@@ -192,7 +192,7 @@ function queryRequest() {
$row['Server'] = $Server ? $Server->Name() : '';
// Strip out all characters that are not ASCII 32-126 (yes, 126)
$row['Message'] = preg_replace('/[^\x20-\x7E]/', '', $row['Message']);
$row['Message'] = preg_replace('/[^\x20-\x7E]/', '', htmlspecialchars($row['Message']));
$row['File'] = preg_replace('/[^\x20-\x7E]/', '', strip_tags($row['File']));
$rows[] = $row;
}

View File

@@ -42,8 +42,7 @@ function parentGrpSelect($newGroup) {
}
$kids = get_children($newGroup);
if ( $newGroup->Id() )
$kids[] = $newGroup->Id();
if ( $newGroup->Id() ) $kids[] = $newGroup->Id();
$sql = 'SELECT Id,Name FROM `Groups`'.(count($kids)?' WHERE Id NOT IN ('.implode(',',array_map(function(){return '?';}, $kids)).')' : '').' ORDER BY Name';
$options = array(''=>'None');
@@ -57,11 +56,11 @@ function parentGrpSelect($newGroup) {
function monitorList($newGroup) {
$result = '';
$monitors = dbFetchAll('SELECT Id,Name FROM Monitors ORDER BY Sequence ASC');
$monitors = dbFetchAll('SELECT Id,Name FROM Monitors WHERE Deleted=false ORDER BY Sequence ASC');
$monitorIds = $newGroup->MonitorIds();
foreach ( $monitors as $monitor ) {
if ( visibleMonitor($monitor['Id']) ) {
$result .= '<option value="' .validCardinal($monitor['Id']). '"' .( in_array( $monitor['Id'], $monitorIds ) ? ' selected="selected"' : ''). '>' .validHtmlStr($monitor['Name']). '</option>'.PHP_EOL;
$result .= '<option value="' .validCardinal($monitor['Id']). '"' .( in_array($monitor['Id'], $monitorIds, true) ? ' selected="selected"' : ''). '>' .validHtmlStr($monitor['Name']). '</option>'.PHP_EOL;
}
}
@@ -112,7 +111,7 @@ if ( !empty($_REQUEST['gid']) ) {
<tr>
<th class="text-right pr-3" scope="row"><?php echo translate('Monitor') ?></th>
<td>
<select name="newGroup[MonitorIds][]" class="chosen" multiple="multiple" data-on-change="configModalBtns">
<select name="newGroup[MonitorIds][]" id="newGroupMonitorIds" class="chosen" multiple="multiple" data-on-change="configModalBtns">
<?php echo monitorList($newGroup) ?>
</select>
</td>

View File

@@ -1,12 +1,19 @@
<?php
if (!canView('Control')) return;
if (!canView('Control')) {
return;
}
$mid = validCardinal($_REQUEST['mid']);
if (!$mid) return;
if (!$mid) {
ZM\Warning('Invalid mid '.$_REQUEST['mid']. ' != '.$mid);
return;
}
$monitor = ZM\Monitor::find_one(array('Id'=>$mid));
if (!$monitor) return;
if (!$monitor) {
ZM\Warning("Monitor $mid not found.");
return;
}
$zmuCommand = getZmuCommand(' -m '.escapeshellarg($mid).' -B -C -H -O');
$zmuOutput = exec( $zmuCommand );
if ($zmuOutput) {

View File

@@ -212,7 +212,7 @@ $statusData = array(
'EventId' => true,
'Type' => true,
'TimeStamp' => true,
'TimeStampShort' => array( 'sql' => 'date_format( StartDateTime, \''.MYSQL_FMT_DATETIME_SHORT.'\' )' ),
'TimeStampShort' => array( 'sql' => 'date_format( StartDateTime, \''.MYSQL_FMT_DATETIME_SHORT.'\' )' ),
'Delta' => true,
'Score' => true,
//'Image' => array( 'postFunc' => 'getFrameImage' ),
@@ -509,40 +509,29 @@ function getNearEvents() {
$filter_sql = $filter->sql();
# When listing, it may make sense to list them in descending order.
# But when viewing Prev should timewise earlier and Next should be after.
# But when viewing Prev should be timewise earlier and Next should be after.
if ( $sortColumn == 'E.Id' or $sortColumn == 'E.StartDateTime' ) {
$sortOrder = 'ASC';
}
$sql = '
SELECT
E.Id
AS Id,
E.StartDateTime
AS StartDateTime
FROM Events
AS E
INNER JOIN Monitors
AS M
ON E.MonitorId = M.Id
LEFT JOIN Events_Tags
AS ET
ON E.Id = ET.EventId
LEFT JOIN Tags
AS T
ON T.Id = ET.TagId
WHERE '.$sortColumn.'
SELECT E.Id AS Id, E.StartDateTime AS StartDateTime
FROM Events AS E
INNER JOIN Monitors AS M ON E.MonitorId = M.Id
LEFT JOIN Events_Tags AS ET ON E.Id = ET.EventId
LEFT JOIN Tags AS T ON T.Id = ET.TagId
WHERE E.Id != ? AND '.$sortColumn.'
'.($sortOrder=='ASC'?'<=':'>=').' \''.$event[$_REQUEST['sort_field']].'\'';
if ($filter->sql()) {
$sql .= ' AND ('.$filter->sql().')';
}
$sql .= ' AND E.Id<'.$event['Id'] . ' ORDER BY '.$sortColumn.' '.($sortOrder=='ASC'?'DESC':'ASC');
$sql .= ' AND E.StartDateTime <= ? ORDER BY '.$sortColumn.' '.($sortOrder=='ASC'?'DESC':'ASC');
if ( $sortColumn != 'E.Id' ) {
# When sorting by starttime, if we have two events with the same starttime (different monitors) then we should sort secondly by Id
$sql .= ', E.Id DESC';
}
$sql .= ' LIMIT 1';
$result = dbQuery($sql);
$result = dbQuery($sql, [$eventId, $event['StartDateTime']]);
if ( !$result ) {
ZM\Error('Failed to load previous event using '.$sql);
return $NearEvents;
@@ -551,34 +540,23 @@ function getNearEvents() {
$prevEvent = dbFetchNext($result);
$sql = '
SELECT
E.Id
AS Id,
E.StartDateTime
AS StartDateTime
FROM Events
AS E
INNER JOIN Monitors
AS M
ON E.MonitorId = M.Id
LEFT JOIN Events_Tags
AS ET
ON E.Id = ET.EventId
LEFT JOIN Tags
AS T
ON T.Id = ET.TagId
WHERE '.$sortColumn.'
SELECT E.Id AS Id, E.StartDateTime AS StartDateTime
FROM Events AS E
INNER JOIN Monitors AS M ON E.MonitorId = M.Id
LEFT JOIN Events_Tags AS ET ON E.Id = ET.EventId
LEFT JOIN Tags AS T ON T.Id = ET.TagId
WHERE E.Id != ? AND '.$sortColumn.'
'.($sortOrder=='ASC'?'>=':'<=').' \''.$event[$_REQUEST['sort_field']].'\'';
if ($filter->sql()) {
$sql .= ' AND ('.$filter->sql().')';
}
$sql .= ' AND E.Id>'.$event['Id'] . ' ORDER BY '.$sortColumn.' '.($sortOrder=='ASC'?'ASC':'DESC');
$sql .= ' AND E.StartDateTime >= ? ORDER BY '.$sortColumn.' '.($sortOrder=='ASC'?'ASC':'DESC');
if ( $sortColumn != 'E.Id' ) {
# When sorting by starttime, if we have two events with the same starttime (different monitors) then we should sort secondly by Id
$sql .= ', E.Id ASC';
}
$sql .= ' LIMIT 1';
$result = dbQuery($sql);
$result = dbQuery($sql, [$eventId, $event['StartDateTime']]);
if ( !$result ) {
ZM\Error('Failed to load next event using '.$sql);
return $NearEvents;

View File

@@ -132,18 +132,17 @@ if ( $numSockets === false ) {
}
}
switch ($nbytes = @socket_recvfrom($socket, $msg, MSG_DATA_SIZE, 0, $remSockFile)) {
$nbytes = @socket_recvfrom($socket, $msg, MSG_DATA_SIZE, 0, $remSockFile);
if ($semaphore) sem_release($semaphore);
switch ($nbytes) {
case -1 :
if ($semaphore) sem_release($semaphore);
ajaxError("socket_recvfrom( $remSockFile ) failed: ".socket_strerror(socket_last_error()));
break;
case 0 :
if ($semaphore) sem_release($semaphore);
ajaxError('No data to read from socket');
break;
default :
if ( $nbytes != MSG_DATA_SIZE ) {
sem_release($semaphore);
ajaxError("Got unexpected message size, got $nbytes, expected ".MSG_DATA_SIZE);
}
break;
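
The change above captures the socket_recvfrom() result in a local and releases the semaphore once, right after the read, rather than releasing it separately in each switch branch, so every exit path sees exactly one release. In C++ the same guarantee is usually expressed with a scope guard; a sketch with a hypothetical Semaphore type, not ZoneMinder's PHP sem_* calls:

// Illustrative sketch: release-on-every-path via RAII instead of per-branch calls.
#include <cstdio>

struct Semaphore {
  void acquire() { std::printf("acquired\n"); }
  void release() { std::printf("released\n"); }
};

class SemGuard {
 public:
  explicit SemGuard(Semaphore &s) : sem_(s) { sem_.acquire(); }
  ~SemGuard() { sem_.release(); }               // runs on every return path
  SemGuard(const SemGuard &) = delete;
  SemGuard &operator=(const SemGuard &) = delete;
 private:
  Semaphore &sem_;
};

static int handle(Semaphore &sem, int nbytes) {
  SemGuard guard(sem);                          // released when handle() returns, on any path
  if (nbytes < 0) return -1;                    // error path
  if (nbytes == 0) return 0;                    // empty read
  return nbytes;                                // normal path
}

int main() {
  Semaphore sem;
  std::printf("result %d\n", handle(sem, 42));
  return 0;
}
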
@@ -152,7 +151,7 @@ default :
$data = unpack('ltype', $msg);
switch ( $data['type'] ) {
case MSG_DATA_WATCH :
$data = unpack('ltype/imonitor/istate/dfps/dcapturefps/danalysisfps/ilevel/irate/ddelay/izoom/iscale/Cdelayed/Cpaused/Cenabled/Cforced', $msg);
$data = unpack('ltype/imonitor/istate/dfps/dcapturefps/danalysisfps/ilevel/irate/ddelay/izoom/iscale/Cdelayed/Cpaused/Cenabled/Cforced/iscore/ianalysing', $msg);
$data['fps'] = round( $data['fps'], 2 );
$data['capturefps'] = round( $data['capturefps'], 2 );
$data['analysisfps'] = round( $data['analysisfps'], 2 );
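
The MSG_DATA_WATCH unpack format above gains two trailing fields, iscore and ianalysing (and the MSG_DATA_EVENT format further down gains dfps), keeping the PHP reader in step with the binary status message written by the streaming daemon. A sketch of how such a sequential format maps onto bytes, with 'l' and 'i' reading 32-bit integers and 'd' an 8-byte double on typical platforms; the field names and packing here are illustrative, not ZoneMinder's actual message layout:

// Illustrative sketch: append fields in the order the unpack() format expects.
#include <cstdint>
#include <cstdio>
#include <vector>

template <typename T>
static void append(std::vector<uint8_t> &buf, T value) {
  const uint8_t *p = reinterpret_cast<const uint8_t *>(&value);
  buf.insert(buf.end(), p, p + sizeof(T));      // raw bytes, no padding
}

int main() {
  std::vector<uint8_t> msg;
  append<int32_t>(msg, 1);       // 'l' type (e.g. the watch message)
  append<int32_t>(msg, 7);       // 'i' monitor id
  append<int32_t>(msg, 2);       // 'i' state
  append<double>(msg, 24.97);    // 'd' fps
  append<double>(msg, 25.0);     // 'd' capturefps
  append<double>(msg, 12.5);     // 'd' analysisfps
  // ... level, rate, delay, zoom, scale, delayed, paused, enabled, forced ...
  append<int32_t>(msg, 42);      // 'i' score     -- newly read by the updated format
  append<int32_t>(msg, 1);       // 'i' analysing -- newly read by the updated format
  std::printf("message bytes so far: %zu\n", msg.size());
  return 0;
}
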
@@ -172,19 +171,19 @@ case MSG_DATA_WATCH :
}
$data['auth_relay'] = get_auth_relay();
}
if ($semaphore) sem_release($semaphore);
ajaxResponse(array('status'=>$data));
break;
case MSG_DATA_EVENT :
if ( PHP_INT_SIZE===4 || version_compare( phpversion(), '5.6.0', '<') ) {
ZM\Debug('Using old unpack methods to handle 64bit event id');
$data = unpack('ltype/ieventlow/ieventhigh/dduration/dprogress/irate/izoom/iscale/Cpaused', $msg);
$data = unpack('ltype/ieventlow/ieventhigh/dduration/dprogress/dfps/irate/izoom/iscale/Cpaused', $msg);
$data['event'] = $data['eventhigh'] << 32 | $data['eventlow'];
} else {
$data = unpack('ltype/Qevent/dduration/dprogress/irate/izoom/iscale/Cpaused', $msg);
$data = unpack('ltype/Qevent/dduration/dprogress/dfps/irate/izoom/iscale/Cpaused', $msg);
}
$data['rate'] /= RATE_BASE;
$data['zoom'] = round($data['zoom']/SCALE_BASE, 1);
$data['fps'] = round( $data['fps'], 2 );
if ( ZM_OPT_USE_AUTH ) {
if (ZM_AUTH_RELAY == 'hashed') {
$auth_hash = generateAuthHash(ZM_AUTH_HASH_IPS);
@@ -195,15 +194,11 @@ case MSG_DATA_EVENT :
$data['auth_relay'] = get_auth_relay();
}
if ($semaphore) sem_release($semaphore);
ajaxResponse(array('status'=>$data));
break;
default :
if ($semaphore) sem_release($semaphore);
ajaxError('Unexpected received message type '.$data['type']);
}
if ($semaphore) sem_release($semaphore);
ajaxError('Unrecognised action or insufficient permissions in ajax/stream');
function ajaxCleanup() {

Some files were not shown because too many files have changed in this diff.