Mirror of https://github.com/kiwix/libkiwix.git (synced 2025-12-27 16:38:01 -05:00)

Compare commits: bookFilter...13.0.0 (423 commits)
27 .github/move.yml (vendored)
@@ -1,27 +0,0 @@
# Configuration for Move Issues - https://github.com/dessant/move-issues

# Delete the command comment when it contains no other content
deleteCommand: true

# Close the source issue after moving
closeSourceIssue: true

# Lock the source issue after moving
lockSourceIssue: false

# Mention issue and comment authors
mentionAuthors: true

# Preserve mentions in the issue content
keepContentMentions: true

# Move labels that also exist on the target repository
moveLabels: true

# Set custom aliases for targets
# aliases:
#   r: repo
#   or: owner/repo

# Repository to extend settings from
# _extends: repo
94 .github/workflows/ci.yml (vendored)
@@ -3,46 +3,45 @@ name: CI
on:
  push:
    branches:
      - master
      - main
  pull_request:

jobs:
  Macos:
    runs-on: macos-latest
  macOS:
    runs-on: macos-13
    env:
      HOME: /Users/runner
    steps:
    - name: Checkout code
      uses: actions/checkout@v2
    - name: Setup python 3.10
      uses: actions/setup-python@v2
      with:
        python-version: '3.10'
    - name: Retrieve source code
      uses: actions/checkout@v3

    - name: Install packages
      run: |
        brew update
        brew install gcovr pkg-config ninja
    - name: Install python modules
      run: pip3 install meson==0.49.2 pytest
    - name: Install deps
      shell: bash
        brew unlink python3
        # upgrade from python@3.11.2_1 to python@3.11.3 fails to overwrite those
        rm -f /usr/local/bin/2to3 /usr/local/bin/2to3-3.11 /usr/local/bin/idle3 /usr/local/bin/idle3.11 /usr/local/bin/pydoc3 /usr/local/bin/pydoc3.11 /usr/local/bin/python3 /usr/local/bin/python3-config /usr/local/bin/python3.11 /usr/local/bin/python3.11-config
        brew install pkg-config ninja meson

    - name: Install dependencies
      env:
        ARCHIVE_NAME: deps2_macos_native_dyn_libkiwix.tar.xz
      run: |
        ARCHIVE_NAME=deps2_osx_native_dyn_libkiwix.tar.xz
        wget -O- http://tmp.kiwix.org/ci/${ARCHIVE_NAME} | tar -xJ -C $HOME
    - name: Compile
      shell: bash
        wget -O- https://tmp.kiwix.org/ci/${{env.ARCHIVE_NAME}} | tar -xJ -C ${{env.HOME}}

    - name: Compile source code
      env:
        PKG_CONFIG_PATH: ${{env.HOME}}/BUILD_native_dyn/INSTALL/lib/pkgconfig
        CPPFLAGS: -I${{env.HOME}}/BUILD_native_dyn/INSTALL/include
      run: |
        export PKG_CONFIG_PATH=$HOME/BUILD_native_dyn/INSTALL/lib/pkgconfig
        export CPPFLAGS="-I$HOME/BUILD_native_dyn/INSTALL/include"
        meson . build --default-library=shared -Db_coverage=true
        cd build
        ninja
    - name: Test
      shell: bash
      run: |
        export LD_LIBRARY_PATH=$HOME/BUILD_native_dyn/INSTALL/lib:$HOME/BUILD_native_dyn/INSTALL/lib64
        cd build
        meson test --verbose
        ninja -C build

    - name: Test libkiwix
      env:
        SKIP_BIG_MEMORY_TEST: 1
        LD_LIBRARY_PATH: ${{env.HOME}}/BUILD_native_dyn/INSTALL/lib:${{env.HOME}}/BUILD_native_dyn/INSTALL/lib64
      run: meson test -C build --verbose

  Linux:
    strategy:
@@ -58,19 +57,19 @@ jobs:
      include:
        - name: native_static
          target: native_static
          image_variant: bionic
          image_variant: focal
          lib_postfix: '/x86_64-linux-gnu'
        - name: native_dyn
          target: native_dyn
          image_variant: bionic
          image_variant: focal
          lib_postfix: '/x86_64-linux-gnu'
        - name: android_arm
          target: android_arm
          image_variant: bionic
          image_variant: focal
          lib_postfix: '/arm-linux-androideabi'
        - name: android_arm64
          target: android_arm64
          image_variant: bionic
          image_variant: focal
          lib_postfix: '/aarch64-linux-android'
        - name: win32_static
          target: win32_static
@@ -82,22 +81,12 @@ jobs:
          lib_postfix: '64'
    env:
      HOME: /home/runner
    runs-on: ubuntu-latest
    runs-on: ubuntu-20.04
    container:
      image: "kiwix/kiwix-build_ci:${{matrix.image_variant}}-31"
      image: "ghcr.io/kiwix/kiwix-build_ci_${{matrix.image_variant}}:38"
    steps:
    - name: Checkout code
      shell: python
      run: |
        from subprocess import check_call
        from os import environ
        command = [
          'git', 'clone',
          'https://github.com/${{github.repository}}',
          '--depth=1',
          '--branch', '${{ github.head_ref || github.ref_name }}'
        ]
        check_call(command, cwd=environ['HOME'])
      uses: actions/checkout@v3
    - name: Install deps
      shell: bash
      run: |
@@ -120,7 +109,6 @@ jobs:
        if [[ "${{matrix.target}}" =~ android_.* ]]; then
          MESON_OPTION="$MESON_OPTION -Dstatic-linkage=true"
        fi
        cd $HOME/libkiwix
        meson . build ${MESON_OPTION}
        cd build
        ninja
@@ -131,19 +119,15 @@ jobs:
      if: startsWith(matrix.target, 'native_')
      shell: bash
      run: |
        cd $HOME/libkiwix/build
        cd build
        meson test --verbose
        ninja coverage
      env:
        LD_LIBRARY_PATH: "/home/runner/BUILD_${{matrix.target}}/INSTALL/lib:/home/runner/BUILD_${{matrix.target}}/INSTALL/lib${{matrix.lib_postfix}}"
        SKIP_BIG_MEMORY_TEST: 1

    - name: Publish coverage
      shell: bash
      run: |
        cd $HOME/libkiwix
        curl https://codecov.io/bash -o codecov.sh
        bash codecov.sh -n "${OS_NAME}_${{matrix.target}}" -Z
        rm codecov.sh
      if: startsWith(matrix.target, 'native_')
      env:
        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      uses: codecov/codecov-action@v3
      with:
        token: ${{ secrets.CODECOV_TOKEN }}
49 .github/workflows/package.yml (vendored)
@@ -1,19 +1,25 @@
name: Packages
on: [push, pull_request]

on:
  pull_request:
  push:
    branches:
      - main
  release:
    types: [published]

jobs:
  build-deb:
    runs-on: ubuntu-latest
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        distro:
          - debian-unstable
          - ubuntu-jammy
          - ubuntu-impish
          - ubuntu-focal
          - ubuntu-bionic
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v3

      # Determine which PPA we should upload to
      - name: PPA
@@ -34,6 +40,13 @@ jobs:
          email: release+launchpad@kiwix.org
          distro: ${{ matrix.distro }}

      - uses: legoktm/gh-action-build-deb@debian-unstable
        if: matrix.distro == 'debian-unstable'
        name: Build package for debian-unstable
        id: build-debian-unstable
        with:
          args: --no-sign

      - uses: legoktm/gh-action-build-deb@ubuntu-jammy
        if: matrix.distro == 'ubuntu-jammy'
        name: Build package for ubuntu-jammy
@@ -42,14 +55,6 @@ jobs:
          args: --no-sign
          ppa: ${{ steps.ppa.outputs.ppa }}

      - uses: legoktm/gh-action-build-deb@ubuntu-impish
        if: matrix.distro == 'ubuntu-impish'
        name: Build package for ubuntu-impish
        id: build-ubuntu-impish
        with:
          args: --no-sign
          ppa: ${{ steps.ppa.outputs.ppa }}

      - uses: legoktm/gh-action-build-deb@ubuntu-focal
        if: matrix.distro == 'ubuntu-focal'
        name: Build package for ubuntu-focal
@@ -58,23 +63,15 @@ jobs:
          args: --no-sign
          ppa: ${{ steps.ppa.outputs.ppa }}

      - uses: legoktm/gh-action-build-deb@ubuntu-bionic
        if: matrix.distro == 'ubuntu-bionic'
        name: Build package for ubuntu-bionic
        id: build-ubuntu-bionic
        with:
          args: --no-sign
          ppa: ${{ steps.ppa.outputs.ppa }}

      - uses: actions/upload-artifact@v2
      - uses: actions/upload-artifact@v3
        with:
          name: Packages for ${{ matrix.distro }}
          path: output

      - uses: legoktm/gh-action-dput@master
        name: Upload dev package
        # Only upload on pushes to master
        if: github.event_name == 'push' && github.event.ref == 'refs/heads/master' && startswith(matrix.distro, 'ubuntu-')
        # Only upload on pushes to main
        if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' && startswith(matrix.distro, 'ubuntu-')
        with:
          gpg_key: ${{ secrets.LAUNCHPAD_GPG }}
          repository: ppa:kiwixteam/dev
@@ -82,10 +79,8 @@ jobs:

      - uses: legoktm/gh-action-dput@master
        name: Upload release package
        # Only upload on pushes to master or tag
        if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') && startswith(matrix.distro, 'ubuntu-')
        if: github.event_name == 'release' && startswith(matrix.distro, 'ubuntu-')
        with:
          gpg_key: ${{ secrets.LAUNCHPAD_GPG }}
          repository: ppa:kiwixteam/release
          packages: output/*_source.changes
21 .readthedocs.yaml (Normal file)
@@ -0,0 +1,21 @@
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the version of Python and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# We recommend specifying your dependencies to enable reproducible builds:
# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
  install:
    - requirements: docs/requirements.txt
99 ChangeLog
@@ -1,3 +1,100 @@
libkiwix 13.0.0
===============

 * Server:
   - Improved look & feel of kiwix-serve UI (@veloman-yunkan #917 #1021)
   - Increase tolerance to malformed (control characters) ZIM entry titles (@veloman-yunkan #1023)
   - API allowing to filter many categories at once (@juuz0 #974)
   - Cookie-less user language control (@veloman-yunkan #997)
   - Hack to fix Mirrorbrain based broken magnet URLs (@rgaudin #1001)
 * Fix handling of books with 'Name' metadata with dots (@mgautierfr #1016)
 * New method beautifyFileSize() to provide nice-looking book sizes (@juuz0 #971)
 * Fix a few missing includes (@mgautierfr #978)
 * New functions to read - kiwix-serve - languages and categories streams (@juuz0 #967)
 * Add support of Fon language (@kelson42 #1013)
 * C++17 code base compliance (@mgautierfr #996)
 * Use everywhere std::shared_ptr in place of raw pointer (@mgautierfr #991)
 * Do not use [[nodiscard]] attribute on compilers not supporting it (@mgautierfr #1003)
 * Add a non minified version of autoComplete.js (@mgautierfr #1008)
 * Multiple CI/CD improvements (@kelson42 #982)

libkiwix 12.1.0
===============

 * Server:
   - Introduce a `/nojs` endpoint to browse catalog and zim files with a browser without js (@juuz0 #897)
   - Translate the viewer (@veloman-yunkan #871 #846)
   - Display `mul` on tile when zim is multi-language (@juuz0 #934)
   - Suggestion links point to the `/content` endpoint (@veloman-yunkan #862)
   - Correctly compress web fonts in http answers (@kelson42 #856)
   - Correctly encode link in suggestions (@veloman-yunkan #859 #860 #963)
   - Correctly encode url redirection (@veloman-yunkan #866 #890)
   - Properly handle user language, through cookies and http headers (@veloman-yunkan #849 #869)
   - Fix url encoding (@veloman-yunkan #870)
   - Fix viewer for SeaMonkey (@veloman-yunkan #887)
   - Make the downloader threadsafe (@mgautierfr #886)
   - Add RSS feed in the main page (pointing to the catalog) (@juuz0 #882 #920)
   - Correctly set the mimetype for json and ico (@veloman-yunkan #892)
   - `count=-1` corresponds to unlimited count (instead of 0) (@veloman-yunkan #894)
   - Keep the navigation bar on top (@juuz0 #896)
   - Make the viewer's iframe "safe" (@veloman-yunkan #906 #930)
   - Correctly escape search link in XML Opds output (@veloman-yunkan #936)
   - Store values needed for the viewer js in the url fragment instead of the query string (@juuz0 #907)
   - Get rid of legacy OPDS API usage in the viewer (@veloman-yunkan #939)
   - Fix charset encoding declaration in OPDS response MIME types (@veloman-yunkan #942)
   - Fix PDF in the viewer (@veloman-yunkan #940)
   - Fix external links handling in the viewer (@veloman-yunkan #959)
   - Add tests of searching with accents (@mgautierfr #954)
 * Fix handling of missing illustration in the book (@veloman-yunkan #961)
 * Add support for multi-language zim files (@veloman-yunkan #904)
 * Fix includes for openbsd (@bentley #949)
 * Fix paths in git to allow git clone on Windows (@adamlamar #868)
 * Switch to `main` as principal branch (instead of `master`) (@kelson42)
 * Remove libkiwix android publisher from the repository (@kelson42 #884)
 * Various fixes of meson and CI. (@mgautierfr @kelson42)



libkiwix 12.0.0
===============

 * [API Break] Remove wrapper around libzim (@mgautierfr #789)
 * Allow kiwix-serve to use custom resource files (@veloman-yunkan #779)
 * Properly handle searchProtocolPrefix when rendering search result (@veloman-yunkan #823)
 * Prevent search on multi language content (@veloman-yunkan #838)
 * Use new `zim::Archive::getMediaCount` from libzim (@mgautierfr #836)
 * Catalog:
   - Include tags in free text catalog search (@veloman-yunkan #802)
   - Illustration's url is based on book's uuid (@veloman-yunkan #804)
   - Cleanup of the opds-dumper (@veloman-yunkan #829)
   - Allow filtering of catalog content using multiple languages (@veloman-yunkan #841)
   - Make opds-dumper respect the namemapper (@mgautierfr #837)
 * Server:
   - Correctly handle `\` in suggestion json generation (@veloman-yunkan #843)
   - Better http caching (@veloman-yunkan #833)
   - Make `/suggest` endpoint thread-safe (@veloman-yunkan #834)
   - Better redirection of main page (@veloman-yunkan #827)
   - Remove jquery (@mgautierfr @juuz0 #796)
   - Better Viewer of zim content:
     . Introduce `/content` endpoints (@veloman-yunkan #806)
     . Switch to iframe based content viewer (@veloman-yunkan #716)
   - Optimised design of the welcome page:
     . Alignment (@juuz0 @kelson42 #786)
     . Exit download modal on pressing escape key (@juuz0 #800)
     . Add favicon for different devices (@juuz0 #805)
     . Fix auto hiding of the toolbar (@veloman-yunkan #821)
     . Allow user to filter books by tags in the front page (@juuz0 #711)
 * CI:
   - Trigger CI on pull_request (@kelson42 #791)
   - Drop Ubuntu Impish packaging (@legoktm #825)
   - Add Ubuntu Kinetic packaging (@legoktm #801)
 * Testing:
   - Test ICULanguageInfo (@veloman-yunkan #795)
   - Introduce fake `test` language to test i18n (@veloman-yunkan #848)
 * Fix documentation (@kelson42 #816)
 * Update translation (#787 #839 #847)


libkiwix 11.0.0
===============

@@ -5,7 +102,7 @@ libkiwix 11.0.0
 * [server] Use gzip compression instead of deflate (mgautierfr #757)
 * [server] Version the static resources. This allows better invalidating
   browser cache when resources are changed (@veloman-yunkan #712)
 * [server|front] Use integer to query the host for page length (@juuz #772)
 * [server|front] Use integer to query the host for page length (@juuz0 #772)
 * [server] Improve multizim search API:
   - Improvement of the cache system
   - Better API to select on which books to search in.
14 README.md
@@ -7,10 +7,10 @@ GNU/Linux, macOS, Android, iOS, ...).

[](https://download.kiwix.org/release/libkiwix/)
[](https://github.com/kiwix/libkiwix/wiki/Repology)
[](https://github.com/kiwix/libkiwix/actions?query=branch%3Amaster)
[](https://github.com/kiwix/libkiwix/actions?query=branch%3Amain)
[](https://libkiwix.readthedocs.org/en/latest/?badge=latest)
[](https://www.codefactor.io/repository/github/kiwix/libkiwix)
[](https://codecov.io/gh/kiwix/libkiwix)
[](https://codecov.io/gh/kiwix/libkiwix)
[](https://www.gnu.org/licenses/gpl-3.0)

Disclaimer
@@ -24,9 +24,9 @@ with the Libkiwix compilation itself, we recommend to have a look to
Preamble
--------

Although the Libkiwix can be (cross-)compiled on/for many sytems, the
Although the Libkiwix can be (cross-)compiled on/for many systems, the
following documentation explains how to do it on POSIX ones. It is
primarly thought for GNU/Linux systems and has been tested on recent
primarily thought for GNU/Linux systems and has been tested on recent
releases of Ubuntu and Fedora.

Dependencies
@@ -54,7 +54,7 @@ The following dependency needs to be available at runtime:
These dependencies may or may not be packaged by your operating
system. They may also be packaged but only in an older version. The
compilation script will tell you if one of them is missing or too old.
In the worse case, you will have to download and compile bleeding edge
In the worst case, you will have to download and compile bleeding edge
version by hand.

If you want to install these dependencies locally, then use the
@@ -190,7 +190,7 @@ To use JS provided by kiwix-serve you can use the following template to start wi
- To get books listed using `index.js` add - `<div class="book__list"></div>` under body tag.
- To get number of books listed add - `<h3 class="kiwixHomeBody__results"></h3>` under body tag.
- To add language select box add - `<select id="languageFilter"></select>` under body tag.
- To add language select box add - `<select id="categoryFilter"></select>` under body tag.
- To add category select box add - `<select id="categoryFilter"></select>` under body tag.
- To add search box for books use following form -
```
<form id='kiwixSearchForm'>
@@ -201,7 +201,7 @@ To use JS provided by kiwix-serve you can use the following template to start wi

If you compile manually Libmicrohttpd, you might need to compile it
without GNU TLS, a bug here will empeach further compilation
without GNU TLS, a bug here will impeach further compilation
otherwise.

If the compilation still fails, you might need to get a more recent
13 android-kiwix-lib-publisher/.gitignore (vendored)
@@ -1,13 +0,0 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
@@ -1,25 +0,0 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    repositories {
        google()
        jcenter()

    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.4.1'
        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
    }
}

task clean(type: Delete) {
    delete rootProject.buildDir
}
@@ -1,15 +0,0 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
Binary file not shown.
@@ -1,6 +0,0 @@
#Wed Jun 19 15:28:39 BST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
172 android-kiwix-lib-publisher/gradlew (vendored)
@@ -1,172 +0,0 @@
[entire file removed: the stock, vendored POSIX Gradle wrapper launcher script (`gradlew`); its contents are not reproduced here]
84 android-kiwix-lib-publisher/gradlew.bat (vendored)
@@ -1,84 +0,0 @@
[entire file removed: the stock, vendored Windows Gradle wrapper launcher script (`gradlew.bat`); its contents are not reproduced here]
@@ -1 +0,0 @@
/build
@@ -1,64 +0,0 @@
apply plugin: 'com.android.library'
apply plugin: 'maven'

android {
    compileSdkVersion 28
    defaultConfig {
        minSdkVersion 15
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}

dependencies {
    implementation 'com.getkeepsafe.relinker:relinker:1.3.1'
}

task writePom {
    pom {
        project {
            groupId 'org.kiwix.kiwixlib'
            artifactId 'kiwixlib'
            version '10.1.1' + (System.env.KIWIXLIB_BUILDVERSION == null ? '' : '-'+System.env.KIWIXLIB_BUILDVERSION)
            packaging 'aar'
            name 'kiwixlib'
            url 'https://github.com/kiwix/libkiwix'
            licenses {
                license {
                    name 'GPLv3'
                    url 'https://www.gnu.org/licenses/gpl-3.0.en.html'
                }
            }
            developers {
                developer {
                    id 'kiwix'
                    name 'kiwix'
                    email 'contact@kiwix.org'
                }
            }
            scm {
                connection 'https://github.com/kiwix/libkiwix.git'
                developerConnection 'https://github.com/kiwix/libkiwix.git'
                url 'https://github.com/kiwix/libkiwix'
            }
        }
    }.withXml {
        def dependenciesNode = asNode().appendNode('dependencies')

        //Iterate over the implementation dependencies, adding a <dependency> node for each
        configurations.implementation.allDependencies.each {
            def dependencyNode = dependenciesNode.appendNode('dependency')
            dependencyNode.appendNode('groupId', it.group)
            dependencyNode.appendNode('artifactId', it.name)
            dependencyNode.appendNode('version', it.version)
        }
    }.writeTo("$buildDir/pom.xml")
}

@@ -1,21 +0,0 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
#   http://developer.android.com/guide/developing/tools/proguard.html

# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
#   public *;
#}

# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable

# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
@@ -1,10 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.kiwix.kiwixlib">

    <application
        android:allowBackup="true"
        android:supportsRtl="true">

    </application>

</manifest>
@@ -1 +0,0 @@
include ':kiwixLibAndroid'
@@ -24,8 +24,6 @@ author = 'libkiwix-team'

# -- General configuration ---------------------------------------------------

on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
@@ -42,9 +40,7 @@ templates_path = ['_templates']
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']


if not on_rtd:
    html_theme = 'sphinx_rtd_theme'
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,

@@ -1,2 +1,3 @@
breathe
exhale
sphinx_rtd_theme

@@ -1,38 +0,0 @@
#!/usr/bin/bash

files=(
"include/library.h"
"include/common/stringTools.h"
"include/common/pathTools.h"
"include/common/otherTools.h"
"include/common/regexTools.h"
"include/common/networkTools.h"
"include/common/archiveTools.h"
"include/manager.h"
"include/reader.h"
"include/kiwix.h"
"include/xapianSearcher.h"
"include/searcher.h"
"src/library.cpp"
"src/android/kiwix.cpp"
"src/android/org/kiwix/kiwixlib/JNIKiwixBool.java"
"src/android/org/kiwix/kiwixlib/JNIKiwix.java"
"src/android/org/kiwix/kiwixlib/JNIKiwixString.java"
"src/android/org/kiwix/kiwixlib/JNIKiwixInt.java"
"src/searcher.cpp"
"src/common/pathTools.cpp"
"src/common/regexTools.cpp"
"src/common/otherTools.cpp"
"src/common/archiveTools.cpp"
"src/common/networkTools.cpp"
"src/common/stringTools.cpp"
"src/xapianSearcher.cpp"
"src/manager.cpp"
"src/reader.cpp"
)

for i in "${files[@]}"
do
  echo $i
  clang-format -i -style=file $i
done
@@ -79,7 +79,9 @@ class Book
  bool isPathValid() const { return m_pathValid; }
  const std::string& getTitle() const { return m_title; }
  const std::string& getDescription() const { return m_description; }
  const std::string& getLanguage() const { return m_language; }
  DEPRECATED const std::string& getLanguage() const { return m_language; }
  const std::string& getCommaSeparatedLanguages() const { return m_language; }
  const std::vector<std::string> getLanguages() const;
  const std::string& getCreator() const { return m_creator; }
  const std::string& getPublisher() const { return m_publisher; }
  const std::string& getDate() const { return m_date; }
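The hunk above deprecates the single-language accessor in favour of the two multi-language ones. A minimal sketch of the new usage, assuming the header installs as <kiwix/book.h>:

```cpp
#include <kiwix/book.h>
#include <string>

// Returns the first declared language of a book, preferring the parsed list
// over the deprecated single-string accessor.
std::string firstLanguageOf(const kiwix::Book& book) {
  const auto langs = book.getLanguages();  // e.g. {"eng", "fra"}
  if (!langs.empty())
    return langs.front();
  // getCommaSeparatedLanguages() returns the raw metadata value ("eng,fra") unchanged.
  return book.getCommaSeparatedLanguages();
}
```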
@@ -25,6 +25,7 @@
#include <map>
#include <memory>
#include <stdexcept>
#include <mutex>

namespace kiwix
{
@@ -43,6 +44,14 @@ class AriaError : public std::runtime_error {
};


/**
 * A representation of a current download.
 *
 * `Download` is not thread safe. User must care to not call method on a
 * same download from different threads.
 * However, it is safe to use different `Download`s from different threads.
 */

class Download {
 public:
  typedef enum { K_ACTIVE, K_WAITING, K_PAUSED, K_ERROR, K_COMPLETE, K_REMOVED, K_UNKNOWN } StatusResult;
@@ -53,19 +62,89 @@ class Download {
    : mp_aria(p_aria),
      m_status(K_UNKNOWN),
      m_did(did) {};
  void updateStatus(bool follow=false);

  /**
   * Update the status of the download.
   *
   * This call makes an aria RPC call and is blocking.
   * Some downloads (started with a metalink) are in fact several downloads:
   * - a first one to download the metalink,
   * - a second one to download the real file.
   *
   * If `follow` is true, updateStatus tries to detect that and tracks
   * the second download when the first one is finished.
   * By passing false to `follow`, `Download` will only track the first download.
   *
   * `getFoo` methods are based on the last statusUpdate.
   *
   * @param follow: Whether to follow subsequent downloads.
   */
  void updateStatus(bool follow);

  /**
   * Pause the download (and call updateStatus)
   */
  void pauseDownload();

  /**
   * Resume the download (and call updateStatus)
   */
  void resumeDownload();

  /**
   * Cancel the download.
   *
   * A cancelled download cannot be resumed and updateStatus does nothing.
   * However, you can still get information based on the last known information.
   */
  void cancelDownload();
  StatusResult getStatus() { return m_status; }
  std::string getDid() { return m_did; }
  std::string getFollowedBy() { return m_followedBy; }
  uint64_t getTotalLength() { return m_totalLength; }
  uint64_t getCompletedLength() { return m_completedLength; }
  uint64_t getDownloadSpeed() { return m_downloadSpeed; }
  uint64_t getVerifiedLength() { return m_verifiedLength; }
  std::string getPath() { return m_path; }
  std::vector<std::string>& getUris() { return m_uris; }

  /*
   * Get the status of the download.
   */
  StatusResult getStatus() const { return m_status; }

  /*
   * Get the id of the download.
   */
  const std::string& getDid() const { return m_did; }

  /*
   * Get the id of the "second" download.
   *
   * Set only if the "first" download is a metalink and is complete.
   */
  const std::string& getFollowedBy() const { return m_followedBy; }

  /*
   * Get the total length of the download.
   */
  uint64_t getTotalLength() const { return m_totalLength; }

  /*
   * Get the completed length of the download.
   */
  uint64_t getCompletedLength() const { return m_completedLength; }

  /*
   * Get the download speed of the download.
   */
  uint64_t getDownloadSpeed() const { return m_downloadSpeed; }

  /*
   * Get the verified length of the download.
   */
  uint64_t getVerifiedLength() const { return m_verifiedLength; }

  /*
   * Get the path (local file) of the download.
   */
  const std::string& getPath() const { return m_path; }

  /*
   * Get the download uris of the download.
   */
  const std::vector<std::string>& getUris() const { return m_uris; }

 protected:
  std::shared_ptr<Aria2> mp_aria;
@@ -83,6 +162,9 @@ class Download {
/**
 * A tool to download things.
 *
 * A Downloader manages `Download` using aria2 in the background.
 * `Downloader` is threadsafe.
 * However, the returned `Download`s are NOT threadsafe.
 */
class Downloader
{
@@ -92,14 +174,41 @@ class Downloader

  void close();

  Download* startDownload(const std::string& uri, const std::vector<std::pair<std::string, std::string>>& options = {});
  Download* getDownload(const std::string& did);
  /**
   * Start a new download.
   *
   * This method is thread safe and returns a pointer to a newly created `Download`.
   * User should call `update` on the returned `Download` to have an accurate status.
   *
   * @param uri: The uri of the thing to download.
   * @param options: A series of pairs <option_name, option_value> to pass to aria.
   * @return: The newly created Download.
   */
  std::shared_ptr<Download> startDownload(const std::string& uri, const std::vector<std::pair<std::string, std::string>>& options = {});

  size_t getNbDownload() { return m_knownDownloads.size(); }
  std::vector<std::string> getDownloadIds();
  /**
   * Get a download corresponding to a download id (did)
   * User should call `update` on the returned `Download` to have an accurate status.
   *
   * @param did: The download id to search for.
   * @return: The Download corresponding to did.
   * @throw: Throw std::out_of_range if did is not found.
   */
  std::shared_ptr<Download> getDownload(const std::string& did);

  /**
   * Get the number of downloads currently managed.
   */
  size_t getNbDownload() const;

  /**
   * Get the ids of the managed downloads.
   */
  std::vector<std::string> getDownloadIds() const;

 private:
  std::map<std::string, std::unique_ptr<Download>> m_knownDownloads;
  mutable std::mutex m_lock;
  std::map<std::string, std::shared_ptr<Download>> m_knownDownloads;
  std::shared_ptr<Aria2> mp_aria;
};
}
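The new `Downloader` interface above hands out `std::shared_ptr<Download>` instead of raw pointers and documents the polling model. A minimal usage sketch, assuming the header installs as <kiwix/downloader.h> and that `Downloader` stays default-constructible as in previous releases (the download URI and aria2 options are made-up examples):

```cpp
#include <kiwix/downloader.h>
#include <chrono>
#include <iostream>
#include <thread>

int main() {
  kiwix::Downloader downloader;  // thread-safe manager; the returned Downloads are not
  auto dl = downloader.startDownload(
      "https://example.org/some_book.zim",  // hypothetical URI
      {{"dir", "/tmp"}});                   // aria2 options as <name, value> pairs

  // Poll aria2 until the transfer ends; follow=true tracks the second
  // download that a metalink can spawn, as described in the comments above.
  for (;;) {
    dl->updateStatus(true);
    const auto status = dl->getStatus();
    if (status == kiwix::Download::K_COMPLETE || status == kiwix::Download::K_ERROR)
      break;
    std::cout << dl->getCompletedLength() << "/" << dl->getTotalLength() << " bytes\n";
    std::this_thread::sleep_for(std::chrono::seconds(1));
  }
  std::cout << "finished, local path: " << dl->getPath() << "\n";
}
```

Because the internal map now stores `std::shared_ptr<Download>`, the caller shares ownership of the handle instead of borrowing a raw pointer owned by the Downloader.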
@@ -34,6 +34,10 @@

#define KIWIX_LIBRARY_VERSION "20110515"

namespace Xapian {
  class WritableDatabase;
};

namespace kiwix
{

@@ -105,13 +109,29 @@ class Filter {
  Filter& acceptTags(const Tags& tags);
  Filter& rejectTags(const Tags& tags);

  /**
   * Set the filter to only accept books in the specified category.
   *
   * Multiple categories can be specified as a comma-separated list (in
   * which case a book in any of those categories will match).
   */
  Filter& category(std::string category);

  /**
   * Set the filter to only accept books in the specified language.
   *
   * Multiple languages can be specified as a comma-separated list (in
   * which case a book in any of those languages will match).
   */
  Filter& lang(std::string lang);

  Filter& publisher(std::string publisher);
  Filter& creator(std::string creator);
  Filter& maxSize(size_t size);
  Filter& query(std::string query, bool partial=true);
  Filter& name(std::string name);
  Filter& clearLang();
  Filter& clearCategory();

  bool hasQuery() const;
  const std::string& getQuery() const { return _query; }
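The comments above introduce multi-value filtering: `category()` and `lang()` now accept comma-separated lists, and a book matching any listed value passes. A short sketch, assuming `Library::filter()` and `Library::getBookById()` behave as in existing releases:

```cpp
#include <kiwix/library.h>
#include <string>
#include <vector>

// Collect the titles of books that are in any of the given categories
// AND in any of the given languages.
std::vector<std::string> matchingTitles(const kiwix::LibraryPtr& lib) {
  kiwix::Filter filter;
  filter.category("wikipedia,wikivoyage")  // comma-separated: either category matches
        .lang("fra,ita");                  // comma-separated: either language matches

  std::vector<std::string> titles;
  for (const auto& bookId : lib->filter(filter)) {
    titles.push_back(lib->getBookById(bookId).getTitle());
  }
  return titles;
}
```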
@@ -157,31 +177,53 @@ class ZimSearcher : public zim::Searcher
  std::mutex m_mutex;
};

template<typename, typename>
class ConcurrentCache;

template<typename, typename>
class MultiKeyCache;

using LibraryPtr = std::shared_ptr<Library>;
using ConstLibraryPtr = std::shared_ptr<const Library>;


// Some compilers we use don't have the [[nodiscard]] attribute.
// We don't want to declare `create` with it in this case.
#define LIBKIWIX_NODISCARD
#if defined __has_cpp_attribute
  #if __has_cpp_attribute (nodiscard)
    #undef LIBKIWIX_NODISCARD
    #define LIBKIWIX_NODISCARD [[nodiscard]]
  #endif
#endif

/**
 * A Library stores several books.
 */
class Library
class Library: public std::enable_shared_from_this<Library>
{
  // all data fields must be added in LibraryBase
  mutable std::mutex m_mutex;

 public:
  typedef uint64_t Revision;
  typedef std::vector<std::string> BookIdCollection;
  typedef std::map<std::string, int> AttributeCounts;
  typedef std::set<std::string> BookIdSet;

 public:
 private:
  Library();

 public:
  LIBKIWIX_NODISCARD static LibraryPtr create() {
    return LibraryPtr(new Library());
  }
  ~Library();

  /**
   * Library is not a copiable object. However it can be moved.
   */
  Library(const Library& ) = delete;
  Library(Library&& );
  Library(Library&& ) = delete;
  void operator=(const Library& ) = delete;
  Library& operator=(Library&& );
  Library& operator=(Library&& ) = delete;

  /**
   * Add a book to the library.
@@ -332,8 +374,8 @@ class Library
  /**
   * Return the current revision of the library.
   *
   * The revision of the library is updated (incremented by one) only by
   * the addBook() operation.
   * The revision of the library is updated (incremented by one) by
   * the addBook() and removeBookById() operations.
   *
   * @return Current revision of the library.
   */
@@ -352,19 +394,36 @@ class Library

 private: // types
  typedef const std::string& (Book::*BookStrPropMemFn)() const;
  struct Impl;
  struct Entry : Book
  {
    Library::Revision lastUpdatedRevision = 0;
  };

 private: // functions
  AttributeCounts getBookAttributeCounts(BookStrPropMemFn p) const;
  std::vector<std::string> getBookPropValueSet(BookStrPropMemFn p) const;
  BookIdCollection filterViaBookDB(const Filter& filter) const;
  unsigned int getBookCount_not_protected(const bool localBooks, const bool remoteBooks) const;
  void updateBookDB(const Book& book);
  void dropCache(const std::string& bookId);

 private: //data
  std::unique_ptr<Impl> mp_impl;
  mutable std::mutex m_mutex;
  Library::Revision m_revision;
  std::map<std::string, Entry> m_books;
  using ArchiveCache = ConcurrentCache<std::string, std::shared_ptr<zim::Archive>>;
  std::unique_ptr<ArchiveCache> mp_archiveCache;
  using SearcherCache = MultiKeyCache<std::string, std::shared_ptr<ZimSearcher>>;
  std::unique_ptr<SearcherCache> mp_searcherCache;
  std::vector<kiwix::Bookmark> m_bookmarks;
  std::unique_ptr<Xapian::WritableDatabase> m_bookDB;
};

// We don't need it anymore and we don't want to pollute any other potential usage
// of the `LIBKIWIX_NODISCARD` token.
#undef LIBKIWIX_NODISCARD


}

#endif
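With the private constructor and the `LIBKIWIX_NODISCARD static LibraryPtr create()` factory shown above, a Library now always lives behind a shared pointer. A minimal sketch of the new lifecycle (the Book-filling step is elided and only illustrative):

```cpp
#include <kiwix/book.h>
#include <kiwix/library.h>

kiwix::LibraryPtr makeLibrary() {
  // create() is the only way to obtain a Library; the result must not be discarded.
  kiwix::LibraryPtr library = kiwix::Library::create();

  kiwix::Book book;
  // ... fill `book` from a ZIM archive or an XML node (omitted) ...
  library->addBook(book);  // addBook()/removeBookById() bump the library revision

  // Library is non-copyable and, per this change, no longer movable either,
  // so callers pass the LibraryPtr around and share ownership.
  return library;
}
```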
@@ -37,10 +37,10 @@ namespace kiwix
class LibraryManipulator
{
 public: // functions
  explicit LibraryManipulator(Library* library);
  explicit LibraryManipulator(LibraryPtr library);
  virtual ~LibraryManipulator();

  Library& getLibrary() const { return library; }
  LibraryPtr getLibrary() const { return library; }

  bool addBookToLibrary(const Book& book);
  void addBookmarkToLibrary(const Bookmark& bookmark);
@@ -52,7 +52,7 @@ class LibraryManipulator
  virtual void booksWereRemovedFromLibrary();

 private: // data
  kiwix::Library& library;
  LibraryPtr library;
};

/**
@@ -64,8 +64,8 @@ class Manager
  typedef std::vector<std::string> Paths;

 public: // functions
  explicit Manager(LibraryManipulator* manipulator);
  explicit Manager(Library* library);
  explicit Manager(LibraryManipulator manipulator);
  explicit Manager(LibraryPtr library);

  /**
   * Read a `library.xml` and add book in the file to the library.
@@ -163,7 +163,7 @@ class Manager
  uint64_t m_itemsPerPage = 0;

 protected:
  std::shared_ptr<kiwix::LibraryManipulator> manipulator;
  kiwix::LibraryManipulator manipulator;

  bool readBookFromPath(const std::string& path, Book* book);
  bool parseXmlDom(const pugi::xml_document& doc,
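Illustrative sketch of the new Manager wiring (not part of the diff): the Manager now takes the library as a LibraryPtr and owns its LibraryManipulator by value. Header paths and the library.xml path below are placeholders.

  #include <kiwix/library.h>   // assumed header paths
  #include <kiwix/manager.h>

  int main()
  {
    auto lib = kiwix::Library::create();
    kiwix::Manager manager(lib);              // shared ownership, no raw pointer

    // reload() re-reads the given library XML files and then drops the books
    // that were not seen again (see Manager::reload in this changeset).
    manager.reload({"/data/library.xml"});    // placeholder path
    return 0;
  }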
@@ -4,8 +4,6 @@ headers = [
  'common.h',
  'library.h',
  'manager.h',
  'libxml_dumper.h',
  'opds_dumper.h',
  'downloader.h',
  'search_renderer.h',
  'server.h',
@@ -59,7 +59,7 @@ class HumanReadableNameMapper : public NameMapper {
class UpdatableNameMapper : public NameMapper {
  typedef std::shared_ptr<NameMapper> NameMapperHandle;
 public:
  UpdatableNameMapper(Library& library, bool withAlias);
  UpdatableNameMapper(std::shared_ptr<Library> library, bool withAlias);

  virtual std::string getNameForId(const std::string& id) const;
  virtual std::string getIdForName(const std::string& name) const;
@@ -71,7 +71,7 @@ class UpdatableNameMapper : public NameMapper {

 private:
  mutable std::mutex mutex;
  Library& library;
  std::shared_ptr<Library> library;
  NameMapperHandle nameMapper;
  const bool withAlias;
};
@@ -37,29 +37,11 @@ class SearchRenderer
  /**
   * Construct a SearchRenderer from a SearchResultSet.
   *
   * The constructed version of the SearchRenderer will not introduce
   * the book name for each result. It is better to use the other constructor
   * with a Library pointer to have a better html page.
   *
   * @param srs The `SearchResultSet` to render.
   * @param mapper The `NameMapper` to use to do the rendering.
   * @param start The start offset used for the srs.
   * @param estimatedResultCount The estimatedResultCount of the whole search
   */
  SearchRenderer(zim::SearchResultSet srs, NameMapper* mapper,
                 unsigned int start, unsigned int estimatedResultCount);

  /**
   * Construct a SearchRenderer from a SearchResultSet.
   *
   * @param srs The `SearchResultSet` to render.
   * @param mapper The `NameMapper` to use to do the rendering.
   * @param library The `Library` to use to look up book details for search results.
   * @param start The start offset used for the srs.
   * @param estimatedResultCount The estimatedResultCount of the whole search
   */
  SearchRenderer(zim::SearchResultSet srs, NameMapper* mapper, Library* library,
                 unsigned int start, unsigned int estimatedResultCount);
  SearchRenderer(zim::SearchResultSet srs, unsigned int start, unsigned int estimatedResultCount);

  ~SearchRenderer();

@@ -90,24 +72,32 @@ class SearchRenderer
    this->pageLength = pageLength;
  }

  std::string renderTemplate(const std::string& tmpl_str);

  /**
   * Generate the html page with the resutls of the search.
   *
   * @param mapper The `NameMapper` to use to do the rendering.
   * @param library The `Library` to use to look up book details for search results.
   *                May be nullptr. In this case, bookName is not set in the rendered string.
   * @return The html string
   */
  std::string getHtml();
  std::string getHtml(const NameMapper& mapper, const Library* library);

  /**
  /**
   * Generate the xml page with the resutls of the search.
   *
   * @param mapper The `NameMapper` to use to do the rendering.
   * @param library The `Library` to use to look up book details for search results.
   *                May be nullptr. In this case, bookName is not set in the rendered string.
   * @return The xml string
   */
  std::string getXml();
  std::string getXml(const NameMapper& mapper, const Library* library);

 protected: // function
  std::string renderTemplate(const std::string& tmpl_str, const NameMapper& mapper, const Library *library);

 protected:
  std::string beautifyInteger(const unsigned int number);
  zim::SearchResultSet m_srs;
  NameMapper* mp_nameMapper;
  Library* mp_library;
  std::string searchBookQuery;
  std::string searchPattern;
  std::string protocolPrefix;
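Illustrative sketch (not part of the diff): the renderer no longer stores a NameMapper or Library; both are supplied at rendering time. The setter name setPageLength is assumed from the `this->pageLength = pageLength` hunk above.

  std::string renderFirstPage(zim::SearchResultSet results,
                              unsigned int estimatedResultCount,
                              const kiwix::NameMapper& nameMapper,
                              const kiwix::Library* lib)
  {
    kiwix::SearchRenderer renderer(results, /*start=*/0, estimatedResultCount);
    renderer.setPageLength(25);                      // assumed setter name
    // Passing a Library lets the page include book names; nullptr is allowed.
    return renderer.getHtml(nameMapper, lib);
  }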
@@ -36,7 +36,7 @@ namespace kiwix
   *
   * @param library The library to serve.
   */
  Server(Library* library, NameMapper* nameMapper=nullptr);
  Server(std::shared_ptr<Library> library, std::shared_ptr<NameMapper> nameMapper=nullptr);

  virtual ~Server();

@@ -66,8 +66,8 @@ namespace kiwix
  std::string getAddress();

 protected:
  Library* mp_library;
  NameMapper* mp_nameMapper;
  std::shared_ptr<Library> mp_library;
  std::shared_ptr<NameMapper> mp_nameMapper;
  std::string m_root = "";
  std::string m_addr = "";
  std::string m_indexTemplateString = "";
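Illustrative sketch (not part of the diff): the server now shares ownership of the library and of an optional NameMapper instead of borrowing raw pointers. Header paths are assumptions.

  #include <iostream>
  #include <kiwix/library.h>   // assumed header paths
  #include <kiwix/server.h>

  int main()
  {
    auto lib = kiwix::Library::create();
    kiwix::Server server(lib);                       // nameMapper defaults to nullptr

    std::cout << server.getAddress() << std::endl;   // getAddress() shown above
    return 0;
  }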
@@ -23,8 +23,12 @@
#include <string>
#include <vector>
#include <map>
#include <cstdint>

namespace kiwix {
typedef std::pair<std::string, std::string> LangNameCodePair;
typedef std::vector<LangNameCodePair> FeedLanguages;
typedef std::vector<std::string> FeedCategories;

/**
 * Return the current directory.
@@ -216,5 +220,37 @@ std::map<std::string, std::string> getNetworkInterfaces();
 */
std::string getBestPublicIp();

/** Converts file size to human readable format.
 *
 * This function will convert a number to its equivalent size using units.
 *
 * @param number file size in bytes.
 * @return a human-readable string representation of the size, e.g., "2.3 KB", "1.8 MB", "5.2 GB".
 */
std::string beautifyFileSize(uint64_t number);

/**
 * Load languages stored in an OPDS stream.
 *
 * @param content the OPDS stream.
 * @return vector containing pairs of language code and their corresponding full language name.
 */
FeedLanguages readLanguagesFromFeed(const std::string& content);

/**
 * Load categories stored in an OPDS stream .
 *
 * @param content the OPDS stream.
 * @return vector containing category strings.
 */
FeedCategories readCategoriesFromFeed(const std::string& content);

/**
 * Retrieve the full language name associated with a given ISO 639-3 language code.
 *
 * @param lang ISO 639-3 language code.
 * @return full language name.
 */
std::string getLanguageSelfName(const std::string& lang);
}
#endif // KIWIX_TOOLS_H
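Illustrative sketch of the helpers declared above (not part of the diff); the header path is assumed and the exact printed strings depend on the implementation.

  #include <iostream>
  #include <kiwix/tools.h>     // assumed header path

  int main()
  {
    // beautifyFileSize() takes a size in bytes and returns a string such as "2.0 GB".
    std::cout << kiwix::beautifyFileSize(2147483648ULL) << std::endl;

    // getLanguageSelfName() maps an ISO 639-3 code to the language's own name.
    std::cout << kiwix::getLanguageSelfName("fra") << std::endl;
    return 0;
  }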
@@ -1,7 +1,7 @@
project('libkiwix', 'cpp',
  version : '11.0.0',
  version : '13.0.0',
  license : 'GPLv3+',
  default_options : ['c_std=c11', 'cpp_std=c++11', 'werror=true'])
  default_options : ['c_std=c11', 'cpp_std=c++17', 'werror=true'])

compiler = meson.get_compiler('cpp')

@@ -35,8 +35,8 @@ else
  error('Cannot found header mustache.hpp')
endif

libzim_dep = dependency('libzim', version : '>=7.2.0', static:static_deps)
if not compiler.has_header_symbol('zim/zim.h', 'LIBZIM_WITH_XAPIAN')
libzim_dep = dependency('libzim', version : '>=8.1.0', static:static_deps)
if not compiler.has_header_symbol('zim/zim.h', 'LIBZIM_WITH_XAPIAN', dependencies: libzim_dep)
  error('Libzim seems to be compiled without xapian. Xapian support is mandatory.')
endif
scripts/format_code.sh (new executable file, 14 lines)
@@ -0,0 +1,14 @@
#!/usr/bin/bash

# Compute 'src' path
SCRIPT_DIR=$(dirname "$0")
REPO_DIR=$(readlink -f "$SCRIPT_DIR"/..)
DIRS="src include"

# Apply formating to all *.cpp and *.h files
cd "$REPO_DIR"
for FILE in $(find $DIRS -name '*.h' -o -name '*.cpp')
do
  echo $FILE
  clang-format -i -style=file "$FILE"
done
@@ -52,15 +52,21 @@ resource_getter_template = """
    return RESOURCE::{identifier};
"""

resource_cacheid_getter_template = """
  if (name == "{common_name}")
    return "{cacheid}";
"""

resource_decl_template = """{namespaces_open}
extern const std::string {identifier};
{namespaces_close}"""

class Resource:
    def __init__(self, base_dirs, filename):
        filename = filename.strip()
    def __init__(self, base_dirs, filename, cacheid=None):
        filename = filename
        self.filename = filename
        self.identifier = full_identifier(filename)
        self.cacheid = cacheid
        found = False
        for base_dir in base_dirs:
            try:
@@ -71,7 +77,7 @@ class Resource:
            except FileNotFoundError:
                continue
        if not found:
            raise Exception("Impossible to found {}".format(filename))
            raise Exception("Resource not found: {}".format(filename))

    def dump_impl(self):
        nb_row = len(self.data)//16 + (1 if len(self.data) % 16 else 0)
@@ -93,6 +99,12 @@ class Resource:
            identifier="::".join(self.identifier)
        )

    def dump_cacheid_getter(self):
        return resource_cacheid_getter_template.format(
            common_name=self.filename,
            cacheid=self.cacheid
        )

    def dump_decl(self):
        return resource_decl_template.format(
            namespaces_open=" ".join("namespace {} {{".format(id) for id in self.identifier[:-1]),
@@ -123,7 +135,12 @@ static std::string init_resource(const char* name, const unsigned char* content,

const std::string& getResource_{basename}(const std::string& name) {{
{RESOURCES_GETTER}
  throw ResourceNotFound("Resource not found.");
  throw ResourceNotFound("Resource not found: " + name);
}}

const char* getResourceCacheId_{basename}(const std::string& name) {{
{RESOURCE_CACHEID_GETTER}
  return nullptr;
}}

{RESOURCES}
@@ -134,6 +151,7 @@ def gen_c_file(resources, basename):
    return master_c_template.format(
        RESOURCES="\n\n".join(r.dump_impl() for r in resources),
        RESOURCES_GETTER="\n\n".join(r.dump_getter() for r in resources),
        RESOURCE_CACHEID_GETTER="\n\n".join(r.dump_cacheid_getter() for r in resources if r.cacheid is not None),
        include_file=basename,
        basename=to_identifier(basename)
    )
@@ -159,8 +177,10 @@ class ResourceNotFound : public std::runtime_error {{
}};

const std::string& getResource_{basename}(const std::string& name);
const char* getResourceCacheId_{basename}(const std::string& name);

#define getResource(a) (getResource_{basename}(a))
#define getResourceCacheId(a) (getResourceCacheId_{basename}(a))

#endif // KIWIX_{BASENAME}

@@ -182,15 +202,17 @@ if __name__ == "__main__":
    parser.add_argument('--source_dir',
                        help="Additional directory where to look for resources.",
                        action='append')
    parser.add_argument('resource_file',
    parser.add_argument('resource_files', nargs='+',
                        help='The list of resources to compile.')
    args = parser.parse_args()

    base_dir = os.path.dirname(os.path.realpath(args.resource_file))
    source_dir = args.source_dir or []
    with open(args.resource_file, 'r') as f:
        resources = [Resource([base_dir]+source_dir, filename)
                     for filename in f.readlines()]
    resources = []
    for resfile in args.resource_files:
        base_dir = os.path.dirname(os.path.realpath(resfile))
        with open(resfile, 'r') as f:
            resources += [Resource([base_dir]+source_dir, *line.strip().split())
                          for line in f.readlines()]

    h_identifier = to_identifier(os.path.basename(args.hfile))
    with open(args.hfile, 'w') as f:
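Illustrative sketch (not part of the diff): code that includes a generated resource header, as src/html_dumper.cpp does with "libkiwix-resources.h", can now also query the cache id recorded for a resource. The resource names below are placeholders, not actual entries of the resource list.

  #include "libkiwix-resources.h"

  void example()
  {
    // Throws ResourceNotFound (now reporting the name) on a miss.
    const std::string& body = getResource("templates/index.html");   // placeholder name
    // Returns nullptr when no cache id was attached by the build.
    const char* cacheid = getResourceCacheId("skin/index.js");       // placeholder name
    (void)body; (void)cacheid;
  }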
@@ -2,7 +2,7 @@
.SH NAME
kiwix-compile-resources \- helper to compile and generate some Kiwix resources
.SH SYNOPSIS
\fBkiwix\-compile\-resources\fR [\-h] [\-\-cxxfile CXXFILE] [\-\-hfile HFILE] resource_file\fR
\fBkiwix\-compile\-resources\fR [\-h] [\-\-cxxfile CXXFILE] [\-\-hfile HFILE] resource_file ...\fR
.SH DESCRIPTION
.TP
resource_file
@@ -99,16 +99,24 @@ def preprocess_resource(resource_path):
        print(preprocessed_content, end='', file=target)


def copy_file(src_path, dst_path):
    with open(src_path, 'rb') as src:
        with open(dst_path, 'wb') as dst:
            dst.write(src.read())
def copy_resource_list_file(src_path, dst_path):
    with open(src_path, 'r') as src:
        with open(dst_path, 'w') as dst:
            for line in src:
                res = line.strip()
                if line.startswith("skin/") and res in resource_revisions:
                    dst.write(res + " " + resource_revisions[res] + "\n")
                else:
                    dst.write(line)

def preprocess_resources(resource_file_path):
    resource_filename = os.path.basename(resource_file_path)
    for resource in read_resource_file(resource_file_path):
        preprocess_resource(resource)
    copy_file(resource_file_path, os.path.join(OUT_DIR, resource_filename))
        if resource.startswith('skin/'):
            get_resource_revision(resource)
        else:
            preprocess_resource(resource)
    copy_resource_list_file(resource_file_path, os.path.join(OUT_DIR, resource_filename))

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
@@ -24,7 +24,7 @@
#define LOG_ARIA_ERROR() \
  { \
    std::cerr << "ERROR: aria2 RPC request failed. (" << res << ")." << std::endl; \
    std::cerr << (m_curlErrorBuffer[0] ? m_curlErrorBuffer.get() : curl_easy_strerror(res)) << std::endl; \
    std::cerr << (curlErrorBuffer[0] ? curlErrorBuffer : curl_easy_strerror(res)) << std::endl; \
  }

namespace kiwix {
@@ -32,9 +32,7 @@ namespace kiwix {
Aria2::Aria2():
  mp_aria(nullptr),
  m_port(42042),
  m_secret(getNewRpcSecret()),
  m_curlErrorBuffer(new char[CURL_ERROR_SIZE]),
  mp_curl(nullptr)
  m_secret(getNewRpcSecret())
{
  m_downloadDir = getDataDirectory();
  makeDirectory(m_downloadDir);
@@ -91,36 +89,32 @@ Aria2::Aria2():
    launchCmd.append(cmd).append(" ");
  }
  mp_aria = Subprocess::run(callCmd);
  mp_curl = curl_easy_init();

  curl_easy_setopt(mp_curl, CURLOPT_URL, "http://localhost/rpc");
  curl_easy_setopt(mp_curl, CURLOPT_PORT, m_port);
  curl_easy_setopt(mp_curl, CURLOPT_POST, 1L);
  curl_easy_setopt(mp_curl, CURLOPT_ERRORBUFFER, m_curlErrorBuffer.get());
  CURL* p_curl = curl_easy_init();
  char curlErrorBuffer[CURL_ERROR_SIZE];

  curl_easy_setopt(p_curl, CURLOPT_URL, "http://localhost/rpc");
  curl_easy_setopt(p_curl, CURLOPT_PORT, m_port);
  curl_easy_setopt(p_curl, CURLOPT_POST, 1L);
  curl_easy_setopt(p_curl, CURLOPT_ERRORBUFFER, curlErrorBuffer);

  int watchdog = 50;
  while(--watchdog) {
    sleep(10);
    m_curlErrorBuffer[0] = 0;
    auto res = curl_easy_perform(mp_curl);
    curlErrorBuffer[0] = 0;
    auto res = curl_easy_perform(p_curl);
    if (res == CURLE_OK) {
      break;
    } else if (watchdog == 1) {
      LOG_ARIA_ERROR();
    }
  }
  curl_easy_cleanup(p_curl);
  if (!watchdog) {
    curl_easy_cleanup(mp_curl);
    throw std::runtime_error("Cannot connect to aria2c rpc. Aria2c launch cmd : " + launchCmd);
  }
}

Aria2::~Aria2()
{
  std::unique_lock<std::mutex> lock(m_lock);
  curl_easy_cleanup(mp_curl);
}

void Aria2::close()
{
  saveSession();
@@ -140,20 +134,25 @@ std::string Aria2::doRequest(const MethodCall& methodCall)
  std::stringstream outStream;
  CURLcode res;
  long response_code;
  {
    std::unique_lock<std::mutex> lock(m_lock);
    curl_easy_setopt(mp_curl, CURLOPT_POSTFIELDSIZE, requestContent.size());
    curl_easy_setopt(mp_curl, CURLOPT_POSTFIELDS, requestContent.c_str());
    curl_easy_setopt(mp_curl, CURLOPT_WRITEFUNCTION, &write_callback_to_iss);
    curl_easy_setopt(mp_curl, CURLOPT_WRITEDATA, &outStream);
    m_curlErrorBuffer[0] = 0;
    res = curl_easy_perform(mp_curl);
    if (res != CURLE_OK) {
      LOG_ARIA_ERROR();
      throw std::runtime_error("Cannot perform request");
    }
    curl_easy_getinfo(mp_curl, CURLINFO_RESPONSE_CODE, &response_code);
    char curlErrorBuffer[CURL_ERROR_SIZE];
    CURL* p_curl = curl_easy_init();
    curl_easy_setopt(p_curl, CURLOPT_URL, "http://localhost/rpc");
    curl_easy_setopt(p_curl, CURLOPT_PORT, m_port);
    curl_easy_setopt(p_curl, CURLOPT_POST, 1L);
    curl_easy_setopt(p_curl, CURLOPT_ERRORBUFFER, curlErrorBuffer);
    curl_easy_setopt(p_curl, CURLOPT_POSTFIELDSIZE, requestContent.size());
    curl_easy_setopt(p_curl, CURLOPT_POSTFIELDS, requestContent.c_str());
    curl_easy_setopt(p_curl, CURLOPT_WRITEFUNCTION, &write_callback_to_iss);
    curl_easy_setopt(p_curl, CURLOPT_WRITEDATA, &outStream);
    curlErrorBuffer[0] = 0;
    res = curl_easy_perform(p_curl);
    if (res != CURLE_OK) {
      LOG_ARIA_ERROR();
      curl_easy_cleanup(p_curl);
      throw std::runtime_error("Cannot perform request");
    }
    curl_easy_getinfo(p_curl, CURLINFO_RESPONSE_CODE, &response_code);
    curl_easy_cleanup(p_curl);

  auto responseContent = outStream.str();
  if (response_code != 200) {
@@ -12,7 +12,6 @@
#include "xmlrpc.h"

#include <memory>
#include <mutex>
#include <curl/curl.h>

namespace kiwix {
@@ -24,15 +23,11 @@ class Aria2
  int m_port;
  std::string m_secret;
  std::string m_downloadDir;
  std::unique_ptr<char[]> m_curlErrorBuffer;
  CURL* mp_curl;
  std::mutex m_lock;

  std::string doRequest(const MethodCall& methodCall);

 public:
  Aria2();
  virtual ~Aria2();
  virtual ~Aria2() = default;
  void close();

  std::string addUri(const std::vector<std::string>& uri, const std::vector<std::pair<std::string, std::string>>& options = {});
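The two hunks above replace the long-lived mp_curl member with a per-request easy handle. A hedged illustration of the same idea with RAII cleanup (not code from the repository):

  #include <curl/curl.h>
  #include <memory>
  #include <stdexcept>
  #include <string>

  void rpcPost(int port, const std::string& body)
  {
    using CurlHandle = std::unique_ptr<CURL, decltype(&curl_easy_cleanup)>;
    CurlHandle curl(curl_easy_init(), &curl_easy_cleanup);   // cleaned up on every return path
    if (!curl)
      throw std::runtime_error("curl_easy_init failed");

    curl_easy_setopt(curl.get(), CURLOPT_URL, "http://localhost/rpc");
    curl_easy_setopt(curl.get(), CURLOPT_PORT, static_cast<long>(port));
    curl_easy_setopt(curl.get(), CURLOPT_POSTFIELDS, body.c_str());

    if (curl_easy_perform(curl.get()) != CURLE_OK)
      throw std::runtime_error("Cannot perform request");
    // Response handling omitted for brevity.
  }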
src/book.cpp (20 changed lines)
@@ -66,7 +66,7 @@ bool Book::update(const kiwix::Book& other)
void Book::update(const zim::Archive& archive) {
  m_path = archive.getFilename();
  m_pathValid = true;
  m_id = getArchiveId(archive);
  m_id = std::string(archive.getUuid());
  m_title = getArchiveTitle(archive);
  m_description = getMetaDescription(archive);
  m_language = getMetaLanguage(archive);
@@ -77,8 +77,8 @@ void Book::update(const zim::Archive& archive) {
  m_flavour = getMetaFlavour(archive);
  m_tags = getMetaTags(archive);
  m_category = getCategoryFromTags();
  m_articleCount = getArchiveArticleCount(archive);
  m_mediaCount = getArchiveMediaCount(archive);
  m_articleCount = archive.getArticleCount();
  m_mediaCount = archive.getMediaCount();
  m_size = static_cast<uint64_t>(getArchiveFileSize(archive)) << 10;

  m_illustrations.clear();
@@ -117,11 +117,12 @@ void Book::updateFromXml(const pugi::xml_node& node, const std::string& baseDir)
  m_articleCount = strtoull(ATTR("articleCount"), 0, 0);
  m_mediaCount = strtoull(ATTR("mediaCount"), 0, 0);
  m_size = strtoull(ATTR("size"), 0, 0) << 10;
  std::string favicon_mimetype = ATTR("faviconMimeType");
  if (! favicon_mimetype.empty()) {
  const std::string faviconMimeType = ATTR("faviconMimeType");
  const std::string faviconBase64EncodedData = ATTR("favicon");
  if ( !faviconMimeType.empty() && !faviconBase64EncodedData.empty() ) {
    const auto favicon = std::make_shared<Illustration>();
    favicon->data = base64_decode(ATTR("favicon"));
    favicon->mimeType = favicon_mimetype;
    favicon->data = base64_decode(faviconBase64EncodedData);
    favicon->mimeType = faviconMimeType;
    favicon->url = ATTR("faviconUrl");
    m_illustrations.assign(1, favicon);
  }
@@ -286,4 +287,9 @@ std::string Book::getCategoryFromTags() const
  }
}

const std::vector<std::string> Book::getLanguages() const
{
  return kiwix::split(m_language, ",");
}

}
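Illustrative sketch (not part of the diff): a book's language metadata is now exposed both as a list and, via getCommaSeparatedLanguages() used elsewhere in this changeset, as the raw comma-separated string. The header path is an assumption.

  #include <iostream>
  #include <string>
  #include <kiwix/library.h>   // assumed header path

  void printBookLanguages(const kiwix::Library& lib, const std::string& bookId)
  {
    const kiwix::Book& book = lib.getBookById(bookId);
    for (const auto& lang : book.getLanguages())                  // e.g. {"eng", "fra"}
      std::cout << lang << std::endl;
    std::cout << book.getCommaSeparatedLanguages() << std::endl;  // e.g. "eng,fra"
  }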
@@ -127,22 +127,24 @@ void Download::cancelDownload()
Downloader::Downloader() :
  mp_aria(new Aria2())
{
  try {
    for (auto gid : mp_aria->tellActive()) {
      m_knownDownloads[gid] = std::unique_ptr<Download>(new Download(mp_aria, gid));
      m_knownDownloads[gid]->updateStatus();
    }
  } catch (std::exception& e) {
    std::cerr << "aria2 tellActive failed : " << e.what() << std::endl;
  }
  try {
    for (auto gid : mp_aria->tellWaiting()) {
      m_knownDownloads[gid] = std::unique_ptr<Download>(new Download(mp_aria, gid));
      m_knownDownloads[gid]->updateStatus();
      m_knownDownloads[gid]->updateStatus(false);
    }
  } catch (std::exception& e) {
    std::cerr << "aria2 tellWaiting failed : " << e.what() << std::endl;
  }
  try {
    for (auto gid : mp_aria->tellActive()) {
      if( m_knownDownloads.find(gid) == m_knownDownloads.end()) {
        m_knownDownloads[gid] = std::unique_ptr<Download>(new Download(mp_aria, gid));
        m_knownDownloads[gid]->updateStatus(false);
      }
    }
  } catch (std::exception& e) {
    std::cerr << "aria2 tellActive failed : " << e.what() << std::endl;
  }
}

/* Destructor */
@@ -155,7 +157,8 @@ void Downloader::close()
  mp_aria->close();
}

std::vector<std::string> Downloader::getDownloadIds() {
std::vector<std::string> Downloader::getDownloadIds() const {
  std::unique_lock<std::mutex> lock(m_lock);
  std::vector<std::string> ret;
  for(auto& p:m_knownDownloads) {
    ret.push_back(p.first);
@@ -163,42 +166,46 @@ std::vector<std::string> Downloader::getDownloadIds() {
  return ret;
}

Download* Downloader::startDownload(const std::string& uri, const std::vector<std::pair<std::string, std::string>>& options)
std::shared_ptr<Download> Downloader::startDownload(const std::string& uri, const std::vector<std::pair<std::string, std::string>>& options)
{
  std::unique_lock<std::mutex> lock(m_lock);
  for (auto& p: m_knownDownloads) {
    auto& d = p.second;
    auto& uris = d->getUris();
    if (std::find(uris.begin(), uris.end(), uri) != uris.end())
      return d.get();
      return d;
  }
  std::vector<std::string> uris = {uri};
  auto gid = mp_aria->addUri(uris, options);
  m_knownDownloads[gid] = std::unique_ptr<Download>(new Download(mp_aria, gid));
  return m_knownDownloads[gid].get();
  m_knownDownloads[gid] = std::make_shared<Download>(mp_aria, gid);
  return m_knownDownloads[gid];
}

Download* Downloader::getDownload(const std::string& did)
std::shared_ptr<Download> Downloader::getDownload(const std::string& did)
{
  std::unique_lock<std::mutex> lock(m_lock);
  try {
    m_knownDownloads.at(did).get()->updateStatus(true);
    return m_knownDownloads.at(did).get();
    return m_knownDownloads.at(did);
  } catch(std::exception& e) {
    for (auto gid : mp_aria->tellActive()) {
      if (gid == did) {
        m_knownDownloads[gid] = std::unique_ptr<Download>(new Download(mp_aria, gid));
        m_knownDownloads.at(gid).get()->updateStatus(true);
        return m_knownDownloads[gid].get();
      }
    }
    for (auto gid : mp_aria->tellWaiting()) {
      if (gid == did) {
        m_knownDownloads[gid] = std::unique_ptr<Download>(new Download(mp_aria, gid));
        m_knownDownloads.at(gid).get()->updateStatus(true);
        return m_knownDownloads[gid].get();
        m_knownDownloads[gid] = std::make_shared<Download>(mp_aria, gid);
        return m_knownDownloads[gid];
      }
    }
    }
    for (auto gid : mp_aria->tellActive()) {
      if (gid == did) {
        m_knownDownloads[gid] = std::make_shared<Download>(mp_aria, gid);
        return m_knownDownloads[gid];
      }
    }
    throw e;
  }
}

size_t Downloader::getNbDownload() const {
  std::unique_lock<std::mutex> lock(m_lock);
  return m_knownDownloads.size();
}

}
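Illustrative sketch (not part of the diff): downloads are now handed out as std::shared_ptr<Download>, so the handle stays valid independently of the Downloader's internal map. The URL and header path are placeholders.

  #include <iostream>
  #include <kiwix/downloader.h>   // assumed header path

  int main()
  {
    kiwix::Downloader downloader;

    auto download = downloader.startDownload("https://example.org/some.zim", {});  // placeholder URL
    download->updateStatus(true);

    std::cout << downloader.getNbDownload() << " download(s) known" << std::endl;
    downloader.close();
    return 0;
  }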
src/html_dumper.cpp (new file, 120 lines)
@@ -0,0 +1,120 @@
#include "html_dumper.h"
#include "libkiwix-resources.h"
#include "tools/otherTools.h"
#include "tools.h"
#include "tools/regexTools.h"
#include "server/i18n.h"

namespace kiwix
{

/* Constructor */
HTMLDumper::HTMLDumper(const Library* library, const NameMapper* nameMapper)
  : LibraryDumper(library, nameMapper)
{
}
/* Destructor */
HTMLDumper::~HTMLDumper()
{
}

namespace {

std::string humanFriendlyTitle(std::string title)
{
  std::string humanFriendlyString = replaceRegex(title, "_", " ");
  humanFriendlyString[0] = toupper(humanFriendlyString[0]);
  return humanFriendlyString;
}

kainjow::mustache::list getTagList(std::string tags)
{
  const auto tagsList = kiwix::split(tags, ";", true, false);
  kainjow::mustache::list finalTagList;
  for (auto tag : tagsList) {
    if (tag[0] != '_')
      finalTagList.push_back(kainjow::mustache::object{
        {"tag", tag}
      });
  }
  return finalTagList;
}

} // unnamed namespace

std::string HTMLDumper::dumpPlainHTML(kiwix::Filter filter) const
{
  kainjow::mustache::list booksData;
  const auto filteredBooks = library->filter(filter);
  const auto searchQuery = filter.getQuery();
  auto languages = getLanguageData();
  auto categories = getCategoryData();

  for (auto &category : categories) {
    const auto categoryName = category.get("name")->string_value();
    if (categoryName == filter.getCategory()) {
      category["selected"] = true;
    }
    category["hf_name"] = humanFriendlyTitle(categoryName);
  }

  for (auto &language : languages) {
    if (language.get("lang_code")->string_value() == filter.getLang()) {
      language["selected"] = true;
    }
  }

  for ( const auto& bookId : filteredBooks ) {
    const auto bookObj = library->getBookById(bookId);
    const auto bookTitle = bookObj.getTitle();
    std::string contentId = "";
    try {
      contentId = urlEncode(nameMapper->getNameForId(bookId));
    } catch (...) {}
    const auto bookDescription = bookObj.getDescription();
    const auto langCode = bookObj.getCommaSeparatedLanguages();
    const auto bookIconUrl = rootLocation + "/catalog/v2/illustration/" + bookId + "/?size=48";
    const auto tags = bookObj.getTags();
    const auto downloadAvailable = (bookObj.getUrl() != "");
    std::string faviconAttr = "style=background-image:url(" + bookIconUrl + ")";

    booksData.push_back(kainjow::mustache::object{
      {"id", contentId},
      {"title", bookTitle},
      {"description", bookDescription},
      {"langCode", langCode},
      {"faviconAttr", faviconAttr},
      {"tagList", getTagList(tags)},
      {"downloadAvailable", downloadAvailable}
    });
  }

  auto getTranslation = i18n::GetTranslatedStringWithMsgId(m_userLang);

  const auto translations = kainjow::mustache::object{
    getTranslation("search"),
    getTranslation("download"),
    getTranslation("count-of-matching-books", {{"COUNT", to_string(filteredBooks.size())}}),
    getTranslation("book-filtering-all-categories"),
    getTranslation("book-filtering-all-languages"),
    getTranslation("powered-by-kiwix-html"),
    getTranslation("welcome-to-kiwix-server"),
    getTranslation("preview-book"),
    getTranslation("welcome-page-overzealous-filter", {{"URL", "?lang="}})
  };

  return render_template(
    RESOURCE::templates::no_js_library_page_html,
    kainjow::mustache::object{
      {"root", rootLocation},
      {"books", booksData },
      {"searchQuery", searchQuery},
      {"languages", languages},
      {"categories", categories},
      {"noResults", filteredBooks.size() == 0},
      {"translations", translations}
    }
  );
}

} // namespace kiwix
src/html_dumper.h (new file, 50 lines)
@@ -0,0 +1,50 @@
/*
 * Copyright 2023 Nikhil Tanwar <2002nikhiltanwar@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */

#ifndef KIWIX_HTML_DUMPER_H
#define KIWIX_HTML_DUMPER_H

#include <string>

#include "library_dumper.h"

namespace kiwix
{

/**
 * A class to dump Library in HTML format.
 */
class HTMLDumper : public LibraryDumper
{
 public:
  HTMLDumper(const Library* library, const NameMapper* NameMapper);
  ~HTMLDumper();


  /**
   * Dump library in HTML
   *
   * @return HTML content
   */
  std::string dumpPlainHTML(kiwix::Filter filter) const;
};

}

#endif // KIWIX_HTML_DUMPER_H
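Illustrative sketch (not part of the diff): rendering the no-JavaScript library page through the new dumper. The Filter::lang() setter is assumed from the getLang()/clearLang() calls elsewhere in this changeset.

  auto lib = kiwix::Library::create();
  kiwix::UpdatableNameMapper nameMapper(lib, /*withAlias=*/false);

  kiwix::HTMLDumper dumper(lib.get(), &nameMapper);
  dumper.setRootLocation("");                                                   // inherited from LibraryDumper
  const std::string page = dumper.dumpPlainHTML(kiwix::Filter().lang("eng"));   // lang() setter assumed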
src/library.cpp (203 changed lines)
@@ -39,6 +39,8 @@
namespace kiwix
{


namespace
{

@@ -58,6 +60,8 @@ bool booksReferToTheSameArchive(const Book& book1, const Book& book2)
    && book1.getPath() == book2.getPath();
}

} // unnamed namespace

template<typename Key, typename Value>
class MultiKeyCache: public ConcurrentCache<std::set<Key>, Value>
{
@@ -79,49 +83,8 @@ class MultiKeyCache: public ConcurrentCache<std::set<Key>, Value>
  }
};

} // unnamed namespace

struct Library::Impl
{
  struct Entry : Book
  {
    Library::Revision lastUpdatedRevision = 0;
  };

  Library::Revision m_revision;
  std::map<std::string, Entry> m_books;
  using ArchiveCache = ConcurrentCache<std::string, std::shared_ptr<zim::Archive>>;
  std::unique_ptr<ArchiveCache> mp_archiveCache;
  using SearcherCache = MultiKeyCache<std::string, std::shared_ptr<ZimSearcher>>;
  std::unique_ptr<SearcherCache> mp_searcherCache;
  std::vector<kiwix::Bookmark> m_bookmarks;
  Xapian::WritableDatabase m_bookDB;

  unsigned int getBookCount(const bool localBooks, const bool remoteBooks) const;

  Impl();
  ~Impl();

  Impl(Impl&& );
  Impl& operator=(Impl&& );
};

Library::Impl::Impl()
  : mp_archiveCache(new ArchiveCache(std::max(getEnvVar<int>("KIWIX_ARCHIVE_CACHE_SIZE", 1), 1))),
    mp_searcherCache(new SearcherCache(std::max(getEnvVar<int>("KIWIX_SEARCHER_CACHE_SIZE", 1), 1))),
    m_bookDB("", Xapian::DB_BACKEND_INMEMORY)
{
}

Library::Impl::~Impl()
{
}

Library::Impl::Impl(Library::Impl&& ) = default;
Library::Impl& Library::Impl::operator=(Library::Impl&& ) = default;

unsigned int
Library::Impl::getBookCount(const bool localBooks, const bool remoteBooks) const
Library::getBookCount_not_protected(const bool localBooks, const bool remoteBooks) const
{
  unsigned int result = 0;
  for (auto& pair: m_books) {
@@ -136,50 +99,41 @@ Library::Impl::getBookCount(const bool localBooks, const bool remoteBooks) const

/* Constructor */
Library::Library()
  : mp_impl(new Library::Impl)
  : mp_archiveCache(new ArchiveCache(std::max(getEnvVar<int>("KIWIX_ARCHIVE_CACHE_SIZE", 1), 1))),
    mp_searcherCache(new SearcherCache(std::max(getEnvVar<int>("KIWIX_SEARCHER_CACHE_SIZE", 1), 1))),
    m_bookDB(new Xapian::WritableDatabase("", Xapian::DB_BACKEND_INMEMORY))
{
}

Library::Library(Library&& other)
  : mp_impl(std::move(other.mp_impl))
{
}

Library& Library::operator=(Library&& other)
{
  mp_impl = std::move(other.mp_impl);
  return *this;
}

/* Destructor */
Library::~Library() = default;

bool Library::addBook(const Book& book)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  ++mp_impl->m_revision;
  ++m_revision;
  /* Try to find it */
  updateBookDB(book);
  try {
    auto& oldbook = mp_impl->m_books.at(book.getId());
    auto& oldbook = m_books.at(book.getId());
    if ( ! booksReferToTheSameArchive(oldbook, book) ) {
      dropCache(book.getId());
    }
    oldbook.update(book); // XXX: This may have no effect if oldbook is readonly
                          // XXX: Then m_bookDB will become out-of-sync with
                          // XXX: the real contents of the library.
    oldbook.lastUpdatedRevision = mp_impl->m_revision;
    oldbook.lastUpdatedRevision = m_revision;
    return false;
  } catch (std::out_of_range&) {
    auto& newEntry = mp_impl->m_books[book.getId()];
    auto& newEntry = m_books[book.getId()];
    static_cast<Book&>(newEntry) = book;
    newEntry.lastUpdatedRevision = mp_impl->m_revision;
    size_t new_cache_size = static_cast<size_t>(std::ceil(mp_impl->getBookCount(true, true)*0.1));
    newEntry.lastUpdatedRevision = m_revision;
    size_t new_cache_size = static_cast<size_t>(std::ceil(getBookCount_not_protected(true, true)*0.1));
    if (getEnvVar<int>("KIWIX_ARCHIVE_CACHE_SIZE", -1) <= 0) {
      mp_impl->mp_archiveCache->setMaxSize(new_cache_size);
      mp_archiveCache->setMaxSize(new_cache_size);
    }
    if (getEnvVar<int>("KIWIX_SEARCHER_CACHE_SIZE", -1) <= 0) {
      mp_impl->mp_searcherCache->setMaxSize(new_cache_size);
      mp_searcherCache->setMaxSize(new_cache_size);
    }
    return true;
  }
@@ -188,15 +142,15 @@ bool Library::addBook(const Book& book)
void Library::addBookmark(const Bookmark& bookmark)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  mp_impl->m_bookmarks.push_back(bookmark);
  m_bookmarks.push_back(bookmark);
}

bool Library::removeBookmark(const std::string& zimId, const std::string& url)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  for(auto it=mp_impl->m_bookmarks.begin(); it!=mp_impl->m_bookmarks.end(); it++) {
  for(auto it=m_bookmarks.begin(); it!=m_bookmarks.end(); it++) {
    if (it->getBookId() == zimId && it->getUrl() == url) {
      mp_impl->m_bookmarks.erase(it);
      m_bookmarks.erase(it);
      return true;
    }
  }
@@ -206,14 +160,14 @@ bool Library::removeBookmark(const std::string& zimId, const std::string& url)

void Library::dropCache(const std::string& id)
{
  mp_impl->mp_archiveCache->drop(id);
  mp_impl->mp_searcherCache->drop(id);
  mp_archiveCache->drop(id);
  mp_searcherCache->drop(id);
}

bool Library::removeBookById(const std::string& id)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  mp_impl->m_bookDB.delete_document("Q" + id);
  m_bookDB->delete_document("Q" + id);
  dropCache(id);
  // We do not change the cache size here
  // Most of the time, the book is remove in case of library refresh, it is
@@ -221,13 +175,17 @@ bool Library::removeBookById(const std::string& id)
  // Having a too big cache is not a problem here (or it would have been before)
  // (And setMaxSize doesn't actually reduce the cache size, extra cached items
  // will be removed in put or getOrPut).
  return mp_impl->m_books.erase(id) == 1;
  const bool bookWasRemoved = m_books.erase(id) == 1;
  if ( bookWasRemoved ) {
    ++m_revision;
  }
  return bookWasRemoved;
}

Library::Revision Library::getRevision() const
{
  std::lock_guard<std::mutex> lock(m_mutex);
  return mp_impl->m_revision;
  return m_revision;
}

uint32_t Library::removeBooksNotUpdatedSince(Revision libraryRevision)
@@ -235,7 +193,7 @@ uint32_t Library::removeBooksNotUpdatedSince(Revision libraryRevision)
  BookIdCollection booksToRemove;
  {
    std::lock_guard<std::mutex> lock(m_mutex);
    for ( const auto& entry : mp_impl->m_books) {
    for ( const auto& entry : m_books) {
      if ( entry.second.lastUpdatedRevision <= libraryRevision ) {
        booksToRemove.push_back(entry.first);
      }
@@ -254,7 +212,7 @@ const Book& Library::getBookById(const std::string& id) const
{
  // XXX: Doesn't make sense to lock this operation since it cannot
  // XXX: guarantee thread-safety because of its return type
  return mp_impl->m_books.at(id);
  return m_books.at(id);
}

Book Library::getBookByIdThreadSafe(const std::string& id) const
@@ -267,7 +225,7 @@ const Book& Library::getBookByPath(const std::string& path) const
{
  // XXX: Doesn't make sense to lock this operation since it cannot
  // XXX: guarantee thread-safety because of its return type
  for(auto& it: mp_impl->m_books) {
  for(auto& it: m_books) {
    auto& book = it.second;
    if (book.getPath() == path)
      return book;
@@ -280,7 +238,7 @@ const Book& Library::getBookByPath(const std::string& path) const
std::shared_ptr<zim::Archive> Library::getArchiveById(const std::string& id)
{
  try {
    return mp_impl->mp_archiveCache->getOrPut(id,
    return mp_archiveCache->getOrPut(id,
    [&](){
      auto book = getBookById(id);
      if (!book.isPathValid()) {
@@ -297,7 +255,7 @@ std::shared_ptr<ZimSearcher> Library::getSearcherByIds(const BookIdSet& ids)
{
  assert(!ids.empty());
  try {
    return mp_impl->mp_searcherCache->getOrPut(ids,
    return mp_searcherCache->getOrPut(ids,
    [&](){
      std::vector<zim::Archive> archives;
      for(auto& id:ids) {
@@ -318,7 +276,7 @@ unsigned int Library::getBookCount(const bool localBooks,
                                   const bool remoteBooks) const
{
  std::lock_guard<std::mutex> lock(m_mutex);
  return mp_impl->getBookCount(localBooks, remoteBooks);
  return getBookCount_not_protected(localBooks, remoteBooks);
}

bool Library::writeToFile(const std::string& path) const
@@ -349,7 +307,7 @@ Library::AttributeCounts Library::getBookAttributeCounts(BookStrPropMemFn p) con
  std::lock_guard<std::mutex> lock(m_mutex);
  AttributeCounts propValueCounts;

  for (const auto& pair: mp_impl->m_books) {
  for (const auto& pair: m_books) {
    const auto& book = pair.second;
    if (book.getOrigId().empty()) {
      propValueCounts[(book.*p)()] += 1;
@@ -369,12 +327,27 @@ std::vector<std::string> Library::getBookPropValueSet(BookStrPropMemFn p) const

std::vector<std::string> Library::getBooksLanguages() const
{
  return getBookPropValueSet(&Book::getLanguage);
  std::vector<std::string> langs;
  for ( const auto& langAndCount : getBooksLanguagesWithCounts() ) {
    langs.push_back(langAndCount.first);
  }
  return langs;
}

Library::AttributeCounts Library::getBooksLanguagesWithCounts() const
{
  return getBookAttributeCounts(&Book::getLanguage);
  std::lock_guard<std::mutex> lock(m_mutex);
  AttributeCounts langsWithCounts;

  for (const auto& pair: m_books) {
    const auto& book = pair.second;
    if (book.getOrigId().empty()) {
      for ( const auto& lang : book.getLanguages() ) {
        ++langsWithCounts[lang];
      }
    }
  }
  return langsWithCounts;
}

std::vector<std::string> Library::getBooksCategories() const
@@ -382,7 +355,7 @@ std::vector<std::string> Library::getBooksCategories() const
  std::lock_guard<std::mutex> lock(m_mutex);
  std::set<std::string> categories;

  for (const auto& pair: mp_impl->m_books) {
  for (const auto& pair: m_books) {
    const auto& book = pair.second;
    const auto& c = book.getCategory();
    if ( !c.empty() ) {
@@ -406,12 +379,12 @@ std::vector<std::string> Library::getBooksPublishers() const
const std::vector<kiwix::Bookmark> Library::getBookmarks(bool onlyValidBookmarks) const
{
  if (!onlyValidBookmarks) {
    return mp_impl->m_bookmarks;
    return m_bookmarks;
  }
  std::vector<kiwix::Bookmark> validBookmarks;
  auto booksId = getBooksIds();
  std::lock_guard<std::mutex> lock(m_mutex);
  for(auto& bookmark:mp_impl->m_bookmarks) {
  for(auto& bookmark:m_bookmarks) {
    if (std::find(booksId.begin(), booksId.end(), bookmark.getBookId()) != booksId.end()) {
      validBookmarks.push_back(bookmark);
    }
@@ -424,7 +397,7 @@ Library::BookIdCollection Library::getBooksIds() const
  std::lock_guard<std::mutex> lock(m_mutex);
  BookIdCollection bookIds;

  for (auto& pair: mp_impl->m_books) {
  for (auto& pair: m_books) {
    bookIds.push_back(pair.first);
  }

@@ -436,12 +409,14 @@ void Library::updateBookDB(const Book& book)
{
  Xapian::Stem stemmer;
  Xapian::TermGenerator indexer;
  const std::string lang = book.getLanguage();
  try {
    stemmer = Xapian::Stem(iso639_3ToXapian(lang));
    indexer.set_stemmer(stemmer);
    indexer.set_stemming_strategy(Xapian::TermGenerator::STEM_SOME);
  } catch (...) {}
  const auto langs = book.getLanguages();
  if ( langs.size() == 1 ) {
    try {
      stemmer = Xapian::Stem(iso639_3ToXapian(langs[0]));
      indexer.set_stemmer(stemmer);
      indexer.set_stemming_strategy(Xapian::TermGenerator::STEM_SOME);
    } catch (...) {}
  }
  Xapian::Document doc;
  indexer.set_document(doc);

@@ -456,10 +431,12 @@ void Library::updateBookDB(const Book& book)
  // Index all fields for field-based search
  indexer.index_text(title, 1, "S");
  indexer.index_text(desc, 1, "XD");
  indexer.index_text(lang, 1, "L");
  for ( const auto& lang : langs ) {
    indexer.index_text(lang, 1, "L");
  }
  indexer.index_text(normalizeText(book.getCreator()), 1, "A");
  indexer.index_text(normalizeText(book.getPublisher()), 1, "XP");
  indexer.index_text(normalizeText(book.getName()), 1, "XN");
  doc.add_term("XN"+normalizeText(book.getName()));
  indexer.index_text(normalizeText(book.getCategory()), 1, "XC");

  for ( const auto& tag : split(normalizeText(book.getTags()), ";") ) {
@@ -475,7 +452,7 @@ void Library::updateBookDB(const Book& book)

  doc.set_data(book.getId());

  mp_impl->m_bookDB.replace_document(idterm, doc);
  m_bookDB->replace_document(idterm, doc);
}

namespace
@@ -526,14 +503,30 @@ Xapian::Query nameQuery(const std::string& name)
  return Xapian::Query("XN" + normalizeText(name));
}

Xapian::Query categoryQuery(const std::string& category)
Xapian::Query multipleParamQuery(const std::string& commaSeparatedList, const std::string& prefix)
{
  return Xapian::Query("XC" + normalizeText(category));
  Xapian::Query q;
  bool firstIteration = true;
  for ( const auto& elem : kiwix::split(commaSeparatedList, ",") ) {
    const Xapian::Query singleQuery(prefix + normalizeText(elem));
    if ( firstIteration ) {
      q = singleQuery;
      firstIteration = false;
    } else {
      q = Xapian::Query(Xapian::Query::OP_OR, q, singleQuery);
    }
  }
  return q;
}

Xapian::Query langQuery(const std::string& lang)
Xapian::Query categoryQuery(const std::string& commaSeparatedCategoryList)
{
  return Xapian::Query("L" + normalizeText(lang));
  return multipleParamQuery(commaSeparatedCategoryList, "XC");
}

Xapian::Query langQuery(const std::string& commaSeparatedLanguageList)
{
  return multipleParamQuery(commaSeparatedLanguageList, "L");
}

Xapian::Query publisherQuery(const std::string& publisher)
@@ -608,9 +601,9 @@ Library::BookIdCollection Library::filterViaBookDB(const Filter& filter) const
  BookIdCollection bookIds;

  std::lock_guard<std::mutex> lock(m_mutex);
  Xapian::Enquire enquire(mp_impl->m_bookDB);
  Xapian::Enquire enquire(*m_bookDB);
  enquire.set_query(query);
  const auto results = enquire.get_mset(0, mp_impl->m_books.size());
  const auto results = enquire.get_mset(0, m_books.size());
  for ( auto it = results.begin(); it != results.end(); ++it ) {
    bookIds.push_back(it.get_document().get_data());
  }
@@ -624,7 +617,7 @@ Library::BookIdCollection Library::filter(const Filter& filter) const
  const auto preliminaryResult = filterViaBookDB(filter);
  std::lock_guard<std::mutex> lock(m_mutex);
  for(auto id : preliminaryResult) {
    if(filter.accept(mp_impl->m_books.at(id))) {
    if(filter.accept(m_books.at(id))) {
      result.push_back(id);
    }
  }
@@ -844,6 +837,18 @@ Filter& Filter::name(std::string name)
  return *this;
}

Filter& Filter::clearLang()
{
  activeFilters &= ~LANG;
  return *this;
}

Filter& Filter::clearCategory()
{
  activeFilters &= ~CATEGORY;
  return *this;
}

#define ACTIVE(X) (activeFilters & (X))
#define FILTER(TAG, TEST) if (ACTIVE(TAG) && !(TEST)) { return false; }
bool Filter::hasQuery() const
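Illustrative sketch (not part of the diff): the per-language bookkeeping added above can be consumed like this. Filter::lang() accepting a comma-separated list is the behaviour implied by multipleParamQuery(); the setter name itself is assumed.

  auto lib = kiwix::Library::create();
  // ... books added through Manager ...

  for (const auto& langAndCount : lib->getBooksLanguagesWithCounts())
    std::cout << langAndCount.first << ": " << langAndCount.second << std::endl;

  // "eng,fra" becomes an OR of two language queries via multipleParamQuery().
  const auto ids = lib->filter(kiwix::Filter().lang("eng,fra"));   // lang() setter assumed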
src/library_dumper.cpp (new file, 61 lines)
@@ -0,0 +1,61 @@
#include "library_dumper.h"
#include "tools/stringTools.h"
#include "tools/otherTools.h"
#include "tools.h"

namespace kiwix
{
/* Constructor */
LibraryDumper::LibraryDumper(const Library* library, const NameMapper* nameMapper)
  : library(library),
    nameMapper(nameMapper)
{
}
/* Destructor */
LibraryDumper::~LibraryDumper()
{
}

void LibraryDumper::setOpenSearchInfo(int totalResults, int startIndex, int count)
{
  m_totalResults = totalResults;
  m_startIndex = startIndex,
  m_count = count;
}

kainjow::mustache::list LibraryDumper::getCategoryData() const
{
  const auto now = gen_date_str();
  kainjow::mustache::list categoryData;
  for ( const auto& category : library->getBooksCategories() ) {
    const auto urlencodedCategoryName = urlEncode(category);
    categoryData.push_back(kainjow::mustache::object{
      {"name", category},
      {"urlencoded_name", urlencodedCategoryName},
      {"updated", now},
      {"id", gen_uuid(libraryId + "/categories/" + urlencodedCategoryName)}
    });
  }
  return categoryData;
}

kainjow::mustache::list LibraryDumper::getLanguageData() const
{
  const auto now = gen_date_str();
  kainjow::mustache::list languageData;
  for ( const auto& langAndBookCount : library->getBooksLanguagesWithCounts() ) {
    const std::string languageCode = langAndBookCount.first;
    const int bookCount = langAndBookCount.second;
    const auto languageSelfName = getLanguageSelfName(languageCode);
    languageData.push_back(kainjow::mustache::object{
      {"lang_code", languageCode},
      {"lang_self_name", languageSelfName},
      {"book_count", to_string(bookCount)},
      {"updated", now},
      {"id", gen_uuid(libraryId + "/languages/" + languageCode)}
    });
  }
  return languageData;
}

} // namespace kiwix
src/library_dumper.h (new file, 91 lines)
@@ -0,0 +1,91 @@
/*
 * Copyright 2023 Nikhil Tanwar <2002nikhiltanwar@gmail.com>
 * Copyright 2017 Matthieu Gautier <mgautier@kymeria.fr>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */

#ifndef KIWIX_LIBRARY_DUMPER_H
#define KIWIX_LIBRARY_DUMPER_H

#include <string>


#include "library.h"
#include "name_mapper.h"
#include <mustache.hpp>

namespace kiwix
{

/**
 * A base class to dump Library in various formats.
 *
 */
class LibraryDumper
{
 public:
  LibraryDumper(const Library* library, const NameMapper* NameMapper);
  ~LibraryDumper();

  void setLibraryId(const std::string& id) { this->libraryId = id;}

  /**
   * Set the root location used when generating url.
   *
   * @param rootLocation the root location to use.
   */
  void setRootLocation(const std::string& rootLocation) { this->rootLocation = rootLocation; }

  /**
   * Set some informations about the search results.
   *
   * @param totalResult the total number of results of the search.
   * @param startIndex the start index of the result.
   * @param count the number of result of the current set (or page).
   */
  void setOpenSearchInfo(int totalResult, int startIndex, int count);

  /**
   * Sets user default language
   *
   * @param userLang the user language to be set
   */
  void setUserLanguage(std::string userLang) { this->m_userLang = userLang; }

  /**
   * Get the data of categories
   */
  kainjow::mustache::list getCategoryData() const;

  /**
   * Get the data of languages
   */
  kainjow::mustache::list getLanguageData() const;

 protected:
  const kiwix::Library* const library;
  const kiwix::NameMapper* const nameMapper;
  std::string libraryId;
  std::string rootLocation;
  std::string m_userLang;
  int m_totalResults;
  int m_startIndex;
  int m_count;
};
}

#endif // KIWIX_LIBRARY_DUMPER_H
@@ -54,7 +54,7 @@ void LibXMLDumper::handleBook(Book book, pugi::xml_node root_node) {
  if (book.getOrigId().empty()) {
    ADD_ATTR_NOT_EMPTY(entry_node, "title", book.getTitle());
    ADD_ATTR_NOT_EMPTY(entry_node, "description", book.getDescription());
    ADD_ATTR_NOT_EMPTY(entry_node, "language", book.getLanguage());
    ADD_ATTR_NOT_EMPTY(entry_node, "language", book.getCommaSeparatedLanguages());
    ADD_ATTR_NOT_EMPTY(entry_node, "creator", book.getCreator());
    ADD_ATTR_NOT_EMPTY(entry_node, "publisher", book.getPublisher());
    ADD_ATTR_NOT_EMPTY(entry_node, "name", book.getName());
@@ -97,7 +97,7 @@ void LibXMLDumper::handleBookmark(Bookmark bookmark, pugi::xml_node root_node) {
    auto book = library->getBookByIdThreadSafe(bookmark.getBookId());
    ADD_TEXT_ENTRY(book_node, "id", book.getId());
    ADD_TEXT_ENTRY(book_node, "title", book.getTitle());
    ADD_TEXT_ENTRY(book_node, "language", book.getLanguage());
    ADD_TEXT_ENTRY(book_node, "language", book.getCommaSeparatedLanguages());
    ADD_TEXT_ENTRY(book_node, "date", book.getDate());
  } catch (...) {
    ADD_TEXT_ENTRY(book_node, "id", bookmark.getBookId());
@@ -27,22 +27,12 @@
namespace kiwix
{

namespace
{

struct NoDelete
{
template<class T> void operator()(T*) {}
};

} // unnamed namespace

////////////////////////////////////////////////////////////////////////////////
// LibraryManipulator
////////////////////////////////////////////////////////////////////////////////

LibraryManipulator::LibraryManipulator(Library* library)
: library(*library)
LibraryManipulator::LibraryManipulator(LibraryPtr library)
: library(library)
{}

LibraryManipulator::~LibraryManipulator()
@@ -50,7 +40,7 @@ LibraryManipulator::~LibraryManipulator()

bool LibraryManipulator::addBookToLibrary(const Book& book)
{
const auto ret = library.addBook(book);
const auto ret = library->addBook(book);
if ( ret ) {
bookWasAddedToLibrary(book);
}
@@ -59,13 +49,13 @@ bool LibraryManipulator::addBookToLibrary(const Book& book)

void LibraryManipulator::addBookmarkToLibrary(const Bookmark& bookmark)
{
library.addBookmark(bookmark);
library->addBookmark(bookmark);
bookmarkWasAddedToLibrary(bookmark);
}

uint32_t LibraryManipulator::removeBooksNotUpdatedSince(Library::Revision rev)
{
const auto n = library.removeBooksNotUpdatedSince(rev);
const auto n = library->removeBooksNotUpdatedSince(rev);
if ( n != 0 ) {
booksWereRemovedFromLibrary();
}
@@ -89,15 +79,15 @@ void LibraryManipulator::booksWereRemovedFromLibrary()
////////////////////////////////////////////////////////////////////////////////

/* Constructor */
Manager::Manager(LibraryManipulator* manipulator):
Manager::Manager(LibraryManipulator manipulator):
writableLibraryPath(""),
manipulator(manipulator, NoDelete())
manipulator(manipulator)
{
}

Manager::Manager(Library* library) :
Manager::Manager(LibraryPtr library) :
writableLibraryPath(""),
manipulator(new LibraryManipulator(library))
manipulator(LibraryManipulator(library))
{
}

@@ -121,7 +111,7 @@ bool Manager::parseXmlDom(const pugi::xml_document& doc,
if (!trustLibrary && !book.getPath().empty()) {
this->readBookFromPath(book.getPath(), &book);
}
manipulator->addBookToLibrary(book);
manipulator.addBookToLibrary(book);
}

return true;
@@ -166,7 +156,7 @@ bool Manager::parseOpdsDom(const pugi::xml_document& doc, const std::string& url
book.updateFromOpds(entryNode, urlHost);

/* Update the book properties with the new importer */
manipulator->addBookToLibrary(book);
manipulator.addBookToLibrary(book);
}

return true;
@@ -238,10 +228,10 @@ std::string Manager::addBookFromPathAndGetId(const std::string& pathToOpen,
}

if (!checkMetaData
|| (checkMetaData && !book.getTitle().empty() && !book.getLanguage().empty()
|| (!book.getTitle().empty() && !book.getLanguages().empty()
&& !book.getDate().empty())) {
book.setUrl(url);
manipulator->addBookToLibrary(book);
manipulator.addBookToLibrary(book);
return book.getId();
}
}
@@ -296,7 +286,7 @@ bool Manager::readBookmarkFile(const std::string& path)

bookmark.updateFromXml(node);

manipulator->addBookmarkToLibrary(bookmark);
manipulator.addBookmarkToLibrary(bookmark);
}

return true;
@@ -304,7 +294,7 @@ bool Manager::readBookmarkFile(const std::string& path)

void Manager::reload(const Paths& paths)
{
const auto libRevision = manipulator->getLibrary().getRevision();
const auto libRevision = manipulator.getLibrary()->getRevision();
for (std::string path : paths) {
if (!path.empty()) {
if ( kiwix::isRelativePath(path) )
@@ -316,7 +306,7 @@ void Manager::reload(const Paths& paths)
}
}

manipulator->removeBooksNotUpdatedSince(libRevision);
manipulator.removeBooksNotUpdatedSince(libRevision);
}

}

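The manager.cpp changes drop the raw-pointer plumbing: `LibraryManipulator` now stores a `LibraryPtr` (a shared pointer) and `Manager` holds its manipulator by value, so the `NoDelete` helper removed at the top of this file is no longer needed here (the same idiom reappears later in internalServer.cpp to wrap a non-owning default `NameMapper*`). A minimal, self-contained sketch of that no-op-deleter idiom follows; the type names are illustrative, not libkiwix types.

// Sketch of wrapping a non-owning pointer in a std::shared_ptr via a no-op
// deleter, as done for the default NameMapper in internalServer.cpp.
#include <memory>

struct NoDelete
{
  template<class T> void operator()(T*) {}
};

struct Widget { int value = 42; };

int main()
{
  static Widget sharedInstance;           // owned statically, never deleted here
  std::shared_ptr<Widget> view(&sharedInstance, NoDelete());
  // 'view' satisfies APIs that expect shared_ptr semantics without ever
  // attempting to delete the statically owned object.
  return view->value == 42 ? 0 : 1;
}
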
@@ -5,6 +5,8 @@ kiwix_sources = [
'manager.cpp',
'libxml_dumper.cpp',
'opds_dumper.cpp',
'html_dumper.cpp',
'library_dumper.cpp',
'downloader.cpp',
'server.cpp',
'search_renderer.cpp',
@@ -15,6 +17,8 @@ kiwix_sources = [
'tools/regexTools.cpp',
'tools/stringTools.cpp',
'tools/networkTools.cpp',
'tools/opdsParsingTools.cpp',
'tools/languageTools.cpp',
'tools/otherTools.cpp',
'tools/archiveTools.cpp',
'kiwixserve.cpp',
@@ -24,7 +28,7 @@ kiwix_sources = [
'server/request_context.cpp',
'server/response.cpp',
'server/internalServer.cpp',
'server/internalServer_catalog_v2.cpp',
'server/internalServer_catalog.cpp',
'server/i18n.cpp',
'opds_catalog.cpp',
'version.cpp'

@@ -63,7 +63,7 @@ std::string HumanReadableNameMapper::getIdForName(const std::string& name) const
// UpdatableNameMapper
////////////////////////////////////////////////////////////////////////////////

UpdatableNameMapper::UpdatableNameMapper(Library& lib, bool withAlias)
UpdatableNameMapper::UpdatableNameMapper(LibraryPtr lib, bool withAlias)
: library(lib)
, withAlias(withAlias)
{
@@ -72,7 +72,7 @@ UpdatableNameMapper::UpdatableNameMapper(Library& lib, bool withAlias)

void UpdatableNameMapper::update()
{
const auto newNameMapper = new HumanReadableNameMapper(library, withAlias);
const auto newNameMapper = new HumanReadableNameMapper(*library, withAlias);
std::lock_guard<std::mutex> lock(mutex);
nameMapper.reset(newNameMapper);
}

@@ -30,8 +30,8 @@ namespace kiwix
|
||||
{
|
||||
|
||||
/* Constructor */
|
||||
OPDSDumper::OPDSDumper(Library* library)
|
||||
: library(library)
|
||||
OPDSDumper::OPDSDumper(const Library* library, const NameMapper* nameMapper)
|
||||
: LibraryDumper(library, nameMapper)
|
||||
{
|
||||
}
|
||||
/* Destructor */
|
||||
@@ -39,16 +39,11 @@ OPDSDumper::~OPDSDumper()
|
||||
{
|
||||
}
|
||||
|
||||
void OPDSDumper::setOpenSearchInfo(int totalResults, int startIndex, int count)
|
||||
{
|
||||
m_totalResults = totalResults;
|
||||
m_startIndex = startIndex,
|
||||
m_count = count;
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
const std::string XML_HEADER(R"(<?xml version="1.0" encoding="UTF-8"?>)");
|
||||
|
||||
typedef kainjow::mustache::data MustacheData;
|
||||
typedef kainjow::mustache::list BooksData;
|
||||
typedef kainjow::mustache::list IllustrationInfo;
|
||||
@@ -69,16 +64,17 @@ IllustrationInfo getBookIllustrationInfo(const Book& book)
|
||||
return illustrations;
|
||||
}
|
||||
|
||||
kainjow::mustache::object getSingleBookData(const Book& book)
|
||||
std::string fullEntryXML(const Book& book, const std::string& rootLocation, const std::string& contentId)
|
||||
{
|
||||
const auto bookDate = book.getDate() + "T00:00:00Z";
|
||||
return kainjow::mustache::object{
|
||||
const kainjow::mustache::object data{
|
||||
{"root", rootLocation},
|
||||
{"id", book.getId()},
|
||||
{"name", book.getName()},
|
||||
{"title", book.getTitle()},
|
||||
{"description", book.getDescription()},
|
||||
{"language", book.getLanguage()},
|
||||
{"content_id", urlEncode(book.getHumanReadableIdFromPath(), true)},
|
||||
{"language", book.getCommaSeparatedLanguages()},
|
||||
{"content_id", urlEncode(contentId)},
|
||||
{"updated", bookDate}, // XXX: this should be the entry update datetime
|
||||
{"book_date", bookDate},
|
||||
{"category", book.getCategory()},
|
||||
@@ -92,27 +88,34 @@ kainjow::mustache::object getSingleBookData(const Book& book)
|
||||
{"size", to_string(book.getSize())},
|
||||
{"icons", getBookIllustrationInfo(book)},
|
||||
};
|
||||
return render_template(RESOURCE::templates::catalog_v2_entry_xml, data);
|
||||
}
|
||||
|
||||
std::string getSingleBookEntryXML(const Book& book, bool withXMLHeader, const std::string& rootLocation, const std::string& endpointRoot, bool partial)
|
||||
std::string partialEntryXML(const Book& book, const std::string& rootLocation)
|
||||
{
|
||||
auto data = getSingleBookData(book);
|
||||
data["with_xml_header"] = MustacheData(withXMLHeader);
|
||||
data["dump_partial_entries"] = MustacheData(partial);
|
||||
data["endpoint_root"] = endpointRoot;
|
||||
data["root"] = rootLocation;
|
||||
return render_template(RESOURCE::templates::catalog_v2_entry_xml, data);
|
||||
const auto bookDate = book.getDate() + "T00:00:00Z";
|
||||
const kainjow::mustache::object data{
|
||||
{"root", rootLocation},
|
||||
{"endpoint_root", rootLocation + "/catalog/v2"},
|
||||
{"id", book.getId()},
|
||||
{"title", book.getTitle()},
|
||||
{"updated", bookDate}, // XXX: this should be the entry update datetime
|
||||
};
|
||||
const auto xmlTemplate = RESOURCE::templates::catalog_v2_partial_entry_xml;
|
||||
return render_template(xmlTemplate, data);
|
||||
}
|
||||
|
||||
BooksData getBooksData(const Library* library, const std::vector<std::string>& bookIds, const std::string& rootLocation, const std::string& endpointRoot, bool partial)
|
||||
BooksData getBooksData(const Library* library, const NameMapper* nameMapper, const std::vector<std::string>& bookIds, const std::string& rootLocation, bool partial)
|
||||
{
|
||||
BooksData booksData;
|
||||
for ( const auto& bookId : bookIds ) {
|
||||
try {
|
||||
const Book book = library->getBookByIdThreadSafe(bookId);
|
||||
booksData.push_back(kainjow::mustache::object{
|
||||
{"entry", getSingleBookEntryXML(book, false, rootLocation, endpointRoot, partial)}
|
||||
});
|
||||
const std::string contentId = nameMapper->getNameForId(bookId);
|
||||
const auto entryXML = partial
|
||||
? partialEntryXML(book, rootLocation)
|
||||
: fullEntryXML(book, rootLocation, contentId);
|
||||
booksData.push_back(kainjow::mustache::object{ {"entry", entryXML} });
|
||||
} catch ( const std::out_of_range& ) {
|
||||
// the book was removed from the library since its id was obtained
|
||||
// ignore it
|
||||
@@ -122,64 +125,11 @@ BooksData getBooksData(const Library* library, const std::vector<std::string>& b
|
||||
return booksData;
|
||||
}
|
||||
|
||||
std::map<std::string, std::string> iso639_3 = {
|
||||
{"atj", "atikamekw"},
|
||||
{"azb", "آذربایجان دیلی"},
|
||||
{"bcl", "central bikol"},
|
||||
{"bgs", "tagabawa"},
|
||||
{"bxr", "буряад хэлэн"},
|
||||
{"cbk", "chavacano"},
|
||||
{"cdo", "閩東語"},
|
||||
{"dag", "Dagbani"},
|
||||
{"diq", "dimli"},
|
||||
{"dty", "डोटेली"},
|
||||
{"eml", "emiliân-rumagnōl"},
|
||||
{"fbs", "српскохрватски"},
|
||||
{"ido", "ido"},
|
||||
{"kbp", "kabɩyɛ"},
|
||||
{"kld", "Gamilaraay"},
|
||||
{"lbe", "лакку маз"},
|
||||
{"lbj", "ལ་དྭགས་སྐད་"},
|
||||
{"map", "Austronesian"},
|
||||
{"mhr", "марий йылме"},
|
||||
{"mnw", "ဘာသာမန်"},
|
||||
{"myn", "mayan"},
|
||||
{"nah", "nahuatl"},
|
||||
{"nai", "north American Indian"},
|
||||
{"nds", "plattdütsch"},
|
||||
{"nrm", "bhasa narom"},
|
||||
{"olo", "livvi"},
|
||||
{"pih", "Pitcairn-Norfolk"},
|
||||
{"pnb", "Western Panjabi"},
|
||||
{"rmr", "Caló"},
|
||||
{"rmy", "romani shib"},
|
||||
{"roa", "romance languages"},
|
||||
{"twi", "twi"}
|
||||
};
|
||||
|
||||
std::once_flag fillLanguagesFlag;
|
||||
|
||||
void fillLanguagesMap()
|
||||
{
|
||||
for (auto icuLangPtr = icu::Locale::getISOLanguages(); *icuLangPtr != NULL; ++icuLangPtr) {
|
||||
const ICULanguageInfo lang(*icuLangPtr);
|
||||
iso639_3.insert({lang.iso3Code(), lang.selfName()});
|
||||
}
|
||||
}
|
||||
|
||||
std::string getLanguageSelfName(const std::string& lang) {
|
||||
const auto itr = iso639_3.find(lang);
|
||||
if (itr != iso639_3.end()) {
|
||||
return itr->second;
|
||||
}
|
||||
return lang;
|
||||
};
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
string OPDSDumper::dumpOPDSFeed(const std::vector<std::string>& bookIds, const std::string& query) const
|
||||
{
|
||||
const auto booksData = getBooksData(library, bookIds, rootLocation, "", false);
|
||||
const auto booksData = getBooksData(library, nameMapper, bookIds, rootLocation, false);
|
||||
const kainjow::mustache::object template_data{
|
||||
{"date", gen_date_str()},
|
||||
{"root", rootLocation},
|
||||
@@ -197,20 +147,20 @@ string OPDSDumper::dumpOPDSFeed(const std::vector<std::string>& bookIds, const s
|
||||
string OPDSDumper::dumpOPDSFeedV2(const std::vector<std::string>& bookIds, const std::string& query, bool partial) const
|
||||
{
|
||||
const auto endpointRoot = rootLocation + "/catalog/v2";
|
||||
const auto booksData = getBooksData(library, bookIds, rootLocation, endpointRoot, partial);
|
||||
const auto booksData = getBooksData(library, nameMapper, bookIds, rootLocation, partial);
|
||||
|
||||
const char* const endpoint = partial ? "/partial_entries" : "/entries";
|
||||
const std::string url = endpoint + (query.empty() ? "" : "?" + query);
|
||||
const kainjow::mustache::object template_data{
|
||||
{"date", gen_date_str()},
|
||||
{"endpoint_root", endpointRoot},
|
||||
{"feed_id", gen_uuid(libraryId + endpoint + "?" + query)},
|
||||
{"filter", onlyAsNonEmptyMustacheValue(query)},
|
||||
{"query", query.empty() ? "" : "?" + urlEncode(query)},
|
||||
{"self_url", url},
|
||||
{"totalResults", to_string(m_totalResults)},
|
||||
{"startIndex", to_string(m_startIndex)},
|
||||
{"itemsPerPage", to_string(m_count)},
|
||||
{"books", booksData },
|
||||
{"dump_partial_entries", MustacheData(partial)}
|
||||
{"books", booksData }
|
||||
};
|
||||
|
||||
return render_template(RESOURCE::templates::catalog_v2_entries_xml, template_data);
|
||||
@@ -218,23 +168,17 @@ string OPDSDumper::dumpOPDSFeedV2(const std::vector<std::string>& bookIds, const
|
||||
|
||||
std::string OPDSDumper::dumpOPDSCompleteEntry(const std::string& bookId) const
|
||||
{
|
||||
return getSingleBookEntryXML(library->getBookById(bookId), true, rootLocation, "", false);
|
||||
const auto book = library->getBookById(bookId);
|
||||
const std::string contentId = nameMapper->getNameForId(bookId);
|
||||
return XML_HEADER
|
||||
+ "\n"
|
||||
+ fullEntryXML(book, rootLocation, contentId);
|
||||
}
|
||||
|
||||
std::string OPDSDumper::categoriesOPDSFeed() const
|
||||
{
|
||||
const auto now = gen_date_str();
|
||||
kainjow::mustache::list categoryData;
|
||||
for ( const auto& category : library->getBooksCategories() ) {
|
||||
const auto urlencodedCategoryName = urlEncode(category);
|
||||
categoryData.push_back(kainjow::mustache::object{
|
||||
{"name", category},
|
||||
{"urlencoded_name", urlencodedCategoryName},
|
||||
{"updated", now},
|
||||
{"id", gen_uuid(libraryId + "/categories/" + urlencodedCategoryName)}
|
||||
});
|
||||
}
|
||||
|
||||
kainjow::mustache::list categoryData = getCategoryData();
|
||||
return render_template(
|
||||
RESOURCE::templates::catalog_v2_categories_xml,
|
||||
kainjow::mustache::object{
|
||||
@@ -249,21 +193,7 @@ std::string OPDSDumper::categoriesOPDSFeed() const
|
||||
std::string OPDSDumper::languagesOPDSFeed() const
|
||||
{
|
||||
const auto now = gen_date_str();
|
||||
kainjow::mustache::list languageData;
|
||||
std::call_once(fillLanguagesFlag, fillLanguagesMap);
|
||||
for ( const auto& langAndBookCount : library->getBooksLanguagesWithCounts() ) {
|
||||
const std::string languageCode = langAndBookCount.first;
|
||||
const int bookCount = langAndBookCount.second;
|
||||
const auto languageSelfName = getLanguageSelfName(languageCode);
|
||||
languageData.push_back(kainjow::mustache::object{
|
||||
{"lang_code", languageCode},
|
||||
{"lang_self_name", languageSelfName},
|
||||
{"book_count", to_string(bookCount)},
|
||||
{"updated", now},
|
||||
{"id", gen_uuid(libraryId + "/languages/" + languageCode)}
|
||||
});
|
||||
}
|
||||
|
||||
kainjow::mustache::list languageData = getLanguageData();
|
||||
return render_template(
|
||||
RESOURCE::templates::catalog_v2_languages_xml,
|
||||
kainjow::mustache::object{
|
||||
|
||||
@@ -27,6 +27,8 @@
#include <pugixml.hpp>

#include "library.h"
#include "name_mapper.h"
#include "library_dumper.h"

using namespace std;

@@ -37,11 +39,10 @@ namespace kiwix
* A tool to dump a `Library` into a opds stream.
*
*/
class OPDSDumper
class OPDSDumper : public LibraryDumper
{
public:
OPDSDumper() = default;
OPDSDumper(Library* library);
OPDSDumper(const Library* library, const NameMapper* NameMapper);
~OPDSDumper();

/**
@@ -84,37 +85,6 @@ class OPDSDumper
* @return The OPDS feed.
*/
std::string languagesOPDSFeed() const;

/**
* Set the id of the library.
*
* @param id the id to use.
*/
void setLibraryId(const std::string& id) { this->libraryId = id;}

/**
* Set the root location used when generating url.
*
* @param rootLocation the root location to use.
*/
void setRootLocation(const std::string& rootLocation) { this->rootLocation = rootLocation; }

/**
* Set some informations about the search results.
*
* @param totalResult the total number of results of the search.
* @param startIndex the start index of the result.
* @param count the number of result of the current set (or page).
*/
void setOpenSearchInfo(int totalResult, int startIndex, int count);

protected:
kiwix::Library* library;
std::string libraryId;
std::string rootLocation;
int m_totalResults;
int m_startIndex;
int m_count;
};
}

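With this refactoring, `OPDSDumper` derives from `LibraryDumper` and must be given both a `Library` and a `NameMapper`. The following is a hypothetical usage sketch based only on the signatures visible in these hunks; the root location and library id values are placeholders and error handling is omitted.

// Hypothetical caller of the refactored dumper.
#include <string>
#include <vector>
#include "opds_dumper.h"

std::string dumpCatalogFeed(const kiwix::Library* library,
                            const kiwix::NameMapper* nameMapper,
                            const std::vector<std::string>& bookIds)
{
  kiwix::OPDSDumper dumper(library, nameMapper);
  dumper.setRootLocation("/ROOT");            // prefix used when generating URLs
  dumper.setLibraryId("library-uuid");        // id mixed into the feed identifiers
  dumper.setOpenSearchInfo(bookIds.size(), 0, bookIds.size());
  return dumper.dumpOPDSFeed(bookIds, /*query=*/"");
}
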
@@ -36,16 +36,9 @@ namespace kiwix
|
||||
{
|
||||
|
||||
/* Constructor */
|
||||
SearchRenderer::SearchRenderer(zim::SearchResultSet srs, NameMapper* mapper,
|
||||
unsigned int start, unsigned int estimatedResultCount)
|
||||
: SearchRenderer(srs, mapper, nullptr, start, estimatedResultCount)
|
||||
{}
|
||||
|
||||
SearchRenderer::SearchRenderer(zim::SearchResultSet srs, NameMapper* mapper, Library* library,
|
||||
SearchRenderer::SearchRenderer(zim::SearchResultSet srs,
|
||||
unsigned int start, unsigned int estimatedResultCount)
|
||||
: m_srs(srs),
|
||||
mp_nameMapper(mapper),
|
||||
mp_library(library),
|
||||
protocolPrefix("zim://"),
|
||||
searchProtocolPrefix("search://"),
|
||||
estimatedResultCount(estimatedResultCount),
|
||||
@@ -94,7 +87,7 @@ kainjow::mustache::data buildQueryData
|
||||
kainjow::mustache::data query;
|
||||
query.set("pattern", kiwix::encodeDiples(pattern));
|
||||
std::ostringstream ss;
|
||||
ss << searchProtocolPrefix << "?pattern=" << urlEncode(pattern, true);
|
||||
ss << searchProtocolPrefix << "?pattern=" << urlEncode(pattern);
|
||||
ss << "&" << bookQuery;
|
||||
query.set("unpaginatedQuery", ss.str());
|
||||
auto lang = extractValueFromQuery(bookQuery, "books.filter.lang");
|
||||
@@ -164,19 +157,20 @@ kainjow::mustache::data buildPagination(
|
||||
return pagination;
|
||||
}
|
||||
|
||||
std::string SearchRenderer::renderTemplate(const std::string& tmpl_str)
|
||||
std::string SearchRenderer::renderTemplate(const std::string& tmpl_str, const NameMapper& nameMapper, const Library* library)
|
||||
{
|
||||
const std::string absPathPrefix = protocolPrefix + "content/";
|
||||
const std::string absPathPrefix = protocolPrefix;
|
||||
// Build the results list
|
||||
kainjow::mustache::data items{kainjow::mustache::data::type::list};
|
||||
for (auto it = m_srs.begin(); it != m_srs.end(); it++) {
|
||||
kainjow::mustache::data result;
|
||||
std::string zim_id(it.getZimId());
|
||||
const std::string zim_id(it.getZimId());
|
||||
const auto path = nameMapper.getNameForId(zim_id) + "/" + it.getPath();
|
||||
result.set("title", it.getTitle());
|
||||
result.set("absolutePath", absPathPrefix + urlEncode(mp_nameMapper->getNameForId(zim_id), true) + "/" + urlEncode(it.getPath()));
|
||||
result.set("absolutePath", absPathPrefix + urlEncode(path));
|
||||
result.set("snippet", it.getSnippet());
|
||||
if (mp_library) {
|
||||
result.set("bookTitle", mp_library->getBookById(zim_id).getTitle());
|
||||
if (library) {
|
||||
result.set("bookTitle", library->getBookById(zim_id).getTitle());
|
||||
}
|
||||
if (it.getWordCount() >= 0) {
|
||||
result.set("wordCount", kiwix::beautifyInteger(it.getWordCount()));
|
||||
@@ -206,7 +200,7 @@ std::string SearchRenderer::renderTemplate(const std::string& tmpl_str)
|
||||
|
||||
|
||||
kainjow::mustache::data allData;
|
||||
allData.set("protocolPrefix", protocolPrefix);
|
||||
allData.set("searchProtocolPrefix", searchProtocolPrefix);
|
||||
allData.set("results", results);
|
||||
allData.set("pagination", pagination);
|
||||
allData.set("query", query);
|
||||
@@ -221,14 +215,14 @@ std::string SearchRenderer::renderTemplate(const std::string& tmpl_str)
|
||||
return ss.str();
|
||||
}
|
||||
|
||||
std::string SearchRenderer::getHtml()
|
||||
std::string SearchRenderer::getHtml(const NameMapper& mapper, const Library* library)
|
||||
{
|
||||
return renderTemplate(RESOURCE::templates::search_result_html);
|
||||
return renderTemplate(RESOURCE::templates::search_result_html, mapper, library);
|
||||
}
|
||||
|
||||
std::string SearchRenderer::getXml()
|
||||
std::string SearchRenderer::getXml(const NameMapper& mapper, const Library* library)
|
||||
{
|
||||
return renderTemplate(RESOURCE::templates::search_result_xml);
|
||||
return renderTemplate(RESOURCE::templates::search_result_xml, mapper, library);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@

namespace kiwix {

Server::Server(Library* library, NameMapper* nameMapper) :
Server::Server(LibraryPtr library, std::shared_ptr<NameMapper> nameMapper) :
mp_library(library),
mp_nameMapper(nameMapper),
mp_server(nullptr)

@@ -37,11 +37,11 @@ namespace {
// into the ETag for ETag::Option opt.
// IMPORTANT: The characters in all_options must come in sorted order (so that
// IMPORTANT: isValidOptionsString() works correctly).
const char all_options[] = "cz";
const char all_options[] = "Zz";

static_assert(ETag::OPTION_COUNT == sizeof(all_options) - 1, "");

bool isValidServerId(const std::string& s)
bool isValidETagBody(const std::string& s)
{
return !s.empty() && s.find_first_of("\"/") == std::string::npos;
}
@@ -83,17 +83,17 @@ bool ETag::get_option(Option opt) const

std::string ETag::get_etag() const
{
if ( m_serverId.empty() )
if ( m_body.empty() )
return std::string();

return "\"" + m_serverId + "/" + m_options + "\"";
return "\"" + m_body + "/" + m_options + "\"";
}

ETag::ETag(const std::string& serverId, const std::string& options)
ETag::ETag(const std::string& body, const std::string& options)
{
if ( isValidServerId(serverId) && isValidOptionsString(options) )
if ( isValidETagBody(body) && isValidOptionsString(options) )
{
m_serverId = serverId;
m_body = body;
m_options = options;
}
}
@@ -115,7 +115,7 @@ ETag ETag::parse(std::string s)
return ETag(s.substr(0, i), s.substr(i+1));
}

ETag ETag::match(const std::string& etags, const std::string& server_id)
ETag ETag::match(const std::string& etags, const std::string& body)
{
std::istringstream ss(etags);
std::string etag_str;
@@ -125,7 +125,7 @@ ETag ETag::match(const std::string& etags, const std::string& server_id)
etag_str.pop_back();

const ETag etag = parse(etag_str);
if ( etag && etag.m_serverId == server_id )
if ( etag && etag.m_body == body )
return etag;
}


@@ -28,10 +28,11 @@ namespace kiwix {
// The ETag string used by Kiwix server (more precisely, its value inside the
// double quotes) consists of two parts:
//
// 1. ServerId - The string obtained on server start up
// 1. Body - A string uniquely identifying the object or state from which
// the resource has been obtained.
//
// 2. Options - Zero or more characters encoding the values of some of the
// headers of the response
// 2. Options - Zero or more characters encoding the type of the ETag and/or
// the values of some of the headers of the response
//
// The two parts are separated with a slash (/) symbol (which is always present,
// even when the the options part is empty). Neither portion of a Kiwix ETag
@@ -40,7 +41,7 @@ namespace kiwix {
//
// "abcdefghijklmn/"
// "1234567890/z"
// "1234567890/cz"
// "6f1d19d0-633f-087b-fb55-7ac324ff9baf/Zz"
//
// The options part of the Kiwix ETag allows to correctly set the required
// headers when responding to a conditional If-None-Match request with a 304
@@ -51,7 +52,7 @@ class ETag
{
public: // types
enum Option {
CACHEABLE_ENTITY,
ZIM_CONTENT,
COMPRESSED_CONTENT,
OPTION_COUNT
};
@@ -59,10 +60,10 @@ class ETag
public: // functions
ETag() {}

void set_server_id(const std::string& id) { m_serverId = id; }
void set_body(const std::string& s) { m_body = s; }
void set_option(Option opt);

explicit operator bool() const { return !m_serverId.empty(); }
explicit operator bool() const { return !m_body.empty(); }

bool get_option(Option opt) const;
std::string get_etag() const;
@@ -76,7 +77,7 @@ class ETag
static ETag parse(std::string s);

private: // data
std::string m_serverId;
std::string m_body;
std::string m_options;
};


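The comment block above documents the two-part `body/options` layout of a Kiwix ETag value. Below is a small standalone illustration of that layout; it is not the library's parser, merely a demonstration of how such a value splits.

// Splits a Kiwix-style ETag value into its "body" and "options" parts.
// Purely illustrative; ETag::parse()/ETag::match() above are the real thing.
#include <iostream>
#include <string>

int main()
{
  const std::string etagValue = "6f1d19d0-633f-087b-fb55-7ac324ff9baf/Zz";
  const auto slash = etagValue.find('/');
  if (slash == std::string::npos) {
    std::cout << "not a Kiwix-style ETag\n";
    return 1;
  }
  const std::string body    = etagValue.substr(0, slash);   // identifies the source state
  const std::string options = etagValue.substr(slash + 1);  // characters from all_options ("Zz")
  std::cout << "body=" << body << ", options=" << options << "\n";
  return 0;
}
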
@@ -70,6 +70,14 @@ public: // functions
return s;
}

size_t getStringCount(const std::string& lang) const {
try {
return lang2TableMap.at(lang)->entryCount;
} catch(const std::out_of_range&) {
return 0;
}
}

private: // functions
const I18nStringTable* getStringsFor(const std::string& lang) const {
try {
@@ -84,13 +92,17 @@ private: // data
const I18nStringTable* enStrings;
};

const I18nStringDB& getStringDb()
{
static const I18nStringDB stringDb;
return stringDb;
}

} // unnamed namespace

std::string getTranslatedString(const std::string& lang, const std::string& key)
{
static const I18nStringDB stringDb;

return stringDb.get(lang, key);
return getStringDb().get(lang, key);
}

namespace i18n
@@ -111,4 +123,70 @@ std::string ParameterizedMessage::getText(const std::string& lang) const
return i18n::expandParameterizedString(lang, msgId, params);
}

namespace
{

LangPreference parseSingleLanguagePreference(const std::string& s)
{
const size_t langStart = s.find_first_not_of(" \t\n");
if ( langStart == std::string::npos ) {
return {"", 0};
}

const size_t langEnd = s.find(';', langStart);
if ( langEnd == std::string::npos ) {
return {s.substr(langStart), 1};
}

const std::string lang = s.substr(langStart, langEnd - langStart);
// We don't care about langEnd == langStart which will result in an empty
// language name - it will be dismissed by parseUserLanguagePreferences()

float q = 1.0;
int nCharsScanned;
if ( 1 == sscanf(s.c_str() + langEnd + 1, "q=%f%n", &q, &nCharsScanned)
&& langEnd + 1 + nCharsScanned == s.size() ) {
return {lang, q};
}

return {"", 0};
}

} // unnamed namespace

UserLangPreferences parseUserLanguagePreferences(const std::string& s)
{
UserLangPreferences result;
std::istringstream iss(s);
std::string singleLangPrefStr;
while ( std::getline(iss, singleLangPrefStr, ',') )
{
const auto langPref = parseSingleLanguagePreference(singleLangPrefStr);
if ( !langPref.lang.empty() && langPref.preference > 0 ) {
result.push_back(langPref);
}
}

return result;
}

std::string selectMostSuitableLanguage(const UserLangPreferences& prefs)
{
if ( prefs.empty() ) {
return "en";
}

std::string bestLangSoFar("en");
float bestScoreSoFar = 0;
const auto& stringDb = getStringDb();
for ( const auto& entry : prefs ) {
const float score = entry.preference * stringDb.getStringCount(entry.lang);
if ( score > bestScoreSoFar ) {
bestScoreSoFar = score;
bestLangSoFar = entry.lang;
}
}
return bestLangSoFar;
}

} // namespace kiwix

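`parseUserLanguagePreferences()` and `selectMostSuitableLanguage()` added above turn an Accept-Language style header into a ranked list and then pick the best-covered translation. A hypothetical usage sketch follows; the header value is made up and the include path assumes the server-internal i18n.h is visible.

// Hypothetical driver for the new helpers; the chosen language also depends on
// how many translated strings each language table actually contains.
#include <iostream>
#include <string>
#include "i18n.h"

int main()
{
  const auto prefs = kiwix::parseUserLanguagePreferences("fr,en;q=0.8,de;q=0.5");
  // prefs now holds {"fr", 1}, {"en", 0.8}, {"de", 0.5}; malformed entries and
  // non-positive q values are dropped by the parser.
  const std::string uiLang = kiwix::selectMostSuitableLanguage(prefs);
  std::cout << "selected UI language: " << uiLang << "\n";  // falls back to "en"
  return 0;
}
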
@@ -69,6 +69,28 @@ private:
const std::string m_lang;
};

class GetTranslatedStringWithMsgId
{
typedef kainjow::mustache::basic_data<std::string> MustacheString;
typedef std::pair<std::string, MustacheString> MsgIdAndTranslation;

public:
explicit GetTranslatedStringWithMsgId(const std::string& lang) : m_lang(lang) {}

MsgIdAndTranslation operator()(const std::string& key) const
{
return {key, getTranslatedString(m_lang, key)};
}

MsgIdAndTranslation operator()(const std::string& key, const Parameters& params) const
{
return {key, expandParameterizedString(m_lang, key, params)};
}

private:
const std::string m_lang;
};

} // namespace i18n

struct ParameterizedMessage
@@ -89,6 +111,18 @@ private: // data
const Parameters params;
};

struct LangPreference
{
const std::string lang;
const float preference;
};

typedef std::vector<LangPreference> UserLangPreferences;

UserLangPreferences parseUserLanguagePreferences(const std::string& s);

std::string selectMostSuitableLanguage(const UserLangPreferences& prefs);

} // namespace kiwix

#endif // KIWIX_SERVER_I18N

@@ -19,7 +19,7 @@
|
||||
|
||||
#include "internalServer.h"
|
||||
|
||||
#ifdef __FreeBSD__
|
||||
#ifndef _WIN32
|
||||
#include <netinet/in.h>
|
||||
#endif
|
||||
|
||||
@@ -53,6 +53,7 @@ extern "C" {
|
||||
#include "name_mapper.h"
|
||||
#include "search_renderer.h"
|
||||
#include "opds_dumper.h"
|
||||
#include "html_dumper.h"
|
||||
#include "i18n.h"
|
||||
|
||||
#include <zim/uuid.h>
|
||||
@@ -77,7 +78,6 @@ extern "C" {
|
||||
#include "request_context.h"
|
||||
#include "response.h"
|
||||
|
||||
#define MAX_SEARCH_LEN 140
|
||||
#define DEFAULT_CACHE_SIZE 2
|
||||
|
||||
namespace kiwix {
|
||||
@@ -95,6 +95,22 @@ inline std::string normalizeRootUrl(std::string rootUrl)
|
||||
return rootUrl.empty() ? rootUrl : "/" + rootUrl;
|
||||
}
|
||||
|
||||
std::string
|
||||
fullURL2LocalURL(const std::string& fullUrl, const std::string& rootLocation)
|
||||
{
|
||||
if ( kiwix::startsWith(fullUrl, rootLocation) ) {
|
||||
return fullUrl.substr(rootLocation.size());
|
||||
} else {
|
||||
return "INVALID URL";
|
||||
}
|
||||
}
|
||||
|
||||
std::string getSearchComponent(const RequestContext& request)
|
||||
{
|
||||
const std::string query = request.get_query();
|
||||
return query.empty() ? query : "?" + query;
|
||||
}
|
||||
|
||||
Filter get_search_filter(const RequestContext& request, const std::string& prefix="")
|
||||
{
|
||||
auto filter = kiwix::Filter().valid(true).local(true);
|
||||
@@ -139,15 +155,6 @@ std::string renderUrl(const std::string& root, const std::string& urlTemplate)
|
||||
return url;
|
||||
}
|
||||
|
||||
std::string makeFulltextSearchSuggestion(const std::string& lang, const std::string& queryString)
|
||||
{
|
||||
return i18n::expandParameterizedString(lang, "suggest-full-text-search",
|
||||
{
|
||||
{"SEARCH_TERMS", queryString}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
ParameterizedMessage noSuchBookErrorMsg(const std::string& bookName)
|
||||
{
|
||||
return ParameterizedMessage("no-such-book", { {"BOOK_NAME", bookName} });
|
||||
@@ -212,12 +219,46 @@ void checkBookNumber(const Library::BookIdSet& bookIds, size_t limit) {
|
||||
}
|
||||
}
|
||||
|
||||
typedef std::set<std::string> Languages;
|
||||
|
||||
Languages getLanguages(const Library& lib, const Library::BookIdSet& bookIds) {
|
||||
Languages langs;
|
||||
for ( const auto& b : bookIds ) {
|
||||
const auto bookLangs = lib.getBookById(b).getLanguages();
|
||||
langs.insert(bookLangs.begin(), bookLangs.end());
|
||||
}
|
||||
return langs;
|
||||
}
|
||||
|
||||
struct CustomizedResourceData
|
||||
{
|
||||
std::string mimeType;
|
||||
std::string resourceFilePath;
|
||||
};
|
||||
|
||||
bool responseMustBeETaggedWithLibraryId(const Response& response, const RequestContext& request)
|
||||
{
|
||||
return response.getReturnCode() == MHD_HTTP_OK
|
||||
&& response.get_kind() == Response::DYNAMIC_CONTENT
|
||||
&& request.get_url() != "/random";
|
||||
}
|
||||
|
||||
ETag
|
||||
get_matching_if_none_match_etag(const RequestContext& r, const std::string& etagBody)
|
||||
{
|
||||
try {
|
||||
const std::string etag_list = r.get_header(MHD_HTTP_HEADER_IF_NONE_MATCH);
|
||||
return ETag::match(etag_list, etagBody);
|
||||
} catch (const std::out_of_range&) {
|
||||
return ETag();
|
||||
}
|
||||
}
|
||||
|
||||
struct NoDelete
|
||||
{
|
||||
template<class T> void operator()(T*) {}
|
||||
};
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::pair<std::string, Library::BookIdSet> InternalServer::selectBooks(const RequestContext& request) const
|
||||
@@ -289,6 +330,10 @@ SearchInfo InternalServer::getSearchInfo(const RequestContext& request) const
|
||||
{
|
||||
auto bookIds = selectBooks(request);
|
||||
checkBookNumber(bookIds.second, m_multizimSearchLimit);
|
||||
if ( getLanguages(*mp_library, bookIds.second).size() != 1 ) {
|
||||
throw Error(nonParameterizedMessage("confusion-of-tongues"));
|
||||
}
|
||||
|
||||
auto pattern = request.get_optional_param<std::string>("pattern", "");
|
||||
GeoQuery geoQuery;
|
||||
|
||||
@@ -366,8 +411,8 @@ public:
|
||||
};
|
||||
|
||||
|
||||
InternalServer::InternalServer(Library* library,
|
||||
NameMapper* nameMapper,
|
||||
InternalServer::InternalServer(LibraryPtr library,
|
||||
std::shared_ptr<NameMapper> nameMapper,
|
||||
std::string addr,
|
||||
int port,
|
||||
std::string root,
|
||||
@@ -382,6 +427,7 @@ InternalServer::InternalServer(Library* library,
|
||||
m_addr(addr),
|
||||
m_port(port),
|
||||
m_root(normalizeRootUrl(root)),
|
||||
m_rootPrefixOfDecodedURL(m_root),
|
||||
m_nbThreads(nbThreads),
|
||||
m_multizimSearchLimit(multizimSearchLimit),
|
||||
m_verbose(verbose),
|
||||
@@ -392,11 +438,13 @@ InternalServer::InternalServer(Library* library,
|
||||
m_ipConnectionLimit(ipConnectionLimit),
|
||||
mp_daemon(nullptr),
|
||||
mp_library(library),
|
||||
mp_nameMapper(nameMapper ? nameMapper : &defaultNameMapper),
|
||||
mp_nameMapper(nameMapper ? nameMapper : std::shared_ptr<NameMapper>(&defaultNameMapper, NoDelete())),
|
||||
searchCache(getEnvVar<int>("KIWIX_SEARCH_CACHE_SIZE", DEFAULT_CACHE_SIZE)),
|
||||
suggestionSearcherCache(getEnvVar<int>("KIWIX_SUGGESTION_SEARCHER_CACHE_SIZE", std::max((unsigned int) (mp_library->getBookCount(true, true)*0.1), 1U))),
|
||||
m_customizedResources(new CustomizedResources)
|
||||
{}
|
||||
{
|
||||
m_root = urlEncode(m_root);
|
||||
}
|
||||
|
||||
InternalServer::~InternalServer() = default;
|
||||
|
||||
@@ -443,7 +491,6 @@ bool InternalServer::start() {
|
||||
}
|
||||
auto server_start_time = std::chrono::system_clock::now().time_since_epoch();
|
||||
m_server_id = kiwix::to_string(server_start_time.count());
|
||||
m_library_id = m_server_id;
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -473,7 +520,7 @@ static MHD_Result staticHandlerCallback(void* cls,
|
||||
}
|
||||
|
||||
MHD_Result InternalServer::handlerCallback(struct MHD_Connection* connection,
|
||||
const char* url,
|
||||
const char* fullUrl,
|
||||
const char* method,
|
||||
const char* version,
|
||||
const char* upload_data,
|
||||
@@ -484,8 +531,10 @@ MHD_Result InternalServer::handlerCallback(struct MHD_Connection* connection,
|
||||
if (m_verbose.load() ) {
|
||||
printf("======================\n");
|
||||
printf("Requesting : \n");
|
||||
printf("full_url : %s\n", url);
|
||||
printf("full_url : %s\n", fullUrl);
|
||||
}
|
||||
|
||||
const auto url = fullURL2LocalURL(fullUrl, m_rootPrefixOfDecodedURL);
|
||||
RequestContext request(connection, m_root, url, method, version);
|
||||
|
||||
if (m_verbose.load() ) {
|
||||
@@ -506,13 +555,14 @@ MHD_Result InternalServer::handlerCallback(struct MHD_Connection* connection,
|
||||
printf("========== INTERNAL ERROR !! ============\n");
|
||||
if (!m_verbose.load()) {
|
||||
printf("Requesting : \n");
|
||||
printf("full_url : %s\n", url);
|
||||
printf("full_url : %s\n", fullUrl);
|
||||
request.print_debug_info();
|
||||
}
|
||||
}
|
||||
|
||||
if (response->getReturnCode() == MHD_HTTP_OK && !etag_not_needed(request))
|
||||
response->set_server_id(m_server_id);
|
||||
if ( responseMustBeETaggedWithLibraryId(*response, request) ) {
|
||||
response->set_etag_body(getLibraryId());
|
||||
}
|
||||
|
||||
auto ret = response->send(request, connection);
|
||||
auto end_time = std::chrono::steady_clock::now();
|
||||
@@ -534,6 +584,11 @@ bool isEndpointUrl(const std::string& url, const std::string& endpoint)
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::string InternalServer::getLibraryId() const
|
||||
{
|
||||
return m_server_id + "." + kiwix::to_string(mp_library->getRevision());
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_request(const RequestContext& request)
|
||||
{
|
||||
try {
|
||||
@@ -542,7 +597,14 @@ std::unique_ptr<Response> InternalServer::handle_request(const RequestContext& r
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
const ETag etag = get_matching_if_none_match_etag(request);
|
||||
if ( request.get_url() == "" ) {
|
||||
// Redirect /ROOT_LOCATION to /ROOT_LOCATION/ (note the added slash)
|
||||
// so that relative URLs are resolved correctly
|
||||
const std::string query = getSearchComponent(request);
|
||||
return Response::build_redirect(*this, m_root + "/" + query);
|
||||
}
|
||||
|
||||
const ETag etag = get_matching_if_none_match_etag(request, getLibraryId());
|
||||
if ( etag )
|
||||
return Response::build_304(*this, etag);
|
||||
|
||||
@@ -571,6 +633,9 @@ std::unique_ptr<Response> InternalServer::handle_request(const RequestContext& r
|
||||
if (isEndpointUrl(url, "search"))
|
||||
return handle_search(request);
|
||||
|
||||
if (isEndpointUrl(url, "nojs"))
|
||||
return handle_no_js(request);
|
||||
|
||||
if (isEndpointUrl(url, "suggest"))
|
||||
return handle_suggest(request);
|
||||
|
||||
@@ -580,11 +645,9 @@ std::unique_ptr<Response> InternalServer::handle_request(const RequestContext& r
|
||||
if (isEndpointUrl(url, "catch"))
|
||||
return handle_catch(request);
|
||||
|
||||
std::string contentUrl = m_root + "/content" + url;
|
||||
const std::string query = request.get_query();
|
||||
if ( ! query.empty() )
|
||||
contentUrl += "?" + query;
|
||||
return Response::build_redirect(*this, contentUrl);
|
||||
const std::string contentUrl = m_root + "/content" + urlEncode(url);
|
||||
const std::string query = getSearchComponent(request);
|
||||
return Response::build_redirect(*this, contentUrl + query);
|
||||
} catch (std::exception& e) {
|
||||
fprintf(stderr, "===== Unhandled error : %s\n", e.what());
|
||||
return HTTP500Response(*this, request)
|
||||
@@ -603,27 +666,6 @@ MustacheData InternalServer::get_default_data() const
|
||||
return data;
|
||||
}
|
||||
|
||||
bool InternalServer::etag_not_needed(const RequestContext& request) const
|
||||
{
|
||||
const std::string url = request.get_url();
|
||||
return kiwix::startsWith(url, "/catalog")
|
||||
|| url == "/search"
|
||||
|| url == "/suggest"
|
||||
|| url == "/random"
|
||||
|| url == "/catch/external";
|
||||
}
|
||||
|
||||
ETag
|
||||
InternalServer::get_matching_if_none_match_etag(const RequestContext& r) const
|
||||
{
|
||||
try {
|
||||
const std::string etag_list = r.get_header(MHD_HTTP_HEADER_IF_NONE_MATCH);
|
||||
return ETag::match(etag_list, m_server_id);
|
||||
} catch (const std::out_of_range&) {
|
||||
return ETag();
|
||||
}
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::build_homepage(const RequestContext& request)
|
||||
{
|
||||
return ContentResponse::build(*this, m_indexTemplateString, get_default_data(), "text/html; charset=utf-8");
|
||||
@@ -633,6 +675,21 @@ std::unique_ptr<Response> InternalServer::build_homepage(const RequestContext& r
|
||||
* Archive and Zim handlers begin
|
||||
**/
|
||||
|
||||
class InternalServer::LockableSuggestionSearcher : public zim::SuggestionSearcher
|
||||
{
|
||||
public:
|
||||
explicit LockableSuggestionSearcher(const zim::Archive& archive)
|
||||
: zim::SuggestionSearcher(archive)
|
||||
{}
|
||||
|
||||
std::unique_lock<std::mutex> getLock() {
|
||||
return std::unique_lock<std::mutex>(m_mutex);
|
||||
}
|
||||
virtual ~LockableSuggestionSearcher() = default;
|
||||
private:
|
||||
std::mutex m_mutex;
|
||||
};
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_suggest(const RequestContext& request)
|
||||
{
|
||||
if (m_verbose.load()) {
|
||||
@@ -670,50 +727,27 @@ std::unique_ptr<Response> InternalServer::handle_suggest(const RequestContext& r
|
||||
printf("Searching suggestions for: \"%s\"\n", queryString.c_str());
|
||||
}
|
||||
|
||||
MustacheData results{MustacheData::type::list};
|
||||
|
||||
bool first = true;
|
||||
Suggestions results;
|
||||
|
||||
/* Get the suggestions */
|
||||
auto searcher = suggestionSearcherCache.getOrPut(bookId,
|
||||
[=](){ return make_shared<zim::SuggestionSearcher>(*archive); }
|
||||
[=](){ return make_shared<LockableSuggestionSearcher>(*archive); }
|
||||
);
|
||||
const auto lock(searcher->getLock());
|
||||
auto search = searcher->suggest(queryString);
|
||||
auto srs = search.getResults(start, count);
|
||||
|
||||
for(auto& suggestion: srs) {
|
||||
MustacheData result;
|
||||
result.set("label", suggestion.getTitle());
|
||||
|
||||
if (suggestion.hasSnippet()) {
|
||||
result.set("label", suggestion.getSnippet());
|
||||
}
|
||||
|
||||
result.set("value", suggestion.getTitle());
|
||||
result.set("kind", "path");
|
||||
result.set("path", suggestion.getPath());
|
||||
result.set("first", first);
|
||||
first = false;
|
||||
results.push_back(result);
|
||||
results.add(suggestion);
|
||||
}
|
||||
|
||||
|
||||
/* Propose the fulltext search if possible */
|
||||
if (archive->hasFulltextIndex()) {
|
||||
MustacheData result;
|
||||
const auto lang = request.get_user_language();
|
||||
result.set("label", makeFulltextSearchSuggestion(lang, queryString));
|
||||
result.set("value", queryString + " ");
|
||||
result.set("kind", "pattern");
|
||||
result.set("first", first);
|
||||
results.push_back(result);
|
||||
results.addFTSearchSuggestion(request.get_user_language(), queryString);
|
||||
}
|
||||
|
||||
auto data = get_default_data();
|
||||
data.set("suggestions", results);
|
||||
|
||||
auto response = ContentResponse::build(*this, RESOURCE::templates::suggestion_json, data, "application/json; charset=utf-8");
|
||||
return std::move(response);
|
||||
return ContentResponse::build(*this, results.getJSON(), "application/json; charset=utf-8");
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_viewer_settings(const RequestContext& request)
|
||||
@@ -725,11 +759,98 @@ std::unique_ptr<Response> InternalServer::handle_viewer_settings(const RequestCo
|
||||
const kainjow::mustache::object data{
|
||||
{"enable_toolbar", m_withTaskbar ? "true" : "false" },
|
||||
{"enable_link_blocking", m_blockExternalLinks ? "true" : "false" },
|
||||
{"enable_library_button", m_withLibraryButton ? "true" : "false" }
|
||||
{"enable_library_button", m_withLibraryButton ? "true" : "false" },
|
||||
{"default_user_language", request.get_user_language() }
|
||||
};
|
||||
return ContentResponse::build(*this, RESOURCE::templates::viewer_settings_js, data, "application/javascript; charset=utf-8");
|
||||
}
|
||||
|
||||
std::string InternalServer::getNoJSDownloadPageHTML(const std::string& bookId, const std::string& userLang) const
|
||||
{
|
||||
const auto book = mp_library->getBookById(bookId);
|
||||
auto bookUrl = kiwix::stripSuffix(book.getUrl(), ".meta4");
|
||||
auto getTranslation = i18n::GetTranslatedStringWithMsgId(userLang);
|
||||
const auto translations = kainjow::mustache::object{
|
||||
getTranslation("download-links-heading", {{"BOOK_TITLE", book.getTitle()}}),
|
||||
getTranslation("download-links-title"),
|
||||
getTranslation("direct-download-link-text"),
|
||||
getTranslation("hash-download-link-text"),
|
||||
getTranslation("magnet-link-text"),
|
||||
getTranslation("torrent-download-link-text")
|
||||
};
|
||||
|
||||
return render_template(
|
||||
RESOURCE::templates::no_js_download_html,
|
||||
kainjow::mustache::object{
|
||||
{"url", bookUrl},
|
||||
{"translations", translations}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_no_js(const RequestContext& request)
|
||||
{
|
||||
const auto url = request.get_url();
|
||||
const auto urlParts = kiwix::split(url, "/", true, false);
|
||||
HTMLDumper htmlDumper(mp_library.get(), mp_nameMapper.get());
|
||||
htmlDumper.setRootLocation(m_root);
|
||||
htmlDumper.setLibraryId(getLibraryId());
|
||||
auto userLang = request.get_user_language();
|
||||
htmlDumper.setUserLanguage(userLang);
|
||||
std::string content;
|
||||
|
||||
if (urlParts.size() == 1) {
|
||||
auto filter = get_search_filter(request);
|
||||
try {
|
||||
if (request.get_argument("category") == "") {
|
||||
filter.clearCategory();
|
||||
}
|
||||
} catch (...) {}
|
||||
try {
|
||||
if (request.get_argument("lang") == "") {
|
||||
filter.clearLang();
|
||||
}
|
||||
} catch (...) {}
|
||||
content = htmlDumper.dumpPlainHTML(filter);
|
||||
} else if ((urlParts.size() == 3) && (urlParts[1] == "download")) {
|
||||
try {
|
||||
const auto bookId = mp_nameMapper->getIdForName(urlParts[2]);
|
||||
content = getNoJSDownloadPageHTML(bookId, userLang);
|
||||
} catch (const std::out_of_range&) {
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
} else {
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
content,
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
Response::Kind staticResourceAccessType(const RequestContext& req, const char* expectedCacheid)
|
||||
{
|
||||
if ( expectedCacheid == nullptr )
|
||||
return Response::DYNAMIC_CONTENT;
|
||||
|
||||
try {
|
||||
if ( expectedCacheid != req.get_argument("cacheid") )
|
||||
throw ResourceNotFound("Wrong cacheid");
|
||||
return Response::STATIC_RESOURCE;
|
||||
} catch( const std::out_of_range& ) {
|
||||
return Response::DYNAMIC_CONTENT;
|
||||
}
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_skin(const RequestContext& request)
|
||||
{
|
||||
if (m_verbose.load()) {
|
||||
@@ -740,12 +861,16 @@ std::unique_ptr<Response> InternalServer::handle_skin(const RequestContext& requ
|
||||
auto resourceName = isRequestForViewer
|
||||
? "viewer.html"
|
||||
: request.get_url().substr(1);
|
||||
|
||||
const char* const resourceCacheId = getResourceCacheId(resourceName);
|
||||
|
||||
try {
|
||||
const auto accessType = staticResourceAccessType(request, resourceCacheId);
|
||||
auto response = ContentResponse::build(
|
||||
*this,
|
||||
getResource(resourceName),
|
||||
getMimeTypeForFile(resourceName));
|
||||
response->set_cacheable();
|
||||
response->set_kind(accessType);
|
||||
return std::move(response);
|
||||
} catch (const ResourceNotFound& e) {
|
||||
return HTTP404Response(*this, request)
|
||||
@@ -772,86 +897,101 @@ std::unique_ptr<Response> InternalServer::handle_search(const RequestContext& re
|
||||
}
|
||||
|
||||
try {
|
||||
auto searchInfo = getSearchInfo(request);
|
||||
auto bookIds = searchInfo.getBookIds();
|
||||
return handle_search_request(request);
|
||||
} catch (const Error& e) {
|
||||
return HTTP400Response(*this, request)
|
||||
+ invalidUrlMsg
|
||||
+ e.message();
|
||||
}
|
||||
}
|
||||
|
||||
/* Make the search */
|
||||
// Try to get a search from the searchInfo, else build it
|
||||
auto searcher = mp_library->getSearcherByIds(bookIds);
|
||||
auto lock(searcher->getLock());
|
||||
namespace
|
||||
{
|
||||
|
||||
std::shared_ptr<zim::Search> search;
|
||||
try {
|
||||
search = searchCache.getOrPut(searchInfo,
|
||||
[=](){
|
||||
return make_shared<zim::Search>(searcher->search(searchInfo.getZimQuery(m_verbose.load())));
|
||||
}
|
||||
);
|
||||
} catch(std::runtime_error& e) {
|
||||
// Searcher->search will throw a runtime error if there is no valid xapian database to do the search.
|
||||
// (in case of zim file not containing a index)
|
||||
const auto cssUrl = renderUrl(m_root, RESOURCE::templates::url_of_search_results_css);
|
||||
HTTPErrorResponse response(*this, request, MHD_HTTP_NOT_FOUND,
|
||||
"fulltext-search-unavailable",
|
||||
"404-page-heading",
|
||||
cssUrl);
|
||||
response += nonParameterizedMessage("no-search-results");
|
||||
// XXX: Now this has to be handled by the iframe-based viewer which
|
||||
// XXX: has to resolve if the book selection resulted in a single book.
|
||||
/*
|
||||
if(bookIds.size() == 1) {
|
||||
auto bookId = *bookIds.begin();
|
||||
auto bookName = mp_nameMapper->getNameForId(bookId);
|
||||
response += TaskbarInfo(bookName, mp_library->getArchiveById(bookId).get());
|
||||
unsigned getSearchPageSize(const RequestContext& r)
|
||||
{
|
||||
const auto DEFAULT_PAGE_LEN = 25u;
|
||||
const auto MAX_PAGE_LEN = 140u;
|
||||
|
||||
const auto pageLength = r.get_optional_param("pageLength", DEFAULT_PAGE_LEN);
|
||||
return pageLength == 0
|
||||
? DEFAULT_PAGE_LEN
|
||||
: min(MAX_PAGE_LEN, pageLength);
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_search_request(const RequestContext& request)
|
||||
{
|
||||
auto searchInfo = getSearchInfo(request);
|
||||
auto bookIds = searchInfo.getBookIds();
|
||||
|
||||
/* Make the search */
|
||||
// Try to get a search from the searchInfo, else build it
|
||||
auto searcher = mp_library->getSearcherByIds(bookIds);
|
||||
auto lock(searcher->getLock());
|
||||
|
||||
std::shared_ptr<zim::Search> search;
|
||||
try {
|
||||
search = searchCache.getOrPut(searchInfo,
|
||||
[=](){
|
||||
return make_shared<zim::Search>(searcher->search(searchInfo.getZimQuery(m_verbose.load())));
|
||||
}
|
||||
*/
|
||||
return response;
|
||||
}
|
||||
|
||||
auto start = 1;
|
||||
try {
|
||||
start = request.get_argument<unsigned int>("start");
|
||||
} catch (const std::exception&) {}
|
||||
start = max(1, start);
|
||||
|
||||
auto pageLength = 25;
|
||||
try {
|
||||
pageLength = request.get_argument<unsigned int>("pageLength");
|
||||
} catch (const std::exception&) {}
|
||||
if (pageLength > MAX_SEARCH_LEN) {
|
||||
pageLength = MAX_SEARCH_LEN;
|
||||
}
|
||||
if (pageLength == 0) {
|
||||
pageLength = 25;
|
||||
}
|
||||
|
||||
/* Get the results */
|
||||
SearchRenderer renderer(search->getResults(start-1, pageLength), mp_nameMapper, mp_library, start,
|
||||
search->getEstimatedMatches());
|
||||
renderer.setSearchPattern(searchInfo.pattern);
|
||||
renderer.setSearchBookQuery(searchInfo.bookFilterQuery);
|
||||
renderer.setProtocolPrefix(m_root + "/");
|
||||
renderer.setSearchProtocolPrefix(m_root + "/search");
|
||||
renderer.setPageLength(pageLength);
|
||||
if (request.get_requested_format() == "xml") {
|
||||
return ContentResponse::build(*this, renderer.getXml(), "application/rss+xml; charset=utf-8");
|
||||
}
|
||||
auto response = ContentResponse::build(*this, renderer.getHtml(), "text/html; charset=utf-8");
|
||||
);
|
||||
} catch(std::runtime_error& e) {
|
||||
// Searcher->search will throw a runtime error if there is no valid xapian database to do the search.
|
||||
// (in case of zim file not containing a index)
|
||||
const auto cssUrl = renderUrl(m_root, RESOURCE::templates::url_of_search_results_css);
|
||||
HTTPErrorResponse response(*this, request, MHD_HTTP_NOT_FOUND,
|
||||
"fulltext-search-unavailable",
|
||||
"404-page-heading",
|
||||
cssUrl);
|
||||
response += nonParameterizedMessage("no-search-results");
|
||||
// XXX: Now this has to be handled by the iframe-based viewer which
|
||||
// XXX: has to resolve if the book selection resulted in a single book.
|
||||
/*
|
||||
if(bookIds.size() == 1) {
|
||||
auto bookId = *bookIds.begin();
|
||||
auto bookName = mp_nameMapper->getNameForId(bookId);
|
||||
response->set_taskbar(bookName, mp_library->getArchiveById(bookId).get());
|
||||
response += TaskbarInfo(bookName, mp_library->getArchiveById(bookId).get());
|
||||
}
|
||||
*/
|
||||
return std::move(response);
|
||||
} catch (const Error& e) {
|
||||
return HTTP400Response(*this, request)
|
||||
+ invalidUrlMsg
|
||||
+ e.message();
|
||||
return response;
|
||||
}
|
||||
|
||||
const auto start = max(1u, request.get_optional_param("start", 1u));
|
||||
const auto pageLength = getSearchPageSize(request);
|
||||
|
||||
/* Get the results */
|
||||
SearchRenderer renderer(search->getResults(start-1, pageLength), start,
|
||||
search->getEstimatedMatches());
|
||||
renderer.setSearchPattern(searchInfo.pattern);
|
||||
renderer.setSearchBookQuery(searchInfo.bookFilterQuery);
|
||||
renderer.setProtocolPrefix(m_root + "/content/");
|
||||
renderer.setSearchProtocolPrefix(m_root + "/search");
|
||||
renderer.setPageLength(pageLength);
|
||||
if (request.get_requested_format() == "xml") {
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
renderer.getXml(*mp_nameMapper, mp_library.get()),
|
||||
"application/rss+xml; charset=utf-8"
|
||||
);
|
||||
}
|
||||
auto response = ContentResponse::build(
|
||||
*this,
|
||||
renderer.getHtml(*mp_nameMapper, mp_library.get()),
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
// XXX: Now this has to be handled by the iframe-based viewer which
|
||||
// XXX: has to resolve if the book selection resulted in a single book.
|
||||
/*
|
||||
if(bookIds.size() == 1) {
|
||||
auto bookId = *bookIds.begin();
|
||||
auto bookName = mp_nameMapper->getNameForId(bookId);
|
||||
response->set_taskbar(bookName, mp_library->getArchiveById(bookId).get());
|
||||
}
|
||||
*/
|
||||
return std::move(response);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_random(const RequestContext& request)
|
||||
@@ -920,69 +1060,16 @@ std::unique_ptr<Response> InternalServer::handle_catch(const RequestContext& req
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog(const RequestContext& request)
|
||||
{
|
||||
if (m_verbose.load()) {
|
||||
printf("** running handle_catalog");
|
||||
}
|
||||
|
||||
std::string host;
|
||||
std::string url;
|
||||
try {
|
||||
host = request.get_header("Host");
|
||||
url = request.get_url_part(1);
|
||||
} catch (const std::out_of_range&) {
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
if (url == "v2") {
|
||||
return handle_catalog_v2(request);
|
||||
}
|
||||
|
||||
if (url != "searchdescription.xml" && url != "root.xml" && url != "search") {
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
if (url == "searchdescription.xml") {
|
||||
auto response = ContentResponse::build(*this, RESOURCE::opensearchdescription_xml, get_default_data(), "application/opensearchdescription+xml");
|
||||
return std::move(response);
|
||||
}
|
||||
|
||||
zim::Uuid uuid;
|
||||
kiwix::OPDSDumper opdsDumper(mp_library);
|
||||
opdsDumper.setRootLocation(m_root);
|
||||
opdsDumper.setLibraryId(m_library_id);
|
||||
std::vector<std::string> bookIdsToDump;
|
||||
if (url == "root.xml") {
|
||||
uuid = zim::Uuid::generate(host);
|
||||
bookIdsToDump = mp_library->filter(kiwix::Filter().valid(true).local(true).remote(true));
|
||||
} else if (url == "search") {
|
||||
bookIdsToDump = search_catalog(request, opdsDumper);
|
||||
uuid = zim::Uuid::generate();
|
||||
}
|
||||
|
||||
auto response = ContentResponse::build(
|
||||
*this,
|
||||
opdsDumper.dumpOPDSFeed(bookIdsToDump, request.get_query()),
|
||||
"application/atom+xml; profile=opds-catalog; kind=acquisition; charset=utf-8");
|
||||
return std::move(response);
|
||||
}
|
||||
|
||||
std::vector<std::string>
|
||||
InternalServer::search_catalog(const RequestContext& request,
|
||||
kiwix::OPDSDumper& opdsDumper)
|
||||
{
|
||||
const auto filter = get_search_filter(request);
|
||||
const std::string q = filter.hasQuery()
|
||||
? filter.getQuery()
|
||||
: "<Empty query>";
|
||||
std::vector<std::string> bookIdsToDump = mp_library->filter(filter);
|
||||
const auto totalResults = bookIdsToDump.size();
|
||||
const size_t count = request.get_optional_param("count", 10UL);
|
||||
const long count = request.get_optional_param("count", 10L);
|
||||
const size_t startIndex = request.get_optional_param("start", 0UL);
|
||||
const size_t intendedCount = count > 0 ? count : bookIdsToDump.size();
|
||||
const size_t intendedCount = count >= 0 ? count : bookIdsToDump.size();
|
||||
bookIdsToDump = subrange(bookIdsToDump, startIndex, intendedCount);
|
||||
opdsDumper.setOpenSearchInfo(totalResults, startIndex, bookIdsToDump.size());
|
||||
return bookIdsToDump;
|
||||
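The pagination above works on the full list of matching book ids (so that totalResults can still be reported through setOpenSearchInfo) and then keeps only the requested window. The subrange() helper itself is not shown in this diff; the snippet below is a minimal sketch under the assumption that it merely clamps the [start, start + count) window to the available results. The name subrangeSketch and the figures in the usage comment are illustrative only.

#include <algorithm>
#include <string>
#include <vector>

// Hypothetical stand-in for the subrange() helper used above (not the actual
// libkiwix implementation): keep at most `count` elements starting at `start`,
// clamping both bounds to the size of the input.
std::vector<std::string> subrangeSketch(const std::vector<std::string>& ids,
                                        size_t start, size_t count)
{
  const size_t from = std::min(start, ids.size());
  const size_t to   = std::min(from + count, ids.size());
  return std::vector<std::string>(ids.begin() + from, ids.begin() + to);
}

// Usage: with 25 matching books, start=20 and count=10 (the default) keep the
// last 5 ids, while the OpenSearch info still reports totalResults=25.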
@@ -1000,14 +1087,37 @@ ParameterizedMessage suggestSearchMsg(const std::string& searchURL, const std::s
|
||||
});
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// The content security policy below is set on responses to the /content
|
||||
// endpoint in order to prevent the ZIM content from interfering with the
|
||||
// viewer (e.g. breaking out of the viewer iframe by performing top-level
|
||||
// navigation).
|
||||
const std::string CONTENT_CSP_HEADER =
|
||||
"default-src 'self' "
|
||||
"data: "
|
||||
"blob: "
|
||||
"about: "
|
||||
"'unsafe-inline' "
|
||||
"'unsafe-eval'; "
|
||||
|
||||
"sandbox allow-scripts "
|
||||
"allow-same-origin "
|
||||
"allow-modals "
|
||||
"allow-popups "
|
||||
"allow-forms "
|
||||
"allow-downloads;";
|
||||
|
||||
// End of content security policy
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::unique_ptr<Response>
|
||||
InternalServer::build_redirect(const std::string& bookName, const zim::Item& item) const
|
||||
{
|
||||
const auto path = kiwix::urlEncode(item.getPath());
|
||||
const auto redirectUrl = m_root + "/content/" + bookName + "/" + path;
|
||||
return Response::build_redirect(*this, redirectUrl);
|
||||
const auto contentPath = "/content/" + bookName + "/" + item.getPath();
|
||||
const auto url = m_root + kiwix::urlEncode(contentPath);
|
||||
return Response::build_redirect(*this, url);
|
||||
}
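As an illustration of the new build_redirect() above (the book name, item path and root below are made-up example values), the whole /content/<book>/<path> string is URI-encoded and then prefixed with the already-encoded server root:

#include <string>
// assumes kiwix::urlEncode from tools/stringTools.h is visible

// Illustrative sketch only, not part of the server code.
std::string redirectTargetExample(const std::string& root /* e.g. "/ROOT" */)
{
  const std::string bookName = "wikipedia";     // hypothetical book name
  const std::string itemPath = "A/Café.html";   // hypothetical item path
  const auto contentPath = "/content/" + bookName + "/" + itemPath;
  // urlEncode() keeps '/' unencoded (see the stringTools changes further
  // below), so this yields "/ROOT/content/wikipedia/A/Caf%C3%A9.html".
  return root + kiwix::urlEncode(contentPath);
}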
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_content(const RequestContext& request)
|
||||
@@ -1030,12 +1140,17 @@ std::unique_ptr<Response> InternalServer::handle_content(const RequestContext& r
|
||||
} catch (const std::out_of_range& e) {}
|
||||
|
||||
if (archive == nullptr) {
|
||||
const std::string searchURL = m_root + "/search?pattern=" + kiwix::urlEncode(pattern, true);
|
||||
const std::string searchURL = m_root + "/search?pattern=" + kiwix::urlEncode(pattern);
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg
|
||||
+ suggestSearchMsg(searchURL, kiwix::urlDecode(pattern));
|
||||
}
|
||||
|
||||
const std::string archiveUuid(archive->getUuid());
|
||||
const ETag etag = get_matching_if_none_match_etag(request, archiveUuid);
|
||||
if ( etag )
|
||||
return Response::build_304(*this, etag);
|
||||
|
||||
auto urlStr = url.substr(prefixLength + bookName.size());
|
||||
if (urlStr[0] == '/') {
|
||||
urlStr = urlStr.substr(1);
|
||||
@@ -1043,12 +1158,25 @@ std::unique_ptr<Response> InternalServer::handle_content(const RequestContext& r
|
||||
|
||||
try {
|
||||
auto entry = getEntryFromPath(*archive, urlStr);
|
||||
if (entry.isRedirect() || urlStr.empty()) {
|
||||
// If urlStr is empty, we want the main page.
|
||||
// We must do a redirection to the real page.
|
||||
if (entry.isRedirect() || urlStr != entry.getPath()) {
|
||||
// In the condition above, the second case (an entry with a different
|
||||
// URL was returned) can occur in the following situations:
|
||||
// 1. urlStr is empty or equal to "/" and the ZIM file doesn't contain
|
||||
// such an entry, in which case the main entry is returned instead.
|
||||
// 2. The ZIM file uses old namespace scheme, and the resource at urlStr
|
||||
// is not present but can be found under one of the 'A', 'I', 'J' or
|
||||
// '-' namespaces, in which case that resource is returned instead.
|
||||
return build_redirect(bookName, getFinalItem(*archive, entry));
|
||||
}
|
||||
auto response = ItemResponse::build(*this, request, entry.getItem());
|
||||
response->set_etag_body(archiveUuid);
|
||||
|
||||
if ( !startsWith(entry.getItem().getMimetype(), "application/pdf") ) {
|
||||
// NOTE: Content security policy is not applied to PDF content so that
|
||||
// NOTE: it can be displayed in the viewer in Chromium-based browsers.
|
||||
response->add_header("Content-Security-Policy", CONTENT_CSP_HEADER);
|
||||
response->add_header("Referrer-Policy", "no-referrer");
|
||||
}
|
||||
|
||||
if (m_verbose.load()) {
|
||||
printf("Found %s\n", entry.getPath().c_str());
|
||||
@@ -1060,7 +1188,7 @@ std::unique_ptr<Response> InternalServer::handle_content(const RequestContext& r
|
||||
if (m_verbose.load())
|
||||
printf("Failed to find %s\n", urlStr.c_str());
|
||||
|
||||
std::string searchURL = m_root + "/search?content=" + bookName + "&pattern=" + kiwix::urlEncode(pattern, true);
|
||||
std::string searchURL = m_root + "/search?content=" + bookName + "&pattern=" + kiwix::urlEncode(pattern);
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg
|
||||
+ suggestSearchMsg(searchURL, kiwix::urlDecode(pattern));
|
||||
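The two situations listed in the comment above come from the fallback logic of getEntryFromPath(), whose definition is not part of this hunk. The following is a minimal sketch of such a fallback against the libzim Archive/Entry API; it is not the actual libkiwix implementation, but it shows why the returned entry's path can differ from urlStr and therefore trigger the redirect:

#include <string>
#include <zim/archive.h>
#include <zim/entry.h>
#include <zim/error.h>

// Sketch only: empty paths fall back to the main entry (case 1), and misses on
// old-namespace-scheme ZIM files are retried under the legacy namespaces
// (case 2). Either fallback returns an entry whose path differs from the
// requested one.
zim::Entry resolveEntrySketch(const zim::Archive& archive, const std::string& path)
{
  if (path.empty() || path == "/")
    return archive.getMainEntry();
  try {
    return archive.getEntryByPath(path);
  } catch (const zim::EntryNotFound&) {
    for (const char ns : {'A', 'I', 'J', '-'}) {
      try {
        return archive.getEntryByPath(std::string(1, ns) + "/" + path);
      } catch (const zim::EntryNotFound&) {
      }
    }
    throw;
  }
}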
@@ -1102,6 +1230,11 @@ std::unique_ptr<Response> InternalServer::handle_raw(const RequestContext& reque
|
||||
+ noSuchBookErrorMsg(bookName);
|
||||
}
|
||||
|
||||
const std::string archiveUuid(archive->getUuid());
|
||||
const ETag etag = get_matching_if_none_match_etag(request, archiveUuid);
|
||||
if ( etag )
|
||||
return Response::build_304(*this, etag);
|
||||
|
||||
// Remove the beginning of the path:
|
||||
// /raw/<bookName>/<kind>/foo
|
||||
// ^^^^^ ^ ^
|
||||
@@ -1111,13 +1244,17 @@ std::unique_ptr<Response> InternalServer::handle_raw(const RequestContext& reque
|
||||
try {
|
||||
if (kind == "meta") {
|
||||
auto item = archive->getMetadataItem(itemPath);
|
||||
return ItemResponse::build(*this, request, item);
|
||||
auto response = ItemResponse::build(*this, request, item);
|
||||
response->set_etag_body(archiveUuid);
|
||||
return response;
|
||||
} else {
|
||||
auto entry = archive->getEntryByPath(itemPath);
|
||||
if (entry.isRedirect()) {
|
||||
return build_redirect(bookName, entry.getItem(true));
|
||||
}
|
||||
return ItemResponse::build(*this, request, entry.getItem());
|
||||
auto response = ItemResponse::build(*this, request, entry.getItem());
|
||||
response->set_etag_body(archiveUuid);
|
||||
return response;
|
||||
}
|
||||
} catch (zim::EntryNotFound& e ) {
|
||||
if (m_verbose.load()) {
|
||||
|
||||
@@ -88,15 +88,12 @@ class SearchInfo {
|
||||
|
||||
|
||||
typedef kainjow::mustache::data MustacheData;
|
||||
typedef ConcurrentCache<SearchInfo, std::shared_ptr<zim::Search>> SearchCache;
|
||||
typedef ConcurrentCache<std::string, std::shared_ptr<zim::SuggestionSearcher>> SuggestionSearcherCache;
|
||||
|
||||
class OPDSDumper;
|
||||
|
||||
class InternalServer {
|
||||
public:
|
||||
InternalServer(Library* library,
|
||||
NameMapper* nameMapper,
|
||||
InternalServer(LibraryPtr library,
|
||||
std::shared_ptr<NameMapper> nameMapper,
|
||||
std::string addr,
|
||||
int port,
|
||||
std::string root,
|
||||
@@ -134,9 +131,11 @@ class InternalServer {
|
||||
std::unique_ptr<Response> handle_catalog_v2_entries(const RequestContext& request, bool partial);
|
||||
std::unique_ptr<Response> handle_catalog_v2_complete_entry(const RequestContext& request, const std::string& entryId);
|
||||
std::unique_ptr<Response> handle_catalog_v2_categories(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_no_js(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_catalog_v2_languages(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_catalog_v2_illustration(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_search(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_search_request(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_suggest(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_random(const RequestContext& request);
|
||||
std::unique_ptr<Response> handle_catch(const RequestContext& request);
|
||||
@@ -150,17 +149,25 @@ class InternalServer {
|
||||
|
||||
MustacheData get_default_data() const;
|
||||
|
||||
bool etag_not_needed(const RequestContext& r) const;
|
||||
ETag get_matching_if_none_match_etag(const RequestContext& request) const;
|
||||
std::pair<std::string, Library::BookIdSet> selectBooks(const RequestContext& r) const;
|
||||
SearchInfo getSearchInfo(const RequestContext& r) const;
|
||||
|
||||
bool isLocallyCustomizedResource(const std::string& url) const;
|
||||
|
||||
std::string getLibraryId() const;
|
||||
|
||||
std::string getNoJSDownloadPageHTML(const std::string& bookId, const std::string& userLang) const;
|
||||
|
||||
private: // types
|
||||
class LockableSuggestionSearcher;
|
||||
typedef ConcurrentCache<SearchInfo, std::shared_ptr<zim::Search>> SearchCache;
|
||||
typedef ConcurrentCache<std::string, std::shared_ptr<LockableSuggestionSearcher>> SuggestionSearcherCache;
|
||||
|
||||
private: // data
|
||||
std::string m_addr;
|
||||
int m_port;
|
||||
std::string m_root;
|
||||
std::string m_root; // URI-encoded
|
||||
std::string m_rootPrefixOfDecodedURL; // URI-decoded
|
||||
int m_nbThreads;
|
||||
unsigned int m_multizimSearchLimit;
|
||||
std::atomic_bool m_verbose;
|
||||
@@ -171,14 +178,13 @@ class InternalServer {
|
||||
int m_ipConnectionLimit;
|
||||
struct MHD_Daemon* mp_daemon;
|
||||
|
||||
Library* mp_library;
|
||||
NameMapper* mp_nameMapper;
|
||||
LibraryPtr mp_library;
|
||||
std::shared_ptr<NameMapper> mp_nameMapper;
|
||||
|
||||
SearchCache searchCache;
|
||||
SuggestionSearcherCache suggestionSearcherCache;
|
||||
|
||||
std::string m_server_id;
|
||||
std::string m_library_id;
|
||||
|
||||
class CustomizedResources;
|
||||
std::unique_ptr<CustomizedResources> m_customizedResources;
|
||||
|
||||
@@ -33,6 +33,74 @@
|
||||
|
||||
namespace kiwix {
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
enum OPDSResponseKind
|
||||
{
|
||||
OPDS_ENTRY,
|
||||
OPDS_NAVIGATION_FEED,
|
||||
OPDS_ACQUISITION_FEED
|
||||
};
|
||||
|
||||
const std::string opdsMimeType[] = {
|
||||
"application/atom+xml;type=entry;profile=opds-catalog;charset=utf-8",
|
||||
"application/atom+xml;profile=opds-catalog;kind=navigation;charset=utf-8",
|
||||
"application/atom+xml;profile=opds-catalog;kind=acquisition;charset=utf-8"
|
||||
};
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog(const RequestContext& request)
|
||||
{
|
||||
if (m_verbose.load()) {
|
||||
printf("** running handle_catalog");
|
||||
}
|
||||
|
||||
std::string host;
|
||||
std::string url;
|
||||
try {
|
||||
host = request.get_header("Host");
|
||||
url = request.get_url_part(1);
|
||||
} catch (const std::out_of_range&) {
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
if (url == "v2") {
|
||||
return handle_catalog_v2(request);
|
||||
}
|
||||
|
||||
if (url != "searchdescription.xml" && url != "root.xml" && url != "search") {
|
||||
return HTTP404Response(*this, request)
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
if (url == "searchdescription.xml") {
|
||||
auto response = ContentResponse::build(*this, RESOURCE::opensearchdescription_xml, get_default_data(), "application/opensearchdescription+xml");
|
||||
return std::move(response);
|
||||
}
|
||||
|
||||
zim::Uuid uuid;
|
||||
kiwix::OPDSDumper opdsDumper(mp_library.get(), mp_nameMapper.get());
|
||||
opdsDumper.setRootLocation(m_root);
|
||||
opdsDumper.setLibraryId(getLibraryId());
|
||||
std::vector<std::string> bookIdsToDump;
|
||||
if (url == "root.xml") {
|
||||
uuid = zim::Uuid::generate(host);
|
||||
bookIdsToDump = mp_library->filter(kiwix::Filter().valid(true).local(true).remote(true));
|
||||
} else if (url == "search") {
|
||||
bookIdsToDump = search_catalog(request, opdsDumper);
|
||||
uuid = zim::Uuid::generate();
|
||||
}
|
||||
|
||||
auto response = ContentResponse::build(
|
||||
*this,
|
||||
opdsDumper.dumpOPDSFeed(bookIdsToDump, request.get_query()),
|
||||
opdsMimeType[OPDS_ACQUISITION_FEED]);
|
||||
return std::move(response);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog_v2(const RequestContext& request)
|
||||
{
|
||||
if (m_verbose.load()) {
|
||||
@@ -77,33 +145,34 @@ std::unique_ptr<Response> InternalServer::handle_catalog_v2(const RequestContext
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog_v2_root(const RequestContext& request)
|
||||
{
|
||||
const std::string libraryId = getLibraryId();
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
RESOURCE::templates::catalog_v2_root_xml,
|
||||
kainjow::mustache::object{
|
||||
{"date", gen_date_str()},
|
||||
{"endpoint_root", m_root + "/catalog/v2"},
|
||||
{"feed_id", gen_uuid(m_library_id)},
|
||||
{"all_entries_feed_id", gen_uuid(m_library_id + "/entries")},
|
||||
{"partial_entries_feed_id", gen_uuid(m_library_id + "/partial_entries")},
|
||||
{"category_list_feed_id", gen_uuid(m_library_id + "/categories")},
|
||||
{"language_list_feed_id", gen_uuid(m_library_id + "/languages")}
|
||||
{"feed_id", gen_uuid(libraryId)},
|
||||
{"all_entries_feed_id", gen_uuid(libraryId + "/entries")},
|
||||
{"partial_entries_feed_id", gen_uuid(libraryId + "/partial_entries")},
|
||||
{"category_list_feed_id", gen_uuid(libraryId + "/categories")},
|
||||
{"language_list_feed_id", gen_uuid(libraryId + "/languages")}
|
||||
},
|
||||
"application/atom+xml;profile=opds-catalog;kind=navigation"
|
||||
opdsMimeType[OPDS_NAVIGATION_FEED]
|
||||
);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog_v2_entries(const RequestContext& request, bool partial)
|
||||
{
|
||||
OPDSDumper opdsDumper(mp_library);
|
||||
OPDSDumper opdsDumper(mp_library.get(), mp_nameMapper.get());
|
||||
opdsDumper.setRootLocation(m_root);
|
||||
opdsDumper.setLibraryId(m_library_id);
|
||||
opdsDumper.setLibraryId(getLibraryId());
|
||||
const auto bookIds = search_catalog(request, opdsDumper);
|
||||
const auto opdsFeed = opdsDumper.dumpOPDSFeedV2(bookIds, request.get_query(), partial);
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
opdsFeed,
|
||||
"application/atom+xml;profile=opds-catalog;kind=acquisition"
|
||||
opdsMimeType[OPDS_ACQUISITION_FEED]
|
||||
);
|
||||
}
|
||||
|
||||
@@ -116,38 +185,38 @@ std::unique_ptr<Response> InternalServer::handle_catalog_v2_complete_entry(const
|
||||
+ urlNotFoundMsg;
|
||||
}
|
||||
|
||||
OPDSDumper opdsDumper(mp_library);
|
||||
OPDSDumper opdsDumper(mp_library.get(), mp_nameMapper.get());
|
||||
opdsDumper.setRootLocation(m_root);
|
||||
opdsDumper.setLibraryId(m_library_id);
|
||||
opdsDumper.setLibraryId(getLibraryId());
|
||||
const auto opdsFeed = opdsDumper.dumpOPDSCompleteEntry(entryId);
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
opdsFeed,
|
||||
"application/atom+xml;type=entry;profile=opds-catalog"
|
||||
opdsMimeType[OPDS_ENTRY]
|
||||
);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog_v2_categories(const RequestContext& request)
|
||||
{
|
||||
OPDSDumper opdsDumper(mp_library);
|
||||
OPDSDumper opdsDumper(mp_library.get(), mp_nameMapper.get());
|
||||
opdsDumper.setRootLocation(m_root);
|
||||
opdsDumper.setLibraryId(m_library_id);
|
||||
opdsDumper.setLibraryId(getLibraryId());
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
opdsDumper.categoriesOPDSFeed(),
|
||||
"application/atom+xml;profile=opds-catalog;kind=navigation"
|
||||
opdsMimeType[OPDS_NAVIGATION_FEED]
|
||||
);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> InternalServer::handle_catalog_v2_languages(const RequestContext& request)
|
||||
{
|
||||
OPDSDumper opdsDumper(mp_library);
|
||||
OPDSDumper opdsDumper(mp_library.get(), mp_nameMapper.get());
|
||||
opdsDumper.setRootLocation(m_root);
|
||||
opdsDumper.setLibraryId(m_library_id);
|
||||
opdsDumper.setLibraryId(getLibraryId());
|
||||
return ContentResponse::build(
|
||||
*this,
|
||||
opdsDumper.languagesOPDSFeed(),
|
||||
"application/atom+xml;profile=opds-catalog;kind=navigation"
|
||||
opdsMimeType[OPDS_NAVIGATION_FEED]
|
||||
);
|
||||
}
|
||||
|
||||
@@ -25,8 +25,10 @@
|
||||
#include <sstream>
|
||||
#include <cstdio>
|
||||
#include <atomic>
|
||||
#include <cctype>
|
||||
|
||||
#include "tools/stringTools.h"
|
||||
#include "i18n.h"
|
||||
|
||||
namespace kiwix {
|
||||
|
||||
@@ -47,31 +49,15 @@ RequestMethod str2RequestMethod(const std::string& method) {
|
||||
else return RequestMethod::OTHER;
|
||||
}
|
||||
|
||||
std::string
|
||||
fullURL2LocalURL(const std::string& full_url, const std::string& rootLocation)
|
||||
{
|
||||
if (rootLocation.empty()) {
|
||||
// nothing special to handle.
|
||||
return full_url;
|
||||
} else if (full_url == rootLocation) {
|
||||
return "/";
|
||||
} else if (full_url.size() > rootLocation.size() &&
|
||||
full_url.substr(0, rootLocation.size()+1) == rootLocation + "/") {
|
||||
return full_url.substr(rootLocation.size());
|
||||
} else {
|
||||
return "";
|
||||
}
|
||||
}
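Before its removal the helper above turned the full request URL into a server-local one; after this refactoring RequestContext receives the unrooted, URI-decoded URL directly. Its behaviour can be summarised by the following hypothetical self-check (it would have to live in the same translation unit as the helper):

#include <cassert>

void fullURL2LocalURL_examples()
{
  assert(fullURL2LocalURL("/ROOT/search", "/ROOT") == "/search");
  assert(fullURL2LocalURL("/ROOT",        "/ROOT") == "/");
  assert(fullURL2LocalURL("/other/page",  "/ROOT") == "");        // not under the root
  assert(fullURL2LocalURL("/search",      "")      == "/search"); // no root prefix configured
}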
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
RequestContext::RequestContext(struct MHD_Connection* connection,
|
||||
std::string rootLocation,
|
||||
const std::string& _url,
|
||||
const std::string& _rootLocation, // URI-encoded
|
||||
const std::string& unrootedUrl, // URI-decoded
|
||||
const std::string& _method,
|
||||
const std::string& version) :
|
||||
full_url(_url),
|
||||
url(fullURL2LocalURL(_url, rootLocation)),
|
||||
rootLocation(_rootLocation),
|
||||
url(unrootedUrl),
|
||||
method(str2RequestMethod(_method)),
|
||||
version(version),
|
||||
requestIndex(s_requestIndex++),
|
||||
@@ -80,6 +66,7 @@ RequestContext::RequestContext(struct MHD_Connection* connection,
|
||||
{
|
||||
MHD_get_connection_values(connection, MHD_HEADER_KIND, &RequestContext::fill_header, this);
|
||||
MHD_get_connection_values(connection, MHD_GET_ARGUMENT_KIND, &RequestContext::fill_argument, this);
|
||||
MHD_get_connection_values(connection, MHD_COOKIE_KIND, &RequestContext::fill_cookie, this);
|
||||
|
||||
try {
|
||||
acceptEncodingGzip =
|
||||
@@ -89,6 +76,8 @@ RequestContext::RequestContext(struct MHD_Connection* connection,
|
||||
try {
|
||||
byteRange_ = ByteRange::parse(get_header(MHD_HTTP_HEADER_RANGE));
|
||||
} catch (const std::out_of_range&) {}
|
||||
|
||||
userlang = determine_user_language();
|
||||
}
|
||||
|
||||
RequestContext::~RequestContext()
|
||||
@@ -107,6 +96,22 @@ MHD_Result RequestContext::fill_argument(void *__this, enum MHD_ValueKind kind,
|
||||
{
|
||||
RequestContext *_this = static_cast<RequestContext*>(__this);
|
||||
_this->arguments[key].push_back(value == nullptr ? "" : value);
|
||||
if ( ! _this->queryString.empty() ) {
|
||||
_this->queryString += "&";
|
||||
}
|
||||
_this->queryString += urlEncode(key);
|
||||
if ( value ) {
|
||||
_this->queryString += "=";
|
||||
_this->queryString += urlEncode(value);
|
||||
}
|
||||
return MHD_YES;
|
||||
}
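libmicrohttpd hands the query arguments over already URI-decoded, so the loop above re-encodes them to keep a canonical queryString for get_query(). A standalone sketch of the same reconstruction is shown below, assuming kiwix::urlEncode is visible; unlike the real code it iterates a std::map, so pairs come out in key order instead of arrival order, and it always emits a '=' even for valueless arguments.

#include <map>
#include <string>
#include <vector>

// Simplified sketch, not the RequestContext implementation.
std::string buildQueryStringSketch(
    const std::map<std::string, std::vector<std::string>>& args)
{
  std::string qs;
  for (const auto& kv : args) {
    for (const auto& value : kv.second) {
      if (!qs.empty())
        qs += "&";
      qs += kiwix::urlEncode(kv.first) + "=" + kiwix::urlEncode(value);
    }
  }
  return qs;
}

// e.g. { {"books.name", {"wikipedia"}}, {"pattern", {"café"}} }
//      -> "books.name=wikipedia&pattern=caf%C3%A9"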
|
||||
|
||||
MHD_Result RequestContext::fill_cookie(void *__this, enum MHD_ValueKind kind,
|
||||
const char *key, const char* value)
|
||||
{
|
||||
RequestContext *_this = static_cast<RequestContext*>(__this);
|
||||
_this->cookies[key] = value == nullptr ? "" : value;
|
||||
return MHD_YES;
|
||||
}
|
||||
|
||||
@@ -131,7 +136,6 @@ void RequestContext::print_debug_info() const {
|
||||
printf("\n");
|
||||
}
|
||||
printf("Parsed : \n");
|
||||
printf("full_url: %s\n", full_url.c_str());
|
||||
printf("url : %s\n", url.c_str());
|
||||
printf("acceptEncodingGzip : %d\n", acceptEncodingGzip);
|
||||
printf("has_range : %d\n", byteRange_.kind() != ByteRange::NONE);
|
||||
@@ -169,11 +173,15 @@ std::string RequestContext::get_url_part(int number) const {
|
||||
}
|
||||
|
||||
std::string RequestContext::get_full_url() const {
|
||||
return full_url;
|
||||
return rootLocation + urlEncode(url);
|
||||
}
|
||||
|
||||
std::string RequestContext::get_root_path() const {
|
||||
return rootLocation.empty() ? "/" : rootLocation;
|
||||
}
|
||||
|
||||
bool RequestContext::is_valid_url() const {
|
||||
return !url.empty();
|
||||
return url.empty() || url[0] == '/';
|
||||
}
|
||||
|
||||
ByteRange RequestContext::get_range() const {
|
||||
@@ -190,16 +198,24 @@ std::string RequestContext::get_header(const std::string& name) const {
|
||||
}
|
||||
|
||||
std::string RequestContext::get_user_language() const
|
||||
{
|
||||
return userlang.lang;
|
||||
}
|
||||
|
||||
RequestContext::UserLanguage RequestContext::determine_user_language() const
|
||||
{
|
||||
try {
|
||||
return get_argument("userlang");
|
||||
return {UserLanguage::SelectorKind::QUERY_PARAM, get_argument("userlang")};
|
||||
} catch(const std::out_of_range&) {}
|
||||
|
||||
try {
|
||||
return get_header("Accept-Language");
|
||||
const std::string acceptLanguage = get_header("Accept-Language");
|
||||
const auto userLangPrefs = parseUserLanguagePreferences(acceptLanguage);
|
||||
const auto lang = selectMostSuitableLanguage(userLangPrefs);
|
||||
return {UserLanguage::SelectorKind::ACCEPT_LANGUAGE_HEADER, lang};
|
||||
} catch(const std::out_of_range&) {}
|
||||
|
||||
return "en";
|
||||
return {UserLanguage::SelectorKind::DEFAULT, "en"};
|
||||
}
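parseUserLanguagePreferences() and selectMostSuitableLanguage() are not part of this diff. Purely to illustrate the kind of input they deal with, a simplified, hypothetical parser for the Accept-Language header could look like this (this is not the libkiwix implementation):

#include <sstream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical sketch: extract (language, quality) pairs from a header such as
// "fr-FR,fr;q=0.9,en;q=0.8".
std::vector<std::pair<std::string, float>>
parseAcceptLanguageSketch(const std::string& header)
{
  std::vector<std::pair<std::string, float>> prefs;
  std::istringstream ss(header);
  std::string item;
  while (std::getline(ss, item, ',')) {
    float q = 1.0f;                      // default quality when no q= is given
    const auto sep = item.find(';');
    if (sep != std::string::npos) {
      const auto qPos = item.find("q=", sep);
      if (qPos != std::string::npos)
        q = std::stof(item.substr(qPos + 2));
      item = item.substr(0, sep);
    }
    item.erase(0, item.find_first_not_of(" \t"));  // trim leading whitespace
    prefs.emplace_back(item, q);
  }
  return prefs;
}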
|
||||
|
||||
std::string RequestContext::get_requested_format() const
|
||||
|
||||
@@ -57,8 +57,8 @@ class IndexError: public std::runtime_error {};
|
||||
class RequestContext {
|
||||
public: // functions
|
||||
RequestContext(struct MHD_Connection* connection,
|
||||
std::string rootLocation,
|
||||
const std::string& url,
|
||||
const std::string& rootLocation, // URI-encoded
|
||||
const std::string& unrootedUrl, // URI-decoded
|
||||
const std::string& method,
|
||||
const std::string& version);
|
||||
~RequestContext();
|
||||
@@ -91,16 +91,15 @@ class RequestContext {
|
||||
std::string get_url() const;
|
||||
std::string get_url_part(int part) const;
|
||||
std::string get_full_url() const;
|
||||
std::string get_root_path() const;
|
||||
|
||||
std::string get_query(bool mustEncode = false) const {
|
||||
return get_query([](const std::string& key) {return true;}, mustEncode);
|
||||
}
|
||||
std::string get_query() const { return queryString; }
|
||||
|
||||
template<class F>
|
||||
std::string get_query(F filter, bool mustEncode) const {
|
||||
std::string q;
|
||||
const char* sep = "";
|
||||
auto encode = [=](const std::string& value) { return mustEncode?urlEncode(value, true):value; };
|
||||
auto encode = [=](const std::string& value) { return mustEncode?urlEncode(value):value; };
|
||||
for ( const auto& a : arguments ) {
|
||||
if (!filter(a.first)) {
|
||||
continue;
|
||||
@@ -120,8 +119,22 @@ class RequestContext {
|
||||
std::string get_user_language() const;
|
||||
std::string get_requested_format() const;
|
||||
|
||||
private: // types
|
||||
struct UserLanguage
|
||||
{
|
||||
enum SelectorKind
|
||||
{
|
||||
QUERY_PARAM,
|
||||
ACCEPT_LANGUAGE_HEADER,
|
||||
DEFAULT
|
||||
};
|
||||
|
||||
SelectorKind selectedBy;
|
||||
std::string lang;
|
||||
};
|
||||
|
||||
private: // data
|
||||
std::string full_url;
|
||||
std::string rootLocation;
|
||||
std::string url;
|
||||
RequestMethod method;
|
||||
std::string version;
|
||||
@@ -132,9 +145,15 @@ class RequestContext {
|
||||
ByteRange byteRange_;
|
||||
std::map<std::string, std::string> headers;
|
||||
std::map<std::string, std::vector<std::string>> arguments;
|
||||
std::map<std::string, std::string> cookies;
|
||||
std::string queryString;
|
||||
UserLanguage userlang;
|
||||
|
||||
private: // functions
|
||||
UserLanguage determine_user_language() const;
|
||||
|
||||
static MHD_Result fill_header(void *, enum MHD_ValueKind, const char*, const char*);
|
||||
static MHD_Result fill_cookie(void *, enum MHD_ValueKind, const char*, const char*);
|
||||
static MHD_Result fill_argument(void *, enum MHD_ValueKind, const char*, const char*);
|
||||
};
|
||||
|
||||
|
||||
@@ -64,7 +64,13 @@ bool is_compressible_mime_type(const std::string& mimeType)
|
||||
|| mimeType.find("application/javascript") != std::string::npos
|
||||
|| mimeType.find("application/atom") != std::string::npos
|
||||
|| mimeType.find("application/opensearchdescription") != std::string::npos
|
||||
|| mimeType.find("application/json") != std::string::npos;
|
||||
|| mimeType.find("application/json") != std::string::npos
|
||||
|
||||
// Web fonts
|
||||
|| mimeType.find("application/font-") != std::string::npos
|
||||
|| mimeType.find("application/x-font-") != std::string::npos
|
||||
|| mimeType.find("application/vnd.ms-fontobject") != std::string::npos
|
||||
|| mimeType.find("font/") != std::string::npos;
|
||||
}
|
||||
|
||||
bool compress(std::string &content) {
|
||||
@@ -102,6 +108,14 @@ bool compress(std::string &content) {
|
||||
}
|
||||
|
||||
|
||||
const char* getCacheControlHeader(Response::Kind k)
|
||||
{
|
||||
switch(k) {
|
||||
case Response::STATIC_RESOURCE: return "max-age=31536000, immutable";
|
||||
case Response::ZIM_CONTENT: return "max-age=3600, must-revalidate";
|
||||
default: return "max-age=0, must-revalidate";
|
||||
}
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
@@ -112,6 +126,13 @@ Response::Response(bool verbose)
|
||||
add_header(MHD_HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN, "*");
|
||||
}
|
||||
|
||||
void Response::set_kind(Kind k)
|
||||
{
|
||||
m_kind = k;
|
||||
if ( k == ZIM_CONTENT )
|
||||
m_etag.set_option(ETag::ZIM_CONTENT);
|
||||
}
|
||||
|
||||
std::unique_ptr<Response> Response::build(const InternalServer& server)
|
||||
{
|
||||
return std::unique_ptr<Response>(new Response(server.m_verbose.load()));
|
||||
@@ -122,6 +143,9 @@ std::unique_ptr<Response> Response::build_304(const InternalServer& server, cons
|
||||
auto response = Response::build(server);
|
||||
response->set_code(MHD_HTTP_NOT_MODIFIED);
|
||||
response->m_etag = etag;
|
||||
if ( etag.get_option(ETag::ZIM_CONTENT) ) {
|
||||
response->set_kind(Response::ZIM_CONTENT);
|
||||
}
|
||||
if ( etag.get_option(ETag::COMPRESSED_CONTENT) ) {
|
||||
response->add_header(MHD_HTTP_HEADER_VARY, "Accept-Encoding");
|
||||
}
|
||||
@@ -176,7 +200,7 @@ HTTP404Response::HTTP404Response(const InternalServer& server,
|
||||
|
||||
HTTPErrorResponse& HTTP404Response::operator+(UrlNotFoundMsg /*unused*/)
|
||||
{
|
||||
const std::string requestUrl = m_request.get_full_url();
|
||||
const std::string requestUrl = urlDecode(m_request.get_full_url(), false);
|
||||
return *this + ParameterizedMessage("url-not-found", {{"url", requestUrl}});
|
||||
}
|
||||
|
||||
@@ -210,7 +234,7 @@ HTTP400Response::HTTP400Response(const InternalServer& server,
|
||||
|
||||
HTTPErrorResponse& HTTP400Response::operator+(InvalidUrlMsg /*unused*/)
|
||||
{
|
||||
std::string requestUrl = m_request.get_full_url();
|
||||
std::string requestUrl = urlDecode(m_request.get_full_url(), false);
|
||||
const auto query = m_request.get_query();
|
||||
if (!query.empty()) {
|
||||
requestUrl += "?" + encodeDiples(query);
|
||||
@@ -355,7 +379,7 @@ MHD_Result Response::send(const RequestContext& request, MHD_Connection* connect
|
||||
MHD_Response* response = create_mhd_response(request);
|
||||
|
||||
MHD_add_response_header(response, MHD_HTTP_HEADER_CACHE_CONTROL,
|
||||
m_etag.get_option(ETag::CACHEABLE_ENTITY) ? "max-age=2723040, public" : "no-cache, no-store, must-revalidate");
|
||||
getCacheControlHeader(m_kind));
|
||||
const std::string etag = m_etag.get_etag();
|
||||
if ( ! etag.empty() )
|
||||
MHD_add_response_header(response, MHD_HTTP_HEADER_ETAG, etag.c_str());
|
||||
@@ -411,7 +435,7 @@ ItemResponse::ItemResponse(bool verbose, const zim::Item& item, const std::strin
|
||||
m_mimeType(mimetype)
|
||||
{
|
||||
m_byteRange = byterange;
|
||||
set_cacheable();
|
||||
set_kind(Response::ZIM_CONTENT);
|
||||
add_header(MHD_HTTP_HEADER_CONTENT_TYPE, m_mimeType);
|
||||
}
|
||||
|
||||
@@ -423,14 +447,14 @@ std::unique_ptr<Response> ItemResponse::build(const InternalServer& server, cons
|
||||
if (noRange && is_compressible_mime_type(mimetype)) {
|
||||
// Return a contentResponse
|
||||
auto response = ContentResponse::build(server, item.getData(), mimetype);
|
||||
response->set_cacheable();
|
||||
response->set_kind(Response::ZIM_CONTENT);
|
||||
response->m_byteRange = byteRange;
|
||||
return std::move(response);
|
||||
}
|
||||
|
||||
if (byteRange.kind() == ByteRange::RESOLVED_UNSATISFIABLE) {
|
||||
auto response = Response::build_416(server, item.getSize());
|
||||
response->set_cacheable();
|
||||
response->set_kind(Response::ZIM_CONTENT);
|
||||
return response;
|
||||
}
|
||||
|
||||
|
||||
@@ -45,6 +45,14 @@ class InternalServer;
|
||||
class RequestContext;
|
||||
|
||||
class Response {
|
||||
public:
|
||||
enum Kind
|
||||
{
|
||||
STATIC_RESOURCE,
|
||||
ZIM_CONTENT,
|
||||
DYNAMIC_CONTENT
|
||||
};
|
||||
|
||||
public:
|
||||
Response(bool verbose);
|
||||
virtual ~Response() = default;
|
||||
@@ -57,8 +65,9 @@ class Response {
|
||||
MHD_Result send(const RequestContext& request, MHD_Connection* connection);
|
||||
|
||||
void set_code(int code) { m_returnCode = code; }
|
||||
void set_cacheable() { m_etag.set_option(ETag::CACHEABLE_ENTITY); }
|
||||
void set_server_id(const std::string& id) { m_etag.set_server_id(id); }
|
||||
void set_kind(Kind k);
|
||||
Kind get_kind() const { return m_kind; }
|
||||
void set_etag_body(const std::string& id) { m_etag.set_body(id); }
|
||||
void add_header(const std::string& name, const std::string& value) { m_customHeaders[name] = value; }
|
||||
|
||||
int getReturnCode() const { return m_returnCode; }
|
||||
@@ -68,6 +77,7 @@ class Response {
|
||||
MHD_Response* create_error_response(const RequestContext& request) const;
|
||||
|
||||
protected: // data
|
||||
Kind m_kind = DYNAMIC_CONTENT;
|
||||
bool m_verbose;
|
||||
int m_returnCode;
|
||||
ByteRange m_byteRange;
|
||||
|
||||
@@ -93,10 +93,6 @@ std::string getMetaFlavour(const zim::Archive& archive) {
|
||||
return getMetadata(archive, "Flavour");
|
||||
}
|
||||
|
||||
std::string getArchiveId(const zim::Archive& archive) {
|
||||
return (std::string) archive.getUuid();
|
||||
}
|
||||
|
||||
bool getArchiveFavicon(const zim::Archive& archive, unsigned size,
|
||||
std::string& content, std::string& mimeType){
|
||||
try {
|
||||
@@ -109,46 +105,6 @@ bool getArchiveFavicon(const zim::Archive& archive, unsigned size,
|
||||
return false;
|
||||
}
|
||||
|
||||
// should this be in libzim
|
||||
unsigned int getArchiveMediaCount(const zim::Archive& archive) {
|
||||
std::map<const std::string, unsigned int> counterMap = parseArchiveCounter(archive);
|
||||
unsigned int counter = 0;
|
||||
|
||||
for (auto &pair:counterMap) {
|
||||
if (startsWith(pair.first, "image/") ||
|
||||
startsWith(pair.first, "video/") ||
|
||||
startsWith(pair.first, "audio/")) {
|
||||
counter += pair.second;
|
||||
}
|
||||
}
|
||||
|
||||
return counter;
|
||||
}
|
||||
|
||||
unsigned int getArchiveArticleCount(const zim::Archive& archive) {
|
||||
// [HACK]
|
||||
// getArticleCount() returns different things depending on the "version" of the zim.
|
||||
// On old zim (<=6), it returns the number of entries in the `A` namespace
|
||||
// On recent zim (>=7), it returns:
|
||||
// - the number of entries in the `C` namespace (==getEntryCount) if no frontArticleIndex is present
|
||||
// - the number of front articles if a frontArticleIndex is present
|
||||
// The use case >=7 without frontArticleIndex is pretty rare so we don't care
|
||||
// We can detect if we are reading a zim <= 6 by checking if we have a newNamespaceScheme.
|
||||
if (archive.hasNewNamespaceScheme()) {
|
||||
//The articleCount is "good"
|
||||
return archive.getArticleCount();
|
||||
} else {
|
||||
// We have to parse the `M/Counter` metadata
|
||||
unsigned int counter = 0;
|
||||
for(const auto& pair:parseArchiveCounter(archive)) {
|
||||
if (startsWith(pair.first, "text/html")) {
|
||||
counter += pair.second;
|
||||
}
|
||||
}
|
||||
return counter;
|
||||
}
|
||||
}
|
||||
|
||||
unsigned int getArchiveFileSize(const zim::Archive& archive) {
|
||||
return archive.getFilesize() / 1024;
|
||||
}
|
||||
@@ -169,14 +125,4 @@ zim::Entry getEntryFromPath(const zim::Archive& archive, const std::string& path
|
||||
}
|
||||
throw zim::EntryNotFound("Cannot find entry for non empty path");
|
||||
}
|
||||
|
||||
MimeCounterType parseArchiveCounter(const zim::Archive& archive) {
|
||||
try {
|
||||
auto counterContent = archive.getMetadata("Counter");
|
||||
return parseMimetypeCounter(counterContent);
|
||||
} catch (zim::EntryNotFound& e) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
} // kiwix
|
||||
|
||||
@@ -40,7 +40,6 @@ namespace kiwix
|
||||
std::string getMetaCreator(const zim::Archive& archive);
|
||||
std::string getMetaPublisher(const zim::Archive& archive);
|
||||
std::string getMetaFlavour(const zim::Archive& archive);
|
||||
std::string getArchiveId(const zim::Archive& archive);
|
||||
|
||||
bool getArchiveFavicon(const zim::Archive& archive, unsigned size,
|
||||
std::string& content, std::string& mimeType);
|
||||
@@ -52,9 +51,6 @@ namespace kiwix
|
||||
zim::Item getFinalItem(const zim::Archive& archive, const zim::Entry& entry);
|
||||
|
||||
zim::Entry getEntryFromPath(const zim::Archive& archive, const std::string& path);
|
||||
|
||||
MimeCounterType parseArchiveCounter(const zim::Archive& archive);
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
75
src/tools/languageTools.cpp
Normal file
@@ -0,0 +1,75 @@
|
||||
#include "tools.h"
|
||||
#include "stringTools.h"
|
||||
#include <mutex>
|
||||
|
||||
namespace kiwix
|
||||
{
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
// These mappings are not provided by the ICU library; any such mappings can be added here manually
|
||||
std::map<std::string, std::string> iso639_3 = {
|
||||
{"atj", "atikamekw"},
|
||||
{"azb", "آذربایجان دیلی"},
|
||||
{"bcl", "central bikol"},
|
||||
{"bgs", "tagabawa"},
|
||||
{"bxr", "буряад хэлэн"},
|
||||
{"cbk", "chavacano"},
|
||||
{"cdo", "閩東語"},
|
||||
{"dag", "Dagbani"},
|
||||
{"diq", "dimli"},
|
||||
{"dty", "डोटेली"},
|
||||
{"eml", "emiliân-rumagnōl"},
|
||||
{"fbs", "српскохрватски"},
|
||||
{"fon", "fɔ̀ngbè"},
|
||||
{"guw", "Gungbe"},
|
||||
{"hbs", "srpskohrvatski"},
|
||||
{"ido", "ido"},
|
||||
{"kbp", "kabɩyɛ"},
|
||||
{"kld", "Gamilaraay"},
|
||||
{"lbe", "лакку маз"},
|
||||
{"lbj", "ལ་དྭགས་སྐད་"},
|
||||
{"map", "Austronesian"},
|
||||
{"mhr", "марий йылме"},
|
||||
{"mnw", "ဘာသာမန်"},
|
||||
{"myn", "mayan"},
|
||||
{"nah", "nahuatl"},
|
||||
{"nai", "north American Indian"},
|
||||
{"nds", "plattdütsch"},
|
||||
{"nrm", "bhasa narom"},
|
||||
{"olo", "livvi"},
|
||||
{"pih", "Pitcairn-Norfolk"},
|
||||
{"pnb", "Western Panjabi"},
|
||||
{"rmr", "Caló"},
|
||||
{"rmy", "romani shib"},
|
||||
{"roa", "romance languages"},
|
||||
{"twi", "twi"},
|
||||
// ICU for Ubuntu versions <= focal (20.04) returns "" for the language code ""
|
||||
// unlike later versions, which return "und". We map this value to "Undetermined" for common ground.
|
||||
{"", "Undetermined"},
|
||||
};
|
||||
|
||||
std::once_flag fillLanguagesFlag;
|
||||
|
||||
void fillLanguagesMap()
|
||||
{
|
||||
for (auto icuLangPtr = icu::Locale::getISOLanguages(); *icuLangPtr != NULL; ++icuLangPtr) {
|
||||
const kiwix::ICULanguageInfo lang(*icuLangPtr);
|
||||
iso639_3.insert({lang.iso3Code(), lang.selfName()});
|
||||
}
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
std::string getLanguageSelfName(const std::string& lang)
|
||||
{
|
||||
std::call_once(fillLanguagesFlag, fillLanguagesMap);
|
||||
const auto itr = iso639_3.find(lang);
|
||||
if (itr != iso639_3.end()) {
|
||||
return itr->second;
|
||||
}
|
||||
return lang;
|
||||
};
|
||||
|
||||
} // namespace kiwix
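A small usage sketch, assuming getLanguageSelfName() is declared in the corresponding header: entries of the manual table take precedence because std::map::insert() never overwrites an existing key, and unknown codes are returned unchanged.

#include <cassert>
#include <string>

void getLanguageSelfName_examples()
{
  assert(kiwix::getLanguageSelfName("atj") == "atikamekw");         // manual mapping above
  assert(kiwix::getLanguageSelfName("") == "Undetermined");         // common ground for old/new ICU
  assert(kiwix::getLanguageSelfName("not-a-code") == "not-a-code"); // unknown: returned as-is
}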
|
||||
70
src/tools/opdsParsingTools.cpp
Normal file
@@ -0,0 +1,70 @@
|
||||
#include "tools.h"
|
||||
#include <pugixml.hpp>
|
||||
|
||||
namespace kiwix
|
||||
{
|
||||
|
||||
namespace
|
||||
{
|
||||
#define VALUE(name) entryNode.child(name).child_value()
|
||||
FeedLanguages parseLanguages(const pugi::xml_document& doc)
|
||||
{
|
||||
pugi::xml_node feedNode = doc.child("feed");
|
||||
FeedLanguages langs;
|
||||
|
||||
for (pugi::xml_node entryNode = feedNode.child("entry"); entryNode;
|
||||
entryNode = entryNode.next_sibling("entry")) {
|
||||
auto title = VALUE("title");
|
||||
auto code = VALUE("dc:language");
|
||||
langs.push_back({code, title});
|
||||
}
|
||||
|
||||
return langs;
|
||||
}
|
||||
|
||||
FeedCategories parseCategories(const pugi::xml_document& doc)
|
||||
{
|
||||
pugi::xml_node feedNode = doc.child("feed");
|
||||
FeedCategories categories;
|
||||
|
||||
for (pugi::xml_node entryNode = feedNode.child("entry"); entryNode;
|
||||
entryNode = entryNode.next_sibling("entry")) {
|
||||
auto title = VALUE("title");
|
||||
categories.push_back(title);
|
||||
}
|
||||
|
||||
return categories;
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
FeedLanguages readLanguagesFromFeed(const std::string& content)
|
||||
{
|
||||
pugi::xml_document doc;
|
||||
pugi::xml_parse_result result
|
||||
= doc.load_buffer((void*)content.data(), content.size());
|
||||
|
||||
if (result) {
|
||||
auto langs = parseLanguages(doc);
|
||||
return langs;
|
||||
}
|
||||
|
||||
return FeedLanguages();
|
||||
}
|
||||
|
||||
FeedCategories readCategoriesFromFeed(const std::string& content)
|
||||
{
|
||||
pugi::xml_document doc;
|
||||
pugi::xml_parse_result result
|
||||
= doc.load_buffer((void*)content.data(), content.size());
|
||||
|
||||
FeedCategories categories;
|
||||
if (result) {
|
||||
categories = parseCategories(doc);
|
||||
return categories;
|
||||
}
|
||||
|
||||
return categories;
|
||||
}
|
||||
|
||||
} // namespace kiwix
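For illustration, a minimal hand-written feed exercises the parser above. FeedLanguages is assumed here to be a sequence of (code, title) pairs, as suggested by langs.push_back({code, title}); the feed content is made up.

#include <cassert>
#include <string>

void readLanguagesFromFeed_example()
{
  const std::string feed =
      "<feed>"
      "<entry><title>English</title><dc:language>eng</dc:language></entry>"
      "<entry><title>italiano</title><dc:language>ita</dc:language></entry>"
      "</feed>";
  const auto langs = kiwix::readLanguagesFromFeed(feed);
  assert(langs.size() == 2);
  assert(langs[0].first == "eng" && langs[0].second == "English");
  assert(langs[1].first == "ita" && langs[1].second == "italiano");
}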
|
||||
@@ -32,12 +32,15 @@
|
||||
#endif
|
||||
|
||||
#include "tools/stringTools.h"
|
||||
#include "server/i18n.h"
|
||||
#include "libkiwix-resources.h"
|
||||
|
||||
#include <map>
|
||||
#include <sstream>
|
||||
#include <pugixml.hpp>
|
||||
|
||||
#include <zim/uuid.h>
|
||||
#include <zim/suggestion_iterator.h>
|
||||
|
||||
|
||||
static std::map<std::string, std::string> codeisomapping {
|
||||
@@ -288,67 +291,6 @@ bool kiwix::convertStrToBool(const std::string& value)
|
||||
throw std::domain_error(ss.str());
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
// The counter metadata format is a list of item separated by a `;` :
|
||||
// item0;item1;item2
|
||||
// Each item is a "tuple" mimetype=number.
|
||||
// However, the mimetype may contains parameters:
|
||||
// text/html;raw=true;foo=bar
|
||||
// So the final format may be complex to parse:
|
||||
// key0=value0;key1;foo=bar=value1;key2=value2
|
||||
|
||||
typedef kiwix::MimeCounterType::value_type MimetypeAndCounter;
|
||||
|
||||
std::string readFullMimetypeAndCounterString(std::istream& in)
|
||||
{
|
||||
std::string mtcStr, params;
|
||||
getline(in, mtcStr, ';');
|
||||
if ( mtcStr.find('=') == std::string::npos )
|
||||
{
|
||||
do
|
||||
{
|
||||
if ( !getline(in, params, ';' ) )
|
||||
return std::string();
|
||||
mtcStr += ";" + params;
|
||||
}
|
||||
while ( std::count(params.begin(), params.end(), '=') != 2 );
|
||||
}
|
||||
return mtcStr;
|
||||
}
|
||||
|
||||
MimetypeAndCounter parseASingleMimetypeCounter(const std::string& s)
|
||||
{
|
||||
const std::string::size_type k = s.find_last_of("=");
|
||||
if ( k != std::string::npos )
|
||||
{
|
||||
const std::string mimeType = s.substr(0, k);
|
||||
std::istringstream counterSS(s.substr(k+1));
|
||||
unsigned int counter;
|
||||
if (counterSS >> counter && counterSS.eof())
|
||||
return MimetypeAndCounter{mimeType, counter};
|
||||
}
|
||||
return MimetypeAndCounter{"", 0};
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
kiwix::MimeCounterType kiwix::parseMimetypeCounter(const std::string& counterData)
|
||||
{
|
||||
kiwix::MimeCounterType counters;
|
||||
std::istringstream ss(counterData);
|
||||
|
||||
while (ss)
|
||||
{
|
||||
const std::string mtcStr = readFullMimetypeAndCounterString(ss);
|
||||
const MimetypeAndCounter mtc = parseASingleMimetypeCounter(mtcStr);
|
||||
if ( !mtc.first.empty() )
|
||||
counters.insert(mtc);
|
||||
}
|
||||
|
||||
return counters;
|
||||
}
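As a concrete illustration of the format described in the comment above (the Counter value is made up), a mimetype whose parameters themselves contain '=' stays intact and only the final '=' separates the count:

#include <cassert>

// Usage sketch, assuming the declarations above are visible from this
// translation unit.
void parseMimetypeCounter_example()
{
  const auto counters = kiwix::parseMimetypeCounter(
      "text/html;raw=true=100;image/png=5;video/webm=2");
  assert(counters.at("text/html;raw=true") == 100);
  assert(counters.at("image/png") == 5);
  assert(counters.at("video/webm") == 2);
}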
|
||||
|
||||
std::string kiwix::gen_date_str()
|
||||
{
|
||||
auto now = std::time(0);
|
||||
@@ -380,10 +322,78 @@ kainjow::mustache::data kiwix::onlyAsNonEmptyMustacheValue(const std::string& s)
|
||||
std::string kiwix::render_template(const std::string& template_str, kainjow::mustache::data data)
|
||||
{
|
||||
kainjow::mustache::mustache tmpl(template_str);
|
||||
kainjow::mustache::data urlencode{kainjow::mustache::lambda2{
|
||||
[](const std::string& str,const kainjow::mustache::renderer& r) { return urlEncode(r(str), true); }}};
|
||||
data.set("urlencoded", urlencode);
|
||||
std::stringstream ss;
|
||||
tmpl.render(data, [&ss](const std::string& str) { ss << str; });
|
||||
return ss.str();
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
std::string escapeForJSON(const std::string& s)
|
||||
{
|
||||
std::ostringstream oss;
|
||||
for (char c : s) {
|
||||
if ( c == '\\' ) {
|
||||
oss << "\\\\";
|
||||
} else if ( unsigned(c) < 0x20U ) {
|
||||
oss << "\\u" << std::setw(4) << std::setfill('0') << unsigned(c);
|
||||
} else {
|
||||
oss << c;
|
||||
}
|
||||
}
|
||||
return oss.str();
|
||||
}
|
||||
|
||||
std::string makeFulltextSearchSuggestion(const std::string& lang,
|
||||
const std::string& queryString)
|
||||
{
|
||||
return kiwix::i18n::expandParameterizedString(lang, "suggest-full-text-search",
|
||||
{
|
||||
{"SEARCH_TERMS", queryString}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
} // unnamed namespace
|
||||
|
||||
kiwix::Suggestions::Suggestions()
|
||||
: m_data(kainjow::mustache::data::type::list)
|
||||
{
|
||||
}
|
||||
|
||||
void kiwix::Suggestions::add(const zim::SuggestionItem& suggestion)
|
||||
{
|
||||
kainjow::mustache::data result;
|
||||
|
||||
const std::string label = suggestion.hasSnippet()
|
||||
? suggestion.getSnippet()
|
||||
: suggestion.getTitle();
|
||||
|
||||
result.set("label", escapeForJSON(label));
|
||||
result.set("value", escapeForJSON(suggestion.getTitle()));
|
||||
result.set("kind", "path");
|
||||
result.set("path", escapeForJSON(suggestion.getPath()));
|
||||
result.set("first", m_data.is_empty_list());
|
||||
m_data.push_back(result);
|
||||
}
|
||||
|
||||
void kiwix::Suggestions::addFTSearchSuggestion(const std::string& uiLang,
|
||||
const std::string& queryString)
|
||||
{
|
||||
kainjow::mustache::data result;
|
||||
const std::string label = makeFulltextSearchSuggestion(uiLang, queryString);
|
||||
result.set("label", escapeForJSON(label));
|
||||
result.set("value", escapeForJSON(queryString + " "));
|
||||
result.set("kind", "pattern");
|
||||
result.set("first", m_data.is_empty_list());
|
||||
m_data.push_back(result);
|
||||
}
|
||||
|
||||
std::string kiwix::Suggestions::getJSON() const
|
||||
{
|
||||
kainjow::mustache::data data;
|
||||
data.set("suggestions", m_data);
|
||||
|
||||
return render_template(RESOURCE::templates::suggestion_json, data);
|
||||
}
|
||||
|
||||
@@ -33,6 +33,10 @@ namespace pugi {
|
||||
class xml_node;
|
||||
}
|
||||
|
||||
namespace zim {
|
||||
class SuggestionItem;
|
||||
}
|
||||
|
||||
namespace kiwix
|
||||
{
|
||||
std::string nodeToString(const pugi::xml_node& node);
|
||||
@@ -45,9 +49,6 @@ namespace kiwix
|
||||
const std::string& tagName);
|
||||
bool convertStrToBool(const std::string& value);
|
||||
|
||||
using MimeCounterType = std::map<const std::string, zim::entry_index_type>;
|
||||
MimeCounterType parseMimetypeCounter(const std::string& counterData);
|
||||
|
||||
std::string gen_date_str();
|
||||
std::string gen_uuid(const std::string& s);
|
||||
|
||||
@@ -70,6 +71,22 @@ namespace kiwix
|
||||
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
class Suggestions
|
||||
{
|
||||
public:
|
||||
Suggestions();
|
||||
|
||||
void add(const zim::SuggestionItem& suggestion);
|
||||
|
||||
void addFTSearchSuggestion(const std::string& uiLang,
|
||||
const std::string& query);
|
||||
|
||||
std::string getJSON() const;
|
||||
|
||||
private:
|
||||
kainjow::mustache::data m_data;
|
||||
};
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
@@ -493,12 +493,14 @@ static std::map<std::string, std::string> extMimeTypes = {
|
||||
{ "jpeg", "image/jpeg"},
|
||||
{ "jpg", "image/jpeg"},
|
||||
{ "gif", "image/gif"},
|
||||
{ "ico", "image/x-icon"},
|
||||
{ "svg", "image/svg+xml"},
|
||||
{ "txt", "text/plain"},
|
||||
{ "xml", "text/xml"},
|
||||
{ "pdf", "application/pdf"},
|
||||
{ "ogg", "application/ogg"},
|
||||
{ "js", "application/javascript"},
|
||||
{ "json", "application/json"},
|
||||
{ "css", "text/css"},
|
||||
{ "otf", "application/vnd.ms-opentype"},
|
||||
{ "ttf", "application/font-ttf"},
|
||||
|
||||
@@ -161,15 +161,14 @@ std::string kiwix::encodeDiples(const std::string& str)
|
||||
return result;
|
||||
}
|
||||
|
||||
/* urlEncode() based on javascript encodeURI() &
|
||||
encodeURIComponent(). Mostly code from rstudio/httpuv (GPLv3) */
|
||||
namespace
|
||||
{
|
||||
|
||||
bool isReservedUrlChar(char c)
|
||||
{
|
||||
switch (c) {
|
||||
case ';':
|
||||
case ',':
|
||||
case '/':
|
||||
case '?':
|
||||
case ':':
|
||||
case '@':
|
||||
@@ -177,22 +176,22 @@ bool isReservedUrlChar(char c)
|
||||
case '=':
|
||||
case '+':
|
||||
case '$':
|
||||
case '#':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
bool needsEscape(char c, bool encodeReserved)
|
||||
bool isHarmlessUriChar(char c)
|
||||
{
|
||||
if (c >= 'a' && c <= 'z')
|
||||
return false;
|
||||
return true;
|
||||
if (c >= 'A' && c <= 'Z')
|
||||
return false;
|
||||
return true;
|
||||
if (c >= '0' && c <= '9')
|
||||
return false;
|
||||
if (isReservedUrlChar(c))
|
||||
return encodeReserved;
|
||||
return true;
|
||||
|
||||
switch (c) {
|
||||
case '-':
|
||||
case '_':
|
||||
@@ -203,9 +202,10 @@ bool needsEscape(char c, bool encodeReserved)
|
||||
case '\'':
|
||||
case '(':
|
||||
case ')':
|
||||
return false;
|
||||
case '/':
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
int hexToInt(char c) {
|
||||
@@ -230,18 +230,18 @@ int hexToInt(char c) {
|
||||
}
|
||||
}
|
||||
|
||||
std::string kiwix::urlEncode(const std::string& value, bool encodeReserved)
|
||||
} // unnamed namespace
|
||||
|
||||
std::string kiwix::urlEncode(const std::string& value)
|
||||
{
|
||||
std::ostringstream os;
|
||||
os << std::hex << std::uppercase;
|
||||
for (std::string::const_iterator it = value.begin();
|
||||
it != value.end();
|
||||
it++) {
|
||||
|
||||
if (!needsEscape(*it, encodeReserved)) {
|
||||
os << *it;
|
||||
for (const char c : value) {
|
||||
if (isHarmlessUriChar(c)) {
|
||||
os << c;
|
||||
} else {
|
||||
os << '%' << std::setw(2) << static_cast<unsigned int>(static_cast<unsigned char>(*it));
|
||||
const unsigned int charVal = static_cast<unsigned char>(c);
|
||||
os << '%' << std::setw(2) << std::setfill('0') << charVal;
|
||||
}
|
||||
}
|
||||
return os.str();
|
||||
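A few illustrative outputs of the rewritten urlEncode() (the inputs are arbitrary examples; see also the updated declaration comment in stringTools.h further below): letters, digits, a small set of harmless punctuation characters and '/' pass through, everything else is percent-encoded byte by byte with uppercase hex digits.

#include <cassert>
#include <string>

void urlEncode_examples()  // assumes this file is saved as UTF-8
{
  assert(kiwix::urlEncode("A/Café.html") == "A/Caf%C3%A9.html"); // '/' kept, é -> %C3%A9
  assert(kiwix::urlEncode("cats & dogs") == "cats%20%26%20dogs");
  assert(kiwix::urlEncode("100%") == "100%25");
}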
@@ -267,15 +267,15 @@ std::string kiwix::urlDecode(const std::string& value, bool component)
|
||||
int iHi = hexToInt(hi);
|
||||
int iLo = hexToInt(lo);
|
||||
if (iHi < 0 || iLo < 0) {
|
||||
// Invalid escape sequence
|
||||
os << '%' << hi << lo;
|
||||
continue;
|
||||
// Invalid escape sequence
|
||||
os << '%' << hi << lo;
|
||||
continue;
|
||||
}
|
||||
char c = (char)(iHi << 4 | iLo);
|
||||
if (!component && isReservedUrlChar(c)) {
|
||||
os << '%' << hi << lo;
|
||||
os << '%' << hi << lo;
|
||||
} else {
|
||||
os << c;
|
||||
os << c;
|
||||
}
|
||||
} else {
|
||||
os << *it;
|
||||
@@ -415,6 +415,17 @@ bool kiwix::startsWith(const std::string& base, const std::string& start)
|
||||
&& std::equal(start.begin(), start.end(), base.begin());
|
||||
}
|
||||
|
||||
std::string kiwix::stripSuffix(const std::string& str, const std::string& suffix)
|
||||
{
|
||||
if (str.size() > suffix.size()) {
|
||||
const auto subStr = str.substr(str.size() - suffix.size(), str.size());
|
||||
if (subStr == suffix) {
|
||||
return str.substr(0, str.size() - suffix.size());
|
||||
}
|
||||
}
|
||||
return str;
|
||||
}
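A short usage sketch for the new stripSuffix() helper (the file names are made-up examples):

#include <cassert>
#include <string>

void stripSuffix_examples()
{
  assert(kiwix::stripSuffix("wikipedia_en_all.zim", ".zim") == "wikipedia_en_all");
  assert(kiwix::stripSuffix("notes.txt", ".zim") == "notes.txt"); // suffix absent: unchanged
}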
|
||||
|
||||
std::vector<std::string> kiwix::getTitleVariants(const std::string& title) {
|
||||
std::vector<std::string> variants;
|
||||
variants.push_back(title);
|
||||
|
||||
@@ -31,7 +31,6 @@
|
||||
namespace kiwix
|
||||
{
|
||||
std::string beautifyInteger(uint64_t number);
|
||||
std::string beautifyFileSize(uint64_t number);
|
||||
void printStringInHexadecimal(const char* s);
|
||||
void printStringInHexadecimal(icu::UnicodeString s);
|
||||
void stringReplacement(std::string& str,
|
||||
@@ -55,7 +54,9 @@ private:
|
||||
};
|
||||
|
||||
|
||||
std::string urlEncode(const std::string& value, bool encodeReserved = false);
|
||||
/* urlEncode() is the equivalent of JS encodeURIComponent(), with the only
|
||||
* difference that the slash (/) symbol is NOT encoded. */
|
||||
std::string urlEncode(const std::string& value);
|
||||
std::string urlDecode(const std::string& value, bool component = false);
|
||||
|
||||
std::string join(const std::vector<std::string>& list, const std::string& sep);
|
||||
@@ -91,6 +92,8 @@ std::string extractFromString(const std::string& str);
|
||||
|
||||
bool startsWith(const std::string& base, const std::string& start);
|
||||
|
||||
std::string stripSuffix(const std::string& str, const std::string& suffix);
|
||||
|
||||
std::vector<std::string> getTitleVariants(const std::string& title);
|
||||
} //namespace kiwix
|
||||
#endif
|
||||
|
||||
@@ -17,15 +17,41 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from pathlib import Path
|
||||
import json
|
||||
|
||||
script_path = Path(__file__)
|
||||
|
||||
resource_file = script_path.parent / "i18n_resources_list.txt"
|
||||
translation_dir = script_path.parent / "i18n"
|
||||
translation_dir = script_path.parent / "skin/i18n"
|
||||
language_list_relpath = "skin/languages.js"
|
||||
|
||||
def get_translation_info(filepath):
|
||||
lang_code = Path(filepath).stem
|
||||
with open(filepath, 'r', encoding="utf-8") as f:
|
||||
content = json.load(f)
|
||||
lang_name = content.get("name")
|
||||
return lang_code, lang_name
|
||||
|
||||
language_list = []
|
||||
json_files = translation_dir.glob("*.json")
|
||||
with open(resource_file, 'w', encoding="utf-8") as f:
|
||||
for json in sorted(translation_dir.glob("*.json")):
|
||||
if json.name == "qqq.json":
|
||||
for i18n_file in sorted(translation_dir.glob("*.json")):
|
||||
if i18n_file.name == "qqq.json":
|
||||
continue
|
||||
f.write(str(json.relative_to(script_path.parent)) + '\n')
|
||||
print("Processing", i18n_file.name)
|
||||
if i18n_file.name != "test.json":
|
||||
lang_code, lang_name = get_translation_info(i18n_file)
|
||||
if lang_name:
|
||||
language_list.append((lang_code, lang_name))
|
||||
else:
|
||||
print(f"Warning: missing 'name' in {i18n_file.name}")
|
||||
f.write(str(i18n_file.relative_to(script_path.parent)) + '\n')
|
||||
|
||||
language_list = [{name: code} for code, name in sorted(language_list)]
|
||||
language_list_jsobj_str = json.dumps(language_list,
|
||||
indent=2,
|
||||
ensure_ascii=False)
|
||||
print("Saving", language_list_relpath)
|
||||
fullpath = script_path.parent / language_list_relpath
|
||||
with open(fullpath, 'w', encoding="utf-8") as f:
|
||||
f.write("const uiLanguages = " + language_list_jsobj_str)
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
{
|
||||
"@metadata": {
|
||||
"authors": [
|
||||
]
|
||||
},
|
||||
"name":"English",
|
||||
"suggest-full-text-search" : "containing '{{{SEARCH_TERMS}}}'..."
|
||||
, "no-such-book" : "No such book: {{BOOK_NAME}}"
|
||||
, "too-many-books" : "Too many books requested ({{NB_BOOKS}}) where limit is {{LIMIT}}"
|
||||
, "no-book-found" : "No book matches selection criteria"
|
||||
, "url-not-found" : "The requested URL \"{{url}}\" was not found on this server."
|
||||
, "suggest-search" : "Make a full text search for <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a>"
|
||||
, "random-article-failure" : "Oops! Failed to pick a random article :("
|
||||
, "invalid-raw-data-type" : "{{DATATYPE}} is not a valid request for raw content."
|
||||
, "no-value-for-arg": "No value provided for argument {{ARGUMENT}}"
|
||||
, "no-query" : "No query provided."
|
||||
, "raw-entry-not-found" : "Cannot find {{DATATYPE}} entry {{ENTRY}}"
|
||||
, "400-page-title" : "Invalid request"
|
||||
, "400-page-heading" : "Invalid request"
|
||||
, "404-page-title" : "Content not found"
|
||||
, "404-page-heading" : "Not Found"
|
||||
, "500-page-title" : "Internal Server Error"
|
||||
, "500-page-heading" : "Internal Server Error"
|
||||
, "fulltext-search-unavailable" : "Fulltext search unavailable"
|
||||
, "no-search-results": "The fulltext search engine is not available for this content."
|
||||
, "library-button-text": "Go to welcome page"
|
||||
, "home-button-text": "Go to the main page of '{{BOOK_TITLE}}'"
|
||||
, "random-page-button-text": "Go to a randomly selected page"
|
||||
, "searchbox-tooltip": "Search '{{BOOK_TITLE}}'"
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"@metadata": {
|
||||
"authors": [
|
||||
"Gomoko",
|
||||
"Thibaut120094",
|
||||
"Verdy p"
|
||||
]
|
||||
},
|
||||
"name": "français",
|
||||
"suggest-full-text-search": "contenant « {{{SEARCH_TERMS}}} »...",
|
||||
"no-such-book": "Aucun livre avec ce nom : {{BOOK_NAME}}",
|
||||
"too-many-books": "Trop de livres demandés ({{NB_BOOKS}}) alors que la limite est de {{LIMIT}}",
|
||||
"no-book-found": "Aucun livre ne correspond à ces critères de sélection",
|
||||
"url-not-found": "L’URL demandée « {{url}} » est introuvable sur ce serveur.",
|
||||
"suggest-search": "Faire une recherche en texte intégral de « <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a> »",
|
||||
"random-article-failure": "Oups ! Échec de sélection d’un article aléatoire :(",
|
||||
"invalid-raw-data-type": "{{DATATYPE}} n’est pas une requête valide pour du contenu brut.",
|
||||
"no-value-for-arg": "Aucune valeur fournie pour l’argument {{ARGUMENT}}",
|
||||
"no-query": "Aucune requête fournie.",
|
||||
"raw-entry-not-found": "Impossible de trouver l’entrée « {{ENTRY}} » de type « {{DATATYPE}} »",
|
||||
"400-page-title": "Requête non valide",
|
||||
"400-page-heading": "Requête non valide",
|
||||
"404-page-title": "Contenu non trouvé",
|
||||
"404-page-heading": "Non trouvé",
|
||||
"500-page-title": "Erreur interne du serveur",
|
||||
"500-page-heading": "Erreur interne du serveur",
|
||||
"fulltext-search-unavailable": "Recherche en texte intégral non disponible",
|
||||
"no-search-results": "Le moteur de recherche en texte intégral n’est pas disponible pour ce contenu.",
|
||||
"library-button-text": "Aller à la page de bienvenue",
|
||||
"home-button-text": "Aller à la page principale de « {{BOOK_TITLE}} »",
|
||||
"random-page-button-text": "Aller à une page sélectionnée aléatoirement",
|
||||
"searchbox-tooltip": "Rechercher « {{BOOK_TITLE}} »"
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
{
|
||||
"@metadata": {
|
||||
"authors": [
|
||||
"Amire80"
|
||||
]
|
||||
},
|
||||
"name": "עברית",
|
||||
"suggest-full-text-search": "מכיל '{{{SEARCH_TERMS}}}'...",
|
||||
"no-such-book": "אין ספר כזה: {{BOOK_NAME}}",
|
||||
"too-many-books": "נתבקשו יותר ספרים ({{NB_BOOKS}}) והמגבלה היא {{LIMIT}}",
|
||||
"no-book-found": "אין ספר שמתאים לתנאים שנבחרו",
|
||||
"url-not-found": "הכתובת המבוקשת \"{{url}}\" לא נמצאה בשרת הזה.",
|
||||
"suggest-search": "לעשות חיפוש טקסט מלא עבור <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a>",
|
||||
"random-article-failure": "אוי! לא עבדה בחירת ערך אקראי :(",
|
||||
"invalid-raw-data-type": "{{DATATYPE}} הוא לא בקשה תקינה של תוכן גולמי.",
|
||||
"no-value-for-arg": "לא סופק ערך לארגומנט {{ARGUMENT}}",
|
||||
"no-query": "לא סופקה שאילתה.",
|
||||
"raw-entry-not-found": "לא ניתן למצוא את רשומת ה־{{DATATYPE}} בשם {{ENTRY}}",
|
||||
"400-page-title": "בקשה בלתי־תקינה",
|
||||
"400-page-heading": "בקשה בלתי־תקינה",
|
||||
"404-page-title": "התוכן לא נמצא",
|
||||
"404-page-heading": "לא נמצא",
|
||||
"500-page-title": "שגיאת שרת פנימית",
|
||||
"500-page-heading": "שגיאת שרת פנימית",
|
||||
"fulltext-search-unavailable": "חיפוש בטקסט מלא אינו זמין",
|
||||
"no-search-results": "מנוע החיפוש בטקסט מלא אינו זמין עבור התוכן הזה.",
|
||||
"library-button-text": "מעבר לדף הבית \"ברוך בואך\"",
|
||||
"home-button-text": "מעבר לדף הראשי של \"{{BOOK_TITLE}}\"",
|
||||
"random-page-button-text": "מעבר לדף שנבחר אקראית",
|
||||
"searchbox-tooltip": "חיפוש \"{{BOOK_TITLE}}\""
|
||||
}
|
||||
@@ -1,18 +0,0 @@
{
"@metadata": {
"authors": [
"MathXplore"
]
},
"no-query": "クエリを指定していません。",
"400-page-title": "無効なリクエストです",
"400-page-heading": "無効なリクエストです",
"404-page-title": "コンテンツが見つかりませんでした",
"404-page-heading": "見つかりません",
"500-page-title": "内部サーバーエラー",
"500-page-heading": "内部サーバーエラー",
"fulltext-search-unavailable": "全文検索は利用できません",
"no-search-results": "このコンテンツでは全文検索エンジンが利用できません",
"library-button-text": "ウェルカムページに移動",
"random-page-button-text": "無作為に選ばれたページに移動する"
}
@@ -1,31 +0,0 @@
{
"@metadata": {
"authors": [
"L2212"
]
},
"name": "Sardu",
"suggest-full-text-search": "chi cuntenet '{{{SEARCH_TERMS}}}'...",
"no-such-book": "Perunu libru cun custu nùmene: {{BOOK_NAME}}",
"too-many-books": "Tropu libros pedidos, {{NB_BOOKS}} cando su lìmite est de {{LIMIT}}",
"no-book-found": "Perunu libru currispondet a sos critèrios de seletzione",
"url-not-found": "S'URL pedidu \"{{url}}\" non s'est atzapadu in custu serbidore.",
"suggest-search": "Faghe una chirca de testu intreu pro <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a>",
"random-article-failure": "Oops! Sa seletzione de un'artìculu a casu est fallida :(",
"invalid-raw-data-type": "{{DATATYPE}} no est una rechesta vàlida pro cuntenutu puru.",
"no-value-for-arg": "Perunu valore frunidu pro s'argumentu {{ARGUMENT}}",
"no-query": "Peruna chirca frunida.",
"raw-entry-not-found": "Non faghet a atzapare s'elementu {{ENTRY}} de genia {{DATATYPE}}",
"400-page-title": "Rechesta non vàlida",
"400-page-heading": "Rechesta non vàlida",
"404-page-title": "Cuntentu no agatadu",
"404-page-heading": "No agatadu",
"500-page-title": "Errore internu de su serbidore",
"500-page-heading": "Errore internu de su serbidore",
"fulltext-search-unavailable": "Chirca de testu integrale no a disponimentu",
"no-search-results": "Su motore de chirca de testu integrale no est a disponimentu pro custu cuntenutu.",
"library-button-text": "Bae a sa pàgina de bene bènnidu",
"home-button-text": "Bae a sa pàgina printzipale de '{{BOOK_TITLE}}'",
"random-page-button-text": "Bae a una pàgina seletzionada a manera casuale",
"searchbox-tooltip": "Chirca '{{BOOK_TITLE}}'"
}
@@ -1,29 +0,0 @@
{
"@metadata": {
"authors": [
"Sabelöga",
"WikiPhoenix"
]
},
"name": "Svenska",
"suggest-full-text-search": "innehåller '{{{SEARCH_TERMS}}}'...",
"no-such-book": "Ingen sådan bok: {{BOOK_NAME}}",
"too-many-books": "För många böcker begärda ({{NB_BOOKS}}) där gränsen är {{LIMIT}}",
"url-not-found": "Den begärda webbadressen \"{{url}}\" hittades inte på denna server.",
"suggest-search": "Utför en fulltextsökning för <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a>",
"random-article-failure": "Hoppsan! Kunde inte välja en slumpartikel :(",
"invalid-raw-data-type": "{{DATATYPE}} är ingen giltig begäran för oformaterat innehåll.",
"raw-entry-not-found": "Kunde inte hitta {{DATATYPE}}-inlägget {{ENTRY}}",
"400-page-title": "Ogiltig begäran",
"400-page-heading": "Ogiltig begäran",
"404-page-title": "Innehållet hittades inte",
"404-page-heading": "Hittades inte",
"500-page-title": "Internt serverfel",
"500-page-heading": "Internt serverfel",
"fulltext-search-unavailable": "Fulltextsökning är inte tillgänglig",
"no-search-results": "Sökmaskinen för fulltext är inte tillgänglig för detta innehåll.",
"library-button-text": "Gå till hemsidan",
"home-button-text": "Gå till huvudsidan för \"{{BOOK_TITLE}}\"",
"random-page-button-text": "Gå till en slumpmässigt utvald sida",
"searchbox-tooltip": "Sök efter \"{{BOOK_TITLE}}\""
}
@@ -1,16 +0,0 @@
{
"@metadata": {
"authors": [
"GuoPC",
"StarrySky"
]
},
"name": "英语",
"no-query": "未提供查询。",
"400-page-title": "无效请求",
"400-page-heading": "无效请求",
"404-page-heading": "未找到",
"500-page-title": "内部服务器错误",
"500-page-heading": "内部服务器错误",
"library-button-text": "前往欢迎页面"
}
@@ -1,32 +0,0 @@
{
"@metadata": {
"authors": [
"Kly",
"Winston Sung"
]
},
"name": "繁體中文",
"suggest-full-text-search": "正在包含「{{{SEARCH_TERMS}}}」…",
"no-such-book": "沒有這樣的書籍:{{BOOK_NAME}}",
"too-many-books": "請求太多個書籍({{NB_BOOKS}}),上限是 {{LIMIT}} 個",
"no-book-found": "沒有書籍符合選擇標準",
"url-not-found": "在此伺服器上找不到請求的 URL「{{url}}」。",
"suggest-search": "建立 <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a> 使用的全文搜尋",
"random-article-failure": "哎呀!隨機挑選條目失敗 :(",
"invalid-raw-data-type": "{{DATATYPE}}不是原始內容的有效請求。",
"no-value-for-arg": "沒有為引數 {{ARGUMENT}} 提供內容",
"no-query": "未提供查詢。",
"raw-entry-not-found": "找不到{{DATATYPE}}項目{{ENTRY}}",
"400-page-title": "無效請求",
"400-page-heading": "無效請求",
"404-page-title": "查無內容",
"404-page-heading": "查無頁面",
"500-page-title": "內部伺服器錯誤",
"500-page-heading": "內部伺服器錯誤",
"fulltext-search-unavailable": "全文搜尋無效",
"no-search-results": "全文搜尋引擎不適用此內容。",
"library-button-text": "前往歡迎首頁",
"home-button-text": "前往「{{BOOK_TITLE}}」的首頁",
"random-page-button-text": "前往隨機選取頁面",
"searchbox-tooltip": "在{{BOOK_TITLE}}搜尋"
}
@@ -1,20 +1,37 @@
i18n/bn.json
i18n/cs.json
i18n/en.json
i18n/fr.json
i18n/he.json
i18n/hy.json
i18n/it.json
i18n/ja.json
i18n/ko.json
i18n/ku-latn.json
i18n/mk.json
i18n/nqo.json
i18n/pl.json
i18n/ru.json
i18n/sc.json
i18n/sk.json
i18n/sv.json
i18n/tr.json
i18n/zh-hans.json
i18n/zh-hant.json
skin/i18n/ar.json
skin/i18n/bn.json
skin/i18n/cs.json
skin/i18n/de.json
skin/i18n/dga.json
skin/i18n/el.json
skin/i18n/en.json
skin/i18n/es.json
skin/i18n/fi.json
skin/i18n/fr.json
skin/i18n/he.json
skin/i18n/hi.json
skin/i18n/hy.json
skin/i18n/ia.json
skin/i18n/it.json
skin/i18n/ja.json
skin/i18n/ko.json
skin/i18n/ku-latn.json
skin/i18n/lb.json
skin/i18n/mk.json
skin/i18n/ms.json
skin/i18n/nl.json
skin/i18n/nqo.json
skin/i18n/or.json
skin/i18n/pl.json
skin/i18n/ru.json
skin/i18n/sc.json
skin/i18n/sk.json
skin/i18n/skr-arab.json
skin/i18n/sl.json
skin/i18n/sq.json
skin/i18n/sv.json
skin/i18n/te.json
skin/i18n/test.json
skin/i18n/tr.json
skin/i18n/zh-hans.json
skin/i18n/zh-hant.json
@@ -1,7 +1,18 @@
resource_files = run_command(res_manager,
'--list-all',
files('resources_list.txt')
).stdout().strip().split('\n')
if meson.version().version_compare('>=0.47.0')
resource_files = run_command(
res_manager,
'--list-all',
files('resources_list.txt'),
check: true
).stdout().strip().split('\n')
else
resource_files = run_command(
res_manager,
'--list-all',
files('resources_list.txt')
).stdout().strip().split('\n')
endif


preprocessed_resources = custom_target('preprocessed_resource_files',
input: 'resources_list.txt',
@@ -14,7 +25,7 @@ preprocessed_resources = custom_target('preprocessed_resource_files',
)

lib_resources = custom_target('resources',
input: preprocessed_resources,
input: [preprocessed_resources, 'i18n_resources_list.txt'],
output: ['libkiwix-resources.cpp', 'libkiwix-resources.h'],
command:[res_compiler,
'--cxxfile', '@OUTPUT0@',
@@ -30,11 +41,24 @@ lib_resources = custom_target('resources',
# i18n_resource_files = fs.read('i18n_resources_list.txt').strip().split('\n')
# ```
# once we move to meson >= 0.57.0
i18n_resource_files = run_command(find_program('python3'),
'-c',
'import sys; f=open(sys.argv[1]); print(f.read())',
files('i18n_resources_list.txt')
).stdout().strip().split('\n')

if meson.version().version_compare('>=0.47.0')
i18n_resource_files = run_command(
find_program('python3'),
'-c',
'import sys; f=open(sys.argv[1]); print(f.read())',
files('i18n_resources_list.txt'),
check: true
).stdout().strip().split('\n')
else
i18n_resource_files = run_command(
find_program('python3'),
'-c',
'import sys; f=open(sys.argv[1]); print(f.read())',
files('i18n_resources_list.txt'),
).stdout().strip().split('\n')
endif


i18n_resources = custom_target('i18n_resources',
input: i18n_resource_files,
@@ -1,13 +1,16 @@
skin/caret.png
skin/bittorrent.png
skin/magnet.png
skin/feed.svg
skin/langSelector.svg
skin/download.png
skin/hash.png
skin/search-icon.svg
skin/iso6391To3.js
skin/isotope.pkgd.min.js
skin/index.js
skin/autoComplete.min.js
skin/autoComplete/autoComplete.min.js
skin/kiwix.css
skin/taskbar.css
skin/index.css
skin/fonts/Poppins.ttf
@@ -15,6 +18,9 @@ skin/fonts/Roboto.ttf
skin/search_results.css
skin/blank.html
skin/viewer.js
skin/i18n.js
skin/languages.js
skin/mustache.min.js
viewer.html
templates/search_result.html
templates/search_result.xml
@@ -27,15 +33,17 @@ templates/catalog_entries.xml
templates/catalog_v2_root.xml
templates/catalog_v2_entries.xml
templates/catalog_v2_entry.xml
templates/catalog_v2_partial_entry.xml
templates/catalog_v2_categories.xml
templates/catalog_v2_languages.xml
templates/url_of_search_results_css
templates/viewer_settings.js
templates/no_js_library_page.html
templates/no_js_download.html
opensearchdescription.xml
ft_opensearchdescription.xml
catalog_v2_searchdescription.xml
skin/css/autoComplete.css
skin/css/images/search.svg
skin/autoComplete/css/autoComplete.css
skin/favicon/android-chrome-192x192.png
skin/favicon/android-chrome-512x512.png
skin/favicon/apple-touch-icon.png
static/skin/autoComplete/LICENSE (new file, 201 lines)
@@ -0,0 +1,201 @@
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
static/skin/autoComplete/autoComplete.js (new file, 654 lines)
@@ -0,0 +1,654 @@
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||
typeof define === 'function' && define.amd ? define(factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.autoComplete = factory());
|
||||
}(this, (function () { 'use strict';
|
||||
|
||||
function ownKeys(object, enumerableOnly) {
|
||||
var keys = Object.keys(object);
|
||||
|
||||
if (Object.getOwnPropertySymbols) {
|
||||
var symbols = Object.getOwnPropertySymbols(object);
|
||||
|
||||
if (enumerableOnly) {
|
||||
symbols = symbols.filter(function (sym) {
|
||||
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
|
||||
});
|
||||
}
|
||||
|
||||
keys.push.apply(keys, symbols);
|
||||
}
|
||||
|
||||
return keys;
|
||||
}
|
||||
|
||||
function _objectSpread2(target) {
|
||||
for (var i = 1; i < arguments.length; i++) {
|
||||
var source = arguments[i] != null ? arguments[i] : {};
|
||||
|
||||
if (i % 2) {
|
||||
ownKeys(Object(source), true).forEach(function (key) {
|
||||
_defineProperty(target, key, source[key]);
|
||||
});
|
||||
} else if (Object.getOwnPropertyDescriptors) {
|
||||
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
|
||||
} else {
|
||||
ownKeys(Object(source)).forEach(function (key) {
|
||||
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return target;
|
||||
}
|
||||
|
||||
function _typeof(obj) {
|
||||
"@babel/helpers - typeof";
|
||||
|
||||
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
|
||||
_typeof = function (obj) {
|
||||
return typeof obj;
|
||||
};
|
||||
} else {
|
||||
_typeof = function (obj) {
|
||||
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
|
||||
};
|
||||
}
|
||||
|
||||
return _typeof(obj);
|
||||
}
|
||||
|
||||
function _defineProperty(obj, key, value) {
|
||||
if (key in obj) {
|
||||
Object.defineProperty(obj, key, {
|
||||
value: value,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
});
|
||||
} else {
|
||||
obj[key] = value;
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
function _toConsumableArray(arr) {
|
||||
return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread();
|
||||
}
|
||||
|
||||
function _arrayWithoutHoles(arr) {
|
||||
if (Array.isArray(arr)) return _arrayLikeToArray(arr);
|
||||
}
|
||||
|
||||
function _iterableToArray(iter) {
|
||||
if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter);
|
||||
}
|
||||
|
||||
function _unsupportedIterableToArray(o, minLen) {
|
||||
if (!o) return;
|
||||
if (typeof o === "string") return _arrayLikeToArray(o, minLen);
|
||||
var n = Object.prototype.toString.call(o).slice(8, -1);
|
||||
if (n === "Object" && o.constructor) n = o.constructor.name;
|
||||
if (n === "Map" || n === "Set") return Array.from(o);
|
||||
if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
|
||||
}
|
||||
|
||||
function _arrayLikeToArray(arr, len) {
|
||||
if (len == null || len > arr.length) len = arr.length;
|
||||
|
||||
for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
|
||||
|
||||
return arr2;
|
||||
}
|
||||
|
||||
function _nonIterableSpread() {
|
||||
throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
|
||||
}
|
||||
|
||||
function _createForOfIteratorHelper(o, allowArrayLike) {
|
||||
var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"];
|
||||
|
||||
if (!it) {
|
||||
if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") {
|
||||
if (it) o = it;
|
||||
var i = 0;
|
||||
|
||||
var F = function () {};
|
||||
|
||||
return {
|
||||
s: F,
|
||||
n: function () {
|
||||
if (i >= o.length) return {
|
||||
done: true
|
||||
};
|
||||
return {
|
||||
done: false,
|
||||
value: o[i++]
|
||||
};
|
||||
},
|
||||
e: function (e) {
|
||||
throw e;
|
||||
},
|
||||
f: F
|
||||
};
|
||||
}
|
||||
|
||||
throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
|
||||
}
|
||||
|
||||
var normalCompletion = true,
|
||||
didErr = false,
|
||||
err;
|
||||
return {
|
||||
s: function () {
|
||||
it = it.call(o);
|
||||
},
|
||||
n: function () {
|
||||
var step = it.next();
|
||||
normalCompletion = step.done;
|
||||
return step;
|
||||
},
|
||||
e: function (e) {
|
||||
didErr = true;
|
||||
err = e;
|
||||
},
|
||||
f: function () {
|
||||
try {
|
||||
if (!normalCompletion && it.return != null) it.return();
|
||||
} finally {
|
||||
if (didErr) throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
var select$1 = function select(element) {
|
||||
return typeof element === "string" ? document.querySelector(element) : element();
|
||||
};
|
||||
var create = function create(tag, options) {
|
||||
var el = typeof tag === "string" ? document.createElement(tag) : tag;
|
||||
for (var key in options) {
|
||||
var val = options[key];
|
||||
if (key === "inside") {
|
||||
val.append(el);
|
||||
} else if (key === "dest") {
|
||||
select$1(val[0]).insertAdjacentElement(val[1], el);
|
||||
} else if (key === "around") {
|
||||
var ref = val;
|
||||
ref.parentNode.insertBefore(el, ref);
|
||||
el.append(ref);
|
||||
if (ref.getAttribute("autofocus") != null) ref.focus();
|
||||
} else if (key in el) {
|
||||
el[key] = val;
|
||||
} else {
|
||||
el.setAttribute(key, val);
|
||||
}
|
||||
}
|
||||
return el;
|
||||
};
|
||||
var getQuery = function getQuery(field) {
|
||||
return field instanceof HTMLInputElement || field instanceof HTMLTextAreaElement ? field.value : field.innerHTML;
|
||||
};
|
||||
var format = function format(value, diacritics) {
|
||||
value = value.toString().toLowerCase();
|
||||
return diacritics ? value.normalize("NFD").replace(/[\u0300-\u036f]/g, "").normalize("NFC") : value;
|
||||
};
|
||||
var debounce = function debounce(callback, duration) {
|
||||
var timer;
|
||||
return function () {
|
||||
clearTimeout(timer);
|
||||
timer = setTimeout(function () {
|
||||
return callback();
|
||||
}, duration);
|
||||
};
|
||||
};
|
||||
var checkTrigger = function checkTrigger(query, condition, threshold) {
|
||||
return condition ? condition(query) : query.length >= threshold;
|
||||
};
|
||||
var mark = function mark(value, cls) {
|
||||
return create("mark", _objectSpread2({
|
||||
innerHTML: value
|
||||
}, typeof cls === "string" && {
|
||||
"class": cls
|
||||
})).outerHTML;
|
||||
};
|
||||
|
||||
var configure = (function (ctx) {
|
||||
var name = ctx.name,
|
||||
options = ctx.options,
|
||||
resultsList = ctx.resultsList,
|
||||
resultItem = ctx.resultItem;
|
||||
for (var option in options) {
|
||||
if (_typeof(options[option]) === "object") {
|
||||
if (!ctx[option]) ctx[option] = {};
|
||||
for (var subOption in options[option]) {
|
||||
ctx[option][subOption] = options[option][subOption];
|
||||
}
|
||||
} else {
|
||||
ctx[option] = options[option];
|
||||
}
|
||||
}
|
||||
ctx.selector = ctx.selector || "#" + name;
|
||||
resultsList.destination = resultsList.destination || ctx.selector;
|
||||
resultsList.id = resultsList.id || name + "_list_" + ctx.id;
|
||||
resultItem.id = resultItem.id || name + "_result";
|
||||
ctx.input = select$1(ctx.selector);
|
||||
});
|
||||
|
||||
var eventEmitter = (function (name, ctx) {
|
||||
ctx.input.dispatchEvent(new CustomEvent(name, {
|
||||
bubbles: true,
|
||||
detail: ctx.feedback,
|
||||
cancelable: true
|
||||
}));
|
||||
});
|
||||
|
||||
var search = (function (query, record, options) {
|
||||
var _ref = options || {},
|
||||
mode = _ref.mode,
|
||||
diacritics = _ref.diacritics,
|
||||
highlight = _ref.highlight;
|
||||
var nRecord = format(record, diacritics);
|
||||
record = record.toString();
|
||||
query = format(query, diacritics);
|
||||
if (mode === "loose") {
|
||||
query = query.replace(/ /g, "");
|
||||
var qLength = query.length;
|
||||
var cursor = 0;
|
||||
var match = Array.from(record).map(function (character, index) {
|
||||
if (cursor < qLength && nRecord[index] === query[cursor]) {
|
||||
character = highlight ? mark(character, highlight) : character;
|
||||
cursor++;
|
||||
}
|
||||
return character;
|
||||
}).join("");
|
||||
if (cursor === qLength) return match;
|
||||
} else {
|
||||
var _match = nRecord.indexOf(query);
|
||||
if (~_match) {
|
||||
query = record.substring(_match, _match + query.length);
|
||||
_match = highlight ? record.replace(query, mark(query, highlight)) : record;
|
||||
return _match;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
var getData = function getData(ctx, query) {
|
||||
return new Promise(function ($return, $error) {
|
||||
var data;
|
||||
data = ctx.data;
|
||||
if (data.cache && data.store) return $return();
|
||||
return new Promise(function ($return, $error) {
|
||||
if (typeof data.src === "function") {
|
||||
return data.src(query).then($return, $error);
|
||||
}
|
||||
return $return(data.src);
|
||||
}).then(function ($await_4) {
|
||||
try {
|
||||
ctx.feedback = data.store = $await_4;
|
||||
eventEmitter("response", ctx);
|
||||
return $return();
|
||||
} catch ($boundEx) {
|
||||
return $error($boundEx);
|
||||
}
|
||||
}, $error);
|
||||
});
|
||||
};
|
||||
var findMatches = function findMatches(query, ctx) {
|
||||
var data = ctx.data,
|
||||
searchEngine = ctx.searchEngine;
|
||||
var matches = [];
|
||||
data.store.forEach(function (value, index) {
|
||||
var find = function find(key) {
|
||||
var record = key ? value[key] : value;
|
||||
var match = typeof searchEngine === "function" ? searchEngine(query, record) : search(query, record, {
|
||||
mode: searchEngine,
|
||||
diacritics: ctx.diacritics,
|
||||
highlight: ctx.resultItem.highlight
|
||||
});
|
||||
if (!match) return;
|
||||
var result = {
|
||||
match: match,
|
||||
value: value
|
||||
};
|
||||
if (key) result.key = key;
|
||||
matches.push(result);
|
||||
};
|
||||
if (data.keys) {
|
||||
var _iterator = _createForOfIteratorHelper(data.keys),
|
||||
_step;
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var key = _step.value;
|
||||
find(key);
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
} else {
|
||||
find();
|
||||
}
|
||||
});
|
||||
if (data.filter) matches = data.filter(matches);
|
||||
var results = matches.slice(0, ctx.resultsList.maxResults);
|
||||
ctx.feedback = {
|
||||
query: query,
|
||||
matches: matches,
|
||||
results: results
|
||||
};
|
||||
eventEmitter("results", ctx);
|
||||
};
|
||||
|
||||
var Expand = "aria-expanded";
|
||||
var Active = "aria-activedescendant";
|
||||
var Selected = "aria-selected";
|
||||
var feedback = function feedback(ctx, index) {
|
||||
ctx.feedback.selection = _objectSpread2({
|
||||
index: index
|
||||
}, ctx.feedback.results[index]);
|
||||
};
|
||||
var render = function render(ctx) {
|
||||
var resultsList = ctx.resultsList,
|
||||
list = ctx.list,
|
||||
resultItem = ctx.resultItem,
|
||||
feedback = ctx.feedback;
|
||||
var matches = feedback.matches,
|
||||
results = feedback.results;
|
||||
ctx.cursor = -1;
|
||||
list.innerHTML = "";
|
||||
if (matches.length || resultsList.noResults) {
|
||||
var fragment = new DocumentFragment();
|
||||
results.forEach(function (result, index) {
|
||||
var element = create(resultItem.tag, _objectSpread2({
|
||||
id: "".concat(resultItem.id, "_").concat(index),
|
||||
role: "option",
|
||||
innerHTML: result.match,
|
||||
inside: fragment
|
||||
}, resultItem["class"] && {
|
||||
"class": resultItem["class"]
|
||||
}));
|
||||
if (resultItem.element) resultItem.element(element, result);
|
||||
});
|
||||
list.append(fragment);
|
||||
if (resultsList.element) resultsList.element(list, feedback);
|
||||
open(ctx);
|
||||
} else {
|
||||
close(ctx);
|
||||
}
|
||||
};
|
||||
var open = function open(ctx) {
|
||||
if (ctx.isOpen) return;
|
||||
(ctx.wrapper || ctx.input).setAttribute(Expand, true);
|
||||
ctx.list.removeAttribute("hidden");
|
||||
ctx.isOpen = true;
|
||||
eventEmitter("open", ctx);
|
||||
};
|
||||
var close = function close(ctx) {
|
||||
if (!ctx.isOpen) return;
|
||||
(ctx.wrapper || ctx.input).setAttribute(Expand, false);
|
||||
ctx.input.setAttribute(Active, "");
|
||||
ctx.list.setAttribute("hidden", "");
|
||||
ctx.isOpen = false;
|
||||
eventEmitter("close", ctx);
|
||||
};
|
||||
var goTo = function goTo(index, ctx) {
|
||||
var resultItem = ctx.resultItem;
|
||||
var results = ctx.list.getElementsByTagName(resultItem.tag);
|
||||
var cls = resultItem.selected ? resultItem.selected.split(" ") : false;
|
||||
if (ctx.isOpen && results.length) {
|
||||
var _results$index$classL;
|
||||
var state = ctx.cursor;
|
||||
if (index >= results.length) index = 0;
|
||||
if (index < 0) index = results.length - 1;
|
||||
ctx.cursor = index;
|
||||
if (state > -1) {
|
||||
var _results$state$classL;
|
||||
results[state].removeAttribute(Selected);
|
||||
if (cls) (_results$state$classL = results[state].classList).remove.apply(_results$state$classL, _toConsumableArray(cls));
|
||||
}
|
||||
results[index].setAttribute(Selected, true);
|
||||
if (cls) (_results$index$classL = results[index].classList).add.apply(_results$index$classL, _toConsumableArray(cls));
|
||||
ctx.input.setAttribute(Active, results[ctx.cursor].id);
|
||||
ctx.list.scrollTop = results[index].offsetTop - ctx.list.clientHeight + results[index].clientHeight + 5;
|
||||
ctx.feedback.cursor = ctx.cursor;
|
||||
feedback(ctx, index);
|
||||
eventEmitter("navigate", ctx);
|
||||
}
|
||||
};
|
||||
var next = function next(ctx) {
|
||||
goTo(ctx.cursor + 1, ctx);
|
||||
};
|
||||
var previous = function previous(ctx) {
|
||||
goTo(ctx.cursor - 1, ctx);
|
||||
};
|
||||
var select = function select(ctx, event, index) {
|
||||
index = index >= 0 ? index : ctx.cursor;
|
||||
if (index < 0) return;
|
||||
ctx.feedback.event = event;
|
||||
feedback(ctx, index);
|
||||
eventEmitter("selection", ctx);
|
||||
close(ctx);
|
||||
};
|
||||
var click = function click(event, ctx) {
|
||||
var itemTag = ctx.resultItem.tag.toUpperCase();
|
||||
var items = Array.from(ctx.list.querySelectorAll(itemTag));
|
||||
var item = event.target.closest(itemTag);
|
||||
if (item && item.nodeName === itemTag) {
|
||||
select(ctx, event, items.indexOf(item));
|
||||
}
|
||||
};
|
||||
var navigate = function navigate(event, ctx) {
|
||||
switch (event.keyCode) {
|
||||
case 40:
|
||||
case 38:
|
||||
event.preventDefault();
|
||||
event.keyCode === 40 ? next(ctx) : previous(ctx);
|
||||
break;
|
||||
case 13:
|
||||
if (!ctx.submit) event.preventDefault();
|
||||
if (ctx.cursor >= 0) select(ctx, event);
|
||||
break;
|
||||
case 9:
|
||||
if (ctx.resultsList.tabSelect && ctx.cursor >= 0) select(ctx, event);
|
||||
break;
|
||||
case 27:
|
||||
ctx.input.value = "";
|
||||
close(ctx);
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
function start (ctx, q) {
|
||||
var _this = this;
|
||||
return new Promise(function ($return, $error) {
|
||||
var queryVal, condition;
|
||||
queryVal = q || getQuery(ctx.input);
|
||||
queryVal = ctx.query ? ctx.query(queryVal) : queryVal;
|
||||
condition = checkTrigger(queryVal, ctx.trigger, ctx.threshold);
|
||||
if (condition) {
|
||||
return getData(ctx, queryVal).then(function ($await_2) {
|
||||
try {
|
||||
if (ctx.feedback instanceof Error) return $return();
|
||||
findMatches(queryVal, ctx);
|
||||
if (ctx.resultsList) render(ctx);
|
||||
return $If_1.call(_this);
|
||||
} catch ($boundEx) {
|
||||
return $error($boundEx);
|
||||
}
|
||||
}, $error);
|
||||
} else {
|
||||
close(ctx);
|
||||
return $If_1.call(_this);
|
||||
}
|
||||
function $If_1() {
|
||||
return $return();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var eventsManager = function eventsManager(events, callback) {
|
||||
for (var element in events) {
|
||||
for (var event in events[element]) {
|
||||
callback(element, event);
|
||||
}
|
||||
}
|
||||
};
|
||||
var addEvents = function addEvents(ctx) {
|
||||
var events = ctx.events;
|
||||
var run = debounce(function () {
|
||||
return start(ctx);
|
||||
}, ctx.debounce);
|
||||
var publicEvents = ctx.events = _objectSpread2({
|
||||
input: _objectSpread2({}, events && events.input)
|
||||
}, ctx.resultsList && {
|
||||
list: events ? _objectSpread2({}, events.list) : {}
|
||||
});
|
||||
var privateEvents = {
|
||||
input: {
|
||||
input: function input() {
|
||||
run();
|
||||
},
|
||||
keydown: function keydown(event) {
|
||||
navigate(event, ctx);
|
||||
},
|
||||
blur: function blur() {
|
||||
close(ctx);
|
||||
}
|
||||
},
|
||||
list: {
|
||||
mousedown: function mousedown(event) {
|
||||
event.preventDefault();
|
||||
},
|
||||
click: function click$1(event) {
|
||||
click(event, ctx);
|
||||
}
|
||||
}
|
||||
};
|
||||
eventsManager(privateEvents, function (element, event) {
|
||||
if (!ctx.resultsList && event !== "input") return;
|
||||
if (publicEvents[element][event]) return;
|
||||
publicEvents[element][event] = privateEvents[element][event];
|
||||
});
|
||||
eventsManager(publicEvents, function (element, event) {
|
||||
ctx[element].addEventListener(event, publicEvents[element][event]);
|
||||
});
|
||||
};
|
||||
var removeEvents = function removeEvents(ctx) {
|
||||
eventsManager(ctx.events, function (element, event) {
|
||||
ctx[element].removeEventListener(event, ctx.events[element][event]);
|
||||
});
|
||||
};
|
||||
|
||||
function init (ctx) {
|
||||
var _this = this;
|
||||
return new Promise(function ($return, $error) {
|
||||
var placeHolder, resultsList, parentAttrs;
|
||||
placeHolder = ctx.placeHolder;
|
||||
resultsList = ctx.resultsList;
|
||||
parentAttrs = {
|
||||
role: "combobox",
|
||||
"aria-owns": resultsList.id,
|
||||
"aria-haspopup": true,
|
||||
"aria-expanded": false
|
||||
};
|
||||
create(ctx.input, _objectSpread2(_objectSpread2({
|
||||
"aria-controls": resultsList.id,
|
||||
"aria-autocomplete": "both"
|
||||
}, placeHolder && {
|
||||
placeholder: placeHolder
|
||||
}), !ctx.wrapper && _objectSpread2({}, parentAttrs)));
|
||||
if (ctx.wrapper) ctx.wrapper = create("div", _objectSpread2({
|
||||
around: ctx.input,
|
||||
"class": ctx.name + "_wrapper"
|
||||
}, parentAttrs));
|
||||
if (resultsList) ctx.list = create(resultsList.tag, _objectSpread2({
|
||||
dest: [resultsList.destination, resultsList.position],
|
||||
id: resultsList.id,
|
||||
role: "listbox",
|
||||
hidden: "hidden"
|
||||
}, resultsList["class"] && {
|
||||
"class": resultsList["class"]
|
||||
}));
|
||||
addEvents(ctx);
|
||||
if (ctx.data.cache) {
|
||||
return getData(ctx).then(function ($await_2) {
|
||||
try {
|
||||
return $If_1.call(_this);
|
||||
} catch ($boundEx) {
|
||||
return $error($boundEx);
|
||||
}
|
||||
}, $error);
|
||||
}
|
||||
function $If_1() {
|
||||
eventEmitter("init", ctx);
|
||||
return $return();
|
||||
}
|
||||
return $If_1.call(_this);
|
||||
});
|
||||
}
|
||||
|
||||
function extend (autoComplete) {
|
||||
var prototype = autoComplete.prototype;
|
||||
prototype.init = function () {
|
||||
init(this);
|
||||
};
|
||||
prototype.start = function (query) {
|
||||
start(this, query);
|
||||
};
|
||||
prototype.unInit = function () {
|
||||
if (this.wrapper) {
|
||||
var parentNode = this.wrapper.parentNode;
|
||||
parentNode.insertBefore(this.input, this.wrapper);
|
||||
parentNode.removeChild(this.wrapper);
|
||||
}
|
||||
removeEvents(this);
|
||||
};
|
||||
prototype.open = function () {
|
||||
open(this);
|
||||
};
|
||||
prototype.close = function () {
|
||||
close(this);
|
||||
};
|
||||
prototype.goTo = function (index) {
|
||||
goTo(index, this);
|
||||
};
|
||||
prototype.next = function () {
|
||||
next(this);
|
||||
};
|
||||
prototype.previous = function () {
|
||||
previous(this);
|
||||
};
|
||||
prototype.select = function (index) {
|
||||
select(this, null, index);
|
||||
};
|
||||
prototype.search = function (query, record, options) {
|
||||
return search(query, record, options);
|
||||
};
|
||||
}
|
||||
|
||||
function autoComplete(config) {
|
||||
this.options = config;
|
||||
this.id = autoComplete.instances = (autoComplete.instances || 0) + 1;
|
||||
this.name = "autoComplete";
|
||||
this.wrapper = 1;
|
||||
this.threshold = 1;
|
||||
this.debounce = 0;
|
||||
this.resultsList = {
|
||||
position: "afterend",
|
||||
tag: "ul",
|
||||
maxResults: 5
|
||||
};
|
||||
this.resultItem = {
|
||||
tag: "li"
|
||||
};
|
||||
configure(this);
|
||||
extend.call(this, autoComplete);
|
||||
init(this);
|
||||
}
|
||||
|
||||
return autoComplete;
|
||||
|
||||
})));
|
||||
@@ -1,3 +1,4 @@
/* Modified from https://github.com/TarekRaafat/autoComplete.js (version 10.2.6)*/
.autoComplete_wrapper {
display: inline-block;
position: relative;
@@ -1,8 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" focusable="false" x="0px" y="0px" width="30" height="30" viewBox="0 0 171 171" style=" fill:#000000;">
<g fill="none" fill-rule="nonzero" stroke="none" stroke-width="1" stroke-linecap="butt" stroke-linejoin="miter" stroke-miterlimit="10" stroke-dasharray="" stroke-dashoffset="0" font-family="none" font-weight="none" font-size="none" text-anchor="none" style="mix-blend-mode: normal">
<path d="M0,171.99609v-171.99609h171.99609v171.99609z" fill="none"></path>
<g fill="#ff7a7a">
<path d="M74.1,17.1c-31.41272,0 -57,25.58728 -57,57c0,31.41272 25.58728,57 57,57c13.6601,0 26.20509,-4.85078 36.03692,-12.90293l34.03301,34.03301c1.42965,1.48907 3.55262,2.08891 5.55014,1.56818c1.99752,-0.52073 3.55746,-2.08067 4.07819,-4.07819c0.52073,-1.99752 -0.0791,-4.12049 -1.56818,-5.55014l-34.03301,-34.03301c8.05215,-9.83182 12.90293,-22.37682 12.90293,-36.03692c0,-31.41272 -25.58728,-57 -57,-57zM74.1,28.5c25.2517,0 45.6,20.3483 45.6,45.6c0,25.2517 -20.3483,45.6 -45.6,45.6c-25.2517,0 -45.6,-20.3483 -45.6,-45.6c0,-25.2517 20.3483,-45.6 45.6,-45.6z"></path>
</g>
</g>
</svg>
(image preview not shown: deleted file, 1.1 KiB)
(image preview not shown: modified binary file, 15 KiB before and after)
static/skin/feed.png (new binary file, 497 B; preview not shown)
static/skin/feed.svg (new file, 22 lines)
@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 27.3.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="uiLanguageSelectorButton" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
x="0px" y="0px" viewBox="0 0 30 30" style="enable-background:new 0 0 30 30;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
.st1{fill:#CCCCCC;}
|
||||
.st2{fill:#F78422;}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M2.9,29.6c-1.4,0-2.6-1.1-2.6-2.6V2.9c0-1.4,1.1-2.6,2.6-2.6h24.1c1.4,0,2.6,1.1,2.6,2.6v24.1
|
||||
c0,1.4-1.1,2.6-2.6,2.6H2.9z"/>
|
||||
<path class="st1" d="M27.1,0.6c1.3,0,2.3,1,2.3,2.3v24.1c0,1.3-1,2.3-2.3,2.3H2.9c-1.3,0-2.3-1-2.3-2.3V2.9c0-1.3,1-2.3,2.3-2.3
|
||||
H27.1 M27.1,0.1H2.9c-1.6,0-2.8,1.3-2.8,2.8v24.1c0,1.6,1.3,2.8,2.8,2.8h24.1c1.6,0,2.8-1.3,2.8-2.8V2.9
|
||||
C29.9,1.4,28.6,0.1,27.1,0.1L27.1,0.1z"/>
|
||||
</g>
|
||||
<g>
|
||||
<path class="st2" d="M18,24h-3c0-5.2-4.2-9.4-9.4-9.4v-3C12.4,11.6,18,17.2,18,24z"/>
|
||||
<path class="st2" d="M24.5,24h-3c-0.1-8.7-7.2-15.9-16-15.9v-3C16,5.1,24.5,13.6,24.5,24z"/>
|
||||
<circle class="st2" cx="8.1" cy="21.6" r="2.6"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
static/skin/i18n.js (new file, 147 lines)
@@ -0,0 +1,147 @@
import mustache from '../skin/mustache.min.js?KIWIXCACHEID'
|
||||
|
||||
const Translations = {
|
||||
defaultLanguage: null,
|
||||
currentLanguage: null,
|
||||
promises: {},
|
||||
data: {},
|
||||
|
||||
load: function(lang, asDefault=false) {
|
||||
if ( asDefault ) {
|
||||
this.defaultLanguage = lang;
|
||||
this.loadTranslationsJSON(lang);
|
||||
} else {
|
||||
this.currentLanguage = lang;
|
||||
if ( lang != this.defaultLanguage ) {
|
||||
this.loadTranslationsJSON(lang);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
loadTranslationsJSON: function(lang) {
|
||||
if ( this.promises[lang] )
|
||||
return;
|
||||
|
||||
const errorMsg = `Error loading translations for language '${lang}': `;
|
||||
this.promises[lang] = fetch(`./skin/i18n/${lang}.json`).then(async (resp) => {
|
||||
if ( resp.ok ) {
|
||||
this.data[lang] = JSON.parse(await resp.text());
|
||||
} else {
|
||||
console.log(errorMsg + resp.statusText);
|
||||
}
|
||||
}).catch((err) => {
|
||||
console.log(errorMsg + err);
|
||||
});
|
||||
},
|
||||
|
||||
whenReady: function(callback) {
|
||||
const defaultLangPromise = this.promises[this.defaultLanguage];
|
||||
const currentLangPromise = this.promises[this.currentLanguage];
|
||||
Promise.all([defaultLangPromise, currentLangPromise]).then(callback);
|
||||
},
|
||||
|
||||
get: function(msgId) {
|
||||
const activeTranslation = this.data[this.currentLanguage];
|
||||
|
||||
const r = activeTranslation && activeTranslation[msgId];
|
||||
if ( r )
|
||||
return r;
|
||||
|
||||
const defaultMsgs = this.data[this.defaultLanguage];
|
||||
if ( defaultMsgs )
|
||||
return defaultMsgs[msgId];
|
||||
|
||||
throw "Translations are not loaded";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function $t(msgId, params={}) {
|
||||
try {
|
||||
const msgTemplate = Translations.get(msgId);
|
||||
if ( ! msgTemplate ) {
|
||||
return "Invalid message id: " + msgId;
|
||||
}
|
||||
|
||||
return mustache.render(msgTemplate, params);
|
||||
} catch (err) {
|
||||
return "ERROR: " + err;
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_UI_LANGUAGE = 'en';
|
||||
|
||||
Translations.load(DEFAULT_UI_LANGUAGE, /*asDefault=*/true);
|
||||
|
||||
function getUserLanguage() {
|
||||
return new URLSearchParams(window.location.search).get('userlang')
|
||||
|| window.localStorage.getItem('userlang')
|
||||
|| viewerSettings.defaultUserLanguage
|
||||
|| DEFAULT_UI_LANGUAGE;
|
||||
}
|
||||
|
||||
function setUserLanguage(lang, callback) {
|
||||
window.localStorage.setItem('userlang', lang);
|
||||
Translations.load(lang);
|
||||
Translations.whenReady(callback);
|
||||
}
|
||||
|
||||
function createModalUILanguageSelector() {
|
||||
document.body.insertAdjacentHTML('beforeend',
|
||||
`<div id="uiLanguageSelector" class="modal-wrapper">
|
||||
<div class="modal">
|
||||
<div class="modal-heading">
|
||||
<div class="modal-title">
|
||||
<div>
|
||||
Select UI language
|
||||
</div>
|
||||
</div>
|
||||
<div onclick="window.modalUILanguageSelector.close()" class="modal-close-button">
|
||||
<div>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="14" viewBox="0 0 14 14" fill="none">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.7071 1.70711C14.0976 1.31658 14.0976
|
||||
0.683417 13.7071 0.292893C13.3166 -0.0976311 12.6834 -0.0976311 12.2929 0.292893L7 5.58579L1.70711
|
||||
0.292893C1.31658 -0.0976311 0.683417 -0.0976311 0.292893 0.292893C-0.0976311 0.683417
|
||||
-0.0976311 1.31658 0.292893 1.70711L5.58579 7L0.292893 12.2929C-0.0976311 12.6834
|
||||
-0.0976311 13.3166 0.292893 13.7071C0.683417 14.0976 1.31658 14.0976 1.70711 13.7071L7
|
||||
8.41421L12.2929 13.7071C12.6834 14.0976 13.3166 14.0976 13.7071 13.7071C14.0976 13.3166
|
||||
14.0976 12.6834 13.7071 12.2929L8.41421 7L13.7071 1.70711Z" fill="black" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-content">
|
||||
<select id="ui_language"></select>
|
||||
</div>
|
||||
</div>
|
||||
</div>`);
|
||||
|
||||
window.modalUILanguageSelector = {
|
||||
show: () => {
|
||||
document.getElementById('uiLanguageSelector').style.display = 'flex';
|
||||
},
|
||||
|
||||
close: () => {
|
||||
document.getElementById('uiLanguageSelector').style.display = 'none';
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function initUILanguageSelector(activeLanguage, languageChangeCallback) {
|
||||
if ( document.getElementById("ui_language") == null ) {
|
||||
createModalUILanguageSelector();
|
||||
}
|
||||
const languageSelector = document.getElementById("ui_language");
|
||||
for (const lang of uiLanguages ) {
|
||||
const lang_name = Object.getOwnPropertyNames(lang)[0];
|
||||
const lang_code = lang[lang_name];
|
||||
const is_selected = lang_code == activeLanguage;
|
||||
languageSelector.appendChild(new Option(lang_name, lang_code, is_selected, is_selected));
|
||||
}
|
||||
languageSelector.onchange = languageChangeCallback;
|
||||
}
|
||||
|
||||
window.$t = $t;
|
||||
window.getUserLanguage = getUserLanguage;
|
||||
window.setUserLanguage = setUserLanguage;
|
||||
window.initUILanguageSelector = initUILanguageSelector;
|
||||
static/skin/i18n/ar.json (new file, 33 lines)
@@ -0,0 +1,33 @@
{
"@metadata": {
"authors": [
"Asma",
"Ravan",
"محمد أحمد عبد الفتاح"
]
},
"name": "الإنجليزية",
"no-such-book": "لا يوجد مثل هذا الكتاب: {{BOOK_NAME}}",
"too-many-books": "طلب العديد من الكتب {{NB_BOOKS}} حيث الحد {{LIMIT}}",
"no-book-found": "لا يوجد كتاب يطابق معايير الاختيار",
"url-not-found": "لم يتم العثور على عنوان URL المطلوب \"{{url}}\" على هذا الخادم.",
"suggest-search": "قم بإجراء بحث عن النص الكامل لـ <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a>",
"random-article-failure": "مع الأسف! فشل اختيار مقال عشوائي :(",
"invalid-raw-data-type": "{{DATATYPE}} ليس طلبًا صالحًا للمحتوى الأولي.",
"no-value-for-arg": "لم يتم تقديم قيمة للوسيطة {{ARGUMENT}}",
"no-query": "لم يتم تقديم ملخص.",
"raw-entry-not-found": "لا يمكن العثور على إدخال {{DATATYPE}} {{ENTRY}}",
"400-page-title": "طلب غير صالح",
"400-page-heading": "طلب غير صالح",
"404-page-title": "المحتوى غير موجود",
"404-page-heading": "لم يتم العثور عليه",
"500-page-title": "خطأ في الخادم الداخلي",
"500-page-heading": "خطأ في الخادم الداخلي",
"fulltext-search-unavailable": "البحث عن النص الكامل غير متاح",
"no-search-results": "محرك البحث عن النص الكامل غير متاح لهذا المحتوى.",
"library-button-text": "اذهب لصفحة الترحيب",
"home-button-text": "انتقل إلى الصفحة الرئيسية لـ \"{{BOOK_TITLE}}\"",
"random-page-button-text": "اذهب إلى صفحة عشوائية",
"searchbox-tooltip": "بحث \"{{BOOK_TITLE}}\"",
"confusion-of-tongues": "قد يشارك في البحث كتابان أو أكثر بلغات مختلفة، مما قد يؤدي إلى نتائج محيرة."
}
@@ -10,5 +10,9 @@
"500-page-heading": "অভ্যন্তরীণ সার্ভার ত্রুটি",
"library-button-text": "স্বাগত পাতায় চলুন",
"home-button-text": "'{{BOOK_TITLE}}'-এর প্রধান পাতায় চলুন",
"searchbox-tooltip": "'{{BOOK_TITLE}}' অনুসন্ধান করুন"
"searchbox-tooltip": "'{{BOOK_TITLE}}' অনুসন্ধান করুন",
"search": "অনুসন্ধান",
"welcome-to-kiwix-server": "কিউইক্স সার্ভারে স্বাগতম",
"download-links-title": "বই ডাউনলোড করুন",
"preview-book": "প্রাকদর্শন"
}
static/skin/i18n/de.json (new file, 57 lines)
@@ -0,0 +1,57 @@
{
"@metadata": {
"authors": [
"IMayBeABitShy",
"Lucas Werkmeister",
"ThisCarthing"
]
},
"name": "Deutsch",
"suggest-full-text-search": "enthält '{{{SEARCH_TERMS}}}'...",
"no-such-book": "Buch nicht gefunden: {{BOOK_NAME}}",
"too-many-books": "Zu viele Bücher angefragt ({{NB_BOOKS}}), die Beschränkung liegt bei {{LIMIT}}",
"no-book-found": "Keine Bücher entsprechen den Auswahlkriterien",
"url-not-found": "Die angeforderte URL \"{{url}}\" konnte auf diesem Server nicht gefunden werden.",
"suggest-search": "Führe eine Volltextsuche nach <a href=\"{{{SEARCH_URL}}}\">{{PATTERN}}</a> durch",
"random-article-failure": "Hoppla! Konnte keinen zufälligen Artikel auswählen :(",
"invalid-raw-data-type": "{{DATATYPE}} ist keine gültige Anfrage für unverarbeiteten Inhalt",
"no-value-for-arg": "Kein Wert für den Parameter {{ARGUMENT}} gegeben",
"no-query": "Keine Suchanfrage gegeben.",
"raw-entry-not-found": "Eintrag {{ENTRY}} des Typs {{DATATYPE}} konnte nicht gefunden werden.",
"400-page-title": "Ungültige Anfrage",
"400-page-heading": "Ungültige Anfrage",
"404-page-title": "Inhalt nicht gefunden",
"404-page-heading": "Nicht gefunden",
"500-page-title": "Interner Server-Fehler",
"500-page-heading": "Interner Server-Fehler",
"fulltext-search-unavailable": "Die Volltestsuche steht nicht zur Verfügung.",
"no-search-results": "Die Volltextsuche ist für diesen Inhalt nicht verfügbar.",
"library-button-text": "Zur Willkommensseite gehen",
"home-button-text": "Zur Hauptseite von '{{BOOK_TITLE}}' gehen",
"random-page-button-text": "Zu einer zufällig ausgewählten Seite gehen",
"searchbox-tooltip": "Nach '{{BOOK_TITLE}}' suchen",
"confusion-of-tongues": "Zwei oder mehr Bücher unterschiedlicher Sprachen werden durchsucht, was zu unübersichtlichen Ergebnissen führen kann.",
"welcome-page-overzealous-filter": "Keine Ergebnisse gefunden. Möchten Sie den <a href=\"{{URL}}\">Filter zurücksetzen</a>?",
"powered-by-kiwix-html": "Angetrieben durch <a href=\"https://kiwix.org\">Kiwix</a>",
"search": "Suchen",
"book-filtering-all-categories": "Alle Kategorien",
"book-filtering-all-languages": "Alle Sprachen",
"count-of-matching-books": "{{COUNT}} Bücher",
"download": "Herunterladen",
"direct-download-link-text": "Direkt",
"direct-download-alt-text": "direkt herunterladen",
"hash-download-link-text": "Sha256 Hash",
"hash-download-alt-text": "Hash herunterladen",
"magnet-link-text": "Magnet Link",
"magnet-alt-text": "Magnet Link herunterladen",
"torrent-download-link-text": "Torrent-Datei",
"torrent-download-alt-text": "Torrent herunterladen",
"library-opds-feed-all-entries": "ODPS Feed der Bibliothek - Alle Einträge",
"filter-by-tag": "Nach Tag \"{{TAG}}\" filtern",
"stop-filtering-by-tag": "Filterung nach Tag \"{{TAG}}\" aufheben",
"library-opds-feed-parameterised": "ODPS Feed der Bibliothek - Einträge mit {{#LANG}\nSprache {{LANG}} {{/LANG}}{{#CATEGORY}}\nKategorie: {{CATEGORY}} {{/CATEGORY}}{{#TAG}}\nTag: {{TAG}}{{/TAG}}{{#Q}}\nQuery: {{Q}} {{/Q}}",
"welcome-to-kiwix-server": "Wilkommen beim Kiwix Server",
"download-links-heading": "Download Links für <b><i>{{BOOK_TITLE}}</i></b>",
"download-links-title": "Buch herunterladen",
"preview-book": "Vorschau"
}
static/skin/i18n/dga.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
"@metadata": {
"authors": [
"Alhaji Yakubu",
"Amire80"
]
},
"welcome-page-overzealous-filter": "Duoro kyebe. <a href=\"{{URL}}\">E na boɔra ka fo</a>?",
"search": "Bo",
"book-filtering-all-categories": "Zagre zaa",
"book-filtering-all-languages": "Kɔkɔrɛɛ zaa",
"count-of-matching-books": "{{COUNT}} gama",
"download": "Tagebo",
"direct-download-link-text": "Toribu",
"direct-download-alt-text": "Toribu tagebo",
"hash-download-alt-text": "Tage bonmannaa",
"magnet-link-text": "Kurimaraa sobie",
"magnet-alt-text": "Tage kurimaraa",
"filter-by-tag": "Guy yi kpuli {{TAG}}",
"stop-filtering-by-tag": "Bare gyɛɛbo kpuli {{TAG}}"
}
static/skin/i18n/el.json (new file, 22 lines)
@@ -0,0 +1,22 @@
{
"@metadata": {
"authors": [
"Kelson",
"Norhorn"
]
},
"welcome-page-overzealous-filter": "Κανένα αποτέλεσμα. Θέλετε να <a href=\"{{URL}}\">επαναφέρετε το φίλτρο</a>;",
"powered-by-kiwix-html": "Με την υποστήριξη by <a href=\"https://kiwix.org\">Kiwix</a>",
"search": "Αναζήτηση",
"book-filtering-all-categories": "Όλες οι κατηγορίες",
"book-filtering-all-languages": "Όλες οι γλώσσες",
"count-of-matching-books": "{{COUNT}} βιβλίο(α)",
"download": "Λήψη",
"direct-download-link-text": "Απευθείας",
"direct-download-alt-text": "άμεση λήψη",
"hash-download-alt-text": "λήψη αναγνωριστικού",
"torrent-download-link-text": "Αρχείο torrent",
"torrent-download-alt-text": "λήψη torrent",
"filter-by-tag": "Φίλτρο ανά ετικέτα \"{{TAG}}\"",
"stop-filtering-by-tag": "Διακοπή φίλτρου ανά ετικέτα \"{{TAG}}\""
}
Some files were not shown because too many files have changed in this diff.