mirror of
https://github.com/Screenly/Anthias.git
synced 2025-12-23 22:38:05 -05:00
Migrate web server back-end from Flask to Django (#2040)
This commit is contained in:
2
.flake8
2
.flake8
@@ -1,3 +1,5 @@
|
||||
[flake8]
|
||||
exclude =
|
||||
anthias_app/migrations/*.py
|
||||
per-file-ignores =
|
||||
bin/migrate.py: E501
|
||||
|
||||
9
.github/workflows/docker-test.yaml
vendored
9
.github/workflows/docker-test.yaml
vendored
@@ -60,11 +60,14 @@ jobs:
|
||||
run: |
|
||||
docker compose -f docker-compose.test.yml up -d
|
||||
|
||||
- name: Run the tests inside the container
|
||||
shell: 'script -q -e -c "bash {0}"'
|
||||
- name: Run the unit tests inside the container
|
||||
run: |
|
||||
docker compose -f docker-compose.test.yml exec anthias-test ./manage.py test --noinput --parallel --exclude-tag=integration
|
||||
|
||||
- name: Run the integration tests inside the container
|
||||
run: |
|
||||
docker compose -f docker-compose.test.yml exec anthias-test bash ./bin/prepare_test_environment.sh -s
|
||||
docker compose -f docker-compose.test.yml exec anthias-test nose2 -v
|
||||
docker compose -f docker-compose.test.yml exec anthias-test ./manage.py test --noinput --tag=integration
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@v3
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -52,3 +52,7 @@ docker/Dockerfile.test
|
||||
docker-compose.yml
|
||||
|
||||
balena-deploy/
|
||||
db.sqlite3
|
||||
|
||||
# Django
|
||||
staticfiles/
|
||||
|
||||
3
anthias_app/admin.py
Normal file
3
anthias_app/admin.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from django.contrib import admin # noqa F401
|
||||
|
||||
# Register your models here.
|
||||
6
anthias_app/apps.py
Normal file
6
anthias_app/apps.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AnthiasAppConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'anthias_app'
|
||||
@@ -1,26 +1,27 @@
|
||||
import uuid
|
||||
import yaml
|
||||
from datetime import datetime
|
||||
from flask import render_template
|
||||
from os import getenv, path
|
||||
|
||||
from lib import assets_helper, db
|
||||
from django.shortcuts import render
|
||||
from django.utils import timezone
|
||||
from lib.github import is_up_to_date
|
||||
from lib.utils import get_video_duration
|
||||
from os import getenv, path
|
||||
from anthias_app.models import Asset
|
||||
from settings import settings
|
||||
|
||||
|
||||
def template(template_name, **context):
|
||||
def template(request, template_name, context):
|
||||
"""
|
||||
This is a helper function that is used to render a template
|
||||
with some global context. This is used to avoid having to
|
||||
repeat code in other views.
|
||||
"""
|
||||
This is a template response wrapper that shares the
|
||||
same function signature as Flask's render_template() method
|
||||
but also injects some global context."""
|
||||
|
||||
# Add global contexts
|
||||
context['date_format'] = settings['date_format']
|
||||
context['default_duration'] = settings['default_duration']
|
||||
context['default_streaming_duration'] = (
|
||||
settings['default_streaming_duration'])
|
||||
settings['default_streaming_duration']
|
||||
)
|
||||
context['template_settings'] = {
|
||||
'imports': ['from lib.utils import template_handle_unicode'],
|
||||
'default_filters': ['template_handle_unicode'],
|
||||
@@ -28,7 +29,7 @@ def template(template_name, **context):
|
||||
context['up_to_date'] = is_up_to_date()
|
||||
context['use_24_hour_clock'] = settings['use_24_hour_clock']
|
||||
|
||||
return render_template(template_name, context=context)
|
||||
return render(request, template_name, context)
|
||||
|
||||
|
||||
def prepare_default_asset(**kwargs):
|
||||
@@ -46,7 +47,6 @@ def prepare_default_asset(**kwargs):
|
||||
'asset_id': asset_id,
|
||||
'duration': duration,
|
||||
'end_date': kwargs['end_date'],
|
||||
'is_active': 1,
|
||||
'is_enabled': True,
|
||||
'is_processing': 0,
|
||||
'mimetype': kwargs['mimetype'],
|
||||
@@ -62,7 +62,7 @@ def prepare_default_asset(**kwargs):
|
||||
def add_default_assets():
|
||||
settings.load()
|
||||
|
||||
datetime_now = datetime.now()
|
||||
datetime_now = timezone.now()
|
||||
default_asset_settings = {
|
||||
'start_date': datetime_now,
|
||||
'end_date': datetime_now.replace(year=datetime_now.year + 6),
|
||||
@@ -70,25 +70,28 @@ def add_default_assets():
|
||||
}
|
||||
|
||||
default_assets_yaml = path.join(
|
||||
getenv('HOME'), '.screenly/default_assets.yml')
|
||||
getenv('HOME'),
|
||||
'.screenly/default_assets.yml',
|
||||
)
|
||||
|
||||
with open(default_assets_yaml, 'r') as yaml_file:
|
||||
default_assets = yaml.safe_load(yaml_file).get('assets')
|
||||
with db.conn(settings['database']) as conn:
|
||||
for default_asset in default_assets:
|
||||
default_asset_settings.update({
|
||||
'name': default_asset.get('name'),
|
||||
'uri': default_asset.get('uri'),
|
||||
'mimetype': default_asset.get('mimetype')
|
||||
})
|
||||
asset = prepare_default_asset(**default_asset_settings)
|
||||
if asset:
|
||||
assets_helper.create(conn, asset)
|
||||
|
||||
for default_asset in default_assets:
|
||||
default_asset_settings.update({
|
||||
'name': default_asset.get('name'),
|
||||
'uri': default_asset.get('uri'),
|
||||
'mimetype': default_asset.get('mimetype')
|
||||
})
|
||||
asset = prepare_default_asset(**default_asset_settings)
|
||||
|
||||
if asset:
|
||||
Asset.objects.create(**asset)
|
||||
|
||||
|
||||
def remove_default_assets():
|
||||
settings.load()
|
||||
with db.conn(settings['database']) as conn:
|
||||
for asset in assets_helper.read(conn):
|
||||
if asset['asset_id'].startswith('default_'):
|
||||
assets_helper.delete(conn, asset['asset_id'])
|
||||
|
||||
for asset in Asset.objects.all():
|
||||
if asset.asset_id.startswith('default_'):
|
||||
asset.delete()
|
||||
|
||||
36
anthias_app/migrations/0001_initial.py
Normal file
36
anthias_app/migrations/0001_initial.py
Normal file
@@ -0,0 +1,36 @@
|
||||
# Generated by Django 3.2.18 on 2024-08-23 18:45
|
||||
|
||||
import anthias_app.models
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Asset',
|
||||
fields=[
|
||||
('asset_id', models.TextField(default=anthias_app.models.generate_asset_id, editable=False, primary_key=True, serialize=False)),
|
||||
('name', models.TextField(blank=True, null=True)),
|
||||
('uri', models.TextField(blank=True, null=True)),
|
||||
('md5', models.TextField(blank=True, null=True)),
|
||||
('start_date', models.DateTimeField(blank=True, null=True)),
|
||||
('end_date', models.DateTimeField(blank=True, null=True)),
|
||||
('duration', models.TextField(blank=True, null=True)),
|
||||
('mimetype', models.TextField(blank=True, null=True)),
|
||||
('is_enabled', models.IntegerField(default=0)),
|
||||
('is_processing', models.IntegerField(default=0)),
|
||||
('nocache', models.IntegerField(default=0)),
|
||||
('play_order', models.IntegerField(default=0)),
|
||||
('skip_asset_check', models.IntegerField(default=0)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'assets',
|
||||
},
|
||||
),
|
||||
]
|
||||
38
anthias_app/migrations/0002_auto_20241015_1524.py
Normal file
38
anthias_app/migrations/0002_auto_20241015_1524.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# Generated by Django 3.2.18 on 2024-10-15 15:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('anthias_app', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='asset',
|
||||
name='duration',
|
||||
field=models.BigIntegerField(blank=True, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asset',
|
||||
name='is_enabled',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asset',
|
||||
name='is_processing',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asset',
|
||||
name='nocache',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asset',
|
||||
name='skip_asset_check',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
0
anthias_app/migrations/__init__.py
Normal file
0
anthias_app/migrations/__init__.py
Normal file
36
anthias_app/models.py
Normal file
36
anthias_app/models.py
Normal file
@@ -0,0 +1,36 @@
|
||||
import uuid
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
|
||||
def generate_asset_id():
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
class Asset(models.Model):
|
||||
asset_id = models.TextField(
|
||||
primary_key=True, default=generate_asset_id, editable=False)
|
||||
name = models.TextField(blank=True, null=True)
|
||||
uri = models.TextField(blank=True, null=True)
|
||||
md5 = models.TextField(blank=True, null=True)
|
||||
start_date = models.DateTimeField(blank=True, null=True)
|
||||
end_date = models.DateTimeField(blank=True, null=True)
|
||||
duration = models.BigIntegerField(blank=True, null=True)
|
||||
mimetype = models.TextField(blank=True, null=True)
|
||||
is_enabled = models.BooleanField(default=False)
|
||||
is_processing = models.BooleanField(default=False)
|
||||
nocache = models.BooleanField(default=False)
|
||||
play_order = models.IntegerField(default=0)
|
||||
skip_asset_check = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
db_table = 'assets'
|
||||
|
||||
def is_active(self):
|
||||
if self.is_enabled and self.start_date and self.end_date:
|
||||
current_time = timezone.now()
|
||||
return (
|
||||
self.start_date < current_time < self.end_date
|
||||
)
|
||||
|
||||
return False
|
||||
3
anthias_app/tests.py
Normal file
3
anthias_app/tests.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from django.test import TestCase # noqa F401
|
||||
|
||||
# Create your tests here.
|
||||
12
anthias_app/urls.py
Normal file
12
anthias_app/urls.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from django.urls import path
|
||||
from . import views
|
||||
|
||||
app_name = 'anthias_app'
|
||||
|
||||
urlpatterns = [
|
||||
path('', views.index, name='index'),
|
||||
path('settings', views.settings_page, name='settings'),
|
||||
path('system-info', views.system_info, name='system_info'),
|
||||
path('integrations', views.integrations, name='integrations'),
|
||||
path('splash-page', views.splash_page, name='splash_page'),
|
||||
]
|
||||
@@ -1,18 +1,18 @@
|
||||
import ipaddress
|
||||
import logging
|
||||
import psutil
|
||||
from datetime import timedelta
|
||||
from flask import Blueprint, request
|
||||
from django.views.decorators.http import require_http_methods
|
||||
from hurry.filesize import size
|
||||
from os import getenv, statvfs
|
||||
from platform import machine
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from anthias_app.helpers import (
|
||||
add_default_assets,
|
||||
remove_default_assets,
|
||||
template,
|
||||
from os import (
|
||||
getenv,
|
||||
statvfs,
|
||||
)
|
||||
from platform import machine
|
||||
from settings import (
|
||||
CONFIGURABLE_SETTINGS,
|
||||
DEFAULTS,
|
||||
settings,
|
||||
ZmqPublisher,
|
||||
)
|
||||
from urllib.parse import urlparse
|
||||
from lib import (
|
||||
diagnostics,
|
||||
device_helper,
|
||||
@@ -20,31 +20,31 @@ from lib import (
|
||||
from lib.auth import authorized
|
||||
from lib.utils import (
|
||||
connect_to_redis,
|
||||
get_balena_supervisor_version,
|
||||
get_node_ip,
|
||||
get_node_mac_address,
|
||||
is_balena_app,
|
||||
is_demo_node,
|
||||
is_docker,
|
||||
)
|
||||
from settings import (
|
||||
CONFIGURABLE_SETTINGS,
|
||||
DEFAULTS,
|
||||
settings,
|
||||
ZmqPublisher,
|
||||
from .helpers import (
|
||||
add_default_assets,
|
||||
remove_default_assets,
|
||||
template,
|
||||
)
|
||||
import ipaddress
|
||||
import psutil
|
||||
|
||||
|
||||
r = connect_to_redis()
|
||||
anthias_app_bp = Blueprint('anthias_app', __name__)
|
||||
|
||||
|
||||
@anthias_app_bp.route('/')
|
||||
@authorized
|
||||
def index():
|
||||
@require_http_methods(["GET"])
|
||||
def index(request):
|
||||
player_name = settings['player_name']
|
||||
my_ip = urlparse(request.host_url).hostname
|
||||
my_ip = urlparse(request.build_absolute_uri()).hostname
|
||||
is_demo = is_demo_node()
|
||||
balena_uuid = getenv("BALENA_APP_UUID", None)
|
||||
balena_device_uuid = getenv("BALENA_DEVICE_UUID", None)
|
||||
|
||||
ws_addresses = []
|
||||
|
||||
@@ -53,30 +53,28 @@ def index():
|
||||
else:
|
||||
ws_addresses.append('ws://' + my_ip + '/ws/')
|
||||
|
||||
if balena_uuid:
|
||||
if balena_device_uuid:
|
||||
ws_addresses.append(
|
||||
'wss://{}.balena-devices.com/ws/'.format(balena_uuid))
|
||||
'wss://{}.balena-devices.com/ws/'.format(balena_device_uuid)
|
||||
)
|
||||
|
||||
return template(
|
||||
'index.html',
|
||||
ws_addresses=ws_addresses,
|
||||
player_name=player_name,
|
||||
is_demo=is_demo,
|
||||
is_balena=is_balena_app(),
|
||||
)
|
||||
return template(request, 'index.html', {
|
||||
'ws_addresses': ws_addresses,
|
||||
'player_name': player_name,
|
||||
'is_demo': is_demo,
|
||||
'is_balena': is_balena_app(),
|
||||
})
|
||||
|
||||
|
||||
@anthias_app_bp.route('/settings', methods=["GET", "POST"])
|
||||
@authorized
|
||||
def settings_page():
|
||||
@require_http_methods(["GET", "POST"])
|
||||
def settings_page(request):
|
||||
context = {'flash': None}
|
||||
|
||||
if request.method == "POST":
|
||||
if request.method == 'POST':
|
||||
try:
|
||||
# Put some request variables in local variables to make them
|
||||
# easier to read.
|
||||
current_pass = request.form.get('current-password', '')
|
||||
auth_backend = request.form.get('auth_backend', '')
|
||||
current_pass = request.POST.get('current-password', '')
|
||||
auth_backend = request.POST.get('auth_backend', '')
|
||||
|
||||
if (
|
||||
auth_backend != settings['auth_backend']
|
||||
@@ -100,16 +98,19 @@ def settings_page():
|
||||
.check_password(current_pass)
|
||||
)
|
||||
next_auth_backend = settings.auth_backends[auth_backend]
|
||||
next_auth_backend.update_settings(current_pass_correct)
|
||||
next_auth_backend.update_settings(request, current_pass_correct)
|
||||
settings['auth_backend'] = auth_backend
|
||||
|
||||
for field, default in list(CONFIGURABLE_SETTINGS.items()):
|
||||
value = request.form.get(field, default)
|
||||
value = request.POST.get(field, default)
|
||||
|
||||
if not value and field in [
|
||||
'default_duration',
|
||||
'default_streaming_duration',
|
||||
]:
|
||||
if (
|
||||
not value
|
||||
and field in [
|
||||
'default_duration',
|
||||
'default_streaming_duration',
|
||||
]
|
||||
):
|
||||
value = str(0)
|
||||
if isinstance(default, bool):
|
||||
value = value == 'on'
|
||||
@@ -155,14 +156,10 @@ def settings_page():
|
||||
'selected'
|
||||
if settings['auth_backend'] == backend.name
|
||||
else ''
|
||||
)
|
||||
),
|
||||
})
|
||||
|
||||
try:
|
||||
ip_addresses = get_node_ip().split()
|
||||
except Exception as error:
|
||||
logging.warning(f"Error getting IP addresses: {error}")
|
||||
ip_addresses = ['IP_ADDRESS']
|
||||
ip_addresses = get_node_ip().split()
|
||||
|
||||
context.update({
|
||||
'user': settings['user'],
|
||||
@@ -175,12 +172,12 @@ def settings_page():
|
||||
'host_user': getenv('HOST_USER')
|
||||
})
|
||||
|
||||
return template('settings.html', **context)
|
||||
return template(request, 'settings.html', context)
|
||||
|
||||
|
||||
@anthias_app_bp.route('/system-info')
|
||||
@authorized
|
||||
def system_info():
|
||||
@require_http_methods(["GET"])
|
||||
def system_info(request):
|
||||
loadavg = diagnostics.get_load_avg()['15 min']
|
||||
display_power = r.get('display_power')
|
||||
|
||||
@@ -210,49 +207,53 @@ def system_info():
|
||||
if device_model is None and machine() == 'x86_64':
|
||||
device_model = 'Generic x86_64 Device'
|
||||
|
||||
version = '{}@{}'.format(
|
||||
anthias_version = '{}@{}'.format(
|
||||
diagnostics.get_git_branch(),
|
||||
diagnostics.get_git_short_hash()
|
||||
)
|
||||
|
||||
return template(
|
||||
'system-info.html',
|
||||
player_name=player_name,
|
||||
loadavg=loadavg,
|
||||
free_space=free_space,
|
||||
uptime=system_uptime,
|
||||
memory=memory,
|
||||
display_power=display_power,
|
||||
device_model=device_model,
|
||||
version=version,
|
||||
mac_address=get_node_mac_address(),
|
||||
is_balena=is_balena_app(),
|
||||
)
|
||||
context = {
|
||||
'player_name': player_name,
|
||||
'loadavg': loadavg,
|
||||
'free_space': free_space,
|
||||
'uptime': {
|
||||
'days': system_uptime.days,
|
||||
'hours': round(system_uptime.seconds / 3600, 2),
|
||||
},
|
||||
'memory': memory,
|
||||
'display_power': display_power,
|
||||
'device_model': device_model,
|
||||
'anthias_version': anthias_version,
|
||||
'mac_address': get_node_mac_address(),
|
||||
'is_balena': is_balena_app(),
|
||||
}
|
||||
|
||||
return template(request, 'system-info.html', context)
|
||||
|
||||
|
||||
@anthias_app_bp.route('/integrations')
|
||||
@authorized
|
||||
def integrations():
|
||||
|
||||
@require_http_methods(["GET"])
|
||||
def integrations(request):
|
||||
context = {
|
||||
'player_name': settings['player_name'],
|
||||
'is_balena': is_balena_app(),
|
||||
}
|
||||
|
||||
if context['is_balena']:
|
||||
context['balena_device_id'] = getenv('BALENA_DEVICE_UUID')
|
||||
context['balena_app_id'] = getenv('BALENA_APP_ID')
|
||||
context['balena_app_name'] = getenv('BALENA_APP_NAME')
|
||||
context['balena_supervisor_version'] = get_balena_supervisor_version()
|
||||
context['balena_host_os_version'] = getenv('BALENA_HOST_OS_VERSION')
|
||||
context['balena_device_name_at_init'] = getenv(
|
||||
'BALENA_DEVICE_NAME_AT_INIT')
|
||||
context.update({
|
||||
'balena_device_id': getenv('BALENA_DEVICE_UUID'),
|
||||
'balena_app_id': getenv('BALENA_APP_ID'),
|
||||
'balena_app_name': getenv('BALENA_APP_NAME'),
|
||||
'balena_supervisor_version': getenv('BALENA_SUPERVISOR_VERSION'),
|
||||
'balena_host_os_version': getenv('BALENA_HOST_OS_VERSION'),
|
||||
'balena_device_name_at_init': getenv('BALENA_DEVICE_NAME_AT_INIT'),
|
||||
})
|
||||
|
||||
return template('integrations.html', **context)
|
||||
return template(request, 'integrations.html', context)
|
||||
|
||||
|
||||
@anthias_app_bp.route('/splash-page')
|
||||
def splash_page():
|
||||
@require_http_methods(["GET"])
|
||||
def splash_page(request):
|
||||
ip_addresses = []
|
||||
|
||||
for ip_address in get_node_ip().split():
|
||||
@@ -263,4 +264,6 @@ def splash_page():
|
||||
else:
|
||||
ip_addresses.append(f'http://{ip_address}')
|
||||
|
||||
return template('splash-page.html', ip_addresses=ip_addresses)
|
||||
return template(request, 'splash-page.html', {
|
||||
'ip_addresses': ip_addresses
|
||||
})
|
||||
|
||||
0
anthias_django/__init__.py
Normal file
0
anthias_django/__init__.py
Normal file
16
anthias_django/asgi.py
Normal file
16
anthias_django/asgi.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
ASGI config for anthias_django project.
|
||||
|
||||
It exposes the ASGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.asgi import get_asgi_application
|
||||
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings')
|
||||
|
||||
application = get_asgi_application()
|
||||
183
anthias_django/settings.py
Normal file
183
anthias_django/settings.py
Normal file
@@ -0,0 +1,183 @@
|
||||
"""
|
||||
Django settings for anthias_django project.
|
||||
|
||||
Generated by 'django-admin startproject' using Django 3.2.18.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/3.2/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/3.2/ref/settings/
|
||||
"""
|
||||
|
||||
import pytz
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
from os import getenv
|
||||
|
||||
from settings import settings as device_settings
|
||||
|
||||
# Build paths inside the project like this: BASE_DIR / 'subdir'.
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
|
||||
# Quick-start development settings - unsuitable for production
|
||||
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
|
||||
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = getenv('ENVIRONMENT', 'production') in ['development', 'test']
|
||||
|
||||
if not DEBUG:
|
||||
if not device_settings.get('django_secret_key'):
|
||||
# Modify the generated so that string interpolation
|
||||
# errors can be avoided.
|
||||
secret_key = secrets.token_urlsafe(50)
|
||||
device_settings['django_secret_key'] = secret_key
|
||||
device_settings.save()
|
||||
|
||||
SECRET_KEY = device_settings.get('django_secret_key')
|
||||
else:
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = 'django-insecure-7rz*$)g6dk&=h-3imq2xw*iu!zuhfb&w6v482_vs!w@4_gha=j' # noqa: E501
|
||||
|
||||
# @TODO: Resolve hostnames and IP addresses dynamically.
|
||||
ALLOWED_HOSTS = [
|
||||
'127.0.0.1',
|
||||
'localhost',
|
||||
'anthias',
|
||||
'anthias-server'
|
||||
]
|
||||
|
||||
|
||||
# Application definition
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'anthias_app.apps.AnthiasAppConfig',
|
||||
'drf_spectacular',
|
||||
'rest_framework',
|
||||
'api.apps.ApiConfig',
|
||||
'django.contrib.admin',
|
||||
'django.contrib.auth',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.sessions',
|
||||
'django.contrib.messages',
|
||||
'django.contrib.staticfiles',
|
||||
'dbbackup',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
'django.middleware.csrf.CsrfViewMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'anthias_django.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
BASE_DIR / 'templates',
|
||||
],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
'django.contrib.auth.context_processors.auth',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'anthias_django.wsgi.application'
|
||||
|
||||
|
||||
# Database
|
||||
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': (
|
||||
'/data/.screenly/test.db' if getenv('ENVIRONMENT') == 'test'
|
||||
else '/data/.screenly/screenly.db'
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# Password validation
|
||||
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
|
||||
AUTH_MODULE_PREFIX = 'django.contrib.auth.password_validation'
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
'NAME': f'{AUTH_MODULE_PREFIX}.UserAttributeSimilarityValidator',
|
||||
},
|
||||
{
|
||||
'NAME': f'{AUTH_MODULE_PREFIX}.MinimumLengthValidator',
|
||||
},
|
||||
{
|
||||
'NAME': f'{AUTH_MODULE_PREFIX}.CommonPasswordValidator',
|
||||
},
|
||||
{
|
||||
'NAME': f'{AUTH_MODULE_PREFIX}.NumericPasswordValidator',
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/3.2/topics/i18n/
|
||||
|
||||
LANGUAGE_CODE = 'en-us'
|
||||
|
||||
USE_I18N = True
|
||||
|
||||
USE_L10N = True
|
||||
|
||||
USE_TZ = True
|
||||
|
||||
try:
|
||||
with open('/etc/timezone', 'r') as f:
|
||||
TIME_ZONE = f.read().strip()
|
||||
pytz.timezone(TIME_ZONE) # Checks if the timezone is valid.
|
||||
except (pytz.exceptions.UnknownTimeZoneError, FileNotFoundError):
|
||||
TIME_ZONE = 'UTC'
|
||||
|
||||
|
||||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/3.2/howto/static-files/
|
||||
|
||||
STATIC_URL = '/static/'
|
||||
STATICFILES_DIRS = [
|
||||
BASE_DIR / 'static',
|
||||
]
|
||||
STATIC_ROOT = '/data/screenly/staticfiles'
|
||||
|
||||
# Default primary key field type
|
||||
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
|
||||
|
||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
|
||||
'EXCEPTION_HANDLER': 'api.helpers.custom_exception_handler',
|
||||
# The project uses custom authentication classes,
|
||||
# so we need to disable the default ones.
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': []
|
||||
}
|
||||
|
||||
SPECTACULAR_SETTINGS = {
|
||||
'TITLE': 'Anthias API',
|
||||
'VERSION': '1.2.0',
|
||||
}
|
||||
|
||||
# `django-dbbackup` settings
|
||||
DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage'
|
||||
DBBACKUP_STORAGE_OPTIONS = {'location': '/data/.screenly/backups'}
|
||||
DBBACKUP_HOSTNAME = 'anthias'
|
||||
40
anthias_django/urls.py
Normal file
40
anthias_django/urls.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""anthias_django URL Configuration
|
||||
|
||||
The `urlpatterns` list routes URLs to views. For more information please see:
|
||||
https://docs.djangoproject.com/en/3.2/topics/http/urls/
|
||||
Examples:
|
||||
Function views
|
||||
1. Add an import: from my_app import views
|
||||
2. Add a URL to urlpatterns: path('', views.home, name='home')
|
||||
Class-based views
|
||||
1. Add an import: from other_app.views import Home
|
||||
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
|
||||
Including another URLconf
|
||||
1. Import the include() function: from django.urls import include, path
|
||||
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
|
||||
"""
|
||||
from django.contrib import admin
|
||||
from django.urls import include, path
|
||||
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView
|
||||
from lib.auth import authorized
|
||||
|
||||
|
||||
class APIDocView(SpectacularRedocView):
|
||||
@authorized
|
||||
def get(self, request, *args, **kwargs):
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
path('admin/', admin.site.urls),
|
||||
path('', include('anthias_app.urls')),
|
||||
path('api/', include('api.urls')),
|
||||
path('api/schema/', SpectacularAPIView.as_view(), name='schema'),
|
||||
path(
|
||||
'api/docs/',
|
||||
APIDocView.as_view(url_name='schema'),
|
||||
name='redoc'
|
||||
),
|
||||
]
|
||||
|
||||
# @TODO: Write custom 403 and 404 pages.
|
||||
16
anthias_django/wsgi.py
Normal file
16
anthias_django/wsgi.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
WSGI config for anthias_django project.
|
||||
|
||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings')
|
||||
|
||||
application = get_wsgi_application()
|
||||
3
api/admin.py
Normal file
3
api/admin.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from django.contrib import admin # noqa F401
|
||||
|
||||
# Register your models here.
|
||||
6
api/apps.py
Normal file
6
api/apps.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'api'
|
||||
429
api/helpers.py
429
api/helpers.py
@@ -1,383 +1,38 @@
|
||||
import json
|
||||
import traceback
|
||||
import uuid
|
||||
|
||||
from dateutil import parser as date_parser
|
||||
from flask import escape, make_response
|
||||
from functools import wraps
|
||||
from os import path, rename
|
||||
from past.builtins import basestring
|
||||
from flask_restful_swagger_2 import Schema
|
||||
from werkzeug.wrappers import Request
|
||||
|
||||
from lib import assets_helper, db
|
||||
from lib.utils import (
|
||||
download_video_from_youtube,
|
||||
json_dump,
|
||||
get_video_duration,
|
||||
validate_url,
|
||||
)
|
||||
from settings import settings
|
||||
from rest_framework import status
|
||||
from rest_framework.views import exception_handler
|
||||
from rest_framework.response import Response
|
||||
from anthias_app.models import Asset
|
||||
|
||||
|
||||
class AssetModel(Schema):
|
||||
type = 'object'
|
||||
properties = {
|
||||
'asset_id': {'type': 'string'},
|
||||
'name': {'type': 'string'},
|
||||
'uri': {'type': 'string'},
|
||||
'start_date': {
|
||||
'type': 'string',
|
||||
'format': 'date-time'
|
||||
},
|
||||
'end_date': {
|
||||
'type': 'string',
|
||||
'format': 'date-time'
|
||||
},
|
||||
'duration': {'type': 'string'},
|
||||
'mimetype': {'type': 'string'},
|
||||
'is_active': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'is_enabled': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'is_processing': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'nocache': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'play_order': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'skip_asset_check': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AssetRequestModel(Schema):
|
||||
type = 'object'
|
||||
properties = {
|
||||
'name': {'type': 'string'},
|
||||
'uri': {'type': 'string'},
|
||||
'start_date': {
|
||||
'type': 'string',
|
||||
'format': 'date-time'
|
||||
},
|
||||
'end_date': {
|
||||
'type': 'string',
|
||||
'format': 'date-time'
|
||||
},
|
||||
'duration': {'type': 'string'},
|
||||
'mimetype': {'type': 'string'},
|
||||
'is_enabled': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'nocache': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'play_order': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'skip_asset_check': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
}
|
||||
}
|
||||
required = [
|
||||
'name', 'uri', 'mimetype', 'is_enabled', 'start_date', 'end_date']
|
||||
|
||||
|
||||
class AssetContentModel(Schema):
|
||||
type = 'object'
|
||||
properties = {
|
||||
'type': {'type': 'string'},
|
||||
'url': {'type': 'string'},
|
||||
'filename': {'type': 'string'},
|
||||
'mimetype': {'type': 'string'},
|
||||
'content': {
|
||||
'type': 'string',
|
||||
'format': 'byte'
|
||||
},
|
||||
}
|
||||
required = ['type', 'filename']
|
||||
|
||||
|
||||
class AssetPropertiesModel(Schema):
|
||||
type = 'object'
|
||||
properties = {
|
||||
'name': {'type': 'string'},
|
||||
'start_date': {
|
||||
'type': 'string',
|
||||
'format': 'date-time'
|
||||
},
|
||||
'end_date': {
|
||||
'type': 'string',
|
||||
'format': 'date-time'
|
||||
},
|
||||
'duration': {'type': 'string'},
|
||||
'is_active': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'is_enabled': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'nocache': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'play_order': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
},
|
||||
'skip_asset_check': {
|
||||
'type': 'integer',
|
||||
'format': 'int64',
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def api_error(error):
    """Wrap *error* in a JSON body and return it as an HTTP 500 response."""
    body = json_dump({'error': error})
    return make_response(body, 500)
|
||||
|
||||
|
||||
def prepare_asset(request, unique_name=False):
    """Parse and validate a v1/v1.1-style asset request.

    Accepts either a raw JSON body or a form-encoded 'model' field holding
    the JSON payload, and returns a dict of asset fields ready to persist.

    :param request: WSGI request whose body/form carries the asset data.
    :param unique_name: when True, suffix the name with '-N' until it no
        longer collides with an existing asset name.
    :raises Exception: on missing fields, invalid file paths or URLs.
    """
    req = Request(request.environ)
    data = None

    # For backward compatibility: older clients POST the JSON payload in a
    # form field called 'model' instead of the request body.
    try:
        data = json.loads(req.data)
    except (ValueError, TypeError):
        data = json.loads(req.form['model'])

    def get(key):
        # Strip surrounding whitespace from string values, pass everything
        # else through. (The Python 2 'basestring' branch was dead code
        # under Python 3 and raised NameError; it has been removed.)
        val = data.get(key, '')
        if isinstance(val, str):
            return val.strip()
        else:
            return val

    if not all([get('name'), get('uri'), get('mimetype')]):
        raise Exception(
            "Not enough information provided. "
            "Please specify 'name', 'uri', and 'mimetype'."
        )

    name = escape(get('name'))
    if unique_name:
        with db.conn(settings['database']) as conn:
            names = assets_helper.get_names_of_assets(conn)
        if name in names:
            i = 1
            while True:
                new_name = '%s-%i' % (name, i)
                if new_name in names:
                    i += 1
                else:
                    name = new_name
                    break

    asset = {
        'name': name,
        'mimetype': get('mimetype'),
        'asset_id': get('asset_id'),
        'is_enabled': get('is_enabled'),
        'is_processing': get('is_processing'),
        'nocache': get('nocache'),
    }

    uri = escape(get('uri'))

    # Local paths must point at an existing file; anything else must be a
    # well-formed URL.
    if uri.startswith('/'):
        if not path.isfile(uri):
            raise Exception("Invalid file path. Failed to add asset.")
    else:
        if not validate_url(uri):
            raise Exception("Invalid URL. Failed to add asset.")

    if not asset['asset_id']:
        asset['asset_id'] = uuid.uuid4().hex
        # Freshly uploaded files are moved into the asset directory,
        # keyed by the new asset id.
        if uri.startswith('/'):
            rename(uri, path.join(settings['assetdir'], asset['asset_id']))
            uri = path.join(settings['assetdir'], asset['asset_id'])

    if 'youtube_asset' in asset['mimetype']:
        uri, asset['name'], asset['duration'] = download_video_from_youtube(
            uri, asset['asset_id'])
        asset['mimetype'] = 'video'
        asset['is_processing'] = 1

    asset['uri'] = uri

    # NOTE(review): a video with an explicit non-zero duration keeps
    # whatever duration the YouTube handler set (possibly none) — confirm
    # this is the intended historical behavior.
    if "video" in asset['mimetype']:
        if get('duration') == 'N/A' or int(get('duration')) == 0:
            asset['duration'] = int(get_video_duration(uri).total_seconds())
    else:
        # Crashes if it's not an int. We want that.
        asset['duration'] = int(get('duration'))

    # Fix: get() returns '' for a missing key, and the previous
    # int(get('skip_asset_check')) raised ValueError on ''. Treat a
    # missing/empty value as 0 instead.
    asset['skip_asset_check'] = int(get('skip_asset_check') or 0)

    # parse date via python-dateutil and remove timezone info
    if get('start_date'):
        asset['start_date'] = date_parser.parse(
            get('start_date')).replace(tzinfo=None)
    else:
        asset['start_date'] = ""

    if get('end_date'):
        asset['end_date'] = date_parser.parse(
            get('end_date')).replace(tzinfo=None)
    else:
        asset['end_date'] = ""

    return asset
|
||||
|
||||
|
||||
def prepare_asset_v1_2(request_environ, asset_id=None, unique_name=False):
    """Parse and validate a v1.2 asset request body.

    :param request_environ: request-like object whose ``.data`` is a raw
        JSON body.
    :param asset_id: existing asset id when updating; None when creating.
    :param unique_name: when True, de-duplicate the asset name with a '-N'
        suffix.
    :raises Exception: on missing fields, invalid file paths or URLs.
    """
    data = json.loads(request_environ.data)

    def get(key):
        # Strip whitespace from string values, pass everything else
        # through. (The Python 2 'basestring' branch was unreachable/broken
        # under Python 3 and has been removed.)
        val = data.get(key, '')
        if isinstance(val, str):
            return val.strip()
        else:
            return val

    if not all([get('name'),
                get('uri'),
                get('mimetype'),
                str(get('is_enabled')),
                get('start_date'),
                get('end_date')]):
        raise Exception(
            "Not enough information provided. Please specify 'name', "
            "'uri', 'mimetype', 'is_enabled', 'start_date' and 'end_date'."
        )

    # Undo the HTML-entity escaping applied by older clients. (The entity
    # literals below were mangled into their decoded characters by an
    # earlier encoding pass, turning these replaces into no-ops; restored.)
    ampfix = "&amp;"
    name = escape(get('name').replace(ampfix, '&'))
    if unique_name:
        with db.conn(settings['database']) as conn:
            names = assets_helper.get_names_of_assets(conn)
        if name in names:
            i = 1
            while True:
                new_name = '%s-%i' % (name, i)
                if new_name in names:
                    i += 1
                else:
                    name = new_name
                    break

    asset = {
        'name': name,
        'mimetype': get('mimetype'),
        'is_enabled': get('is_enabled'),
        'nocache': get('nocache')
    }

    uri = (
        (get('uri'))
        .replace(ampfix, '&')
        .replace('&lt;', '<')
        .replace('&gt;', '>')
        .replace('&#39;', "'")
        .replace('&quot;', '"')
    )

    if uri.startswith('/'):
        if not path.isfile(uri):
            raise Exception("Invalid file path. Failed to add asset.")
    else:
        if not validate_url(uri):
            raise Exception("Invalid URL. Failed to add asset.")

    if not asset_id:
        asset['asset_id'] = uuid.uuid4().hex

    # New uploads are moved into the asset directory, keyed by the id and
    # keeping the original file extension.
    if not asset_id and uri.startswith('/'):
        new_uri = "{}{}".format(
            path.join(settings['assetdir'], asset['asset_id']), get('ext'))
        rename(uri, new_uri)
        uri = new_uri

    if 'youtube_asset' in asset['mimetype']:
        uri, asset['name'], asset['duration'] = download_video_from_youtube(
            uri, asset['asset_id'])
        asset['mimetype'] = 'video'
        asset['is_processing'] = 1

    asset['uri'] = uri

    if "video" in asset['mimetype']:
        if get('duration') == 'N/A' or int(get('duration')) == 0:
            # Unknown duration: probe the video file itself.
            asset['duration'] = int(get_video_duration(uri).total_seconds())
        elif get('duration'):
            # Crashes if it's not an int. We want that.
            asset['duration'] = int(get('duration'))
        else:
            asset['duration'] = 10

    asset['play_order'] = get('play_order') if get('play_order') else 0

    # Fix: get() returns '' for a missing key, and the previous
    # int(get('skip_asset_check')) raised ValueError on ''. Treat a
    # missing/empty value as 0.
    asset['skip_asset_check'] = int(get('skip_asset_check') or 0)

    # parse date via python-dateutil and remove timezone info
    asset['start_date'] = date_parser.parse(
        get('start_date')).replace(tzinfo=None)
    asset['end_date'] = date_parser.parse(get('end_date')).replace(tzinfo=None)

    return asset
|
||||
class AssetCreationException(Exception):
    """Raised when an asset payload fails validation.

    The individual validation problems are exposed via ``errors``.
    """

    def __init__(self, errors):
        # Pass the errors on to Exception.__init__ as well so that
        # str(exc), repr(exc), exc.args and pickling behave sensibly
        # (the original left Exception.args empty).
        super().__init__(errors)
        self.errors = errors
|
||||
|
||||
|
||||
def update_asset(asset, data):
|
||||
for key, value in list(data.items()):
|
||||
|
||||
if (
|
||||
key in ['asset_id', 'is_processing', 'mimetype', 'uri'] or
|
||||
key not in asset
|
||||
key in ['asset_id', 'is_processing', 'mimetype', 'uri']
|
||||
or key not in asset
|
||||
):
|
||||
continue
|
||||
|
||||
if key in ['start_date', 'end_date']:
|
||||
value = date_parser.parse(value).replace(tzinfo=None)
|
||||
|
||||
if key in [
|
||||
'play_order',
|
||||
'skip_asset_check',
|
||||
'is_enabled',
|
||||
'is_active',
|
||||
'nocache',
|
||||
]:
|
||||
if (
|
||||
key in [
|
||||
'play_order',
|
||||
'skip_asset_check',
|
||||
'is_enabled',
|
||||
'is_active',
|
||||
'nocache',
|
||||
]
|
||||
):
|
||||
value = int(value)
|
||||
|
||||
if key == 'duration':
|
||||
@@ -388,14 +43,42 @@ def update_asset(asset, data):
|
||||
asset.update({key: value})
|
||||
|
||||
|
||||
# The diff rendering had interleaved the old Flask-era decorator with the
# new DRF exception hook; the two definitions are untangled below.


# Used as a decorator to catch exceptions and return a JSON response.
def api_response(view):
    @wraps(view)
    def api_view(*args, **kwargs):
        try:
            return view(*args, **kwargs)
        except Exception as e:
            # Log the full traceback, then surface the message as JSON.
            traceback.print_exc()
            return api_error(str(e))

    return api_view


def custom_exception_handler(exc, context):
    """DRF exception hook: run the default handler for its side effects,
    then answer every unhandled exception with a JSON HTTP 500 body."""
    exception_handler(exc, context)

    return Response(
        {'error': str(exc)},
        status=status.HTTP_500_INTERNAL_SERVER_ERROR
    )
|
||||
|
||||
|
||||
def get_active_asset_ids():
    """Return the ids of enabled, fully scheduled assets that report
    themselves as currently active."""
    candidates = Asset.objects.filter(
        is_enabled=1,
        start_date__isnull=False,
        end_date__isnull=False,
    )

    active_ids = []
    for candidate in candidates:
        if candidate.is_active():
            active_ids.append(candidate.asset_id)

    return active_ids
|
||||
|
||||
|
||||
def save_active_assets_ordering(active_asset_ids):
    """Persist the given ordering: each asset's play_order becomes its
    position in *active_asset_ids*."""
    for position, current_id in enumerate(active_asset_ids):
        Asset.objects.filter(asset_id=current_id).update(play_order=position)
|
||||
|
||||
|
||||
def parse_request(request):
    """Extract the JSON payload from *request*.

    For backward compatibility the payload may arrive either as the raw
    request body or wrapped in a form-style 'model' field.
    """
    try:
        return json.loads(request.data)
    except (ValueError, TypeError):
        return json.loads(request.data['model'])
|
||||
|
||||
0
api/migrations/__init__.py
Normal file
0
api/migrations/__init__.py
Normal file
288
api/serializers.py
Normal file
288
api/serializers.py
Normal file
@@ -0,0 +1,288 @@
|
||||
import uuid
|
||||
from os import path, rename
|
||||
from django.utils import timezone
|
||||
from rest_framework.serializers import (
|
||||
BooleanField,
|
||||
CharField,
|
||||
DateTimeField,
|
||||
IntegerField,
|
||||
ModelSerializer,
|
||||
Serializer,
|
||||
)
|
||||
from anthias_app.models import Asset
|
||||
from lib.utils import (
|
||||
download_video_from_youtube,
|
||||
get_video_duration,
|
||||
validate_url,
|
||||
url_fails,
|
||||
)
|
||||
from settings import settings
|
||||
|
||||
|
||||
class AssetRequestSerializer(Serializer):
    """Declarative description of a full asset payload, used for request
    validation / schema generation."""
    name = CharField()
    uri = CharField()
    # Datetimes without an explicit offset are interpreted as UTC.
    start_date = DateTimeField(default_timezone=timezone.utc)
    end_date = DateTimeField(default_timezone=timezone.utc)
    duration = IntegerField()
    mimetype = CharField()
    is_enabled = BooleanField()
    nocache = BooleanField()
    play_order = IntegerField()
    skip_asset_check = BooleanField()
|
||||
|
||||
|
||||
class AssetSerializer(ModelSerializer):
    """Model-backed serializer exposing every client-facing Asset field."""
    class Meta:
        model = Asset
        fields = [
            'asset_id',
            'name',
            'uri',
            'start_date',
            'end_date',
            'duration',
            'mimetype',
            'is_enabled',
            'nocache',
            'play_order',
            'skip_asset_check',
            'is_active',
            'is_processing',
        ]
|
||||
|
||||
|
||||
class CreateAssetSerializerV1_1(Serializer):
    """Validates v1/v1.1 asset-creation payloads and normalises them into
    a dict of Asset fields, moving uploaded files into the asset directory
    and resolving YouTube assets along the way."""

    def __init__(self, *args, unique_name=False, **kwargs):
        # When True, name collisions with existing assets are resolved by
        # appending a '-N' suffix until the name is unique.
        self.unique_name = unique_name
        super().__init__(*args, **kwargs)

    name = CharField()
    uri = CharField()
    # Datetimes without an explicit offset are interpreted as UTC.
    start_date = DateTimeField(default_timezone=timezone.utc, required=False)
    end_date = DateTimeField(default_timezone=timezone.utc, required=False)
    duration = IntegerField(required=False)
    mimetype = CharField()
    is_enabled = BooleanField(required=False)
    is_processing = BooleanField(required=False)
    nocache = BooleanField(required=False)
    play_order = IntegerField(required=False)
    skip_asset_check = BooleanField(required=False)

    def validate(self, data):
        """Return the normalised asset dict (not the raw input data)."""
        name = data['name']

        if self.unique_name:
            names = Asset.objects.values_list('name', flat=True)

            if name in names:
                i = 1
                while True:
                    new_name = f'{name}-{i}'
                    if new_name in names:
                        i += 1
                    else:
                        name = new_name
                        break

        asset = {
            'name': name,
            'mimetype': data.get('mimetype'),
            'asset_id': data.get('asset_id'),
            'is_enabled': data.get('is_enabled', False),
            'is_processing': data.get('is_processing', False),
            'nocache': data.get('nocache', False),
        }

        uri = data.get('uri')

        # Local paths must point at an existing file; anything else must
        # be a well-formed URL.
        if uri.startswith('/'):
            if not path.isfile(uri):
                raise Exception("Invalid file path. Failed to add asset.")
        else:
            if not validate_url(uri):
                raise Exception("Invalid URL. Failed to add asset.")

        if not asset['asset_id']:
            asset['asset_id'] = uuid.uuid4().hex
            # Freshly uploaded files are moved into the asset directory,
            # keyed by the new asset id.
            if uri.startswith('/'):
                rename(uri, path.join(settings['assetdir'], asset['asset_id']))
                uri = path.join(settings['assetdir'], asset['asset_id'])

        if 'youtube_asset' in asset['mimetype']:
            (
                uri, asset['name'], asset['duration']
            ) = download_video_from_youtube(uri, asset['asset_id'])
            asset['mimetype'] = 'video'
            asset['is_processing'] = 1

        asset['uri'] = uri

        # NOTE(review): a video asset with a non-zero duration falls
        # through both branches below and keeps whatever duration the
        # YouTube handler set (possibly none) — confirm this is intended.
        if "video" in asset['mimetype']:
            if data.get('duration') == 0:
                asset['duration'] = int(
                    get_video_duration(uri).total_seconds())
        else:
            # Crashes if it's not an int. We want that.
            asset['duration'] = data.get('duration')

        asset['skip_asset_check'] = data.get('skip_asset_check', False)

        # Datetimes are stored naive (timezone info stripped).
        if data.get('start_date'):
            asset['start_date'] = data.get('start_date').replace(tzinfo=None)
        else:
            asset['start_date'] = ""

        if data.get('end_date'):
            asset['end_date'] = data.get('end_date').replace(tzinfo=None)
        else:
            asset['end_date'] = ""

        # Unless explicitly skipped, verify the asset is reachable.
        if not asset['skip_asset_check'] and url_fails(asset['uri']):
            raise Exception("Could not retrieve file. Check the asset URL.")

        return asset
|
||||
|
||||
|
||||
class CreateAssetSerializerV1_2(Serializer):
    """Validates v1.2 asset-creation payloads and normalises them into a
    dict of Asset fields (moving uploaded files into the asset directory
    and resolving YouTube assets along the way)."""

    def __init__(self, *args, unique_name=False, **kwargs):
        # When True, name collisions with existing assets are resolved by
        # appending a '-N' suffix until the name is unique.
        self.unique_name = unique_name
        super().__init__(*args, **kwargs)

    asset_id = CharField(read_only=True)
    ext = CharField(write_only=True, required=False)
    name = CharField()
    uri = CharField()
    # Datetimes without an explicit offset are interpreted as UTC.
    start_date = DateTimeField(default_timezone=timezone.utc)
    end_date = DateTimeField(default_timezone=timezone.utc)
    duration = IntegerField()
    mimetype = CharField()
    is_enabled = BooleanField()
    is_processing = BooleanField(required=False)
    nocache = BooleanField(required=False)
    play_order = IntegerField(required=False)
    skip_asset_check = BooleanField(required=False)

    def prepare_asset(self, data, asset_id=None):
        """Build the Asset field dict from validated input *data*.

        :param asset_id: existing id when updating; None when creating.
        :raises Exception: on invalid paths/URLs or unreachable assets.
        """
        # Undo the HTML-entity escaping applied by legacy clients. (The
        # entity literals had been mangled into their decoded characters,
        # turning these replaces into no-ops; restored.)
        ampersand_fix = '&amp;'
        name = data['name'].replace(ampersand_fix, '&')

        if self.unique_name:
            names = Asset.objects.values_list('name', flat=True)
            if name in names:
                i = 1
                while True:
                    new_name = f'{name}-{i}'
                    if new_name in names:
                        i += 1
                    else:
                        name = new_name
                        break

        asset = {
            'name': name,
            'mimetype': data.get('mimetype'),
            'is_enabled': data.get('is_enabled', False),
            'nocache': data.get('nocache', False),
        }

        uri = (
            data['uri']
            .replace(ampersand_fix, '&')
            .replace('&lt;', '<')
            .replace('&gt;', '>')
            .replace('&#39;', "'")
            .replace('&quot;', '"')
        )

        # Local paths must point at an existing file; anything else must
        # be a well-formed URL.
        if uri.startswith('/'):
            if not path.isfile(uri):
                raise Exception("Invalid file path. Failed to add asset.")
        else:
            if not validate_url(uri):
                raise Exception("Invalid URL. Failed to add asset.")

        if not asset_id:
            asset['asset_id'] = uuid.uuid4().hex

        # New uploads are moved into the asset directory, keyed by the id
        # and keeping the original file extension.
        if not asset_id and uri.startswith('/'):
            path_name = path.join(settings['assetdir'], asset['asset_id'])
            ext_name = data.get('ext', '')
            new_uri = f'{path_name}{ext_name}'
            rename(uri, new_uri)
            uri = new_uri

        if 'youtube_asset' in asset['mimetype']:
            (
                uri, asset['name'], asset['duration']
            ) = download_video_from_youtube(uri, asset['asset_id'])
            asset['mimetype'] = 'video'
            asset['is_processing'] = True

        asset['uri'] = uri

        if "video" in asset['mimetype']:
            if data.get('duration') == 0:
                # Unknown duration: probe the video file itself.
                asset['duration'] = int(
                    get_video_duration(uri).total_seconds())
            elif data.get('duration'):
                asset['duration'] = data.get('duration')
            else:
                asset['duration'] = 10

        asset['play_order'] = (
            data.get('play_order') if data.get('play_order') else 0
        )

        # Fix: 'skip_asset_check' is optional, so data.get() may return
        # None; the previous int(data.get('skip_asset_check')) raised a
        # TypeError whenever the field was omitted.
        asset['skip_asset_check'] = int(data.get('skip_asset_check') or 0)

        # Datetimes are stored naive (timezone info stripped).
        asset['start_date'] = data.get('start_date').replace(tzinfo=None)
        asset['end_date'] = data.get('end_date').replace(tzinfo=None)

        # Unless explicitly skipped, verify the asset is reachable.
        if not asset['skip_asset_check'] and url_fails(asset['uri']):
            raise Exception("Could not retrieve file. Check the asset URL.")

        return asset

    def validate(self, data):
        return self.prepare_asset(data)
|
||||
|
||||
|
||||
class UpdateAssetSerializer(Serializer):
    """Serializer for updating an existing asset in place."""
    name = CharField()
    start_date = DateTimeField(default_timezone=timezone.utc)
    end_date = DateTimeField(default_timezone=timezone.utc)
    duration = IntegerField()
    is_enabled = BooleanField()
    is_processing = BooleanField(required=False)
    nocache = BooleanField(required=False)
    play_order = IntegerField(required=False)
    skip_asset_check = BooleanField(required=False)

    def update(self, instance, validated_data):
        """Copy every supplied field onto *instance*, save it and return
        it. Fields absent from the payload keep their current value."""
        simple_fields = (
            'name',
            'start_date',
            'end_date',
            'is_enabled',
            'is_processing',
            'nocache',
            'play_order',
            'skip_asset_check',
        )
        for field_name in simple_fields:
            current_value = getattr(instance, field_name)
            setattr(
                instance,
                field_name,
                validated_data.get(field_name, current_value),
            )

        # A video's duration comes from the file itself, so it is only
        # client-updatable for non-video assets.
        if 'video' not in instance.mimetype:
            instance.duration = validated_data.get(
                'duration', instance.duration)

        instance.save()

        return instance
|
||||
333
api/tests.py
Normal file
333
api/tests.py
Normal file
@@ -0,0 +1,333 @@
|
||||
import json
|
||||
|
||||
from django.conf import settings as django_settings
|
||||
from django.test import TestCase
|
||||
from django.urls import reverse
|
||||
from inspect import cleandoc
|
||||
from os import path
|
||||
from pathlib import Path
|
||||
from rest_framework.test import APIClient
|
||||
from rest_framework import status
|
||||
from settings import settings as anthias_settings
|
||||
from unittest import mock
|
||||
from unittest_parametrize import parametrize, ParametrizedTestCase
|
||||
|
||||
from anthias_app.models import Asset
|
||||
|
||||
|
||||
# Resolved once at import time; used by tests that only need the v1.1
# list endpoint.
ASSET_LIST_V1_1_URL = reverse('api:asset_list_v1_1')
# Baseline payload for creating a webpage asset; individual tests override
# fields as needed.
ASSET_CREATION_DATA = {
    'name': 'Anthias',
    'uri': 'https://anthias.screenly.io',
    'start_date': '2019-08-24T14:15:22Z',
    'end_date': '2029-08-24T14:15:22Z',
    'duration': 20,
    'mimetype': 'webpage',
    'is_enabled': 0,
    'nocache': 0,
    'play_order': 0,
    'skip_asset_check': 0
}

# Runs the decorated test once per supported API version.
parametrize_version = parametrize(
    'version',
    [('v1',), ('v1_1',), ('v1_2',)],
)
|
||||
|
||||
|
||||
class CRUDAssetEndpointsTest(TestCase, ParametrizedTestCase):
    """CRUD coverage for the asset endpoints across all API versions."""

    def setUp(self):
        self.client = APIClient()

    def get_assets(self, version):
        """GET the asset list for *version* and return the response data."""
        asset_list_url = reverse(f'api:asset_list_{version}')
        response = self.client.get(asset_list_url)

        self.assertEqual(response.status_code, status.HTTP_200_OK)

        return response.data

    def get_request_data(self, data, version):
        # v1/v1.1 expect the JSON payload wrapped in a form field named
        # 'model'; v1.2 accepts the JSON body directly.
        if version in ['v1', 'v1_1']:
            return {
                'model': json.dumps(data)
            }
        else:
            return data

    def create_asset(self, data, version):
        """POST *data* to the version-specific asset list endpoint."""
        asset_list_url = reverse(f'api:asset_list_{version}')
        return self.client.post(
            asset_list_url,
            data=self.get_request_data(data, version)
        ).data

    def update_asset(self, asset_id, data):
        """PUT *data* to the v1.1 detail endpoint for *asset_id*."""
        return self.client.put(
            reverse('api:asset_detail_v1_1', args=[asset_id]),
            data=data
        ).data

    def get_asset(self, asset_id):
        """GET a single asset via the v1.1 detail endpoint."""
        url = reverse('api:asset_detail_v1_1', args=[asset_id])
        return self.client.get(url).data

    def delete_asset(self, asset_id):
        """DELETE an asset via the v1.1 detail endpoint."""
        url = reverse('api:asset_detail_v1_1', args=[asset_id])
        return self.client.delete(url)

    @parametrize_version
    def test_get_assets_when_first_time_setup_should_initially_return_empty(self, version):  # noqa: E501
        asset_list_url = reverse(f'api:asset_list_{version}')
        response = self.client.get(asset_list_url)
        assets = response.data

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(assets), 0)

    @parametrize_version
    def test_create_asset_should_return_201(self, version):
        asset_list_url = reverse(f'api:asset_list_{version}')
        response = self.client.post(
            asset_list_url,
            data=self.get_request_data(ASSET_CREATION_DATA, version)
        )

        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        self.assertEqual(response.data['name'], 'Anthias')
        self.assertEqual(response.data['uri'], 'https://anthias.screenly.io')
        self.assertEqual(response.data['is_enabled'], 0)
        self.assertEqual(response.data['nocache'], 0)
        self.assertEqual(response.data['play_order'], 0)
        self.assertEqual(response.data['skip_asset_check'], 0)

    @parametrize_version
    def test_get_assets_after_create_should_return_1_asset(self, version):
        self.create_asset(ASSET_CREATION_DATA, version)

        assets = self.get_assets(version)
        self.assertEqual(len(assets), 1)

    @parametrize_version
    def test_get_asset_by_id_should_return_asset(self, version):
        expected_asset = self.create_asset(ASSET_CREATION_DATA, version)
        asset_id = expected_asset['asset_id']

        actual_asset = self.get_asset(asset_id)

        self.assertEqual(expected_asset, actual_asset)

    @parametrize_version
    def test_update_asset_should_return_updated_asset(self, version):
        expected_asset = self.create_asset(ASSET_CREATION_DATA, version)
        asset_id = expected_asset['asset_id']
        # Updates always go through the v1.1 endpoint, which expects the
        # JSON payload wrapped in a 'model' form field.
        updated_asset = self.update_asset(
            asset_id,
            data={
                'model': cleandoc(
                    '''
                    {
                        "name": "Anthias",
                        "uri": "https://anthias.screenly.io",
                        "start_date": "2019-08-24T14:15:22Z",
                        "end_date": "2029-08-24T14:15:22Z",
                        "duration": "15",
                        "mimetype": "webpage",
                        "is_enabled": 1,
                        "nocache": 0,
                        "play_order": 0,
                        "skip_asset_check": 0
                    }
                    '''
                )
            }
        )

        self.assertEqual(updated_asset['name'], 'Anthias')
        self.assertEqual(updated_asset['uri'], 'https://anthias.screenly.io')
        self.assertEqual(updated_asset['duration'], 15)
        self.assertEqual(updated_asset['is_enabled'], 1)
        self.assertEqual(updated_asset['play_order'], 0)

    @parametrize_version
    def test_delete_asset_should_return_204(self, version):
        asset = self.create_asset(ASSET_CREATION_DATA, version)
        asset_id = asset['asset_id']

        response = self.delete_asset(asset_id)
        assets = self.client.get(ASSET_LIST_V1_1_URL).data

        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(len(assets), 0)
|
||||
|
||||
|
||||
class V1EndpointsTest(TestCase, ParametrizedTestCase):
    """Coverage for the non-CRUD v1 endpoints: asset content, file
    uploads, playlist ordering, viewer control and device management."""

    def setUp(self):
        self.client = APIClient()

    def tearDown(self):
        # File-asset tests write into the shared asset directory; clean it
        # so tests stay independent of each other.
        self.remove_all_asset_files()

    def remove_all_asset_files(self):
        asset_directory_path = Path(anthias_settings['assetdir'])
        for file in asset_directory_path.iterdir():
            file.unlink()

    def get_asset_content_url(self, asset_id):
        return reverse('api:asset_content_v1', args=[asset_id])

    def test_asset_content(self):
        # A web-page asset's content is returned as a URL reference.
        asset = Asset.objects.create(**ASSET_CREATION_DATA)
        asset_id = asset.asset_id

        response = self.client.get(self.get_asset_content_url(asset_id))
        data = response.data

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(data['type'], 'url')
        self.assertEqual(data['url'], 'https://anthias.screenly.io')

    def test_file_asset(self):
        # Uploading a bundled image should store it on disk and report the
        # preserved file extension.
        project_base_path = django_settings.BASE_DIR
        image_path = path.join(
            project_base_path,
            'static/img/standby.png',
        )

        response = self.client.post(
            reverse('api:file_asset_v1'),
            data={
                'file_upload': open(image_path, 'rb'),
            },
        )
        data = response.data

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(path.exists(data['uri']))
        self.assertEqual(data['ext'], '.png')

    def test_playlist_order(self):
        playlist_order_url = reverse('api:playlist_order_v1')

        for asset_name in ['Asset #1', 'Asset #2', 'Asset #3']:
            Asset.objects.create(**{
                **ASSET_CREATION_DATA,
                'name': asset_name,
            })

        # All assets start at play_order 0 ...
        self.assertTrue(
            all([
                asset.play_order == 0
                for asset in Asset.objects.all()
            ])
        )

        asset_1, asset_2, asset_3 = Asset.objects.all()
        asset_ids = [asset_1.asset_id, asset_2.asset_id, asset_3.asset_id]

        # ... and take their position from the comma-separated id list.
        response = self.client.post(
            playlist_order_url,
            data={'ids': ','.join(asset_ids)}
        )
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        for asset in [asset_1, asset_2, asset_3]:
            asset.refresh_from_db()

        self.assertEqual(asset_1.play_order, 0)
        self.assertEqual(asset_2.play_order, 1)
        self.assertEqual(asset_3.play_order, 2)

    @parametrize(
        'command',
        [
            ('next',),
            ('previous',),
            ('asset&6ee2394e760643748b9353f06f405424',),
        ],
    )
    @mock.patch('api.views.v1.ZmqPublisher.send_to_viewer', return_value=None)
    def test_assets_control(self, send_to_viewer_mock, command):
        # The command should be forwarded verbatim to the viewer process.
        assets_control_url = reverse('api:assets_control_v1', args=[command])
        response = self.client.get(assets_control_url)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(send_to_viewer_mock.call_count, 1)
        self.assertEqual(send_to_viewer_mock.call_args[0][0], command)
        self.assertEqual(response.data, 'Asset switched')

    @mock.patch(
        'api.views.v1.is_up_to_date',
        return_value=False
    )
    @mock.patch(
        'lib.diagnostics.get_load_avg',
        return_value={'15 min': 0.11}
    )
    @mock.patch('api.views.v1.size', return_value='15G')
    @mock.patch('api.views.v1.statvfs', mock.MagicMock())
    def test_device_info(
        self,
        size_mock,
        get_load_avg_mock,
        is_up_to_date_mock
    ):
        is_up_to_date_mock.return_value = False
        info_url = reverse('api:info_v1')
        response = self.client.get(info_url)
        data = response.data

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(size_mock.call_count, 1)
        self.assertEqual(get_load_avg_mock.call_count, 1)
        self.assertEqual(is_up_to_date_mock.call_count, 1)
        self.assertEqual(data['viewlog'], 'Not yet implemented')

    @mock.patch(
        'api.views.v1.reboot_anthias.apply_async',
        side_effect=(lambda: None)
    )
    def test_reboot(self, reboot_anthias_mock):
        reboot_url = reverse('api:reboot_v1')
        response = self.client.post(reboot_url)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(reboot_anthias_mock.call_count, 1)

    @mock.patch(
        'api.views.v1.shutdown_anthias.apply_async',
        side_effect=(lambda: None)
    )
    def test_shutdown(self, shutdown_anthias_mock):
        shutdown_url = reverse('api:shutdown_v1')
        response = self.client.post(shutdown_url)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(shutdown_anthias_mock.call_count, 1)

    @mock.patch('api.views.v1.ZmqPublisher.send_to_viewer', return_value=None)
    def test_viewer_current_asset(self, send_to_viewer_mock):
        asset = Asset.objects.create(**{
            **ASSET_CREATION_DATA,
            'is_enabled': 1,
        })
        asset_id = asset.asset_id

        # Pretend the viewer reports this asset as currently on screen.
        with (
            mock.patch(
                'api.views.v1.ZmqCollector.recv_json',
                side_effect=(lambda _: {
                    'current_asset_id': asset_id
                })
            )
        ):
            viewer_current_asset_url = reverse('api:viewer_current_asset_v1')
            response = self.client.get(viewer_current_asset_url)
            data = response.data

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(send_to_viewer_mock.call_count, 1)

        self.assertEqual(data['asset_id'], asset_id)
        self.assertEqual(data['is_active'], 1)
||||
77
api/urls.py
Normal file
77
api/urls.py
Normal file
@@ -0,0 +1,77 @@
|
||||
from django.urls import path
|
||||
from .views.v1 import (
|
||||
AssetViewV1,
|
||||
AssetListViewV1,
|
||||
AssetContentView,
|
||||
FileAssetView,
|
||||
PlaylistOrderView,
|
||||
BackupView,
|
||||
RecoverView,
|
||||
AssetsControlView,
|
||||
InfoView,
|
||||
RebootView,
|
||||
ShutdownView,
|
||||
ViewerCurrentAssetView
|
||||
)
|
||||
from .views.v1_1 import (
|
||||
AssetListViewV1_1,
|
||||
AssetViewV1_1
|
||||
)
|
||||
from .views.v1_2 import (
|
||||
AssetListViewV1_2,
|
||||
AssetViewV1_2
|
||||
)
|
||||
|
||||
app_name = 'api'

# Route table for all API versions. Order matters: Django resolves
# patterns top-to-bottom, so the literal 'v1/assets/order' and
# 'v1/assets/control/...' routes must be registered before the
# 'v1/assets/<str:asset_id>' catch-all.
urlpatterns = [
    # v1 endpoints
    path('v1/assets', AssetListViewV1.as_view(), name='asset_list_v1'),
    path(
        'v1/assets/order',
        PlaylistOrderView.as_view(),
        name='playlist_order_v1',
    ),
    path(
        'v1/assets/control/<str:command>',
        AssetsControlView.as_view(),
        name='assets_control_v1',
    ),
    path(
        'v1/assets/<str:asset_id>',
        AssetViewV1.as_view(),
        name='asset_detail_v1',
    ),
    path(
        'v1/assets/<str:asset_id>/content',
        AssetContentView.as_view(),
        name='asset_content_v1',
    ),
    path('v1/file_asset', FileAssetView.as_view(), name='file_asset_v1'),
    path('v1/backup', BackupView.as_view(), name='backup_v1'),
    path('v1/recover', RecoverView.as_view(), name='recover_v1'),
    path('v1/info', InfoView.as_view(), name='info_v1'),
    path('v1/reboot', RebootView.as_view(), name='reboot_v1'),
    path('v1/shutdown', ShutdownView.as_view(), name='shutdown_v1'),
    path(
        'v1/viewer_current_asset',
        ViewerCurrentAssetView.as_view(),
        name='viewer_current_asset_v1',
    ),

    # v1.1 endpoints
    path('v1.1/assets', AssetListViewV1_1.as_view(), name='asset_list_v1_1'),
    path(
        'v1.1/assets/<str:asset_id>',
        AssetViewV1_1.as_view(),
        name='asset_detail_v1_1',
    ),

    # v1.2 endpoints
    path('v1.2/assets', AssetListViewV1_2.as_view(), name='asset_list_v1_2'),
    path(
        'v1.2/assets/<str:asset_id>',
        AssetViewV1_2.as_view(),
        name='asset_detail_v1_2',
    )
]
|
||||
852
api/views/v1.py
852
api/views/v1.py
@@ -1,445 +1,164 @@
|
||||
import uuid
|
||||
|
||||
from base64 import b64encode
|
||||
from flask import request
|
||||
from flask_restful_swagger_2 import Resource, swagger
|
||||
from mimetypes import guess_type, guess_extension
|
||||
from os import path, remove, statvfs
|
||||
from werkzeug.wrappers import Request
|
||||
|
||||
from api.helpers import (
|
||||
AssetModel,
|
||||
AssetContentModel,
|
||||
api_response,
|
||||
prepare_asset,
|
||||
from inspect import cleandoc
|
||||
from rest_framework import serializers, status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from api.serializers import (
|
||||
AssetSerializer,
|
||||
CreateAssetSerializerV1_1,
|
||||
UpdateAssetSerializer,
|
||||
)
|
||||
from api.helpers import (
|
||||
AssetCreationException,
|
||||
parse_request,
|
||||
save_active_assets_ordering,
|
||||
)
|
||||
from base64 import b64encode
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import (
|
||||
extend_schema,
|
||||
inline_serializer,
|
||||
OpenApiExample,
|
||||
OpenApiParameter,
|
||||
OpenApiRequest,
|
||||
)
|
||||
from celery_tasks import shutdown_anthias, reboot_anthias
|
||||
from hurry.filesize import size
|
||||
from lib import (
|
||||
db,
|
||||
diagnostics,
|
||||
assets_helper,
|
||||
backup_helper,
|
||||
diagnostics
|
||||
)
|
||||
from lib.auth import authorized
|
||||
from lib.github import is_up_to_date
|
||||
from lib.utils import connect_to_redis, url_fails
|
||||
from settings import (
|
||||
settings,
|
||||
ZmqCollector,
|
||||
ZmqPublisher,
|
||||
)
|
||||
from lib.utils import connect_to_redis
|
||||
from mimetypes import guess_type, guess_extension
|
||||
from os import path, remove, statvfs
|
||||
from anthias_app.models import Asset
|
||||
from celery_tasks import reboot_anthias, shutdown_anthias
|
||||
from settings import settings, ZmqCollector, ZmqPublisher
|
||||
|
||||
|
||||
r = connect_to_redis()
|
||||
|
||||
MODEL_STRING_EXAMPLE = """
|
||||
Yes, that is just a string of JSON not JSON itself it will be parsed on the
|
||||
other end. It's recommended to set `Content-Type` to
|
||||
`application/x-www-form-urlencoded` and send the model as a string.
|
||||
|
||||
class Assets(Resource):
|
||||
method_decorators = [authorized]
|
||||
```
|
||||
model: "{
|
||||
"name": "Website",
|
||||
"mimetype": "webpage",
|
||||
"uri": "http://example.com",
|
||||
"is_active": 0,
|
||||
"start_date": "2017-02-02T00:33:00.000Z",
|
||||
"end_date": "2017-03-01T00:33:00.000Z",
|
||||
"duration": "10",
|
||||
"is_enabled": 0,
|
||||
"is_processing": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}"
|
||||
```
|
||||
"""
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'List of assets',
|
||||
'schema': {
|
||||
'type': 'array',
|
||||
'items': AssetModel
|
||||
V1_ASSET_REQUEST = OpenApiRequest(
|
||||
inline_serializer(
|
||||
name='ModelString',
|
||||
fields={
|
||||
'model': serializers.CharField(
|
||||
help_text=MODEL_STRING_EXAMPLE,
|
||||
),
|
||||
},
|
||||
),
|
||||
examples=[
|
||||
OpenApiExample(
|
||||
name='Example 1',
|
||||
value={'model': MODEL_STRING_EXAMPLE}
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
class AssetViewV1(APIView):
|
||||
serializer_class = AssetSerializer
|
||||
|
||||
@extend_schema(summary='Get asset')
|
||||
@authorized
|
||||
def get(self, request, asset_id, format=None):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
return Response(AssetSerializer(asset).data)
|
||||
|
||||
@extend_schema(
|
||||
summary='Update asset',
|
||||
request=V1_ASSET_REQUEST,
|
||||
responses={
|
||||
201: AssetSerializer
|
||||
}
|
||||
})
|
||||
def get(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
return assets
|
||||
)
|
||||
@authorized
|
||||
def put(self, request, asset_id, format=None):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
|
||||
@api_response
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'model',
|
||||
'in': 'formData',
|
||||
'type': 'string',
|
||||
'description':
|
||||
'''
|
||||
Yes, that is just a string of JSON not JSON itself it will
|
||||
be parsed on the other end.
|
||||
data = parse_request(request)
|
||||
serializer = UpdateAssetSerializer(asset, data=data, partial=False)
|
||||
|
||||
Content-Type: application/x-www-form-urlencoded
|
||||
model: "{
|
||||
"name": "Website",
|
||||
"mimetype": "webpage",
|
||||
"uri": "http://example.com",
|
||||
"is_active": 0,
|
||||
"start_date": "2017-02-02T00:33:00.000Z",
|
||||
"end_date": "2017-03-01T00:33:00.000Z",
|
||||
"duration": "10",
|
||||
"is_enabled": 0,
|
||||
"is_processing": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}"
|
||||
'''
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'201': {
|
||||
'description': 'Asset created',
|
||||
'schema': AssetModel
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
asset = prepare_asset(request)
|
||||
if url_fails(asset['uri']):
|
||||
raise Exception("Could not retrieve file. Check the asset URL.")
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.create(conn, asset), 201
|
||||
|
||||
|
||||
class Asset(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset'
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset',
|
||||
'schema': AssetModel
|
||||
}
|
||||
}
|
||||
})
|
||||
def get(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.read(conn, asset_id)
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset'
|
||||
},
|
||||
{
|
||||
'name': 'model',
|
||||
'in': 'formData',
|
||||
'type': 'string',
|
||||
'description':
|
||||
'''
|
||||
Content-Type: application/x-www-form-urlencoded
|
||||
model: "{
|
||||
"asset_id": "793406aa1fd34b85aa82614004c0e63a",
|
||||
"name": "Website",
|
||||
"mimetype": "webpage",
|
||||
"uri": "http://example.com",
|
||||
"is_active": 0,
|
||||
"start_date": "2017-02-02T00:33:00.000Z",
|
||||
"end_date": "2017-03-01T00:33:00.000Z",
|
||||
"duration": "10",
|
||||
"is_enabled": 0,
|
||||
"is_processing": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}"
|
||||
'''
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset updated',
|
||||
'schema': AssetModel
|
||||
}
|
||||
}
|
||||
})
|
||||
def put(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.update(conn, asset_id, prepare_asset(request))
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset'
|
||||
},
|
||||
],
|
||||
'responses': {
|
||||
'204': {
|
||||
'description': 'Deleted'
|
||||
}
|
||||
}
|
||||
})
|
||||
def delete(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
asset = assets_helper.read(conn, asset_id)
|
||||
try:
|
||||
if asset['uri'].startswith(settings['assetdir']):
|
||||
remove(asset['uri'])
|
||||
except OSError:
|
||||
pass
|
||||
assets_helper.delete(conn, asset_id)
|
||||
return '', 204 # return an OK with no content
|
||||
|
||||
|
||||
class FileAsset(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'file_upload',
|
||||
'type': 'file',
|
||||
'in': 'formData',
|
||||
'description': 'File to be sent'
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'File path',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
req = Request(request.environ)
|
||||
file_upload = req.files.get('file_upload')
|
||||
filename = file_upload.filename
|
||||
file_type = guess_type(filename)[0]
|
||||
|
||||
if not file_type:
|
||||
raise Exception("Invalid file type.")
|
||||
|
||||
if file_type.split('/')[0] not in ['image', 'video']:
|
||||
raise Exception("Invalid file type.")
|
||||
|
||||
file_path = path.join(
|
||||
settings['assetdir'],
|
||||
uuid.uuid5(uuid.NAMESPACE_URL, filename).hex) + ".tmp"
|
||||
|
||||
if 'Content-Range' in request.headers:
|
||||
range_str = request.headers['Content-Range']
|
||||
start_bytes = int(range_str.split(' ')[1].split('-')[0])
|
||||
with open(file_path, 'ab') as f:
|
||||
f.seek(start_bytes)
|
||||
f.write(file_upload.read())
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
else:
|
||||
file_upload.save(file_path)
|
||||
return Response(
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
return {'uri': file_path, 'ext': guess_extension(file_type)}
|
||||
asset.refresh_from_db()
|
||||
return Response(AssetSerializer(asset).data)
|
||||
|
||||
@extend_schema(summary='Delete asset')
|
||||
@authorized
|
||||
def delete(self, request, asset_id, format=None):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
|
||||
try:
|
||||
if asset.uri.startswith(settings['assetdir']):
|
||||
remove(asset.uri)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
asset.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class PlaylistOrder(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
class AssetContentView(APIView):
|
||||
@extend_schema(
|
||||
summary='Get asset content',
|
||||
description=cleandoc("""
|
||||
The content of the asset.
|
||||
`type` can either be `file` or `url`.
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'ids',
|
||||
'in': 'formData',
|
||||
'type': 'string',
|
||||
'description':
|
||||
'''
|
||||
Content-Type: application/x-www-form-urlencoded
|
||||
ids: "793406aa1fd34b85aa82614004c0e63a,1c5cfa719d1f4a9abae16c983a18903b,9c41068f3b7e452baf4dc3f9b7906595"
|
||||
comma separated ids
|
||||
''' # noqa: E501
|
||||
},
|
||||
],
|
||||
'responses': {
|
||||
'204': {
|
||||
'description': 'Sorted'
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets_helper.save_ordering(
|
||||
conn, request.form.get('ids', '').split(','))
|
||||
|
||||
|
||||
class Backup(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Backup filename',
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
In case of a file, the fields `mimetype`, `filename`, and `content`
|
||||
will be present. In case of a URL, the field `url` will be present.
|
||||
"""),
|
||||
responses={
|
||||
200: {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'type': {'type': 'string'},
|
||||
'url': {'type': 'string'},
|
||||
'filename': {'type': 'string'},
|
||||
'mimetype': {'type': 'string'},
|
||||
'content': {'type': 'string'},
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
filename = backup_helper.create_backup(name=settings['player_name'])
|
||||
return filename, 201
|
||||
)
|
||||
@authorized
|
||||
def get(self, request, asset_id, format=None):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
|
||||
if path.isfile(asset.uri):
|
||||
filename = asset.name
|
||||
|
||||
class Recover(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'backup_upload',
|
||||
'type': 'file',
|
||||
'in': 'formData'
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Recovery successful'
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
publisher = ZmqPublisher.get_instance()
|
||||
req = Request(request.environ)
|
||||
file_upload = (req.files['backup_upload'])
|
||||
filename = file_upload.filename
|
||||
|
||||
if guess_type(filename)[0] != 'application/x-tar':
|
||||
raise Exception("Incorrect file extension.")
|
||||
try:
|
||||
publisher.send_to_viewer('stop')
|
||||
location = path.join("static", filename)
|
||||
file_upload.save(location)
|
||||
backup_helper.recover(location)
|
||||
return "Recovery successful."
|
||||
finally:
|
||||
publisher.send_to_viewer('play')
|
||||
|
||||
|
||||
class Reboot(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Reboot system'
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
reboot_anthias.apply_async()
|
||||
return '', 200
|
||||
|
||||
|
||||
class Shutdown(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Shutdown system'
|
||||
}
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
shutdown_anthias.apply_async()
|
||||
return '', 200
|
||||
|
||||
|
||||
class Info(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
def get(self):
|
||||
# Calculate disk space
|
||||
slash = statvfs("/")
|
||||
free_space = size(slash.f_bavail * slash.f_frsize)
|
||||
display_power = r.get('display_power')
|
||||
|
||||
return {
|
||||
'loadavg': diagnostics.get_load_avg()['15 min'],
|
||||
'free_space': free_space,
|
||||
'display_power': display_power,
|
||||
'up_to_date': is_up_to_date()
|
||||
}
|
||||
|
||||
|
||||
class AssetsControl(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'command',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description':
|
||||
'''
|
||||
Control commands:
|
||||
next - show next asset
|
||||
previous - show previous asset
|
||||
asset&asset_id - show asset with `asset_id` id
|
||||
'''
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset switched'
|
||||
}
|
||||
}
|
||||
})
|
||||
def get(self, command):
|
||||
publisher = ZmqPublisher.get_instance()
|
||||
publisher.send_to_viewer(command)
|
||||
return "Asset switched"
|
||||
|
||||
|
||||
class AssetContent(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset'
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description':
|
||||
'''
|
||||
The content of the asset.
|
||||
|
||||
'type' can either be 'file' or 'url'.
|
||||
|
||||
In case of a file, the fields 'mimetype', 'filename', and
|
||||
'content' will be present. In case of a URL, the field
|
||||
'url' will be present.
|
||||
''',
|
||||
'schema': AssetContentModel
|
||||
}
|
||||
}
|
||||
})
|
||||
def get(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
asset = assets_helper.read(conn, asset_id)
|
||||
|
||||
if isinstance(asset, list):
|
||||
raise Exception('Invalid asset ID provided')
|
||||
|
||||
if path.isfile(asset['uri']):
|
||||
filename = asset['name']
|
||||
|
||||
with open(asset['uri'], 'rb') as f:
|
||||
with open(asset.uri, 'rb') as f:
|
||||
content = f.read()
|
||||
|
||||
mimetype = guess_type(filename)[0]
|
||||
@@ -455,24 +174,303 @@ class AssetContent(Resource):
|
||||
else:
|
||||
result = {
|
||||
'type': 'url',
|
||||
'url': asset['uri']
|
||||
'url': asset.uri
|
||||
}
|
||||
|
||||
return result
|
||||
return Response(result)
|
||||
|
||||
|
||||
class ViewerCurrentAsset(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
class AssetListViewV1(APIView):
|
||||
serializer_class = AssetSerializer
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Currently displayed asset in viewer',
|
||||
'schema': AssetModel
|
||||
@extend_schema(
|
||||
summary='List assets',
|
||||
responses={
|
||||
200: AssetSerializer(many=True)
|
||||
}
|
||||
)
|
||||
@authorized
|
||||
def get(self, request, format=None):
|
||||
queryset = Asset.objects.all()
|
||||
serializer = AssetSerializer(queryset, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@extend_schema(
|
||||
summary='Create asset',
|
||||
request=V1_ASSET_REQUEST,
|
||||
responses={
|
||||
201: AssetSerializer
|
||||
}
|
||||
)
|
||||
@authorized
|
||||
def post(self, request, format=None):
|
||||
data = parse_request(request)
|
||||
|
||||
try:
|
||||
serializer = CreateAssetSerializerV1_1(data=data)
|
||||
if not serializer.is_valid():
|
||||
raise AssetCreationException(serializer.errors)
|
||||
except AssetCreationException as error:
|
||||
return Response(error.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
asset = Asset.objects.create(**serializer.data)
|
||||
|
||||
return Response(
|
||||
AssetSerializer(asset).data, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
class FileAssetView(APIView):
|
||||
@extend_schema(
|
||||
summary='Upload file asset',
|
||||
request={
|
||||
'multipart/form-data': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'file_upload': {
|
||||
'type': 'string',
|
||||
'format': 'binary'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
responses={
|
||||
200: {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'uri': {'type': 'string'},
|
||||
'ext': {'type': 'string'}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
def get(self):
|
||||
)
|
||||
@authorized
|
||||
def post(self, request):
|
||||
file_upload = request.data.get('file_upload')
|
||||
filename = file_upload.name
|
||||
file_type = guess_type(filename)[0]
|
||||
|
||||
if not file_type:
|
||||
raise Exception("Invalid file type.")
|
||||
|
||||
if file_type.split('/')[0] not in ['image', 'video']:
|
||||
raise Exception("Invalid file type.")
|
||||
|
||||
file_path = path.join(
|
||||
settings['assetdir'],
|
||||
uuid.uuid5(uuid.NAMESPACE_URL, filename).hex,
|
||||
) + ".tmp"
|
||||
|
||||
if 'Content-Range' in request.headers:
|
||||
range_str = request.headers['Content-Range']
|
||||
start_bytes = int(range_str.split(' ')[1].split('-')[0])
|
||||
with open(file_path, 'ab') as f:
|
||||
f.seek(start_bytes)
|
||||
f.write(file_upload.read())
|
||||
else:
|
||||
with open(file_path, 'wb') as f:
|
||||
f.write(file_upload.read())
|
||||
|
||||
return Response({'uri': file_path, 'ext': guess_extension(file_type)})
|
||||
|
||||
|
||||
class PlaylistOrderView(APIView):
|
||||
@extend_schema(
|
||||
summary='Update playlist order',
|
||||
request={
|
||||
'application/x-www-form-urlencoded': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'ids': {
|
||||
'type': 'string',
|
||||
'description': cleandoc(
|
||||
"""
|
||||
Comma-separated list of asset IDs in the order
|
||||
they should be played. For example:
|
||||
|
||||
`793406aa1fd34b85aa82614004c0e63a,1c5cfa719d1f4a9abae16c983a18903b,9c41068f3b7e452baf4dc3f9b7906595`
|
||||
"""
|
||||
)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
)
|
||||
@authorized
|
||||
def post(self, request):
|
||||
asset_ids = request.data.get('ids', '').split(',')
|
||||
save_active_assets_ordering(asset_ids)
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
class BackupView(APIView):
|
||||
@extend_schema(
|
||||
summary='Create backup',
|
||||
description=cleandoc("""
|
||||
Create a backup of the current Anthias instance, which
|
||||
includes the following:
|
||||
* current settings
|
||||
* image and video assets
|
||||
* asset metadata (e.g. name, duration, play order, status),
|
||||
which is stored in a SQLite database
|
||||
"""),
|
||||
responses={
|
||||
201: {
|
||||
'type': 'string',
|
||||
'example': 'anthias-backup-2021-09-16T15-00-00.tar.gz',
|
||||
'description': 'Backup file name'
|
||||
}
|
||||
}
|
||||
)
|
||||
@authorized
|
||||
def post(self, request):
|
||||
filename = backup_helper.create_backup(name=settings['player_name'])
|
||||
return Response(filename, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
class RecoverView(APIView):
|
||||
@extend_schema(
|
||||
summary='Recover from backup',
|
||||
description=cleandoc("""
|
||||
Recover data from a backup file. The backup file must be
|
||||
a `.tar.gz` file.
|
||||
"""),
|
||||
request={
|
||||
'multipart/form-data': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'backup_upload': {
|
||||
'type': 'string',
|
||||
'format': 'binary'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
responses={
|
||||
200: {
|
||||
'type': 'string',
|
||||
'example': 'Recovery successful.',
|
||||
}
|
||||
},
|
||||
)
|
||||
@authorized
|
||||
def post(self, request):
|
||||
publisher = ZmqPublisher.get_instance()
|
||||
file_upload = (request.data.get('backup_upload'))
|
||||
filename = file_upload.name
|
||||
|
||||
if guess_type(filename)[0] != 'application/x-tar':
|
||||
raise Exception("Incorrect file extension.")
|
||||
try:
|
||||
publisher.send_to_viewer('stop')
|
||||
location = path.join("static", filename)
|
||||
|
||||
with open(location, 'wb') as f:
|
||||
f.write(file_upload.read())
|
||||
|
||||
backup_helper.recover(location)
|
||||
|
||||
return Response("Recovery successful.")
|
||||
finally:
|
||||
publisher.send_to_viewer('play')
|
||||
|
||||
|
||||
class AssetsControlView(APIView):
|
||||
@extend_schema(
|
||||
summary='Control asset playback',
|
||||
description=cleandoc("""
|
||||
Use any of the following commands to control asset playback:
|
||||
* `next` - Show the next asset
|
||||
* `previous` - Show the previous asset
|
||||
* `asset&{asset_id}` - Show the asset with the specified `asset_id`
|
||||
"""),
|
||||
responses={
|
||||
200: {
|
||||
'type': 'string',
|
||||
'example': 'Asset switched',
|
||||
}
|
||||
},
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name='command',
|
||||
location=OpenApiParameter.PATH,
|
||||
type=OpenApiTypes.STR,
|
||||
enum=['next', 'previous', 'asset&{asset_id}'],
|
||||
)
|
||||
]
|
||||
)
|
||||
@authorized
|
||||
def get(self, request, command):
|
||||
publisher = ZmqPublisher.get_instance()
|
||||
publisher.send_to_viewer(command)
|
||||
return Response("Asset switched")
|
||||
|
||||
|
||||
class InfoView(APIView):
|
||||
@extend_schema(
|
||||
summary='Get system information',
|
||||
responses={
|
||||
200: {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'viewlog': {'type': 'string'},
|
||||
'loadavg': {'type': 'number'},
|
||||
'free_space': {'type': 'string'},
|
||||
'display_power': {'type': 'string'},
|
||||
'up_to_date': {'type': 'boolean'}
|
||||
},
|
||||
'example': {
|
||||
'viewlog': 'Not yet implemented',
|
||||
'loadavg': 0.1,
|
||||
'free_space': '10G',
|
||||
'display_power': 'on',
|
||||
'up_to_date': True
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
@authorized
|
||||
def get(self, request):
|
||||
viewlog = "Not yet implemented"
|
||||
|
||||
# Calculate disk space
|
||||
slash = statvfs("/")
|
||||
free_space = size(slash.f_bavail * slash.f_frsize)
|
||||
display_power = r.get('display_power')
|
||||
|
||||
return Response({
|
||||
'viewlog': viewlog,
|
||||
'loadavg': diagnostics.get_load_avg()['15 min'],
|
||||
'free_space': free_space,
|
||||
'display_power': display_power,
|
||||
'up_to_date': is_up_to_date()
|
||||
})
|
||||
|
||||
|
||||
class RebootView(APIView):
|
||||
@extend_schema(summary='Reboot system')
|
||||
@authorized
|
||||
def post(self, request):
|
||||
reboot_anthias.apply_async()
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class ShutdownView(APIView):
|
||||
@extend_schema(summary='Shut down system')
|
||||
@authorized
|
||||
def post(self, request):
|
||||
shutdown_anthias.apply_async()
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class ViewerCurrentAssetView(APIView):
|
||||
@extend_schema(
|
||||
summary='Get current asset',
|
||||
description='Get the current asset being displayed on the screen',
|
||||
responses={200: AssetSerializer}
|
||||
)
|
||||
@authorized
|
||||
def get(self, request):
|
||||
collector = ZmqCollector.get_instance()
|
||||
|
||||
publisher = ZmqPublisher.get_instance()
|
||||
@@ -482,7 +480,7 @@ class ViewerCurrentAsset(Resource):
|
||||
current_asset_id = collector_result.get('current_asset_id')
|
||||
|
||||
if not current_asset_id:
|
||||
return []
|
||||
return Response([])
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.read(conn, current_asset_id)
|
||||
queryset = Asset.objects.get(asset_id=current_asset_id)
|
||||
return Response(AssetSerializer(queryset).data)
|
||||
|
||||
@@ -1,139 +1,103 @@
|
||||
from flask import request
|
||||
from flask_restful_swagger_2 import Resource, swagger
|
||||
from os import remove
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from api.helpers import (
|
||||
AssetModel,
|
||||
api_response,
|
||||
prepare_asset,
|
||||
from anthias_app.models import Asset
|
||||
from api.helpers import AssetCreationException, parse_request
|
||||
from api.serializers import (
|
||||
AssetSerializer,
|
||||
CreateAssetSerializerV1_1,
|
||||
UpdateAssetSerializer,
|
||||
)
|
||||
from lib import db, assets_helper
|
||||
from api.views.v1 import V1_ASSET_REQUEST
|
||||
from lib.auth import authorized
|
||||
from lib.utils import url_fails
|
||||
from os import remove
|
||||
from settings import settings
|
||||
|
||||
|
||||
class AssetsV1_1(Resource):
|
||||
method_decorators = [authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'List of assets',
|
||||
'schema': {
|
||||
'type': 'array',
|
||||
'items': AssetModel
|
||||
|
||||
}
|
||||
}
|
||||
class AssetListViewV1_1(APIView):
|
||||
@extend_schema(
|
||||
summary='List assets',
|
||||
responses={
|
||||
200: AssetSerializer(many=True)
|
||||
}
|
||||
})
|
||||
def get(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
return assets
|
||||
)
|
||||
@authorized
|
||||
def get(self, request):
|
||||
queryset = Asset.objects.all()
|
||||
serializer = AssetSerializer(queryset, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@api_response
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'in': 'body',
|
||||
'name': 'model',
|
||||
'description': 'Adds a asset',
|
||||
'schema': AssetModel,
|
||||
'required': True
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'201': {
|
||||
'description': 'Asset created',
|
||||
'schema': AssetModel
|
||||
}
|
||||
@extend_schema(
|
||||
summary='Create asset',
|
||||
request=V1_ASSET_REQUEST,
|
||||
responses={
|
||||
201: AssetSerializer
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
asset = prepare_asset(request, unique_name=True)
|
||||
if url_fails(asset['uri']):
|
||||
raise Exception("Could not retrieve file. Check the asset URL.")
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.create(conn, asset), 201
|
||||
)
|
||||
@authorized
|
||||
def post(self, request):
|
||||
data = parse_request(request)
|
||||
|
||||
try:
|
||||
serializer = CreateAssetSerializerV1_1(data=data, unique_name=True)
|
||||
if not serializer.is_valid():
|
||||
raise AssetCreationException(serializer.errors)
|
||||
except AssetCreationException as error:
|
||||
return Response(error.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
asset = Asset.objects.create(**serializer.data)
|
||||
|
||||
return Response(
|
||||
AssetSerializer(asset).data, status=status.HTTP_201_CREATED)
|
||||
|
||||
|
||||
class AssetV1_1(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset'
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset',
|
||||
'schema': AssetModel
|
||||
}
|
||||
class AssetViewV1_1(APIView):
|
||||
@extend_schema(
|
||||
summary='Get asset',
|
||||
responses={
|
||||
200: AssetSerializer,
|
||||
}
|
||||
})
|
||||
def get(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.read(conn, asset_id)
|
||||
)
|
||||
@authorized
|
||||
def get(self, request, asset_id):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
return Response(AssetSerializer(asset).data)
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset',
|
||||
'required': True
|
||||
},
|
||||
{
|
||||
'in': 'body',
|
||||
'name': 'model',
|
||||
'description': 'Adds an asset',
|
||||
'schema': AssetModel,
|
||||
'required': True
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset updated',
|
||||
'schema': AssetModel
|
||||
}
|
||||
@extend_schema(
|
||||
summary='Update asset',
|
||||
request=V1_ASSET_REQUEST,
|
||||
responses={
|
||||
200: AssetSerializer
|
||||
}
|
||||
})
|
||||
def put(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.update(conn, asset_id, prepare_asset(request))
|
||||
)
|
||||
@authorized
|
||||
def put(self, request, asset_id):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset',
|
||||
'required': True
|
||||
data = parse_request(request)
|
||||
serializer = UpdateAssetSerializer(asset, data=data, partial=False)
|
||||
|
||||
},
|
||||
],
|
||||
'responses': {
|
||||
'204': {
|
||||
'description': 'Deleted'
|
||||
}
|
||||
}
|
||||
})
|
||||
def delete(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
asset = assets_helper.read(conn, asset_id)
|
||||
try:
|
||||
if asset['uri'].startswith(settings['assetdir']):
|
||||
remove(asset['uri'])
|
||||
except OSError:
|
||||
pass
|
||||
assets_helper.delete(conn, asset_id)
|
||||
return '', 204 # return an OK with no content
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
else:
|
||||
return Response(
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
asset.refresh_from_db()
|
||||
return Response(AssetSerializer(asset).data)
|
||||
|
||||
@extend_schema(summary='Delete asset')
|
||||
@authorized
|
||||
def delete(self, request, asset_id):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
|
||||
try:
|
||||
if asset.uri.startswith(settings['assetdir']):
|
||||
remove(asset.uri)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
asset.delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -1,220 +1,143 @@
|
||||
import json
|
||||
|
||||
from flask import request
|
||||
from flask_restful_swagger_2 import Resource, swagger
|
||||
from os import remove
|
||||
from werkzeug.wrappers import Request
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from anthias_app.models import Asset
|
||||
from api.helpers import (
|
||||
AssetModel,
|
||||
AssetPropertiesModel,
|
||||
AssetRequestModel,
|
||||
api_response,
|
||||
prepare_asset_v1_2,
|
||||
update_asset,
|
||||
AssetCreationException,
|
||||
get_active_asset_ids,
|
||||
save_active_assets_ordering,
|
||||
)
|
||||
from api.serializers import (
|
||||
AssetSerializer,
|
||||
CreateAssetSerializerV1_2,
|
||||
UpdateAssetSerializer,
|
||||
)
|
||||
from lib import db, assets_helper
|
||||
from lib.auth import authorized
|
||||
from lib.utils import url_fails
|
||||
from os import remove
|
||||
from settings import settings
|
||||
|
||||
|
||||
class AssetsV1_2(Resource):
|
||||
method_decorators = [authorized]
|
||||
class AssetListViewV1_2(APIView):
|
||||
serializer_class = AssetSerializer
|
||||
|
||||
@swagger.doc({
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'List of assets',
|
||||
'schema': {
|
||||
'type': 'array',
|
||||
'items': AssetModel
|
||||
}
|
||||
}
|
||||
@extend_schema(
|
||||
summary='List assets',
|
||||
responses={
|
||||
200: AssetSerializer(many=True)
|
||||
}
|
||||
})
|
||||
def get(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.read(conn)
|
||||
)
|
||||
@authorized
|
||||
def get(self, request):
|
||||
queryset = Asset.objects.all()
|
||||
serializer = self.serializer_class(queryset, many=True)
|
||||
return Response(serializer.data)
|
||||
|
||||
@api_response
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'in': 'body',
|
||||
'name': 'model',
|
||||
'description': 'Adds an asset',
|
||||
'schema': AssetRequestModel,
|
||||
'required': True
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'201': {
|
||||
'description': 'Asset created',
|
||||
'schema': AssetModel
|
||||
}
|
||||
@extend_schema(
|
||||
summary='Create asset',
|
||||
request=CreateAssetSerializerV1_2,
|
||||
responses={
|
||||
201: AssetSerializer
|
||||
}
|
||||
})
|
||||
def post(self):
|
||||
request_environ = Request(request.environ)
|
||||
asset = prepare_asset_v1_2(request_environ, unique_name=True)
|
||||
if not asset['skip_asset_check'] and url_fails(asset['uri']):
|
||||
raise Exception("Could not retrieve file. Check the asset URL.")
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
ids_of_active_assets = [
|
||||
x['asset_id'] for x in assets if x['is_active']]
|
||||
)
|
||||
@authorized
|
||||
def post(self, request):
|
||||
try:
|
||||
serializer = CreateAssetSerializerV1_2(
|
||||
data=request.data, unique_name=True)
|
||||
|
||||
asset = assets_helper.create(conn, asset)
|
||||
if not serializer.is_valid():
|
||||
raise AssetCreationException(serializer.errors)
|
||||
except AssetCreationException as error:
|
||||
return Response(error.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
if asset['is_active']:
|
||||
ids_of_active_assets.insert(
|
||||
asset['play_order'], asset['asset_id'])
|
||||
assets_helper.save_ordering(conn, ids_of_active_assets)
|
||||
return assets_helper.read(conn, asset['asset_id']), 201
|
||||
active_asset_ids = get_active_asset_ids()
|
||||
asset = Asset.objects.create(**serializer.data)
|
||||
|
||||
if asset.is_active():
|
||||
active_asset_ids.insert(asset.play_order, asset.asset_id)
|
||||
|
||||
save_active_assets_ordering(active_asset_ids)
|
||||
asset.refresh_from_db()
|
||||
|
||||
return Response(
|
||||
AssetSerializer(asset).data,
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
|
||||
|
||||
|
||||
class AssetV1_2(Resource):
|
||||
method_decorators = [api_response, authorized]
|
||||
class AssetViewV1_2(APIView):
|
||||
serializer_class = AssetSerializer
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset'
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset',
|
||||
'schema': AssetModel
|
||||
}
|
||||
@extend_schema(summary='Get asset')
|
||||
@authorized
|
||||
def get(self, request, asset_id):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
serializer = self.serializer_class(asset)
|
||||
return Response(serializer.data)
|
||||
|
||||
def update(self, request, asset_id, partial=False):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
serializer = UpdateAssetSerializer(
|
||||
asset, data=request.data, partial=partial)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
else:
|
||||
return Response(
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
active_asset_ids = get_active_asset_ids()
|
||||
|
||||
asset.refresh_from_db()
|
||||
|
||||
try:
|
||||
active_asset_ids.remove(asset.asset_id)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if asset.is_active():
|
||||
active_asset_ids.insert(asset.play_order, asset.asset_id)
|
||||
|
||||
save_active_assets_ordering(active_asset_ids)
|
||||
asset.refresh_from_db()
|
||||
|
||||
return Response(AssetSerializer(asset).data)
|
||||
|
||||
@extend_schema(
|
||||
summary='Update asset',
|
||||
request=UpdateAssetSerializer,
|
||||
responses={
|
||||
200: AssetSerializer
|
||||
}
|
||||
})
|
||||
def get(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
return assets_helper.read(conn, asset_id)
|
||||
)
|
||||
@authorized
|
||||
def patch(self, request, asset_id):
|
||||
return self.update(request, asset_id, partial=True)
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'ID of an asset',
|
||||
'required': True
|
||||
},
|
||||
{
|
||||
'in': 'body',
|
||||
'name': 'properties',
|
||||
'description': 'Properties of an asset',
|
||||
'schema': AssetPropertiesModel,
|
||||
'required': True
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset updated',
|
||||
'schema': AssetModel
|
||||
}
|
||||
@extend_schema(
|
||||
summary='Update asset',
|
||||
request=UpdateAssetSerializer,
|
||||
responses={
|
||||
200: AssetSerializer
|
||||
}
|
||||
})
|
||||
def patch(self, asset_id):
|
||||
data = json.loads(request.data)
|
||||
with db.conn(settings['database']) as conn:
|
||||
)
|
||||
@authorized
|
||||
def put(self, request, asset_id):
|
||||
return self.update(request, asset_id, partial=False)
|
||||
|
||||
asset = assets_helper.read(conn, asset_id)
|
||||
if not asset:
|
||||
raise Exception('Asset not found.')
|
||||
update_asset(asset, data)
|
||||
@extend_schema(summary='Delete asset')
|
||||
@authorized
|
||||
def delete(self, request, asset_id):
|
||||
asset = Asset.objects.get(asset_id=asset_id)
|
||||
|
||||
assets = assets_helper.read(conn)
|
||||
ids_of_active_assets = [
|
||||
x['asset_id'] for x in assets if x['is_active']]
|
||||
try:
|
||||
if asset.uri.startswith(settings['assetdir']):
|
||||
remove(asset.uri)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
asset = assets_helper.update(conn, asset_id, asset)
|
||||
asset.delete()
|
||||
|
||||
try:
|
||||
ids_of_active_assets.remove(asset['asset_id'])
|
||||
except ValueError:
|
||||
pass
|
||||
if asset['is_active']:
|
||||
ids_of_active_assets.insert(
|
||||
asset['play_order'], asset['asset_id'])
|
||||
|
||||
assets_helper.save_ordering(conn, ids_of_active_assets)
|
||||
return assets_helper.read(conn, asset_id)
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset',
|
||||
'required': True
|
||||
},
|
||||
{
|
||||
'in': 'body',
|
||||
'name': 'model',
|
||||
'description': 'Adds an asset',
|
||||
'schema': AssetRequestModel,
|
||||
'required': True
|
||||
}
|
||||
],
|
||||
'responses': {
|
||||
'200': {
|
||||
'description': 'Asset updated',
|
||||
'schema': AssetModel
|
||||
}
|
||||
}
|
||||
})
|
||||
def put(self, asset_id):
|
||||
asset = prepare_asset_v1_2(request, asset_id)
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
ids_of_active_assets = [
|
||||
x['asset_id'] for x in assets if x['is_active']]
|
||||
|
||||
asset = assets_helper.update(conn, asset_id, asset)
|
||||
|
||||
try:
|
||||
ids_of_active_assets.remove(asset['asset_id'])
|
||||
except ValueError:
|
||||
pass
|
||||
if asset['is_active']:
|
||||
ids_of_active_assets.insert(
|
||||
asset['play_order'], asset['asset_id'])
|
||||
|
||||
assets_helper.save_ordering(conn, ids_of_active_assets)
|
||||
return assets_helper.read(conn, asset_id)
|
||||
|
||||
@swagger.doc({
|
||||
'parameters': [
|
||||
{
|
||||
'name': 'asset_id',
|
||||
'type': 'string',
|
||||
'in': 'path',
|
||||
'description': 'id of an asset',
|
||||
'required': True
|
||||
|
||||
},
|
||||
],
|
||||
'responses': {
|
||||
'204': {
|
||||
'description': 'Deleted'
|
||||
}
|
||||
}
|
||||
})
|
||||
def delete(self, asset_id):
|
||||
with db.conn(settings['database']) as conn:
|
||||
asset = assets_helper.read(conn, asset_id)
|
||||
try:
|
||||
if asset['uri'].startswith(settings['assetdir']):
|
||||
remove(asset['uri'])
|
||||
except OSError:
|
||||
pass
|
||||
assets_helper.delete(conn, asset_id)
|
||||
return '', 204 # return an OK with no content
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -11,6 +11,7 @@ export GIT_SHORT_HASH=$(git rev-parse --short HEAD)
|
||||
export GIT_HASH=$(git rev-parse HEAD)
|
||||
export BASE_IMAGE_TAG=bookworm
|
||||
export DEBIAN_VERSION=bookworm
|
||||
export ENVIRONMENT=${ENVIRONMENT:-production}
|
||||
|
||||
declare -a SERVICES=(
|
||||
server
|
||||
|
||||
191
bin/migrate.py
191
bin/migrate.py
@@ -1,191 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf8 -*-
|
||||
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
import sqlite3
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
import datetime
|
||||
|
||||
configdir = os.path.join(os.getenv('HOME'), '.screenly/')
|
||||
database = os.path.join(configdir, 'screenly.db')
|
||||
|
||||
comma = ','.join
|
||||
|
||||
|
||||
def quest(values):
|
||||
return '=?,'.join(values) + '=?'
|
||||
|
||||
|
||||
def query_read_all(keys):
|
||||
return 'SELECT ' + comma(keys) + ' FROM assets ORDER BY name'
|
||||
|
||||
|
||||
def query_update(keys):
|
||||
return 'UPDATE assets SET ' + quest(keys) + ' WHERE asset_id=?'
|
||||
|
||||
|
||||
def mkdict(keys):
|
||||
return (lambda row: dict([(keys[ki], v) for ki, v in enumerate(row)]))
|
||||
|
||||
|
||||
def is_active(asset):
|
||||
if asset['start_date'] and asset['end_date']:
|
||||
at = datetime.datetime.utcnow()
|
||||
return asset['start_date'] < at and asset['end_date'] > at
|
||||
return False
|
||||
|
||||
|
||||
def read(c):
|
||||
keys = 'asset_id start_date end_date is_enabled'.split(' ')
|
||||
c.execute(query_read_all(keys))
|
||||
mk = mkdict(keys)
|
||||
assets = [mk(asset) for asset in c.fetchall()]
|
||||
return assets
|
||||
|
||||
|
||||
def update(c, asset_id, asset):
|
||||
del asset['asset_id']
|
||||
c.execute(query_update(list(asset.keys())), list(asset.values()) + [asset_id])
|
||||
|
||||
|
||||
def test_column(col, cursor):
|
||||
"""Test if a column is in the db"""
|
||||
try:
|
||||
cursor.execute('SELECT ' + col + ' FROM assets')
|
||||
except sqlite3.OperationalError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
@contextmanager
|
||||
def open_db_get_cursor():
|
||||
with sqlite3.connect(database, detect_types=sqlite3.PARSE_DECLTYPES) as conn:
|
||||
cursor = conn.cursor()
|
||||
yield (cursor, conn)
|
||||
cursor.close()
|
||||
|
||||
|
||||
query_add_play_order = """
|
||||
begin transaction;
|
||||
alter table assets add play_order integer default 0;
|
||||
commit;
|
||||
"""
|
||||
|
||||
query_add_is_processing = """
|
||||
begin transaction;
|
||||
alter table assets add is_processing integer default 0;
|
||||
commit;
|
||||
"""
|
||||
|
||||
query_add_skip_asset_check = """
|
||||
begin transaction;
|
||||
alter table assets add skip_asset_check integer default 0;
|
||||
commit;
|
||||
"""
|
||||
|
||||
|
||||
def migrate_add_column(col, script):
|
||||
with open_db_get_cursor() as (cursor, conn):
|
||||
if test_column(col, cursor):
|
||||
print(f'Column ({col}) already present')
|
||||
else:
|
||||
print(f'Adding new column ({col})')
|
||||
cursor.executescript(script)
|
||||
assets = read(cursor)
|
||||
for asset in assets:
|
||||
asset.update({'play_order': 0})
|
||||
update(cursor, asset['asset_id'], asset)
|
||||
conn.commit()
|
||||
|
||||
|
||||
query_create_assets_table = """
|
||||
create table assets(
|
||||
asset_id text primary key,
|
||||
name text,
|
||||
uri text,
|
||||
md5 text,
|
||||
start_date timestamp,
|
||||
end_date timestamp,
|
||||
duration text,
|
||||
mimetype text,
|
||||
is_enabled integer default 0,
|
||||
nocache integer default 0)"""
|
||||
query_make_asset_id_primary_key = """
|
||||
begin transaction;
|
||||
create table temp as select asset_id,name,uri,md5,start_date,end_date,duration,mimetype,is_enabled,nocache from assets;
|
||||
drop table assets;
|
||||
""" + query_create_assets_table + """;
|
||||
insert or ignore into assets select * from temp;
|
||||
drop table temp;
|
||||
commit;"""
|
||||
|
||||
|
||||
def migrate_make_asset_id_primary_key():
|
||||
has_primary_key = False
|
||||
with open_db_get_cursor() as (cursor, _):
|
||||
table_info = cursor.execute('pragma table_info(assets)')
|
||||
has_primary_key = table_info.fetchone()[-1] == 1
|
||||
if has_primary_key:
|
||||
print('already has primary key')
|
||||
else:
|
||||
with open_db_get_cursor() as (cursor, _):
|
||||
cursor.executescript(query_make_asset_id_primary_key)
|
||||
print('asset_id is primary key')
|
||||
|
||||
|
||||
query_add_is_enabled_and_nocache = """
|
||||
begin transaction;
|
||||
alter table assets add is_enabled integer default 0;
|
||||
alter table assets add nocache integer default 0;
|
||||
commit;
|
||||
"""
|
||||
|
||||
|
||||
def migrate_add_is_enabled_and_nocache():
|
||||
with open_db_get_cursor() as (cursor, conn):
|
||||
col = 'is_enabled,nocache'
|
||||
if test_column(col, cursor):
|
||||
print(f'Column ({col}) already present')
|
||||
else:
|
||||
cursor.executescript(query_add_is_enabled_and_nocache)
|
||||
assets = read(cursor)
|
||||
for asset in assets:
|
||||
asset.update({'is_enabled': is_active(asset)})
|
||||
update(cursor, asset['asset_id'], asset)
|
||||
conn.commit()
|
||||
print(f'Added new columns ({col})')
|
||||
|
||||
|
||||
query_drop_filename = """BEGIN TRANSACTION;
|
||||
CREATE TEMPORARY TABLE assets_backup(asset_id, name, uri, md5, start_date, end_date, duration, mimetype);
|
||||
INSERT INTO assets_backup SELECT asset_id, name, uri, md5, start_date, end_date, duration, mimetype FROM assets;
|
||||
DROP TABLE assets;
|
||||
CREATE TABLE assets(asset_id TEXT, name TEXT, uri TEXT, md5 TEXT, start_date TIMESTAMP, end_date TIMESTAMP, duration TEXT, mimetype TEXT);
|
||||
INSERT INTO assets SELECT asset_id, name, uri, md5, start_date, end_date, duration, mimetype FROM assets_backup;
|
||||
DROP TABLE assets_backup;
|
||||
COMMIT;
|
||||
"""
|
||||
|
||||
|
||||
def migrate_drop_filename():
|
||||
with open_db_get_cursor() as (cursor, _):
|
||||
col = 'filename'
|
||||
if test_column(col, cursor):
|
||||
cursor.executescript(query_drop_filename)
|
||||
print(f'Dropped obsolete column ({col})')
|
||||
else:
|
||||
print(f'Obsolete column ({col}) is not present')
|
||||
# ✂--------
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
migrate_drop_filename()
|
||||
migrate_add_is_enabled_and_nocache()
|
||||
migrate_make_asset_id_primary_key()
|
||||
migrate_add_column('play_order', query_add_play_order)
|
||||
migrate_add_column('is_processing', query_add_is_processing)
|
||||
migrate_add_column('skip_asset_check', query_add_skip_asset_check)
|
||||
print("Migration done.")
|
||||
@@ -47,6 +47,10 @@ EOF
|
||||
|
||||
if [ "$START_SERVER" = true ]; then
|
||||
cd /usr/src/app
|
||||
python server.py &
|
||||
|
||||
./manage.py makemigrations
|
||||
./manage.py migrate --fake-initial
|
||||
./manage.py runserver 127.0.0.1:8080 &
|
||||
|
||||
sleep 3
|
||||
fi
|
||||
|
||||
@@ -5,17 +5,32 @@ ENVIRONMENT=${ENVIRONMENT:-production}
|
||||
mkdir -p \
|
||||
/data/.config \
|
||||
/data/.screenly \
|
||||
/data/.screenly/backups \
|
||||
/data/screenly_assets
|
||||
|
||||
cp -n /usr/src/app/ansible/roles/screenly/files/screenly.conf /data/.screenly/screenly.conf
|
||||
cp -n /usr/src/app/ansible/roles/screenly/files/default_assets.yml /data/.screenly/default_assets.yml
|
||||
cp -n /usr/src/app/ansible/roles/screenly/files/screenly.db /data/.screenly/screenly.db
|
||||
|
||||
echo "Running migration..."
|
||||
python ./bin/migrate.py
|
||||
|
||||
# The following block ensures that the migration is transactional and that the
|
||||
# database is not left in an inconsistent state if the migration fails.
|
||||
|
||||
if [ -f /data/.screenly/screenly.db ]; then
|
||||
./manage.py dbbackup --noinput --clean && \
|
||||
./manage.py migrate --fake-initial --noinput || \
|
||||
./manage.py dbrestore --noinput
|
||||
else
|
||||
./manage.py migrate && \
|
||||
./manage.py dbbackup --noinput --clean
|
||||
fi
|
||||
|
||||
if [[ "$ENVIRONMENT" == "development" ]]; then
|
||||
flask --app server.py run --debug --reload --host 0.0.0.0 --port 8080
|
||||
echo "Starting Django development server..."
|
||||
./manage.py runserver 0.0.0.0:8080
|
||||
else
|
||||
python server.py
|
||||
echo "Generating Django static files..."
|
||||
./manage.py collectstatic --clear --noinput
|
||||
echo "Starting Gunicorn..."
|
||||
python run_gunicorn.py
|
||||
fi
|
||||
|
||||
28
celery_tasks.py
Normal file → Executable file
28
celery_tasks.py
Normal file → Executable file
@@ -1,17 +1,31 @@
|
||||
import django
|
||||
import sh
|
||||
|
||||
from celery import Celery
|
||||
from datetime import timedelta
|
||||
from lib import diagnostics
|
||||
from lib.utils import (
|
||||
connect_to_redis,
|
||||
is_balena_app,
|
||||
reboot_via_balena_supervisor,
|
||||
shutdown_via_balena_supervisor,
|
||||
)
|
||||
from os import getenv, path
|
||||
from tenacity import Retrying, stop_after_attempt, wait_fixed
|
||||
|
||||
try:
|
||||
django.setup()
|
||||
|
||||
# Place imports that uses Django in this block.
|
||||
|
||||
from lib import diagnostics
|
||||
from lib.utils import (
|
||||
connect_to_redis,
|
||||
is_balena_app,
|
||||
reboot_via_balena_supervisor,
|
||||
shutdown_via_balena_supervisor,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
__author__ = "Screenly, Inc"
|
||||
__copyright__ = "Copyright 2012-2024, Screenly, Inc"
|
||||
__license__ = "Dual License: GPLv2 and Commercial License"
|
||||
|
||||
|
||||
CELERY_RESULT_BACKEND = getenv(
|
||||
'CELERY_RESULT_BACKEND', 'redis://localhost:6379/0')
|
||||
|
||||
@@ -13,7 +13,7 @@ services:
|
||||
- ENVIRONMENT=development
|
||||
restart: always
|
||||
volumes:
|
||||
- resin-data:/data
|
||||
- anthias-data:/data
|
||||
- ./:/usr/src/app/
|
||||
|
||||
anthias-websocket:
|
||||
@@ -27,7 +27,7 @@ services:
|
||||
- LISTEN=0.0.0.0
|
||||
restart: always
|
||||
volumes:
|
||||
- resin-data:/data
|
||||
- anthias-data:/data
|
||||
|
||||
anthias-celery:
|
||||
build:
|
||||
@@ -42,7 +42,7 @@ services:
|
||||
- CELERY_RESULT_BACKEND=redis://redis:6379/0
|
||||
restart: always
|
||||
volumes:
|
||||
- resin-data:/data
|
||||
- anthias-data:/data
|
||||
|
||||
redis:
|
||||
platform: "linux/amd64"
|
||||
@@ -61,8 +61,8 @@ services:
|
||||
- anthias-websocket
|
||||
restart: always
|
||||
volumes:
|
||||
- resin-data:/data:ro
|
||||
- anthias-data:/data:ro
|
||||
|
||||
volumes:
|
||||
resin-data:
|
||||
anthias-data:
|
||||
redis-data:
|
||||
|
||||
@@ -14,7 +14,7 @@ services:
|
||||
tty: true
|
||||
volumes:
|
||||
- .:/usr/src/app
|
||||
- resin-data:/data
|
||||
- anthias-data:/data
|
||||
|
||||
anthias-celery:
|
||||
build:
|
||||
@@ -29,11 +29,11 @@ services:
|
||||
- CELERY_RESULT_BACKEND=redis://redis:6379/0
|
||||
restart: always
|
||||
volumes:
|
||||
- resin-data:/data
|
||||
- anthias-data:/data
|
||||
|
||||
redis:
|
||||
image: redis:alpine
|
||||
|
||||
volumes:
|
||||
resin-data:
|
||||
anthias-data:
|
||||
redis-data:
|
||||
|
||||
@@ -39,7 +39,7 @@ services:
|
||||
- resin-data:/data
|
||||
- /home/${USER}/.screenly:/data/.screenly
|
||||
- /home/${USER}/screenly_assets:/data/screenly_assets
|
||||
- /home/${USER}/screenly/static:/data/screenly/static
|
||||
- /home/${USER}/screenly/staticfiles:/data/screenly/staticfiles
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
labels:
|
||||
@@ -142,7 +142,7 @@ services:
|
||||
- resin-data:/data:ro
|
||||
- /home/${USER}/.screenly:/data/.screenly:ro
|
||||
- /home/${USER}/screenly_assets:/data/screenly_assets:ro
|
||||
- /home/${USER}/screenly/static:/data/screenly/static:ro
|
||||
- /home/${USER}/screenly/staticfiles:/data/screenly/staticfiles:ro
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ COPY . /usr/src/app/
|
||||
ENV GIT_HASH=$GIT_HASH
|
||||
ENV GIT_SHORT_HASH=$GIT_SHORT_HASH
|
||||
ENV GIT_BRANCH=$GIT_BRANCH
|
||||
ENV DJANGO_SETTINGS_MODULE="anthias_django.settings"
|
||||
|
||||
CMD celery -A celery_tasks.celery worker \
|
||||
-B -n worker@anthias \
|
||||
|
||||
@@ -12,7 +12,7 @@ ENV GIT_HASH=$GIT_HASH
|
||||
ENV GIT_SHORT_HASH=$GIT_SHORT_HASH
|
||||
ENV GIT_BRANCH=$GIT_BRANCH
|
||||
|
||||
COPY docker/nginx/nginx.conf /etc/nginx/sites-enabled/anthias.conf
|
||||
COPY docker/nginx/nginx.$ENVIRONMENT.conf /etc/nginx/sites-enabled/
|
||||
RUN rm -f /etc/nginx/sites-enabled/default
|
||||
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
|
||||
@@ -53,8 +53,8 @@ COPY . /usr/src/app
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
RUN mkdir -p /data/.screenly /data/screenly_assets
|
||||
RUN cp ansible/roles/screenly/files/screenly.db \
|
||||
ansible/roles/screenly/files/screenly.conf /data/.screenly
|
||||
RUN cp ansible/roles/screenly/files/screenly.conf \
|
||||
/data/.screenly
|
||||
|
||||
ENV GIT_HASH=$GIT_HASH
|
||||
ENV GIT_SHORT_HASH=$GIT_SHORT_HASH
|
||||
|
||||
@@ -166,6 +166,7 @@ ENV GIT_HASH=$GIT_HASH
|
||||
ENV GIT_SHORT_HASH=$GIT_SHORT_HASH
|
||||
ENV GIT_BRANCH=$GIT_BRANCH
|
||||
ENV DEVICE_TYPE=$BOARD
|
||||
ENV DJANGO_SETTINGS_MODULE="anthias_django.settings"
|
||||
|
||||
RUN useradd -g video viewer
|
||||
|
||||
|
||||
77
docker/nginx/nginx.development.conf
Normal file
77
docker/nginx/nginx.development.conf
Normal file
@@ -0,0 +1,77 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
upstream anthias {
|
||||
server anthias-server:8080;
|
||||
}
|
||||
|
||||
upstream websocket {
|
||||
server anthias-websocket:9999;
|
||||
}
|
||||
|
||||
server {
|
||||
server_tokens off;
|
||||
listen 80 default_server;
|
||||
listen [::]:80 default_server;
|
||||
|
||||
location / {
|
||||
|
||||
proxy_pass http://anthias;
|
||||
|
||||
client_max_body_size 4G;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host anthias-server;
|
||||
}
|
||||
|
||||
location ~ ^/api/[0-9a-z]+/backup$ {
|
||||
proxy_pass http://anthias;
|
||||
proxy_connect_timeout 1800;
|
||||
proxy_send_timeout 1800;
|
||||
proxy_read_timeout 1800;
|
||||
send_timeout 1800;
|
||||
|
||||
client_max_body_size 4G;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host anthias-server;
|
||||
}
|
||||
|
||||
location /static {
|
||||
proxy_pass http://anthias/static;
|
||||
}
|
||||
|
||||
location /ws {
|
||||
proxy_pass http://websocket;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
}
|
||||
|
||||
location /screenly_assets {
|
||||
allow 172.16.0.0/12;
|
||||
deny all;
|
||||
|
||||
alias /data/screenly_assets;
|
||||
}
|
||||
|
||||
location /static_with_mime {
|
||||
allow 10.0.0.0/8;
|
||||
allow 172.16.0.0/12;
|
||||
allow 192.168.0.0/16;
|
||||
deny all;
|
||||
|
||||
alias /data/screenly/staticfiles;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
# Only allow from localhost and Docker's CIDR
|
||||
allow 172.16.0.0/12;
|
||||
allow 172.0.0.1;
|
||||
deny all;
|
||||
|
||||
server_name *.ngrok.io;
|
||||
listen 80;
|
||||
root /data/screenly_assets;
|
||||
try_files $uri /data/screenly_assets$uri;
|
||||
}
|
||||
@@ -4,10 +4,6 @@ upstream anthias {
|
||||
server anthias-server:8080;
|
||||
}
|
||||
|
||||
upstream wifi-connect {
|
||||
server 192.168.42.1:9090;
|
||||
}
|
||||
|
||||
upstream websocket {
|
||||
server anthias-websocket:9999;
|
||||
}
|
||||
@@ -18,21 +14,12 @@ server {
|
||||
listen [::]:80 default_server;
|
||||
|
||||
location / {
|
||||
|
||||
# Temporarily disables wifi-connect
|
||||
proxy_pass http://anthias;
|
||||
|
||||
#if (-f /data/.screenly/initialized) {
|
||||
# proxy_pass http://anthias;
|
||||
#}
|
||||
#if (!-f /data/.screenly/initialized) {
|
||||
# proxy_pass http://wifi-connect;
|
||||
#}
|
||||
|
||||
client_max_body_size 4G;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Host anthias-server;
|
||||
}
|
||||
|
||||
location ~ ^/api/[0-9a-z]+/backup$ {
|
||||
@@ -45,11 +32,11 @@ server {
|
||||
client_max_body_size 4G;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Host anthias-server;
|
||||
}
|
||||
|
||||
location /static {
|
||||
proxy_pass http://anthias/static;
|
||||
location /static/ {
|
||||
alias /data/screenly/staticfiles/;
|
||||
}
|
||||
|
||||
location /ws {
|
||||
@@ -83,6 +70,6 @@ server {
|
||||
allow 192.168.0.0/16;
|
||||
deny all;
|
||||
|
||||
alias /data/screenly/static;
|
||||
alias /data/screenly/staticfiles;
|
||||
}
|
||||
}
|
||||
@@ -78,10 +78,18 @@ Run the unit tests.
|
||||
```bash
|
||||
$ docker compose \
|
||||
-f docker-compose.test.yml \
|
||||
exec -T anthias-test bash ./bin/prepare_test_environment.sh -s
|
||||
exec anthias-test bash ./bin/prepare_test_environment.sh -s
|
||||
|
||||
# Integration and non-integration tests should be run separately as the
|
||||
# former doesn't run as expected when run together with the latter.
|
||||
|
||||
$ docker compose \
|
||||
-f docker-compose.test.yml \
|
||||
exec -T anthias-test nose2 -v
|
||||
exec anthias-test ./manage.py test --exclude-tag=integration
|
||||
|
||||
$ docker compose \
|
||||
-f docker-compose.test.yml \
|
||||
exec anthias-test ./manage.py test --tag=integration
|
||||
```
|
||||
|
||||
### The QA checklist
|
||||
|
||||
@@ -1,163 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from builtins import filter
|
||||
from . import db
|
||||
from . import queries
|
||||
import datetime
|
||||
|
||||
FIELDS = [
|
||||
"asset_id", "name", "uri", "start_date", "end_date", "duration",
|
||||
"mimetype", "is_enabled", "is_processing", "nocache", "play_order",
|
||||
"skip_asset_check",
|
||||
]
|
||||
|
||||
create_assets_table = 'CREATE TABLE assets(asset_id text primary key, name text, uri text, md5 text, start_date timestamp, end_date timestamp, duration text, mimetype text, is_enabled integer default 0, is_processing integer default 0, nocache integer default 0, play_order integer default 0, skip_asset_check integer default 0)' # noqa: E501
|
||||
|
||||
|
||||
# Note all times are naive for legacy reasons but always UTC.
|
||||
get_time = datetime.datetime.utcnow
|
||||
|
||||
|
||||
def is_active(asset, at_time=None):
|
||||
"""Accepts an asset dictionary and determines if it
|
||||
is active at the given time. If no time is specified, 'now' is used.
|
||||
|
||||
>>> asset = {'asset_id': u'4c8dbce552edb5812d3a866cfe5f159d', 'mimetype': u'web', 'name': u'WireLoad', 'end_date': datetime.datetime(2013, 1, 19, 23, 59), 'uri': u'http://www.wireload.net', 'duration': u'5', 'is_enabled': True, 'nocache': 0, 'play_order': 1, 'start_date': datetime.datetime(2013, 1, 16, 0, 0), 'skip_asset_check': 0};
|
||||
>>> is_active(asset, datetime.datetime(2013, 1, 16, 12, 00))
|
||||
True
|
||||
>>> is_active(asset, datetime.datetime(2014, 1, 1))
|
||||
False
|
||||
|
||||
>>> asset['is_enabled'] = False
|
||||
>>> is_active(asset, datetime.datetime(2013, 1, 16, 12, 00))
|
||||
False
|
||||
|
||||
""" # noqa: E501
|
||||
|
||||
if asset['is_enabled'] and asset['start_date'] and asset['end_date']:
|
||||
at = at_time or get_time()
|
||||
return 1 if asset['start_date'] < at < asset['end_date'] else 0
|
||||
return 0
|
||||
|
||||
|
||||
def get_names_of_assets(conn):
|
||||
with db.cursor(conn) as c:
|
||||
c.execute(queries.read_all(['name', ]))
|
||||
return [asset[0] for asset in c.fetchall()]
|
||||
|
||||
|
||||
def get_playlist(conn):
|
||||
"""Returns all currently active assets."""
|
||||
return list(filter(is_active, read(conn)))
|
||||
|
||||
|
||||
def mkdict(keys):
|
||||
"""Returns a function that creates a dict from a database record."""
|
||||
return lambda row: dict([(keys[ki], v) for ki, v in enumerate(row)])
|
||||
|
||||
|
||||
def create(conn, asset):
|
||||
"""
|
||||
Create a database record for an asset.
|
||||
Returns the asset.
|
||||
Asset's is_active field is updated before returning.
|
||||
"""
|
||||
if 'is_active' in asset:
|
||||
asset.pop('is_active')
|
||||
with db.commit(conn) as c:
|
||||
c.execute(queries.create(list(asset.keys())), list(asset.values()))
|
||||
asset.update({'is_active': is_active(asset)})
|
||||
return asset
|
||||
|
||||
|
||||
def create_multiple(conn, assets):
|
||||
"""
|
||||
Create a database record for each asset.
|
||||
Returns asset list.
|
||||
Asset's is_active field is updated before returning.
|
||||
"""
|
||||
|
||||
with db.commit(conn) as c:
|
||||
for asset in assets:
|
||||
if 'is_active' in asset:
|
||||
asset.pop('is_active')
|
||||
|
||||
c.execute(queries.create(list(asset.keys())), list(asset.values()))
|
||||
|
||||
asset.update({'is_active': is_active(asset)})
|
||||
|
||||
return assets
|
||||
|
||||
|
||||
def read(conn, asset_id=None, keys=FIELDS):
|
||||
"""
|
||||
Fetch one or more assets from the database.
|
||||
Returns a list of dicts or one dict.
|
||||
Assets' is_active field is updated before returning.
|
||||
"""
|
||||
assets = []
|
||||
mk = mkdict(keys)
|
||||
with db.cursor(conn) as c:
|
||||
if asset_id is None:
|
||||
c.execute(queries.read_all(keys))
|
||||
else:
|
||||
c.execute(queries.read(keys), [asset_id])
|
||||
assets = [mk(asset) for asset in c.fetchall()]
|
||||
[asset.update({'is_active': is_active(asset)}) for asset in assets]
|
||||
if asset_id and len(assets):
|
||||
return assets[0]
|
||||
return assets
|
||||
|
||||
|
||||
def update(conn, asset_id, asset):
|
||||
"""
|
||||
Update an asset in the database.
|
||||
Returns the asset.
|
||||
Asset's asset_id and is_active field is updated before returning.
|
||||
"""
|
||||
if asset.get('asset_id'):
|
||||
del asset['asset_id']
|
||||
if 'is_active' in asset:
|
||||
asset.pop('is_active')
|
||||
with db.commit(conn) as c:
|
||||
c.execute(
|
||||
queries.update(list(asset.keys())),
|
||||
list(asset.values()) + [asset_id],
|
||||
)
|
||||
asset.update({'asset_id': asset_id})
|
||||
if 'start_date' in asset:
|
||||
asset.update({'is_active': is_active(asset)})
|
||||
return asset
|
||||
|
||||
|
||||
def delete(conn, asset_id):
|
||||
"""Remove an asset from the database."""
|
||||
with db.commit(conn) as c:
|
||||
c.execute(queries.remove, [asset_id])
|
||||
|
||||
|
||||
def save_ordering(db_conn, ids):
|
||||
"""
|
||||
Order assets. Move to last position assets which not presented
|
||||
in list of id
|
||||
"""
|
||||
|
||||
if ids:
|
||||
with db.commit(db_conn) as c:
|
||||
c.execute(
|
||||
queries.multiple_update_with_case(['play_order', ], len(ids)),
|
||||
sum(
|
||||
[
|
||||
[asset_id, play_order]
|
||||
for play_order, asset_id in enumerate(ids)
|
||||
],
|
||||
[],
|
||||
) + ids,
|
||||
)
|
||||
|
||||
# Set the play order to a high value for all inactive assets.
|
||||
with db.commit(db_conn) as c:
|
||||
c.execute(
|
||||
queries.multiple_update_not_in(['play_order', ], len(ids)),
|
||||
[len(ids)] + ids,
|
||||
)
|
||||
93
lib/auth.py
93
lib/auth.py
@@ -1,16 +1,17 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from builtins import str
|
||||
from builtins import object
|
||||
from abc import ABCMeta, abstractmethod, abstractproperty
|
||||
from functools import wraps
|
||||
import hashlib
|
||||
import os.path
|
||||
|
||||
from flask import request, Response
|
||||
from base64 import b64decode
|
||||
from builtins import str
|
||||
from builtins import object
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from functools import wraps
|
||||
from future.utils import with_metaclass
|
||||
|
||||
|
||||
LINUX_USER = os.getenv('USER', 'pi')
|
||||
|
||||
|
||||
@@ -23,15 +24,14 @@ class Auth(with_metaclass(ABCMeta, object)):
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractproperty
|
||||
def is_authenticated(self):
|
||||
def is_authenticated(self, request):
|
||||
"""
|
||||
See if the user is authenticated for the request.
|
||||
:return: bool
|
||||
"""
|
||||
pass
|
||||
|
||||
def authenticate_if_needed(self):
|
||||
def authenticate_if_needed(self, request):
|
||||
"""
|
||||
If the user performing the request is not authenticated, initiate
|
||||
authentication.
|
||||
@@ -39,14 +39,16 @@ class Auth(with_metaclass(ABCMeta, object)):
|
||||
:return: a Response which initiates authentication or None
|
||||
if already authenticated.
|
||||
"""
|
||||
from django.http import HttpResponse
|
||||
|
||||
try:
|
||||
if not self.is_authenticated:
|
||||
if not self.is_authenticated(request):
|
||||
return self.authenticate()
|
||||
except ValueError as e:
|
||||
return Response(
|
||||
"Authorization backend is unavailable: " + str(e), 503)
|
||||
return HttpResponse(
|
||||
"Authorization backend is unavailable: " + str(e), status=503)
|
||||
|
||||
def update_settings(self, current_pass_correct):
|
||||
def update_settings(self, request, current_pass_correct):
|
||||
"""
|
||||
Submit updated values from Settings page.
|
||||
:param current_pass_correct: the value of "Current Password" field
|
||||
@@ -80,7 +82,7 @@ class NoAuth(Auth):
|
||||
name = ''
|
||||
config = {}
|
||||
|
||||
def is_authenticated(self):
|
||||
def is_authenticated(self, request):
|
||||
return True
|
||||
|
||||
def authenticate(self):
|
||||
@@ -118,27 +120,44 @@ class BasicAuth(Auth):
|
||||
hashed_password = hashlib.sha256(password.encode('utf-8')).hexdigest()
|
||||
return self.settings['password'] == hashed_password
|
||||
|
||||
@property
|
||||
def is_authenticated(self):
|
||||
auth = request.authorization
|
||||
return auth and self._check(auth.username, auth.password)
|
||||
def is_authenticated(self, request):
|
||||
authorization = request.headers.get('Authorization')
|
||||
if not authorization:
|
||||
return False
|
||||
|
||||
content = authorization.split(' ')
|
||||
|
||||
if len(content) != 2:
|
||||
return False
|
||||
|
||||
auth_type = content[0]
|
||||
auth_data = content[1]
|
||||
if auth_type == 'Basic':
|
||||
auth_data = b64decode(auth_data).decode('utf-8')
|
||||
auth_data = auth_data.split(':')
|
||||
if len(auth_data) == 2:
|
||||
username = auth_data[0]
|
||||
password = auth_data[1]
|
||||
return self._check(username, password)
|
||||
return False
|
||||
|
||||
@property
|
||||
def template(self):
|
||||
return 'auth_basic.html', {'user': self.settings['user']}
|
||||
|
||||
def authenticate(self):
|
||||
realm = "Anthias OSE {}".format(self.settings['player_name'])
|
||||
return Response(
|
||||
from django.http import HttpResponse
|
||||
realm = "Anthias {}".format(self.settings['player_name'])
|
||||
return HttpResponse(
|
||||
"Access denied",
|
||||
401,
|
||||
{"WWW-Authenticate": 'Basic realm="{}"'.format(realm)},
|
||||
status=401,
|
||||
headers={"WWW-Authenticate": 'Basic realm="{}"'.format(realm)}
|
||||
)
|
||||
|
||||
def update_settings(self, current_pass_correct):
|
||||
new_user = request.form.get('user', '')
|
||||
new_pass = request.form.get('password', '').encode('utf-8')
|
||||
new_pass2 = request.form.get('password2', '').encode('utf-8')
|
||||
def update_settings(self, request, current_pass_correct):
|
||||
new_user = request.POST.get('user', '')
|
||||
new_pass = request.POST.get('password', '').encode('utf-8')
|
||||
new_pass2 = request.POST.get('password2', '').encode('utf-8')
|
||||
new_pass = hashlib.sha256(new_pass).hexdigest() if new_pass else None
|
||||
new_pass2 = hashlib.sha256(new_pass2).hexdigest() if new_pass else None
|
||||
# Handle auth components
|
||||
@@ -179,17 +198,27 @@ class BasicAuth(Auth):
|
||||
|
||||
|
||||
def authorized(orig):
|
||||
"""
|
||||
Annotation which initiates authentication if the request is unauthorized.
|
||||
:param orig: Flask function
|
||||
:return: Response
|
||||
"""
|
||||
from settings import settings
|
||||
from django.http import HttpRequest
|
||||
from rest_framework.request import Request
|
||||
|
||||
@wraps(orig)
|
||||
def decorated(*args, **kwargs):
|
||||
if not settings.auth:
|
||||
return orig(*args, **kwargs)
|
||||
return settings.auth.authenticate_if_needed() or orig(*args, **kwargs)
|
||||
|
||||
if len(args) == 0:
|
||||
raise ValueError('No request object passed to decorated function')
|
||||
|
||||
request = args[-1]
|
||||
|
||||
if not isinstance(request, (HttpRequest, Request)):
|
||||
raise ValueError(
|
||||
'Request object is not of type HttpRequest or Request')
|
||||
|
||||
return (
|
||||
settings.auth.authenticate_if_needed(request) or
|
||||
orig(*args, **kwargs)
|
||||
)
|
||||
|
||||
return decorated
|
||||
|
||||
@@ -7,7 +7,7 @@ from os import path, getenv, makedirs, remove
|
||||
|
||||
directories = ['.screenly', 'screenly_assets']
|
||||
default_archive_name = "anthias-backup"
|
||||
static_dir = "screenly/static"
|
||||
static_dir = "screenly/staticfiles"
|
||||
|
||||
|
||||
def create_backup(name=default_archive_name):
|
||||
|
||||
23
lib/db.py
23
lib/db.py
@@ -1,23 +0,0 @@
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
def conn(db):
|
||||
return sqlite3.connect(db, detect_types=sqlite3.PARSE_DECLTYPES)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def cursor(connection):
|
||||
cur = connection.cursor()
|
||||
yield cur
|
||||
cur.close()
|
||||
|
||||
|
||||
@contextmanager
|
||||
def commit(connection):
|
||||
cur = connection.cursor()
|
||||
yield cur
|
||||
connection.commit()
|
||||
cur.close()
|
||||
@@ -4,11 +4,9 @@ from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from builtins import str
|
||||
import os
|
||||
import sqlite3
|
||||
from . import utils
|
||||
import cec
|
||||
from lib import device_helper
|
||||
from pprint import pprint
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@@ -39,18 +37,6 @@ def get_uptime():
|
||||
return uptime_seconds
|
||||
|
||||
|
||||
def get_playlist():
|
||||
anthias_db = os.path.join(os.getenv('HOME'), '.screenly/screenly.db')
|
||||
playlist = []
|
||||
if os.path.isfile(anthias_db):
|
||||
conn = sqlite3.connect(anthias_db)
|
||||
c = conn.cursor()
|
||||
for row in c.execute('SELECT * FROM assets;'):
|
||||
playlist.append(row)
|
||||
c.close
|
||||
return playlist
|
||||
|
||||
|
||||
def get_load_avg():
|
||||
"""
|
||||
Returns load average rounded to two digits.
|
||||
@@ -114,30 +100,3 @@ def get_raspberry_code():
|
||||
|
||||
def get_raspberry_model():
|
||||
return device_helper.parse_cpu_info().get('model', "Unknown")
|
||||
|
||||
|
||||
def compile_report():
|
||||
"""
|
||||
Compile report with various data points.
|
||||
"""
|
||||
report = {}
|
||||
report['cpu_info'] = get_raspberry_code()
|
||||
report['pi_model'] = get_raspberry_model()
|
||||
report['uptime'] = get_uptime()
|
||||
report['display_power'] = get_display_power()
|
||||
report['playlist'] = get_playlist()
|
||||
report['git_hash'] = get_git_hash()
|
||||
report['connectivity'] = try_connectivity()
|
||||
report['loadavg'] = get_load_avg()
|
||||
report['utc_isodate'] = get_utc_isodate()
|
||||
report['debian_version'] = get_debian_version()
|
||||
|
||||
return report
|
||||
|
||||
|
||||
def main():
|
||||
pprint(compile_report())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
# flake8: noqa
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
comma = ','.join
|
||||
|
||||
|
||||
def quest(values):
|
||||
return '=?,'.join(values) + '=?'
|
||||
|
||||
|
||||
def quest_2(values, c):
|
||||
return ', '.join([('%s=CASE ' % x) + ("WHEN asset_id=? THEN ? " * c) + 'ELSE asset_id END' for x in values])
|
||||
|
||||
|
||||
exists_table = "SELECT name FROM sqlite_master WHERE type='table' AND name='assets'"
|
||||
|
||||
|
||||
def read_all(keys):
|
||||
return 'select ' + comma(keys) + ' from assets order by play_order'
|
||||
|
||||
|
||||
def read(keys):
|
||||
return 'select ' + comma(keys) + ' from assets where asset_id=?'
|
||||
|
||||
|
||||
def create(keys):
|
||||
return 'insert into assets (' + comma(keys) + ') values (' + comma(['?'] * len(keys)) + ')'
|
||||
|
||||
|
||||
remove = 'delete from assets where asset_id=?'
|
||||
|
||||
|
||||
def update(keys):
|
||||
return 'update assets set ' + quest(keys) + ' where asset_id=?'
|
||||
|
||||
|
||||
def multiple_update(keys, count):
|
||||
return 'UPDATE assets SET ' + quest(keys) + ' WHERE asset_id IN (' + comma(['?'] * count) + ')'
|
||||
|
||||
|
||||
def multiple_update_not_in(keys, count):
|
||||
return 'UPDATE assets SET ' + quest(keys) + ' WHERE asset_id NOT IN (' + comma(['?'] * count) + ')'
|
||||
|
||||
|
||||
def multiple_update_with_case(keys, count):
|
||||
return 'UPDATE assets SET ' + quest_2(keys, count) + \
|
||||
' WHERE asset_id IN (' + comma(['?'] * count) + ')'
|
||||
14
lib/utils.py
14
lib/utils.py
@@ -4,7 +4,6 @@ from future import standard_library
|
||||
from builtins import str
|
||||
from builtins import range
|
||||
import certifi
|
||||
from . import db
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
@@ -26,7 +25,7 @@ from threading import Thread
|
||||
from time import sleep
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .assets_helper import update
|
||||
from anthias_app.models import Asset
|
||||
|
||||
standard_library.install_aliases()
|
||||
|
||||
@@ -378,9 +377,14 @@ class YoutubeDownloadThread(Thread):
|
||||
def run(self):
|
||||
publisher = ZmqPublisher.get_instance()
|
||||
call(['yt-dlp', '-f', 'mp4', '-o', self.location, self.uri])
|
||||
with db.conn(settings['database']) as conn:
|
||||
update(conn, self.asset_id,
|
||||
{'asset_id': self.asset_id, 'is_processing': 0})
|
||||
|
||||
try:
|
||||
asset = Asset.objects.get(asset_id=self.asset_id)
|
||||
asset.is_processing = 0
|
||||
asset.save()
|
||||
except Asset.DoesNotExist:
|
||||
logging.warning('Asset %s not found', self.asset_id)
|
||||
return
|
||||
|
||||
publisher.send_to_ws_server(self.asset_id)
|
||||
|
||||
|
||||
22
manage.py
Executable file
22
manage.py
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Django's command-line utility for administrative tasks."""
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main():
|
||||
"""Run administrative tasks."""
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings')
|
||||
try:
|
||||
from django.core.management import execute_from_command_line
|
||||
except ImportError as exc:
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
"available on your PYTHONPATH environment variable? Did you "
|
||||
"forget to activate a virtual environment?"
|
||||
) from exc
|
||||
execute_from_command_line(sys.argv)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,3 +0,0 @@
|
||||
[unittest]
|
||||
test-file-pattern = test_*.py
|
||||
plugins = nose2.plugins.attrib
|
||||
@@ -1,10 +1,7 @@
|
||||
Cython==0.29.33
|
||||
Flask==2.2.5
|
||||
future==0.18.3
|
||||
gevent-websocket==0.10.1
|
||||
gevent==24.2.1
|
||||
itsdangerous==2.0.1
|
||||
Jinja2==3.1.4
|
||||
pytz==2022.2.1
|
||||
pyzmq==23.2.1
|
||||
Werkzeug==2.2.3
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
future==0.18.3
|
||||
mock==3.0.5
|
||||
nose2==0.15.1
|
||||
pep8==1.7.1
|
||||
selenium==3.141.0
|
||||
splinter==0.14.0
|
||||
|
||||
@@ -2,22 +2,21 @@ cec==0.2.8
|
||||
celery==5.2.2
|
||||
certifi==2024.7.4
|
||||
cffi==1.14.4
|
||||
click==8.1.7
|
||||
configparser==4.0.2
|
||||
cryptography==3.3.2
|
||||
Cython==0.29.33
|
||||
Flask-Cors==5.0.0
|
||||
flask-restful-swagger-2==0.35
|
||||
flask-swagger-ui==3.36.0
|
||||
Flask==2.2.5
|
||||
Django==3.2.18
|
||||
djangorestframework==3.14.0
|
||||
django-dbbackup==4.2.1
|
||||
drf-spectacular==0.27.2
|
||||
future==0.18.3
|
||||
gevent-websocket==0.10.1
|
||||
gevent==24.2.1
|
||||
gunicorn==22.0.0
|
||||
hurry.filesize==0.9
|
||||
importlib-metadata==4.13.0
|
||||
itsdangerous==2.0.1
|
||||
Jinja2==3.1.4
|
||||
jsonschema==4.17.3 # This is the latest version that doesn't require Rust and Cargo.
|
||||
kombu==5.2.4
|
||||
Mako==1.2.2
|
||||
netifaces==0.10.9
|
||||
@@ -35,6 +34,5 @@ tenacity==9.0.0
|
||||
sh==1.8
|
||||
six==1.15.0
|
||||
urllib3==1.26.19
|
||||
Werkzeug==2.2.3
|
||||
wheel==0.38.1
|
||||
yt-dlp==2024.7.7
|
||||
|
||||
@@ -5,11 +5,13 @@ certifi==2024.7.4
|
||||
configparser==4.0.2
|
||||
cryptography==3.3.2
|
||||
Cython==0.29.33
|
||||
Flask==2.2.5
|
||||
Django==3.2.18
|
||||
django-dbbackup==4.2.1
|
||||
drf-spectacular==0.27.2
|
||||
future==0.18.3
|
||||
idna==3.7
|
||||
itsdangerous==2.0.1
|
||||
Jinja2==3.1.4
|
||||
jsonschema==4.17.3 # This is the latest version that doesn't require Rust and Cargo.
|
||||
netifaces==0.10.9
|
||||
pydbus==0.6.0
|
||||
python-dateutil==2.8.1
|
||||
@@ -22,4 +24,3 @@ tenacity==9.0.0
|
||||
sh==1.8
|
||||
uptime==3.0.1
|
||||
urllib3==1.26.19
|
||||
Werkzeug==2.2.3
|
||||
|
||||
19
run_gunicorn.py
Normal file
19
run_gunicorn.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from gunicorn.app.base import Application
|
||||
from anthias_django import wsgi
|
||||
from settings import LISTEN, PORT
|
||||
|
||||
|
||||
class GunicornApplication(Application):
|
||||
def init(self, parser, opts, args):
|
||||
return {
|
||||
'bind': f'{LISTEN}:{PORT}',
|
||||
'threads': 2,
|
||||
'timeout': 20,
|
||||
}
|
||||
|
||||
def load(self):
|
||||
return wsgi.application
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
GunicornApplication().run()
|
||||
193
server.py
193
server.py
@@ -1,193 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from future import standard_library
|
||||
__author__ = "Screenly, Inc"
|
||||
__copyright__ = "Copyright 2012-2023, Screenly, Inc"
|
||||
__license__ = "Dual License: GPLv2 and Commercial License"
|
||||
|
||||
from os import getenv, path, stat
|
||||
|
||||
from flask import (
|
||||
Flask,
|
||||
make_response,
|
||||
request,
|
||||
send_from_directory,
|
||||
url_for,
|
||||
)
|
||||
from flask_cors import CORS
|
||||
from flask_restful_swagger_2 import Api
|
||||
from flask_swagger_ui import get_swaggerui_blueprint
|
||||
from gunicorn.app.base import Application
|
||||
|
||||
from api.views.v1 import (
|
||||
Asset,
|
||||
AssetContent,
|
||||
Assets,
|
||||
AssetsControl,
|
||||
Backup,
|
||||
FileAsset,
|
||||
Info,
|
||||
PlaylistOrder,
|
||||
Reboot,
|
||||
Recover,
|
||||
Shutdown,
|
||||
ViewerCurrentAsset,
|
||||
)
|
||||
from api.views.v1_1 import (
|
||||
AssetV1_1,
|
||||
AssetsV1_1,
|
||||
)
|
||||
from api.views.v1_2 import (
|
||||
AssetV1_2,
|
||||
AssetsV1_2,
|
||||
)
|
||||
|
||||
|
||||
from lib import assets_helper
|
||||
from lib import db
|
||||
from lib import queries
|
||||
|
||||
from lib.auth import authorized
|
||||
from lib.utils import (
|
||||
json_dump,
|
||||
get_node_ip,
|
||||
connect_to_redis,
|
||||
)
|
||||
from anthias_app.views import anthias_app_bp
|
||||
from settings import LISTEN, PORT, settings
|
||||
|
||||
|
||||
standard_library.install_aliases()
|
||||
|
||||
HOME = getenv('HOME')
|
||||
|
||||
app = Flask(__name__)
|
||||
app.register_blueprint(anthias_app_bp)
|
||||
|
||||
CORS(app)
|
||||
api = Api(app, api_version="v1", title="Anthias API")
|
||||
|
||||
r = connect_to_redis()
|
||||
|
||||
|
||||
################################
|
||||
# Utilities
|
||||
################################
|
||||
|
||||
|
||||
@api.representation('application/json')
|
||||
def output_json(data, code, headers=None):
|
||||
response = make_response(json_dump(data), code)
|
||||
response.headers.extend(headers or {})
|
||||
return response
|
||||
|
||||
|
||||
################################
|
||||
# API
|
||||
################################
|
||||
|
||||
|
||||
api.add_resource(Assets, '/api/v1/assets')
|
||||
api.add_resource(Asset, '/api/v1/assets/<asset_id>')
|
||||
api.add_resource(AssetsV1_1, '/api/v1.1/assets')
|
||||
api.add_resource(AssetV1_1, '/api/v1.1/assets/<asset_id>')
|
||||
api.add_resource(AssetsV1_2, '/api/v1.2/assets')
|
||||
api.add_resource(AssetV1_2, '/api/v1.2/assets/<asset_id>')
|
||||
api.add_resource(AssetContent, '/api/v1/assets/<asset_id>/content')
|
||||
api.add_resource(FileAsset, '/api/v1/file_asset')
|
||||
api.add_resource(PlaylistOrder, '/api/v1/assets/order')
|
||||
api.add_resource(Backup, '/api/v1/backup')
|
||||
api.add_resource(Recover, '/api/v1/recover')
|
||||
api.add_resource(AssetsControl, '/api/v1/assets/control/<command>')
|
||||
api.add_resource(Info, '/api/v1/info')
|
||||
api.add_resource(Reboot, '/api/v1/reboot')
|
||||
api.add_resource(Shutdown, '/api/v1/shutdown')
|
||||
api.add_resource(ViewerCurrentAsset, '/api/v1/viewer_current_asset')
|
||||
|
||||
try:
|
||||
my_ip = get_node_ip()
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
SWAGGER_URL = '/api/docs'
|
||||
API_URL = "/api/swagger.json"
|
||||
|
||||
swaggerui_blueprint = get_swaggerui_blueprint(
|
||||
SWAGGER_URL,
|
||||
API_URL,
|
||||
config={
|
||||
'app_name': "Anthias API"
|
||||
}
|
||||
)
|
||||
app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL)
|
||||
|
||||
|
||||
@app.errorhandler(403)
|
||||
def mistake403(code):
|
||||
return 'The parameter you passed has the wrong format!'
|
||||
|
||||
|
||||
@app.errorhandler(404)
|
||||
def mistake404(code):
|
||||
return 'Sorry, this page does not exist!'
|
||||
|
||||
|
||||
################################
|
||||
# Static
|
||||
################################
|
||||
|
||||
|
||||
@app.context_processor
|
||||
def override_url_for():
|
||||
return dict(url_for=dated_url_for)
|
||||
|
||||
|
||||
def dated_url_for(endpoint, **values):
|
||||
if endpoint == 'static':
|
||||
filename = values.get('filename', None)
|
||||
if filename:
|
||||
file_path = path.join(app.root_path,
|
||||
endpoint, filename)
|
||||
if path.isfile(file_path):
|
||||
values['q'] = int(stat(file_path).st_mtime)
|
||||
return url_for(endpoint, **values)
|
||||
|
||||
|
||||
@app.route('/static_with_mime/<string:path>')
|
||||
@authorized
|
||||
def static_with_mime(path):
|
||||
mimetype = request.args['mime'] if 'mime' in request.args else 'auto'
|
||||
return send_from_directory(
|
||||
directory='static', filename=path, mimetype=mimetype)
|
||||
|
||||
|
||||
@app.before_first_request
|
||||
def main():
|
||||
with db.conn(settings['database']) as conn:
|
||||
with db.cursor(conn) as cursor:
|
||||
cursor.execute(queries.exists_table)
|
||||
if cursor.fetchone() is None:
|
||||
cursor.execute(assets_helper.create_assets_table)
|
||||
|
||||
|
||||
def is_development():
|
||||
return getenv('ENVIRONMENT', '') == 'development'
|
||||
|
||||
|
||||
if __name__ == "__main__" and not is_development():
|
||||
config = {
|
||||
'bind': '{}:{}'.format(LISTEN, PORT),
|
||||
'threads': 2,
|
||||
'timeout': 20
|
||||
}
|
||||
|
||||
class GunicornApplication(Application):
|
||||
def init(self, parser, opts, args):
|
||||
return config
|
||||
|
||||
def load(self):
|
||||
return app
|
||||
|
||||
GunicornApplication().run()
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
from future import standard_library
|
||||
from builtins import str
|
||||
from builtins import object
|
||||
import hashlib
|
||||
@@ -17,7 +16,6 @@ from collections import UserDict
|
||||
from lib.auth import BasicAuth, NoAuth
|
||||
from lib.errors import ZmqCollectorTimeout
|
||||
|
||||
standard_library.install_aliases()
|
||||
|
||||
CONFIG_DIR = '.screenly/'
|
||||
CONFIG_FILE = 'screenly.conf'
|
||||
@@ -30,12 +28,13 @@ DEFAULTS = {
|
||||
'use_24_hour_clock': False,
|
||||
'use_ssl': False,
|
||||
'auth_backend': '',
|
||||
'websocket_port': '9999'
|
||||
'websocket_port': '9999',
|
||||
'django_secret_key': ''
|
||||
},
|
||||
'viewer': {
|
||||
'audio_output': 'hdmi',
|
||||
'debug_logging': False,
|
||||
'default_duration': '10',
|
||||
'default_duration': 10,
|
||||
'default_streaming_duration': '300',
|
||||
'player_name': '',
|
||||
'resolution': '1920x1080',
|
||||
|
||||
BIN
static/img/anthias-loading.png
Normal file
BIN
static/img/anthias-loading.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 27 KiB |
@@ -12,13 +12,13 @@ $().ready ->
|
||||
|
||||
$.ajax({
|
||||
method: "POST"
|
||||
url: "api/v1/backup"
|
||||
url: "/api/v1/backup"
|
||||
timeout: 1800 * 1000
|
||||
})
|
||||
|
||||
.done (data, e) ->
|
||||
if (data)
|
||||
window.location = "static_with_mime/" + data + "?mime=application/x-tgz"
|
||||
window.location = "/static_with_mime/" + data + "?mime=application/x-tgz"
|
||||
|
||||
.fail (data, e) ->
|
||||
$("#request-error .alert").addClass "alert-danger"
|
||||
@@ -40,7 +40,7 @@ $().ready ->
|
||||
$("[name='backup_upload']").click()
|
||||
|
||||
$("[name='backup_upload']").fileupload
|
||||
url: "api/v1/recover"
|
||||
url: "/api/v1/recover"
|
||||
progressall: (e, data) -> if data.loaded and data.total
|
||||
valuenow = data.loaded/data.total*100
|
||||
$(".progress .bar").css "width", valuenow + "%"
|
||||
|
||||
@@ -13,11 +13,11 @@
|
||||
$("#btn-backup").prop("disabled", true);
|
||||
return $.ajax({
|
||||
method: "POST",
|
||||
url: "api/v1/backup",
|
||||
url: "/api/v1/backup",
|
||||
timeout: 1800 * 1000
|
||||
}).done(function(data, e) {
|
||||
if (data) {
|
||||
return window.location = "static_with_mime/" + data + "?mime=application/x-tgz";
|
||||
return window.location = "/static_with_mime/" + data + "?mime=application/x-tgz";
|
||||
}
|
||||
}).fail(function(data, e) {
|
||||
var err, j;
|
||||
@@ -40,7 +40,7 @@
|
||||
return $("[name='backup_upload']").click();
|
||||
});
|
||||
$("[name='backup_upload']").fileupload({
|
||||
url: "api/v1/recover",
|
||||
url: "/api/v1/recover",
|
||||
progressall: function(e, data) {
|
||||
var valuenow;
|
||||
if (data.loaded && data.total) {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
|
||||
<div class="form-group" id="user_group">
|
||||
<label class="small text-secondary">User</label>
|
||||
<input class="form-control" name="user" type="text" value="{{ context.user }}">
|
||||
<input class="form-control" name="user" type="text" value="{{ user }}">
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="form-group col-6" id="password_group">
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
{# vim: ft=htmldjango #}
|
||||
{% load static %}
|
||||
|
||||
<meta charset="utf-8"/>
|
||||
|
||||
{% if context.player_name %}
|
||||
@@ -29,13 +32,13 @@
|
||||
<meta content="/static/favicons/mstile-310x150.png" name="msapplication-wide310x150logo"/>
|
||||
<meta content="/static/favicons/mstile-310x310.png" name="msapplication-square310x310logo"/>
|
||||
|
||||
<link href="{{ url_for('static', filename='css/anthias.css') }}" rel="stylesheet"/>
|
||||
<link href="{% static 'css/anthias.css' %}" type="text/css" rel="stylesheet"/>
|
||||
|
||||
<link href="{{ url_for('static', filename='fontawesome/css/all.css') }}" rel="stylesheet"/>
|
||||
<link href="{% static 'fontawesome/css/all.css' %}" rel="stylesheet"/>
|
||||
|
||||
<script async defer src="https://buttons.github.io/buttons.js"></script>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/jquery-3.7.1.min.js') }}"></script>
|
||||
<script src="{% static 'js/jquery-3.7.1.min.js' %}"></script>
|
||||
|
||||
{% if context.is_demo %}
|
||||
<!-- Global Site Tag (gtag.js) - Google Analytics -->
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
{# vim: ft=htmldjango #}
|
||||
|
||||
{% load static %}
|
||||
|
||||
<div id="request-error" class="navbar navbar fixed-top">
|
||||
<div class="container">
|
||||
<div class="alert" style="display:none">
|
||||
@@ -9,10 +13,10 @@
|
||||
<div class="navbar navbar-header navbar-expand-lg fixed-top bg-dark">
|
||||
<div class="container">
|
||||
<a class="brand" href="/">
|
||||
<img src="static/img/logo-full.svg"/>
|
||||
<img src="{% static 'img/logo-full.svg' %}"/>
|
||||
</a>
|
||||
<ul class="nav float-right">
|
||||
{% if not context.up_to_date and not context.is_balena %}
|
||||
{% if not up_to_date and not is_balena %}
|
||||
<li class="update-available">
|
||||
<a href="/settings#upgrade-section">
|
||||
<i class="fas fa-arrow-circle-down pr-1"></i>
|
||||
@@ -26,7 +30,7 @@
|
||||
Schedule Overview
|
||||
</a>
|
||||
</li>
|
||||
{% if context.is_balena %}
|
||||
{% if is_balena %}
|
||||
<li>
|
||||
<a href="/integrations">
|
||||
<i class="far fa-plus-square pr-1"></i>
|
||||
|
||||
@@ -1,36 +1,42 @@
|
||||
{# vim: ft=htmldjango #}
|
||||
|
||||
{% extends "base.html" %}
|
||||
{% load static %}
|
||||
|
||||
{% block head %}
|
||||
<link href="{{ url_for('static', filename='css/datepicker.css') }}" rel="stylesheet"/>
|
||||
<link href="{{ url_for('static', filename='css/timepicker.css') }}" rel="stylesheet"/>
|
||||
<link href="{% static 'css/datepicker.css' %}" rel="stylesheet"/>
|
||||
<link href="{% static 'css/timepicker.css' %}" rel="stylesheet"/>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/underscore-1.4.3.min.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.iframe-transport.js') }}"></script>
|
||||
<script src="{% static 'js/underscore-1.4.3.min.js' %}"></script>
|
||||
<script src="{% static 'js/popper.min.js' %}"></script>
|
||||
<script src="{% static 'js/jquery.iframe-transport.js' %}"></script>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/base64js.min.js') }}"></script>
|
||||
<script src="{% static 'js/base64js.min.js' %}"></script>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/backbone-0.9.10.min.js') }}"></script> <!-- needs jquery -->
|
||||
<script src="{% static 'js/backbone-0.9.10.min.js' %}"></script> <!-- needs jquery -->
|
||||
|
||||
<script src="{{ url_for('static', filename='js/jquery-ui-1.10.1.custom.min.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.fileupload.js') }}"></script>
|
||||
<script src="{% static 'js/jquery-ui-1.10.1.custom.min.js' %}"></script>
|
||||
<script src="{% static 'js/jquery.fileupload.js' %}"></script>
|
||||
<!-- needs jqueryui.widget -->
|
||||
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script> <!-- needs jquery -->
|
||||
<script src="{{ url_for('static', filename='js/bootstrap-datepicker.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap-timepicker.js') }}"></script>
|
||||
<script src="{% static 'js/bootstrap.min.js' %}"></script> <!-- needs jquery -->
|
||||
<script src="{% static 'js/bootstrap-datepicker.js' %}"></script>
|
||||
<script src="{% static 'js/bootstrap-timepicker.js' %}"></script>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/moment.js') }}"></script>
|
||||
<script src="{% static 'js/moment.js' %}"></script>
|
||||
|
||||
{{ ws_addresses|json_script:"ws_addresses" }};
|
||||
|
||||
<script type="text/javascript">
|
||||
var dateFormat = "{{ context.date_format }}";
|
||||
var defaultDuration = {{ context.default_duration }};
|
||||
var defaultStreamingDuration = {{ context.default_streaming_duration }};
|
||||
var use24HourClock = {% if context.use_24_hour_clock %} true; {% else %} false; {% endif %}
|
||||
var wsAddresses = {{ context.ws_addresses|tojson }};
|
||||
var dateFormat = "{{ date_format }}";
|
||||
var defaultDuration = {{ default_duration }};
|
||||
var defaultStreamingDuration = {{ default_streaming_duration }};
|
||||
var use24HourClock = {% if use_24_hour_clock %} true; {% else %} false; {% endif %}
|
||||
var wsAddresses = JSON.parse(document.getElementById('ws_addresses').textContent);
|
||||
</script>
|
||||
<script src="{{ url_for('static', filename='js/anthias.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/main.js') }}"></script>
|
||||
|
||||
<script src="{% static 'js/anthias.js' %}"></script>
|
||||
<script src="{% static 'js/main.js' %}"></script>
|
||||
|
||||
<script id="asset-row-template" type="text/template">
|
||||
<td class="asset_row_name">
|
||||
@@ -317,8 +323,8 @@
|
||||
</a>
|
||||
</div>
|
||||
</h4>
|
||||
{% if context.player_name %}
|
||||
<h4 class="text-white">{{ context.player_name }}</h4>
|
||||
{% if player_name %}
|
||||
<h4 class="text-white">{{ player_name }}</h4>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div class="row content" style="min-height: 60vh;">
|
||||
{% if context.is_balena %}
|
||||
{% if is_balena %}
|
||||
<div id="balena-section" class="col-12">
|
||||
<h4 class="page-header">
|
||||
<b>Balena</b>
|
||||
@@ -26,34 +26,34 @@
|
||||
<tbody>
|
||||
<tr>
|
||||
<th scope="row">Device Name</th>
|
||||
<td>{{ context.balena_device_name_at_init }}</td>
|
||||
<td>{{ balena_device_name_at_init }}</td>
|
||||
<td>The name of the device on first initialisation.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Device UUID</th>
|
||||
<td>{{ context.balena_device_id }}</td>
|
||||
<td>{{ balena_device_id }}</td>
|
||||
<td>The unique identification number for the device. This is used to identify it on
|
||||
balena.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">App ID</th>
|
||||
<td>{{ context.balena_app_id }}</td>
|
||||
<td>{{ balena_app_id }}</td>
|
||||
<td>ID number of the balena application the device is associated.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">App Name</th>
|
||||
<td>{{ context.balena_app_name }}</td>
|
||||
<td>{{ balena_app_name }}</td>
|
||||
<td>The name of the balena application the device is associated with.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Supervisor Version</th>
|
||||
<td>{{ context.balena_supervisor_version }}</td>
|
||||
<td>{{ balena_supervisor_version }}</td>
|
||||
<td>The current version of the supervisor agent running on the device.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Host OS Version</th>
|
||||
<td>{{ context.balena_host_os_version }}</td>
|
||||
<td>{{ balena_host_os_version }}</td>
|
||||
<td>The version of the host OS.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
|
||||
@@ -1,18 +1,22 @@
|
||||
{# vim: ft=htmldjango #}
|
||||
|
||||
{% extends "base.html" %}
|
||||
{% load static %}
|
||||
|
||||
{% block head %}
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js') }}"></script>
|
||||
<script src="{% static 'js/popper.min.js' %}"></script>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js') }}"></script> <!-- needs jquery -->
|
||||
<script src="{{ url_for('static', filename='js/jquery-ui-1.10.1.custom.min.js') }}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.fileupload.js') }}"></script> <!-- needs jqueryui.widget -->
|
||||
<script src="{{ url_for('static', filename='js/bootstrap-datepicker.js') }}"></script>
|
||||
<script src="{% static 'js/bootstrap.min.js' %}"></script> <!-- needs jquery -->
|
||||
<script src="{% static 'js/jquery-ui-1.10.1.custom.min.js' %}"></script>
|
||||
<script src="{% static 'js/jquery.fileupload.js' %}"></script> <!-- needs jqueryui.widget -->
|
||||
<script src="{% static 'js/bootstrap-datepicker.js' %}"></script>
|
||||
|
||||
<script src="{{ url_for('static', filename='js/settings.js') }}"></script>
|
||||
<script src="{% static 'js/settings.js' %}"></script>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<div class="container" mode: "0644"
|
||||
>
|
||||
<div class="row py-2">
|
||||
<div class="col-12">
|
||||
<h4 class="page-header text-white">
|
||||
@@ -22,34 +26,36 @@
|
||||
</div>
|
||||
<div class="row content px-3">
|
||||
<div class="col-12 my-3">
|
||||
{% if context.flash %}
|
||||
<div class="alert alert-{{ context.flash['class'] }}">
|
||||
{{ context.flash['message'] }}
|
||||
{% if flash %}
|
||||
<div class="alert alert-{{ flash.class }}">
|
||||
{{ flash.message }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<form method="post" class="row">
|
||||
{% csrf_token %}
|
||||
<div class="form-group col-6 d-flex flex-column justify-content-between">
|
||||
<div class="form-group">
|
||||
<label class="small text-secondary">Player name</label>
|
||||
<input class="form-control" name="player_name" type="text"
|
||||
value="{{ context.player_name }}">
|
||||
value="{{ player_name }}">
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="form-group col-6">
|
||||
<label class="small text-secondary">Default duration (seconds)</label>
|
||||
<input class="form-control" name="default_duration" type="number"
|
||||
value="{{ context.default_duration }}"/>
|
||||
value="{{ default_duration }}"/>
|
||||
</div>
|
||||
<div class="form-group col-6">
|
||||
<label class="small text-secondary">Default streaming duration (seconds)</label>
|
||||
<input class="form-control" name="default_streaming_duration" type="number"
|
||||
value="{{ context.default_streaming_duration }}"/>
|
||||
value="{{ default_streaming_duration }}"/>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="small text-secondary">Audio output</label>
|
||||
<select class="form-control" name="audio_output">
|
||||
{% if context.audio_output == 'hdmi' %}
|
||||
{% if audio_output == 'hdmi' %}
|
||||
<option value="hdmi" selected="selected">HDMI</option>
|
||||
<option value="local">3.5mm jack</option>
|
||||
{% else %}
|
||||
@@ -62,39 +68,39 @@
|
||||
<label class="small text-secondary">Date format</label>
|
||||
<select class="form-control" name="date_format">
|
||||
<option value="mm/dd/yyyy"
|
||||
{% if context.date_format == 'mm/dd/yyyy' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'mm/dd/yyyy' %} selected="selected" {% endif %}>
|
||||
month/day/year
|
||||
</option>
|
||||
<option value="dd/mm/yyyy"
|
||||
{% if context.date_format == 'dd/mm/yyyy' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'dd/mm/yyyy' %} selected="selected" {% endif %}>
|
||||
day/month/year
|
||||
</option>
|
||||
<option value="yyyy/mm/dd"
|
||||
{% if context.date_format == 'yyyy/mm/dd' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'yyyy/mm/dd' %} selected="selected" {% endif %}>
|
||||
year/month/day
|
||||
</option>
|
||||
<option value="mm-dd-yyyy"
|
||||
{% if context.date_format == 'mm-dd-yyyy' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'mm-dd-yyyy' %} selected="selected" {% endif %}>
|
||||
month-day-year
|
||||
</option>
|
||||
<option value="dd-mm-yyyy"
|
||||
{% if context.date_format == 'dd-mm-yyyy' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'dd-mm-yyyy' %} selected="selected" {% endif %}>
|
||||
day-month-year
|
||||
</option>
|
||||
<option value="yyyy-mm-dd"
|
||||
{% if context.date_format == 'yyyy-mm-dd' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'yyyy-mm-dd' %} selected="selected" {% endif %}>
|
||||
year-month-day
|
||||
</option>
|
||||
<option value="mm.dd.yyyy"
|
||||
{% if context.date_format == 'mm.dd.yyyy' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'mm.dd.yyyy' %} selected="selected" {% endif %}>
|
||||
month.day.year
|
||||
</option>
|
||||
<option value="dd.mm.yyyy"
|
||||
{% if context.date_format == 'dd.mm.yyyy' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'dd.mm.yyyy' %} selected="selected" {% endif %}>
|
||||
day.month.year
|
||||
</option>
|
||||
<option value="yyyy.mm.dd"
|
||||
{% if context.date_format == 'yyyy.mm.dd' %} selected="selected" {% endif %}>
|
||||
{% if date_format == 'yyyy.mm.dd' %} selected="selected" {% endif %}>
|
||||
year.month.day
|
||||
</option>
|
||||
</select>
|
||||
@@ -102,20 +108,20 @@
|
||||
<div class="form-group mb-0">
|
||||
<label class="small text-secondary">Authentication</label>
|
||||
<select class="form-control" id="auth_backend" name="auth_backend">
|
||||
{% for opt in context.auth_backends %}
|
||||
{% for opt in auth_backends %}
|
||||
<option value="{{ opt.name }}" {{ opt.selected }}>{{ opt.text }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{% if context.need_current_password %}
|
||||
{% if need_current_password %}
|
||||
<div class="form-group" id="curpassword_group">
|
||||
<label class="small text-secondary">Current Password</label>
|
||||
<input class="form-control" name="current-password" type="password" value="">
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% for backend in context.auth_backends %}
|
||||
{% for backend in auth_backends %}
|
||||
{% if backend.template %}
|
||||
<div id="auth_backend-{{ backend.name }}">
|
||||
{% include backend.template %}
|
||||
@@ -130,7 +136,7 @@
|
||||
<div class="ml-auto">
|
||||
<label id="splash_checkbox"
|
||||
class="is_enabled-toggle toggle switch-light switch-material small m-0">
|
||||
{% if context.show_splash %}
|
||||
{% if show_splash %}
|
||||
<input name="show_splash" checked="checked" type="checkbox"/>
|
||||
{% else %}
|
||||
<input name="show_splash" type="checkbox"/>
|
||||
@@ -144,7 +150,7 @@
|
||||
<div class="ml-auto">
|
||||
<label id="default_assets_checkbox"
|
||||
class="is_enabled-toggle toggle switch-light switch-material small m-0">
|
||||
{% if context.default_assets %}
|
||||
{% if default_assets %}
|
||||
<input name="default_assets" checked="checked" type="checkbox"/>
|
||||
{% else %}
|
||||
<input name="default_assets" type="checkbox"/>
|
||||
@@ -158,7 +164,7 @@
|
||||
<div class="ml-auto">
|
||||
<label id="shuffle_checkbox"
|
||||
class="is_enabled-toggle toggle switch-light switch-material small m-0">
|
||||
{% if context.shuffle_playlist %}
|
||||
{% if shuffle_playlist %}
|
||||
<input name="shuffle_playlist" checked="checked" type="checkbox"/>
|
||||
{% else %}
|
||||
<input name="shuffle_playlist" type="checkbox"/>
|
||||
@@ -172,7 +178,7 @@
|
||||
<div class="ml-auto">
|
||||
<label id="use_24_hour_clock_checkbox"
|
||||
class="is_enabled-toggle toggle switch-light switch-material small m-0">
|
||||
{% if context.use_24_hour_clock %}
|
||||
{% if use_24_hour_clock %}
|
||||
<input name="use_24_hour_clock" checked="checked" type="checkbox"/>
|
||||
{% else %}
|
||||
<input name="use_24_hour_clock" type="checkbox"/>
|
||||
@@ -186,7 +192,7 @@
|
||||
<div class="ml-auto">
|
||||
<label id="debug_checkbox"
|
||||
class="is_enabled-toggle toggle switch-light switch-material small m-0">
|
||||
{% if context.debug_logging %}
|
||||
{% if debug_logging %}
|
||||
<input name="debug_logging" checked="checked" type="checkbox">
|
||||
{% else %}
|
||||
<input name="debug_logging" type="checkbox">
|
||||
@@ -272,7 +278,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if not (context.is_balena or context.is_docker) %}
|
||||
{% if not is_balena and not is_docker %}
|
||||
{# Reset Wifi #}
|
||||
<div class="container mt-4">
|
||||
<div class="row py-2">
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
{# vim: ft=htmldjango #}
|
||||
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
{% load static %}
|
||||
<meta charset="utf-8"/>
|
||||
<title>Welcome to Anthias</title>
|
||||
<link href="{{ url_for('static', filename='css/anthias.css') }}" rel="stylesheet"/>
|
||||
<link href="{% static 'css/anthias.css' %}" rel="stylesheet"/>
|
||||
<link href="https://fonts.googleapis.com/css?family=Plus Jakarta Sans" rel="stylesheet">
|
||||
<link rel="icon" href="/static/favicons/favicon.ico">
|
||||
<style type="text/css">
|
||||
@@ -28,15 +31,15 @@
|
||||
<div class="splash-body p-5">
|
||||
<div class="row">
|
||||
<div class="col-12 text-center">
|
||||
{% if context.ip_addresses %}
|
||||
{% if ip_addresses %}
|
||||
<p>
|
||||
To manage the content on this screen, just point your
|
||||
browser to
|
||||
{% if context.ip_addresses | length > 1 %}
|
||||
{% if ip_addresses|length > 1 %}
|
||||
any of
|
||||
{% endif %}
|
||||
the IP
|
||||
{% if context.ip_addresses | length > 1 %}
|
||||
{% if ip_addresses|length > 1 %}
|
||||
addresses
|
||||
{% else %}
|
||||
address
|
||||
@@ -44,7 +47,7 @@
|
||||
provided below.
|
||||
</p>
|
||||
<div class="form-actions">
|
||||
{% for ip_address in context.ip_addresses %}
|
||||
{% for ip_address in ip_addresses %}
|
||||
<a href="{{ ip_address }}">{{ ip_address }}</a>
|
||||
{% if not loop.last %}
|
||||
<br/>
|
||||
|
||||
@@ -22,43 +22,42 @@
|
||||
<tbody>
|
||||
<tr>
|
||||
<th scope="row">Load Average</th>
|
||||
<td>{{ context.loadavg }}</td>
|
||||
<td>{{ loadavg }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Free Space</th>
|
||||
<td>{{ context.free_space }}</td>
|
||||
<td>{{ free_space }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Memory</th>
|
||||
<td>
|
||||
Total: <strong>{{ context.memory.total }}</strong> /
|
||||
Used: <strong>{{ context.memory.used }}</strong> /
|
||||
Free: <strong>{{ context.memory.free }}</strong> /
|
||||
Shared: <strong>{{ context.memory.shared }}</strong> /
|
||||
Buff: <strong>{{ context.memory.buff }}</strong> /
|
||||
Available: <strong>{{ context.memory.available }}</strong>
|
||||
Total: <strong>{{ memory.total }}</strong> /
|
||||
Used: <strong>{{ memory.used }}</strong> /
|
||||
Free: <strong>{{ memory.free }}</strong> /
|
||||
Shared: <strong>{{ memory.shared }}</strong> /
|
||||
Buff: <strong>{{ memory.buff }}</strong> /
|
||||
Available: <strong>{{ memory.available }}</strong>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Uptime</th>
|
||||
<td>{{ context.uptime.days }} days and {{ (context.uptime.seconds / 3600)|round(2) }} hours</td>
|
||||
<td>{{ uptime.days }} days and {{ uptime.hours }} hours</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<th scope="row">Display Power (CEC)</th>
|
||||
<td>{{ context.display_power }}</td>
|
||||
<td>{{ display_power }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Device Model</th>
|
||||
<td>{{ context.device_model }}</td>
|
||||
<td>{{ device_model }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">Anthias Version</th>
|
||||
<td>{{ context.version }}</td>
|
||||
<td>{{ anthias_version }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">MAC Address</th>
|
||||
<td>{{ context.mac_address }}</td>
|
||||
<td>{{ mac_address }}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
@@ -1,26 +1,32 @@
|
||||
from __future__ import unicode_literals
|
||||
from builtins import object
|
||||
from splinter import Browser
|
||||
from time import sleep
|
||||
from selenium.common.exceptions import ElementNotVisibleException
|
||||
from selenium import webdriver
|
||||
from settings import settings
|
||||
from lib import db
|
||||
from lib import assets_helper
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from datetime import timedelta
|
||||
from django.test import tag
|
||||
from django.utils import timezone
|
||||
from selenium import webdriver
|
||||
from selenium.common.exceptions import ElementNotVisibleException
|
||||
from splinter import Browser
|
||||
from time import sleep
|
||||
from unittest import skip, TestCase
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from anthias_app.models import Asset
|
||||
from settings import settings
|
||||
|
||||
|
||||
main_page_url = 'http://localhost:8080'
|
||||
settings_url = 'http://foo:bar@localhost:8080/settings'
|
||||
system_info_url = 'http://foo:bar@localhost:8080/system_info'
|
||||
|
||||
asset_x = {
|
||||
'mimetype': u'web',
|
||||
'asset_id': u'4c8dbce552edb5812d3a866cfe5f159d',
|
||||
'name': u'WireLoad',
|
||||
'uri': u'https://www.wireload.net',
|
||||
'start_date': datetime.now() - timedelta(days=1),
|
||||
'end_date': datetime.now() + timedelta(days=1),
|
||||
'duration': u'5',
|
||||
'mimetype': 'web',
|
||||
'asset_id': '4c8dbce552edb5812d3a866cfe5f159d',
|
||||
'name': 'WireLoad',
|
||||
'uri': 'http://www.wireload.net',
|
||||
'start_date': timezone.now() - timedelta(days=1),
|
||||
'end_date': timezone.now() + timedelta(days=1),
|
||||
'duration': 5,
|
||||
'is_enabled': 0,
|
||||
'nocache': 0,
|
||||
'play_order': 1,
|
||||
@@ -28,23 +34,19 @@ asset_x = {
|
||||
}
|
||||
|
||||
asset_y = {
|
||||
'mimetype': u'image',
|
||||
'asset_id': u'7e978f8c1204a6f70770a1eb54a76e9b',
|
||||
'name': u'Google',
|
||||
'uri': u'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': datetime.now() - timedelta(days=1),
|
||||
'end_date': datetime.now() + timedelta(days=1),
|
||||
'duration': u'6',
|
||||
'mimetype': 'image',
|
||||
'asset_id': '7e978f8c1204a6f70770a1eb54a76e9b',
|
||||
'name': 'Google',
|
||||
'uri': 'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': timezone.now() - timedelta(days=1),
|
||||
'end_date': timezone.now() + timedelta(days=1),
|
||||
'duration': 6,
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'play_order': 0,
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
|
||||
main_page_url = 'http://localhost:8080'
|
||||
settings_url = 'http://foo:bar@localhost:8080/settings'
|
||||
system_info_url = 'http://foo:bar@localhost:8080/system_info'
|
||||
|
||||
|
||||
class TemporaryCopy(object):
|
||||
def __init__(self, original_path, base_path):
|
||||
@@ -84,15 +86,10 @@ def wait_for_and_do(browser, query, callback):
|
||||
n += 1
|
||||
|
||||
|
||||
@tag('integration')
|
||||
class WebTest(TestCase):
|
||||
def setUp(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
for asset in assets:
|
||||
assets_helper.delete(conn, asset['asset_id'])
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
Asset.objects.all().delete()
|
||||
|
||||
def test_add_asset_url(self):
|
||||
with get_browser() as browser:
|
||||
@@ -108,25 +105,22 @@ class WebTest(TestCase):
|
||||
sleep(1)
|
||||
|
||||
wait_for_and_do(browser, '#add-form', lambda form: form.click())
|
||||
sleep(1)
|
||||
sleep(1) # Wait for the new-asset panel animation.
|
||||
|
||||
wait_for_and_do(browser, '#save-asset', lambda btn: btn.click())
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3) # The backend needs time to process the request.
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets.first()
|
||||
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets[0]
|
||||
|
||||
self.assertEqual(asset['name'], u'https://example.com')
|
||||
self.assertEqual(asset['uri'], u'https://example.com')
|
||||
self.assertEqual(asset['mimetype'], u'webpage')
|
||||
self.assertEqual(asset['duration'], settings['default_duration'])
|
||||
self.assertEqual(asset.name, 'https://example.com')
|
||||
self.assertEqual(asset.uri, 'https://example.com')
|
||||
self.assertEqual(asset.mimetype, 'webpage')
|
||||
self.assertEqual(asset.duration, settings['default_duration'])
|
||||
|
||||
def test_edit_asset(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets_helper.create(conn, asset_x)
|
||||
asset = Asset.objects.create(**asset_x)
|
||||
|
||||
with get_browser() as browser:
|
||||
browser.visit(main_page_url)
|
||||
@@ -137,21 +131,19 @@ class WebTest(TestCase):
|
||||
wait_for_and_do(
|
||||
browser, 'input[name="duration"]',
|
||||
lambda field: field.fill('333'))
|
||||
sleep(1) # wait for new-asset panel animation
|
||||
sleep(1)
|
||||
|
||||
wait_for_and_do(browser, '#add-form', lambda form: form.click())
|
||||
sleep(1)
|
||||
|
||||
wait_for_and_do(browser, '#save-asset', lambda btn: btn.click())
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets.first()
|
||||
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets[0]
|
||||
|
||||
self.assertEqual(asset['duration'], u'333')
|
||||
self.assertEqual(asset.duration, 333)
|
||||
|
||||
def test_add_asset_image_upload(self):
|
||||
image_file = '/tmp/image.png'
|
||||
@@ -167,20 +159,17 @@ class WebTest(TestCase):
|
||||
wait_for_and_do(
|
||||
browser, 'input[name="file_upload"]',
|
||||
lambda input: input.fill(image_file))
|
||||
sleep(1) # wait for new-asset panel animation
|
||||
sleep(1)
|
||||
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets.first()
|
||||
|
||||
self.assertEqual(len(assets), 1)
|
||||
|
||||
asset = assets[0]
|
||||
|
||||
self.assertEqual(asset['name'], u'image.png')
|
||||
self.assertEqual(asset['mimetype'], u'image')
|
||||
self.assertEqual(asset['duration'], settings['default_duration'])
|
||||
self.assertEqual(asset.name, 'image.png')
|
||||
self.assertEqual(asset.mimetype, 'image')
|
||||
self.assertEqual(asset.duration, settings['default_duration'])
|
||||
|
||||
def test_add_asset_video_upload(self):
|
||||
with (
|
||||
@@ -198,19 +187,17 @@ class WebTest(TestCase):
|
||||
wait_for_and_do(
|
||||
browser, 'input[name="file_upload"]',
|
||||
lambda input: input.fill(video_file))
|
||||
sleep(1) # wait for new-asset panel animation
|
||||
sleep(1) # Wait for the new-asset panel animation.
|
||||
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3) # The backend needs time to process the request.
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets.first()
|
||||
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets[0]
|
||||
|
||||
self.assertEqual(asset['name'], u'video.mov')
|
||||
self.assertEqual(asset['mimetype'], u'video')
|
||||
self.assertEqual(asset['duration'], u'5')
|
||||
self.assertEqual(asset.name, 'video.mov')
|
||||
self.assertEqual(asset.mimetype, 'video')
|
||||
self.assertEqual(asset.duration, 5)
|
||||
|
||||
def test_add_two_assets_upload(self):
|
||||
with (
|
||||
@@ -234,21 +221,20 @@ class WebTest(TestCase):
|
||||
browser, 'input[name="file_upload"]',
|
||||
lambda input: input.fill(video_file))
|
||||
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
assets = Asset.objects.all()
|
||||
|
||||
self.assertEqual(len(assets), 2)
|
||||
self.assertEqual(len(assets), 2)
|
||||
|
||||
self.assertEqual(assets[0]['name'], u'standby.png')
|
||||
self.assertEqual(assets[0]['mimetype'], u'image')
|
||||
self.assertEqual(
|
||||
assets[0]['duration'], settings['default_duration'])
|
||||
self.assertEqual(assets[0].name, 'standby.png')
|
||||
self.assertEqual(assets[0].mimetype, 'image')
|
||||
self.assertEqual(
|
||||
assets[0].duration, settings['default_duration'])
|
||||
|
||||
self.assertEqual(assets[1]['name'], u'video.mov')
|
||||
self.assertEqual(assets[1]['mimetype'], u'video')
|
||||
self.assertEqual(assets[1]['duration'], u'5')
|
||||
self.assertEqual(assets[1].name, 'video.mov')
|
||||
self.assertEqual(assets[1].mimetype, 'video')
|
||||
self.assertEqual(assets[1].duration, 5)
|
||||
|
||||
@skip('fixme')
|
||||
def test_add_asset_streaming(self):
|
||||
@@ -268,23 +254,20 @@ class WebTest(TestCase):
|
||||
sleep(1)
|
||||
|
||||
wait_for_and_do(browser, '#save-asset', lambda btn: btn.click())
|
||||
sleep(10) # backend need time to process request
|
||||
sleep(10)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets.first()
|
||||
|
||||
self.assertEqual(len(assets), 1)
|
||||
asset = assets[0]
|
||||
self.assertEqual(asset.name, 'rtsp://localhost:8091/asset.mov')
|
||||
self.assertEqual(asset.uri, 'rtsp://localhost:8091/asset.mov')
|
||||
self.assertEqual(asset.mimetype, 'streaming')
|
||||
self.assertEqual(
|
||||
asset.duration, settings['default_streaming_duration'])
|
||||
|
||||
self.assertEqual(asset['name'], u'rtsp://localhost:8091/asset.mov')
|
||||
self.assertEqual(asset['uri'], u'rtsp://localhost:8091/asset.mov')
|
||||
self.assertEqual(asset['mimetype'], u'streaming')
|
||||
self.assertEqual(
|
||||
asset['duration'], settings['default_streaming_duration'])
|
||||
|
||||
def test_rm_asset(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets_helper.create(conn, asset_x)
|
||||
def test_remove_asset(self):
|
||||
Asset.objects.create(**asset_x)
|
||||
|
||||
with get_browser() as browser:
|
||||
browser.visit(main_page_url)
|
||||
@@ -293,53 +276,48 @@ class WebTest(TestCase):
|
||||
browser, '.delete-asset-button', lambda btn: btn.click())
|
||||
wait_for_and_do(
|
||||
browser, '.confirm-delete', lambda btn: btn.click())
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
self.assertEqual(len(assets), 0)
|
||||
self.assertEqual(Asset.objects.count(), 0)
|
||||
|
||||
def test_enable_asset(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets_helper.create(conn, asset_x)
|
||||
Asset.objects.create(**asset_x)
|
||||
|
||||
with get_browser() as browser:
|
||||
browser.visit(main_page_url)
|
||||
wait_for_and_do(browser, '.toggle', lambda btn: btn.click())
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
self.assertEqual(len(assets), 1)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
|
||||
asset = assets[0]
|
||||
self.assertEqual(asset['is_enabled'], 1)
|
||||
asset = assets.first()
|
||||
self.assertEqual(asset.is_enabled, 1)
|
||||
|
||||
def test_disable_asset(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
_asset_x = asset_x.copy()
|
||||
_asset_x['is_enabled'] = 1
|
||||
assets_helper.create(conn, _asset_x)
|
||||
Asset.objects.create(**{
|
||||
**asset_x,
|
||||
'is_enabled': 1
|
||||
})
|
||||
|
||||
with get_browser() as browser:
|
||||
browser.visit(main_page_url)
|
||||
|
||||
wait_for_and_do(browser, '.toggle', lambda btn: btn.click())
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
assets = assets_helper.read(conn)
|
||||
self.assertEqual(len(assets), 1)
|
||||
assets = Asset.objects.all()
|
||||
self.assertEqual(len(assets), 1)
|
||||
|
||||
asset = assets[0]
|
||||
self.assertEqual(asset['is_enabled'], 0)
|
||||
asset = assets.first()
|
||||
self.assertEqual(asset.is_enabled, 0)
|
||||
|
||||
def test_reorder_asset(self):
|
||||
with db.conn(settings['database']) as conn:
|
||||
_asset_x = asset_x.copy()
|
||||
_asset_x['is_enabled'] = 1
|
||||
assets_helper.create(conn, _asset_x)
|
||||
assets_helper.create(conn, asset_y)
|
||||
Asset.objects.create(**{
|
||||
**asset_x,
|
||||
'is_enabled': 1
|
||||
})
|
||||
Asset.objects.create(**asset_y)
|
||||
|
||||
with get_browser() as browser:
|
||||
browser.visit(main_page_url)
|
||||
@@ -349,14 +327,13 @@ class WebTest(TestCase):
|
||||
|
||||
asset_y_to_reorder = browser.find_by_id(asset_y['asset_id'])
|
||||
asset_x_for_drag.drag_and_drop(asset_y_to_reorder)
|
||||
sleep(3) # backend need time to process request
|
||||
sleep(3)
|
||||
|
||||
with db.conn(settings['database']) as conn:
|
||||
x = assets_helper.read(conn, asset_x['asset_id'])
|
||||
y = assets_helper.read(conn, asset_y['asset_id'])
|
||||
x = Asset.objects.get(asset_id=asset_x['asset_id'])
|
||||
y = Asset.objects.get(asset_id=asset_y['asset_id'])
|
||||
|
||||
self.assertEqual(x['play_order'], 0)
|
||||
self.assertEqual(y['play_order'], 1)
|
||||
self.assertEqual(x.play_order, 0)
|
||||
self.assertEqual(y.play_order, 1)
|
||||
|
||||
def test_settings_page_should_work(self):
|
||||
with get_browser() as browser:
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from __future__ import unicode_literals
|
||||
from os import getenv, path, listdir, system
|
||||
import unittest
|
||||
|
||||
|
||||
@@ -1,149 +0,0 @@
|
||||
# coding=utf-8
|
||||
from __future__ import unicode_literals
|
||||
from datetime import datetime
|
||||
import unittest
|
||||
import mock
|
||||
from api import helpers
|
||||
|
||||
|
||||
request_ok_json = """
|
||||
{
|
||||
"name": "https://mail.ru",
|
||||
"mimetype": "webpage",
|
||||
"uri": "https://mail.ru",
|
||||
"is_active": false,
|
||||
"start_date": "2016-07-19T12:42:00.000Z",
|
||||
"end_date": "2016-07-26T12:42:00.000Z",
|
||||
"duration": "30",
|
||||
"is_enabled": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}
|
||||
"""
|
||||
|
||||
request_json_no_name = """
|
||||
{
|
||||
"name": null,
|
||||
"mimetype":
|
||||
"webpage", "uri": "https://mail.ru",
|
||||
"is_active": false,
|
||||
"start_date": "2016-07-19T12:42:00.000Z",
|
||||
"end_date": "2016-07-26T12:42:00.000Z",
|
||||
"duration": "30",
|
||||
"is_enabled": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}
|
||||
"""
|
||||
|
||||
request_json_no_mime = """
|
||||
{
|
||||
"name": "https://mail.ru",
|
||||
"mimetype": null,
|
||||
"uri": "https://mail.ru",
|
||||
"is_active": false,
|
||||
"start_date": "2016-07-19T12:42:00.000Z",
|
||||
"end_date": "2016-07-26T12:42:00.000Z",
|
||||
"duration": "30",
|
||||
"is_enabled": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}
|
||||
"""
|
||||
|
||||
request_json2 = """
|
||||
{
|
||||
"name": null,
|
||||
"mimetype": null,
|
||||
"uri": null,
|
||||
"is_active": false,
|
||||
"start_date": "2016-07-19T12:42:00.000Z",
|
||||
"end_date": "2016-07-26T12:42:00.000Z",
|
||||
"duration": "30",
|
||||
"is_enabled": 0,
|
||||
"nocache": 0,
|
||||
"play_order": 0,
|
||||
"skip_asset_check": 0
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
class RequestParseTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_asset_should_be_correct_V1_0(self):
|
||||
helpers.Request = (
|
||||
lambda a: mock.Mock(
|
||||
form={'model': request_ok_json},
|
||||
files=mock.Mock(get=lambda a: None),
|
||||
)
|
||||
)
|
||||
asset = helpers.prepare_asset(mock.Mock(environ={}))
|
||||
self.assertEqual(asset['duration'], 30)
|
||||
self.assertEqual(asset['is_enabled'], 0)
|
||||
self.assertEqual(asset['mimetype'], u'webpage')
|
||||
self.assertEqual(asset['name'], u'https://mail.ru')
|
||||
self.assertEqual(asset['end_date'], datetime(2016, 7, 26, 12, 42))
|
||||
self.assertEqual(asset['start_date'], datetime(2016, 7, 19, 12, 42))
|
||||
|
||||
def test_exception_should_rise_if_no_name_presented_V1_0(self):
|
||||
helpers.Request = (
|
||||
lambda a: mock.Mock(
|
||||
form={'model': request_json_no_name},
|
||||
files=mock.Mock(get=lambda a: None),
|
||||
)
|
||||
)
|
||||
with self.assertRaises(Exception):
|
||||
helpers.prepare_asset(mock.Mock(environ={}))
|
||||
|
||||
def test_exception_should_rise_if_no_mime_presented_V1_0(self):
|
||||
helpers.Request = (
|
||||
lambda a: mock.Mock(
|
||||
form={'model': request_json_no_mime},
|
||||
files=mock.Mock(get=lambda a: None),
|
||||
)
|
||||
)
|
||||
with self.assertRaises(Exception):
|
||||
helpers.prepare_asset(mock.Mock(environ={}))
|
||||
|
||||
def test_asset_should_be_correct_V1_1(self):
|
||||
helpers.Request = (
|
||||
lambda a: mock.Mock(
|
||||
data=request_ok_json,
|
||||
files=mock.Mock(get=lambda a: None),
|
||||
)
|
||||
)
|
||||
asset = helpers.prepare_asset(mock.Mock(environ={}))
|
||||
self.assertEqual(asset['duration'], 30)
|
||||
self.assertEqual(asset['is_enabled'], 0)
|
||||
self.assertEqual(asset['mimetype'], u'webpage')
|
||||
self.assertEqual(asset['name'], u'https://mail.ru')
|
||||
self.assertEqual(asset['end_date'], datetime(2016, 7, 26, 12, 42))
|
||||
self.assertEqual(asset['start_date'], datetime(2016, 7, 19, 12, 42))
|
||||
|
||||
def test_exception_should_rise_if_no_name_presented_V1_1(self):
|
||||
helpers.Request = (
|
||||
lambda a: mock.Mock(
|
||||
data=request_json_no_name,
|
||||
files=mock.Mock(get=lambda a: None),
|
||||
)
|
||||
)
|
||||
with self.assertRaises(Exception):
|
||||
helpers.prepare_asset(mock.Mock(environ={}))
|
||||
|
||||
def test_exception_should_rise_if_no_mime_presented_V1_1(self):
|
||||
helpers.Request = (
|
||||
lambda a: mock.Mock(
|
||||
data=request_json_no_mime,
|
||||
files=mock.Mock(get=lambda a: None),
|
||||
)
|
||||
)
|
||||
with self.assertRaises(Exception):
|
||||
helpers.prepare_asset(mock.Mock(environ={}))
|
||||
@@ -1,21 +1,29 @@
|
||||
import logging
|
||||
import os
|
||||
import unittest
|
||||
|
||||
import time_machine
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from lib import db, assets_helper
|
||||
from datetime import timedelta
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone
|
||||
|
||||
from anthias_app.models import Asset
|
||||
from settings import settings
|
||||
|
||||
import settings
|
||||
import viewer # noqa: E402
|
||||
|
||||
asset_x = {
|
||||
'mimetype': u'web',
|
||||
'asset_id': u'4c8dbce552edb5812d3a866cfe5f159d',
|
||||
'name': u'WireLoad',
|
||||
'uri': u'https://www.wireload.net',
|
||||
'start_date': datetime.now() - timedelta(days=3),
|
||||
'end_date': datetime.now() + timedelta(days=3),
|
||||
'duration': u'5',
|
||||
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
ASSET_X = {
|
||||
'mimetype': 'web',
|
||||
'asset_id': '4c8dbce552edb5812d3a866cfe5f159d',
|
||||
'name': 'WireLoad',
|
||||
'uri': 'http://www.wireload.net',
|
||||
'start_date': timezone.now() - timedelta(days=3),
|
||||
'end_date': timezone.now() + timedelta(days=3),
|
||||
'duration': 5,
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
@@ -23,14 +31,18 @@ asset_x = {
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
|
||||
asset_y = {
|
||||
'mimetype': u'image',
|
||||
'asset_id': u'7e978f8c1204a6f70770a1eb54a76e9b',
|
||||
'name': u'Google',
|
||||
'uri': u'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': datetime.now() - timedelta(days=1),
|
||||
'end_date': datetime.now() + timedelta(days=2),
|
||||
'duration': u'6',
|
||||
ASSET_X_DIFF = {
|
||||
'duration': 10
|
||||
}
|
||||
|
||||
ASSET_Y = {
|
||||
'mimetype': 'image',
|
||||
'asset_id': '7e978f8c1204a6f70770a1eb54a76e9b',
|
||||
'name': 'Google',
|
||||
'uri': 'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': timezone.now() - timedelta(days=1),
|
||||
'end_date': timezone.now() + timedelta(days=2),
|
||||
'duration': 6,
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
@@ -38,14 +50,14 @@ asset_y = {
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
|
||||
asset_z = {
|
||||
'mimetype': u'image',
|
||||
'asset_id': u'7e978f8c1204a6f70770a1eb54a76e9c',
|
||||
'name': u'Google',
|
||||
'uri': u'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': datetime.now() - timedelta(days=1),
|
||||
'end_date': datetime.now() + timedelta(days=1),
|
||||
'duration': u'6',
|
||||
ASSET_Z = {
|
||||
'mimetype': 'image',
|
||||
'asset_id': '7e978f8c1204a6f70770a1eb54a76e9c',
|
||||
'name': 'Google',
|
||||
'uri': 'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': timezone.now() - timedelta(days=1),
|
||||
'end_date': timezone.now() + timedelta(days=1),
|
||||
'duration': 6,
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
@@ -53,14 +65,14 @@ asset_z = {
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
|
||||
asset_tomorrow = {
|
||||
'mimetype': u'image',
|
||||
'asset_id': u'7e978f8c1204a6f70770a1eb54a76e9c',
|
||||
'name': u'Google',
|
||||
'uri': u'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': datetime.now() + timedelta(days=1),
|
||||
'end_date': datetime.now() + timedelta(days=1),
|
||||
'duration': u'6',
|
||||
ASSET_TOMORROW = {
|
||||
'mimetype': 'image',
|
||||
'asset_id': '7e978f8c1204a6f70770a1eb54a76e9c',
|
||||
'name': 'Google',
|
||||
'uri': 'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': timezone.now() + timedelta(days=1),
|
||||
'end_date': timezone.now() + timedelta(days=1),
|
||||
'duration': 6,
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
@@ -71,67 +83,55 @@ asset_tomorrow = {
|
||||
FAKE_DB_PATH = '/tmp/fakedb'
|
||||
|
||||
|
||||
class SchedulerTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.old_db_path = settings.settings['database']
|
||||
viewer.db_conn = db.conn(':memory:')
|
||||
with db.commit(viewer.db_conn) as cursor:
|
||||
cursor.execute(assets_helper.create_assets_table)
|
||||
|
||||
class SchedulerTest(TestCase):
|
||||
def tearDown(self):
|
||||
settings.settings['database'] = self.old_db_path
|
||||
settings.settings['shuffle_playlist'] = False
|
||||
viewer.datetime, assets_helper.get_time = (
|
||||
datetime, lambda: datetime.utcnow())
|
||||
viewer.db_conn.close()
|
||||
try:
|
||||
os.remove(FAKE_DB_PATH)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
settings['shuffle_playlist'] = False
|
||||
|
||||
def test_generate_asset_list_assets_should_be_y_and_x(self):
|
||||
assets_helper.create_multiple(viewer.db_conn, [asset_x, asset_y])
|
||||
def create_assets(self, assets):
|
||||
for asset in assets:
|
||||
Asset.objects.create(**asset)
|
||||
|
||||
def test_generate_asset_list_assets_should_return_list_sorted_by_play_order(self): # noqa: E501
|
||||
self.create_assets([ASSET_X, ASSET_Y])
|
||||
assets, _ = viewer.generate_asset_list()
|
||||
self.assertEqual(assets, [asset_y, asset_x])
|
||||
self.assertEqual(assets, [ASSET_Y, ASSET_X])
|
||||
|
||||
def test_generate_asset_list_check_deadline_if_both_active(self):
|
||||
# if x and y currently active
|
||||
assets_helper.create_multiple(viewer.db_conn, [asset_x, asset_y])
|
||||
self.create_assets([ASSET_X, ASSET_Y])
|
||||
_, deadline = viewer.generate_asset_list()
|
||||
self.assertEqual(deadline, asset_y['end_date'])
|
||||
self.assertEqual(deadline, ASSET_Y['end_date'])
|
||||
|
||||
def test_generate_asset_list_check_deadline_if_asset_scheduled(self):
|
||||
"""If asset_x is active and asset_x[end_date] == (now + 3) and
|
||||
asset_tomorrow will be active tomorrow then deadline should be
|
||||
asset_tomorrow[start_date]
|
||||
"""If ASSET_X is active and ASSET_X[end_date] == (now + 3) and
|
||||
ASSET_TOMORROW will be active tomorrow then deadline should be
|
||||
ASSET_TOMORROW[start_date]
|
||||
"""
|
||||
assets_helper.create_multiple(
|
||||
viewer.db_conn, [asset_x, asset_tomorrow])
|
||||
self.create_assets([ASSET_X, ASSET_TOMORROW])
|
||||
_, deadline = viewer.generate_asset_list()
|
||||
self.assertEqual(deadline, asset_tomorrow['start_date'])
|
||||
self.assertEqual(deadline, ASSET_TOMORROW['start_date'])
|
||||
|
||||
def test_get_next_asset_should_be_y_and_x(self):
|
||||
assets_helper.create_multiple(viewer.db_conn, [asset_x, asset_y])
|
||||
self.create_assets([ASSET_X, ASSET_Y])
|
||||
scheduler = viewer.Scheduler()
|
||||
|
||||
expect_y = scheduler.get_next_asset()
|
||||
expect_x = scheduler.get_next_asset()
|
||||
expected_y = scheduler.get_next_asset()
|
||||
expected_x = scheduler.get_next_asset()
|
||||
|
||||
self.assertEqual([expect_y, expect_x], [asset_y, asset_x])
|
||||
self.assertEqual([expected_y, expected_x], [ASSET_Y, ASSET_X])
|
||||
|
||||
def test_keep_same_position_on_playlist_update(self):
|
||||
assets_helper.create_multiple(viewer.db_conn, [asset_x, asset_y])
|
||||
self.create_assets([ASSET_X, ASSET_Y])
|
||||
scheduler = viewer.Scheduler()
|
||||
|
||||
scheduler.get_next_asset()
|
||||
|
||||
assets_helper.create(viewer.db_conn, asset_z)
|
||||
self.create_assets([ASSET_Z])
|
||||
scheduler.update_playlist()
|
||||
|
||||
self.assertEqual(scheduler.index, 1)
|
||||
|
||||
def test_counter_should_increment_after_full_asset_loop(self):
|
||||
settings.settings['shuffle_playlist'] = True
|
||||
assets_helper.create_multiple(viewer.db_conn, [asset_x, asset_y])
|
||||
settings['shuffle_playlist'] = True
|
||||
self.create_assets([ASSET_X, ASSET_Y])
|
||||
scheduler = viewer.Scheduler()
|
||||
|
||||
self.assertEqual(scheduler.counter, 0)
|
||||
@@ -142,14 +142,14 @@ class SchedulerTest(unittest.TestCase):
|
||||
self.assertEqual(scheduler.counter, 1)
|
||||
|
||||
def test_check_get_db_mtime(self):
|
||||
settings.settings['database'] = FAKE_DB_PATH
|
||||
settings['database'] = FAKE_DB_PATH
|
||||
with open(FAKE_DB_PATH, 'a'):
|
||||
os.utime(FAKE_DB_PATH, (0, 0))
|
||||
|
||||
self.assertEqual(0, viewer.Scheduler().get_db_mtime())
|
||||
|
||||
def test_playlist_should_be_updated_after_deadline_reached(self):
|
||||
assets_helper.create_multiple(viewer.db_conn, [asset_x, asset_y])
|
||||
self.create_assets([ASSET_X, ASSET_Y])
|
||||
_, deadline = viewer.generate_asset_list()
|
||||
|
||||
traveller = time_machine.travel(deadline + timedelta(seconds=1))
|
||||
@@ -158,5 +158,5 @@ class SchedulerTest(unittest.TestCase):
|
||||
scheduler = viewer.Scheduler()
|
||||
scheduler.refresh_playlist()
|
||||
|
||||
self.assertEqual([asset_x], scheduler.assets)
|
||||
self.assertEqual([ASSET_X], scheduler.assets)
|
||||
traveller.stop()
|
||||
|
||||
@@ -1,264 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from builtins import object
|
||||
import datetime
|
||||
import functools
|
||||
import unittest
|
||||
|
||||
from lib import db, assets_helper
|
||||
from lib.utils import url_fails
|
||||
from api.helpers import update_asset
|
||||
|
||||
# fixtures chronology
|
||||
#
|
||||
# A B
|
||||
# +===========+ -- asset X
|
||||
# | |
|
||||
# <----+--[--+--[--+--]--+--]--+---> (time)
|
||||
# | | | | | | |
|
||||
# | | +==+=====+==+ | -- asset Y
|
||||
# | | C | | D |
|
||||
# | | | | |
|
||||
# E F G H I -- test points
|
||||
# - X XY Y - -- expected test result
|
||||
date_e = datetime.datetime(2013, 1, 15, 00, 00)
|
||||
date_a = datetime.datetime(2013, 1, 16, 00, 00)
|
||||
date_f = datetime.datetime(2013, 1, 16, 12, 00)
|
||||
date_c = datetime.datetime(2013, 1, 16, 23, 00)
|
||||
date_g = datetime.datetime(2013, 1, 17, 10, 00)
|
||||
date_b = datetime.datetime(2013, 1, 19, 23, 59)
|
||||
date_h = datetime.datetime(2013, 1, 20, 10, 59)
|
||||
date_d = datetime.datetime(2013, 1, 21, 00, 00)
|
||||
|
||||
asset_w = {
|
||||
'mimetype': u'web',
|
||||
'asset_id': u'4c8dbce552edb5812d3a866cfe5f159e',
|
||||
'name': u'いろはにほへど',
|
||||
'uri': u'https://www.wireload.net',
|
||||
'start_date': date_a,
|
||||
'end_date': date_b,
|
||||
'duration': u'5',
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
'play_order': 1,
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
|
||||
asset_w_diff = {
|
||||
'name': u'Tôi có thể ăn thủy tinh mà không hại gì.'
|
||||
}
|
||||
|
||||
asset_x = {
|
||||
'mimetype': u'web',
|
||||
'asset_id': u'4c8dbce552edb5812d3a866cfe5f159d',
|
||||
'name': u'WireLoad',
|
||||
'uri': u'https://www.wireload.net',
|
||||
'start_date': date_a,
|
||||
'end_date': date_b,
|
||||
'duration': u'5',
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
'play_order': 1,
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
|
||||
asset_x_diff = {
|
||||
'duration': u'10'
|
||||
}
|
||||
|
||||
asset_y = {
|
||||
'mimetype': u'image',
|
||||
'asset_id': u'7e978f8c1204a6f70770a1eb54a76e9b',
|
||||
'name': u'Google',
|
||||
'uri': u'https://www.google.com/images/srpr/logo3w.png',
|
||||
'start_date': date_c,
|
||||
'end_date': date_d,
|
||||
'duration': u'6',
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
'play_order': 0,
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
asset_y_diff = {
|
||||
'duration': u'324'
|
||||
}
|
||||
asset_z = {
|
||||
'mimetype': u'image',
|
||||
'asset_id': u'9722cd9c45e44dc9b23521be8132b38f',
|
||||
'name': u'url test',
|
||||
'start_date': date_c,
|
||||
'end_date': date_d,
|
||||
'duration': u'1',
|
||||
'is_enabled': 1,
|
||||
'nocache': 0,
|
||||
'is_processing': 0,
|
||||
'skip_asset_check': 0
|
||||
}
|
||||
url_fail = 'https://doesnotwork.example.com'
|
||||
url_redir = 'https://example.com'
|
||||
uri_ = '/home/user/file'
|
||||
|
||||
|
||||
class Req(object):
|
||||
def __init__(self, asset):
|
||||
self.POST = asset
|
||||
|
||||
|
||||
class URLHelperTest(unittest.TestCase):
|
||||
def test_url_1(self):
|
||||
self.assertTrue(url_fails(url_fail))
|
||||
|
||||
def test_url_2(self):
|
||||
self.assertFalse(url_fails(url_redir))
|
||||
|
||||
def test_url_3(self):
|
||||
self.assertFalse(url_fails(uri_))
|
||||
|
||||
|
||||
class DBHelperTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.assertEmpty = functools.partial(self.assertEqual, [])
|
||||
self.conn = db.conn(':memory:')
|
||||
with db.commit(self.conn) as cursor:
|
||||
cursor.execute(assets_helper.create_assets_table)
|
||||
|
||||
def tearDown(self):
|
||||
self.conn.close()
|
||||
# ✂--------
|
||||
|
||||
def test_create_read_asset(self):
|
||||
assets_helper.create(self.conn, asset_x)
|
||||
assets_helper.create(self.conn, asset_y)
|
||||
should_be_y_x = assets_helper.read(self.conn)
|
||||
self.assertEqual([asset_y, asset_x], should_be_y_x)
|
||||
# ✂--------
|
||||
|
||||
def test_create_update_read_asset(self):
|
||||
assets_helper.create(self.conn, asset_x)
|
||||
asset_x_ = asset_x.copy()
|
||||
asset_x_.update(**asset_x_diff)
|
||||
assets_helper.update(self.conn, asset_x['asset_id'], asset_x_)
|
||||
|
||||
assets_helper.create(self.conn, asset_y)
|
||||
asset_y_ = asset_y.copy()
|
||||
asset_y_.update(**asset_y_diff)
|
||||
assets_helper.update(self.conn, asset_y['asset_id'], asset_y_)
|
||||
|
||||
should_be_y__x_ = assets_helper.read(self.conn)
|
||||
self.assertEqual([asset_y_, asset_x_], should_be_y__x_)
|
||||
# ✂--------
|
||||
|
||||
def test_create_delete_asset(self):
|
||||
assets_helper.create(self.conn, asset_x)
|
||||
assets_helper.delete(self.conn, asset_x['asset_id'])
|
||||
|
||||
assets_helper.create(self.conn, asset_y)
|
||||
assets_helper.delete(self.conn, asset_y['asset_id'])
|
||||
|
||||
should_be_empty = assets_helper.read(self.conn)
|
||||
self.assertEmpty(should_be_empty)
|
||||
# ✂--------
|
||||
|
||||
def test_create_update_read_asset_utf8(self):
|
||||
assets_helper.create(self.conn, asset_w)
|
||||
asset_w_ = asset_w.copy()
|
||||
asset_w_.update(**asset_w_diff)
|
||||
assets_helper.update(self.conn, asset_w['asset_id'], asset_w_)
|
||||
|
||||
should_be_w_ = assets_helper.read(self.conn)
|
||||
self.assertEqual([asset_w_], should_be_w_)
|
||||
# ✂--------
|
||||
|
||||
def set_now(self, d):
|
||||
assets_helper.get_time = lambda: d
|
||||
|
||||
def test_get_playlist(self):
|
||||
assets_helper.create(self.conn, asset_x)
|
||||
assets_helper.create(self.conn, asset_y)
|
||||
|
||||
self.set_now(date_e)
|
||||
should_be_empty = assets_helper.get_playlist(self.conn)
|
||||
self.assertEmpty(should_be_empty)
|
||||
|
||||
self.set_now(date_f)
|
||||
[should_be_x] = assets_helper.get_playlist(self.conn)
|
||||
self.assertEqual(asset_x['asset_id'], should_be_x['asset_id'])
|
||||
|
||||
self.set_now(date_g)
|
||||
should_be_y_x = assets_helper.get_playlist(self.conn)
|
||||
self.assertEqual([should_be_y_x[0]['asset_id'],
|
||||
should_be_y_x[1]['asset_id']],
|
||||
[asset_y['asset_id'],
|
||||
asset_x['asset_id']])
|
||||
|
||||
self.set_now(date_h)
|
||||
[should_be_y] = assets_helper.get_playlist(self.conn)
|
||||
self.assertEqual(asset_y['asset_id'], should_be_y['asset_id'])
|
||||
# ✂--------
|
||||
|
||||
def test_set_order(self):
|
||||
assets = [asset_x, asset_y, asset_z, asset_w]
|
||||
for_order = [asset_y, asset_x]
|
||||
|
||||
assets_helper.create_multiple(self.conn, assets)
|
||||
assets_helper.save_ordering(
|
||||
self.conn, [asset['asset_id'] for asset in for_order])
|
||||
|
||||
fetched = assets_helper.read(self.conn)
|
||||
|
||||
self.assertEquals(
|
||||
[(0, asset_y['asset_id']),
|
||||
(1, asset_x['asset_id']),
|
||||
(2, asset_z['asset_id']),
|
||||
(2, asset_w['asset_id'])],
|
||||
[(asset['play_order'], asset['asset_id']) for asset in fetched])
|
||||
|
||||
def test_set_order_empty(self):
|
||||
assets = [asset_x, asset_y, asset_z]
|
||||
|
||||
assets_helper.create_multiple(self.conn, assets)
|
||||
assets_helper.save_ordering(self.conn, [])
|
||||
|
||||
fetched = assets_helper.read(self.conn)
|
||||
|
||||
self.assertEquals(
|
||||
[0, 0, 0], [asset['play_order'] for asset in fetched])
|
||||
|
||||
def test_update_asset(self):
|
||||
asset_x_ = assets_helper.create(self.conn, asset_x)
|
||||
asset_x_copy = asset_x_.copy()
|
||||
data = {
|
||||
'name': 'New name',
|
||||
'mimetype': 'should not setted',
|
||||
'empty': 'non exists field',
|
||||
}
|
||||
|
||||
self.assertEquals(asset_x_, asset_x_copy)
|
||||
|
||||
update_asset(asset_x_copy, data)
|
||||
asset_x_copy = assets_helper.update(
|
||||
self.conn, asset_x_copy.get('id'), asset_x_copy)
|
||||
|
||||
self.assertEquals(asset_x_copy,
|
||||
{'is_enabled': 1,
|
||||
'asset_id': None,
|
||||
'end_date': datetime.datetime(2013, 1, 19, 23, 59),
|
||||
'is_active': 0,
|
||||
'duration': u'5',
|
||||
'is_processing': 0,
|
||||
'mimetype': u'web',
|
||||
'name': 'New name',
|
||||
'nocache': 0,
|
||||
'uri': u'https://www.wireload.net',
|
||||
'skip_asset_check': 0,
|
||||
'play_order': 1,
|
||||
'start_date': datetime.datetime(2013, 1, 16, 0, 0)
|
||||
}
|
||||
)
|
||||
|
||||
self.assertNotEqual(asset_x_, asset_x_copy)
|
||||
@@ -1,10 +1,9 @@
|
||||
from __future__ import unicode_literals
|
||||
import os
|
||||
import sh
|
||||
import shutil
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from unittest import skip, TestCase
|
||||
from unittest import TestCase
|
||||
|
||||
user_home_dir = os.getenv('HOME')
|
||||
|
||||
@@ -77,26 +76,13 @@ class SettingsTest(TestCase):
|
||||
shutil.rmtree(CONFIG_DIR)
|
||||
os.getenv = self.orig_getenv
|
||||
|
||||
# This test passes locally but fails on CI.
|
||||
@skip('fixme')
|
||||
def test_anthias_should_exit_if_no_settings_file_found(self):
|
||||
new_env = os.environ.copy()
|
||||
new_env["HOME"] = "/tmp"
|
||||
project_dir = os.path.dirname(__file__)
|
||||
|
||||
with self.assertRaises(sh.ErrorReturnCode_1):
|
||||
sh.python3(project_dir + '/../viewer.py', _env=new_env)
|
||||
|
||||
with self.assertRaises(sh.ErrorReturnCode_1):
|
||||
sh.python3(project_dir + '/../server.py', _env=new_env)
|
||||
|
||||
def test_parse_settings(self):
|
||||
with fake_settings(settings1) as (mod_settings, settings):
|
||||
self.assertEquals(settings['player_name'], 'new player')
|
||||
self.assertEquals(settings['show_splash'], False)
|
||||
self.assertEquals(settings['shuffle_playlist'], True)
|
||||
self.assertEquals(settings['debug_logging'], True)
|
||||
self.assertEquals(settings['default_duration'], '45')
|
||||
self.assertEquals(settings['default_duration'], 45)
|
||||
|
||||
def test_default_settings(self):
|
||||
with fake_settings(empty_settings) as (mod_settings, settings):
|
||||
@@ -132,7 +118,7 @@ class SettingsTest(TestCase):
|
||||
saved = f.read()
|
||||
with fake_settings(saved) as (mod_settings, settings):
|
||||
# changes saved?
|
||||
self.assertEqual(settings['default_duration'], '35')
|
||||
self.assertEqual(settings['default_duration'], 35)
|
||||
self.assertEqual(settings['verify_ssl'], True)
|
||||
# no out of thin air changes?
|
||||
self.assertEqual(settings['audio_output'], 'hdmi')
|
||||
|
||||
@@ -1,18 +1,34 @@
|
||||
# coding=utf-8
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime
|
||||
from django.test import TestCase
|
||||
import unittest
|
||||
from lib import utils
|
||||
from lib.utils import handler, template_handle_unicode, url_fails
|
||||
|
||||
url_fail = 'http://doesnotwork.example.com'
|
||||
url_redir = 'http://example.com'
|
||||
uri_ = '/home/user/file'
|
||||
|
||||
|
||||
class UtilsTest(unittest.TestCase):
|
||||
def test_unicode_correctness_in_bottle_templates(self):
|
||||
self.assertEqual(utils.template_handle_unicode('hello'), u'hello')
|
||||
self.assertEqual(template_handle_unicode('hello'), u'hello')
|
||||
self.assertEqual(
|
||||
utils.template_handle_unicode('Привет'),
|
||||
template_handle_unicode('Привет'),
|
||||
u'\u041f\u0440\u0438\u0432\u0435\u0442',
|
||||
)
|
||||
|
||||
def test_json_tz(self):
|
||||
json_str = utils.handler(datetime(2016, 7, 19, 12, 42))
|
||||
json_str = handler(datetime(2016, 7, 19, 12, 42))
|
||||
self.assertEqual(json_str, '2016-07-19T12:42:00+00:00')
|
||||
|
||||
|
||||
class URLHelperTest(TestCase):
|
||||
def test_url_1(self):
|
||||
self.assertTrue(url_fails(url_fail))
|
||||
|
||||
def test_url_2(self):
|
||||
self.assertFalse(url_fails(url_redir))
|
||||
|
||||
def test_url_3(self):
|
||||
self.assertFalse(url_fails(uri_))
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import unicode_literals
|
||||
import logging
|
||||
import mock
|
||||
import unittest
|
||||
import os
|
||||
@@ -9,6 +9,9 @@ import viewer
|
||||
from time import sleep
|
||||
|
||||
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
class ViewerTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.original_splash_delay = viewer.SPLASH_DELAY
|
||||
@@ -47,6 +50,7 @@ def noop(*a, **k):
|
||||
|
||||
class TestEmptyPl(ViewerTestCase):
|
||||
|
||||
@mock.patch('viewer.SERVER_WAIT_TIMEOUT', 0)
|
||||
@mock.patch('viewer.start_loop', side_effect=noop)
|
||||
@mock.patch('viewer.view_image', side_effect=noop)
|
||||
@mock.patch('viewer.view_webpage', side_effect=noop)
|
||||
@@ -67,7 +71,7 @@ class TestEmptyPl(ViewerTestCase):
|
||||
m_asset_list.assert_called_once()
|
||||
mock_setup.assert_called_once()
|
||||
mock_view_webpage.assert_called_once()
|
||||
mock_view_image.assert_called_once()
|
||||
self.assertEqual(mock_view_image.call_count, 2)
|
||||
mock_start_loop.assert_called_once()
|
||||
|
||||
|
||||
|
||||
89
viewer.py
89
viewer.py
@@ -1,17 +1,15 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import unicode_literals
|
||||
from builtins import bytes
|
||||
from future import standard_library
|
||||
from builtins import filter
|
||||
from builtins import range
|
||||
import django
|
||||
from builtins import object
|
||||
import json
|
||||
import logging
|
||||
import pydbus
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from jinja2 import Template
|
||||
from os import path, getenv, utime, system
|
||||
from random import shuffle
|
||||
@@ -24,26 +22,34 @@ import requests
|
||||
import sh
|
||||
import zmq
|
||||
|
||||
from lib import assets_helper
|
||||
from lib import db
|
||||
from lib.errors import SigalrmException
|
||||
from lib.media_player import MediaPlayerProxy
|
||||
from lib.utils import (
|
||||
url_fails,
|
||||
is_balena_app,
|
||||
get_node_ip,
|
||||
string_to_bool,
|
||||
connect_to_redis,
|
||||
get_balena_device_info,
|
||||
)
|
||||
from settings import settings, LISTEN, PORT, ZmqConsumer
|
||||
|
||||
try:
|
||||
django.setup()
|
||||
|
||||
# Place imports that uses Django in this block.
|
||||
|
||||
from anthias_app.models import Asset
|
||||
from django.utils import timezone
|
||||
from lib.utils import (
|
||||
url_fails,
|
||||
is_balena_app,
|
||||
get_node_ip,
|
||||
string_to_bool,
|
||||
connect_to_redis,
|
||||
get_balena_device_info,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
standard_library.install_aliases()
|
||||
|
||||
|
||||
__author__ = "Screenly, Inc"
|
||||
__copyright__ = "Copyright 2012-2023, Screenly, Inc"
|
||||
__copyright__ = "Copyright 2012-2024, Screenly, Inc"
|
||||
__license__ = "Dual License: GPLv2 and Commercial License"
|
||||
|
||||
|
||||
@@ -68,7 +74,6 @@ browser_bus = None
|
||||
r = connect_to_redis()
|
||||
|
||||
HOME = None
|
||||
db_conn = None
|
||||
|
||||
scheduler = None
|
||||
|
||||
@@ -101,10 +106,9 @@ def navigate_to_asset(asset_id):
|
||||
|
||||
|
||||
def stop_loop():
|
||||
global db_conn, loop_is_stopped
|
||||
global loop_is_stopped
|
||||
loop_is_stopped = True
|
||||
skip_asset()
|
||||
db_conn = None
|
||||
|
||||
|
||||
def play_loop():
|
||||
@@ -229,7 +233,7 @@ class Scheduler(object):
|
||||
|
||||
if self.extra_asset is not None:
|
||||
asset = get_specific_asset(self.extra_asset)
|
||||
if asset and asset['is_processing'] == 0:
|
||||
if asset and asset['is_processing']:
|
||||
self.current_asset_id = self.extra_asset
|
||||
self.extra_asset = None
|
||||
return asset
|
||||
@@ -263,7 +267,7 @@ class Scheduler(object):
|
||||
|
||||
def refresh_playlist(self):
|
||||
logging.debug('refresh_playlist')
|
||||
time_cur = datetime.utcnow()
|
||||
time_cur = timezone.now()
|
||||
|
||||
logging.debug(
|
||||
'refresh: counter: (%s) deadline (%s) timecur (%s)',
|
||||
@@ -307,7 +311,11 @@ class Scheduler(object):
|
||||
|
||||
def get_specific_asset(asset_id):
|
||||
logging.info('Getting specific asset')
|
||||
return assets_helper.read(db_conn, asset_id)
|
||||
try:
|
||||
return Asset.objects.get(asset_id=asset_id).__dict__
|
||||
except Asset.DoesNotExist:
|
||||
logging.debug('Asset %s not found in database', asset_id)
|
||||
return None
|
||||
|
||||
|
||||
def generate_asset_list():
|
||||
@@ -317,15 +325,28 @@ def generate_asset_list():
|
||||
2. Get nearest deadline
|
||||
"""
|
||||
logging.info('Generating asset-list...')
|
||||
assets = assets_helper.read(db_conn)
|
||||
assets = Asset.objects.all()
|
||||
deadlines = [
|
||||
asset['end_date']
|
||||
if assets_helper.is_active(asset)
|
||||
else asset['start_date']
|
||||
asset.end_date
|
||||
if asset.is_active()
|
||||
else asset.start_date
|
||||
for asset in assets
|
||||
]
|
||||
|
||||
playlist = list(filter(assets_helper.is_active, assets))
|
||||
enabled_assets = Asset.objects.filter(
|
||||
is_enabled=True,
|
||||
start_date__isnull=False,
|
||||
end_date__isnull=False,
|
||||
).order_by('play_order')
|
||||
playlist = [
|
||||
{
|
||||
k: v for k, v in asset.__dict__.items()
|
||||
if k not in ['_state', 'md5']
|
||||
}
|
||||
for asset in enabled_assets
|
||||
if asset.is_active()
|
||||
]
|
||||
|
||||
deadline = sorted(deadlines)[0] if len(deadlines) > 0 else None
|
||||
logging.debug('generate_asset_list deadline: %s', deadline)
|
||||
|
||||
@@ -450,7 +471,7 @@ def asset_loop(scheduler):
|
||||
|
||||
|
||||
def setup():
|
||||
global HOME, db_conn, browser_bus
|
||||
global HOME, browser_bus
|
||||
HOME = getenv('HOME')
|
||||
if not HOME:
|
||||
logging.error('No HOME variable')
|
||||
@@ -463,9 +484,8 @@ def setup():
|
||||
signal(SIGALRM, sigalrm)
|
||||
|
||||
load_settings()
|
||||
db_conn = db.conn(settings['database'])
|
||||
|
||||
load_browser()
|
||||
|
||||
bus = pydbus.SessionBus()
|
||||
browser_bus = bus.get('screenly.webview', '/Screenly')
|
||||
|
||||
@@ -482,7 +502,7 @@ def wait_for_node_ip(seconds):
|
||||
def wait_for_server(retries, wt=1):
|
||||
for _ in range(retries):
|
||||
try:
|
||||
response = requests.get('http://{0}:{1}'.format(LISTEN, PORT))
|
||||
response = requests.get(f'http://{LISTEN}:{PORT}/splash-page')
|
||||
response.raise_for_status()
|
||||
break
|
||||
except requests.exceptions.RequestException:
|
||||
@@ -490,22 +510,19 @@ def wait_for_server(retries, wt=1):
|
||||
|
||||
|
||||
def start_loop():
|
||||
global db_conn, loop_is_stopped
|
||||
global loop_is_stopped
|
||||
|
||||
logging.debug('Entering infinite loop.')
|
||||
while True:
|
||||
if loop_is_stopped:
|
||||
sleep(0.1)
|
||||
continue
|
||||
if not db_conn:
|
||||
load_settings()
|
||||
db_conn = db.conn(settings['database'])
|
||||
|
||||
asset_loop(scheduler)
|
||||
|
||||
|
||||
def main():
|
||||
global db_conn, scheduler
|
||||
global scheduler
|
||||
global load_screen_displayed, mq_data
|
||||
|
||||
load_screen_displayed = False
|
||||
@@ -521,6 +538,10 @@ def main():
|
||||
subscriber_2.daemon = True
|
||||
subscriber_2.start()
|
||||
|
||||
# This will prevent white screen from happening before showing the
|
||||
# splash screen with IP addresses.
|
||||
view_image(STANDBY_SCREEN)
|
||||
|
||||
wait_for_server(SERVER_WAIT_TIMEOUT)
|
||||
|
||||
scheduler = Scheduler()
|
||||
|
||||
Reference in New Issue
Block a user