code quality

commit 112770eddf (parent 934817da8a)
.gitignore (vendored), 2 lines changed

@@ -5,5 +5,3 @@ venv/
node_modules/
*.egg-info/
*.egg
.pre-commit-config.yaml (new file, 29 lines)

@@ -0,0 +1,29 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/psf/black
    rev: 23.1.0
    hooks:
      - id: black
        args:
          - --line-length=79
          - --exclude="""\.git|\.__pycache__|venv|_build|buck-out|build|dist"""
  - repo: https://github.com/PyCQA/autoflake.git
    rev: v2.0.2
    hooks:
      - id: autoflake
        args:
          - --remove-all-unused-imports
  - repo: https://github.com/PyCQA/flake8.git
    rev: 6.0.0
    hooks:
      - id: flake8
        exclude: instance/sample_settings.py
  - repo: https://github.com/PyCQA/docformatter.git
    rev: v1.5.1
    hooks:
      - id: docformatter
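Assuming the standard pre-commit workflow (the project does not spell it out in this commit), these hooks would typically be enabled once per clone with `pre-commit install` and can be run against the whole tree with `pre-commit run --all-files`.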
@@ -104,4 +104,3 @@ in it instead of running commands manually.

## More informations
There are extra informations in `aircox/README.md` and `aircox_streamer/README.md`.
@@ -16,4 +16,3 @@ Each program has a directory on the server where user puts its podcasts (in **AI

## Requirements
* Sox (and soxi): sound file monitor and quality check
* requirements.txt for python's dependecies
@@ -1 +0,0 @@
@@ -7,3 +7,18 @@ from .program import ProgramAdmin, ScheduleAdmin, StreamAdmin
from .sound import SoundAdmin, TrackAdmin
from .station import StationAdmin

__all__ = (
    "filters",
    "ArticleAdmin",
    "DiffusionAdmin",
    "EpisodeAdmin",
    "LogAdmin",
    "PageAdmin",
    "StaticPageAdmin",
    "ProgramAdmin",
    "ScheduleAdmin",
    "StreamAdmin",
    "SoundAdmin",
    "TrackAdmin",
    "StationAdmin",
)
@@ -1,17 +1,12 @@
from django.contrib import admin

from ..models import Article
from .page import PageAdmin

__all__ = ["ArticleAdmin"]


@admin.register(Article)
class ArticleAdmin(PageAdmin):
    search_fields = PageAdmin.search_fields + ("parent__title",)
    # TODO: readonly field
@@ -1,78 +1,83 @@
from adminsortable2.admin import SortableAdminBase
from django.contrib import admin
from django.forms import ModelForm
from django.utils.translation import gettext as _

from ..models import Diffusion, Episode
from .page import PageAdmin
from .sound import SoundInline, TrackInline


class DiffusionBaseAdmin:
    fields = ("type", "start", "end", "schedule")
    readonly_fields = ("schedule",)

    def get_readonly_fields(self, request, obj=None):
        fields = super().get_readonly_fields(request, obj)
        if not request.user.has_perm("aircox_program.scheduling"):
            fields = fields + ("program", "start", "end")
        return [field for field in fields if field in self.fields]


@admin.register(Diffusion)
class DiffusionAdmin(DiffusionBaseAdmin, admin.ModelAdmin):
    def start_date(self, obj):
        return obj.local_start.strftime("%Y/%m/%d %H:%M")

    start_date.short_description = _("start")

    def end_date(self, obj):
        return obj.local_end.strftime("%H:%M")

    end_date.short_description = _("end")

    list_display = ("episode", "start_date", "end_date", "type", "initial")
    list_filter = ("type", "start", "program")
    list_editable = ("type",)
    ordering = ("-start", "id")

    fields = ("type", "start", "end", "initial", "program", "schedule")
    readonly_fields = ("schedule",)


class DiffusionInline(DiffusionBaseAdmin, admin.TabularInline):
    model = Diffusion
    fk_name = "episode"
    extra = 0

    def has_add_permission(self, request, obj):
        return request.user.has_perm("aircox_program.scheduling")


class EpisodeAdminForm(ModelForm):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields["parent"].required = True


@admin.register(Episode)
class EpisodeAdmin(SortableAdminBase, PageAdmin):
    form = EpisodeAdminForm
    list_display = PageAdmin.list_display
    list_filter = tuple(
        f for f in PageAdmin.list_filter if f != "pub_date"
    ) + (
        "diffusion__start",
        "pub_date",
    )
    search_fields = PageAdmin.search_fields + ("parent__title",)
    # readonly_fields = ('parent',)

    inlines = [TrackInline, SoundInline, DiffusionInline]

    def add_view(self, request, object_id, form_url="", context=None):
        context = context or {}
        context["init_app"] = True
        context["init_el"] = "#inline-tracks"
        return super().change_view(request, object_id, form_url, context)

    def change_view(self, request, object_id, form_url="", context=None):
        context = context or {}
        context["init_app"] = True
        context["init_el"] = "#inline-tracks"
        return super().change_view(request, object_id, form_url, context)
@@ -1,63 +1,86 @@
from django.contrib.admin import filters
from django.db import models
from django.utils.http import urlencode
from django.utils.translation import gettext_lazy as _

__all__ = ("DateFieldFilter", "DateTimeFieldFilter")


class DateFieldFilter(filters.FieldListFilter):
    """Display date input."""

    template = "admin/aircox/filters/date_filter.html"
    input_type = "date"

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.field_generic = "%s__" % field_path
        self.date_params = {
            k: v for k, v in params.items() if k.startswith(self.field_generic)
        }

        exact_lookup = (
            "date" if isinstance(field, models.DateTimeField) else "exact"
        )

        # links as: (label, param, input_type|None, value)
        self.links = [
            (_("Exact"), self.field_generic + exact_lookup, self.input_type),
            (_("Since"), self.field_generic + "gte", self.input_type),
            (_("Until"), self.field_generic + "lte", self.input_type),
        ]
        if field.null:
            self.links.insert(
                0, (_("None"), self.field_generic + "isnull", None, "1")
            )

        self.query_attrs = {
            k: v for k, v in request.GET.items() if k not in self.date_params
        }
        self.query_string = urlencode(self.query_attrs)
        super().__init__(
            field, request, params, model, model_admin, field_path
        )

    def expected_parameters(self):
        return [link[1] for link in self.links]

    def choices(self, changelist):
        yield {
            "label": _("Any"),
            "type": None,
            "query_string": self.query_string,
        }

        for link in self.links:
            value = len(link) > 3 and link[3] or self.date_params.get(link[1])
            yield {
                "label": link[0],
                "name": link[1],
                "value": value,
                "type": link[2],
                "query_attrs": self.query_attrs,
                "query_string": urlencode({link[1]: value})
                + "&"
                + self.query_string
                if value
                else self.query_string,
            }


class DateTimeFieldFilter(DateFieldFilter):
    """Display datetime input."""

    input_type = "datetime-local"


filters.FieldListFilter.register(
    lambda f: isinstance(f, models.DateField),
    DateFieldFilter,
    take_priority=True,
)
filters.FieldListFilter.register(
    lambda f: isinstance(f, models.DateTimeField),
    DateTimeFieldFilter,
    take_priority=True,
)
@@ -2,12 +2,10 @@ from django.contrib import admin

from ..models import Log

__all__ = ["LogAdmin"]


@admin.register(Log)
class LogAdmin(admin.ModelAdmin):
    list_display = ["id", "date", "station", "source", "type", "comment"]
    list_filter = ["date", "source", "station"]
@@ -1,23 +1,22 @@
class UnrelatedInlineMixin:
    """Inline class that can be included in an admin change view whose model is
    not directly related to inline's model."""

    view_model = None
    parent_model = None
    parent_fk = ""

    def __init__(self, parent_model, admin_site):
        self.view_model = parent_model
        super().__init__(self.parent_model, admin_site)

    def get_parent(self, view_obj):
        """Get formset's instance from `obj` of AdminSite's change form."""
        field = self.parent_model._meta.get_field(self.parent_fk).remote_field
        return getattr(view_obj, field.name, None)

    def save_parent(self, parent, view_obj):
        """Save formset's instance."""
        setattr(parent, self.parent_fk, view_obj)
        parent.save()
        return parent

@@ -25,6 +24,7 @@ class UnrelatedInlineMixin:
    def get_formset(self, request, obj):
        ParentFormSet = super().get_formset(request, obj)
        inline = self

        class FormSet(ParentFormSet):
            view_obj = None

@@ -37,6 +37,5 @@ class UnrelatedInlineMixin:
            def save(self):
                inline.save_parent(self.instance, self.view_obj)
                return super().save()

        return FormSet
@@ -1,74 +1,82 @@
from copy import deepcopy

from adminsortable2.admin import SortableInlineAdminMixin
from django.contrib import admin
from django.http import QueryDict
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _

from ..models import Category, Comment, NavItem, Page, StaticPage

__all__ = ("CategoryAdmin", "PageAdmin", "NavItemInline")


@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    list_display = ["pk", "title", "slug"]
    list_editable = ["title", "slug"]
    search_fields = ["title"]
    fields = ["title", "slug"]
    prepopulated_fields = {"slug": ("title",)}


class BasePageAdmin(admin.ModelAdmin):
    list_display = ("cover_thumb", "title", "status", "parent")
    list_display_links = ("cover_thumb", "title")
    list_editable = ("status",)
    list_filter = ("status",)
    prepopulated_fields = {"slug": ("title",)}

    # prepopulate fields using changelist's filters
    prepopulated_filters = ("parent",)

    search_fields = ("title",)

    fieldsets = [
        (
            "",
            {
                "fields": ["title", "slug", "cover", "content"],
            },
        ),
        (
            _("Publication Settings"),
            {
                "fields": ["status", "parent"],
            },
        ),
    ]

    change_form_template = "admin/aircox/page_change_form.html"

    def cover_thumb(self, obj):
        return (
            mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"]))
            if obj.cover
            else ""
        )

    def get_changeform_initial_data(self, request):
        data = super().get_changeform_initial_data(request)
        filters = QueryDict(request.GET.get("_changelist_filters", ""))
        data["parent"] = filters.get("parent", None)
        return data

    def _get_common_context(self, query, extra_context=None):
        extra_context = extra_context or {}
        parent = query.get("parent", None)
        extra_context["parent"] = (
            None if parent is None else Page.objects.get_subclass(id=parent)
        )
        return extra_context

    def render_change_form(self, request, context, *args, **kwargs):
        if context["original"] and "parent" not in context:
            context["parent"] = context["original"].parent
        return super().render_change_form(request, context, *args, **kwargs)

    def add_view(self, request, form_url="", extra_context=None):
        filters = QueryDict(request.GET.get("_changelist_filters", ""))
        extra_context = self._get_common_context(filters, extra_context)
        return super().add_view(request, form_url, extra_context)

@@ -78,31 +86,33 @@ class BasePageAdmin(admin.ModelAdmin):
class PageAdmin(BasePageAdmin):
    change_list_template = "admin/aircox/page_change_list.html"

    list_display = BasePageAdmin.list_display + ("category",)
    list_editable = BasePageAdmin.list_editable + ("category",)
    list_filter = BasePageAdmin.list_filter + ("category", "pub_date")
    search_fields = BasePageAdmin.search_fields + ("category__title",)
    fieldsets = deepcopy(BasePageAdmin.fieldsets)

    fieldsets[0][1]["fields"].insert(
        fieldsets[0][1]["fields"].index("slug") + 1, "category"
    )
    fieldsets[1][1]["fields"] += ("featured", "allow_comments")


@admin.register(StaticPage)
class StaticPageAdmin(BasePageAdmin):
    list_display = BasePageAdmin.list_display + ("attach_to",)
    fieldsets = deepcopy(BasePageAdmin.fieldsets)

    fieldsets[1][1]["fields"] += ("attach_to",)


@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
    list_display = ("page_title", "date", "nickname")
    list_filter = ("date",)
    search_fields = ("page__title", "nickname")

    def page_title(self, obj):
        return obj.page.title
@@ -1,5 +1,3 @@
from django.contrib import admin
from django.forms import ModelForm
from django.utils.translation import gettext_lazy as _

@@ -14,20 +12,20 @@ class ScheduleInlineForm(ModelForm):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.initial:
            self.fields["date"].disabled = True
            self.fields["frequency"].disabled = True


class ScheduleInline(admin.TabularInline):
    model = Schedule
    form = ScheduleInlineForm
    readonly_fields = ("timezone",)
    extra = 1


class StreamInline(admin.TabularInline):
    model = Stream
    fields = ["delay", "begin", "end"]
    extra = 1

@@ -39,20 +37,23 @@ class ProgramAdmin(PageAdmin):
    schedule.boolean = True
    schedule.short_description = _("Schedule")

    list_display = PageAdmin.list_display + ("schedule", "station", "active")
    list_filter = PageAdmin.list_filter + ("station", "active")
    prepopulated_fields = {"slug": ("title",)}
    search_fields = ("title",)

    inlines = [ScheduleInline, StreamInline]

    def get_fieldsets(self, request, obj=None):
        fields = super().get_fieldsets(request, obj)
        if request.user.has_perm("aircox.program.scheduling"):
            fields = fields + [
                (
                    _("Program Settings"),
                    {
                        "fields": ["active", "station", "sync"],
                    },
                )
            ]
        return fields

@@ -61,26 +62,32 @@ class ProgramAdmin(PageAdmin):
class ScheduleAdmin(admin.ModelAdmin):
    def program_title(self, obj):
        return obj.program.title

    program_title.short_description = _("Program")

    def freq(self, obj):
        return obj.get_frequency_verbose()

    freq.short_description = _("Day")

    list_filter = ["frequency", "program"]
    list_display = [
        "program_title",
        "freq",
        "time",
        "timezone",
        "duration",
        "initial",
    ]
    list_editable = ["time", "duration", "initial"]

    def get_readonly_fields(self, request, obj=None):
        if obj:
            return ["program", "date", "frequency"]
        else:
            return []


@admin.register(Stream)
class StreamAdmin(admin.ModelAdmin):
    list_display = ("id", "program", "delay", "begin", "end")
@@ -1,40 +1,48 @@
import math

from adminsortable2.admin import SortableAdminBase
from django.contrib import admin
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _

from ..models import Sound, Track


class TrackInline(admin.TabularInline):
    template = "admin/aircox/playlist_inline.html"
    model = Track
    extra = 0
    fields = ("position", "artist", "title", "tags", "album", "year", "info")

    list_display = ["artist", "album", "title", "tags", "related"]
    list_filter = ["artist", "album", "title", "tags"]


class SoundTrackInline(TrackInline):
    fields = TrackInline.fields + ("timestamp",)


class SoundInline(admin.TabularInline):
    model = Sound
    fields = [
        "type",
        "name",
        "audio",
        "duration",
        "is_good_quality",
        "is_public",
        "is_downloadable",
    ]
    readonly_fields = ["type", "audio", "duration", "is_good_quality"]
    extra = 0
    max_num = 0

    def audio(self, obj):
        return mark_safe(
            '<audio src="{}" controls></audio>'.format(obj.file.url)
        )

    audio.short_description = _("Audio")

    def get_queryset(self, request):
        return super().get_queryset(request).available()

@@ -43,63 +51,99 @@ class SoundInline(admin.TabularInline):
@admin.register(Sound)
class SoundAdmin(SortableAdminBase, admin.ModelAdmin):
    fields = None
    list_display = [
        "id",
        "name",
        "related",
        "type",
        "duration",
        "is_public",
        "is_good_quality",
        "is_downloadable",
        "audio",
    ]
    list_filter = ("type", "is_good_quality", "is_public")
    list_editable = ["name", "is_public", "is_downloadable"]

    search_fields = ["name", "program__title"]
    fieldsets = [
        (None, {"fields": ["name", "file", "type", "program", "episode"]}),
        (
            None,
            {
                "fields": [
                    "duration",
                    "is_public",
                    "is_downloadable",
                    "is_good_quality",
                    "mtime",
                ]
            },
        ),
    ]
    readonly_fields = ("file", "duration", "type")
    inlines = [SoundTrackInline]

    def related(self, obj):
        # TODO: link to episode or program edit
        return (
            obj.episode.title
            if obj.episode
            else obj.program.title
            if obj.program
            else ""
        )

    related.short_description = _("Program / Episode")

    def audio(self, obj):
        return (
            mark_safe('<audio src="{}" controls></audio>'.format(obj.file.url))
            if obj.type != Sound.TYPE_REMOVED
            else ""
        )

    audio.short_description = _("Audio")

    def add_view(self, request, form_url="", context=None):
        context = context or {}
        context["init_app"] = True
        context["init_el"] = "#inline-tracks"
        context["track_timestamp"] = True
        return super().add_view(request, form_url, context)

    def change_view(self, request, object_id, form_url="", context=None):
        context = context or {}
        context["init_app"] = True
        context["init_el"] = "#inline-tracks"
        context["track_timestamp"] = True
        return super().change_view(request, object_id, form_url, context)


@admin.register(Track)
class TrackAdmin(admin.ModelAdmin):
    def tag_list(self, obj):
        return ", ".join(o.name for o in obj.tags.all())

    list_display = [
        "pk",
        "artist",
        "title",
        "tag_list",
        "episode",
        "sound",
        "ts",
    ]
    list_editable = ["artist", "title"]
    list_filter = ["artist", "title", "tags"]

    search_fields = ["artist", "title"]
    fieldsets = [
        (
            _("Playlist"),
            {"fields": ["episode", "sound", "position", "timestamp"]},
        ),
        (_("Info"), {"fields": ["artist", "title", "info", "tags"]}),
    ]

    # TODO on edit: readonly_fields = ['episode', 'sound']

@@ -107,10 +151,10 @@ class TrackAdmin(admin.ModelAdmin):
    def ts(self, obj):
        ts = obj.timestamp
        if ts is None:
            return ""
        h = math.floor(ts / 3600)
        m = math.floor((ts - h) / 60)
        s = ts - h * 3600 - m * 60
        return "{:0>2}:{:0>2}:{:0>2}".format(h, m, s)

    ts.short_description = _("timestamp")
@@ -1,11 +1,10 @@
from adminsortable2.admin import SortableAdminBase
from django.contrib import admin

from ..models import Port, Station
from .page import NavItemInline

__all__ = ["PortInline", "StationAdmin"]


class PortInline(admin.StackedInline):

@@ -15,7 +14,5 @@ class PortInline(admin.StackedInline):
@admin.register(Station)
class StationAdmin(SortableAdminBase, admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}
    inlines = (PortInline, NavItemInline)
@@ -1,20 +1,18 @@
from django.contrib import admin
from django.urls import include, path, reverse
from django.utils.translation import gettext_lazy as _

from rest_framework.routers import DefaultRouter

from .models import Comment, Diffusion, Program
from .views.admin import StatisticsView

__all__ = ["AdminSite"]


class AdminSite(admin.AdminSite):
    extra_urls = None
    tools = [
        (_("Statistics"), "admin:tools-stats"),
    ]

    def __init__(self, *args, **kwargs):

@@ -25,41 +23,52 @@ class AdminSite(admin.AdminSite):
    def each_context(self, request):
        context = super().each_context(request)
        context.update(
            {
                # all programs
                "programs": Program.objects.active()
                .values("pk", "title")
                .order_by("title"),
                # today's diffusions
                "diffusions": Diffusion.objects.date()
                .order_by("start")
                .select_related("episode"),
                # TODO: only for dashboard
                # last comments
                "comments": Comment.objects.order_by("-date").select_related(
                    "page"
                )[0:10],
            }
        )
        return context

    def get_urls(self):
        urls = (
            [
                path("api/", include((self.router.urls, "api"))),
                path(
                    "tools/statistics/",
                    self.admin_view(StatisticsView.as_view()),
                    name="tools-stats",
                ),
                path(
                    "tools/statistics/<date:date>/",
                    self.admin_view(StatisticsView.as_view()),
                    name="tools-stats",
                ),
            ]
            + self.extra_urls
            + super().get_urls()
        )
        return urls

    def get_tools(self):
        return [(label, reverse(url)) for label, url in self.tools]

    def route_view(self, url, view, name, admin_view=True, label=None):
        self.extra_urls.append(
            path(url, self.admin_view(view) if admin_view else view, name=name)
        )

        if label:
            self.tools.append((label, "admin:" + name))
@@ -3,11 +3,9 @@ from django.contrib.admin.apps import AdminConfig
class AircoxConfig(AppConfig):
    name = "aircox"
    verbose_name = "Aircox"


class AircoxAdminConfig(AdminConfig):
    default_site = "aircox.admin_site.AdminSite"
@@ -1,50 +1,61 @@
import datetime

from django.urls.converters import StringConverter
from django.utils.safestring import mark_safe

__all__ = ("PagePathConverter", "WeekConverter", "DateConverter")


class PagePathConverter(StringConverter):
    """Match path for pages, including surrounding slashes."""

    regex = r"/?|([-_a-zA-Z0-9]+/)*?"

    def to_python(self, value):
        if not value or value[0] != "/":
            value = "/" + value
        if len(value) > 1 and value[-1] != "/":
            value = value + "/"
        return value

    def to_url(self, value):
        if value[0] == "/":
            value = value[1:]
        if value[-1] != "/":
            value = value + "/"
        return mark_safe(value)


class WeekConverter:
    """Converter for date as YYYYY/WW."""

    regex = r"[0-9]{4}/[0-9]{2}"

    def to_python(self, value):
        return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date()

    def to_url(self, value):
        return (
            value
            if isinstance(value, str)
            else "{:04d}/{:02d}".format(*value.isocalendar())
        )


class DateConverter:
    """Converter for date as YYYY/MM/DD."""

    regex = r"[0-9]{4}/[0-9]{2}/[0-9]{2}"

    def to_python(self, value):
        value = value.split("/")[:3]
        return datetime.date(int(value[0]), int(value[1]), int(value[2]))

    def to_url(self, value):
        return (
            value
            if isinstance(value, str)
            else "{:04d}/{:02d}/{:02d}".format(
                value.year, value.month, value.day
            )
        )
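For orientation, converters like these are normally activated with Django's `register_converter`; that registration is not part of this commit, so the import path and the type names in the sketch below are assumptions, with only `date` corroborated by the `<date:date>` pattern used in the admin URLs earlier in the commit.

    from django.urls import register_converter
    from aircox import converters  # assumed module path

    register_converter(converters.DateConverter, "date")
    register_converter(converters.WeekConverter, "week")  # assumed type name
    register_converter(converters.PagePathConverter, "page_path")  # assumed type name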
@@ -1,17 +1,17 @@
import django_filters as filters
from django.utils.translation import gettext_lazy as _

from .models import Episode, Page


class PageFilters(filters.FilterSet):
    q = filters.CharFilter(method="search_filter", label=_("Search"))

    class Meta:
        model = Page
        fields = {
            "category__id": ["in"],
            "pub_date": ["exact", "gte", "lte"],
        }

    def search_filter(self, queryset, name, value):

@@ -19,7 +19,9 @@ class PageFilters(filters.FilterSet):
class EpisodeFilters(PageFilters):
    podcast = filters.BooleanFilter(
        method="podcast_filter", label=_("Podcast")
    )

    class Meta:
        model = Episode

@@ -29,4 +31,3 @@ class EpisodeFilters(PageFilters):
        if value:
            return queryset.filter(sound__is_public=True).distinct()
        return queryset.filter(sound__isnull=True)
@@ -9,12 +9,10 @@ class CommentForm(ModelForm):
    email = forms.EmailField(required=False)
    content = forms.CharField(widget=forms.Textarea())

    nickname.widget.attrs.update({"class": "input"})
    email.widget.attrs.update({"class": "input"})
    content.widget.attrs.update({"class": "textarea"})

    class Meta:
        model = Comment
        fields = ["nickname", "email", "content"]
@@ -1,41 +1,48 @@
"""Handle archiving of logs in order to keep database light and fast.

The logs are archived in gzip files, per day.
"""

import datetime
import logging
from argparse import RawTextHelpFormatter

from django.core.management.base import BaseCommand
from django.utils import timezone as tz

import aircox.settings as settings
from aircox.models import Log
from aircox.models.log import LogArchiver

logger = logging.getLogger("aircox.commands")


__all__ = ("Command",)


class Command(BaseCommand):
    help = __doc__

    def add_arguments(self, parser):
        parser.formatter_class = RawTextHelpFormatter
        group = parser.add_argument_group("actions")
        group.add_argument(
            "-a",
            "--age",
            type=int,
            default=settings.AIRCOX_LOGS_ARCHIVES_AGE,
            help="minimal age in days of logs to archive. Default is "
            "settings.AIRCOX_LOGS_ARCHIVES_AGE",
        )
        group.add_argument(
            "-k",
            "--keep",
            action="store_true",
            help="keep logs in database instead of deleting them",
        )

    def handle(self, *args, age, keep, **options):
        date = datetime.date.today() - tz.timedelta(days=age)
        # FIXME: mysql support?
        logger.info("archive logs for %s and earlier", date)
        count = LogArchiver().archive(Log.objects.filter(date__date__lte=date))
        logger.info("total log archived %d", count)
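As a usage sketch only (the command's registered name is not visible in this excerpt, so `archive` below is a placeholder), this archiver would be invoked along the lines of `./manage.py archive --age 60 --keep`, where `--age` and `--keep` are the options defined in `add_arguments` above.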
@ -1,5 +1,4 @@
|
||||||
"""
|
"""Manage diffusions using schedules, to update, clean up or check diffusions.
|
||||||
Manage diffusions using schedules, to update, clean up or check diffusions.
|
|
||||||
|
|
||||||
A generated diffusion can be unconfirmed, that means that the user must confirm
|
A generated diffusion can be unconfirmed, that means that the user must confirm
|
||||||
it by changing its type to "normal". The behaviour is controlled using
|
it by changing its type to "normal". The behaviour is controlled using
|
||||||
|
@ -13,9 +12,9 @@ from django.core.management.base import BaseCommand
|
||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
from django.utils import timezone as tz
|
from django.utils import timezone as tz
|
||||||
|
|
||||||
from aircox.models import Schedule, Diffusion
|
from aircox.models import Diffusion, Schedule
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.commands')
|
logger = logging.getLogger("aircox.commands")
|
||||||
|
|
||||||
|
|
||||||
class Actions:
|
class Actions:
|
||||||
|
@ -26,20 +25,28 @@ class Actions:
|
||||||
|
|
||||||
def update(self):
|
def update(self):
|
||||||
episodes, diffusions = [], []
|
episodes, diffusions = [], []
|
||||||
for schedule in Schedule.objects.filter(program__active=True,
|
for schedule in Schedule.objects.filter(
|
||||||
initial__isnull=True):
|
program__active=True, initial__isnull=True
|
||||||
|
):
|
||||||
eps, diffs = schedule.diffusions_of_month(self.date)
|
eps, diffs = schedule.diffusions_of_month(self.date)
|
||||||
if eps:
|
if eps:
|
||||||
episodes += eps
|
episodes += eps
|
||||||
if diffs:
|
if diffs:
|
||||||
diffusions += diffs
|
diffusions += diffs
|
||||||
|
|
||||||
logger.info('[update] %s: %d episodes, %d diffusions and reruns',
|
logger.info(
|
||||||
str(schedule), len(eps), len(diffs))
|
"[update] %s: %d episodes, %d diffusions and reruns",
|
||||||
|
str(schedule),
|
||||||
|
len(eps),
|
||||||
|
len(diffs),
|
||||||
|
)
|
||||||
|
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
logger.info('[update] save %d episodes and %d diffusions',
|
logger.info(
|
||||||
len(episodes), len(diffusions))
|
"[update] save %d episodes and %d diffusions",
|
||||||
|
len(episodes),
|
||||||
|
len(diffusions),
|
||||||
|
)
|
||||||
for episode in episodes:
|
for episode in episodes:
|
||||||
episode.save()
|
episode.save()
|
||||||
for diffusion in diffusions:
|
for diffusion in diffusions:
|
||||||
|
@ -48,9 +55,10 @@ class Actions:
|
||||||
diffusion.save()
|
diffusion.save()
|
||||||
|
|
||||||
def clean(self):
|
def clean(self):
|
||||||
qs = Diffusion.objects.filter(type=Diffusion.TYPE_UNCONFIRMED,
|
qs = Diffusion.objects.filter(
|
||||||
start__lt=self.date)
|
type=Diffusion.TYPE_UNCONFIRMED, start__lt=self.date
|
||||||
logger.info('[clean] %d diffusions will be removed', qs.count())
|
)
|
||||||
|
logger.info("[clean] %d diffusions will be removed", qs.count())
|
||||||
qs.delete()
|
qs.delete()
|
||||||
|
|
||||||
|
|
||||||
|
@ -61,45 +69,57 @@ class Command(BaseCommand):
|
||||||
parser.formatter_class = RawTextHelpFormatter
|
parser.formatter_class = RawTextHelpFormatter
|
||||||
today = datetime.date.today()
|
today = datetime.date.today()
|
||||||
|
|
||||||
group = parser.add_argument_group('action')
|
group = parser.add_argument_group("action")
|
||||||
group.add_argument(
|
group.add_argument(
|
||||||
'-u', '--update', action='store_true',
|
"-u",
|
||||||
help='generate (unconfirmed) diffusions for the given month. '
|
"--update",
|
||||||
'These diffusions must be confirmed manually by changing '
|
action="store_true",
|
||||||
'their type to "normal"'
|
help="generate (unconfirmed) diffusions for the given month. "
|
||||||
|
"These diffusions must be confirmed manually by changing "
|
||||||
|
'their type to "normal"',
|
||||||
)
|
)
|
||||||
group.add_argument(
|
group.add_argument(
|
||||||
'-l', '--clean', action='store_true',
|
"-l",
|
||||||
help='remove unconfirmed diffusions older than the given month'
|
"--clean",
|
||||||
|
action="store_true",
|
||||||
|
help="remove unconfirmed diffusions older than the given month",
|
||||||
)
|
)
|
||||||
|
|
||||||
group = parser.add_argument_group('date')
|
group = parser.add_argument_group("date")
|
||||||
group.add_argument(
|
group.add_argument(
|
||||||
'--year', type=int, default=today.year,
|
"--year",
|
||||||
help='used by update, default is today\'s year')
|
type=int,
|
||||||
|
default=today.year,
|
||||||
|
help="used by update, default is today's year",
|
||||||
|
)
|
||||||
group.add_argument(
|
group.add_argument(
|
||||||
'--month', type=int, default=today.month,
|
"--month",
|
||||||
help='used by update, default is today\'s month')
|
type=int,
|
||||||
|
default=today.month,
|
||||||
|
help="used by update, default is today's month",
|
||||||
|
)
|
||||||
group.add_argument(
|
group.add_argument(
|
||||||
'--next-month', action='store_true',
|
"--next-month",
|
||||||
help='set the date to the next month of given date'
|
action="store_true",
|
||||||
' (if next month from today'
|
help="set the date to the next month of given date"
|
||||||
|
" (if next month from today",
|
||||||
)
|
)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
date = datetime.date(year=options['year'], month=options['month'],
|
date = datetime.date(
|
||||||
day=1)
|
year=options["year"], month=options["month"], day=1
|
||||||
if options.get('next_month'):
|
)
|
||||||
month = options.get('month')
|
if options.get("next_month"):
|
||||||
|
month = options.get("month")
|
||||||
date += tz.timedelta(days=28)
|
date += tz.timedelta(days=28)
|
||||||
if date.month == month:
|
if date.month == month:
|
||||||
date += tz.timedelta(days=28)
|
date += tz.timedelta(days=28)
|
||||||
date = date.replace(day=1)
|
date = date.replace(day=1)
|
||||||
|
|
||||||
actions = Actions(date)
|
actions = Actions(date)
|
||||||
if options.get('update'):
|
if options.get("update"):
|
||||||
actions.update()
|
actions.update()
|
||||||
if options.get('clean'):
|
if options.get("clean"):
|
||||||
actions.clean()
|
actions.clean()
|
||||||
if options.get('check'):
|
if options.get("check"):
|
||||||
actions.check()
|
actions.check()
|
||||||
|
|
|
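A quick illustration of the --next-month arithmetic used in handle() above, as a standalone sketch with plain datetime and invented dates (not project code): starting from day 1, one 28-day jump either reaches the next month or stays in the current one, in which case a second jump is needed before snapping back to day 1.

import datetime

# Sketch of the --next-month logic above; the dates are invented examples.
def next_month_start(year, month):
    date = datetime.date(year=year, month=month, day=1)
    date += datetime.timedelta(days=28)
    if date.month == month:
        date += datetime.timedelta(days=28)
    return date.replace(day=1)

assert next_month_start(2023, 1) == datetime.date(2023, 2, 1)
assert next_month_start(2023, 12) == datetime.date(2024, 1, 1)

Starting from the first of the month guarantees the two jumps can never skip a whole month.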
@ -1,5 +1,4 @@
|
||||||
"""
|
"""Import one or more playlist for the given sound. Attach it to the provided
|
||||||
Import one or more playlist for the given sound. Attach it to the provided
|
|
||||||
sound.
|
sound.
|
||||||
|
|
||||||
Playlists are in CSV format, where columns are separated with a
|
Playlists are in CSV format, where columns are separated with a
|
||||||
|
@ -10,23 +9,22 @@ The order of the elements is: {settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS}
|
||||||
If 'minutes' or 'seconds' are given, position will be expressed as timed
|
If 'minutes' or 'seconds' are given, position will be expressed as timed
|
||||||
position, instead of position in playlist.
|
position, instead of position in playlist.
|
||||||
"""
|
"""
|
||||||
import os
|
|
||||||
import csv
|
import csv
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
from argparse import RawTextHelpFormatter
|
from argparse import RawTextHelpFormatter
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand, CommandError
|
from django.core.management.base import BaseCommand
|
||||||
from django.contrib.contenttypes.models import ContentType
|
|
||||||
|
|
||||||
from aircox import settings
|
from aircox import settings
|
||||||
from aircox.models import *
|
from aircox.models import Sound, Track
|
||||||
|
|
||||||
__doc__ = __doc__.format(settings=settings)
|
__doc__ = __doc__.format(settings=settings)
|
||||||
|
|
||||||
__all__ = ('PlaylistImport', 'Command')
|
__all__ = ("PlaylistImport", "Command")
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.commands')
|
logger = logging.getLogger("aircox.commands")
|
||||||
|
|
||||||
|
|
||||||
class PlaylistImport:
|
class PlaylistImport:
|
||||||
|
@ -45,62 +43,74 @@ class PlaylistImport:
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
self.read()
|
self.read()
|
||||||
if self.track_kwargs.get('sound') is not None:
|
if self.track_kwargs.get("sound") is not None:
|
||||||
self.make_playlist()
|
self.make_playlist()
|
||||||
|
|
||||||
def read(self):
|
def read(self):
|
||||||
if not os.path.exists(self.path):
|
if not os.path.exists(self.path):
|
||||||
return True
|
return True
|
||||||
with open(self.path, 'r') as file:
|
with open(self.path, "r") as file:
|
||||||
logger.info('start reading csv ' + self.path)
|
logger.info("start reading csv " + self.path)
|
||||||
self.data = list(csv.DictReader(
|
self.data = list(
|
||||||
(row for row in file
|
csv.DictReader(
|
||||||
if not (row.startswith('#') or row.startswith('\ufeff#'))
|
(
|
||||||
and row.strip()),
|
row
|
||||||
fieldnames=settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS,
|
for row in file
|
||||||
delimiter=settings.AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER,
|
if not (
|
||||||
quotechar=settings.AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
|
row.startswith("#") or row.startswith("\ufeff#")
|
||||||
))
|
)
|
||||||
|
and row.strip()
|
||||||
|
),
|
||||||
|
fieldnames=settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS,
|
||||||
|
delimiter=settings.AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER,
|
||||||
|
quotechar=settings.AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
def make_playlist(self):
|
def make_playlist(self):
|
||||||
|
"""Make a playlist from the read data, and return it.
|
||||||
|
|
||||||
|
If save is true, save it into the database
|
||||||
"""
|
"""
|
||||||
Make a playlist from the read data, and return it. If save is
|
if self.track_kwargs.get("sound") is None:
|
||||||
true, save it into the database
|
logger.error(
|
||||||
"""
|
"related track's sound is missing. Skip import of "
|
||||||
if self.track_kwargs.get('sound') is None:
|
+ self.path
|
||||||
logger.error('related track\'s sound is missing. Skip import of ' +
|
+ "."
|
||||||
self.path + '.')
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
maps = settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS
|
maps = settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS
|
||||||
tracks = []
|
tracks = []
|
||||||
|
|
||||||
logger.info('parse csv file ' + self.path)
|
logger.info("parse csv file " + self.path)
|
||||||
has_timestamp = ('minutes' or 'seconds') in maps
|
has_timestamp = ("minutes" or "seconds") in maps
|
||||||
for index, line in enumerate(self.data):
|
for index, line in enumerate(self.data):
|
||||||
if ('title' or 'artist') not in line:
|
if ("title" or "artist") not in line:
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
timestamp = int(line.get('minutes') or 0) * 60 + \
|
timestamp = (
|
||||||
int(line.get('seconds') or 0) \
|
int(line.get("minutes") or 0) * 60
|
||||||
if has_timestamp else None
|
+ int(line.get("seconds") or 0)
|
||||||
|
if has_timestamp
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
track, created = Track.objects.get_or_create(
|
track, created = Track.objects.get_or_create(
|
||||||
title=line.get('title'),
|
title=line.get("title"),
|
||||||
artist=line.get('artist'),
|
artist=line.get("artist"),
|
||||||
position=index,
|
position=index,
|
||||||
**self.track_kwargs
|
**self.track_kwargs
|
||||||
)
|
)
|
||||||
track.timestamp = timestamp
|
track.timestamp = timestamp
|
||||||
track.info = line.get('info')
|
track.info = line.get("info")
|
||||||
tags = line.get('tags')
|
tags = line.get("tags")
|
||||||
if tags:
|
if tags:
|
||||||
track.tags.add(*tags.lower().split(','))
|
track.tags.add(*tags.lower().split(","))
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
'an error occured for track {index}, it may not '
|
"an error occured for track {index}, it may not "
|
||||||
'have been saved: {err}'
|
"have been saved: {err}".format(index=index, err=err)
|
||||||
.format(index=index, err=err)
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -116,33 +126,41 @@ class Command(BaseCommand):
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
parser.formatter_class = RawTextHelpFormatter
|
parser.formatter_class = RawTextHelpFormatter
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'path', metavar='PATH', type=str,
|
"path",
|
||||||
help='path of the input playlist to read'
|
metavar="PATH",
|
||||||
|
type=str,
|
||||||
|
help="path of the input playlist to read",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--sound', '-s', type=str,
|
"--sound",
|
||||||
help='generate a playlist for the sound of the given path. '
|
"-s",
|
||||||
'If not given, try to match a sound with the same path.'
|
type=str,
|
||||||
|
help="generate a playlist for the sound of the given path. "
|
||||||
|
"If not given, try to match a sound with the same path.",
|
||||||
)
|
)
|
||||||
|
|
||||||
def handle(self, path, *args, **options):
|
def handle(self, path, *args, **options):
|
||||||
# FIXME: absolute/relative path of sounds vs given path
|
# FIXME: absolute/relative path of sounds vs given path
|
||||||
if options.get('sound'):
|
if options.get("sound"):
|
||||||
sound = Sound.objects.filter(file__icontains=options.get('sound'))\
|
sound = Sound.objects.filter(
|
||||||
.first()
|
file__icontains=options.get("sound")
|
||||||
|
).first()
|
||||||
else:
|
else:
|
||||||
path_, ext = os.path.splitext(path)
|
path_, ext = os.path.splitext(path)
|
||||||
sound = Sound.objects.filter(path__icontains=path_).first()
|
sound = Sound.objects.filter(path__icontains=path_).first()
|
||||||
|
|
||||||
if not sound:
|
if not sound:
|
||||||
logger.error('no sound found in the database for the path '
|
logger.error(
|
||||||
'{path}'.format(path=path))
|
"no sound found in the database for the path "
|
||||||
|
"{path}".format(path=path)
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
# FIXME: auto get sound.episode if any
|
# FIXME: auto get sound.episode if any
|
||||||
importer = PlaylistImport(path, sound=sound).run()
|
importer = PlaylistImport(path, sound=sound).run()
|
||||||
for track in importer.tracks:
|
for track in importer.tracks:
|
||||||
logger.info('track #{pos} imported: {title}, by {artist}'.format(
|
logger.info(
|
||||||
pos=track.position, title=track.title, artist=track.artist
|
"track #{pos} imported: {title}, by {artist}".format(
|
||||||
))
|
pos=track.position, title=track.title, artist=track.artist
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
|
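To make the CSV handling of the importer above concrete, here is a small standalone sketch of how one row becomes a timed position, the way make_playlist() computes it. The column order and the ";" delimiter are assumptions for the example; the real values come from settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS and the related delimiter and quote settings.

import csv
import io

# Illustration only: column names and delimiter are assumed here.
cols = ("artist", "title", "minutes", "seconds", "tags", "info")
sample = "Nina Simone;Sinnerman;0;12;;live take\n"
row = next(csv.DictReader(io.StringIO(sample), fieldnames=cols, delimiter=";"))

# Same arithmetic as make_playlist(): a timed position is only computed
# when a minutes/seconds column is part of the configured columns.
has_timestamp = "minutes" in cols or "seconds" in cols
timestamp = (
    int(row.get("minutes") or 0) * 60 + int(row.get("seconds") or 0)
    if has_timestamp
    else None
)
print(row["title"], timestamp)  # Sinnerman 12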
@ -1,7 +1,7 @@
|
||||||
#! /usr/bin/env python3
|
#! /usr/bin/env python3
|
||||||
|
|
||||||
"""
|
"""Monitor sound files; For each program, check for:
|
||||||
Monitor sound files; For each program, check for:
|
|
||||||
- new files;
|
- new files;
|
||||||
- deleted files;
|
- deleted files;
|
||||||
- differences between files and sound;
|
- differences between files and sound;
|
||||||
|
@ -23,23 +23,22 @@ To check quality of files, call the command sound_quality_check using the
|
||||||
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
||||||
Sox (and soxi).
|
Sox (and soxi).
|
||||||
"""
|
"""
|
||||||
from argparse import RawTextHelpFormatter
|
|
||||||
import concurrent.futures as futures
|
|
||||||
import atexit
|
import atexit
|
||||||
|
import concurrent.futures as futures
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
|
from argparse import RawTextHelpFormatter
|
||||||
from watchdog.observers import Observer
|
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
from watchdog.observers import Observer
|
||||||
|
|
||||||
from aircox import settings
|
from aircox import settings
|
||||||
from aircox.models import Program, Sound
|
|
||||||
from aircox.management.sound_file import SoundFile
|
from aircox.management.sound_file import SoundFile
|
||||||
from aircox.management.sound_monitor import MonitorHandler
|
from aircox.management.sound_monitor import MonitorHandler
|
||||||
|
from aircox.models import Program, Sound
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.commands')
|
logger = logging.getLogger("aircox.commands")
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
|
@ -47,39 +46,42 @@ class Command(BaseCommand):
|
||||||
|
|
||||||
def report(self, program=None, component=None, *content):
|
def report(self, program=None, component=None, *content):
|
||||||
if not component:
|
if not component:
|
||||||
logger.info('%s: %s', str(program),
|
logger.info(
|
||||||
' '.join([str(c) for c in content]))
|
"%s: %s", str(program), " ".join([str(c) for c in content])
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
logger.info('%s, %s: %s', str(program), str(component),
|
logger.info(
|
||||||
' '.join([str(c) for c in content]))
|
"%s, %s: %s",
|
||||||
|
str(program),
|
||||||
|
str(component),
|
||||||
|
" ".join([str(c) for c in content]),
|
||||||
|
)
|
||||||
|
|
||||||
def scan(self):
|
def scan(self):
|
||||||
"""
|
"""For all programs, scan dirs."""
|
||||||
For all programs, scan dirs
|
logger.info("scan all programs...")
|
||||||
"""
|
|
||||||
logger.info('scan all programs...')
|
|
||||||
programs = Program.objects.filter()
|
programs = Program.objects.filter()
|
||||||
|
|
||||||
dirs = []
|
dirs = []
|
||||||
for program in programs:
|
for program in programs:
|
||||||
logger.info('#%d %s', program.id, program.title)
|
logger.info("#%d %s", program.id, program.title)
|
||||||
self.scan_for_program(
|
self.scan_for_program(
|
||||||
program, settings.AIRCOX_SOUND_ARCHIVES_SUBDIR,
|
program,
|
||||||
|
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR,
|
||||||
type=Sound.TYPE_ARCHIVE,
|
type=Sound.TYPE_ARCHIVE,
|
||||||
)
|
)
|
||||||
self.scan_for_program(
|
self.scan_for_program(
|
||||||
program, settings.AIRCOX_SOUND_EXCERPTS_SUBDIR,
|
program,
|
||||||
|
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR,
|
||||||
type=Sound.TYPE_EXCERPT,
|
type=Sound.TYPE_EXCERPT,
|
||||||
)
|
)
|
||||||
dirs.append(os.path.join(program.abspath))
|
dirs.append(os.path.join(program.abspath))
|
||||||
return dirs
|
return dirs
|
||||||
|
|
||||||
def scan_for_program(self, program, subdir, **sound_kwargs):
|
def scan_for_program(self, program, subdir, **sound_kwargs):
|
||||||
"""
|
"""Scan a given directory that is associated to the given program, and
|
||||||
Scan a given directory that is associated to the given program, and
|
update sounds information."""
|
||||||
update sounds information.
|
logger.info("- %s/", subdir)
|
||||||
"""
|
|
||||||
logger.info('- %s/', subdir)
|
|
||||||
if not program.ensure_dir(subdir):
|
if not program.ensure_dir(subdir):
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -97,37 +99,49 @@ class Command(BaseCommand):
|
||||||
sounds.append(sound_file.sound.pk)
|
sounds.append(sound_file.sound.pk)
|
||||||
|
|
||||||
# sounds in db & unchecked
|
# sounds in db & unchecked
|
||||||
sounds = Sound.objects.filter(file__startswith=subdir). \
|
sounds = Sound.objects.filter(file__startswith=subdir).exclude(
|
||||||
exclude(pk__in=sounds)
|
pk__in=sounds
|
||||||
|
)
|
||||||
self.check_sounds(sounds, program=program)
|
self.check_sounds(sounds, program=program)
|
||||||
|
|
||||||
def check_sounds(self, qs, **sync_kwargs):
|
def check_sounds(self, qs, **sync_kwargs):
|
||||||
""" Only check for the sound existence or update """
|
"""Only check for the sound existence or update."""
|
||||||
# check files
|
# check files
|
||||||
for sound in qs:
|
for sound in qs:
|
||||||
if sound.check_on_file():
|
if sound.check_on_file():
|
||||||
SoundFile(sound.file.path).sync(sound=sound, **sync_kwargs)
|
SoundFile(sound.file.path).sync(sound=sound, **sync_kwargs)
|
||||||
|
|
||||||
def monitor(self):
|
def monitor(self):
|
||||||
""" Run in monitor mode """
|
"""Run in monitor mode."""
|
||||||
with futures.ThreadPoolExecutor() as pool:
|
with futures.ThreadPoolExecutor() as pool:
|
||||||
archives_handler = MonitorHandler(
|
archives_handler = MonitorHandler(
|
||||||
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR, pool,
|
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR,
|
||||||
type=Sound.TYPE_ARCHIVE)
|
pool,
|
||||||
|
type=Sound.TYPE_ARCHIVE,
|
||||||
|
)
|
||||||
excerpts_handler = MonitorHandler(
|
excerpts_handler = MonitorHandler(
|
||||||
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR, pool,
|
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR,
|
||||||
type=Sound.TYPE_EXCERPT)
|
pool,
|
||||||
|
type=Sound.TYPE_EXCERPT,
|
||||||
|
)
|
||||||
|
|
||||||
observer = Observer()
|
observer = Observer()
|
||||||
observer.schedule(archives_handler, settings.AIRCOX_PROGRAMS_DIR_ABS,
|
observer.schedule(
|
||||||
recursive=True)
|
archives_handler,
|
||||||
observer.schedule(excerpts_handler, settings.AIRCOX_PROGRAMS_DIR_ABS,
|
settings.AIRCOX_PROGRAMS_DIR_ABS,
|
||||||
recursive=True)
|
recursive=True,
|
||||||
|
)
|
||||||
|
observer.schedule(
|
||||||
|
excerpts_handler,
|
||||||
|
settings.AIRCOX_PROGRAMS_DIR_ABS,
|
||||||
|
recursive=True,
|
||||||
|
)
|
||||||
observer.start()
|
observer.start()
|
||||||
|
|
||||||
def leave():
|
def leave():
|
||||||
observer.stop()
|
observer.stop()
|
||||||
observer.join()
|
observer.join()
|
||||||
|
|
||||||
atexit.register(leave)
|
atexit.register(leave)
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
|
@ -136,25 +150,31 @@ class Command(BaseCommand):
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
parser.formatter_class = RawTextHelpFormatter
|
parser.formatter_class = RawTextHelpFormatter
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-q', '--quality_check', action='store_true',
|
"-q",
|
||||||
help='Enable quality check using sound_quality_check on all '
|
"--quality_check",
|
||||||
'sounds marqued as not good'
|
action="store_true",
|
||||||
|
help="Enable quality check using sound_quality_check on all "
|
||||||
|
"sounds marqued as not good",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-s', '--scan', action='store_true',
|
"-s",
|
||||||
help='Scan programs directories for changes, plus check for a '
|
"--scan",
|
||||||
' matching diffusion on sounds that have not been yet assigned'
|
action="store_true",
|
||||||
|
help="Scan programs directories for changes, plus check for a "
|
||||||
|
" matching diffusion on sounds that have not been yet assigned",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-m', '--monitor', action='store_true',
|
"-m",
|
||||||
help='Run in monitor mode, watch for modification in the filesystem '
|
"--monitor",
|
||||||
'and react in consequence'
|
action="store_true",
|
||||||
|
help="Run in monitor mode, watch for modification in the "
|
||||||
|
"filesystem and react in consequence",
|
||||||
)
|
)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
if options.get('scan'):
|
if options.get("scan"):
|
||||||
self.scan()
|
self.scan()
|
||||||
#if options.get('quality_check'):
|
# if options.get('quality_check'):
|
||||||
# self.check_quality(check=(not options.get('scan')))
|
# self.check_quality(check=(not options.get('scan')))
|
||||||
if options.get('monitor'):
|
if options.get("monitor"):
|
||||||
self.monitor()
|
self.monitor()
|
||||||
|
|
|
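Stripped of the Aircox models, the watchdog wiring in monitor() above reduces to the pattern below. The watched directory, the glob pattern and the print-only handler are placeholders, not the project's settings or handlers.

import atexit
import os
import time

from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer


class PrintHandler(PatternMatchingEventHandler):
    """Placeholder handler: prints events instead of syncing sounds."""

    def on_created(self, event):
        print("created:", event.src_path)

    def on_modified(self, event):
        print("modified:", event.src_path)


watched = "/tmp/programs"            # placeholder directory
os.makedirs(watched, exist_ok=True)

handler = PrintHandler(patterns=["*/archives/*.ogg"], ignore_directories=True)
observer = Observer()
observer.schedule(handler, watched, recursive=True)
observer.start()


def leave():
    observer.stop()
    observer.join()


atexit.register(leave)

while True:                          # the management command idles the same way
    time.sleep(1)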
@ -1,17 +1,15 @@
|
||||||
"""
|
"""Analyse and check files using Sox, prints good and bad files."""
|
||||||
Analyse and check files using Sox, prints good and bad files.
|
|
||||||
"""
|
|
||||||
import logging
|
import logging
|
||||||
from argparse import RawTextHelpFormatter
|
from argparse import RawTextHelpFormatter
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand, CommandError
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
|
||||||
from aircox.management.sound_stats import SoxStats, SoundStats
|
from aircox.management.sound_stats import SoundStats, SoxStats
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.commands')
|
logger = logging.getLogger("aircox.commands")
|
||||||
|
|
||||||
|
|
||||||
class Command (BaseCommand):
|
class Command(BaseCommand):
|
||||||
help = __doc__
|
help = __doc__
|
||||||
sounds = None
|
sounds = None
|
||||||
|
|
||||||
|
@ -19,46 +17,61 @@ class Command (BaseCommand):
|
||||||
parser.formatter_class = RawTextHelpFormatter
|
parser.formatter_class = RawTextHelpFormatter
|
||||||
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'files', metavar='FILE', type=str, nargs='+',
|
"files",
|
||||||
help='file(s) to analyse'
|
metavar="FILE",
|
||||||
|
type=str,
|
||||||
|
nargs="+",
|
||||||
|
help="file(s) to analyse",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-s', '--sample_length', type=int, default=120,
|
"-s",
|
||||||
help='size of sample to analyse in seconds. If not set (or 0), does'
|
"--sample_length",
|
||||||
' not analyse by sample',
|
type=int,
|
||||||
|
default=120,
|
||||||
|
help="size of sample to analyse in seconds. If not set (or 0), "
|
||||||
|
"does not analyse by sample",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-a', '--attribute', type=str,
|
"-a",
|
||||||
help='attribute name to use to check, that can be:\n' +
|
"--attribute",
|
||||||
', '.join(['"{}"'.format(attr) for attr in SoxStats.attributes])
|
type=str,
|
||||||
|
help="attribute name to use to check, that can be:\n"
|
||||||
|
+ ", ".join(['"{}"'.format(attr) for attr in SoxStats.attributes]),
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-r', '--range', type=float, nargs=2,
|
"-r",
|
||||||
help='range of minimal and maximal accepted value such as: '
|
"--range",
|
||||||
'--range min max'
|
type=float,
|
||||||
|
nargs=2,
|
||||||
|
help="range of minimal and maximal accepted value such as: "
|
||||||
|
"--range min max",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-i', '--resume', action='store_true',
|
"-i",
|
||||||
help='print a resume of good and bad files'
|
"--resume",
|
||||||
|
action="store_true",
|
||||||
|
help="print a resume of good and bad files",
|
||||||
)
|
)
|
||||||
|
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
# parameters
|
# parameters
|
||||||
minmax = options.get('range')
|
minmax = options.get("range")
|
||||||
if not minmax:
|
if not minmax:
|
||||||
raise CommandError('no range specified')
|
raise CommandError("no range specified")
|
||||||
|
|
||||||
attr = options.get('attribute')
|
attr = options.get("attribute")
|
||||||
if not attr:
|
if not attr:
|
||||||
raise CommandError('no attribute specified')
|
raise CommandError("no attribute specified")
|
||||||
|
|
||||||
# sound analyse and checks
|
# sound analyse and checks
|
||||||
self.sounds = [SoundStats(path, options.get('sample_length'))
|
self.sounds = [
|
||||||
for path in options.get('files')]
|
SoundStats(path, options.get("sample_length"))
|
||||||
|
for path in options.get("files")
|
||||||
|
]
|
||||||
self.bad = []
|
self.bad = []
|
||||||
self.good = []
|
self.good = []
|
||||||
for sound in self.sounds:
|
for sound in self.sounds:
|
||||||
logger.info('analyse ' + sound.path)
|
logger.info("analyse " + sound.path)
|
||||||
sound.analyse()
|
sound.analyse()
|
||||||
sound.check(attr, minmax[0], minmax[1])
|
sound.check(attr, minmax[0], minmax[1])
|
||||||
if sound.bad:
|
if sound.bad:
|
||||||
|
@ -67,8 +80,8 @@ class Command (BaseCommand):
|
||||||
self.good.append(sound)
|
self.good.append(sound)
|
||||||
|
|
||||||
# resume
|
# resume
|
||||||
if options.get('resume'):
|
if options.get("resume"):
|
||||||
for sound in self.good:
|
for sound in self.good:
|
||||||
logger.info('\033[92m+ %s\033[0m', sound.path)
|
logger.info("\033[92m+ %s\033[0m", sound.path)
|
||||||
for sound in self.bad:
|
for sound in self.bad:
|
||||||
logger.info('\033[91m+ %s\033[0m', sound.path)
|
logger.info("\033[91m+ %s\033[0m", sound.path)
|
||||||
|
|
|
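The good/bad partition this command reports can be shown without Sox at all; the sketch below uses invented "RMS lev dB" measurements in place of what SoxStats would return, with index 0 standing for the whole file and later indexes for fixed-length samples, as in resume().

# Illustration only: the measurements are made up.
sample_length = 120
measurements = [-18.2, -17.5, -60.0, -19.1]   # per-sample "RMS lev dB"

def check(values, min_val, max_val):
    good = [i for i, v in enumerate(values) if min_val <= v <= max_val]
    bad = [i for i in range(len(values)) if i not in good]
    return good, bad

good, bad = check(measurements, -30.0, 0.0)
print(good)  # [0, 1, 3] -> whole file, sample at 0 s, sample at 240 s
print(bad)   # [2]       -> the sample starting at 120 s is outside the range

On the command line this would correspond to something like -a "RMS lev dB" -r -30 0 -i, with the attribute name taken from SoxStats.attributes.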
@ -1,7 +1,5 @@
|
||||||
#! /usr/bin/env python3
|
#! /usr/bin/env python3
|
||||||
"""
|
"""Provide SoundFile which is used to link between database and file system.
|
||||||
Provide SoundFile which is used to link between database and file system.
|
|
||||||
|
|
||||||
|
|
||||||
File name
|
File name
|
||||||
=========
|
=========
|
||||||
|
@ -22,28 +20,27 @@ To check quality of files, call the command sound_quality_check using the
|
||||||
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
||||||
Sox (and soxi).
|
Sox (and soxi).
|
||||||
"""
|
"""
|
||||||
from datetime import date
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
from datetime import date
|
||||||
|
|
||||||
import mutagen
|
import mutagen
|
||||||
|
|
||||||
from django.conf import settings as conf
|
from django.conf import settings as conf
|
||||||
from django.utils import timezone as tz
|
from django.utils import timezone as tz
|
||||||
from django.utils.translation import gettext as _
|
from django.utils.translation import gettext as _
|
||||||
|
|
||||||
from aircox import utils
|
from aircox import utils
|
||||||
from aircox.models import Program, Sound, Track
|
from aircox.models import Program, Sound, Track
|
||||||
|
|
||||||
from .commands.import_playlist import PlaylistImport
|
from .commands.import_playlist import PlaylistImport
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.commands')
|
logger = logging.getLogger("aircox.commands")
|
||||||
|
|
||||||
|
|
||||||
class SoundFile:
|
class SoundFile:
|
||||||
"""
|
"""Handle synchronisation between sounds on files and database."""
|
||||||
Handle synchronisation between sounds on files and database.
|
|
||||||
"""
|
|
||||||
path = None
|
path = None
|
||||||
info = None
|
info = None
|
||||||
path_info = None
|
path_info = None
|
||||||
|
@ -54,18 +51,22 @@ class SoundFile:
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def sound_path(self):
|
def sound_path(self):
|
||||||
""" Relative path name """
|
"""Relative path name."""
|
||||||
return self.path.replace(conf.MEDIA_ROOT + '/', '')
|
return self.path.replace(conf.MEDIA_ROOT + "/", "")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def episode(self):
|
def episode(self):
|
||||||
return self.sound and self.sound.episode
|
return self.sound and self.sound.episode
|
||||||
|
|
||||||
def sync(self, sound=None, program=None, deleted=False, keep_deleted=False,
|
def sync(
|
||||||
**kwargs):
|
self,
|
||||||
"""
|
sound=None,
|
||||||
Update related sound model and save it.
|
program=None,
|
||||||
"""
|
deleted=False,
|
||||||
|
keep_deleted=False,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
"""Update related sound model and save it."""
|
||||||
if deleted:
|
if deleted:
|
||||||
return self._on_delete(self.path, keep_deleted)
|
return self._on_delete(self.path, keep_deleted)
|
||||||
|
|
||||||
|
@ -73,26 +74,27 @@ class SoundFile:
|
||||||
if not program:
|
if not program:
|
||||||
program = Program.get_from_path(self.path)
|
program = Program.get_from_path(self.path)
|
||||||
logger.debug('program from path "%s" -> %s', self.path, program)
|
logger.debug('program from path "%s" -> %s', self.path, program)
|
||||||
kwargs['program_id'] = program.pk
|
kwargs["program_id"] = program.pk
|
||||||
|
|
||||||
if sound:
|
if sound:
|
||||||
created = False
|
created = False
|
||||||
else:
|
else:
|
||||||
sound, created = Sound.objects.get_or_create(
|
sound, created = Sound.objects.get_or_create(
|
||||||
file=self.sound_path, defaults=kwargs)
|
file=self.sound_path, defaults=kwargs
|
||||||
|
)
|
||||||
|
|
||||||
self.sound = sound
|
self.sound = sound
|
||||||
self.path_info = self.read_path(self.path)
|
self.path_info = self.read_path(self.path)
|
||||||
|
|
||||||
sound.program = program
|
sound.program = program
|
||||||
if created or sound.check_on_file():
|
if created or sound.check_on_file():
|
||||||
sound.name = self.path_info.get('name')
|
sound.name = self.path_info.get("name")
|
||||||
self.info = self.read_file_info()
|
self.info = self.read_file_info()
|
||||||
if self.info is not None:
|
if self.info is not None:
|
||||||
sound.duration = utils.seconds_to_time(self.info.info.length)
|
sound.duration = utils.seconds_to_time(self.info.info.length)
|
||||||
|
|
||||||
# check for episode
|
# check for episode
|
||||||
if sound.episode is None and 'year' in self.path_info:
|
if sound.episode is None and "year" in self.path_info:
|
||||||
sound.episode = self.find_episode(sound, self.path_info)
|
sound.episode = self.find_episode(sound, self.path_info)
|
||||||
sound.save()
|
sound.save()
|
||||||
|
|
||||||
|
@ -114,8 +116,9 @@ class SoundFile:
|
||||||
Sound.objects.path(self.path).delete()
|
Sound.objects.path(self.path).delete()
|
||||||
|
|
||||||
def read_path(self, path):
|
def read_path(self, path):
|
||||||
"""
|
"""Parse path name returning dictionary of extracted info. It can
|
||||||
Parse path name returning dictionary of extracted info. It can contain:
|
contain:
|
||||||
|
|
||||||
- `year`, `month`, `day`: diffusion date
|
- `year`, `month`, `day`: diffusion date
|
||||||
- `hour`, `minute`: diffusion time
|
- `hour`, `minute`: diffusion time
|
||||||
- `n`: sound arbitrary number (used for sound ordering)
|
- `n`: sound arbitrary number (used for sound ordering)
|
||||||
|
@ -126,29 +129,29 @@ class SoundFile:
|
||||||
reg_match = self._path_re.search(basename)
|
reg_match = self._path_re.search(basename)
|
||||||
if reg_match:
|
if reg_match:
|
||||||
info = reg_match.groupdict()
|
info = reg_match.groupdict()
|
||||||
for k in ('year', 'month', 'day', 'hour', 'minute', 'n'):
|
for k in ("year", "month", "day", "hour", "minute", "n"):
|
||||||
if info.get(k) is not None:
|
if info.get(k) is not None:
|
||||||
info[k] = int(info[k])
|
info[k] = int(info[k])
|
||||||
|
|
||||||
name = info.get('name')
|
name = info.get("name")
|
||||||
info['name'] = name and self._into_name(name) or basename
|
info["name"] = name and self._into_name(name) or basename
|
||||||
else:
|
else:
|
||||||
info = {'name': basename}
|
info = {"name": basename}
|
||||||
return info
|
return info
|
||||||
|
|
||||||
_path_re = re.compile(
|
_path_re = re.compile(
|
||||||
'^(?P<year>[0-9]{4})(?P<month>[0-9]{2})(?P<day>[0-9]{2})'
|
"^(?P<year>[0-9]{4})(?P<month>[0-9]{2})(?P<day>[0-9]{2})"
|
||||||
'(_(?P<hour>[0-9]{2})h(?P<minute>[0-9]{2}))?'
|
"(_(?P<hour>[0-9]{2})h(?P<minute>[0-9]{2}))?"
|
||||||
'(_(?P<n>[0-9]+))?'
|
"(_(?P<n>[0-9]+))?"
|
||||||
'_?[ -]*(?P<name>.*)$'
|
"_?[ -]*(?P<name>.*)$"
|
||||||
)
|
)
|
||||||
|
|
||||||
def _into_name(self, name):
|
def _into_name(self, name):
|
||||||
name = name.replace('_', ' ')
|
name = name.replace("_", " ")
|
||||||
return ' '.join(r.capitalize() for r in name.split(' '))
|
return " ".join(r.capitalize() for r in name.split(" "))
|
||||||
|
|
||||||
def read_file_info(self):
|
def read_file_info(self):
|
||||||
""" Read file information and metadata. """
|
"""Read file information and metadata."""
|
||||||
try:
|
try:
|
||||||
if os.path.exists(self.path):
|
if os.path.exists(self.path):
|
||||||
return mutagen.File(self.path)
|
return mutagen.File(self.path)
|
||||||
|
@ -157,22 +160,21 @@ class SoundFile:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def find_episode(self, sound, path_info):
|
def find_episode(self, sound, path_info):
|
||||||
"""
|
"""For a given program, check if there is an initial diffusion to
|
||||||
For a given program, check if there is an initial diffusion
|
associate to, using the date info we have. Update self.sound and save
|
||||||
to associate to, using the date info we have. Update self.sound
|
it accordingly.
|
||||||
and save it consequently.
|
|
||||||
|
|
||||||
We only allow initial diffusion since there should be no
|
We only allow initial diffusion since there should be no rerun.
|
||||||
rerun.
|
|
||||||
"""
|
"""
|
||||||
program, pi = sound.program, path_info
|
program, pi = sound.program, path_info
|
||||||
if 'year' not in pi or not sound or sound.episode:
|
if "year" not in pi or not sound or sound.episode:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
year, month, day = pi.get('year'), pi.get('month'), pi.get('day')
|
year, month, day = pi.get("year"), pi.get("month"), pi.get("day")
|
||||||
if pi.get('hour') is not None:
|
if pi.get("hour") is not None:
|
||||||
at = tz.datetime(year, month, day, pi.get('hour', 0),
|
at = tz.datetime(
|
||||||
pi.get('minute', 0))
|
year, month, day, pi.get("hour", 0), pi.get("minute", 0)
|
||||||
|
)
|
||||||
at = tz.get_current_timezone().localize(at)
|
at = tz.get_current_timezone().localize(at)
|
||||||
else:
|
else:
|
||||||
at = date(year, month, day)
|
at = date(year, month, day)
|
||||||
|
@ -181,13 +183,12 @@ class SoundFile:
|
||||||
if not diffusion:
|
if not diffusion:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
logger.debug('%s <--> %s', sound.file.name, str(diffusion.episode))
|
logger.debug("%s <--> %s", sound.file.name, str(diffusion.episode))
|
||||||
return diffusion.episode
|
return diffusion.episode
|
||||||
|
|
||||||
def find_playlist(self, sound=None, use_meta=True):
|
def find_playlist(self, sound=None, use_meta=True):
|
||||||
"""
|
"""Find a playlist file corresponding to the sound path, such as:
|
||||||
Find a playlist file corresponding to the sound path, such as:
|
my_sound.ogg => my_sound.csv.
|
||||||
my_sound.ogg => my_sound.csv
|
|
||||||
|
|
||||||
Use sound's file metadata if no corresponding playlist has been
|
Use sound's file metadata if no corresponding playlist has been
|
||||||
found and `use_meta` is True.
|
found and `use_meta` is True.
|
||||||
|
@ -199,7 +200,7 @@ class SoundFile:
|
||||||
|
|
||||||
# import playlist
|
# import playlist
|
||||||
path_noext, ext = os.path.splitext(self.sound.file.path)
|
path_noext, ext = os.path.splitext(self.sound.file.path)
|
||||||
path = path_noext + '.csv'
|
path = path_noext + ".csv"
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
PlaylistImport(path, sound=sound).run()
|
PlaylistImport(path, sound=sound).run()
|
||||||
# use metadata
|
# use metadata
|
||||||
|
@ -209,18 +210,27 @@ class SoundFile:
|
||||||
if self.info and self.info.tags:
|
if self.info and self.info.tags:
|
||||||
tags = self.info.tags
|
tags = self.info.tags
|
||||||
title, artist, album, year = tuple(
|
title, artist, album, year = tuple(
|
||||||
t and ', '.join(t) for t in (
|
t and ", ".join(t)
|
||||||
tags.get(k) for k in ('title', 'artist', 'album',
|
for t in (
|
||||||
'year'))
|
tags.get(k)
|
||||||
|
for k in ("title", "artist", "album", "year")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
title = (
|
||||||
|
title
|
||||||
|
or (self.path_info and self.path_info.get("name"))
|
||||||
|
or os.path.basename(path_noext)
|
||||||
|
)
|
||||||
|
info = (
|
||||||
|
"{} ({})".format(album, year)
|
||||||
|
if album and year
|
||||||
|
else album or year or ""
|
||||||
|
)
|
||||||
|
track = Track(
|
||||||
|
sound=sound,
|
||||||
|
position=int(tags.get("tracknumber", 0)),
|
||||||
|
title=title,
|
||||||
|
artist=artist or _("unknown"),
|
||||||
|
info=info,
|
||||||
)
|
)
|
||||||
title = title or (self.path_info and
|
|
||||||
self.path_info.get('name')) or \
|
|
||||||
os.path.basename(path_noext)
|
|
||||||
info = '{} ({})'.format(album, year) if album and year else \
|
|
||||||
album or year or ''
|
|
||||||
track = Track(sound=sound,
|
|
||||||
position=int(tags.get('tracknumber', 0)),
|
|
||||||
title=title,
|
|
||||||
artist=artist or _('unknown'),
|
|
||||||
info=info)
|
|
||||||
track.save()
|
track.save()
|
||||||
|
|
|
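The file-name convention handled by read_path() above is easiest to see on an example. The name below is invented, but it goes through the same _path_re pattern as in the hunk, before the integer conversion and the capitalisation done by _into_name().

import os
import re

# Same pattern as SoundFile._path_re above; the file name is invented.
_path_re = re.compile(
    "^(?P<year>[0-9]{4})(?P<month>[0-9]{2})(?P<day>[0-9]{2})"
    "(_(?P<hour>[0-9]{2})h(?P<minute>[0-9]{2}))?"
    "(_(?P<n>[0-9]+))?"
    "_?[ -]*(?P<name>.*)$"
)

basename = os.path.splitext("20230401_18h00_1_late show.ogg")[0]
info = _path_re.search(basename).groupdict()
print(info)
# {'year': '2023', 'month': '04', 'day': '01', 'hour': '18',
#  'minute': '00', 'n': '1', 'name': 'late show'}

print(" ".join(part.capitalize() for part in info["name"].split(" ")))
# Late Show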
@ -1,7 +1,7 @@
|
||||||
#! /usr/bin/env python3
|
#! /usr/bin/env python3
|
||||||
|
|
||||||
"""
|
"""Monitor sound files; For each program, check for:
|
||||||
Monitor sound files; For each program, check for:
|
|
||||||
- new files;
|
- new files;
|
||||||
- deleted files;
|
- deleted files;
|
||||||
- differences between files and sound;
|
- differences between files and sound;
|
||||||
|
@ -23,9 +23,9 @@ To check quality of files, call the command sound_quality_check using the
|
||||||
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
||||||
Sox (and soxi).
|
Sox (and soxi).
|
||||||
"""
|
"""
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
from watchdog.events import PatternMatchingEventHandler
|
from watchdog.events import PatternMatchingEventHandler
|
||||||
|
|
||||||
|
@ -34,12 +34,17 @@ from aircox.models import Sound
|
||||||
|
|
||||||
from .sound_file import SoundFile
|
from .sound_file import SoundFile
|
||||||
|
|
||||||
|
logger = logging.getLogger("aircox.commands")
|
||||||
logger = logging.getLogger('aircox.commands')
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ('NotifyHandler', 'CreateHandler', 'DeleteHandler',
|
__all__ = (
|
||||||
'MoveHandler', 'ModifiedHandler', 'MonitorHandler',)
|
"NotifyHandler",
|
||||||
|
"CreateHandler",
|
||||||
|
"DeleteHandler",
|
||||||
|
"MoveHandler",
|
||||||
|
"ModifiedHandler",
|
||||||
|
"MonitorHandler",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class NotifyHandler:
|
class NotifyHandler:
|
||||||
|
@ -63,34 +68,34 @@ class NotifyHandler:
|
||||||
|
|
||||||
|
|
||||||
class CreateHandler(NotifyHandler):
|
class CreateHandler(NotifyHandler):
|
||||||
log_msg = 'Sound file created: {sound_file.path}'
|
log_msg = "Sound file created: {sound_file.path}"
|
||||||
|
|
||||||
|
|
||||||
class DeleteHandler(NotifyHandler):
|
class DeleteHandler(NotifyHandler):
|
||||||
log_msg = 'Sound file deleted: {sound_file.path}'
|
log_msg = "Sound file deleted: {sound_file.path}"
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
def __call__(self, *args, **kwargs):
|
||||||
kwargs['deleted'] = True
|
kwargs["deleted"] = True
|
||||||
return super().__call__(*args, **kwargs)
|
return super().__call__(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class MoveHandler(NotifyHandler):
|
class MoveHandler(NotifyHandler):
|
||||||
log_msg = 'Sound file moved: {event.src_path} -> {event.dest_path}'
|
log_msg = "Sound file moved: {event.src_path} -> {event.dest_path}"
|
||||||
|
|
||||||
def __call__(self, event, **kw):
|
def __call__(self, event, **kw):
|
||||||
sound = Sound.objects.filter(file=event.src_path)
|
sound = Sound.objects.filter(file=event.src_path)
|
||||||
# FIXME: this is wrong
|
# FIXME: this is wrong
|
||||||
if sound:
|
if sound:
|
||||||
kw['sound'] = sound
|
kw["sound"] = sound
|
||||||
kw['path'] = event.src_path
|
kw["path"] = event.src_path
|
||||||
else:
|
else:
|
||||||
kw['path'] = event.dest_path
|
kw["path"] = event.dest_path
|
||||||
return super().__call__(event, **kw)
|
return super().__call__(event, **kw)
|
||||||
|
|
||||||
|
|
||||||
class ModifiedHandler(NotifyHandler):
|
class ModifiedHandler(NotifyHandler):
|
||||||
timeout_delta = timedelta(seconds=30)
|
timeout_delta = timedelta(seconds=30)
|
||||||
log_msg = 'Sound file updated: {sound_file.path}'
|
log_msg = "Sound file updated: {sound_file.path}"
|
||||||
|
|
||||||
def wait(self):
|
def wait(self):
|
||||||
# multiple call of this handler can be done consecutively, we block
|
# multiple calls of this handler can happen consecutively; we block
|
||||||
|
@ -108,9 +113,8 @@ class ModifiedHandler(NotifyHandler):
|
||||||
|
|
||||||
|
|
||||||
class MonitorHandler(PatternMatchingEventHandler):
|
class MonitorHandler(PatternMatchingEventHandler):
|
||||||
"""
|
"""Event handler for watchdog, in order to be used in monitoring."""
|
||||||
Event handler for watchdog, in order to be used in monitoring.
|
|
||||||
"""
|
|
||||||
pool = None
|
pool = None
|
||||||
jobs = {}
|
jobs = {}
|
||||||
|
|
||||||
|
@ -118,35 +122,39 @@ class MonitorHandler(PatternMatchingEventHandler):
|
||||||
"""
|
"""
|
||||||
:param str subdir: sub-directory in program dirs to monitor \
|
:param str subdir: sub-directory in program dirs to monitor \
|
||||||
(AIRCOX_SOUND_ARCHIVES_SUBDIR or AIRCOX_SOUND_EXCERPTS_SUBDIR);
|
(AIRCOX_SOUND_ARCHIVES_SUBDIR or AIRCOX_SOUND_EXCERPTS_SUBDIR);
|
||||||
:param concurrent.futures.Executor pool: pool executing jobs on file change;
|
:param concurrent.futures.Executor pool: pool executing jobs on file
|
||||||
|
change;
|
||||||
:param **sync_kw: kwargs passed to `SoundFile.sync`;
|
:param **sync_kw: kwargs passed to `SoundFile.sync`;
|
||||||
"""
|
"""
|
||||||
self.subdir = subdir
|
self.subdir = subdir
|
||||||
self.pool = pool
|
self.pool = pool
|
||||||
self.sync_kw = sync_kw
|
self.sync_kw = sync_kw
|
||||||
|
|
||||||
patterns = ['*/{}/*{}'.format(self.subdir, ext)
|
patterns = [
|
||||||
for ext in settings.AIRCOX_SOUND_FILE_EXT]
|
"*/{}/*{}".format(self.subdir, ext)
|
||||||
|
for ext in settings.AIRCOX_SOUND_FILE_EXT
|
||||||
|
]
|
||||||
super().__init__(patterns=patterns, ignore_directories=True)
|
super().__init__(patterns=patterns, ignore_directories=True)
|
||||||
|
|
||||||
def on_created(self, event):
|
def on_created(self, event):
|
||||||
self._submit(CreateHandler(), event, 'new', **self.sync_kw)
|
self._submit(CreateHandler(), event, "new", **self.sync_kw)
|
||||||
|
|
||||||
def on_deleted(self, event):
|
def on_deleted(self, event):
|
||||||
self._submit(DeleteHandler(), event, 'del')
|
self._submit(DeleteHandler(), event, "del")
|
||||||
|
|
||||||
def on_moved(self, event):
|
def on_moved(self, event):
|
||||||
self._submit(MoveHandler(), event, 'mv', **self.sync_kw)
|
self._submit(MoveHandler(), event, "mv", **self.sync_kw)
|
||||||
|
|
||||||
def on_modified(self, event):
|
def on_modified(self, event):
|
||||||
self._submit(ModifiedHandler(), event, 'up', **self.sync_kw)
|
self._submit(ModifiedHandler(), event, "up", **self.sync_kw)
|
||||||
|
|
||||||
def _submit(self, handler, event, job_key_prefix, **kwargs):
|
def _submit(self, handler, event, job_key_prefix, **kwargs):
|
||||||
|
"""Send handler job to pool if not already running.
|
||||||
|
|
||||||
|
Return a tuple with the running job and a boolean indicating if it is a
|
||||||
|
new one.
|
||||||
"""
|
"""
|
||||||
Send handler job to pool if not already running.
|
key = job_key_prefix + ":" + event.src_path
|
||||||
Return tuple with running job and boolean indicating if its a new one.
|
|
||||||
"""
|
|
||||||
key = job_key_prefix + ':' + event.src_path
|
|
||||||
job = self.jobs.get(key)
|
job = self.jobs.get(key)
|
||||||
if job and not job.future.done():
|
if job and not job.future.done():
|
||||||
job.ping()
|
job.ping()
|
||||||
|
@ -158,5 +166,6 @@ class MonitorHandler(PatternMatchingEventHandler):
|
||||||
def done(r):
|
def done(r):
|
||||||
if self.jobs.get(key) is handler:
|
if self.jobs.get(key) is handler:
|
||||||
del self.jobs[key]
|
del self.jobs[key]
|
||||||
|
|
||||||
handler.future.add_done_callback(done)
|
handler.future.add_done_callback(done)
|
||||||
return handler, True
|
return handler, True
|
||||||
|
|
|
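The de-duplication in _submit() above, jobs keyed by event kind plus path, with repeated events only pinging a job that is still running, can be reproduced without watchdog. The names below are simplified stand-ins, not the project's handler classes.

import concurrent.futures as futures
import time

jobs = {}


class Job:
    """Pretend sync job: sleeps instead of touching files or the database."""

    def __init__(self):
        self.pings = 0
        self.future = None

    def ping(self):
        self.pings += 1

    def __call__(self, path):
        time.sleep(0.2)
        return path


def submit(pool, kind, path):
    key = kind + ":" + path
    job = jobs.get(key)
    if job and not job.future.done():
        job.ping()                      # already running: just notify it
        return job, False

    job = jobs[key] = Job()
    job.future = pool.submit(job, path)

    def done(_):
        if jobs.get(key) is job:        # same identity guard as the original
            del jobs[key]

    job.future.add_done_callback(done)
    return job, True


with futures.ThreadPoolExecutor() as pool:
    first, created = submit(pool, "up", "/tmp/a.ogg")
    again, created_again = submit(pool, "up", "/tmp/a.ogg")
    print(created, created_again, first is again)  # usually: True False True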
@ -1,30 +1,31 @@
|
||||||
"""
|
"""Provide sound analysis class using Sox."""
|
||||||
Provide sound analysis class using Sox.
|
|
||||||
"""
|
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.commands')
|
logger = logging.getLogger("aircox.commands")
|
||||||
|
|
||||||
|
|
||||||
__all__ = ('SoxStats', 'SoundStats')
|
__all__ = ("SoxStats", "SoundStats")
|
||||||
|
|
||||||
|
|
||||||
class SoxStats:
|
class SoxStats:
|
||||||
"""
|
"""Run Sox process and parse output."""
|
||||||
Run Sox process and parse output
|
|
||||||
"""
|
|
||||||
attributes = [
|
attributes = [
|
||||||
'DC offset', 'Min level', 'Max level',
|
"DC offset",
|
||||||
'Pk lev dB', 'RMS lev dB', 'RMS Pk dB',
|
"Min level",
|
||||||
'RMS Tr dB', 'Flat factor', 'Length s',
|
"Max level",
|
||||||
|
"Pk lev dB",
|
||||||
|
"RMS lev dB",
|
||||||
|
"RMS Pk dB",
|
||||||
|
"RMS Tr dB",
|
||||||
|
"Flat factor",
|
||||||
|
"Length s",
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, path, **kwargs):
|
def __init__(self, path, **kwargs):
|
||||||
"""
|
"""If path is given, call analyse with path and kwargs."""
|
||||||
If path is given, call analyse with path and kwargs
|
|
||||||
"""
|
|
||||||
self.values = {}
|
self.values = {}
|
||||||
if path:
|
if path:
|
||||||
self.analyse(path, **kwargs)
|
self.analyse(path, **kwargs)
|
||||||
|
@ -34,82 +35,95 @@ class SoxStats:
|
||||||
|
|
||||||
def parse(self, output):
|
def parse(self, output):
|
||||||
for attr in self.attributes:
|
for attr in self.attributes:
|
||||||
value = re.search(attr + r'\s+(?P<value>\S+)', output)
|
value = re.search(attr + r"\s+(?P<value>\S+)", output)
|
||||||
value = value and value.groupdict()
|
value = value and value.groupdict()
|
||||||
if value:
|
if value:
|
||||||
try:
|
try:
|
||||||
value = float(value.get('value'))
|
value = float(value.get("value"))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
value = None
|
value = None
|
||||||
self.values[attr] = value
|
self.values[attr] = value
|
||||||
self.values['length'] = self.values['Length s']
|
self.values["length"] = self.values["Length s"]
|
||||||
|
|
||||||
def analyse(self, path, at=None, length=None):
|
def analyse(self, path, at=None, length=None):
|
||||||
"""
|
"""If at and length are given use them as excerpt to analyse."""
|
||||||
If at and length are given use them as excerpt to analyse.
|
args = ["sox", path, "-n"]
|
||||||
"""
|
|
||||||
args = ['sox', path, '-n']
|
|
||||||
|
|
||||||
if at is not None and length is not None:
|
if at is not None and length is not None:
|
||||||
args += ['trim', str(at), str(length)]
|
args += ["trim", str(at), str(length)]
|
||||||
|
|
||||||
args.append('stats')
|
args.append("stats")
|
||||||
|
|
||||||
p = subprocess.Popen(args, stdout=subprocess.PIPE,
|
p = subprocess.Popen(
|
||||||
stderr=subprocess.PIPE)
|
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||||
|
)
|
||||||
# sox outputs to stderr (my god WHYYYY)
|
# sox outputs to stderr (my god WHYYYY)
|
||||||
out_, out = p.communicate()
|
out_, out = p.communicate()
|
||||||
self.parse(str(out, encoding='utf-8'))
|
self.parse(str(out, encoding="utf-8"))
|
||||||
|
|
||||||
|
|
||||||
class SoundStats:
|
class SoundStats:
|
||||||
path = None # file path
|
path = None # file path
|
||||||
sample_length = 120 # default sample length in seconds
|
sample_length = 120 # default sample length in seconds
|
||||||
stats = None # list of samples statistics
|
stats = None # list of samples statistics
|
||||||
bad = None # list of bad samples
|
bad = None # list of bad samples
|
||||||
good = None # list of good samples
|
good = None # list of good samples
|
||||||
|
|
||||||
def __init__(self, path, sample_length=None):
|
def __init__(self, path, sample_length=None):
|
||||||
self.path = path
|
self.path = path
|
||||||
self.sample_length = sample_length if sample_length is not None \
|
self.sample_length = (
|
||||||
else self.sample_length
|
sample_length if sample_length is not None else self.sample_length
|
||||||
|
)
|
||||||
|
|
||||||
def get_file_stats(self):
|
def get_file_stats(self):
|
||||||
return self.stats and self.stats[0]
|
return self.stats and self.stats[0]
|
||||||
|
|
||||||
def analyse(self):
|
def analyse(self):
|
||||||
logger.debug('complete file analysis')
|
logger.debug("complete file analysis")
|
||||||
self.stats = [SoxStats(self.path)]
|
self.stats = [SoxStats(self.path)]
|
||||||
position = 0
|
position = 0
|
||||||
length = self.stats[0].get('length')
|
length = self.stats[0].get("length")
|
||||||
|
|
||||||
if not self.sample_length:
|
if not self.sample_length:
|
||||||
return
|
return
|
||||||
|
|
||||||
logger.debug('start samples analysis...')
|
logger.debug("start samples analysis...")
|
||||||
while position < length:
|
while position < length:
|
||||||
stats = SoxStats(self.path, at=position, length=self.sample_length)
|
stats = SoxStats(self.path, at=position, length=self.sample_length)
|
||||||
self.stats.append(stats)
|
self.stats.append(stats)
|
||||||
position += self.sample_length
|
position += self.sample_length
|
||||||
|
|
||||||
def check(self, name, min_val, max_val):
|
def check(self, name, min_val, max_val):
|
||||||
self.good = [index for index, stats in enumerate(self.stats)
|
self.good = [
|
||||||
if min_val <= stats.get(name) <= max_val]
|
index
|
||||||
self.bad = [index for index, stats in enumerate(self.stats)
|
for index, stats in enumerate(self.stats)
|
||||||
if index not in self.good]
|
if min_val <= stats.get(name) <= max_val
|
||||||
|
]
|
||||||
|
self.bad = [
|
||||||
|
index
|
||||||
|
for index, stats in enumerate(self.stats)
|
||||||
|
if index not in self.good
|
||||||
|
]
|
||||||
self.resume()
|
self.resume()
|
||||||
|
|
||||||
def resume(self):
|
def resume(self):
|
||||||
def view(array): return [
|
def view(array):
|
||||||
'file' if index == 0 else
|
return [
|
||||||
'sample {} (at {} seconds)'.format(
|
"file"
|
||||||
index, (index-1) * self.sample_length)
|
if index == 0
|
||||||
for index in array
|
else "sample {} (at {} seconds)".format(
|
||||||
]
|
index, (index - 1) * self.sample_length
|
||||||
|
)
|
||||||
|
for index in array
|
||||||
|
]
|
||||||
|
|
||||||
if self.good:
|
if self.good:
|
||||||
logger.debug(self.path + ' -> good: \033[92m%s\033[0m',
|
logger.debug(
|
||||||
', '.join(view(self.good)))
|
self.path + " -> good: \033[92m%s\033[0m",
|
||||||
|
", ".join(view(self.good)),
|
||||||
|
)
|
||||||
if self.bad:
|
if self.bad:
|
||||||
logger.debug(self.path + ' -> bad: \033[91m%s\033[0m',
|
logger.debug(
|
||||||
', '.join(view(self.bad)))
|
self.path + " -> bad: \033[91m%s\033[0m",
|
||||||
|
", ".join(view(self.bad)),
|
||||||
|
)
|
||||||
|
|
|
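The text that SoxStats.parse() above walks through is the stats report that `sox <file> -n stats` writes to stderr. The sample below is abridged and its figures are invented, but the regex extraction is the same.

import re

# Abridged, invented sample of a `sox <file> -n stats` report.
output = """
DC offset   0.000002
Min level  -0.851059
Max level   0.850903
Pk lev dB      -1.40
RMS lev dB    -19.60
RMS Pk dB     -14.89
RMS Tr dB     -85.22
Flat factor     0.00
Length s     218.080
"""

attributes = ["DC offset", "Min level", "Max level", "Pk lev dB",
              "RMS lev dB", "RMS Pk dB", "RMS Tr dB", "Flat factor",
              "Length s"]

values = {}
for attr in attributes:
    match = re.search(attr + r"\s+(?P<value>\S+)", output)
    if match:
        try:
            values[attr] = float(match.group("value"))
        except ValueError:
            values[attr] = None
values["length"] = values["Length s"]
print(values["RMS lev dB"], values["length"])  # -19.6 218.08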
@ -5,13 +5,13 @@ from django.utils import timezone as tz
|
||||||
from .models import Station
|
from .models import Station
|
||||||
from .utils import Redirect
|
from .utils import Redirect
|
||||||
|
|
||||||
|
__all__ = ("AircoxMiddleware",)
|
||||||
__all__ = ['AircoxMiddleware']
|
|
||||||
|
|
||||||
|
|
||||||
class AircoxMiddleware(object):
|
class AircoxMiddleware(object):
|
||||||
"""
|
"""Middleware used to get default info for the given website.
|
||||||
Middleware used to get default info for the given website. Theses
|
|
||||||
|
|
||||||
This middleware must be set after the middleware
|
This middleware must be set after the middleware
|
||||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||||
"""
|
"""
|
||||||
|
@ -20,11 +20,11 @@ class AircoxMiddleware(object):
|
||||||
self.get_response = get_response
|
self.get_response = get_response
|
||||||
|
|
||||||
def get_station(self, request):
|
def get_station(self, request):
|
||||||
""" Return station for the provided request """
|
"""Return station for the provided request."""
|
||||||
expr = Q(default=True) | Q(hosts__contains=request.get_host())
|
expr = Q(default=True) | Q(hosts__contains=request.get_host())
|
||||||
# case = Case(When(hosts__contains=request.get_host(), then=Value(0)),
|
# case = Case(When(hosts__contains=request.get_host(), then=Value(0)),
|
||||||
# When(default=True, then=Value(32)))
|
# When(default=True, then=Value(32)))
|
||||||
return Station.objects.filter(expr).order_by('default').first()
|
return Station.objects.filter(expr).order_by("default").first()
|
||||||
# .annotate(resolve_priority=case) \
|
# .annotate(resolve_priority=case) \
|
||||||
# .order_by('resolve_priority').first()
|
# .order_by('resolve_priority').first()
|
||||||
|
|
||||||
|
@ -33,10 +33,10 @@ class AircoxMiddleware(object):
|
||||||
# required
|
# required
|
||||||
timezone = None
|
timezone = None
|
||||||
try:
|
try:
|
||||||
timezone = request.session.get('aircox.timezone')
|
timezone = request.session.get("aircox.timezone")
|
||||||
if timezone:
|
if timezone:
|
||||||
timezone = pytz.timezone(timezone)
|
timezone = pytz.timezone(timezone)
|
||||||
except:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if not timezone:
|
if not timezone:
|
||||||
|
|
|
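A note on get_station() above: ordering the queryset by "default" is what lets a host-matched station win over the fallback, because False sorts before True. A plain-Python illustration with invented station names:

# Illustration only: station names and host are invented.
stations = [
    {"name": "Fallback", "default": True, "hosts": []},
    {"name": "LocalStation", "default": False, "hosts": ["radio.example.org"]},
]
host = "radio.example.org"

matches = [s for s in stations if s["default"] or host in s["hosts"]]
matches.sort(key=lambda s: s["default"])   # False < True
print(matches[0]["name"])                  # LocalStation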
@ -1,12 +1,48 @@
|
||||||
from .article import *
|
|
||||||
from .page import *
|
|
||||||
from .program import *
|
|
||||||
from .episode import *
|
|
||||||
from .log import *
|
|
||||||
from .sound import *
|
|
||||||
from .station import *
|
|
||||||
from .user_settings import *
|
|
||||||
|
|
||||||
from . import signals
|
from . import signals
|
||||||
|
from .article import Article
|
||||||
|
from .episode import Diffusion, DiffusionQuerySet, Episode
|
||||||
|
from .log import Log, LogArchiver, LogQuerySet
|
||||||
|
from .page import Category, Comment, NavItem, Page, PageQuerySet, StaticPage
|
||||||
|
from .program import (
|
||||||
|
BaseRerun,
|
||||||
|
BaseRerunQuerySet,
|
||||||
|
Program,
|
||||||
|
ProgramChildQuerySet,
|
||||||
|
ProgramQuerySet,
|
||||||
|
Schedule,
|
||||||
|
Stream,
|
||||||
|
)
|
||||||
|
from .sound import Sound, SoundQuerySet, Track
|
||||||
|
from .station import Port, Station, StationQuerySet
|
||||||
|
from .user_settings import UserSettings
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"signals",
|
||||||
|
"Article",
|
||||||
|
"Episode",
|
||||||
|
"Diffusion",
|
||||||
|
"DiffusionQuerySet",
|
||||||
|
"Log",
|
||||||
|
"LogQuerySet",
|
||||||
|
"LogArchiver",
|
||||||
|
"Category",
|
||||||
|
"PageQuerySet",
|
||||||
|
"Page",
|
||||||
|
"StaticPage",
|
||||||
|
"Comment",
|
||||||
|
"NavItem",
|
||||||
|
"Program",
|
||||||
|
"ProgramQuerySet",
|
||||||
|
"Stream",
|
||||||
|
"Schedule",
|
||||||
|
"ProgramChildQuerySet",
|
||||||
|
"BaseRerun",
|
||||||
|
"BaseRerunQuerySet",
|
||||||
|
"Sound",
|
||||||
|
"SoundQuerySet",
|
||||||
|
"Track",
|
||||||
|
"Station",
|
||||||
|
"StationQuerySet",
|
||||||
|
"Port",
|
||||||
|
"UserSettings",
|
||||||
|
)
|
||||||
|
|
|
@ -3,16 +3,14 @@ from django.utils.translation import gettext_lazy as _
|
||||||
from .page import Page
|
from .page import Page
|
||||||
from .program import ProgramChildQuerySet
|
from .program import ProgramChildQuerySet
|
||||||
|
|
||||||
|
__all__ = ("Article",)
|
||||||
__all__ = ('Article',)
|
|
||||||
|
|
||||||
|
|
||||||
class Article(Page):
|
class Article(Page):
|
||||||
detail_url_name = 'article-detail'
|
detail_url_name = "article-detail"
|
||||||
|
|
||||||
objects = ProgramChildQuerySet.as_manager()
|
objects = ProgramChildQuerySet.as_manager()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
verbose_name = _('Article')
|
verbose_name = _("Article")
|
||||||
verbose_name_plural = _('Articles')
|
verbose_name_plural = _("Articles")
|
||||||
|
|
||||||
|
|
|
@@ -3,45 +3,51 @@ import datetime
from django.db import models
from django.db.models import Q
from django.utils import timezone as tz
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from easy_thumbnails.files import get_thumbnailer

from aircox import settings, utils

from .page import Page
from .program import (
    BaseRerun,
    BaseRerunQuerySet,
    ProgramChildQuerySet,
    Schedule,
)

__all__ = ("Episode", "Diffusion", "DiffusionQuerySet")


class Episode(Page):
    objects = ProgramChildQuerySet.as_manager()
    detail_url_name = "episode-detail"
    item_template_name = "aircox/widgets/episode_item.html"

    @property
    def program(self):
        return getattr(self.parent, "program", None)

    @cached_property
    def podcasts(self):
        """Return serialized data about podcasts."""
        from ..serializers import PodcastSerializer

        podcasts = [
            PodcastSerializer(s).data
            for s in self.sound_set.public().order_by("type")
        ]
        if self.cover:
            options = {"size": (128, 128), "crop": "scale"}
            cover = get_thumbnailer(self.cover).get_thumbnail(options).url
        else:
            cover = None

        for index, podcast in enumerate(podcasts):
            podcasts[index]["cover"] = cover
            podcasts[index]["page_url"] = self.get_absolute_url()
            podcasts[index]["page_title"] = self.title
        return podcasts

    @program.setter
@@ -49,8 +55,8 @@ class Episode(Page):
        self.parent = value

    class Meta:
        verbose_name = _("Episode")
        verbose_name_plural = _("Episodes")

    def get_absolute_url(self):
        if not self.is_published:
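`Episode.podcasts` already returns plain dictionaries (serialized sound data plus `cover`, `page_url` and `page_title`), so they can be handed straight to a template or a JSON response. A hedged sketch of a consumer; the view name and URL pattern are illustrative, not part of this commit:

from django.http import JsonResponse

def episode_podcasts(request, slug):
    # Hypothetical endpoint exposing an episode's podcasts as JSON.
    episode = Episode.objects.get(slug=slug)
    return JsonResponse({"podcasts": episode.podcasts})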
@@ -59,82 +65,89 @@ class Episode(Page):

    def save(self, *args, **kwargs):
        if self.parent is None:
            raise ValueError("missing parent program")
        super().save(*args, **kwargs)

    @classmethod
    def get_default_title(cls, page, date):
        return settings.AIRCOX_EPISODE_TITLE.format(
            program=page,
            date=date.strftime(settings.AIRCOX_EPISODE_TITLE_DATE_FORMAT),
        )

    @classmethod
    def get_init_kwargs_from(cls, page, date, title=None, **kwargs):
        """Get default Episode's title."""
        title = (
            settings.AIRCOX_EPISODE_TITLE.format(
                program=page,
                date=date.strftime(settings.AIRCOX_EPISODE_TITLE_DATE_FORMAT),
            )
            if title is None
            else title
        )
        return super().get_init_kwargs_from(
            page, title=title, program=page, **kwargs
        )


class DiffusionQuerySet(BaseRerunQuerySet):
    def episode(self, episode=None, id=None):
        """Diffusions for this episode."""
        return (
            self.filter(episode=episode)
            if id is None
            else self.filter(episode__id=id)
        )

    def on_air(self):
        """On air diffusions."""
        return self.filter(type=Diffusion.TYPE_ON_AIR)

    # TODO: rename to `datetime`
    def now(self, now=None, order=True):
        """Diffusions occuring now."""
        now = now or tz.now()
        qs = self.filter(start__lte=now, end__gte=now).distinct()
        return qs.order_by("start") if order else qs

    def date(self, date=None, order=True):
        """Diffusions occuring date."""
        date = date or datetime.date.today()
        start = tz.datetime.combine(date, datetime.time())
        end = tz.datetime.combine(date, datetime.time(23, 59, 59, 999))
        # start = tz.get_current_timezone().localize(start)
        # end = tz.get_current_timezone().localize(end)
        qs = self.filter(start__range=(start, end))
        return qs.order_by("start") if order else qs

    def at(self, date, order=True):
        """Return diffusions at specified date or datetime."""
        return (
            self.now(date, order)
            if isinstance(date, tz.datetime)
            else self.date(date, order)
        )

    def after(self, date=None):
        """Return a queryset of diffusions that happen after the given date
        (default: today)."""
        date = utils.date_or_default(date)
        if isinstance(date, tz.datetime):
            qs = self.filter(Q(start__gte=date) | Q(end__gte=date))
        else:
            qs = self.filter(Q(start__date__gte=date) | Q(end__date__gte=date))
        return qs.order_by("start")

    def before(self, date=None):
        """Return a queryset of diffusions that finish before the given date
        (default: today)."""
        date = utils.date_or_default(date)
        if isinstance(date, tz.datetime):
            qs = self.filter(start__lt=date)
        else:
            qs = self.filter(start__date__lt=date)
        return qs.order_by("start")

    def range(self, start, end):
        # FIXME can return dates that are out of range...
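The queryset helpers above are meant to chain. A quick usage sketch, with illustrative dates and variable names:

import datetime
from django.utils import timezone as tz

# Today's on-air diffusions, ordered by start time:
today = Diffusion.objects.on_air().date(datetime.date.today())

# Whatever is on air right now, if anything:
current = Diffusion.objects.on_air().now(tz.now()).first()

# Everything still to come after today:
upcoming = Diffusion.objects.on_air().after(datetime.date.today())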
@@ -142,10 +155,9 @@ class DiffusionQuerySet(BaseRerunQuerySet):


class Diffusion(BaseRerun):
    """A Diffusion is an occurrence of a Program that is scheduled on the
    station's timetable. It can be a rerun of a previous diffusion. In such a
    case, use rerun's info instead of its own.

    A Diffusion without any rerun is named Episode (previously, a
    Diffusion was different from an Episode, but in the end, an
@@ -159,29 +171,37 @@ class Diffusion(BaseRerun):
    - cancel: the diffusion has been canceled
    - stop: the diffusion has been manually stopped
    """

    objects = DiffusionQuerySet.as_manager()

    TYPE_ON_AIR = 0x00
    TYPE_UNCONFIRMED = 0x01
    TYPE_CANCEL = 0x02
    TYPE_CHOICES = (
        (TYPE_ON_AIR, _("on air")),
        (TYPE_UNCONFIRMED, _("not confirmed")),
        (TYPE_CANCEL, _("cancelled")),
    )

    episode = models.ForeignKey(
        Episode,
        models.CASCADE,
        verbose_name=_("episode"),
    )
    schedule = models.ForeignKey(
        Schedule,
        models.CASCADE,
        verbose_name=_("schedule"),
        blank=True,
        null=True,
    )
    type = models.SmallIntegerField(
        verbose_name=_("type"),
        default=TYPE_ON_AIR,
        choices=TYPE_CHOICES,
    )
    start = models.DateTimeField(_("start"), db_index=True)
    end = models.DateTimeField(_("end"), db_index=True)
    # port = models.ForeignKey(
    #     'self',
    #     verbose_name = _('port'),
@@ -190,33 +210,33 @@ class Diffusion(BaseRerun):
    #     help_text = _('use this input port'),
    # )

    item_template_name = "aircox/widgets/diffusion_item.html"

    class Meta:
        verbose_name = _("Diffusion")
        verbose_name_plural = _("Diffusions")
        permissions = (
            ("programming", _("edit the diffusions' planification")),
        )

    def __str__(self):
        str_ = "{episode} - {date}".format(
            episode=self.episode and self.episode.title,
            date=self.local_start.strftime("%Y/%m/%d %H:%M%z"),
        )
        if self.initial:
            str_ += " ({})".format(_("rerun"))
        return str_

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        if self.is_initial and self.episode != self._initial["episode"]:
            self.rerun_set.update(episode=self.episode, program=self.program)

    # def save(self, no_check=False, *args, **kwargs):
    #     if self.start != self._initial['start'] or \
    #             self.end != self._initial['end']:
    #         self.check_conflicts()

    def save_rerun(self):
        self.episode = self.initial.episode
@@ -231,85 +251,96 @@ class Diffusion(BaseRerun):

    @property
    def date(self):
        """Return diffusion start as a date."""
        return utils.cast_date(self.start)

    @cached_property
    def local_start(self):
        """Return a version of self.date that is localized to self.timezone;
        This is needed since datetime are stored as UTC date and we want to get
        it as local time."""
        return tz.localtime(self.start, tz.get_current_timezone())

    @property
    def local_end(self):
        """Return a version of self.date that is localized to self.timezone;
        This is needed since datetime are stored as UTC date and we want to get
        it as local time."""
        return tz.localtime(self.end, tz.get_current_timezone())

    @property
    def is_now(self):
        """True if diffusion is currently running."""
        now = tz.now()
        return (
            self.type == self.TYPE_ON_AIR
            and self.start <= now
            and self.end >= now
        )

    @property
    def is_live(self):
        """True if Diffusion is live (False if there are sounds files)."""
        return (
            self.type == self.TYPE_ON_AIR
            and not self.episode.sound_set.archive().count()
        )

    def get_playlist(self, **types):
        """Returns sounds as a playlist (list of *local* archive file path).

        The given arguments are passed to ``get_sounds``.
        """
        from .sound import Sound

        return list(
            self.get_sounds(**types)
            .filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
            .values_list("path", flat=True)
        )

    def get_sounds(self, **types):
        """Return a queryset of sounds related to this diffusion, ordered by
        type then path.

        **types: filter on the given sound types name, as `archive=True`
        """
        from .sound import Sound

        sounds = (self.initial or self).sound_set.order_by("type", "path")
        _in = [
            getattr(Sound.Type, name) for name, value in types.items() if value
        ]

        return sounds.filter(type__in=_in)

    def is_date_in_range(self, date=None):
        """Return true if the given date is in the diffusion's start-end
        range."""
        date = date or tz.now()
        return self.start < date < self.end

    def get_conflicts(self):
        """Return conflicting diffusions queryset."""
        # conflicts=Diffusion.objects.filter(
        #     Q(start__lt=OuterRef('start'), end__gt=OuterRef('end')) |
        #     Q(start__gt=OuterRef('start'), start__lt=OuterRef('end'))
        # )
        # diffs= Diffusion.objects.annotate(conflict_with=Exists(conflicts))
        # .filter(conflict_with=True)
        return (
            Diffusion.objects.filter(
                Q(start__lt=self.start, end__gt=self.start)
                | Q(start__gt=self.start, start__lt=self.end)
            )
            .exclude(pk=self.pk)
            .distinct()
        )

    def check_conflicts(self):
        conflicts = self.get_conflicts()
@@ -320,7 +351,7 @@ class Diffusion(BaseRerun):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._initial = {
            "start": self.start,
            "end": self.end,
            "episode": getattr(self, "episode", None),
        }
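Together, `get_conflicts` and `get_playlist` cover the two usual call sites: validating a new slot and feeding the streamer. A sketch under the assumption that `Sound.Type` exposes an `archive` member; `episode`, `start` and `end` are placeholders:

# Overlap check before confirming a slot:
diffusion = Diffusion(episode=episode, start=start, end=end)
if diffusion.get_conflicts().exists():
    diffusion.type = Diffusion.TYPE_UNCONFIRMED

# Local archive files to hand over to the streamer:
paths = diffusion.get_playlist(archive=True)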
@@ -1,32 +1,34 @@
import datetime
import gzip
import logging
import os
from collections import deque

import yaml

from django.db import models
from django.utils import timezone as tz
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _

from aircox import settings

from .episode import Diffusion
from .sound import Sound, Track
from .station import Station

logger = logging.getLogger("aircox")


__all__ = ("Log", "LogQuerySet", "LogArchiver")


class LogQuerySet(models.QuerySet):
    def station(self, station=None, id=None):
        return (
            self.filter(station=station)
            if id is None
            else self.filter(station_id=id)
        )

    def date(self, date):
        start = tz.datetime.combine(date, datetime.time())
@@ -36,9 +38,11 @@ class LogQuerySet(models.QuerySet):
        # return self.filter(date__date=date)

    def after(self, date):
        return (
            self.filter(date__gte=date)
            if isinstance(date, tz.datetime)
            else self.filter(date__date__gte=date)
        )

    def on_air(self):
        return self.filter(type=Log.TYPE_ON_AIR)
@@ -57,64 +61,80 @@ class LogQuerySet(models.QuerySet):


class Log(models.Model):
    """Log sounds and diffusions that are played on the station.

    This only remember what has been played on the outputs, not on each
    source; Source designate here which source is responsible of that.
    """

    TYPE_STOP = 0x00
    """Source has been stopped, e.g. manually."""
    # Rule: \/ diffusion != null \/ sound != null
    TYPE_START = 0x01
    """Diffusion or sound has been request to be played."""
    TYPE_CANCEL = 0x02
    """Diffusion has been canceled."""
    # Rule: \/ sound != null /\ track == null
    #       \/ sound == null /\ track != null
    #       \/ sound == null /\ track == null /\ comment = sound_path
    TYPE_ON_AIR = 0x03
    """Sound or diffusion occured on air."""
    TYPE_OTHER = 0x04
    """Other log."""
    TYPE_CHOICES = (
        (TYPE_STOP, _("stop")),
        (TYPE_START, _("start")),
        (TYPE_CANCEL, _("cancelled")),
        (TYPE_ON_AIR, _("on air")),
        (TYPE_OTHER, _("other")),
    )

    station = models.ForeignKey(
        Station,
        models.CASCADE,
        verbose_name=_("station"),
        help_text=_("related station"),
    )
    type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
    date = models.DateTimeField(_("date"), default=tz.now, db_index=True)
    source = models.CharField(
        # we use a CharField to avoid loosing logs information if the
        # source is removed
        max_length=64,
        blank=True,
        null=True,
        verbose_name=_("source"),
        help_text=_("identifier of the source related to this log"),
    )
    comment = models.CharField(
        max_length=512,
        blank=True,
        null=True,
        verbose_name=_("comment"),
    )
    sound = models.ForeignKey(
        Sound,
        models.SET_NULL,
        blank=True,
        null=True,
        db_index=True,
        verbose_name=_("Sound"),
    )
    track = models.ForeignKey(
        Track,
        models.SET_NULL,
        blank=True,
        null=True,
        db_index=True,
        verbose_name=_("Track"),
    )
    diffusion = models.ForeignKey(
        Diffusion,
        models.SET_NULL,
        blank=True,
        null=True,
        db_index=True,
        verbose_name=_("Diffusion"),
    )

    objects = LogQuerySet.as_manager()
@@ -126,11 +146,9 @@ class Log(models.Model):
    # FIXME: required????
    @property
    def local_date(self):
        """Return a version of self.date that is localized to self.timezone;
        This is needed since datetime are stored as UTC date and we want to get
        it as local time."""
        return tz.localtime(self.date, tz.get_current_timezone())

    # prepare for the future on crash + ease the use in merged lists with
@@ -140,13 +158,16 @@ class Log(models.Model):
        return self.date

    class Meta:
        verbose_name = _("Log")
        verbose_name_plural = _("Logs")

    def __str__(self):
        return "#{} ({}, {}, {})".format(
            self.pk,
            self.get_type_display(),
            self.source,
            self.local_date.strftime("%Y/%m/%d %H:%M%z"),
        )

    @classmethod
    def __list_append(cls, object_list, items):
@@ -154,15 +175,15 @@ class Log(models.Model):

    @classmethod
    def merge_diffusions(cls, logs, diffs, count=None):
        """Merge logs and diffusions together.

        `logs` can either be a queryset or a list ordered by `Log.date`.
        """
        # TODO: limit count
        # FIXME: log may be iterable (in stats view)
        if isinstance(logs, models.QuerySet):
            logs = list(logs.order_by("-date"))
        diffs = deque(diffs.on_air().before().order_by("-start"))
        object_list = []

        while True:
@@ -177,8 +198,10 @@ class Log(models.Model):
            diff = diffs.popleft()

            # - takes all logs after diff start
            index = next(
                (i for i, v in enumerate(logs) if v.date <= diff.end),
                len(logs),
            )
            if index is not None and index > 0:
                object_list += logs[:index]
                logs = logs[index:]
@@ -186,12 +209,14 @@ class Log(models.Model):
            if len(logs):
                # FIXME
                # - last log while diff is running
                # if logs[0].date > diff.start:
                #    object_list.append(logs[0])

                # - skips logs while diff is running
                index = next(
                    (i for i, v in enumerate(logs) if v.date < diff.start),
                    len(logs),
                )
                if index is not None and index > 0:
                    logs = logs[index:]
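`merge_diffusions` folds finished diffusions into a date-ordered list of logs, which is what the public timetable views need. A minimal sketch; any `DiffusionQuerySet` can be passed since the method filters it itself:

station = Station.objects.first()
logs = Log.objects.station(station).on_air()
timeline = Log.merge_diffusions(logs, Diffusion.objects.all())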
@@ -203,18 +228,22 @@ class Log(models.Model):
    def print(self):
        r = []
        if self.diffusion:
            r.append("diff: " + str(self.diffusion_id))
        if self.sound:
            r.append("sound: " + str(self.sound_id))
        if self.track:
            r.append("track: " + str(self.track_id))
        logger.info(
            "log %s: %s%s",
            str(self),
            self.comment or "",
            " (" + ", ".join(r) + ")" if r else "",
        )


class LogArchiver:
    """Commodity class used to manage archives of logs."""

    @cached_property
    def fields(self):
        return Log._meta.get_fields()
@@ -223,13 +252,14 @@ class LogArchiver:
    def get_path(station, date):
        return os.path.join(
            settings.AIRCOX_LOGS_ARCHIVES_DIR,
            "{}_{}.log.gz".format(date.strftime("%Y%m%d"), station.pk),
        )

    def archive(self, qs, keep=False):
        """Archive logs of the given queryset.

        Delete archived logs if not `keep`. Return the count of archived
        logs
        """
        if not qs.exists():
            return 0
@@ -242,8 +272,10 @@ class LogArchiver:
        # exists yet <3
        for (station, date), logs in logs.items():
            path = self.get_path(station, date)
            with gzip.open(path, "ab") as archive:
                data = yaml.dump(
                    [self.serialize(line) for line in logs]
                ).encode("utf8")
                archive.write(data)

        if not keep:
@@ -253,11 +285,9 @@ class LogArchiver:
    @staticmethod
    def sort_logs(qs):
        """Sort logs by station and date and return a dict of `{
        (station,date): [logs] }`."""
        qs = qs.order_by("date")
        logs = {}
        for log in qs:
            key = (log.station, log.date)
@@ -268,44 +298,45 @@ class LogArchiver:
        return logs

    def serialize(self, log):
        """Serialize log."""
        return {i.attname: getattr(log, i.attname) for i in self.fields}

    def load(self, station, date):
        """Load an archive returning logs in a list."""
        path = self.get_path(station, date)

        if not os.path.exists(path):
            return []

        with gzip.open(path, "rb") as archive:
            data = archive.read()
            logs = yaml.load(data)

        # we need to preload diffusions, sounds and tracks
        rels = {
            "diffusion": self.get_relations(logs, Diffusion, "diffusion"),
            "sound": self.get_relations(logs, Sound, "sound"),
            "track": self.get_relations(logs, Track, "track"),
        }

        def rel_obj(log, attr):
            rel_id = log.get(attr + "_id")
            return rels[attr][rel_id] if rel_id else None

        return [
            Log(
                diffusion=rel_obj(log, "diffusion"),
                sound=rel_obj(log, "sound"),
                track=rel_obj(log, "track"),
                **log
            )
            for log in logs
        ]

    @staticmethod
    def get_relations(logs, model, attr):
        """From a list of dict representing logs, retrieve related objects of
        the given type."""
        attr_id = attr + "_id"
        pks = (log[attr_id] for log in logs if attr_id in log)
        return {rel.pk: rel for rel in model.objects.filter(pk__in=pks)}
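A short sketch of the archiver's round trip: gzip yesterday's logs into `AIRCOX_LOGS_ARCHIVES_DIR`, then read them back later as unsaved `Log` instances. The station lookup is illustrative:

import datetime
from django.utils import timezone as tz

yesterday = tz.now().date() - datetime.timedelta(days=1)
station = Station.objects.first()
archiver = LogArchiver()

# Archive and drop the archived rows from the database:
count = archiver.archive(Log.objects.date(yesterday), keep=False)

# Read the archive back as a list of (unsaved) Log instances:
old_logs = archiver.load(station, yesterday)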
@@ -1,38 +1,42 @@
import re

import bleach
from ckeditor_uploader.fields import RichTextUploadingField
from django.db import models
from django.urls import reverse
from django.utils import timezone as tz
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from filer.fields.image import FilerImageField
from model_utils.managers import InheritanceQuerySet

from .station import Station

__all__ = (
    "Category",
    "PageQuerySet",
    "Page",
    "StaticPage",
    "Comment",
    "NavItem",
)


headline_re = re.compile(
    r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?"
)


class Category(models.Model):
    title = models.CharField(_("title"), max_length=64)
    slug = models.SlugField(_("slug"), max_length=64, db_index=True)

    class Meta:
        verbose_name = _("Category")
        verbose_name_plural = _("Categories")

    def __str__(self):
        return self.title
@@ -49,68 +53,90 @@ class BasePageQuerySet(InheritanceQuerySet):
        return self.filter(status=Page.STATUS_TRASH)

    def parent(self, parent=None, id=None):
        """Return pages having this parent."""
        return (
            self.filter(parent=parent)
            if id is None
            else self.filter(parent__id=id)
        )

    def search(self, q, search_content=True):
        if search_content:
            return self.filter(
                models.Q(title__icontains=q) | models.Q(content__icontains=q)
            )
        return self.filter(title__icontains=q)


class BasePage(models.Model):
    """Base class for publishable content."""

    STATUS_DRAFT = 0x00
    STATUS_PUBLISHED = 0x10
    STATUS_TRASH = 0x20
    STATUS_CHOICES = (
        (STATUS_DRAFT, _("draft")),
        (STATUS_PUBLISHED, _("published")),
        (STATUS_TRASH, _("trash")),
    )

    parent = models.ForeignKey(
        "self",
        models.CASCADE,
        blank=True,
        null=True,
        db_index=True,
        related_name="child_set",
    )
    title = models.CharField(max_length=100)
    slug = models.SlugField(
        _("slug"), max_length=120, blank=True, unique=True, db_index=True
    )
    status = models.PositiveSmallIntegerField(
        _("status"),
        default=STATUS_DRAFT,
        choices=STATUS_CHOICES,
    )
    cover = FilerImageField(
        on_delete=models.SET_NULL,
        verbose_name=_("cover"),
        null=True,
        blank=True,
    )
    content = RichTextUploadingField(
        _("content"),
        blank=True,
        null=True,
    )

    objects = BasePageQuerySet.as_manager()

    detail_url_name = None
    item_template_name = "aircox/widgets/page_item.html"

    class Meta:
        abstract = True

    def __str__(self):
        return "{}".format(self.title or self.pk)

    def save(self, *args, **kwargs):
        if not self.slug:
            self.slug = slugify(self.title)[:100]
            count = Page.objects.filter(slug__startswith=self.slug).count()
            if count:
                self.slug += "-" + str(count)

        if self.parent and not self.cover:
            self.cover = self.parent.cover
        super().save(*args, **kwargs)

    def get_absolute_url(self):
        return (
            reverse(self.detail_url_name, kwargs={"slug": self.slug})
            if self.is_published
            else "#"
        )

    @property
    def is_draft(self):
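The `save` override slugifies the title once and appends a counter when the slug already exists, while `get_absolute_url` degrades to `"#"` for unpublished pages. A small behavioural sketch with illustrative titles:

a = Article(title="Morning show")
a.save()              # slug == "morning-show"

b = Article(title="Morning show")
b.save()              # slug becomes "morning-show-1" since the first one is taken

a.get_absolute_url()  # "#" while still a draft, the real URL once published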
@@ -133,15 +159,15 @@ class BasePage(models.Model):
    @cached_property
    def headline(self):
        if not self.content:
            return ""
        content = bleach.clean(self.content, tags=[], strip=True)
        headline = headline_re.search(content)
        return mark_safe(headline.groupdict()["headline"]) if headline else ""

    @classmethod
    def get_init_kwargs_from(cls, page, **kwargs):
        kwargs.setdefault("cover", page.cover)
        kwargs.setdefault("category", page.category)
        return kwargs

    @classmethod
@@ -151,30 +177,39 @@ class BasePage(models.Model):
class PageQuerySet(BasePageQuerySet):
    def published(self):
        return self.filter(
            status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now()
        )


class Page(BasePage):
    """Base Page model used for articles and other dated content."""

    category = models.ForeignKey(
        Category,
        models.SET_NULL,
        verbose_name=_("category"),
        blank=True,
        null=True,
        db_index=True,
    )
    pub_date = models.DateTimeField(
        _("publication date"), blank=True, null=True, db_index=True
    )
    featured = models.BooleanField(
        _("featured"),
        default=False,
    )
    allow_comments = models.BooleanField(
        _("allow comments"),
        default=True,
    )

    objects = PageQuerySet.as_manager()

    class Meta:
        verbose_name = _("Publication")
        verbose_name_plural = _("Publications")

    def save(self, *args, **kwargs):
        if self.is_published and self.pub_date is None:
|
||||||
|
|
||||||
|
|
||||||
class StaticPage(BasePage):
|
class StaticPage(BasePage):
|
||||||
""" Static page that eventually can be attached to a specific view. """
|
"""Static page that eventually can be attached to a specific view."""
|
||||||
detail_url_name = 'static-page-detail'
|
|
||||||
|
detail_url_name = "static-page-detail"
|
||||||
|
|
||||||
ATTACH_TO_HOME = 0x00
|
ATTACH_TO_HOME = 0x00
|
||||||
ATTACH_TO_DIFFUSIONS = 0x01
|
ATTACH_TO_DIFFUSIONS = 0x01
|
||||||
|
@ -199,25 +235,28 @@ class StaticPage(BasePage):
|
||||||
ATTACH_TO_ARTICLES = 0x05
|
ATTACH_TO_ARTICLES = 0x05
|
||||||
|
|
||||||
ATTACH_TO_CHOICES = (
|
ATTACH_TO_CHOICES = (
|
||||||
(ATTACH_TO_HOME, _('Home page')),
|
(ATTACH_TO_HOME, _("Home page")),
|
||||||
(ATTACH_TO_DIFFUSIONS, _('Diffusions page')),
|
(ATTACH_TO_DIFFUSIONS, _("Diffusions page")),
|
||||||
(ATTACH_TO_LOGS, _('Logs page')),
|
(ATTACH_TO_LOGS, _("Logs page")),
|
||||||
(ATTACH_TO_PROGRAMS, _('Programs list')),
|
(ATTACH_TO_PROGRAMS, _("Programs list")),
|
||||||
(ATTACH_TO_EPISODES, _('Episodes list')),
|
(ATTACH_TO_EPISODES, _("Episodes list")),
|
||||||
(ATTACH_TO_ARTICLES, _('Articles list')),
|
(ATTACH_TO_ARTICLES, _("Articles list")),
|
||||||
)
|
)
|
||||||
VIEWS = {
|
VIEWS = {
|
||||||
ATTACH_TO_HOME: 'home',
|
ATTACH_TO_HOME: "home",
|
||||||
ATTACH_TO_DIFFUSIONS: 'diffusion-list',
|
ATTACH_TO_DIFFUSIONS: "diffusion-list",
|
||||||
ATTACH_TO_LOGS: 'log-list',
|
ATTACH_TO_LOGS: "log-list",
|
||||||
ATTACH_TO_PROGRAMS: 'program-list',
|
ATTACH_TO_PROGRAMS: "program-list",
|
||||||
ATTACH_TO_EPISODES: 'episode-list',
|
ATTACH_TO_EPISODES: "episode-list",
|
||||||
ATTACH_TO_ARTICLES: 'article-list',
|
ATTACH_TO_ARTICLES: "article-list",
|
||||||
}
|
}
|
||||||
|
|
||||||
attach_to = models.SmallIntegerField(
|
attach_to = models.SmallIntegerField(
|
||||||
_('attach to'), choices=ATTACH_TO_CHOICES, blank=True, null=True,
|
_("attach to"),
|
||||||
help_text=_('display this page content to related element'),
|
choices=ATTACH_TO_CHOICES,
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
help_text=_("display this page content to related element"),
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_absolute_url(self):
|
def get_absolute_url(self):
|
||||||
|
@ -228,49 +267,65 @@ class StaticPage(BasePage):
|
||||||
|
|
||||||
class Comment(models.Model):
|
class Comment(models.Model):
|
||||||
page = models.ForeignKey(
|
page = models.ForeignKey(
|
||||||
Page, models.CASCADE, verbose_name=_('related page'),
|
Page,
|
||||||
|
models.CASCADE,
|
||||||
|
verbose_name=_("related page"),
|
||||||
db_index=True,
|
db_index=True,
|
||||||
# TODO: allow_comment filter
|
# TODO: allow_comment filter
|
||||||
)
|
)
|
||||||
nickname = models.CharField(_('nickname'), max_length=32)
|
nickname = models.CharField(_("nickname"), max_length=32)
|
||||||
email = models.EmailField(_('email'), max_length=32)
|
email = models.EmailField(_("email"), max_length=32)
|
||||||
date = models.DateTimeField(auto_now_add=True)
|
date = models.DateTimeField(auto_now_add=True)
|
||||||
content = models.TextField(_('content'), max_length=1024)
|
content = models.TextField(_("content"), max_length=1024)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
verbose_name = _('Comment')
|
verbose_name = _("Comment")
|
||||||
verbose_name_plural = _('Comments')
|
verbose_name_plural = _("Comments")
|
||||||
|
|
||||||
|
|
||||||
class NavItem(models.Model):
|
class NavItem(models.Model):
|
||||||
""" Navigation menu items """
|
"""Navigation menu items."""
|
||||||
|
|
||||||
station = models.ForeignKey(
|
station = models.ForeignKey(
|
||||||
Station, models.CASCADE, verbose_name=_('station'))
|
Station, models.CASCADE, verbose_name=_("station")
|
||||||
menu = models.SlugField(_('menu'), max_length=24)
|
)
|
||||||
order = models.PositiveSmallIntegerField(_('order'))
|
menu = models.SlugField(_("menu"), max_length=24)
|
||||||
text = models.CharField(_('title'), max_length=64)
|
order = models.PositiveSmallIntegerField(_("order"))
|
||||||
url = models.CharField(_('url'), max_length=256, blank=True, null=True)
|
text = models.CharField(_("title"), max_length=64)
|
||||||
page = models.ForeignKey(StaticPage, models.CASCADE, db_index=True,
|
url = models.CharField(_("url"), max_length=256, blank=True, null=True)
|
||||||
verbose_name=_('page'), blank=True, null=True)
|
page = models.ForeignKey(
|
||||||
|
StaticPage,
|
||||||
|
models.CASCADE,
|
||||||
|
db_index=True,
|
||||||
|
verbose_name=_("page"),
|
||||||
|
blank=True,
|
||||||
|
null=True,
|
||||||
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
verbose_name = _('Menu item')
|
verbose_name = _("Menu item")
|
||||||
verbose_name_plural = _('Menu items')
|
verbose_name_plural = _("Menu items")
|
||||||
ordering = ('order', 'pk')
|
ordering = ("order", "pk")
|
||||||
|
|
||||||
def get_url(self):
|
def get_url(self):
|
||||||
return self.url if self.url else \
|
return (
|
||||||
self.page.get_absolute_url() if self.page else None
|
self.url
|
||||||
|
if self.url
|
||||||
|
else self.page.get_absolute_url()
|
||||||
|
if self.page
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
def render(self, request, css_class='', active_class=''):
|
def render(self, request, css_class="", active_class=""):
|
||||||
url = self.get_url()
|
url = self.get_url()
|
||||||
if active_class and request.path.startswith(url):
|
if active_class and request.path.startswith(url):
|
||||||
css_class += ' ' + active_class
|
css_class += " " + active_class
|
||||||
|
|
||||||
if not url:
|
if not url:
|
||||||
return self.text
|
return self.text
|
||||||
elif not css_class:
|
elif not css_class:
|
||||||
return format_html('<a href="{}">{}</a>', url, self.text)
|
return format_html('<a href="{}">{}</a>', url, self.text)
|
||||||
else:
|
else:
|
||||||
return format_html('<a href="{}" class="{}">{}</a>', url,
|
return format_html(
|
||||||
css_class, self.text)
|
'<a href="{}" class="{}">{}</a>', url, css_class, self.text
|
||||||
|
)
|
||||||
|
|
|
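`NavItem.render` is meant to be called from a template tag or context processor. A hedged sketch, assuming the default `navitem_set` reverse accessor on `Station`; `station` and `request` are placeholders:

items = station.navitem_set.filter(menu="top")
links = [
    item.render(request, css_class="nav-link", active_class="active")
    for item in items
]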
@@ -1,9 +1,9 @@
import calendar
import logging
import os
import shutil
from collections import OrderedDict
from enum import IntEnum

import pytz
from django.conf import settings as conf
@@ -12,19 +12,26 @@ from django.db import models
from django.db.models import F
from django.db.models.functions import Concat, Substr
from django.utils import timezone as tz
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _

from aircox import settings, utils

from .page import Page, PageQuerySet
from .station import Station

logger = logging.getLogger("aircox")


__all__ = (
    "Program",
    "ProgramQuerySet",
    "Stream",
    "Schedule",
    "ProgramChildQuerySet",
    "BaseRerun",
    "BaseRerunQuerySet",
)


class ProgramQuerySet(PageQuerySet):
@@ -37,8 +44,7 @@ class ProgramQuerySet(PageQuerySet):


class Program(Page):
    """A Program can either be a Streamed or a Scheduled program.

    A Streamed program is used to generate non-stop random playlists when there
    is not scheduled diffusion. In such a case, a Stream is used to describe
@@ -49,32 +55,35 @@ class Program(Page):
    Renaming a Program rename the corresponding directory to matches the new
    name if it does not exists.
    """

    # explicit foreign key in order to avoid related name clashes
    station = models.ForeignKey(
        Station, models.CASCADE, verbose_name=_("station")
    )
    active = models.BooleanField(
        _("active"),
        default=True,
        help_text=_("if not checked this program is no longer active"),
    )
    sync = models.BooleanField(
        _("syncronise"),
        default=True,
        help_text=_("update later diffusions according to schedule changes"),
    )

    objects = ProgramQuerySet.as_manager()
    detail_url_name = "program-detail"

    @property
    def path(self):
        """Return program's directory path."""
        return os.path.join(
            settings.AIRCOX_PROGRAMS_DIR, self.slug.replace("-", "_")
        )

    @property
    def abspath(self):
        """Return absolute path to program's dir."""
        return os.path.join(conf.MEDIA_ROOT, self.path)

    @property
@ -93,69 +102,88 @@ class Program(Page):
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_from_path(cl, path):
|
def get_from_path(cl, path):
|
||||||
"""
|
"""Return a Program from the given path.
|
||||||
Return a Program from the given path. We assume the path has been
|
|
||||||
given in a previous time by this model (Program.path getter).
|
We assume the path has been given in a previous time by this
|
||||||
|
model (Program.path getter).
|
||||||
"""
|
"""
|
||||||
if path.startswith(settings.AIRCOX_PROGRAMS_DIR_ABS):
|
if path.startswith(settings.AIRCOX_PROGRAMS_DIR_ABS):
|
||||||
path = path.replace(settings.AIRCOX_PROGRAMS_DIR_ABS, '')
|
path = path.replace(settings.AIRCOX_PROGRAMS_DIR_ABS, "")
|
||||||
while path[0] == '/':
|
while path[0] == "/":
|
||||||
path = path[1:]
|
path = path[1:]
|
||||||
path = path[:path.index('/')]
|
path = path[: path.index("/")]
|
||||||
return cl.objects.filter(slug=path.replace('_','-')).first()
|
return cl.objects.filter(slug=path.replace("_", "-")).first()
|
||||||
|
|
||||||
def ensure_dir(self, subdir=None):
|
def ensure_dir(self, subdir=None):
|
||||||
|
"""Make sur the program's dir exists (and optionally subdir).
|
||||||
|
|
||||||
|
Return True if the dir (or subdir) exists.
|
||||||
"""
|
"""
|
||||||
Make sur the program's dir exists (and optionally subdir). Return True
|
path = os.path.join(self.abspath, subdir) if subdir else self.abspath
|
||||||
if the dir (or subdir) exists.
|
|
||||||
"""
|
|
||||||
path = os.path.join(self.abspath, subdir) if subdir else \
|
|
||||||
self.abspath
|
|
||||||
os.makedirs(path, exist_ok=True)
|
os.makedirs(path, exist_ok=True)
|
||||||
return os.path.exists(path)
|
return os.path.exists(path)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
verbose_name = _('Program')
|
verbose_name = _("Program")
|
||||||
verbose_name_plural = _('Programs')
|
verbose_name_plural = _("Programs")
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.title
|
return self.title
|
||||||
|
|
||||||
def save(self, *kargs, **kwargs):
|
def save(self, *kargs, **kwargs):
|
||||||
from .sound import Sound
|
from .sound import Sound
|
||||||
|
|
||||||
super().save(*kargs, **kwargs)
|
super().save(*kargs, **kwargs)
|
||||||
|
|
||||||
# TODO: move in signals
|
# TODO: move in signals
|
||||||
path_ = getattr(self, '__initial_path', None)
|
path_ = getattr(self, "__initial_path", None)
|
||||||
abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
|
abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
|
||||||
if path_ is not None and path_ != self.path and \
|
if (
|
||||||
os.path.exists(abspath) and not os.path.exists(self.abspath):
|
path_ is not None
|
||||||
logger.info('program #%s\'s dir changed to %s - update it.',
|
and path_ != self.path
|
||||||
self.id, self.title)
|
and os.path.exists(abspath)
|
||||||
|
and not os.path.exists(self.abspath)
|
||||||
|
):
|
||||||
|
logger.info(
|
||||||
|
"program #%s's dir changed to %s - update it.",
|
||||||
|
self.id,
|
||||||
|
self.title,
|
||||||
|
)
|
||||||
|
|
||||||
shutil.move(abspath, self.abspath)
|
shutil.move(abspath, self.abspath)
|
||||||
Sound.objects.filter(path__startswith=path_) \
|
Sound.objects.filter(path__startswith=path_).update(
|
||||||
.update(file=Concat('file', Substr(F('file'), len(path_))))
|
file=Concat("file", Substr(F("file"), len(path_)))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
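Aside, not part of the commit: `Program.path` and `Program.get_from_path` above are inverses of each other, the slug's dashes become underscores under the programs directory, and the first path component under that directory maps back to the slug. A minimal standalone sketch of that round trip, with an assumed stand-in for the settings value:

    import os

    PROGRAMS_DIR = "programs"  # illustrative stand-in for settings.AIRCOX_PROGRAMS_DIR

    def slug_to_path(slug):
        # same transform as Program.path: dashes become underscores
        return os.path.join(PROGRAMS_DIR, slug.replace("-", "_"))

    def path_to_slug(path):
        # same transform as Program.get_from_path: strip the base dir,
        # keep the first component, turn underscores back into dashes
        path = path.replace(PROGRAMS_DIR, "").lstrip("/")
        return path.split("/")[0].replace("_", "-")

    assert slug_to_path("morning-show") == "programs/morning_show"
    assert path_to_slug("programs/morning_show/archives/ep1.ogg") == "morning-show"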
class ProgramChildQuerySet(PageQuerySet):
    def station(self, station=None, id=None):
        return (
            self.filter(parent__program__station=station)
            if id is None
            else self.filter(parent__program__station__id=id)
        )

    def program(self, program=None, id=None):
        return self.parent(program, id)


class BaseRerunQuerySet(models.QuerySet):
    """Queryset for BaseRerun (sub)classes."""

    def station(self, station=None, id=None):
        return (
            self.filter(program__station=station)
            if id is None
            else self.filter(program__station__id=id)
        )

    def program(self, program=None, id=None):
        return (
            self.filter(program=program)
            if id is None
            else self.filter(program__id=id)
        )

    def rerun(self):
        return self.filter(initial__isnull=False)
@@ -165,19 +193,27 @@ class BaseRerunQuerySet(models.QuerySet):
class BaseRerun(models.Model):
    """Abstract model offering rerun facilities.

    Assume `start` is a datetime field or attribute implemented by
    subclass.
    """

    program = models.ForeignKey(
        Program,
        models.CASCADE,
        db_index=True,
        verbose_name=_("related program"),
    )
    initial = models.ForeignKey(
        "self",
        models.SET_NULL,
        related_name="rerun_set",
        verbose_name=_("rerun of"),
        limit_choices_to={"initial__isnull": True},
        blank=True,
        null=True,
        db_index=True,
    )

    objects = BaseRerunQuerySet.as_manager()
@@ -212,25 +248,27 @@ class BaseRerun(models.Model):
        return self.initial is not None

    def get_initial(self):
        """Return the initial schedule (self or initial)"""
        return self if self.initial is None else self.initial.get_initial()

    def clean(self):
        super().clean()
        if self.initial is not None and self.initial.start >= self.start:
            raise ValidationError(
                {"initial": _("rerun must happen after original")}
            )


# ? BIG FIXME: self.date is still used as datetime
class Schedule(BaseRerun):
    """A Schedule defines time slots of programs' diffusions.

    It can be an initial run or a rerun (in such case it is linked to
    the related schedule).
    """

    # Frequency for schedules. Basically, it is a mask of bits where each bit
    # is a week. Bits > rank 5 are used for special schedules.
    # Important: the first week is always the first week where the weekday of
    # the schedule is present.
    # For ponctual programs, there is no need for a schedule, only a diffusion
@@ -247,45 +285,55 @@ class Schedule(BaseRerun):
        one_on_two = 0b100000

    date = models.DateField(
        _("date"),
        help_text=_("date of the first diffusion"),
    )
    time = models.TimeField(
        _("time"),
        help_text=_("start time"),
    )
    timezone = models.CharField(
        _("timezone"),
        default=tz.get_current_timezone,
        max_length=100,
        choices=[(x, x) for x in pytz.all_timezones],
        help_text=_("timezone used for the date"),
    )
    duration = models.TimeField(
        _("duration"),
        help_text=_("regular duration"),
    )
    frequency = models.SmallIntegerField(
        _("frequency"),
        choices=[
            (
                int(y),
                {
                    "ponctual": _("ponctual"),
                    "first": _("1st {day} of the month"),
                    "second": _("2nd {day} of the month"),
                    "third": _("3rd {day} of the month"),
                    "fourth": _("4th {day} of the month"),
                    "last": _("last {day} of the month"),
                    "first_and_third": _("1st and 3rd {day} of the month"),
                    "second_and_fourth": _("2nd and 4th {day} of the month"),
                    "every": _("{day}"),
                    "one_on_two": _("one {day} on two"),
                }[x],
            )
            for x, y in Frequency.__members__.items()
        ],
    )

    class Meta:
        verbose_name = _("Schedule")
        verbose_name_plural = _("Schedules")

    def __str__(self):
        return "{} - {}, {}".format(
            self.program.title,
            self.get_frequency_verbose(),
            self.time.strftime("%H:%M"),
        )

    def save_rerun(self, *args, **kwargs):
@@ -295,31 +343,35 @@ class Schedule(BaseRerun):

    @cached_property
    def tz(self):
        """Pytz timezone of the schedule."""
        import pytz

        return pytz.timezone(self.timezone)

    @cached_property
    def start(self):
        """Datetime of the start (timezone unaware)"""
        return tz.datetime.combine(self.date, self.time)

    @cached_property
    def end(self):
        """Datetime of the end."""
        return self.start + utils.to_timedelta(self.duration)

    def get_frequency_verbose(self):
        """Return frequency formated for display."""
        from django.template.defaultfilters import date

        return (
            self.get_frequency_display()
            .format(day=date(self.date, "l"))
            .capitalize()
        )

    # initial cached data
    __initial = None

    def changed(self, fields=["date", "duration", "frequency", "timezone"]):
        initial = self._Schedule__initial

        if not initial:
@@ -334,15 +386,13 @@ class Schedule(BaseRerun):
        return False

    def normalize(self, date):
        """Return a datetime set to schedule's time for the provided date,
        handling timezone (based on schedule's timezone)."""
        date = tz.datetime.combine(date, self.time)
        return self.tz.normalize(self.tz.localize(date))

    def dates_of_month(self, date):
        """Return normalized diffusion dates of provided date's month."""
        if self.frequency == Schedule.Frequency.ponctual:
            return []

@@ -352,7 +402,8 @@ class Schedule(BaseRerun):
        # last of the month
        if freq == Schedule.Frequency.last:
            date = date.replace(
                day=calendar.monthrange(date.year, date.month)[1]
            )
            date_wday = date.weekday()

            # end of month before the wanted weekday: move one week back
@@ -361,56 +412,72 @@ class Schedule(BaseRerun):
            date += tz.timedelta(days=sched_wday - date_wday)
            return [self.normalize(date)]

        # move to the first day of the month that matches the schedule's
        # weekday. Check on SO#3284452 for the formula
        date_wday, month = date.weekday(), date.month
        date += tz.timedelta(
            days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday
        )

        if freq == Schedule.Frequency.one_on_two:
            # - adjust date with modulo 14 (= 2 weeks in days)
            # - there are max 3 "weeks on two" per month
            if (date - self.date).days % 14:
                date += tz.timedelta(days=7)
            dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3))
        else:
            dates = (
                date + tz.timedelta(days=7 * week)
                for week in range(0, 5)
                if freq & (0b1 << week)
            )

        return [self.normalize(date) for date in dates if date.month == month]

    def _exclude_existing_date(self, dates):
        from .episode import Diffusion

        saved = set(
            Diffusion.objects.filter(start__in=dates).values_list(
                "start", flat=True
            )
        )
        return [date for date in dates if date not in saved]

    def diffusions_of_month(self, date):
        """Get episodes and diffusions for month of provided date, including
        reruns.

        :returns: tuple([Episode], [Diffusion])
        """
        from .episode import Diffusion, Episode

        if (
            self.initial is not None
            or self.frequency == Schedule.Frequency.ponctual
        ):
            return [], []

        # dates for self and reruns as (date, initial)
        reruns = [
            (rerun, rerun.date - self.date) for rerun in self.rerun_set.all()
        ]

        dates = OrderedDict((date, None) for date in self.dates_of_month(date))
        dates.update(
            [
                (rerun.normalize(date.date() + delta), date)
                for date in dates.keys()
                for rerun, delta in reruns
            ]
        )

        # remove dates corresponding to existing diffusions
        saved = set(
            Diffusion.objects.filter(
                start__in=dates.keys(), program=self.program, schedule=self
            ).values_list("start", flat=True)
        )

        # make diffs
        duration = utils.to_timedelta(self.duration)
@@ -430,8 +497,12 @@ class Schedule(BaseRerun):
            initial = diffusions[initial]

            diffusions[date] = Diffusion(
                episode=episode,
                schedule=self,
                type=Diffusion.TYPE_ON_AIR,
                initial=initial,
                start=date,
                end=date + duration,
            )
        return episodes.values(), diffusions.values()

@@ -440,36 +511,38 @@ class Schedule(BaseRerun):

        # TODO/FIXME: use validators?
        if self.initial is not None and self.date > self.date:
            raise ValueError("initial must be later")
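Aside, not part of the commit: the `freq & (0b1 << week)` test in `dates_of_month` above treats the low bits of the frequency as occurrence ranks of the schedule's weekday within the month (bit 0 for the first occurrence, bit 1 for the second, and so on). A self-contained sketch of that selection outside Django; the helper and the mask value are illustrative assumptions, not the actual Frequency enum members:

    import calendar
    import datetime

    def weekday_dates(year, month, weekday, freq_mask):
        """Return dates of `weekday` in the month whose 0-based occurrence
        rank is enabled in `freq_mask`."""
        first = datetime.date(year, month, 1)
        # move to the first `weekday` of the month
        date = first + datetime.timedelta(days=(weekday - first.weekday()) % 7)
        dates, week = [], 0
        while date.month == month:
            if freq_mask & (0b1 << week):
                dates.append(date)
            date += datetime.timedelta(days=7)
            week += 1
        return dates

    # bits 1 and 3 set: 2nd and 4th Wednesday of January 2023 (the 11th and 25th)
    print(weekday_dates(2023, 1, calendar.WEDNESDAY, 0b1010))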
class Stream(models.Model):
    """When there are no program scheduled, it is possible to play sounds in
    order to avoid blanks. A Stream is a Program that plays this role, and
    whose linked to a Stream.

    All sounds that are marked as good and that are under the related
    program's archive dir are elligible for the sound's selection.
    """

    program = models.ForeignKey(
        Program,
        models.CASCADE,
        verbose_name=_("related program"),
    )
    delay = models.TimeField(
        _("delay"),
        blank=True,
        null=True,
        help_text=_("minimal delay between two sound plays"),
    )
    begin = models.TimeField(
        _("begin"),
        blank=True,
        null=True,
        help_text=_("used to define a time range this stream is " "played"),
    )
    end = models.TimeField(
        _("end"),
        blank=True,
        null=True,
        help_text=_("used to define a time range this stream is " "played"),
    )
@@ -1,6 +1,4 @@
from django.contrib.auth.models import Group, Permission, User
from django.db import transaction
from django.db.models import signals
from django.dispatch import receiver
@@ -18,9 +16,7 @@ from . import Diffusion, Episode, Page, Program, Schedule
#
@receiver(signals.post_save, sender=User)
def user_default_groups(sender, instance, created, *args, **kwargs):
    """Set users to different default groups."""
    if not created or instance.is_superuser:
        return

@@ -32,7 +28,8 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
    if created and permissions:
        for codename in permissions:
            permission = Permission.objects.filter(
                codename=codename
            ).first()
            if permission:
                group.permissions.add(permission)
        group.save()

@@ -42,43 +39,40 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
@receiver(signals.post_save, sender=Page)
def page_post_save(sender, instance, created, *args, **kwargs):
    if not created and instance.cover:
        Page.objects.filter(parent=instance, cover__isnull=True).update(
            cover=instance.cover
        )


@receiver(signals.post_save, sender=Program)
def program_post_save(sender, instance, created, *args, **kwargs):
    """Clean-up later diffusions when a program becomes inactive."""
    if not instance.active:
        Diffusion.object.program(instance).after(tz.now()).delete()
        Episode.object.parent(instance).filter(diffusion__isnull=True).delete()

    cover = getattr(instance, "__initial_cover", None)
    if cover is None and instance.cover is not None:
        Episode.objects.parent(instance).filter(cover__isnull=True).update(
            cover=instance.cover
        )


@receiver(signals.pre_save, sender=Schedule)
def schedule_pre_save(sender, instance, *args, **kwargs):
    if getattr(instance, "pk") is not None:
        instance._initial = Schedule.objects.get(pk=instance.pk)


@receiver(signals.post_save, sender=Schedule)
def schedule_post_save(sender, instance, created, *args, **kwargs):
    """Handles Schedule's time, duration and timezone changes and update
    corresponding diffusions accordingly."""
    initial = getattr(instance, "_initial", None)
    if not initial or (
        (instance.time, instance.duration, instance.timezone)
        == (initial.time, initial.duration, initial.timezone)
    ):
        return

    today = tz.datetime.today()
@@ -94,14 +88,15 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):

@receiver(signals.pre_delete, sender=Schedule)
def schedule_pre_delete(sender, instance, *args, **kwargs):
    """Delete later corresponding diffusion to a changed schedule."""
    Diffusion.objects.filter(schedule=instance).after(tz.now()).delete()
    Episode.objects.filter(
        diffusion__isnull=True, content__isnull=True, sound__isnull=True
    ).delete()


@receiver(signals.post_delete, sender=Diffusion)
def diffusion_post_delete(sender, instance, *args, **kwargs):
    Episode.objects.filter(
        diffusion__isnull=True, content__isnull=True, sound__isnull=True
    ).delete()
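Aside, not part of the commit: `schedule_pre_save` and `schedule_post_save` above use a snapshot-and-compare pattern, pre_save keeps the previous database row on the instance and post_save diffs it against the saved values. A generic hedged sketch of the same wiring, with assumed names:

    from django.db.models import signals
    from django.dispatch import receiver

    def track_changes(model, fields):
        """Wire a pre_save/post_save pair reporting which of `fields` changed."""

        @receiver(signals.pre_save, sender=model, weak=False)
        def _snapshot(sender, instance, **kwargs):
            # keep the old row on the instance before it gets overwritten
            if instance.pk is not None:
                instance._previous = sender.objects.filter(pk=instance.pk).first()

        @receiver(signals.post_save, sender=model, weak=False)
        def _compare(sender, instance, created, **kwargs):
            previous = getattr(instance, "_previous", None)
            if created or previous is None:
                return
            changed = [
                name for name in fields
                if getattr(previous, name) != getattr(instance, name)
            ]
            if changed:
                print(f"{sender.__name__} #{instance.pk} changed: {changed}")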
@@ -6,18 +6,17 @@ from django.db import models
from django.db.models import Q
from django.utils import timezone as tz
from django.utils.translation import gettext_lazy as _

from taggit.managers import TaggableManager

from aircox import settings

from .episode import Episode
from .program import Program

logger = logging.getLogger("aircox")

__all__ = ("Sound", "SoundQuerySet", "Track")
class SoundQuerySet(models.QuerySet):
@@ -37,122 +36,150 @@ class SoundQuerySet(models.QuerySet):
        return self.exclude(type=Sound.TYPE_REMOVED)

    def public(self):
        """Return sounds available as podcasts."""
        return self.filter(is_public=True)

    def downloadable(self):
        """Return sounds available as podcasts."""
        return self.filter(is_downloadable=True)

    def archive(self):
        """Return sounds that are archives."""
        return self.filter(type=Sound.TYPE_ARCHIVE)

    def path(self, paths):
        if isinstance(paths, str):
            return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", ""))
        return self.filter(
            file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths)
        )

    def playlist(self, archive=True, order_by=True):
        """Return files absolute paths as a flat list (exclude sound without
        path).

        If `order_by` is True, order by path.
        """
        if archive:
            self = self.archive()
        if order_by:
            self = self.order_by("file")
        return [
            os.path.join(conf.MEDIA_ROOT, file)
            for file in self.filter(file__isnull=False).values_list(
                "file", flat=True
            )
        ]

    def search(self, query):
        return self.filter(
            Q(name__icontains=query)
            | Q(file__icontains=query)
            | Q(program__title__icontains=query)
            | Q(episode__title__icontains=query)
        )


# TODO:
# - provide a default name based on program and episode
class Sound(models.Model):
    """A Sound is the representation of a sound file that can be either an
    excerpt or a complete archive of the related diffusion."""

    TYPE_OTHER = 0x00
    TYPE_ARCHIVE = 0x01
    TYPE_EXCERPT = 0x02
    TYPE_REMOVED = 0x03
    TYPE_CHOICES = (
        (TYPE_OTHER, _("other")),
        (TYPE_ARCHIVE, _("archive")),
        (TYPE_EXCERPT, _("excerpt")),
        (TYPE_REMOVED, _("removed")),
    )

    name = models.CharField(_("name"), max_length=64)
    program = models.ForeignKey(
        Program,
        models.CASCADE,
        blank=True,  # NOT NULL
        verbose_name=_("program"),
        help_text=_("program related to it"),
        db_index=True,
    )
    episode = models.ForeignKey(
        Episode,
        models.SET_NULL,
        blank=True,
        null=True,
        verbose_name=_("episode"),
        db_index=True,
    )
    type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
    position = models.PositiveSmallIntegerField(
        _("order"),
        default=0,
        help_text=_("position in the playlist"),
    )

    def _upload_to(self, filename):
        subdir = (
            settings.AIRCOX_SOUND_ARCHIVES_SUBDIR
            if self.type == self.TYPE_ARCHIVE
            else settings.AIRCOX_SOUND_EXCERPTS_SUBDIR
        )
        return os.path.join(self.program.path, subdir, filename)

    file = models.FileField(
        _("file"),
        upload_to=_upload_to,
        max_length=256,
        db_index=True,
        unique=True,
    )
    duration = models.TimeField(
        _("duration"),
        blank=True,
        null=True,
        help_text=_("duration of the sound"),
    )
    mtime = models.DateTimeField(
        _("modification time"),
        blank=True,
        null=True,
        help_text=_("last modification date and time"),
    )
    is_good_quality = models.BooleanField(
        _("good quality"),
        help_text=_("sound meets quality requirements"),
        blank=True,
        null=True,
    )
    is_public = models.BooleanField(
        _("public"),
        help_text=_("whether it is publicly available as podcast"),
        default=False,
    )
    is_downloadable = models.BooleanField(
        _("downloadable"),
        help_text=_(
            "whether it can be publicly downloaded by visitors (sound must be "
            "public)"
        ),
        default=False,
    )

    objects = SoundQuerySet.as_manager()

    class Meta:
        verbose_name = _("Sound")
        verbose_name_plural = _("Sounds")

    @property
    def url(self):
        return self.file and self.file.url

    def __str__(self):
        return "/".join(self.file.path.split("/")[-3:])

    def save(self, check=True, *args, **kwargs):
        if self.episode is not None and self.program is None:
@@ -166,29 +193,28 @@ class Sound(models.Model):

    # TODO: rename get_file_mtime(self)
    def get_mtime(self):
        """Get the last modification date from file."""
        mtime = os.stat(self.file.path).st_mtime
        mtime = tz.datetime.fromtimestamp(mtime)
        mtime = mtime.replace(microsecond=0)
        return tz.make_aware(mtime, tz.get_current_timezone())

    def file_exists(self):
        """Return true if the file still exists."""

        return os.path.exists(self.file.path)

    # TODO: rename to sync_fs()
    def check_on_file(self):
        """Check sound file info again'st self, and update informations if
        needed (do not save).

        Return True if there was changes.
        """
        if not self.file_exists():
            if self.type == self.TYPE_REMOVED:
                return
            logger.debug("sound %s: has been removed", self.file.name)
            self.type = self.TYPE_REMOVED
            return True

@@ -197,9 +223,11 @@ class Sound(models.Model):

        if self.type == self.TYPE_REMOVED and self.program:
            changed = True
            self.type = (
                self.TYPE_ARCHIVE
                if self.file.name.startswith(self.program.archives_path)
                else self.TYPE_EXCERPT
            )

        # check mtime -> reset quality if changed (assume file changed)
        mtime = self.get_mtime()
@@ -207,8 +235,10 @@ class Sound(models.Model):
        if self.mtime != mtime:
            self.mtime = mtime
            self.is_good_quality = None
            logger.debug(
                "sound %s: m_time has changed. Reset quality info",
                self.file.name,
            )
            return True

        return changed
@@ -218,7 +248,7 @@ class Sound(models.Model):
        # FIXME: later, remove date?
        name = os.path.basename(self.file.name)
        name = os.path.splitext(name)[0]
        self.name = name.replace("_", " ").strip()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -226,53 +256,67 @@ class Sound(models.Model):
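Aside, not part of the commit: a standalone illustration of the conversion done by `Sound.get_mtime` above, file st_mtime to a naive datetime, microseconds dropped so values compare stably with what the database stores, then made timezone-aware with Django's utilities (assumes configured Django settings):

    import os
    from datetime import datetime

    from django.utils import timezone

    def file_mtime(path):
        # naive local datetime from the filesystem timestamp
        mtime = datetime.fromtimestamp(os.stat(path).st_mtime)
        # drop microseconds and attach the current timezone
        return timezone.make_aware(
            mtime.replace(microsecond=0), timezone.get_current_timezone()
        )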
class Track(models.Model):
    """Track of a playlist of an object.

    The position can either be expressed as the position in the playlist
    or as the moment in seconds it started.
    """

    episode = models.ForeignKey(
        Episode,
        models.CASCADE,
        blank=True,
        null=True,
        verbose_name=_("episode"),
    )
    sound = models.ForeignKey(
        Sound,
        models.CASCADE,
        blank=True,
        null=True,
        verbose_name=_("sound"),
    )
    position = models.PositiveSmallIntegerField(
        _("order"),
        default=0,
        help_text=_("position in the playlist"),
    )
    timestamp = models.PositiveSmallIntegerField(
        _("timestamp"),
        blank=True,
        null=True,
        help_text=_("position (in seconds)"),
    )
    title = models.CharField(_("title"), max_length=128)
    artist = models.CharField(_("artist"), max_length=128)
    album = models.CharField(_("album"), max_length=128, null=True, blank=True)
    tags = TaggableManager(verbose_name=_("tags"), blank=True)
    year = models.IntegerField(_("year"), blank=True, null=True)
    # FIXME: remove?
    info = models.CharField(
        _("information"),
        max_length=128,
        blank=True,
        null=True,
        help_text=_(
            "additional informations about this track, such as "
            "the version, if is it a remix, features, etc."
        ),
    )

    class Meta:
        verbose_name = _("Track")
        verbose_name_plural = _("Tracks")
        ordering = ("position",)

    def __str__(self):
        return "{self.artist} -- {self.title} -- {self.position}".format(
            self=self
        )

    def save(self, *args, **kwargs):
        if (self.sound is None and self.episode is None) or (
            self.sound is not None and self.episode is not None
        ):
            raise ValueError("sound XOR episode is required")
        super().save(*args, **kwargs)
@@ -1,25 +1,20 @@
import os

from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from filer.fields.image import FilerImageField

from .. import settings

__all__ = ("Station", "StationQuerySet", "Port")
class StationQuerySet(models.QuerySet):
    def default(self, station=None):
        """Return station model instance, using defaults or given one."""
        if station is None:
            return self.order_by("-default", "pk").first()
        return self.filter(pk=station).first()

    def active(self):
@@ -27,66 +22,79 @@ class StationQuerySet(models.QuerySet):
class Station(models.Model):
    """Represents a radio station, to which multiple programs are attached and
    that is used as the top object for everything.

    A Station holds controllers for the audio stream generation too.
    Theses are set up when needed (at the first access to these
    elements) then cached.
    """

    name = models.CharField(_("name"), max_length=64)
    slug = models.SlugField(_("slug"), max_length=64, unique=True)
    # FIXME: remove - should be decided only by Streamer controller + settings
    path = models.CharField(
        _("path"),
        help_text=_("path to the working directory"),
        max_length=256,
        blank=True,
    )
    default = models.BooleanField(
        _("default station"),
        default=False,
        help_text=_("use this station as the main one."),
    )
    active = models.BooleanField(
        _("active"),
        default=True,
        help_text=_("whether this station is still active or not."),
    )
    logo = FilerImageField(
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name=_("Logo"),
    )
    hosts = models.TextField(
        _("website's urls"),
        max_length=512,
        null=True,
        blank=True,
        help_text=_("specify one url per line"),
    )
    audio_streams = models.TextField(
        _("audio streams"),
        max_length=2048,
        null=True,
        blank=True,
        help_text=_(
            "Audio streams urls used by station's player. One url " "a line."
        ),
    )
    default_cover = FilerImageField(
        on_delete=models.SET_NULL,
        verbose_name=_("Default pages' cover"),
        null=True,
        blank=True,
        related_name="+",
    )

    objects = StationQuerySet.as_manager()

    @cached_property
    def streams(self):
        """Audio streams as list of urls."""
        return self.audio_streams.split("\n") if self.audio_streams else []

    def __str__(self):
        return self.name

    def save(self, make_sources=True, *args, **kwargs):
        if not self.path:
            self.path = os.path.join(
                settings.AIRCOX_CONTROLLERS_WORKING_DIR,
                self.slug.replace("-", "_"),
            )

        if self.default:
            qs = Station.objects.filter(default=True)
@@ -99,22 +107,20 @@ class Station(models.Model):
class PortQuerySet(models.QuerySet):
    def active(self, value=True):
        """Active ports."""
        return self.filter(active=value)

    def output(self):
        """Filter in output ports."""
        return self.filter(direction=Port.DIRECTION_OUTPUT)

    def input(self):
        """Fitler in input ports."""
        return self.filter(direction=Port.DIRECTION_INPUT)
class Port(models.Model):
    """Represent an audio input/output for the audio stream generation.

    You might want to take a look to LiquidSoap's documentation
    for the options available for each kind of input/output.
@@ -122,10 +128,13 @@ class Port(models.Model):
    Some port types may be not available depending on the
    direction of the port.
    """

    DIRECTION_INPUT = 0x00
    DIRECTION_OUTPUT = 0x01
    DIRECTION_CHOICES = (
        (DIRECTION_INPUT, _("input")),
        (DIRECTION_OUTPUT, _("output")),
    )

    TYPE_JACK = 0x00
    TYPE_ALSA = 0x01
@@ -135,27 +144,34 @@ class Port(models.Model):
    TYPE_HTTPS = 0x05
    TYPE_FILE = 0x06
    TYPE_CHOICES = (
        (TYPE_JACK, "jack"),
        (TYPE_ALSA, "alsa"),
        (TYPE_PULSEAUDIO, "pulseaudio"),
        (TYPE_ICECAST, "icecast"),
        (TYPE_HTTP, "http"),
        (TYPE_HTTPS, "https"),
        (TYPE_FILE, _("file")),
    )

    station = models.ForeignKey(
        Station, models.CASCADE, verbose_name=_("station")
    )
    direction = models.SmallIntegerField(
        _("direction"), choices=DIRECTION_CHOICES
    )
    type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
    active = models.BooleanField(
        _("active"), default=True, help_text=_("this port is active")
    )
    settings = models.TextField(
        _("port settings"),
        help_text=_(
            "list of comma separated params available; "
            "this is put in the output config file as raw code; "
            "plugin related"
        ),
        blank=True,
        null=True,
    )

    objects = PortQuerySet.as_manager()
@@ -163,22 +179,17 @@ class Port(models.Model):
    def __str__(self):
        return "{direction}: {type} #{id}".format(
            direction=self.get_direction_display(),
            type=self.get_type_display(),
            id=self.pk or "",
        )

    def is_valid_type(self):
        """Return True if the type is available for the given direction."""
        if self.direction == self.DIRECTION_INPUT:
            return self.type not in (self.TYPE_ICECAST, self.TYPE_FILE)

        return self.type not in (self.TYPE_HTTP, self.TYPE_HTTPS)

    def save(self, *args, **kwargs):
        if not self.is_valid_type():
@@ -187,4 +198,3 @@ class Port(models.Model):
            )

        return super().save(*args, **kwargs)
@ -1,16 +1,20 @@
-from django.db import models
 from django.contrib.auth.models import User
+from django.db import models
 from django.utils.translation import gettext_lazy as _

+__all__ = ("UserSettings",)
+

 class UserSettings(models.Model):
-    """
-    Store user's settings.
-    """
+    """Store user's settings."""
+
     user = models.OneToOneField(
-        User, models.CASCADE, verbose_name=_('User'),
-        related_name='aircox_settings')
-    playlist_editor_columns = models.JSONField(
-        _('Playlist Editor Columns'))
+        User,
+        models.CASCADE,
+        verbose_name=_("User"),
+        related_name="aircox_settings",
+    )
+    playlist_editor_columns = models.JSONField(_("Playlist Editor Columns"))
     playlist_editor_sep = models.CharField(
-        _('Playlist Editor Separator'), max_length=16)
+        _("Playlist Editor Separator"), max_length=16
+    )

@ -1,3 +1,12 @@
-from .log import *
-from .sound import *
-from .admin import *
+from .admin import TrackSerializer, UserSettingsSerializer
+from .log import LogInfo, LogInfoSerializer
+from .sound import PodcastSerializer, SoundSerializer
+
+__all__ = (
+    "TrackSerializer",
+    "UserSettingsSerializer",
+    "LogInfo",
+    "LogInfoSerializer",
+    "SoundSerializer",
+    "PodcastSerializer",
+)

@ -1,10 +1,9 @@
 from rest_framework import serializers
-from taggit.serializers import TagListSerializerField, TaggitSerializer
+from taggit.serializers import TaggitSerializer, TagListSerializerField

 from ..models import Track, UserSettings

-
-__all__ = ('TrackSerializer', 'UserSettingsSerializer')
+__all__ = ("TrackSerializer", "UserSettingsSerializer")


 class TrackSerializer(TaggitSerializer, serializers.ModelSerializer):
@ -12,19 +11,29 @@ class TrackSerializer(TaggitSerializer, serializers.ModelSerializer):

     class Meta:
         model = Track
-        fields = ('pk', 'artist', 'title', 'album', 'year', 'position',
-                  'info', 'tags', 'episode', 'sound', 'timestamp')
+        fields = (
+            "pk",
+            "artist",
+            "title",
+            "album",
+            "year",
+            "position",
+            "info",
+            "tags",
+            "episode",
+            "sound",
+            "timestamp",
+        )


 class UserSettingsSerializer(serializers.ModelSerializer):
     # TODO: validate fields values (playlist_editor_columns at least)
     class Meta:
         model = UserSettings
-        fields = ('playlist_editor_columns', 'playlist_editor_sep')
+        fields = ("playlist_editor_columns", "playlist_editor_sep")

     def create(self, validated_data):
-        user = self.context.get('user')
+        user = self.context.get("user")
         if user:
-            validated_data['user_id'] = user.id
+            validated_data["user_id"] = user.id
         return super().create(validated_data)

@ -2,14 +2,13 @@ from rest_framework import serializers
|
||||||
|
|
||||||
from ..models import Diffusion, Log
|
from ..models import Diffusion, Log
|
||||||
|
|
||||||
|
__all__ = ("LogInfo", "LogInfoSerializer")
|
||||||
__all__ = ('LogInfo', 'LogInfoSerializer')
|
|
||||||
|
|
||||||
|
|
||||||
class LogInfo:
|
class LogInfo:
|
||||||
obj = None
|
obj = None
|
||||||
start, end = None, None
|
start, end = None, None
|
||||||
title, artist = '', ''
|
title, artist = "", ""
|
||||||
url, cover = None, None
|
url, cover = None, None
|
||||||
info = None
|
info = None
|
||||||
|
|
||||||
|
@ -20,17 +19,17 @@ class LogInfo:
|
||||||
elif isinstance(obj, Log):
|
elif isinstance(obj, Log):
|
||||||
self.from_log(obj)
|
self.from_log(obj)
|
||||||
else:
|
else:
|
||||||
raise ValueError('`obj` must be a Diffusion or a Track Log.')
|
raise ValueError("`obj` must be a Diffusion or a Track Log.")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def type(self):
|
def type(self):
|
||||||
return 'track' if isinstance(self.obj, Log) else 'diffusion'
|
return "track" if isinstance(self.obj, Log) else "diffusion"
|
||||||
|
|
||||||
def from_diffusion(self, obj):
|
def from_diffusion(self, obj):
|
||||||
episode = obj.episode
|
episode = obj.episode
|
||||||
self.start, self.end = obj.start, obj.end
|
self.start, self.end = obj.start, obj.end
|
||||||
self.title, self.url = episode.title, episode.get_absolute_url()
|
self.title, self.url = episode.title, episode.get_absolute_url()
|
||||||
self.cover = episode.cover and episode.cover.icons['64']
|
self.cover = episode.cover and episode.cover.icons["64"]
|
||||||
self.info = episode.category and episode.category.title
|
self.info = episode.category and episode.category.title
|
||||||
self.obj = obj
|
self.obj = obj
|
||||||
|
|
||||||
|
|
|
@ -2,14 +2,27 @@ from rest_framework import serializers
|
||||||
|
|
||||||
from ..models import Sound
|
from ..models import Sound
|
||||||
|
|
||||||
|
__all__ = ("SoundSerializer", "PodcastSerializer")
|
||||||
|
|
||||||
|
|
||||||
class SoundSerializer(serializers.ModelSerializer):
|
class SoundSerializer(serializers.ModelSerializer):
|
||||||
file = serializers.FileField(use_url=False)
|
file = serializers.FileField(use_url=False)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Sound
|
model = Sound
|
||||||
fields = ['pk', 'name', 'program', 'episode', 'type', 'file',
|
fields = [
|
||||||
'duration', 'mtime', 'is_good_quality', 'is_public', 'url']
|
"pk",
|
||||||
|
"name",
|
||||||
|
"program",
|
||||||
|
"episode",
|
||||||
|
"type",
|
||||||
|
"file",
|
||||||
|
"duration",
|
||||||
|
"mtime",
|
||||||
|
"is_good_quality",
|
||||||
|
"is_public",
|
||||||
|
"url",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class PodcastSerializer(serializers.ModelSerializer):
|
class PodcastSerializer(serializers.ModelSerializer):
|
||||||
|
@ -17,5 +30,14 @@ class PodcastSerializer(serializers.ModelSerializer):
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Sound
|
model = Sound
|
||||||
fields = ['pk', 'name', 'program', 'episode', 'type',
|
fields = [
|
||||||
'duration', 'mtime', 'url', 'is_downloadable']
|
"pk",
|
||||||
|
"name",
|
||||||
|
"program",
|
||||||
|
"episode",
|
||||||
|
"type",
|
||||||
|
"duration",
|
||||||
|
"mtime",
|
||||||
|
"url",
|
||||||
|
"is_downloadable",
|
||||||
|
]
|
||||||
|
|
|
@ -2,61 +2,11 @@ import os
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
|
||||||
# TODO:
|
|
||||||
# - items() iteration
|
|
||||||
# - sub-settings as values
|
|
||||||
# - validate() settings
|
|
||||||
# - Meta inner-class?
|
|
||||||
# - custom settings class instead of default
|
|
||||||
#class BaseSettings:
|
|
||||||
# deprecated = set()
|
|
||||||
#
|
|
||||||
# def __init__(self, user_conf):
|
|
||||||
# if user_conf:
|
|
||||||
# for key, value in user_conf.items():
|
|
||||||
# if not hasattr(self, key):
|
|
||||||
# if key in self.deprecated:
|
|
||||||
# raise ValueError('"{}" config is deprecated'.format(key))
|
|
||||||
# else:
|
|
||||||
# raise ValueError('"{}" is not a config value'.format(key))
|
|
||||||
# setattr(self, key, value)
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#class Settings(BaseSettings):
|
|
||||||
# default_user_groups = {
|
|
||||||
#
|
|
||||||
# }
|
|
||||||
#
|
|
||||||
# programs_dir = os.path.join(settings.MEDIA_ROOT, 'programs'),
|
|
||||||
# """ Programs data directory. """
|
|
||||||
# episode_title = '{program.title} - {date}'
|
|
||||||
# """ Default episodes title. """
|
|
||||||
# episode_title_date_format = '%-d %B %Y'
|
|
||||||
# """ Date format used in episode title. """
|
|
||||||
#
|
|
||||||
# logs_archives_dir = os.path.join(settings.PROJECT_ROOT, 'logs/archives')
|
|
||||||
# """ Directory where logs are saved once archived """
|
|
||||||
# logs_archive_age = 30
|
|
||||||
# """ Default age of log before being archived """
|
|
||||||
#
|
|
||||||
# sounds_default_dir = os.path.join(settings.MEDIA_ROOT, 'programs/defaults')
|
|
||||||
# sound_archive_dir = 'archives'
|
|
||||||
# sound_excerpt_dir = 'excerpts'
|
|
||||||
# sound_quality = {
|
|
||||||
# 'attribute': 'RMS lev dB',
|
|
||||||
# 'range': (-18.0, -8.0),
|
|
||||||
# 'sample_length': 120,
|
|
||||||
# }
|
|
||||||
# sound_ext = ('.ogg', '.flac', '.wav', '.mp3', '.opus')
|
|
||||||
#
|
|
||||||
# # TODO: move into aircox_streamer
|
|
||||||
# streamer_working_dir = '/tmp/aircox'
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#
|
|
||||||
|
|
||||||
def ensure(key, default):
|
def ensure(key, default):
|
||||||
globals()[key] = getattr(settings, key, default)
|
value = getattr(settings, key, default)
|
||||||
|
globals()[key] = value
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
########################################################################
|
########################################################################
|
||||||
|
@ -64,85 +14,101 @@ def ensure(key, default):
|
||||||
########################################################################
|
########################################################################
|
||||||
# group to assign to users at their creation, along with the permissions
|
# group to assign to users at their creation, along with the permissions
|
||||||
# to add to each group.
|
# to add to each group.
|
||||||
ensure('AIRCOX_DEFAULT_USER_GROUPS', {
|
ensure(
|
||||||
'radio hosts': (
|
"AIRCOX_DEFAULT_USER_GROUPS",
|
||||||
# TODO include content_type in order to avoid clash with potential
|
{
|
||||||
# extra applications
|
"radio hosts": (
|
||||||
|
# TODO include content_type in order to avoid clash with potential
|
||||||
# aircox
|
# extra applications
|
||||||
'change_program', 'change_episode', 'change_diffusion',
|
# aircox
|
||||||
'add_comment', 'change_comment', 'delete_comment',
|
"change_program",
|
||||||
'add_article', 'change_article', 'delete_article',
|
"change_episode",
|
||||||
'change_sound',
|
"change_diffusion",
|
||||||
'add_track', 'change_track', 'delete_track',
|
"add_comment",
|
||||||
|
"change_comment",
|
||||||
# taggit
|
"delete_comment",
|
||||||
'add_tag', 'change_tag', 'delete_tag',
|
"add_article",
|
||||||
|
"change_article",
|
||||||
# filer
|
"delete_article",
|
||||||
'add_folder', 'change_folder', 'delete_folder', 'can_use_directory_listing',
|
"change_sound",
|
||||||
'add_image', 'change_image', 'delete_image',
|
"add_track",
|
||||||
),
|
"change_track",
|
||||||
})
|
"delete_track",
|
||||||
|
# taggit
|
||||||
|
"add_tag",
|
||||||
|
"change_tag",
|
||||||
|
"delete_tag",
|
||||||
|
# filer
|
||||||
|
"add_folder",
|
||||||
|
"change_folder",
|
||||||
|
"delete_folder",
|
||||||
|
"can_use_directory_listing",
|
||||||
|
"add_image",
|
||||||
|
"change_image",
|
||||||
|
"delete_image",
|
||||||
|
),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
# Directory for the programs data
|
# Directory for the programs data
|
||||||
ensure('AIRCOX_PROGRAMS_DIR', 'programs')
|
AIRCOX_PROGRAMS_DIR = ensure("AIRCOX_PROGRAMS_DIR", "programs")
|
||||||
ensure('AIRCOX_PROGRAMS_DIR_ABS', os.path.join(settings.MEDIA_ROOT,
|
ensure(
|
||||||
AIRCOX_PROGRAMS_DIR))
|
"AIRCOX_PROGRAMS_DIR_ABS",
|
||||||
|
os.path.join(settings.MEDIA_ROOT, AIRCOX_PROGRAMS_DIR),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
########################################################################
|
########################################################################
|
||||||
# Programs & Episodes
|
# Programs & Episodes
|
||||||
########################################################################
|
########################################################################
|
||||||
# default title for episodes
|
# default title for episodes
|
||||||
ensure('AIRCOX_EPISODE_TITLE', '{program.title} - {date}')
|
ensure("AIRCOX_EPISODE_TITLE", "{program.title} - {date}")
|
||||||
# date format in episode title (python's strftime)
|
# date format in episode title (python's strftime)
|
||||||
ensure('AIRCOX_EPISODE_TITLE_DATE_FORMAT', '%-d %B %Y')
|
ensure("AIRCOX_EPISODE_TITLE_DATE_FORMAT", "%-d %B %Y")
|
||||||
|
|
||||||
########################################################################
|
########################################################################
|
||||||
# Logs & Archives
|
# Logs & Archives
|
||||||
########################################################################
|
########################################################################
|
||||||
# Directory where to save logs' archives
|
# Directory where to save logs' archives
|
||||||
ensure('AIRCOX_LOGS_ARCHIVES_DIR', os.path.join(settings.PROJECT_ROOT, 'logs/archives'))
|
ensure(
|
||||||
|
"AIRCOX_LOGS_ARCHIVES_DIR",
|
||||||
|
os.path.join(settings.PROJECT_ROOT, "logs/archives"),
|
||||||
|
)
|
||||||
# In days, minimal age of a log before it is archived
|
# In days, minimal age of a log before it is archived
|
||||||
ensure('AIRCOX_LOGS_ARCHIVES_AGE', 60)
|
ensure("AIRCOX_LOGS_ARCHIVES_AGE", 60)
|
||||||
|
|
||||||
|
|
||||||
########################################################################
|
########################################################################
|
||||||
# Sounds
|
# Sounds
|
||||||
########################################################################
|
########################################################################
|
||||||
# Sub directory used for the complete episode sounds
|
# Sub directory used for the complete episode sounds
|
||||||
ensure('AIRCOX_SOUND_ARCHIVES_SUBDIR', 'archives')
|
ensure("AIRCOX_SOUND_ARCHIVES_SUBDIR", "archives")
|
||||||
# Sub directory used for the excerpts of the episode
|
# Sub directory used for the excerpts of the episode
|
||||||
ensure('AIRCOX_SOUND_EXCERPTS_SUBDIR', 'excerpts')
|
ensure("AIRCOX_SOUND_EXCERPTS_SUBDIR", "excerpts")
|
||||||
|
|
||||||
# Quality attributes passed to sound_quality_check from sounds_monitor
|
# Quality attributes passed to sound_quality_check from sounds_monitor
|
||||||
ensure('AIRCOX_SOUND_QUALITY', {
|
ensure(
|
||||||
'attribute': 'RMS lev dB',
|
"AIRCOX_SOUND_QUALITY",
|
||||||
'range': (-18.0, -8.0),
|
{
|
||||||
'sample_length': 120,
|
"attribute": "RMS lev dB",
|
||||||
}
|
"range": (-18.0, -8.0),
|
||||||
|
"sample_length": 120,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
# Extension of sound files
|
# Extension of sound files
|
||||||
ensure(
|
ensure("AIRCOX_SOUND_FILE_EXT", (".ogg", ".flac", ".wav", ".mp3", ".opus"))
|
||||||
'AIRCOX_SOUND_FILE_EXT',
|
|
||||||
('.ogg', '.flac', '.wav', '.mp3', '.opus')
|
|
||||||
)
|
|
||||||
|
|
||||||
# Tag sounds as deleted instead of deleting them when file has been removed
|
# Tag sounds as deleted instead of deleting them when file has been removed
|
||||||
# from filesystem (sound monitoring)
|
# from filesystem (sound monitoring)
|
||||||
ensure(
|
ensure("AIRCOX_SOUND_KEEP_DELETED", False)
|
||||||
'AIRCOX_SOUND_KEEP_DELETED',
|
|
||||||
False
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
########################################################################
|
########################################################################
|
||||||
# Streamer & Controllers
|
# Streamer & Controllers
|
||||||
########################################################################
|
########################################################################
|
||||||
# Controllers working directory
|
# Controllers working directory
|
||||||
ensure('AIRCOX_CONTROLLERS_WORKING_DIR', '/tmp/aircox')
|
ensure("AIRCOX_CONTROLLERS_WORKING_DIR", "/tmp/aircox")
|
||||||
|
|
||||||
|
|
||||||
########################################################################
|
########################################################################
|
||||||
|
@ -150,12 +116,10 @@ ensure('AIRCOX_CONTROLLERS_WORKING_DIR', '/tmp/aircox')
|
||||||
########################################################################
|
########################################################################
|
||||||
# Columns for CSV file
|
# Columns for CSV file
|
||||||
ensure(
|
ensure(
|
||||||
'AIRCOX_IMPORT_PLAYLIST_CSV_COLS',
|
"AIRCOX_IMPORT_PLAYLIST_CSV_COLS",
|
||||||
('artist', 'title', 'minutes', 'seconds', 'tags', 'info')
|
("artist", "title", "minutes", "seconds", "tags", "info"),
|
||||||
)
|
)
|
||||||
# Column delimiter of csv text files
|
# Column delimiter of csv text files
|
||||||
ensure('AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER', ';')
|
ensure("AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER", ";")
|
||||||
# Text delimiter of csv text files
|
# Text delimiter of csv text files
|
||||||
ensure('AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE', '"')
|
ensure("AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE", '"')
|
||||||
@ -4,4 +4,3 @@
|
||||||
<input type="datetime-local" value="choice.value" />
|
<input type="datetime-local" value="choice.value" />
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
{% load i18n %}
|
{% load i18n %}
|
||||||
<h3>{% blocktranslate with filter_title=title %} By {{ filter_title }} {% endblocktranslate %}</h3>
|
<h3>{% blocktranslate with filter_title=title %} By {{ filter_title }} {% endblocktranslate %}</h3>
|
||||||
{% block content %}{% endblock %}
|
{% block content %}{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -42,4 +42,3 @@
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -17,5 +17,3 @@
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -80,5 +80,3 @@
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -6,5 +6,3 @@
|
||||||
<img src="{% static "aircox/logo.png" %}"/>
|
<img src="{% static "aircox/logo.png" %}"/>
|
||||||
</a>
|
</a>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -3,4 +3,3 @@
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div v-pre>{{ block.super }}</div>
|
<div v-pre>{{ block.super }}</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -92,4 +92,3 @@
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -28,4 +28,3 @@
|
||||||
</section>
|
</section>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -164,5 +164,3 @@ Usefull context:
|
||||||
<div id="player">{% include "aircox/widgets/player.html" %}</div>
|
<div id="player">{% include "aircox/widgets/player.html" %}</div>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -6,4 +6,3 @@
|
||||||
—
|
—
|
||||||
{{ station.name }}
|
{{ station.name }}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -84,4 +84,3 @@
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -27,4 +27,3 @@
|
||||||
</section>
|
</section>
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -78,5 +78,3 @@
|
||||||
</section>
|
</section>
|
||||||
{{ block.super }}
|
{{ block.super }}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -15,4 +15,3 @@
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -83,5 +83,3 @@
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
</section>
|
</section>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -27,4 +27,3 @@
|
||||||
{% include "aircox/widgets/log_list.html" %}
|
{% include "aircox/widgets/log_list.html" %}
|
||||||
</section>
|
</section>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -88,6 +88,3 @@ Context:
|
||||||
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
@ -60,4 +60,3 @@
|
||||||
</form>
|
</form>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -65,4 +65,3 @@
|
||||||
</section>
|
</section>
|
||||||
{{ block.super }}
|
{{ block.super }}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -4,5 +4,3 @@
|
||||||
{% blocktranslate %}Recently on {{ program }}{% endblocktranslate %}
|
{% blocktranslate %}Recently on {{ program }}{% endblocktranslate %}
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -69,5 +69,3 @@ Context variables:
|
||||||
{% block actions %}{% endblock %}
|
{% block actions %}{% endblock %}
|
||||||
</article>
|
</article>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -44,5 +44,3 @@ An empty date results to a title or a separator
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -8,4 +8,3 @@ Context:
|
||||||
{% include "aircox/widgets/episode_item.html" %}
|
{% include "aircox/widgets/episode_item.html" %}
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
|
|
||||||
|
|
|
@ -19,4 +19,3 @@ Context:
|
||||||
</tr>
|
</tr>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
|
|
|
@ -56,4 +56,3 @@ Context variables:
|
||||||
</button>
|
</button>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -20,4 +20,3 @@ for design review.
|
||||||
{% include "aircox/widgets/track_item.html" %}
|
{% include "aircox/widgets/track_item.html" %}
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
|
|
@ -28,4 +28,3 @@ Context:
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</table>
|
</table>
|
||||||
{% endwith %}
|
{% endwith %}
|
||||||
|
|
||||||
|
|
|
@ -3,4 +3,3 @@
|
||||||
{% block card_title %}
|
{% block card_title %}
|
||||||
{% block title %}{{ block.super }}{% endblock %}
|
{% block title %}{{ block.super }}{% endblock %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
|
|
|
@ -24,5 +24,3 @@ Context:
|
||||||
</ul>
|
</ul>
|
||||||
</nav>
|
</nav>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -46,4 +46,3 @@ The audio player
|
||||||
</template>
|
</template>
|
||||||
</a-player>
|
</a-player>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -15,4 +15,3 @@ List item for a podcast.
|
||||||
:actions="['play']">
|
:actions="['play']">
|
||||||
</a-sound-item>
|
</a-sound-item>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -11,4 +11,3 @@ Context:
|
||||||
— {{ object.artist }}
|
— {{ object.artist }}
|
||||||
{% if object.info %}(<i>{{ object.info }}</i>){% endif %}
|
{% if object.info %}(<i>{{ object.info }}</i>){% endif %}
|
||||||
</span>
|
</span>
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import random
|
|
||||||
import json
|
import json
|
||||||
|
import random
|
||||||
|
|
||||||
from django import template
|
from django import template
|
||||||
from django.contrib.admin.templatetags.admin_urls import admin_urlname
|
from django.contrib.admin.templatetags.admin_urls import admin_urlname
|
||||||
|
@ -11,16 +11,16 @@ random.seed()
|
||||||
register = template.Library()
|
register = template.Library()
|
||||||
|
|
||||||
|
|
||||||
@register.filter(name='admin_url')
|
@register.filter(name="admin_url")
|
||||||
def do_admin_url(obj, arg, pass_id=True):
|
def do_admin_url(obj, arg, pass_id=True):
|
||||||
""" Reverse admin url for object """
|
"""Reverse admin url for object."""
|
||||||
name = admin_urlname(obj._meta, arg)
|
name = admin_urlname(obj._meta, arg)
|
||||||
return reverse(name, args=(obj.id,)) if pass_id else reverse(name)
|
return reverse(name, args=(obj.id,)) if pass_id else reverse(name)
|
||||||
|
|
||||||
|
|
||||||
@register.filter(name='get_tracks')
|
@register.filter(name="get_tracks")
|
||||||
def do_get_tracks(obj):
|
def do_get_tracks(obj):
|
||||||
""" Get a list of track for the provided log, diffusion, or episode """
|
"""Get a list of track for the provided log, diffusion, or episode."""
|
||||||
if isinstance(obj, Log):
|
if isinstance(obj, Log):
|
||||||
return (obj.track,)
|
return (obj.track,)
|
||||||
|
|
||||||
|
@ -29,65 +29,71 @@ def do_get_tracks(obj):
|
||||||
return obj.track_set.all()
|
return obj.track_set.all()
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='has_perm', takes_context=True)
|
@register.simple_tag(name="has_perm", takes_context=True)
|
||||||
def do_has_perm(context, obj, perm, user=None):
|
def do_has_perm(context, obj, perm, user=None):
|
||||||
""" Return True if ``user.has_perm('[APP].[perm]_[MODEL]')`` """
|
"""Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``"""
|
||||||
if user is None:
|
if user is None:
|
||||||
user = context['request'].user
|
user = context["request"].user
|
||||||
return user.has_perm('{}.{}_{}'.format(
|
return user.has_perm(
|
||||||
obj._meta.app_label, perm, obj._meta.model_name))
|
"{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@register.filter(name='is_diffusion')
|
@register.filter(name="is_diffusion")
|
||||||
def do_is_diffusion(obj):
|
def do_is_diffusion(obj):
|
||||||
""" Return True if object is a Diffusion. """
|
"""Return True if object is a Diffusion."""
|
||||||
return isinstance(obj, Diffusion)
|
return isinstance(obj, Diffusion)
|
||||||
|
|
||||||
|
|
||||||
@register.filter(name='json')
|
@register.filter(name="json")
|
||||||
def do_json(obj, fields=""):
|
def do_json(obj, fields=""):
|
||||||
""" Return object as json """
|
"""Return object as json."""
|
||||||
if fields:
|
if fields:
|
||||||
obj = {k: getattr(obj, k, None)
|
obj = {k: getattr(obj, k, None) for k in ",".split(fields)}
|
||||||
for k in ','.split(fields)}
|
|
||||||
return json.dumps(obj)
|
return json.dumps(obj)
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='player_live_attr', takes_context=True)
|
@register.simple_tag(name="player_live_attr", takes_context=True)
|
||||||
def do_player_live_attr(context):
|
def do_player_live_attr(context):
|
||||||
""" Player 'live-args' attribute value """
|
"""Player 'live-args' attribute value."""
|
||||||
station = getattr(context['request'], 'station', None)
|
station = getattr(context["request"], "station", None)
|
||||||
return json.dumps({
|
return json.dumps(
|
||||||
'url': reverse('api:live'),
|
{
|
||||||
'src': station and station.audio_streams.split('\n')
|
"url": reverse("api:live"),
|
||||||
})
|
"src": station and station.audio_streams.split("\n"),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='nav_items', takes_context=True)
|
@register.simple_tag(name="nav_items", takes_context=True)
|
||||||
def do_nav_items(context, menu, **kwargs):
|
def do_nav_items(context, menu, **kwargs):
|
||||||
""" Render navigation items for the provided menu name. """
|
"""Render navigation items for the provided menu name."""
|
||||||
station, request = context['station'], context['request']
|
station, request = context["station"], context["request"]
|
||||||
return [(item, item.render(request, **kwargs))
|
return [
|
||||||
for item in station.navitem_set.filter(menu=menu)]
|
(item, item.render(request, **kwargs))
|
||||||
|
for item in station.navitem_set.filter(menu=menu)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='update_query')
|
@register.simple_tag(name="update_query")
|
||||||
def do_update_query(obj, **kwargs):
|
def do_update_query(obj, **kwargs):
|
||||||
""" Replace provided querydict's values with **kwargs. """
|
"""Replace provided querydict's values with **kwargs."""
|
||||||
for k, v in kwargs.items():
|
for k, v in kwargs.items():
|
||||||
if v is not None:
|
if v is not None:
|
||||||
obj[k] = list(v) if hasattr(v, '__iter__') else [v]
|
obj[k] = list(v) if hasattr(v, "__iter__") else [v]
|
||||||
elif k in obj:
|
elif k in obj:
|
||||||
obj.pop(k)
|
obj.pop(k)
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
|
||||||
@register.filter(name='verbose_name')
|
@register.filter(name="verbose_name")
|
||||||
def do_verbose_name(obj, plural=False):
|
def do_verbose_name(obj, plural=False):
|
||||||
"""
|
"""Return model's verbose name (singular or plural) or `obj` if it is a
|
||||||
Return model's verbose name (singular or plural) or `obj` if it is a
|
string (can act for default values)."""
|
||||||
string (can act for default values).
|
return (
|
||||||
"""
|
obj
|
||||||
return obj if isinstance(obj, str) else \
|
if isinstance(obj, str)
|
||||||
obj._meta.verbose_name_plural if plural else \
|
else obj._meta.verbose_name_plural
|
||||||
obj._meta.verbose_name
|
if plural
|
||||||
|
else obj._meta.verbose_name
|
||||||
|
)
|
||||||
|
|
|
@ -1,63 +1,65 @@
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from django import template
|
from django import template
|
||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
from aircox.serializers.admin import UserSettingsSerializer
|
from aircox.serializers.admin import UserSettingsSerializer
|
||||||
|
|
||||||
|
__all__ = ("register", "do_get_admin_tools", "do_track_inline_data")
|
||||||
__all__ = ('register', 'do_get_admin_tools', 'do_track_inline_data')
|
|
||||||
|
|
||||||
|
|
||||||
register = template.Library()
|
register = template.Library()
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='get_admin_tools')
|
@register.simple_tag(name="get_admin_tools")
|
||||||
def do_get_admin_tools():
|
def do_get_admin_tools():
|
||||||
return admin.site.get_tools()
|
return admin.site.get_tools()
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='track_inline_data', takes_context=True)
|
@register.simple_tag(name="track_inline_data", takes_context=True)
|
||||||
def do_track_inline_data(context, formset):
|
def do_track_inline_data(context, formset):
|
||||||
"""
|
"""Return initial data for playlist editor as dict. Keys are:
|
||||||
Return initial data for playlist editor as dict. Keys are:
|
|
||||||
- ``items``: list of items. Extra keys:
|
- ``items``: list of items. Extra keys:
|
||||||
- ``__error__``: dict of form fields errors
|
- ``__error__``: dict of form fields errors
|
||||||
- ``settings``: user's settings
|
- ``settings``: user's settings
|
||||||
"""
|
"""
|
||||||
items = []
|
items = []
|
||||||
for form in formset.forms:
|
for form in formset.forms:
|
||||||
item = {name: form[name].value()
|
item = {name: form[name].value() for name in form.fields.keys()}
|
||||||
for name in form.fields.keys()}
|
item["__errors__"] = form.errors
|
||||||
item['__errors__'] = form.errors
|
|
||||||
|
|
||||||
# hack for playlist editor
|
# hack for playlist editor
|
||||||
tags = item.get('tags')
|
tags = item.get("tags")
|
||||||
if tags and not isinstance(tags, str):
|
if tags and not isinstance(tags, str):
|
||||||
item['tags'] = ', '.join(tag.name for tag in tags)
|
item["tags"] = ", ".join(tag.name for tag in tags)
|
||||||
items.append(item)
|
items.append(item)
|
||||||
|
|
||||||
data = {"items": items}
|
data = {"items": items}
|
||||||
user = context['request'].user
|
user = context["request"].user
|
||||||
settings = getattr(user, 'aircox_settings', None)
|
settings = getattr(user, "aircox_settings", None)
|
||||||
data['settings'] = settings and UserSettingsSerializer(settings).data
|
data["settings"] = settings and UserSettingsSerializer(settings).data
|
||||||
source = json.dumps(data)
|
source = json.dumps(data)
|
||||||
return source
|
return source
|
||||||
|
|
||||||
|
|
||||||
track_inline_labels_ = {
|
track_inline_labels_ = {
|
||||||
'artist': _('Artist'), 'album': _('Album'), 'title': _('Title'),
|
"artist": _("Artist"),
|
||||||
'tags': _('Tags'), 'year': _('Year'),
|
"album": _("Album"),
|
||||||
'save_settings': _('Save Settings'),
|
"title": _("Title"),
|
||||||
'discard_changes': _('Discard changes'),
|
"tags": _("Tags"),
|
||||||
'columns': _('Columns'),
|
"year": _("Year"),
|
||||||
'add_track': _('Add a track'),
|
"save_settings": _("Save Settings"),
|
||||||
'remove_track': _('Remove'),
|
"discard_changes": _("Discard changes"),
|
||||||
'timestamp': _('Timestamp'),
|
"columns": _("Columns"),
|
||||||
|
"add_track": _("Add a track"),
|
||||||
|
"remove_track": _("Remove"),
|
||||||
|
"timestamp": _("Timestamp"),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@register.simple_tag(name='track_inline_labels')
|
@register.simple_tag(name="track_inline_labels")
|
||||||
def do_track_inline_labels():
|
def do_track_inline_labels():
|
||||||
""" Return labels for columns in playlist editor as dict """
|
"""Return labels for columns in playlist editor as dict."""
|
||||||
return json.dumps({k: str(v) for k, v in track_inline_labels_.items()})
|
return json.dumps({k: str(v) for k, v in track_inline_labels_.items()})
|
||||||
|
|
|
@ -1,2 +1,3 @@
-from .management import *
+from . import management

+__all__ = ("management",)

@ -1,2 +1,15 @@
-from .sound_file import *
-from .sound_monitor import *
+from .sound_file import SoundFileTestCase
+from .sound_monitor import (
+    ModifiedHandlerTestCase,
+    MonitorHandlerTestCase,
+    MoveHandlerTestCase,
+    NotifyHandlerTestCase,
+)
+
+__all__ = (
+    "SoundFileTestCase",
+    "NotifyHandlerTestCase",
+    "MoveHandlerTestCase",
+    "ModifiedHandlerTestCase",
+    "MonitorHandlerTestCase",
+)

@ -7,30 +7,56 @@ from django.utils import timezone as tz
|
||||||
from aircox import models
|
from aircox import models
|
||||||
from aircox.management.sound_file import SoundFile
|
from aircox.management.sound_file import SoundFile
|
||||||
|
|
||||||
|
__all__ = ("SoundFileTestCase",)
|
||||||
__all__ = ('SoundFileTestCase',)
|
|
||||||
|
|
||||||
|
|
||||||
class SoundFileTestCase(TestCase):
|
class SoundFileTestCase(TestCase):
|
||||||
path_infos = {
|
path_infos = {
|
||||||
'test/20220101_10h13_1_sample_1.mp3': {
|
"test/20220101_10h13_1_sample_1.mp3": {
|
||||||
'year': 2022, 'month': 1, 'day': 1, 'hour': 10, 'minute': 13,
|
"year": 2022,
|
||||||
'n': 1, 'name': 'Sample 1'},
|
"month": 1,
|
||||||
'test/20220102_10h13_sample_2.mp3': {
|
"day": 1,
|
||||||
'year': 2022, 'month': 1, 'day': 2, 'hour': 10, 'minute': 13,
|
"hour": 10,
|
||||||
'name': 'Sample 2'},
|
"minute": 13,
|
||||||
'test/20220103_1_sample_3.mp3': {
|
"n": 1,
|
||||||
'year': 2022, 'month': 1, 'day': 3, 'n': 1, 'name': 'Sample 3'},
|
"name": "Sample 1",
|
||||||
'test/20220104_sample_4.mp3': {
|
},
|
||||||
'year': 2022, 'month': 1, 'day': 4, 'name': 'Sample 4'},
|
"test/20220102_10h13_sample_2.mp3": {
|
||||||
'test/20220105.mp3': {
|
"year": 2022,
|
||||||
'year': 2022, 'month': 1, 'day': 5, 'name': '20220105'},
|
"month": 1,
|
||||||
|
"day": 2,
|
||||||
|
"hour": 10,
|
||||||
|
"minute": 13,
|
||||||
|
"name": "Sample 2",
|
||||||
|
},
|
||||||
|
"test/20220103_1_sample_3.mp3": {
|
||||||
|
"year": 2022,
|
||||||
|
"month": 1,
|
||||||
|
"day": 3,
|
||||||
|
"n": 1,
|
||||||
|
"name": "Sample 3",
|
||||||
|
},
|
||||||
|
"test/20220104_sample_4.mp3": {
|
||||||
|
"year": 2022,
|
||||||
|
"month": 1,
|
||||||
|
"day": 4,
|
||||||
|
"name": "Sample 4",
|
||||||
|
},
|
||||||
|
"test/20220105.mp3": {
|
||||||
|
"year": 2022,
|
||||||
|
"month": 1,
|
||||||
|
"day": 5,
|
||||||
|
"name": "20220105",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
subdir_prefix = "test"
|
||||||
|
sound_files = {
|
||||||
|
k: r
|
||||||
|
for k, r in (
|
||||||
|
(path, SoundFile(conf.MEDIA_ROOT + "/" + path))
|
||||||
|
for path in path_infos.keys()
|
||||||
|
)
|
||||||
}
|
}
|
||||||
subdir_prefix = 'test'
|
|
||||||
sound_files = {k: r for k, r in (
|
|
||||||
(path, SoundFile(conf.MEDIA_ROOT + '/' + path))
|
|
||||||
for path in path_infos.keys()
|
|
||||||
)}
|
|
||||||
|
|
||||||
def test_sound_path(self):
|
def test_sound_path(self):
|
||||||
for path, sound_file in self.sound_files.items():
|
for path, sound_file in self.sound_files.items():
|
||||||
|
@ -45,21 +71,25 @@ class SoundFileTestCase(TestCase):
|
||||||
self.assertEqual(expected, result, "path: {}".format(path))
|
self.assertEqual(expected, result, "path: {}".format(path))
|
||||||
|
|
||||||
def _setup_diff(self, program, info):
|
def _setup_diff(self, program, info):
|
||||||
episode = models.Episode(program=program, title='test-episode')
|
episode = models.Episode(program=program, title="test-episode")
|
||||||
at = tz.datetime(**{
|
at = tz.datetime(
|
||||||
k: info[k] for k in ('year', 'month', 'day', 'hour', 'minute')
|
**{
|
||||||
if info.get(k)
|
k: info[k]
|
||||||
})
|
for k in ("year", "month", "day", "hour", "minute")
|
||||||
|
if info.get(k)
|
||||||
|
}
|
||||||
|
)
|
||||||
at = tz.make_aware(at)
|
at = tz.make_aware(at)
|
||||||
diff = models.Diffusion(episode=episode, start=at,
|
diff = models.Diffusion(
|
||||||
end=at+timedelta(hours=1))
|
episode=episode, start=at, end=at + timedelta(hours=1)
|
||||||
|
)
|
||||||
episode.save()
|
episode.save()
|
||||||
diff.save()
|
diff.save()
|
||||||
return diff
|
return diff
|
||||||
|
|
||||||
def test_find_episode(self):
|
def test_find_episode(self):
|
||||||
station = models.Station(name='test-station')
|
station = models.Station(name="test-station")
|
||||||
program = models.Program(station=station, title='test')
|
program = models.Program(station=station, title="test")
|
||||||
station.save()
|
station.save()
|
||||||
program.save()
|
program.save()
|
||||||
|
|
||||||
|
|
|
@ -1,15 +1,21 @@
|
||||||
import concurrent.futures as futures
|
import concurrent.futures as futures
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import time
|
import time
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
|
||||||
from aircox.management.sound_monitor import \
|
from aircox.management.sound_monitor import (
|
||||||
NotifyHandler, MoveHandler, ModifiedHandler, MonitorHandler
|
ModifiedHandler,
|
||||||
|
MonitorHandler,
|
||||||
|
NotifyHandler,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
__all__ = ('NotifyHandlerTestCase', 'MoveHandlerTestCase',
|
"NotifyHandlerTestCase",
|
||||||
'ModifiedHandlerTestCase', 'MonitorHandlerTestCase',)
|
"MoveHandlerTestCase",
|
||||||
|
"ModifiedHandlerTestCase",
|
||||||
|
"MonitorHandlerTestCase",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class FakeEvent:
|
class FakeEvent:
|
||||||
|
@ -57,21 +63,21 @@ class ModifiedHandlerTestCase(TestCase):
|
||||||
class MonitorHandlerTestCase(TestCase):
|
class MonitorHandlerTestCase(TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
pool = futures.ThreadPoolExecutor(2)
|
pool = futures.ThreadPoolExecutor(2)
|
||||||
self.monitor = MonitorHandler('archives', pool)
|
self.monitor = MonitorHandler("archives", pool)
|
||||||
|
|
||||||
def test_submit_new_job(self):
|
def test_submit_new_job(self):
|
||||||
event = FakeEvent(src_path='dummy_src')
|
event = FakeEvent(src_path="dummy_src")
|
||||||
handler = NotifyHandler()
|
handler = NotifyHandler()
|
||||||
result, _ = self.monitor._submit(handler, event, 'up')
|
result, _ = self.monitor._submit(handler, event, "up")
|
||||||
self.assertIs(handler, result)
|
self.assertIs(handler, result)
|
||||||
self.assertIsInstance(handler.future, futures.Future)
|
self.assertIsInstance(handler.future, futures.Future)
|
||||||
self.monitor.pool.shutdown()
|
self.monitor.pool.shutdown()
|
||||||
|
|
||||||
def test_submit_job_exists(self):
|
def test_submit_job_exists(self):
|
||||||
event = FakeEvent(src_path='dummy_src')
|
event = FakeEvent(src_path="dummy_src")
|
||||||
|
|
||||||
job_1, new_1 = self.monitor._submit(WaitHandler(), event, 'up')
|
job_1, new_1 = self.monitor._submit(WaitHandler(), event, "up")
|
||||||
job_2, new_2 = self.monitor._submit(NotifyHandler(), event, 'up')
|
job_2, new_2 = self.monitor._submit(NotifyHandler(), event, "up")
|
||||||
self.assertIs(job_1, job_2)
|
self.assertIs(job_1, job_2)
|
||||||
self.assertTrue(new_1)
|
self.assertTrue(new_1)
|
||||||
self.assertFalse(new_2)
|
self.assertFalse(new_2)
|
||||||
|
|
|
@ -1,18 +1,18 @@
|
||||||
import datetime
|
|
||||||
import calendar
|
import calendar
|
||||||
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
from dateutil.relativedelta import relativedelta
|
|
||||||
|
|
||||||
|
from dateutil.relativedelta import relativedelta
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
from django.utils import timezone as tz
|
from django.utils import timezone as tz
|
||||||
|
|
||||||
from aircox.models import *
|
from aircox.models import Schedule
|
||||||
|
|
||||||
logger = logging.getLogger('aircox.test')
|
logger = logging.getLogger("aircox.test")
|
||||||
logger.setLevel('INFO')
|
logger.setLevel("INFO")
|
||||||
|
|
||||||
|
|
||||||
class ScheduleCheck (TestCase):
|
class ScheduleCheck(TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.schedules = [
|
self.schedules = [
|
||||||
Schedule(
|
Schedule(
|
||||||
|
@ -25,15 +25,16 @@ class ScheduleCheck (TestCase):
|
||||||
|
|
||||||
def test_frequencies(self):
|
def test_frequencies(self):
|
||||||
for schedule in self.schedules:
|
for schedule in self.schedules:
|
||||||
logger.info('- test frequency %s' %
|
logger.info(
|
||||||
schedule.get_frequency_display())
|
"- test frequency %s" % schedule.get_frequency_display()
|
||||||
|
)
|
||||||
date = schedule.date
|
date = schedule.date
|
||||||
count = 24
|
count = 24
|
||||||
while count:
|
while count:
|
||||||
logger.info('- month %(month)s/%(year)s' % {
|
logger.info(
|
||||||
'month': date.month,
|
"- month %(month)s/%(year)s"
|
||||||
'year': date.year
|
% {"month": date.month, "year": date.year}
|
||||||
})
|
)
|
||||||
count -= 1
|
count -= 1
|
||||||
dates = schedule.dates_of_month(date)
|
dates = schedule.dates_of_month(date)
|
||||||
if schedule.frequency == schedule.Frequency.one_on_two:
|
if schedule.frequency == schedule.Frequency.one_on_two:
|
||||||
|
|
aircox/urls.py (131 lines changed)
|
@ -1,18 +1,16 @@
|
||||||
from django.urls import include, path, register_converter
|
from django.urls import include, path, register_converter
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
from rest_framework.routers import DefaultRouter
|
from rest_framework.routers import DefaultRouter
|
||||||
|
|
||||||
from . import models, views, viewsets
|
from . import models, views, viewsets
|
||||||
from .converters import PagePathConverter, DateConverter, WeekConverter
|
from .converters import DateConverter, PagePathConverter, WeekConverter
|
||||||
|
|
||||||
|
__all__ = ["api", "urls"]
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['api', 'urls']
|
register_converter(PagePathConverter, "page_path")
|
||||||
|
register_converter(DateConverter, "date")
|
||||||
|
register_converter(WeekConverter, "week")
|
||||||
register_converter(PagePathConverter, 'page_path')
|
|
||||||
register_converter(DateConverter, 'date')
|
|
||||||
register_converter(WeekConverter, 'week')
|
|
||||||
|
|
||||||
|
|
||||||
# urls = [
|
# urls = [
|
||||||
|
@ -23,69 +21,92 @@ register_converter(WeekConverter, 'week')
|
||||||
|
|
||||||
|
|
||||||
router = DefaultRouter()
|
router = DefaultRouter()
|
||||||
router.register('sound', viewsets.SoundViewSet, basename='sound')
|
router.register("sound", viewsets.SoundViewSet, basename="sound")
|
||||||
router.register('track', viewsets.TrackROViewSet, basename='track')
|
router.register("track", viewsets.TrackROViewSet, basename="track")
|
||||||
|
|
||||||
|
|
||||||
api = [
|
api = [
|
||||||
path('logs/', views.LogListAPIView.as_view(), name='live'),
|
path("logs/", views.LogListAPIView.as_view(), name="live"),
|
||||||
path('user/settings/', viewsets.UserSettingsViewSet.as_view(
|
path(
|
||||||
{'get': 'retrieve', 'post': 'update', 'put': 'update'}),
|
"user/settings/",
|
||||||
name='user-settings'),
|
viewsets.UserSettingsViewSet.as_view(
|
||||||
|
{"get": "retrieve", "post": "update", "put": "update"}
|
||||||
|
),
|
||||||
|
name="user-settings",
|
||||||
|
),
|
||||||
] + router.urls
|
] + router.urls
|
||||||
|
|
||||||
|
|
||||||
urls = [
|
urls = [
|
||||||
path('', views.HomeView.as_view(), name='home'),
|
path("", views.HomeView.as_view(), name="home"),
|
||||||
path('api/', include((api, 'aircox'), namespace='api')),
|
path("api/", include((api, "aircox"), namespace="api")),
|
||||||
|
|
||||||
# path('', views.PageDetailView.as_view(model=models.Article),
|
# path('', views.PageDetailView.as_view(model=models.Article),
|
||||||
# name='home'),
|
# name='home'),
|
||||||
path(_('articles/'),
|
path(
|
||||||
views.ArticleListView.as_view(model=models.Article),
|
_("articles/"),
|
||||||
name='article-list'),
|
views.ArticleListView.as_view(model=models.Article),
|
||||||
path(_('articles/<slug:slug>/'),
|
name="article-list",
|
||||||
views.ArticleDetailView.as_view(),
|
),
|
||||||
name='article-detail'),
|
path(
|
||||||
|
_("articles/<slug:slug>/"),
|
||||||
path(_('episodes/'),
|
views.ArticleDetailView.as_view(),
|
||||||
views.EpisodeListView.as_view(), name='episode-list'),
|
name="article-detail",
|
||||||
path(_('episodes/<slug:slug>/'),
|
),
|
||||||
views.EpisodeDetailView.as_view(), name='episode-detail'),
|
path(_("episodes/"), views.EpisodeListView.as_view(), name="episode-list"),
|
||||||
path(_('week/'),
|
path(
|
||||||
views.DiffusionListView.as_view(), name='diffusion-list'),
|
_("episodes/<slug:slug>/"),
|
||||||
path(_('week/<date:date>/'),
|
views.EpisodeDetailView.as_view(),
|
||||||
views.DiffusionListView.as_view(), name='diffusion-list'),
|
name="episode-detail",
|
||||||
|
),
|
||||||
path(_('logs/'), views.LogListView.as_view(), name='log-list'),
|
path(_("week/"), views.DiffusionListView.as_view(), name="diffusion-list"),
|
||||||
path(_('logs/<date:date>/'), views.LogListView.as_view(), name='log-list'),
|
path(
|
||||||
|
_("week/<date:date>/"),
|
||||||
|
views.DiffusionListView.as_view(),
|
||||||
|
name="diffusion-list",
|
||||||
|
),
|
||||||
|
path(_("logs/"), views.LogListView.as_view(), name="log-list"),
|
||||||
|
path(_("logs/<date:date>/"), views.LogListView.as_view(), name="log-list"),
|
||||||
# path('<page_path:path>', views.route_page, name='page'),
|
# path('<page_path:path>', views.route_page, name='page'),
|
||||||
|
path(
|
||||||
path(_('publications/'),
|
_("publications/"),
|
||||||
views.PageListView.as_view(model=models.Page), name='page-list'),
|
views.PageListView.as_view(model=models.Page),
|
||||||
|
name="page-list",
|
||||||
path(_('pages/'), views.BasePageListView.as_view(
|
),
|
||||||
|
path(
|
||||||
|
_("pages/"),
|
||||||
|
views.BasePageListView.as_view(
|
||||||
model=models.StaticPage,
|
model=models.StaticPage,
|
||||||
queryset=models.StaticPage.objects.filter(attach_to__isnull=True),
|
queryset=models.StaticPage.objects.filter(attach_to__isnull=True),
|
||||||
),
|
),
|
||||||
name='static-page-list'
|
name="static-page-list",
|
||||||
),
|
),
|
||||||
path(_('pages/<slug:slug>/'), views.BasePageDetailView.as_view(
|
path(
|
||||||
|
_("pages/<slug:slug>/"),
|
||||||
|
views.BasePageDetailView.as_view(
|
||||||
model=models.StaticPage,
|
model=models.StaticPage,
|
||||||
queryset=models.StaticPage.objects.filter(attach_to__isnull=True),
|
queryset=models.StaticPage.objects.filter(attach_to__isnull=True),
|
||||||
),
|
),
|
||||||
name='static-page-detail'
|
name="static-page-detail",
|
||||||
|
),
|
||||||
|
path(_("programs/"), views.ProgramListView.as_view(), name="program-list"),
|
||||||
|
path(
|
||||||
|
_("programs/<slug:slug>/"),
|
||||||
|
views.ProgramDetailView.as_view(),
|
||||||
|
name="program-detail",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
_("programs/<slug:parent_slug>/episodes/"),
|
||||||
|
views.EpisodeListView.as_view(),
|
||||||
|
name="episode-list",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
_("programs/<slug:parent_slug>/articles/"),
|
||||||
|
views.ArticleListView.as_view(),
|
||||||
|
name="article-list",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
_("programs/<slug:parent_slug>/publications/"),
|
||||||
|
views.ProgramPageListView.as_view(),
|
||||||
|
name="program-page-list",
|
||||||
),
|
),
|
||||||
|
|
||||||
path(_('programs/'), views.ProgramListView.as_view(),
|
|
||||||
name='program-list'),
|
|
||||||
path(_('programs/<slug:slug>/'),
|
|
||||||
views.ProgramDetailView.as_view(), name='program-detail'),
|
|
||||||
path(_('programs/<slug:parent_slug>/episodes/'),
|
|
||||||
views.EpisodeListView.as_view(), name='episode-list'),
|
|
||||||
path(_('programs/<slug:parent_slug>/articles/'),
|
|
||||||
views.ArticleListView.as_view(), name='article-list'),
|
|
||||||
path(_('programs/<slug:parent_slug>/publications/'),
|
|
||||||
views.ProgramPageListView.as_view(), name='program-page-list'),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
|
@ -1,44 +1,51 @@
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
import django.utils.timezone as tz
|
import django.utils.timezone as tz
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
__all__ = ['Redirect', 'redirect', 'date_range', 'cast_date',
|
"Redirect",
|
||||||
'date_or_default', 'to_timedelta', 'seconds_to_time']
|
"redirect",
|
||||||
|
"date_range",
|
||||||
|
"cast_date",
|
||||||
|
"date_or_default",
|
||||||
|
"to_timedelta",
|
||||||
|
"seconds_to_time",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class Redirect(Exception):
|
class Redirect(Exception):
|
||||||
""" Redirect exception -- see `redirect()`. """
|
"""Redirect exception -- see `redirect()`."""
|
||||||
|
|
||||||
def __init__(self, url):
|
def __init__(self, url):
|
||||||
self.url = url
|
self.url = url
|
||||||
|
|
||||||
|
|
||||||
 def redirect(url):
-    """
-    Raise a Redirect exception in order to response a redirection to client.
-    AircoxMiddleware must be enabled. """
+    """Raise a Redirect exception in order to respond to the client with a
+    redirection.
+
+    AircoxMiddleware must be enabled.
+    """
     raise Redirect(url)

-def str_to_date(value, sep='/'):
-    """
-    Return a date from the provided `value` string, formated as "yyyy/mm/dd"
+def str_to_date(value, sep="/"):
+    """Return a date from the provided `value` string, formatted as "yyyy/mm/dd"
     (or "dd/mm/yyyy" if `reverse` is True).

     Raises ValueError for incorrect value format.
     """
     value = value.split(sep)[:3]
     if len(value) < 3:
-        return ValueError('incorrect date format')
+        raise ValueError("incorrect date format")
     return datetime.date(int(value[0]), int(value[1]), int(value[2]))

def date_range(date, delta=None, **delta_kwargs):
|
def date_range(date, delta=None, **delta_kwargs):
|
||||||
"""
|
"""Return a range of provided date such as `[date-delta, date+delta]`.
|
||||||
Return a range of provided date such as `[date-delta, date+delta]`.
|
|
||||||
:param date: the reference date
|
:param date: the reference date
|
||||||
:param delta: timedelta
|
:param delta: timedelta
|
||||||
:param \**delta_kwargs: timedelta init arguments
|
:param **delta_kwargs: timedelta init arguments
|
||||||
|
|
||||||
Return a datetime range for a given day, as:
|
Return a datetime range for a given day, as:
|
||||||
```(date, 0:0:0:0; date, 23:59:59:999)```.
|
```(date, 0:0:0:0; date, 23:59:59:999)```.
|
||||||
|
@ -48,21 +55,26 @@ def date_range(date, delta=None, **delta_kwargs):
|
||||||
|
|
||||||
|
|
||||||
def cast_date(date, into=datetime.date):
|
def cast_date(date, into=datetime.date):
|
||||||
"""
|
"""Cast a given date into the provided class' instance.
|
||||||
Cast a given date into the provided class' instance. Make datetime
|
|
||||||
aware of timezone.
|
Make datetime aware of timezone.
|
||||||
"""
|
"""
|
||||||
date = into(date.year, date.month, date.day)
|
date = into(date.year, date.month, date.day)
|
||||||
return tz.make_aware(date) if issubclass(into, tz.datetime) else date
|
return tz.make_aware(date) if issubclass(into, tz.datetime) else date
|
||||||
|
|
||||||
|
|
||||||
def date_or_default(date, into=None):
|
def date_or_default(date, into=None):
|
||||||
|
"""Return date if not None, otherwise return now.
|
||||||
|
|
||||||
|
Cast result into provided type if any.
|
||||||
"""
|
"""
|
||||||
Return date if not None, otherwise return now. Cast result into provided
|
date = (
|
||||||
type if any.
|
date
|
||||||
"""
|
if date is not None
|
||||||
date = date if date is not None else datetime.date.today() \
|
else datetime.date.today()
|
||||||
if into is not None and issubclass(into, datetime.date) else tz.now()
|
if into is not None and issubclass(into, datetime.date)
|
||||||
|
else tz.now()
|
||||||
|
)
|
||||||
|
|
||||||
if into is not None:
|
if into is not None:
|
||||||
date = cast_date(date, into)
|
date = cast_date(date, into)
|
||||||
|
@ -73,30 +85,26 @@ def date_or_default(date, into=None):
|
||||||
|
|
||||||
|
|
||||||
def to_timedelta(time):
|
def to_timedelta(time):
|
||||||
"""
|
"""Transform a datetime or a time instance to a timedelta, only using time
|
||||||
Transform a datetime or a time instance to a timedelta,
|
info."""
|
||||||
only using time info
|
|
||||||
"""
|
|
||||||
return datetime.timedelta(
|
return datetime.timedelta(
|
||||||
hours=time.hour,
|
hours=time.hour, minutes=time.minute, seconds=time.second
|
||||||
minutes=time.minute,
|
|
||||||
seconds=time.second
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def to_seconds(time):
|
def to_seconds(time):
|
||||||
""" Return total seconds for provided time """
|
"""Return total seconds for provided time."""
|
||||||
return 3600 * time.hour + 60 * time.minute + time.second
|
return 3600 * time.hour + 60 * time.minute + time.second
|
||||||
|
|
||||||
|
|
||||||
def seconds_to_time(seconds):
|
def seconds_to_time(seconds):
|
||||||
"""
|
"""Seconds to datetime.time."""
|
||||||
Seconds to datetime.time
|
|
||||||
"""
|
|
||||||
seconds, microseconds = divmod(seconds, 1)
|
seconds, microseconds = divmod(seconds, 1)
|
||||||
minutes, seconds = divmod(seconds, 60)
|
minutes, seconds = divmod(seconds, 60)
|
||||||
hours, minutes = divmod(minutes, 60)
|
hours, minutes = divmod(minutes, 60)
|
||||||
return datetime.time(hour=int(hours), minute=int(minutes), second=int(seconds),
|
return datetime.time(
|
||||||
microsecond=int(microseconds*1000000))
|
hour=int(hours),
|
||||||
|
minute=int(minutes),
|
||||||
|
second=int(seconds),
|
||||||
|
microsecond=int(microseconds * 1000000),
|
||||||
|
)
|
||||||
|
|
|
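Reviewer note — a minimal standalone sketch (stdlib only, names mirroring the helpers above; the example values are illustrative) of how `str_to_date` and `seconds_to_time` are expected to behave after this change:

```python
# Standalone sketch of the two date helpers above; not the module itself.
import datetime


def str_to_date(value, sep="/"):
    """Parse "yyyy/mm/dd" (with the given separator) into a datetime.date."""
    parts = value.split(sep)[:3]
    if len(parts) < 3:
        raise ValueError("incorrect date format")
    return datetime.date(int(parts[0]), int(parts[1]), int(parts[2]))


def seconds_to_time(seconds):
    """Convert a number of seconds (possibly fractional) into a datetime.time."""
    seconds, fraction = divmod(seconds, 1)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return datetime.time(
        int(hours), int(minutes), int(seconds), int(fraction * 1_000_000)
    )


print(str_to_date("2023-04-01", sep="-"))  # 2023-04-01
print(seconds_to_time(3725.5))             # 01:02:05.500000
```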
@ -1,12 +1,40 @@
|
||||||
from . import admin
|
from . import admin
|
||||||
|
|
||||||
from .base import BaseView, BaseAPIView
|
|
||||||
from .home import HomeView
|
|
||||||
|
|
||||||
from .article import ArticleDetailView, ArticleListView
|
from .article import ArticleDetailView, ArticleListView
|
||||||
from .episode import EpisodeDetailView, EpisodeListView, DiffusionListView
|
from .base import BaseAPIView, BaseView
|
||||||
from .log import LogListView, LogListAPIView
|
from .episode import DiffusionListView, EpisodeDetailView, EpisodeListView
|
||||||
from .page import BasePageListView, BasePageDetailView, PageListView, PageDetailView
|
from .home import HomeView
|
||||||
from .program import ProgramDetailView, ProgramListView, \
|
from .log import LogListAPIView, LogListView
|
||||||
ProgramPageDetailView, ProgramPageListView
|
from .page import (
|
||||||
|
BasePageDetailView,
|
||||||
|
BasePageListView,
|
||||||
|
PageDetailView,
|
||||||
|
PageListView,
|
||||||
|
)
|
||||||
|
from .program import (
|
||||||
|
ProgramDetailView,
|
||||||
|
ProgramListView,
|
||||||
|
ProgramPageDetailView,
|
||||||
|
ProgramPageListView,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"admin",
|
||||||
|
"ArticleDetailView",
|
||||||
|
"ArticleListView",
|
||||||
|
"BaseAPIView",
|
||||||
|
"BaseView",
|
||||||
|
"DiffusionListView",
|
||||||
|
"EpisodeDetailView",
|
||||||
|
"EpisodeListView",
|
||||||
|
"HomeView",
|
||||||
|
"LogListAPIView",
|
||||||
|
"LogListView",
|
||||||
|
"BasePageDetailView",
|
||||||
|
"BasePageListView",
|
||||||
|
"PageDetailView",
|
||||||
|
"PageListView",
|
||||||
|
"ProgramDetailView",
|
||||||
|
"ProgramListView",
|
||||||
|
"ProgramPageDetailView",
|
||||||
|
"ProgramPageListView",
|
||||||
|
)
|
||||||
|
|
|
@ -3,17 +3,16 @@ from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.views.generic import ListView
|
from django.views.generic import ListView
|
||||||
|
|
||||||
from .log import LogListView
|
|
||||||
from ..models.log import LogArchiver
|
from ..models.log import LogArchiver
|
||||||
|
from .log import LogListView
|
||||||
|
|
||||||
|
__all__ = ["AdminMixin", "StatisticsView"]
|
||||||
__all__ = ['AdminMixin', 'StatisticsView']
|
|
||||||
|
|
||||||
|
|
||||||
class AdminMixin(LoginRequiredMixin, UserPassesTestMixin):
|
class AdminMixin(LoginRequiredMixin, UserPassesTestMixin):
|
||||||
title = ''
|
title = ""
|
||||||
init_app = True
|
init_app = True
|
||||||
""" If true, create vue app. """
|
"""If true, create vue app."""
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def station(self):
|
def station(self):
|
||||||
|
@ -24,19 +23,23 @@ class AdminMixin(LoginRequiredMixin, UserPassesTestMixin):
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
kwargs.update(admin.site.each_context(self.request))
|
kwargs.update(admin.site.each_context(self.request))
|
||||||
kwargs.setdefault('title', self.title)
|
kwargs.setdefault("title", self.title)
|
||||||
kwargs.setdefault('station', self.station)
|
kwargs.setdefault("station", self.station)
|
||||||
kwargs.setdefault('init_app', self.init_app)
|
kwargs.setdefault("init_app", self.init_app)
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
class StatisticsView(AdminMixin, LogListView, ListView):
|
class StatisticsView(AdminMixin, LogListView, ListView):
|
||||||
template_name = 'admin/aircox/statistics.html'
|
template_name = "admin/aircox/statistics.html"
|
||||||
redirect_date_url = 'admin:tools-stats'
|
redirect_date_url = "admin:tools-stats"
|
||||||
title = _('Statistics')
|
title = _("Statistics")
|
||||||
date = None
|
date = None
|
||||||
|
|
||||||
def get_object_list(self, logs, full=False):
|
def get_object_list(self, logs, full=False):
|
||||||
if not logs.exists():
|
if not logs.exists():
|
||||||
logs = LogArchiver().load(self.station, self.date) if self.date else []
|
logs = (
|
||||||
|
LogArchiver().load(self.station, self.date)
|
||||||
|
if self.date
|
||||||
|
else []
|
||||||
|
)
|
||||||
return super().get_object_list(logs, True)
|
return super().get_object_list(logs, True)
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
from ..models import Article, Program, StaticPage
|
from ..models import Article, Program, StaticPage
|
||||||
from .page import PageDetailView, PageListView
|
from .page import PageDetailView, PageListView
|
||||||
|
|
||||||
|
__all__ = ["ArticleDetailView", "ArticleListView"]
|
||||||
__all__ = ['ArticleDetailView', 'ArticleListView']
|
|
||||||
|
|
||||||
|
|
||||||
class ArticleDetailView(PageDetailView):
|
class ArticleDetailView(PageDetailView):
|
||||||
|
@ -10,8 +9,11 @@ class ArticleDetailView(PageDetailView):
|
||||||
model = Article
|
model = Article
|
||||||
|
|
||||||
def get_sidebar_queryset(self):
|
def get_sidebar_queryset(self):
|
||||||
qs = Article.objects.published().select_related('cover') \
|
qs = (
|
||||||
.order_by('-pub_date')
|
Article.objects.published()
|
||||||
|
.select_related("cover")
|
||||||
|
.order_by("-pub_date")
|
||||||
|
)
|
||||||
return qs
|
return qs
|
||||||
|
|
||||||
|
|
||||||
|
@ -20,5 +22,3 @@ class ArticleListView(PageListView):
|
||||||
has_headline = True
|
has_headline = True
|
||||||
parent_model = Program
|
parent_model = Program
|
||||||
attach_to_value = StaticPage.ATTACH_TO_ARTICLES
|
attach_to_value = StaticPage.ATTACH_TO_ARTICLES
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,19 +1,18 @@
|
||||||
from django.views.generic.base import TemplateResponseMixin, ContextMixin
|
|
||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
|
from django.views.generic.base import ContextMixin, TemplateResponseMixin
|
||||||
|
|
||||||
from ..models import Page
|
from ..models import Page
|
||||||
|
|
||||||
|
__all__ = ("BaseView", "BaseAPIView")
|
||||||
__all__ = ('BaseView', 'BaseAPIView')
|
|
||||||
|
|
||||||
|
|
||||||
class BaseView(TemplateResponseMixin, ContextMixin):
|
class BaseView(TemplateResponseMixin, ContextMixin):
|
||||||
has_sidebar = True
|
has_sidebar = True
|
||||||
""" Show side navigation """
|
"""Show side navigation."""
|
||||||
has_filters = False
|
has_filters = False
|
||||||
""" Show filters nav """
|
"""Show filters nav."""
|
||||||
list_count = 5
|
list_count = 5
|
||||||
""" Item count for small lists displayed on page. """
|
"""Item count for small lists displayed on page."""
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def station(self):
|
def station(self):
|
||||||
|
@ -23,37 +22,43 @@ class BaseView(TemplateResponseMixin, ContextMixin):
|
||||||
# return super().get_queryset().station(self.station)
|
# return super().get_queryset().station(self.station)
|
||||||
|
|
||||||
def get_sidebar_queryset(self):
|
def get_sidebar_queryset(self):
|
||||||
""" Return a queryset of items to render on the side nav. """
|
"""Return a queryset of items to render on the side nav."""
|
||||||
return Page.objects.select_subclasses().published() \
|
return (
|
||||||
.order_by('-pub_date')
|
Page.objects.select_subclasses().published().order_by("-pub_date")
|
||||||
|
)
|
||||||
|
|
||||||
def get_sidebar_url(self):
|
def get_sidebar_url(self):
|
||||||
return reverse('page-list')
|
return reverse("page-list")
|
||||||
|
|
||||||
def get_page(self):
|
def get_page(self):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
kwargs.setdefault('station', self.station)
|
kwargs.setdefault("station", self.station)
|
||||||
kwargs.setdefault('page', self.get_page())
|
kwargs.setdefault("page", self.get_page())
|
||||||
kwargs.setdefault('has_filters', self.has_filters)
|
kwargs.setdefault("has_filters", self.has_filters)
|
||||||
|
|
||||||
has_sidebar = kwargs.setdefault('has_sidebar', self.has_sidebar)
|
has_sidebar = kwargs.setdefault("has_sidebar", self.has_sidebar)
|
||||||
if has_sidebar and 'sidebar_object_list' not in kwargs:
|
if has_sidebar and "sidebar_object_list" not in kwargs:
|
||||||
sidebar_object_list = self.get_sidebar_queryset()
|
sidebar_object_list = self.get_sidebar_queryset()
|
||||||
if sidebar_object_list is not None:
|
if sidebar_object_list is not None:
|
||||||
kwargs['sidebar_object_list'] = sidebar_object_list[:self.list_count]
|
kwargs["sidebar_object_list"] = sidebar_object_list[
|
||||||
kwargs['sidebar_list_url'] = self.get_sidebar_url()
|
: self.list_count
|
||||||
|
]
|
||||||
|
kwargs["sidebar_list_url"] = self.get_sidebar_url()
|
||||||
|
|
||||||
if 'audio_streams' not in kwargs:
|
if "audio_streams" not in kwargs:
|
||||||
streams = self.station.audio_streams
|
streams = self.station.audio_streams
|
||||||
streams = streams and streams.split('\n')
|
streams = streams and streams.split("\n")
|
||||||
kwargs['audio_streams'] = streams
|
kwargs["audio_streams"] = streams
|
||||||
|
|
||||||
if 'model' not in kwargs:
|
if "model" not in kwargs:
|
||||||
model = getattr(self, 'model', None) or \
|
model = (
|
||||||
hasattr(self, 'object') and type(self.object)
|
getattr(self, "model", None)
|
||||||
kwargs['model'] = model
|
or hasattr(self, "object")
|
||||||
|
and type(self.object)
|
||||||
|
)
|
||||||
|
kwargs["model"] = model
|
||||||
|
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
@ -66,5 +71,3 @@ class BaseAPIView:
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return super().get_queryset().station(self.station)
|
return super().get_queryset().station(self.station)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,43 +1,45 @@
|
||||||
from collections import OrderedDict
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from django.utils.translation import gettext_lazy as _
|
|
||||||
from django.views.generic import ListView
|
from django.views.generic import ListView
|
||||||
|
|
||||||
from ..filters import EpisodeFilters
|
from ..filters import EpisodeFilters
|
||||||
from ..models import Diffusion, Episode, Program, StaticPage, Sound
|
from ..models import Diffusion, Episode, Program, StaticPage
|
||||||
from .base import BaseView
|
from .base import BaseView
|
||||||
from .program import ProgramPageDetailView
|
from .mixins import AttachedToMixin, GetDateMixin
|
||||||
from .page import PageListView
|
from .page import PageListView
|
||||||
from .mixins import AttachedToMixin, GetDateMixin, ParentMixin
|
from .program import ProgramPageDetailView
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
__all__ = ['EpisodeDetailView', 'EpisodeListView', 'DiffusionListView', 'SoundListView']
|
"EpisodeDetailView",
|
||||||
|
"EpisodeListView",
|
||||||
|
"DiffusionListView",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class EpisodeDetailView(ProgramPageDetailView):
|
class EpisodeDetailView(ProgramPageDetailView):
|
||||||
model = Episode
|
model = Episode
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
if not 'tracks' in kwargs:
|
if "tracks" not in kwargs:
|
||||||
kwargs['tracks'] = self.object.track_set.order_by('position')
|
kwargs["tracks"] = self.object.track_set.order_by("position")
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
class EpisodeListView(PageListView):
|
class EpisodeListView(PageListView):
|
||||||
model = Episode
|
model = Episode
|
||||||
filterset_class = EpisodeFilters
|
filterset_class = EpisodeFilters
|
||||||
item_template_name = 'aircox/widgets/episode_item.html'
|
item_template_name = "aircox/widgets/episode_item.html"
|
||||||
has_headline = True
|
has_headline = True
|
||||||
parent_model = Program
|
parent_model = Program
|
||||||
attach_to_value = StaticPage.ATTACH_TO_EPISODES
|
attach_to_value = StaticPage.ATTACH_TO_EPISODES
|
||||||
|
|
||||||
|
|
||||||
class DiffusionListView(GetDateMixin, AttachedToMixin, BaseView, ListView):
|
class DiffusionListView(GetDateMixin, AttachedToMixin, BaseView, ListView):
|
||||||
""" View for timetables """
|
"""View for timetables."""
|
||||||
|
|
||||||
model = Diffusion
|
model = Diffusion
|
||||||
has_filters = True
|
has_filters = True
|
||||||
redirect_date_url = 'diffusion-list'
|
redirect_date_url = "diffusion-list"
|
||||||
attach_to_value = StaticPage.ATTACH_TO_DIFFUSIONS
|
attach_to_value = StaticPage.ATTACH_TO_DIFFUSIONS
|
||||||
|
|
||||||
def get_date(self):
|
def get_date(self):
|
||||||
|
@ -45,10 +47,9 @@ class DiffusionListView(GetDateMixin, AttachedToMixin, BaseView, ListView):
|
||||||
return date if date is not None else datetime.date.today()
|
return date if date is not None else datetime.date.today()
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return super().get_queryset().date(self.date).order_by('start')
|
return super().get_queryset().date(self.date).order_by("start")
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
start = self.date - datetime.timedelta(days=self.date.weekday())
|
start = self.date - datetime.timedelta(days=self.date.weekday())
|
||||||
dates = [start + datetime.timedelta(days=i) for i in range(0, 7)]
|
dates = [start + datetime.timedelta(days=i) for i in range(0, 7)]
|
||||||
return super().get_context_data(date=self.date, dates=dates, **kwargs)
|
return super().get_context_data(date=self.date, dates=dates, **kwargs)
|
||||||
|
|
||||||
|
|
|
@ -1,19 +1,17 @@
|
||||||
from datetime import date
|
from datetime import date
|
||||||
|
|
||||||
from django.utils.translation import gettext as _
|
|
||||||
from django.utils import timezone as tz
|
from django.utils import timezone as tz
|
||||||
from django.views.generic import ListView
|
from django.views.generic import ListView
|
||||||
|
|
||||||
from ..models import Diffusion, Log, Page, StaticPage
|
from ..models import Diffusion, Log, Page, StaticPage
|
||||||
from .base import BaseView
|
from .base import BaseView
|
||||||
from .page import PageListView
|
|
||||||
|
|
||||||
|
|
||||||
class HomeView(BaseView, ListView):
|
class HomeView(BaseView, ListView):
|
||||||
template_name = 'aircox/home.html'
|
template_name = "aircox/home.html"
|
||||||
model = Diffusion
|
model = Diffusion
|
||||||
attach_to_value = StaticPage.ATTACH_TO_HOME
|
attach_to_value = StaticPage.ATTACH_TO_HOME
|
||||||
queryset = Diffusion.objects.on_air().select_related('episode')
|
queryset = Diffusion.objects.on_air().select_related("episode")
|
||||||
logs_count = 5
|
logs_count = 5
|
||||||
publications_count = 5
|
publications_count = 5
|
||||||
has_filters = False
|
has_filters = False
|
||||||
|
@ -32,15 +30,16 @@ class HomeView(BaseView, ListView):
|
||||||
current_diff = Diffusion.objects.on_air().now(now).first()
|
current_diff = Diffusion.objects.on_air().now(now).first()
|
||||||
next_diffs = Diffusion.objects.on_air().after(now)
|
next_diffs = Diffusion.objects.on_air().after(now)
|
||||||
if current_diff:
|
if current_diff:
|
||||||
diffs = [current_diff] + list(next_diffs.exclude(pk=current_diff.pk)[:2])
|
diffs = [current_diff] + list(
|
||||||
|
next_diffs.exclude(pk=current_diff.pk)[:2]
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
diffs = next_diffs[:3]
|
diffs = next_diffs[:3]
|
||||||
return diffs
|
return diffs
|
||||||
|
|
||||||
def get_last_publications(self):
|
def get_last_publications(self):
|
||||||
# note: with postgres db, possible to use distinct()
|
# note: with postgres db, possible to use distinct()
|
||||||
qs = Page.objects.select_subclasses().published() \
|
qs = Page.objects.select_subclasses().published().order_by("-pub_date")
|
||||||
.order_by('-pub_date')
|
|
||||||
parents = set()
|
parents = set()
|
||||||
items = []
|
items = []
|
||||||
for publication in qs:
|
for publication in qs:
|
||||||
|
@ -54,8 +53,7 @@ class HomeView(BaseView, ListView):
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
context = super().get_context_data(**kwargs)
|
context = super().get_context_data(**kwargs)
|
||||||
context['logs'] = self.get_logs(context['object_list'])
|
context["logs"] = self.get_logs(context["object_list"])
|
||||||
context['next_diffs'] = self.get_next_diffs()
|
context["next_diffs"] = self.get_next_diffs()
|
||||||
context['last_publications'] = self.get_last_publications()[:5]
|
context["last_publications"] = self.get_last_publications()[:5]
|
||||||
return context
|
return context
|
||||||
|
|
||||||
|
|
|
@ -1,22 +1,17 @@
|
||||||
from collections import deque
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
|
from django.utils import timezone as tz
|
||||||
from django.utils.decorators import method_decorator
|
from django.utils.decorators import method_decorator
|
||||||
from django.views.decorators.cache import cache_page
|
from django.views.decorators.cache import cache_page
|
||||||
from django.views.generic import ListView
|
from django.views.generic import ListView
|
||||||
from django.utils import timezone as tz
|
|
||||||
|
|
||||||
from rest_framework.generics import ListAPIView
|
from rest_framework.generics import ListAPIView
|
||||||
from rest_framework import viewsets
|
|
||||||
from rest_framework.decorators import action
|
|
||||||
|
|
||||||
from ..models import Diffusion, Log, StaticPage
|
from ..models import Diffusion, Log, StaticPage
|
||||||
from ..serializers import LogInfo, LogInfoSerializer
|
from ..serializers import LogInfo, LogInfoSerializer
|
||||||
from .base import BaseView, BaseAPIView
|
from .base import BaseAPIView, BaseView
|
||||||
from .mixins import GetDateMixin, AttachedToMixin
|
from .mixins import AttachedToMixin, GetDateMixin
|
||||||
|
|
||||||
|
__all__ = ["LogListMixin", "LogListView"]
|
||||||
__all__ = ['LogListMixin', 'LogListView']
|
|
||||||
|
|
||||||
|
|
||||||
class LogListMixin(GetDateMixin):
|
class LogListMixin(GetDateMixin):
|
||||||
|
@ -32,21 +27,39 @@ class LogListMixin(GetDateMixin):
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
# only get logs for tracks: log for diffusion will be retrieved
|
# only get logs for tracks: log for diffusion will be retrieved
|
||||||
# by the diffusions' queryset.
|
# by the diffusions' queryset.
|
||||||
qs = super().get_queryset().on_air().filter(track__isnull=False) \
|
qs = (
|
||||||
.filter(date__lte=tz.now())
|
super()
|
||||||
return qs.date(self.date) if self.date is not None else \
|
.get_queryset()
|
||||||
qs.after(self.min_date) if self.min_date is not None else qs
|
.on_air()
|
||||||
|
.filter(track__isnull=False)
|
||||||
|
.filter(date__lte=tz.now())
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
qs.date(self.date)
|
||||||
|
if self.date is not None
|
||||||
|
else qs.after(self.min_date)
|
||||||
|
if self.min_date is not None
|
||||||
|
else qs
|
||||||
|
)
|
||||||
|
|
||||||
def get_diffusions_queryset(self):
|
def get_diffusions_queryset(self):
|
||||||
qs = Diffusion.objects.station(self.station).on_air() \
|
qs = (
|
||||||
.filter(start__lte=tz.now())
|
Diffusion.objects.station(self.station)
|
||||||
return qs.date(self.date) if self.date is not None else \
|
.on_air()
|
||||||
qs.after(self.min_date) if self.min_date is not None else qs
|
.filter(start__lte=tz.now())
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
qs.date(self.date)
|
||||||
|
if self.date is not None
|
||||||
|
else qs.after(self.min_date)
|
||||||
|
if self.min_date is not None
|
||||||
|
else qs
|
||||||
|
)
|
||||||
|
|
||||||
def get_object_list(self, logs, full=False):
|
def get_object_list(self, logs, full=False):
|
||||||
"""
|
"""Return diffusions merged to the provided logs iterable.
|
||||||
Return diffusions merged to the provided logs iterable. If
|
|
||||||
`full`, sort items by date without merging.
|
If `full`, sort items by date without merging.
|
||||||
"""
|
"""
|
||||||
diffs = self.get_diffusions_queryset()
|
diffs = self.get_diffusions_queryset()
|
||||||
if self.request.user.is_staff and full:
|
if self.request.user.is_staff and full:
|
||||||
|
@ -55,11 +68,10 @@ class LogListMixin(GetDateMixin):
|
||||||
|
|
||||||
|
|
||||||
class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
|
class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
|
||||||
"""
|
"""Return list of logs for the provided date (from `kwargs` or
|
||||||
Return list of logs for the provided date (from `kwargs` or
|
`request.GET`, defaults to today)."""
|
||||||
`request.GET`, defaults to today).
|
|
||||||
"""
|
redirect_date_url = "log-list"
|
||||||
redirect_date_url = 'log-list'
|
|
||||||
has_filters = True
|
has_filters = True
|
||||||
attach_to_value = StaticPage.ATTACH_TO_LOGS
|
attach_to_value = StaticPage.ATTACH_TO_LOGS
|
||||||
|
|
||||||
|
@ -72,24 +84,28 @@ class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
|
||||||
# `super()...` must be called before updating kwargs, in order
|
# `super()...` must be called before updating kwargs, in order
|
||||||
# to get `self.object_list`
|
# to get `self.object_list`
|
||||||
kwargs = super().get_context_data(**kwargs)
|
kwargs = super().get_context_data(**kwargs)
|
||||||
kwargs.update({
|
kwargs.update(
|
||||||
'date': self.date,
|
{
|
||||||
'dates': (today - datetime.timedelta(days=i) for i in range(0, 7)),
|
"date": self.date,
|
||||||
'object_list': self.get_object_list(self.object_list),
|
"dates": (
|
||||||
})
|
today - datetime.timedelta(days=i) for i in range(0, 7)
|
||||||
|
),
|
||||||
|
"object_list": self.get_object_list(self.object_list),
|
||||||
|
}
|
||||||
|
)
|
||||||
return kwargs
|
return kwargs
|
||||||
|
|
||||||
|
|
||||||
# Logs are accessible through API only with this list view
|
# Logs are accessible through API only with this list view
|
||||||
class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
|
class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
|
||||||
"""
|
"""Return logs list, including diffusions. By default return logs of the
|
||||||
Return logs list, including diffusions. By default return logs of
|
last 30 minutes.
|
||||||
the last 30 minutes.
|
|
||||||
|
|
||||||
Available GET parameters:
|
Available GET parameters:
|
||||||
- "date": return logs for a specified date (
|
- "date": return logs for a specified date (
|
||||||
- "full": (staff user only) don't merge diffusion and logs
|
- "full": (staff user only) don't merge diffusion and logs
|
||||||
"""
|
"""
|
||||||
|
|
||||||
serializer_class = LogInfoSerializer
|
serializer_class = LogInfoSerializer
|
||||||
queryset = Log.objects.all()
|
queryset = Log.objects.all()
|
||||||
|
|
||||||
|
@ -107,7 +123,7 @@ class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
|
||||||
return [LogInfo(obj) for obj in super().get_object_list(logs, full)]
|
return [LogInfo(obj) for obj in super().get_object_list(logs, full)]
|
||||||
|
|
||||||
def get_serializer(self, queryset, *args, **kwargs):
|
def get_serializer(self, queryset, *args, **kwargs):
|
||||||
full = bool(self.request.GET.get('full'))
|
full = bool(self.request.GET.get("full"))
|
||||||
return super().get_serializer(self.get_object_list(queryset, full),
|
return super().get_serializer(
|
||||||
*args, **kwargs)
|
self.get_object_list(queryset, full), *args, **kwargs
|
||||||
|
)
|
||||||
|
|
|
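Reviewer note — a hedged sketch of how a client could call the log list API described in the docstring above; the host and the `/api/logs/` path are assumptions, only the `date` and `full` GET parameters come from this diff:

```python
# Hypothetical client call to the log list API (endpoint URL is an assumption).
import requests

response = requests.get(
    "https://radio.example.org/api/logs/",
    params={"date": "2023-04-01"},  # "full" is staff-only, omitted here
    timeout=10,
)
response.raise_for_status()
for entry in response.json():
    print(entry)
```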
@ -1,49 +1,54 @@
|
||||||
from django.shortcuts import get_object_or_404, redirect
|
from django.shortcuts import get_object_or_404, redirect
|
||||||
from django.urls import reverse
|
|
||||||
|
|
||||||
from ..utils import str_to_date
|
|
||||||
from ..models import StaticPage
|
from ..models import StaticPage
|
||||||
|
from ..utils import str_to_date
|
||||||
|
|
||||||
|
__all__ = ["GetDateMixin", "ParentMixin", "AttachedToMixin"]
|
||||||
__all__ = ['GetDateMixin', 'ParentMixin', 'AttachedToMixin']
|
|
||||||
|
|
||||||
|
|
||||||
class GetDateMixin:
|
class GetDateMixin:
|
||||||
"""
|
"""Mixin offering utils to get date by `request.GET` or `kwargs['date']`"""
|
||||||
Mixin offering utils to get date by `request.GET` or
|
|
||||||
`kwargs['date']`
|
|
||||||
"""
|
|
||||||
date = None
|
date = None
|
||||||
redirect_date_url = None
|
redirect_date_url = None
|
||||||
|
|
||||||
def get_date(self):
|
def get_date(self):
|
||||||
date = self.request.GET.get('date')
|
date = self.request.GET.get("date")
|
||||||
return str_to_date(date, '-') if date is not None else \
|
return (
|
||||||
self.kwargs['date'] if 'date' in self.kwargs else None
|
str_to_date(date, "-")
|
||||||
|
if date is not None
|
||||||
|
else self.kwargs["date"]
|
||||||
|
if "date" in self.kwargs
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
def get(self, *args, **kwargs):
|
def get(self, *args, **kwargs):
|
||||||
if self.redirect_date_url and self.request.GET.get('date'):
|
if self.redirect_date_url and self.request.GET.get("date"):
|
||||||
return redirect(self.redirect_date_url,
|
return redirect(
|
||||||
date=self.request.GET['date'].replace('-', '/'))
|
self.redirect_date_url,
|
||||||
|
date=self.request.GET["date"].replace("-", "/"),
|
||||||
|
)
|
||||||
|
|
||||||
self.date = self.get_date()
|
self.date = self.get_date()
|
||||||
return super().get(*args, **kwargs)
|
return super().get(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class ParentMixin:
|
class ParentMixin:
|
||||||
|
"""Optional parent page for a list view.
|
||||||
|
|
||||||
|
Parent is fetched and passed to the template context when
|
||||||
|
`parent_model` is provided (queryset is filtered by parent page in
|
||||||
|
such case).
|
||||||
"""
|
"""
|
||||||
Optional parent page for a list view. Parent is fetched and passed to the
|
|
||||||
template context when `parent_model` is provided (queryset is filtered by
|
|
||||||
parent page in such case).
|
|
||||||
"""
|
|
||||||
parent_model = None
|
parent_model = None
|
||||||
""" Parent model """
|
"""Parent model."""
|
||||||
parent_url_kwarg = 'parent_slug'
|
parent_url_kwarg = "parent_slug"
|
||||||
""" Url lookup argument """
|
"""Url lookup argument."""
|
||||||
parent_field = 'slug'
|
parent_field = "slug"
|
||||||
""" Parent field for url lookup """
|
"""Parent field for url lookup."""
|
||||||
parent = None
|
parent = None
|
||||||
""" Parent page object """
|
"""Parent page object."""
|
||||||
|
|
||||||
def get_parent(self, request, *args, **kwargs):
|
def get_parent(self, request, *args, **kwargs):
|
||||||
if self.parent_model is None or self.parent_url_kwarg not in kwargs:
|
if self.parent_model is None or self.parent_url_kwarg not in kwargs:
|
||||||
|
@ -51,7 +56,8 @@ class ParentMixin:
|
||||||
|
|
||||||
lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
|
lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
|
||||||
return get_object_or_404(
|
return get_object_or_404(
|
||||||
self.parent_model.objects.select_related('cover'), **lookup)
|
self.parent_model.objects.select_related("cover"), **lookup
|
||||||
|
)
|
||||||
|
|
||||||
def get(self, request, *args, **kwargs):
|
def get(self, request, *args, **kwargs):
|
||||||
self.parent = self.get_parent(request, *args, **kwargs)
|
self.parent = self.get_parent(request, *args, **kwargs)
|
||||||
|
@ -63,26 +69,31 @@ class ParentMixin:
|
||||||
return super().get_queryset()
|
return super().get_queryset()
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
self.parent = kwargs.setdefault('parent', self.parent)
|
self.parent = kwargs.setdefault("parent", self.parent)
|
||||||
if self.parent is not None:
|
if self.parent is not None:
|
||||||
kwargs.setdefault('cover', self.parent.cover)
|
kwargs.setdefault("cover", self.parent.cover)
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
class AttachedToMixin:
|
class AttachedToMixin:
|
||||||
""" Mixin for views that can have a static page attached to it. """
|
"""Mixin for views that can have a static page attached to it."""
|
||||||
|
|
||||||
attach_to_value = None
|
attach_to_value = None
|
||||||
""" Value of StaticPage.attach_to """
|
"""Value of StaticPage.attach_to."""
|
||||||
|
|
||||||
def get_page(self):
|
def get_page(self):
|
||||||
if self.attach_to_value is not None:
|
if self.attach_to_value is not None:
|
||||||
return StaticPage.objects.filter(attach_to=self.attach_to_value) \
|
return (
|
||||||
.published().first()
|
StaticPage.objects.filter(attach_to=self.attach_to_value)
|
||||||
|
.published()
|
||||||
|
.first()
|
||||||
|
)
|
||||||
return super().get_page()
|
return super().get_page()
|
||||||
|
|
||||||
|
|
||||||
class FiltersMixin:
|
class FiltersMixin:
|
||||||
""" Mixin integrating Django filters' filter set """
|
"""Mixin integrating Django filters' filter set."""
|
||||||
|
|
||||||
filterset = None
|
filterset = None
|
||||||
filterset_class = None
|
filterset_class = None
|
||||||
|
|
||||||
|
@ -97,13 +108,12 @@ class FiltersMixin:
|
||||||
return query
|
return query
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
filterset = kwargs.setdefault('filterset', self.filterset)
|
filterset = kwargs.setdefault("filterset", self.filterset)
|
||||||
if filterset.is_valid():
|
if filterset.is_valid():
|
||||||
kwargs['filterset_data'] = filterset.form.cleaned_data
|
kwargs["filterset_data"] = filterset.form.cleaned_data
|
||||||
else:
|
else:
|
||||||
kwargs['filterset_data'] = {}
|
kwargs["filterset_data"] = {}
|
||||||
|
|
||||||
params = self.request.GET.copy()
|
params = self.request.GET.copy()
|
||||||
kwargs['get_params'] = params.pop('page', True) and params
|
kwargs["get_params"] = params.pop("page", True) and params
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
|
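Reviewer note — the reformatted conditional in `GetDateMixin.get_date` reads in three branches; a framework-free equivalent for illustration (`query` and `url_kwargs` stand in for `request.GET` and `self.kwargs`):

```python
# Illustrative, framework-free equivalent of GetDateMixin.get_date.
import datetime


def resolve_date(query, url_kwargs):
    """Prefer ?date=YYYY-MM-DD from the query string, then the "date" URL kwarg, else None."""
    raw = query.get("date")
    if raw is not None:
        year, month, day = (int(part) for part in raw.split("-")[:3])
        return datetime.date(year, month, day)
    return url_kwargs.get("date")


print(resolve_date({"date": "2023-04-01"}, {}))               # 2023-04-01
print(resolve_date({}, {"date": datetime.date(2023, 4, 2)}))  # 2023-04-02
print(resolve_date({}, {}))                                   # None
```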
@ -1,25 +1,28 @@
|
||||||
|
|
||||||
from django.http import Http404, HttpResponse
|
from django.http import Http404, HttpResponse
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from django.views.generic import DetailView, ListView
|
from django.views.generic import DetailView, ListView
|
||||||
|
|
||||||
from honeypot.decorators import check_honeypot
|
from honeypot.decorators import check_honeypot
|
||||||
|
|
||||||
from ..filters import PageFilters
|
from ..filters import PageFilters
|
||||||
from ..forms import CommentForm
|
from ..forms import CommentForm
|
||||||
from ..models import Category, Comment
|
from ..models import Comment
|
||||||
from ..utils import Redirect
|
from ..utils import Redirect
|
||||||
from .base import BaseView
|
from .base import BaseView
|
||||||
from .mixins import AttachedToMixin, FiltersMixin, ParentMixin
|
from .mixins import AttachedToMixin, FiltersMixin, ParentMixin
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
__all__ = ['BasePageListView', 'BasePageDetailView', 'PageDetailView', 'PageListView']
|
"BasePageListView",
|
||||||
|
"BasePageDetailView",
|
||||||
|
"PageDetailView",
|
||||||
|
"PageListView",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
|
class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
|
||||||
""" Base view class for BasePage list. """
|
"""Base view class for BasePage list."""
|
||||||
template_name = 'aircox/basepage_list.html'
|
|
||||||
item_template_name = 'aircox/widgets/page_item.html'
|
template_name = "aircox/basepage_list.html"
|
||||||
|
item_template_name = "aircox/widgets/page_item.html"
|
||||||
has_sidebar = True
|
has_sidebar = True
|
||||||
|
|
||||||
paginate_by = 30
|
paginate_by = 30
|
||||||
|
@ -29,35 +32,39 @@ class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
|
||||||
return super().get(*args, **kwargs)
|
return super().get(*args, **kwargs)
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return super().get_queryset().select_subclasses().published() \
|
return (
|
||||||
.select_related('cover')
|
super()
|
||||||
|
.get_queryset()
|
||||||
|
.select_subclasses()
|
||||||
|
.published()
|
||||||
|
.select_related("cover")
|
||||||
|
)
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
kwargs.setdefault('item_template_name', self.item_template_name)
|
kwargs.setdefault("item_template_name", self.item_template_name)
|
||||||
kwargs.setdefault('has_headline', self.has_headline)
|
kwargs.setdefault("has_headline", self.has_headline)
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
class BasePageDetailView(BaseView, DetailView):
|
class BasePageDetailView(BaseView, DetailView):
|
||||||
""" Base view class for BasePage. """
|
"""Base view class for BasePage."""
|
||||||
template_name = 'aircox/basepage_detail.html'
|
|
||||||
context_object_name = 'page'
|
template_name = "aircox/basepage_detail.html"
|
||||||
|
context_object_name = "page"
|
||||||
has_filters = False
|
has_filters = False
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return super().get_queryset().select_related('cover')
|
return super().get_queryset().select_related("cover")
|
||||||
|
|
||||||
# This should not exist: it allows mapping unpublished pages
|
# This should not exist: it allows mapping unpublished pages
|
||||||
# or it should only be used for trashed pages.
|
# or it should only be used for trashed pages.
|
||||||
def not_published_redirect(self, page):
|
def not_published_redirect(self, page):
|
||||||
"""
|
"""When a page is not published, redirect to the returned url instead
|
||||||
When a page is not published, redirect to the returned url instead of an
|
of an HTTP 404 code."""
|
||||||
HTTP 404 code.
|
|
||||||
"""
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_object(self):
|
def get_object(self):
|
||||||
if getattr(self, 'object', None):
|
if getattr(self, "object", None):
|
||||||
return self.object
|
return self.object
|
||||||
|
|
||||||
obj = super().get_object()
|
obj = super().get_object()
|
||||||
|
@ -65,7 +72,7 @@ class BasePageDetailView(BaseView, DetailView):
|
||||||
redirect_url = self.not_published_redirect(obj)
|
redirect_url = self.not_published_redirect(obj)
|
||||||
if redirect_url:
|
if redirect_url:
|
||||||
raise Redirect(redirect_url)
|
raise Redirect(redirect_url)
|
||||||
raise Http404('%s not found' % self.model._meta.verbose_name)
|
raise Http404("%s not found" % self.model._meta.verbose_name)
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
def get_page(self):
|
def get_page(self):
|
||||||
|
@ -73,7 +80,8 @@ class BasePageDetailView(BaseView, DetailView):
|
||||||
|
|
||||||
|
|
||||||
class PageListView(FiltersMixin, BasePageListView):
|
class PageListView(FiltersMixin, BasePageListView):
|
||||||
""" Page list view. """
|
"""Page list view."""
|
||||||
|
|
||||||
filterset_class = PageFilters
|
filterset_class = PageFilters
|
||||||
template_name = None
|
template_name = None
|
||||||
has_filters = True
|
has_filters = True
|
||||||
|
@ -81,58 +89,65 @@ class PageListView(FiltersMixin, BasePageListView):
|
||||||
filters = None
|
filters = None
|
||||||
|
|
||||||
def get_template_names(self):
|
def get_template_names(self):
|
||||||
return super().get_template_names() + ['aircox/page_list.html']
|
return super().get_template_names() + ["aircox/page_list.html"]
|
||||||
|
|
||||||
def get_filterset(self, data, query):
|
def get_filterset(self, data, query):
|
||||||
# FIXME: not the most efficient, because it joins then splits (in django filters)
|
# FIXME: not the most efficient, because it joins then splits (django filters)
|
||||||
data['category__id__in'] = ','.join(data.getlist('category__id__in'))
|
data["category__id__in"] = ",".join(data.getlist("category__id__in"))
|
||||||
return super().get_filterset(data, query)
|
return super().get_filterset(data, query)
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
qs = super().get_queryset().select_related('category') \
|
qs = (
|
||||||
.order_by('-pub_date')
|
super()
|
||||||
|
.get_queryset()
|
||||||
|
.select_related("category")
|
||||||
|
.order_by("-pub_date")
|
||||||
|
)
|
||||||
return qs
|
return qs
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
kwargs['categories'] = self.model.objects.published() \
|
kwargs["categories"] = (
|
||||||
.filter(category__isnull=False) \
|
self.model.objects.published()
|
||||||
.values_list('category__title', 'category__id') \
|
.filter(category__isnull=False)
|
||||||
.distinct()
|
.values_list("category__title", "category__id")
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
class PageDetailView(BasePageDetailView):
|
class PageDetailView(BasePageDetailView):
|
||||||
""" Base view class for pages. """
|
"""Base view class for pages."""
|
||||||
|
|
||||||
template_name = None
|
template_name = None
|
||||||
context_object_name = 'page'
|
context_object_name = "page"
|
||||||
has_filters = False
|
has_filters = False
|
||||||
|
|
||||||
def get_template_names(self):
|
def get_template_names(self):
|
||||||
return super().get_template_names() + ['aircox/page_detail.html']
|
return super().get_template_names() + ["aircox/page_detail.html"]
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return super().get_queryset().select_related('category')
|
return super().get_queryset().select_related("category")
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
if self.object.allow_comments and not 'comment_form' in kwargs:
|
if self.object.allow_comments and "comment_form" not in kwargs:
|
||||||
kwargs['comment_form'] = CommentForm()
|
kwargs["comment_form"] = CommentForm()
|
||||||
kwargs['comments'] = Comment.objects.filter(page=self.object) \
|
kwargs["comments"] = Comment.objects.filter(page=self.object).order_by(
|
||||||
.order_by('-date')
|
"-date"
|
||||||
|
)
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def as_view(cls, *args, **kwargs):
|
def as_view(cls, *args, **kwargs):
|
||||||
view = super(PageDetailView, cls).as_view(*args, **kwargs)
|
view = super(PageDetailView, cls).as_view(*args, **kwargs)
|
||||||
return check_honeypot(view, field_name='website')
|
return check_honeypot(view, field_name="website")
|
||||||
|
|
||||||
def post(self, request, *args, **kwargs):
|
def post(self, request, *args, **kwargs):
|
||||||
self.object = self.get_object()
|
self.object = self.get_object()
|
||||||
if not self.object.allow_comments:
|
if not self.object.allow_comments:
|
||||||
return HttpResponse(_('comments are not allowed'), status=503)
|
return HttpResponse(_("comments are not allowed"), status=503)
|
||||||
|
|
||||||
form = CommentForm(request.POST)
|
form = CommentForm(request.POST)
|
||||||
comment = form.save(commit=False)
|
comment = form.save(commit=False)
|
||||||
comment.page = self.object
|
comment.page = self.object
|
||||||
comment.save()
|
comment.save()
|
||||||
return self.get(request, *args, **kwargs)
|
return self.get(request, *args, **kwargs)
|
||||||
|
|
||||||
|
|
|
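Reviewer note — a hedged sketch of posting a comment to a page detail view as wired above; the page URL and the form field names are assumptions, the empty `website` field mirrors the honeypot configured with `check_honeypot(view, field_name="website")`:

```python
# Hypothetical comment submission against a page detail view (URL and form fields assumed).
import requests

response = requests.post(
    "https://radio.example.org/pages/some-article/",
    data={
        "nickname": "listener",  # assumed CommentForm fields
        "content": "Great show!",
        "website": "",           # honeypot field: left empty under the default config
    },
    timeout=10,
)
print(response.status_code)  # 503 with "comments are not allowed" if comments are disabled
```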
@ -1,14 +1,10 @@
|
||||||
from django.db.models import Q
|
|
||||||
from django.core.exceptions import ObjectDoesNotExist
|
|
||||||
from django.shortcuts import get_object_or_404
|
|
||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
|
|
||||||
from ..models import Episode, Program, Page, StaticPage
|
from ..models import Page, Program, StaticPage
|
||||||
from .mixins import ParentMixin, AttachedToMixin
|
from .mixins import ParentMixin
|
||||||
from .page import PageDetailView, PageListView
|
from .page import PageDetailView, PageListView
|
||||||
|
|
||||||
|
__all__ = ["ProgramPageDetailView", "ProgramDetailView", "ProgramPageListView"]
|
||||||
__all__ = ['ProgramPageDetailView', 'ProgramDetailView', 'ProgramPageListView']
|
|
||||||
|
|
||||||
|
|
||||||
class BaseProgramMixin:
|
class BaseProgramMixin:
|
||||||
|
@ -16,12 +12,13 @@ class BaseProgramMixin:
|
||||||
return self.object
|
return self.object
|
||||||
|
|
||||||
def get_sidebar_url(self):
|
def get_sidebar_url(self):
|
||||||
return reverse('program-page-list',
|
return reverse(
|
||||||
kwargs={"parent_slug": self.program.slug})
|
"program-page-list", kwargs={"parent_slug": self.program.slug}
|
||||||
|
)
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
self.program = self.get_program()
|
self.program = self.get_program()
|
||||||
kwargs['program'] = self.program
|
kwargs["program"] = self.program
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
@ -39,9 +36,9 @@ class ProgramListView(PageListView):
|
||||||
|
|
||||||
# FIXME: not used
|
# FIXME: not used
|
||||||
class ProgramPageDetailView(BaseProgramMixin, ParentMixin, PageDetailView):
|
class ProgramPageDetailView(BaseProgramMixin, ParentMixin, PageDetailView):
|
||||||
"""
|
"""Base view class for a page that is displayed as a program's child
|
||||||
Base view class for a page that is displayed as a program's child page.
|
page."""
|
||||||
"""
|
|
||||||
parent_model = Program
|
parent_model = Program
|
||||||
|
|
||||||
def get_program(self):
|
def get_program(self):
|
||||||
|
@ -61,6 +58,5 @@ class ProgramPageListView(BaseProgramMixin, PageListView):
|
||||||
return self.parent
|
return self.parent
|
||||||
|
|
||||||
def get_context_data(self, **kwargs):
|
def get_context_data(self, **kwargs):
|
||||||
kwargs.setdefault('sidebar_url_parent', None)
|
kwargs.setdefault("sidebar_url_parent", None)
|
||||||
return super().get_context_data(**kwargs)
|
return super().get_context_data(**kwargs)
|
||||||
|
|
||||||
|
|
|
@ -1,23 +1,27 @@
|
||||||
|
from django_filters import rest_framework as filters
|
||||||
from rest_framework import status, viewsets
|
from rest_framework import status, viewsets
|
||||||
from rest_framework.decorators import action
|
from rest_framework.decorators import action
|
||||||
from rest_framework.permissions import IsAuthenticated
|
from rest_framework.permissions import IsAuthenticated
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from django_filters import rest_framework as filters
|
|
||||||
|
|
||||||
from .models import Sound, Track
|
from .models import Sound, Track
|
||||||
from .serializers import SoundSerializer, admin
|
from .serializers import SoundSerializer, admin
|
||||||
from .views import BaseAPIView
|
from .views import BaseAPIView
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
__all__ = ('SoundFilter', 'SoundViewSet', 'TrackFilter', 'TrackROViewSet',
|
"SoundFilter",
|
||||||
'UserSettingsViewSet')
|
"SoundViewSet",
|
||||||
|
"TrackFilter",
|
||||||
|
"TrackROViewSet",
|
||||||
|
"UserSettingsViewSet",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class SoundFilter(filters.FilterSet):
|
class SoundFilter(filters.FilterSet):
|
||||||
station = filters.NumberFilter(field_name='program__station__id')
|
station = filters.NumberFilter(field_name="program__station__id")
|
||||||
program = filters.NumberFilter(field_name='program_id')
|
program = filters.NumberFilter(field_name="program_id")
|
||||||
episode = filters.NumberFilter(field_name='episode_id')
|
episode = filters.NumberFilter(field_name="episode_id")
|
||||||
search = filters.CharFilter(field_name='search', method='search_filter')
|
search = filters.CharFilter(field_name="search", method="search_filter")
|
||||||
|
|
||||||
def search_filter(self, queryset, name, value):
|
def search_filter(self, queryset, name, value):
|
||||||
return queryset.search(value)
|
return queryset.search(value)
|
||||||
|
@ -25,29 +29,30 @@ class SoundFilter(filters.FilterSet):
|
||||||
|
|
||||||
class SoundViewSet(BaseAPIView, viewsets.ModelViewSet):
|
class SoundViewSet(BaseAPIView, viewsets.ModelViewSet):
|
||||||
serializer_class = SoundSerializer
|
serializer_class = SoundSerializer
|
||||||
queryset = Sound.objects.available().order_by('-pk')
|
queryset = Sound.objects.available().order_by("-pk")
|
||||||
filter_backends = (filters.DjangoFilterBackend,)
|
filter_backends = (filters.DjangoFilterBackend,)
|
||||||
filterset_class = SoundFilter
|
filterset_class = SoundFilter
|
||||||
|
|
||||||
|
|
||||||
# --- admin
|
# --- admin
|
||||||
class TrackFilter(filters.FilterSet):
|
class TrackFilter(filters.FilterSet):
|
||||||
artist = filters.CharFilter(field_name='artist', lookup_expr='icontains')
|
artist = filters.CharFilter(field_name="artist", lookup_expr="icontains")
|
||||||
album = filters.CharFilter(field_name='album', lookup_expr='icontains')
|
album = filters.CharFilter(field_name="album", lookup_expr="icontains")
|
||||||
title = filters.CharFilter(field_name='title', lookup_expr='icontains')
|
title = filters.CharFilter(field_name="title", lookup_expr="icontains")
|
||||||
|
|
||||||
|
|
||||||
class TrackROViewSet(viewsets.ReadOnlyModelViewSet):
|
class TrackROViewSet(viewsets.ReadOnlyModelViewSet):
|
||||||
""" Track viewset used for auto completion """
|
"""Track viewset used for auto completion."""
|
||||||
|
|
||||||
serializer_class = admin.TrackSerializer
|
serializer_class = admin.TrackSerializer
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
filter_backends = (filters.DjangoFilterBackend,)
|
filter_backends = (filters.DjangoFilterBackend,)
|
||||||
filterset_class = TrackFilter
|
filterset_class = TrackFilter
|
||||||
queryset = Track.objects.all()
|
queryset = Track.objects.all()
|
||||||
|
|
||||||
@action(name='autocomplete', detail=False)
|
@action(name="autocomplete", detail=False)
|
||||||
def autocomplete(self, request):
|
def autocomplete(self, request):
|
||||||
field = request.GET.get('field', None)
|
field = request.GET.get("field", None)
|
||||||
if field:
|
if field:
|
||||||
queryset = self.filter_queryset(self.get_queryset())
|
queryset = self.filter_queryset(self.get_queryset())
|
||||||
values = queryset.values_list(field, flat=True).distinct()
|
values = queryset.values_list(field, flat=True).distinct()
|
||||||
|
@ -56,36 +61,38 @@ class TrackROViewSet(viewsets.ReadOnlyModelViewSet):
|
||||||
|
|
||||||
|
|
||||||
class UserSettingsViewSet(viewsets.ViewSet):
|
class UserSettingsViewSet(viewsets.ViewSet):
|
||||||
|
"""User's settings specific to aircox.
|
||||||
|
|
||||||
|
Allow only to create and edit user's own settings.
|
||||||
"""
|
"""
|
||||||
User's settings specific to aircox. Allow only to create and edit
|
|
||||||
user's own settings.
|
|
||||||
"""
|
|
||||||
serializer_class = admin.UserSettingsSerializer
|
serializer_class = admin.UserSettingsSerializer
|
||||||
permission_classes = [IsAuthenticated]
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
def get_serializer(self, instance=None, **kwargs):
|
def get_serializer(self, instance=None, **kwargs):
|
||||||
return self.serializer_class(
|
return self.serializer_class(
|
||||||
instance=instance, context={'user': self.request.user},
|
instance=instance, context={"user": self.request.user}, **kwargs
|
||||||
**kwargs)
|
)
|
||||||
|
|
||||||
@action(detail=False, methods=['GET'])
|
@action(detail=False, methods=["GET"])
|
||||||
def retrieve(self, request):
|
def retrieve(self, request):
|
||||||
user = self.request.user
|
user = self.request.user
|
||||||
settings = getattr(user, 'aircox_settings', None)
|
settings = getattr(user, "aircox_settings", None)
|
||||||
data = settings and self.get_serializer(settings) or None
|
data = settings and self.get_serializer(settings) or None
|
||||||
return Response(data)
|
return Response(data)
|
||||||
|
|
||||||
@action(detail=False, methods=['POST', 'PUT'])
|
@action(detail=False, methods=["POST", "PUT"])
|
||||||
def update(self, request):
|
def update(self, request):
|
||||||
user = self.request.user
|
user = self.request.user
|
||||||
settings = getattr(user, 'aircox_settings', None)
|
settings = getattr(user, "aircox_settings", None)
|
||||||
data = dict(request.data)
|
data = dict(request.data)
|
||||||
data['user_id'] = self.request.user
|
data["user_id"] = self.request.user
|
||||||
serializer = self.get_serializer(instance=settings, data=request.data)
|
serializer = self.get_serializer(instance=settings, data=request.data)
|
||||||
if serializer.is_valid():
|
if serializer.is_valid():
|
||||||
serializer.save()
|
serializer.save()
|
||||||
return Response({'status': 'ok'})
|
return Response({"status": "ok"})
|
||||||
else:
|
else:
|
||||||
return Response({'errors': serializer.errors},
|
return Response(
|
||||||
status=status.HTTP_400_BAD_REQUEST)
|
{"errors": serializer.errors},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
|
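Reviewer note — a hedged sketch of calling the `autocomplete` action of `TrackROViewSet`; the URL path and credentials are assumptions, while the `field` parameter and the `artist` filter come from the code above:

```python
# Hypothetical call to the track autocomplete endpoint (URL path is assumed).
import requests

response = requests.get(
    "https://radio.example.org/api/tracks/autocomplete/",
    params={"field": "artist", "artist": "nina"},  # distinct artists containing "nina"
    auth=("dj", "secret"),  # the viewset requires an authenticated user
    timeout=10,
)
print(response.json())
```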
@ -15,4 +15,3 @@ This application allows to:
|
||||||
- generate config file and playlists: regular Django template file in `scripts/station.liq`;
|
- generate config file and playlists: regular Django template file in `scripts/station.liq`;
|
||||||
- monitor what is being played and what has to be played using Telnet to communicate
|
- monitor what is being played and what has to be played using Telnet to communicate
|
||||||
with the Liquidsoap process;
|
with the Liquidsoap process;
|
||||||
|
|
||||||
|
|
|
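Reviewer note — monitoring goes through Liquidsoap's telnet server, as the README says; a hedged sketch of a raw query (the host, port and `help` command reflect a typical Liquidsoap setup, not this diff):

```python
# Hedged sketch: talk to a running Liquidsoap instance over its telnet socket.
import socket

with socket.create_connection(("127.0.0.1", 1234), timeout=5) as sock:
    sock.sendall(b"help\n")
    data = b""
    while not data.rstrip().endswith(b"END"):  # Liquidsoap ends each reply with END
        chunk = sock.recv(1024)
        if not chunk:
            break
        data += chunk
print(data.decode("utf-8", errors="replace"))
```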
@ -2,6 +2,4 @@ from django.apps import AppConfig
|
||||||
|
|
||||||
|
|
||||||
class AircoxStreamerConfig(AppConfig):
|
class AircoxStreamerConfig(AppConfig):
|
||||||
name = 'aircox_streamer'
|
name = "aircox_streamer"
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,24 +1,22 @@
|
||||||
import socket
|
|
||||||
import re
|
|
||||||
import json
|
import json
|
||||||
|
import re
|
||||||
|
import socket
|
||||||
|
|
||||||
|
response_re = re.compile(r"(.*)\s+END\s*$")
|
||||||
response_re = re.compile(r'(.*)\s+END\s*$')
|
|
||||||
key_val_re = re.compile(r'(?P<key>[^=]+)="?(?P<value>([^"]|\\")+)"?')
|
key_val_re = re.compile(r'(?P<key>[^=]+)="?(?P<value>([^"]|\\")+)"?')
|
||||||
|
|
||||||
|
|
||||||
class Connector:
|
class Connector:
|
||||||
|
"""Connection to AF_UNIX or AF_INET, get and send data.
|
||||||
|
|
||||||
|
Received data can be parsed from list of `key=value` or JSON.
|
||||||
"""
|
"""
|
||||||
Connection to AF_UNIX or AF_INET, get and send data. Received
|
|
||||||
data can be parsed from list of `key=value` or JSON.
|
|
||||||
"""
|
|
||||||
socket = None
|
socket = None
|
||||||
""" The socket """
|
"""The socket."""
|
||||||
address = None
|
address = None
|
||||||
"""
|
"""String to a Unix domain socket file, or a tuple (host, port) for TCP/IP
|
||||||
String to a Unix domain socket file, or a tuple (host, port) for
|
connection."""
|
||||||
TCP/IP connection
|
|
||||||
"""
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_open(self):
|
def is_open(self):
|
||||||
|
@ -32,12 +30,13 @@ class Connector:
|
||||||
if self.is_open:
|
if self.is_open:
|
||||||
return
|
return
|
||||||
|
|
||||||
family = socket.AF_UNIX if isinstance(self.address, str) else \
|
family = (
|
||||||
socket.AF_INET
|
socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
self.socket = socket.socket(family, socket.SOCK_STREAM)
|
self.socket = socket.socket(family, socket.SOCK_STREAM)
|
||||||
self.socket.connect(self.address)
|
self.socket.connect(self.address)
|
||||||
except:
|
except Exception:
|
||||||
self.close()
|
self.close()
|
||||||
return -1
|
return -1
|
||||||
|
|
||||||
|
@ -50,27 +49,32 @@ class Connector:
|
||||||
if self.open():
|
if self.open():
|
||||||
return None
|
return None
|
||||||
|
|
||||||
data = bytes(''.join([str(d) for d in data]) + '\n', encoding='utf-8')
|
data = bytes("".join([str(d) for d in data]) + "\n", encoding="utf-8")
|
||||||
try:
|
try:
|
||||||
self.socket.sendall(data)
|
self.socket.sendall(data)
|
||||||
data = ''
|
data = ""
|
||||||
while not response_re.search(data):
|
while not response_re.search(data):
|
||||||
data += self.socket.recv(1024).decode('utf-8')
|
data += self.socket.recv(1024).decode("utf-8")
|
||||||
|
|
||||||
if data:
|
if data:
|
||||||
data = response_re.sub(r'\1', data).strip()
|
data = response_re.sub(r"\1", data).strip()
|
||||||
data = self.parse(data) if parse else \
|
data = (
|
||||||
self.parse_json(data) if parse_json else data
|
self.parse(data)
|
||||||
|
if parse
|
||||||
|
else self.parse_json(data)
|
||||||
|
if parse_json
|
||||||
|
else data
|
||||||
|
)
|
||||||
return data
|
return data
|
||||||
except:
|
except Exception:
|
||||||
self.close()
|
self.close()
|
||||||
if try_count > 0:
|
if try_count > 0:
|
||||||
return self.send(data, try_count - 1)
|
return self.send(data, try_count - 1)
|
||||||
|
|
||||||
def parse(self, value):
|
def parse(self, value):
|
||||||
return {
|
return {
|
||||||
line.groupdict()['key']: line.groupdict()['value']
|
line.groupdict()["key"]: line.groupdict()["value"]
|
||||||
for line in (key_val_re.search(line) for line in value.split('\n'))
|
for line in (key_val_re.search(line) for line in value.split("\n"))
|
||||||
if line
|
if line
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -79,5 +83,5 @@ class Connector:
|
||||||
if value[0] == '"' and value[-1] == '"':
|
if value[0] == '"' and value[-1] == '"':
|
||||||
value = value[1:-1]
|
value = value[1:-1]
|
||||||
return json.loads(value) if value else None
|
return json.loads(value) if value else None
|
||||||
except:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
|
|
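Reviewer note — a standalone sketch of what `Connector.parse` does with a `key=value` reply; the regexes are copied from the module above, the sample payload is made up:

```python
# Parse a Liquidsoap-style "key=value" reply the way Connector.parse does.
import re

response_re = re.compile(r"(.*)\s+END\s*$")
key_val_re = re.compile(r'(?P<key>[^=]+)="?(?P<value>([^"]|\\")+)"?')

raw = 'status="playing"\nuri="/srv/sounds/show.mp3"\nEND'
body = response_re.sub(r"\1", raw).strip()  # drop the trailing END marker
parsed = {
    match.groupdict()["key"]: match.groupdict()["value"]
    for match in (key_val_re.search(line) for line in body.split("\n"))
    if match
}
print(parsed)  # {'status': 'playing', 'uri': '/srv/sounds/show.mp3'}
```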