forked from rc/aircox
Reviewed-on: rc/aircox#86
This commit is contained in:
commit
4bebc56a28
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -5,5 +5,3 @@ venv/
|
|||
node_modules/
|
||||
*.egg-info/
|
||||
*.egg
|
||||
|
||||
|
||||
|
|
29
.pre-commit-config.yaml
Normal file
29
.pre-commit-config.yaml
Normal file
|
@ -0,0 +1,29 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v2.3.0
|
||||
hooks:
|
||||
- id: check-yaml
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.1.0
|
||||
hooks:
|
||||
- id: black
|
||||
args:
|
||||
- --line-length=79
|
||||
- --exclude="""\.git|\.__pycache__|venv|_build|buck-out|build|dist"""
|
||||
- repo: https://github.com/PyCQA/autoflake.git
|
||||
rev: v2.0.2
|
||||
hooks:
|
||||
- id: autoflake
|
||||
args:
|
||||
- --remove-all-unused-imports
|
||||
- repo: https://github.com/PyCQA/flake8.git
|
||||
rev: 6.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
exclude: instance/sample_settings.py
|
||||
- repo: https://github.com/PyCQA/docformatter.git
|
||||
rev: v1.5.1
|
||||
hooks:
|
||||
- id: docformatter
|
|
@ -104,4 +104,3 @@ in it instead of running commands manually.
|
|||
|
||||
## More informations
|
||||
There are extra informations in `aircox/README.md` and `aircox_streamer/README.md`.
|
||||
|
||||
|
|
|
@ -16,4 +16,3 @@ Each program has a directory on the server where user puts its podcasts (in **AI
|
|||
## Requirements
|
||||
* Sox (and soxi): sound file monitor and quality check
|
||||
* requirements.txt for python's dependecies
|
||||
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
|
|
@ -7,3 +7,18 @@ from .program import ProgramAdmin, ScheduleAdmin, StreamAdmin
|
|||
from .sound import SoundAdmin, TrackAdmin
|
||||
from .station import StationAdmin
|
||||
|
||||
__all__ = (
|
||||
"filters",
|
||||
"ArticleAdmin",
|
||||
"DiffusionAdmin",
|
||||
"EpisodeAdmin",
|
||||
"LogAdmin",
|
||||
"PageAdmin",
|
||||
"StaticPageAdmin",
|
||||
"ProgramAdmin",
|
||||
"ScheduleAdmin",
|
||||
"StreamAdmin",
|
||||
"SoundAdmin",
|
||||
"TrackAdmin",
|
||||
"StationAdmin",
|
||||
)
|
||||
|
|
|
@ -1,17 +1,12 @@
|
|||
import copy
|
||||
|
||||
from django.contrib import admin
|
||||
|
||||
from ..models import Article
|
||||
from .page import PageAdmin
|
||||
|
||||
|
||||
__all__ = ['ArticleAdmin']
|
||||
__all__ = ["ArticleAdmin"]
|
||||
|
||||
|
||||
@admin.register(Article)
|
||||
class ArticleAdmin(PageAdmin):
|
||||
search_fields = PageAdmin.search_fields + ('parent__title',)
|
||||
search_fields = PageAdmin.search_fields + ("parent__title",)
|
||||
# TODO: readonly field
|
||||
|
||||
|
||||
|
|
|
@ -1,78 +1,83 @@
|
|||
from adminsortable2.admin import SortableAdminBase
|
||||
from django.contrib import admin
|
||||
from django.forms import ModelForm
|
||||
from django.utils.translation import gettext as _
|
||||
from adminsortable2.admin import SortableAdminBase
|
||||
|
||||
from ..models import Episode, Diffusion
|
||||
|
||||
from ..models import Diffusion, Episode
|
||||
from .page import PageAdmin
|
||||
from .sound import SoundInline, TrackInline
|
||||
|
||||
|
||||
class DiffusionBaseAdmin:
|
||||
fields = ('type', 'start', 'end', 'schedule')
|
||||
readonly_fields = ('schedule',)
|
||||
fields = ("type", "start", "end", "schedule")
|
||||
readonly_fields = ("schedule",)
|
||||
|
||||
def get_readonly_fields(self, request, obj=None):
|
||||
fields = super().get_readonly_fields(request, obj)
|
||||
if not request.user.has_perm('aircox_program.scheduling'):
|
||||
fields = fields + ('program', 'start', 'end')
|
||||
if not request.user.has_perm("aircox_program.scheduling"):
|
||||
fields = fields + ("program", "start", "end")
|
||||
return [field for field in fields if field in self.fields]
|
||||
|
||||
|
||||
@admin.register(Diffusion)
|
||||
class DiffusionAdmin(DiffusionBaseAdmin, admin.ModelAdmin):
|
||||
def start_date(self, obj):
|
||||
return obj.local_start.strftime('%Y/%m/%d %H:%M')
|
||||
start_date.short_description = _('start')
|
||||
return obj.local_start.strftime("%Y/%m/%d %H:%M")
|
||||
|
||||
start_date.short_description = _("start")
|
||||
|
||||
def end_date(self, obj):
|
||||
return obj.local_end.strftime('%H:%M')
|
||||
end_date.short_description = _('end')
|
||||
return obj.local_end.strftime("%H:%M")
|
||||
|
||||
list_display = ('episode', 'start_date', 'end_date', 'type', 'initial')
|
||||
list_filter = ('type', 'start', 'program')
|
||||
list_editable = ('type',)
|
||||
ordering = ('-start', 'id')
|
||||
end_date.short_description = _("end")
|
||||
|
||||
fields = ('type', 'start', 'end', 'initial', 'program', 'schedule')
|
||||
readonly_fields = ('schedule',)
|
||||
list_display = ("episode", "start_date", "end_date", "type", "initial")
|
||||
list_filter = ("type", "start", "program")
|
||||
list_editable = ("type",)
|
||||
ordering = ("-start", "id")
|
||||
|
||||
fields = ("type", "start", "end", "initial", "program", "schedule")
|
||||
readonly_fields = ("schedule",)
|
||||
|
||||
|
||||
class DiffusionInline(DiffusionBaseAdmin, admin.TabularInline):
|
||||
model = Diffusion
|
||||
fk_name = 'episode'
|
||||
fk_name = "episode"
|
||||
extra = 0
|
||||
|
||||
def has_add_permission(self, request, obj):
|
||||
return request.user.has_perm('aircox_program.scheduling')
|
||||
return request.user.has_perm("aircox_program.scheduling")
|
||||
|
||||
|
||||
class EpisodeAdminForm(ModelForm):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['parent'].required = True
|
||||
self.fields["parent"].required = True
|
||||
|
||||
|
||||
@admin.register(Episode)
|
||||
class EpisodeAdmin(SortableAdminBase, PageAdmin):
|
||||
form = EpisodeAdminForm
|
||||
list_display = PageAdmin.list_display
|
||||
list_filter = tuple(f for f in PageAdmin.list_filter
|
||||
if f != 'pub_date') + ('diffusion__start', 'pub_date')
|
||||
search_fields = PageAdmin.search_fields + ('parent__title',)
|
||||
list_filter = tuple(
|
||||
f for f in PageAdmin.list_filter if f != "pub_date"
|
||||
) + (
|
||||
"diffusion__start",
|
||||
"pub_date",
|
||||
)
|
||||
search_fields = PageAdmin.search_fields + ("parent__title",)
|
||||
# readonly_fields = ('parent',)
|
||||
|
||||
inlines = [TrackInline, SoundInline, DiffusionInline]
|
||||
|
||||
def add_view(self, request, object_id, form_url='', context=None):
|
||||
def add_view(self, request, object_id, form_url="", context=None):
|
||||
context = context or {}
|
||||
context['init_app'] = True
|
||||
context['init_el'] = '#inline-tracks'
|
||||
context["init_app"] = True
|
||||
context["init_el"] = "#inline-tracks"
|
||||
return super().change_view(request, object_id, form_url, context)
|
||||
|
||||
def change_view(self, request, object_id, form_url='', context=None):
|
||||
def change_view(self, request, object_id, form_url="", context=None):
|
||||
context = context or {}
|
||||
context['init_app'] = True
|
||||
context['init_el'] = '#inline-tracks'
|
||||
context["init_app"] = True
|
||||
context["init_el"] = "#inline-tracks"
|
||||
return super().change_view(request, object_id, form_url, context)
|
||||
|
|
|
@ -1,63 +1,86 @@
|
|||
from django.db import models
|
||||
from django.contrib.admin import filters
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.db import models
|
||||
from django.utils.http import urlencode
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
__all__ = ('DateFieldFilter', 'DateTimeField')
|
||||
__all__ = ("DateFieldFilter", "DateTimeFieldFilter")
|
||||
|
||||
|
||||
class DateFieldFilter(filters.FieldListFilter):
|
||||
""" Display date input """
|
||||
template = 'admin/aircox/filters/date_filter.html'
|
||||
input_type = 'date'
|
||||
"""Display date input."""
|
||||
|
||||
template = "admin/aircox/filters/date_filter.html"
|
||||
input_type = "date"
|
||||
|
||||
def __init__(self, field, request, params, model, model_admin, field_path):
|
||||
self.field_generic = '%s__' % field_path
|
||||
self.date_params = {k: v for k, v in params.items()
|
||||
if k.startswith(self.field_generic)}
|
||||
self.field_generic = "%s__" % field_path
|
||||
self.date_params = {
|
||||
k: v for k, v in params.items() if k.startswith(self.field_generic)
|
||||
}
|
||||
|
||||
exact_lookup = 'date' if isinstance(field, models.DateTimeField) else 'exact'
|
||||
exact_lookup = (
|
||||
"date" if isinstance(field, models.DateTimeField) else "exact"
|
||||
)
|
||||
|
||||
# links as: (label, param, input_type|None, value)
|
||||
self.links = [(_('Exact'), self.field_generic + exact_lookup, self.input_type),
|
||||
(_('Since'), self.field_generic + 'gte', self.input_type),
|
||||
(_('Until'), self.field_generic + 'lte', self.input_type)]
|
||||
self.links = [
|
||||
(_("Exact"), self.field_generic + exact_lookup, self.input_type),
|
||||
(_("Since"), self.field_generic + "gte", self.input_type),
|
||||
(_("Until"), self.field_generic + "lte", self.input_type),
|
||||
]
|
||||
if field.null:
|
||||
self.links.insert(0, (_('None'), self.field_generic + 'isnull', None, '1'))
|
||||
|
||||
self.query_attrs = {k:v for k,v in request.GET.items()
|
||||
if k not in self.date_params}
|
||||
self.links.insert(
|
||||
0, (_("None"), self.field_generic + "isnull", None, "1")
|
||||
)
|
||||
|
||||
self.query_attrs = {
|
||||
k: v for k, v in request.GET.items() if k not in self.date_params
|
||||
}
|
||||
self.query_string = urlencode(self.query_attrs)
|
||||
super().__init__(field, request, params, model, model_admin, field_path)
|
||||
super().__init__(
|
||||
field, request, params, model, model_admin, field_path
|
||||
)
|
||||
|
||||
def expected_parameters(self):
|
||||
return [link[1] for link in self.links]
|
||||
|
||||
def choices(self, changelist):
|
||||
yield {'label': _('Any'),
|
||||
'type': None,
|
||||
'query_string': self.query_string}
|
||||
yield {
|
||||
"label": _("Any"),
|
||||
"type": None,
|
||||
"query_string": self.query_string,
|
||||
}
|
||||
|
||||
for link in self.links:
|
||||
value = len(link) > 3 and link[3] or self.date_params.get(link[1])
|
||||
yield {
|
||||
'label': link[0], 'name': link[1], 'value': value,
|
||||
'type': link[2],
|
||||
'query_attrs': self.query_attrs,
|
||||
'query_string': urlencode({link[1]: value}) + '&' + self.query_string
|
||||
if value else self.query_string,
|
||||
"label": link[0],
|
||||
"name": link[1],
|
||||
"value": value,
|
||||
"type": link[2],
|
||||
"query_attrs": self.query_attrs,
|
||||
"query_string": urlencode({link[1]: value})
|
||||
+ "&"
|
||||
+ self.query_string
|
||||
if value
|
||||
else self.query_string,
|
||||
}
|
||||
|
||||
|
||||
class DateTimeFieldFilter(DateFieldFilter):
|
||||
""" Display datetime input """
|
||||
input_type = 'datetime-local'
|
||||
"""Display datetime input."""
|
||||
|
||||
input_type = "datetime-local"
|
||||
|
||||
|
||||
filters.FieldListFilter.register(
|
||||
lambda f: isinstance(f, models.DateField), DateFieldFilter, take_priority=True)
|
||||
lambda f: isinstance(f, models.DateField),
|
||||
DateFieldFilter,
|
||||
take_priority=True,
|
||||
)
|
||||
|
||||
filters.FieldListFilter.register(
|
||||
lambda f: isinstance(f, models.DateTimeField), DateTimeFieldFilter, take_priority=True)
|
||||
|
||||
lambda f: isinstance(f, models.DateTimeField),
|
||||
DateTimeFieldFilter,
|
||||
take_priority=True,
|
||||
)
|
||||
|
|
|
@ -2,12 +2,10 @@ from django.contrib import admin
|
|||
|
||||
from ..models import Log
|
||||
|
||||
|
||||
__all__ = ['LogAdmin']
|
||||
__all__ = ["LogAdmin"]
|
||||
|
||||
|
||||
@admin.register(Log)
|
||||
class LogAdmin(admin.ModelAdmin):
|
||||
list_display = ['id', 'date', 'station', 'source', 'type', 'comment']
|
||||
list_filter = ['date', 'source', 'station']
|
||||
|
||||
list_display = ["id", "date", "station", "source", "type", "comment"]
|
||||
list_filter = ["date", "source", "station"]
|
||||
|
|
|
@ -1,23 +1,22 @@
|
|||
class UnrelatedInlineMixin:
|
||||
"""
|
||||
Inline class that can be included in an admin change view whose model
|
||||
is not directly related to inline's model.
|
||||
"""
|
||||
"""Inline class that can be included in an admin change view whose model is
|
||||
not directly related to inline's model."""
|
||||
|
||||
view_model = None
|
||||
parent_model = None
|
||||
parent_fk = ''
|
||||
parent_fk = ""
|
||||
|
||||
def __init__(self, parent_model, admin_site):
|
||||
self.view_model = parent_model
|
||||
super().__init__(self.parent_model, admin_site)
|
||||
|
||||
def get_parent(self, view_obj):
|
||||
""" Get formset's instance from `obj` of AdminSite's change form. """
|
||||
"""Get formset's instance from `obj` of AdminSite's change form."""
|
||||
field = self.parent_model._meta.get_field(self.parent_fk).remote_field
|
||||
return getattr(view_obj, field.name, None)
|
||||
|
||||
def save_parent(self, parent, view_obj):
|
||||
""" Save formset's instance. """
|
||||
"""Save formset's instance."""
|
||||
setattr(parent, self.parent_fk, view_obj)
|
||||
parent.save()
|
||||
return parent
|
||||
|
@ -25,6 +24,7 @@ class UnrelatedInlineMixin:
|
|||
def get_formset(self, request, obj):
|
||||
ParentFormSet = super().get_formset(request, obj)
|
||||
inline = self
|
||||
|
||||
class FormSet(ParentFormSet):
|
||||
view_obj = None
|
||||
|
||||
|
@ -37,6 +37,5 @@ class UnrelatedInlineMixin:
|
|||
def save(self):
|
||||
inline.save_parent(self.instance, self.view_obj)
|
||||
return super().save()
|
||||
|
||||
return FormSet
|
||||
|
||||
|
||||
|
|
|
@ -1,74 +1,82 @@
|
|||
from copy import deepcopy
|
||||
|
||||
from adminsortable2.admin import SortableInlineAdminMixin
|
||||
from django.contrib import admin
|
||||
from django.http import QueryDict
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from adminsortable2.admin import SortableInlineAdminMixin
|
||||
|
||||
from ..models import Category, Comment, NavItem, Page, StaticPage
|
||||
|
||||
|
||||
__all__ = ('CategoryAdmin', 'PageAdmin', 'NavItemInline')
|
||||
__all__ = ("CategoryAdmin", "PageAdmin", "NavItemInline")
|
||||
|
||||
|
||||
@admin.register(Category)
|
||||
class CategoryAdmin(admin.ModelAdmin):
|
||||
list_display = ['pk', 'title', 'slug']
|
||||
list_editable = ['title', 'slug']
|
||||
search_fields = ['title']
|
||||
fields = ['title', 'slug']
|
||||
list_display = ["pk", "title", "slug"]
|
||||
list_editable = ["title", "slug"]
|
||||
search_fields = ["title"]
|
||||
fields = ["title", "slug"]
|
||||
prepopulated_fields = {"slug": ("title",)}
|
||||
|
||||
|
||||
class BasePageAdmin(admin.ModelAdmin):
|
||||
list_display = ('cover_thumb', 'title', 'status', 'parent')
|
||||
list_display_links = ('cover_thumb', 'title')
|
||||
list_editable = ('status',)
|
||||
list_filter = ('status',)
|
||||
list_display = ("cover_thumb", "title", "status", "parent")
|
||||
list_display_links = ("cover_thumb", "title")
|
||||
list_editable = ("status",)
|
||||
list_filter = ("status",)
|
||||
prepopulated_fields = {"slug": ("title",)}
|
||||
|
||||
# prepopulate fields using changelist's filters
|
||||
prepopulated_filters = ('parent',)
|
||||
prepopulated_filters = ("parent",)
|
||||
|
||||
search_fields = ('title',)
|
||||
search_fields = ("title",)
|
||||
|
||||
fieldsets = [
|
||||
('', {
|
||||
'fields': ['title', 'slug', 'cover', 'content'],
|
||||
}),
|
||||
(_('Publication Settings'), {
|
||||
'fields': ['status', 'parent'],
|
||||
}),
|
||||
(
|
||||
"",
|
||||
{
|
||||
"fields": ["title", "slug", "cover", "content"],
|
||||
},
|
||||
),
|
||||
(
|
||||
_("Publication Settings"),
|
||||
{
|
||||
"fields": ["status", "parent"],
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
change_form_template = 'admin/aircox/page_change_form.html'
|
||||
change_form_template = "admin/aircox/page_change_form.html"
|
||||
|
||||
def cover_thumb(self, obj):
|
||||
return mark_safe('<img src="{}"/>'.format(obj.cover.icons['64'])) \
|
||||
if obj.cover else ''
|
||||
return (
|
||||
mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"]))
|
||||
if obj.cover
|
||||
else ""
|
||||
)
|
||||
|
||||
def get_changeform_initial_data(self, request):
|
||||
data = super().get_changeform_initial_data(request)
|
||||
filters = QueryDict(request.GET.get('_changelist_filters', ''))
|
||||
data['parent'] = filters.get('parent', None)
|
||||
filters = QueryDict(request.GET.get("_changelist_filters", ""))
|
||||
data["parent"] = filters.get("parent", None)
|
||||
return data
|
||||
|
||||
def _get_common_context(self, query, extra_context=None):
|
||||
extra_context = extra_context or {}
|
||||
parent = query.get('parent', None)
|
||||
extra_context['parent'] = None if parent is None else \
|
||||
Page.objects.get_subclass(id=parent)
|
||||
parent = query.get("parent", None)
|
||||
extra_context["parent"] = (
|
||||
None if parent is None else Page.objects.get_subclass(id=parent)
|
||||
)
|
||||
return extra_context
|
||||
|
||||
def render_change_form(self, request, context, *args, **kwargs):
|
||||
if context['original'] and not 'parent' in context:
|
||||
context['parent'] = context['original'].parent
|
||||
if context["original"] and "parent" not in context:
|
||||
context["parent"] = context["original"].parent
|
||||
return super().render_change_form(request, context, *args, **kwargs)
|
||||
|
||||
def add_view(self, request, form_url='', extra_context=None):
|
||||
filters = QueryDict(request.GET.get('_changelist_filters', ''))
|
||||
def add_view(self, request, form_url="", extra_context=None):
|
||||
filters = QueryDict(request.GET.get("_changelist_filters", ""))
|
||||
extra_context = self._get_common_context(filters, extra_context)
|
||||
return super().add_view(request, form_url, extra_context)
|
||||
|
||||
|
@ -78,31 +86,33 @@ class BasePageAdmin(admin.ModelAdmin):
|
|||
|
||||
|
||||
class PageAdmin(BasePageAdmin):
|
||||
change_list_template = 'admin/aircox/page_change_list.html'
|
||||
change_list_template = "admin/aircox/page_change_list.html"
|
||||
|
||||
list_display = BasePageAdmin.list_display + ('category',)
|
||||
list_editable = BasePageAdmin.list_editable + ('category',)
|
||||
list_filter = BasePageAdmin.list_filter + ('category', 'pub_date')
|
||||
search_fields = BasePageAdmin.search_fields + ('category__title',)
|
||||
list_display = BasePageAdmin.list_display + ("category",)
|
||||
list_editable = BasePageAdmin.list_editable + ("category",)
|
||||
list_filter = BasePageAdmin.list_filter + ("category", "pub_date")
|
||||
search_fields = BasePageAdmin.search_fields + ("category__title",)
|
||||
fieldsets = deepcopy(BasePageAdmin.fieldsets)
|
||||
|
||||
fieldsets[0][1]['fields'].insert(fieldsets[0][1]['fields'].index('slug') + 1, 'category')
|
||||
fieldsets[1][1]['fields'] += ('featured', 'allow_comments')
|
||||
fieldsets[0][1]["fields"].insert(
|
||||
fieldsets[0][1]["fields"].index("slug") + 1, "category"
|
||||
)
|
||||
fieldsets[1][1]["fields"] += ("featured", "allow_comments")
|
||||
|
||||
|
||||
@admin.register(StaticPage)
|
||||
class StaticPageAdmin(BasePageAdmin):
|
||||
list_display = BasePageAdmin.list_display + ('attach_to',)
|
||||
list_display = BasePageAdmin.list_display + ("attach_to",)
|
||||
fieldsets = deepcopy(BasePageAdmin.fieldsets)
|
||||
|
||||
fieldsets[1][1]['fields'] += ('attach_to',)
|
||||
fieldsets[1][1]["fields"] += ("attach_to",)
|
||||
|
||||
|
||||
@admin.register(Comment)
|
||||
class CommentAdmin(admin.ModelAdmin):
|
||||
list_display = ('page_title', 'date', 'nickname')
|
||||
list_filter = ('date',)
|
||||
search_fields = ('page__title', 'nickname')
|
||||
list_display = ("page_title", "date", "nickname")
|
||||
list_filter = ("date",)
|
||||
search_fields = ("page__title", "nickname")
|
||||
|
||||
def page_title(self, obj):
|
||||
return obj.page.title
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
from copy import copy
|
||||
|
||||
from django.contrib import admin
|
||||
from django.forms import ModelForm
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
@ -14,20 +12,20 @@ class ScheduleInlineForm(ModelForm):
|
|||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if self.initial:
|
||||
self.fields['date'].disabled = True
|
||||
self.fields['frequency'].disabled = True
|
||||
self.fields["date"].disabled = True
|
||||
self.fields["frequency"].disabled = True
|
||||
|
||||
|
||||
class ScheduleInline(admin.TabularInline):
|
||||
model = Schedule
|
||||
form = ScheduleInlineForm
|
||||
readonly_fields = ('timezone',)
|
||||
readonly_fields = ("timezone",)
|
||||
extra = 1
|
||||
|
||||
|
||||
class StreamInline(admin.TabularInline):
|
||||
model = Stream
|
||||
fields = ['delay', 'begin', 'end']
|
||||
fields = ["delay", "begin", "end"]
|
||||
extra = 1
|
||||
|
||||
|
||||
|
@ -39,20 +37,23 @@ class ProgramAdmin(PageAdmin):
|
|||
schedule.boolean = True
|
||||
schedule.short_description = _("Schedule")
|
||||
|
||||
list_display = PageAdmin.list_display + ('schedule', 'station', 'active')
|
||||
list_filter = PageAdmin.list_filter + ('station', 'active')
|
||||
prepopulated_fields = {'slug': ('title',)}
|
||||
search_fields = ('title',)
|
||||
list_display = PageAdmin.list_display + ("schedule", "station", "active")
|
||||
list_filter = PageAdmin.list_filter + ("station", "active")
|
||||
prepopulated_fields = {"slug": ("title",)}
|
||||
search_fields = ("title",)
|
||||
|
||||
inlines = [ScheduleInline, StreamInline]
|
||||
|
||||
def get_fieldsets(self, request, obj=None):
|
||||
fields = super().get_fieldsets(request, obj)
|
||||
if request.user.has_perm('aircox.program.scheduling'):
|
||||
if request.user.has_perm("aircox.program.scheduling"):
|
||||
fields = fields + [
|
||||
(_('Program Settings'), {
|
||||
'fields': ['active', 'station', 'sync'],
|
||||
})
|
||||
(
|
||||
_("Program Settings"),
|
||||
{
|
||||
"fields": ["active", "station", "sync"],
|
||||
},
|
||||
)
|
||||
]
|
||||
return fields
|
||||
|
||||
|
@ -61,26 +62,32 @@ class ProgramAdmin(PageAdmin):
|
|||
class ScheduleAdmin(admin.ModelAdmin):
|
||||
def program_title(self, obj):
|
||||
return obj.program.title
|
||||
program_title.short_description = _('Program')
|
||||
|
||||
program_title.short_description = _("Program")
|
||||
|
||||
def freq(self, obj):
|
||||
return obj.get_frequency_verbose()
|
||||
freq.short_description = _('Day')
|
||||
|
||||
list_filter = ['frequency', 'program']
|
||||
list_display = ['program_title', 'freq', 'time', 'timezone', 'duration',
|
||||
'initial']
|
||||
list_editable = ['time', 'duration', 'initial']
|
||||
freq.short_description = _("Day")
|
||||
|
||||
list_filter = ["frequency", "program"]
|
||||
list_display = [
|
||||
"program_title",
|
||||
"freq",
|
||||
"time",
|
||||
"timezone",
|
||||
"duration",
|
||||
"initial",
|
||||
]
|
||||
list_editable = ["time", "duration", "initial"]
|
||||
|
||||
def get_readonly_fields(self, request, obj=None):
|
||||
if obj:
|
||||
return ['program', 'date', 'frequency']
|
||||
return ["program", "date", "frequency"]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
@admin.register(Stream)
|
||||
class StreamAdmin(admin.ModelAdmin):
|
||||
list_display = ('id', 'program', 'delay', 'begin', 'end')
|
||||
|
||||
|
||||
list_display = ("id", "program", "delay", "begin", "end")
|
||||
|
|
|
@ -1,40 +1,48 @@
|
|||
import math
|
||||
|
||||
from adminsortable2.admin import SortableAdminBase
|
||||
from django.contrib import admin
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from adminsortable2.admin import SortableAdminBase, SortableInlineAdminMixin
|
||||
|
||||
from ..models import Sound, Track
|
||||
|
||||
|
||||
class TrackInline(admin.TabularInline):
|
||||
template = 'admin/aircox/playlist_inline.html'
|
||||
template = "admin/aircox/playlist_inline.html"
|
||||
model = Track
|
||||
extra = 0
|
||||
fields = ('position', 'artist', 'title', 'tags', 'album', 'year', 'info')
|
||||
fields = ("position", "artist", "title", "tags", "album", "year", "info")
|
||||
|
||||
list_display = ['artist', 'album', 'title', 'tags', 'related']
|
||||
list_filter = ['artist', 'album', 'title', 'tags']
|
||||
list_display = ["artist", "album", "title", "tags", "related"]
|
||||
list_filter = ["artist", "album", "title", "tags"]
|
||||
|
||||
|
||||
class SoundTrackInline(TrackInline):
|
||||
fields = TrackInline.fields + ('timestamp',)
|
||||
fields = TrackInline.fields + ("timestamp",)
|
||||
|
||||
|
||||
class SoundInline(admin.TabularInline):
|
||||
model = Sound
|
||||
fields = ['type', 'name', 'audio', 'duration', 'is_good_quality',
|
||||
'is_public', 'is_downloadable']
|
||||
readonly_fields = ['type', 'audio', 'duration', 'is_good_quality']
|
||||
fields = [
|
||||
"type",
|
||||
"name",
|
||||
"audio",
|
||||
"duration",
|
||||
"is_good_quality",
|
||||
"is_public",
|
||||
"is_downloadable",
|
||||
]
|
||||
readonly_fields = ["type", "audio", "duration", "is_good_quality"]
|
||||
extra = 0
|
||||
max_num = 0
|
||||
|
||||
def audio(self, obj):
|
||||
return mark_safe('<audio src="{}" controls></audio>'
|
||||
.format(obj.file.url))
|
||||
audio.short_description = _('Audio')
|
||||
return mark_safe(
|
||||
'<audio src="{}" controls></audio>'.format(obj.file.url)
|
||||
)
|
||||
|
||||
audio.short_description = _("Audio")
|
||||
|
||||
def get_queryset(self, request):
|
||||
return super().get_queryset(request).available()
|
||||
|
@ -43,63 +51,99 @@ class SoundInline(admin.TabularInline):
|
|||
@admin.register(Sound)
|
||||
class SoundAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
fields = None
|
||||
list_display = ['id', 'name', 'related',
|
||||
'type', 'duration', 'is_public', 'is_good_quality',
|
||||
'is_downloadable', 'audio']
|
||||
list_filter = ('type', 'is_good_quality', 'is_public')
|
||||
list_editable = ['name', 'is_public', 'is_downloadable']
|
||||
|
||||
search_fields = ['name', 'program__title']
|
||||
fieldsets = [
|
||||
(None, {'fields': ['name', 'file', 'type', 'program', 'episode']}),
|
||||
(None, {'fields': ['duration', 'is_public', 'is_downloadable',
|
||||
'is_good_quality', 'mtime']}),
|
||||
list_display = [
|
||||
"id",
|
||||
"name",
|
||||
"related",
|
||||
"type",
|
||||
"duration",
|
||||
"is_public",
|
||||
"is_good_quality",
|
||||
"is_downloadable",
|
||||
"audio",
|
||||
]
|
||||
readonly_fields = ('file', 'duration', 'type')
|
||||
list_filter = ("type", "is_good_quality", "is_public")
|
||||
list_editable = ["name", "is_public", "is_downloadable"]
|
||||
|
||||
search_fields = ["name", "program__title"]
|
||||
fieldsets = [
|
||||
(None, {"fields": ["name", "file", "type", "program", "episode"]}),
|
||||
(
|
||||
None,
|
||||
{
|
||||
"fields": [
|
||||
"duration",
|
||||
"is_public",
|
||||
"is_downloadable",
|
||||
"is_good_quality",
|
||||
"mtime",
|
||||
]
|
||||
},
|
||||
),
|
||||
]
|
||||
readonly_fields = ("file", "duration", "type")
|
||||
inlines = [SoundTrackInline]
|
||||
|
||||
def related(self, obj):
|
||||
# TODO: link to episode or program edit
|
||||
return obj.episode.title if obj.episode else\
|
||||
obj.program.title if obj.program else ''
|
||||
related.short_description = _('Program / Episode')
|
||||
return (
|
||||
obj.episode.title
|
||||
if obj.episode
|
||||
else obj.program.title
|
||||
if obj.program
|
||||
else ""
|
||||
)
|
||||
|
||||
related.short_description = _("Program / Episode")
|
||||
|
||||
def audio(self, obj):
|
||||
return mark_safe('<audio src="{}" controls></audio>'
|
||||
.format(obj.file.url)) \
|
||||
if obj.type != Sound.TYPE_REMOVED else ''
|
||||
audio.short_description = _('Audio')
|
||||
return (
|
||||
mark_safe('<audio src="{}" controls></audio>'.format(obj.file.url))
|
||||
if obj.type != Sound.TYPE_REMOVED
|
||||
else ""
|
||||
)
|
||||
|
||||
def add_view(self, request, form_url='', context=None):
|
||||
audio.short_description = _("Audio")
|
||||
|
||||
def add_view(self, request, form_url="", context=None):
|
||||
context = context or {}
|
||||
context['init_app'] = True
|
||||
context['init_el'] = '#inline-tracks'
|
||||
context['track_timestamp'] = True
|
||||
context["init_app"] = True
|
||||
context["init_el"] = "#inline-tracks"
|
||||
context["track_timestamp"] = True
|
||||
return super().add_view(request, form_url, context)
|
||||
|
||||
def change_view(self, request, object_id, form_url='', context=None):
|
||||
def change_view(self, request, object_id, form_url="", context=None):
|
||||
context = context or {}
|
||||
context['init_app'] = True
|
||||
context['init_el'] = '#inline-tracks'
|
||||
context['track_timestamp'] = True
|
||||
context["init_app"] = True
|
||||
context["init_el"] = "#inline-tracks"
|
||||
context["track_timestamp"] = True
|
||||
return super().change_view(request, object_id, form_url, context)
|
||||
|
||||
|
||||
@admin.register(Track)
|
||||
class TrackAdmin(admin.ModelAdmin):
|
||||
def tag_list(self, obj):
|
||||
return u", ".join(o.name for o in obj.tags.all())
|
||||
return ", ".join(o.name for o in obj.tags.all())
|
||||
|
||||
list_display = ['pk', 'artist', 'title', 'tag_list', 'episode',
|
||||
'sound', 'ts']
|
||||
list_editable = ['artist', 'title']
|
||||
list_filter = ['artist', 'title', 'tags']
|
||||
list_display = [
|
||||
"pk",
|
||||
"artist",
|
||||
"title",
|
||||
"tag_list",
|
||||
"episode",
|
||||
"sound",
|
||||
"ts",
|
||||
]
|
||||
list_editable = ["artist", "title"]
|
||||
list_filter = ["artist", "title", "tags"]
|
||||
|
||||
search_fields = ['artist', 'title']
|
||||
search_fields = ["artist", "title"]
|
||||
fieldsets = [
|
||||
(_('Playlist'), {'fields': ['episode', 'sound', 'position',
|
||||
'timestamp']}),
|
||||
(_('Info'), {'fields': ['artist', 'title', 'info', 'tags']}),
|
||||
(
|
||||
_("Playlist"),
|
||||
{"fields": ["episode", "sound", "position", "timestamp"]},
|
||||
),
|
||||
(_("Info"), {"fields": ["artist", "title", "info", "tags"]}),
|
||||
]
|
||||
|
||||
# TODO on edit: readonly_fields = ['episode', 'sound']
|
||||
|
@ -107,10 +151,10 @@ class TrackAdmin(admin.ModelAdmin):
|
|||
def ts(self, obj):
|
||||
ts = obj.timestamp
|
||||
if ts is None:
|
||||
return ''
|
||||
return ""
|
||||
h = math.floor(ts / 3600)
|
||||
m = math.floor((ts - h) / 60)
|
||||
s = ts-h*3600-m*60
|
||||
return '{:0>2}:{:0>2}:{:0>2}'.format(h, m, s)
|
||||
s = ts - h * 3600 - m * 60
|
||||
return "{:0>2}:{:0>2}:{:0>2}".format(h, m, s)
|
||||
|
||||
ts.short_description = _('timestamp')
|
||||
ts.short_description = _("timestamp")
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
from django.contrib import admin
|
||||
from adminsortable2.admin import SortableAdminBase
|
||||
from django.contrib import admin
|
||||
|
||||
from ..models import Port, Station
|
||||
from .page import NavItemInline
|
||||
|
||||
|
||||
__all__ = ['PortInline', 'StationAdmin']
|
||||
__all__ = ["PortInline", "StationAdmin"]
|
||||
|
||||
|
||||
class PortInline(admin.StackedInline):
|
||||
|
@ -15,7 +14,5 @@ class PortInline(admin.StackedInline):
|
|||
|
||||
@admin.register(Station)
|
||||
class StationAdmin(SortableAdminBase, admin.ModelAdmin):
|
||||
prepopulated_fields = {'slug': ('name',)}
|
||||
prepopulated_fields = {"slug": ("name",)}
|
||||
inlines = (PortInline, NavItemInline)
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,18 @@
|
|||
from django.contrib import admin
|
||||
from django.urls import path, include, reverse
|
||||
from django.urls import include, path, reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from rest_framework.routers import DefaultRouter
|
||||
|
||||
from .models import Comment, Diffusion, Program
|
||||
from .views.admin import StatisticsView
|
||||
|
||||
|
||||
__all__ = ['AdminSite']
|
||||
__all__ = ["AdminSite"]
|
||||
|
||||
|
||||
class AdminSite(admin.AdminSite):
|
||||
extra_urls = None
|
||||
tools = [
|
||||
(_('Statistics'), 'admin:tools-stats'),
|
||||
(_("Statistics"), "admin:tools-stats"),
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -25,41 +23,52 @@ class AdminSite(admin.AdminSite):
|
|||
|
||||
def each_context(self, request):
|
||||
context = super().each_context(request)
|
||||
context.update({
|
||||
# all programs
|
||||
'programs': Program.objects.active().values('pk', 'title') \
|
||||
.order_by('title'),
|
||||
# today's diffusions
|
||||
'diffusions': Diffusion.objects.date().order_by('start') \
|
||||
.select_related('episode'),
|
||||
# TODO: only for dashboard
|
||||
# last comments
|
||||
'comments': Comment.objects.order_by('-date')
|
||||
.select_related('page')[0:10],
|
||||
})
|
||||
context.update(
|
||||
{
|
||||
# all programs
|
||||
"programs": Program.objects.active()
|
||||
.values("pk", "title")
|
||||
.order_by("title"),
|
||||
# today's diffusions
|
||||
"diffusions": Diffusion.objects.date()
|
||||
.order_by("start")
|
||||
.select_related("episode"),
|
||||
# TODO: only for dashboard
|
||||
# last comments
|
||||
"comments": Comment.objects.order_by("-date").select_related(
|
||||
"page"
|
||||
)[0:10],
|
||||
}
|
||||
)
|
||||
return context
|
||||
|
||||
def get_urls(self):
|
||||
urls = [
|
||||
path('api/', include((self.router.urls, 'api'))),
|
||||
path('tools/statistics/',
|
||||
self.admin_view(StatisticsView.as_view()),
|
||||
name='tools-stats'),
|
||||
path('tools/statistics/<date:date>/',
|
||||
self.admin_view(StatisticsView.as_view()),
|
||||
name='tools-stats'),
|
||||
] + self.extra_urls + super().get_urls()
|
||||
urls = (
|
||||
[
|
||||
path("api/", include((self.router.urls, "api"))),
|
||||
path(
|
||||
"tools/statistics/",
|
||||
self.admin_view(StatisticsView.as_view()),
|
||||
name="tools-stats",
|
||||
),
|
||||
path(
|
||||
"tools/statistics/<date:date>/",
|
||||
self.admin_view(StatisticsView.as_view()),
|
||||
name="tools-stats",
|
||||
),
|
||||
]
|
||||
+ self.extra_urls
|
||||
+ super().get_urls()
|
||||
)
|
||||
return urls
|
||||
|
||||
def get_tools(self):
|
||||
return [(label, reverse(url)) for label, url in self.tools]
|
||||
|
||||
def route_view(self, url, view, name, admin_view=True, label=None):
|
||||
self.extra_urls.append(path(
|
||||
url, self.admin_view(view) if admin_view else view, name=name
|
||||
))
|
||||
self.extra_urls.append(
|
||||
path(url, self.admin_view(view) if admin_view else view, name=name)
|
||||
)
|
||||
|
||||
if label:
|
||||
self.tools.append((label, 'admin:' + name))
|
||||
|
||||
|
||||
self.tools.append((label, "admin:" + name))
|
||||
|
|
|
@ -3,11 +3,9 @@ from django.contrib.admin.apps import AdminConfig
|
|||
|
||||
|
||||
class AircoxConfig(AppConfig):
|
||||
name = 'aircox'
|
||||
verbose_name = 'Aircox'
|
||||
name = "aircox"
|
||||
verbose_name = "Aircox"
|
||||
|
||||
|
||||
class AircoxAdminConfig(AdminConfig):
|
||||
default_site = 'aircox.admin_site.AdminSite'
|
||||
|
||||
|
||||
default_site = "aircox.admin_site.AdminSite"
|
||||
|
|
|
@ -1,50 +1,61 @@
|
|||
import datetime
|
||||
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.urls.converters import StringConverter
|
||||
from django.utils.safestring import mark_safe
|
||||
|
||||
from .utils import str_to_date
|
||||
__all__ = ("PagePathConverter", "WeekConverter", "DateConverter")
|
||||
|
||||
|
||||
class PagePathConverter(StringConverter):
|
||||
""" Match path for pages, including surrounding slashes. """
|
||||
regex = r'/?|([-_a-zA-Z0-9]+/)*?'
|
||||
"""Match path for pages, including surrounding slashes."""
|
||||
|
||||
regex = r"/?|([-_a-zA-Z0-9]+/)*?"
|
||||
|
||||
def to_python(self, value):
|
||||
if not value or value[0] != '/':
|
||||
value = '/' + value
|
||||
if len(value) > 1 and value[-1] != '/':
|
||||
value = value + '/'
|
||||
if not value or value[0] != "/":
|
||||
value = "/" + value
|
||||
if len(value) > 1 and value[-1] != "/":
|
||||
value = value + "/"
|
||||
return value
|
||||
|
||||
def to_url(self, value):
|
||||
if value[0] == '/':
|
||||
if value[0] == "/":
|
||||
value = value[1:]
|
||||
if value[-1] != '/':
|
||||
value = value + '/'
|
||||
if value[-1] != "/":
|
||||
value = value + "/"
|
||||
return mark_safe(value)
|
||||
|
||||
|
||||
class WeekConverter:
|
||||
""" Converter for date as YYYYY/WW """
|
||||
regex = r'[0-9]{4}/[0-9]{2}'
|
||||
"""Converter for date as YYYYY/WW."""
|
||||
|
||||
regex = r"[0-9]{4}/[0-9]{2}"
|
||||
|
||||
def to_python(self, value):
|
||||
return datetime.datetime.strptime(value + '/1', '%G/%V/%u').date()
|
||||
return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date()
|
||||
|
||||
def to_url(self, value):
|
||||
return value if isinstance(value, str) else \
|
||||
'{:04d}/{:02d}'.format(*value.isocalendar())
|
||||
return (
|
||||
value
|
||||
if isinstance(value, str)
|
||||
else "{:04d}/{:02d}".format(*value.isocalendar())
|
||||
)
|
||||
|
||||
|
||||
class DateConverter:
|
||||
""" Converter for date as YYYY/MM/DD """
|
||||
regex = r'[0-9]{4}/[0-9]{2}/[0-9]{2}'
|
||||
"""Converter for date as YYYY/MM/DD."""
|
||||
|
||||
regex = r"[0-9]{4}/[0-9]{2}/[0-9]{2}"
|
||||
|
||||
def to_python(self, value):
|
||||
value = value.split('/')[:3]
|
||||
value = value.split("/")[:3]
|
||||
return datetime.date(int(value[0]), int(value[1]), int(value[2]))
|
||||
|
||||
def to_url(self, value):
|
||||
return value if isinstance(value, str) else \
|
||||
'{:04d}/{:02d}/{:02d}'.format(value.year, value.month, value.day)
|
||||
return (
|
||||
value
|
||||
if isinstance(value, str)
|
||||
else "{:04d}/{:02d}/{:02d}".format(
|
||||
value.year, value.month, value.day
|
||||
)
|
||||
)
|
||||
|
|
|
@ -1,17 +1,17 @@
|
|||
from django.utils.translation import gettext_lazy as _
|
||||
import django_filters as filters
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from .models import Page, Episode
|
||||
from .models import Episode, Page
|
||||
|
||||
|
||||
class PageFilters(filters.FilterSet):
|
||||
q = filters.CharFilter(method='search_filter', label=_('Search'))
|
||||
|
||||
q = filters.CharFilter(method="search_filter", label=_("Search"))
|
||||
|
||||
class Meta:
|
||||
model = Page
|
||||
fields = {
|
||||
'category__id': ['in'],
|
||||
'pub_date': ['exact', 'gte', 'lte'],
|
||||
"category__id": ["in"],
|
||||
"pub_date": ["exact", "gte", "lte"],
|
||||
}
|
||||
|
||||
def search_filter(self, queryset, name, value):
|
||||
|
@ -19,7 +19,9 @@ class PageFilters(filters.FilterSet):
|
|||
|
||||
|
||||
class EpisodeFilters(PageFilters):
|
||||
podcast = filters.BooleanFilter(method='podcast_filter', label=_('Podcast'))
|
||||
podcast = filters.BooleanFilter(
|
||||
method="podcast_filter", label=_("Podcast")
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Episode
|
||||
|
@ -29,4 +31,3 @@ class EpisodeFilters(PageFilters):
|
|||
if value:
|
||||
return queryset.filter(sound__is_public=True).distinct()
|
||||
return queryset.filter(sound__isnull=True)
|
||||
|
||||
|
|
|
@ -9,12 +9,10 @@ class CommentForm(ModelForm):
|
|||
email = forms.EmailField(required=False)
|
||||
content = forms.CharField(widget=forms.Textarea())
|
||||
|
||||
nickname.widget.attrs.update({'class': 'input'})
|
||||
email.widget.attrs.update({'class': 'input'})
|
||||
content.widget.attrs.update({'class': 'textarea'})
|
||||
nickname.widget.attrs.update({"class": "input"})
|
||||
email.widget.attrs.update({"class": "input"})
|
||||
content.widget.attrs.update({"class": "textarea"})
|
||||
|
||||
class Meta:
|
||||
model = Comment
|
||||
fields = ['nickname', 'email', 'content']
|
||||
|
||||
|
||||
fields = ["nickname", "email", "content"]
|
||||
|
|
|
@ -1,41 +1,48 @@
|
|||
"""Handle archiving of logs in order to keep database light and fast.
|
||||
|
||||
The logs are archived in gzip files, per day.
|
||||
"""
|
||||
Handle archiving of logs in order to keep database light and fast. The
|
||||
logs are archived in gzip files, per day.
|
||||
"""
|
||||
from argparse import RawTextHelpFormatter
|
||||
import datetime
|
||||
import logging
|
||||
from argparse import RawTextHelpFormatter
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils import timezone as tz
|
||||
|
||||
import aircox.settings as settings
|
||||
from aircox.models import Log, Station
|
||||
from aircox.models import Log
|
||||
from aircox.models.log import LogArchiver
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
class Command (BaseCommand):
|
||||
__all__ = ("Command",)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = __doc__
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.formatter_class = RawTextHelpFormatter
|
||||
group = parser.add_argument_group('actions')
|
||||
group = parser.add_argument_group("actions")
|
||||
group.add_argument(
|
||||
'-a', '--age', type=int,
|
||||
"-a",
|
||||
"--age",
|
||||
type=int,
|
||||
default=settings.AIRCOX_LOGS_ARCHIVES_AGE,
|
||||
help='minimal age in days of logs to archive. Default is '
|
||||
'settings.AIRCOX_LOGS_ARCHIVES_AGE'
|
||||
help="minimal age in days of logs to archive. Default is "
|
||||
"settings.AIRCOX_LOGS_ARCHIVES_AGE",
|
||||
)
|
||||
group.add_argument(
|
||||
'-k', '--keep', action='store_true',
|
||||
help='keep logs in database instead of deleting them'
|
||||
"-k",
|
||||
"--keep",
|
||||
action="store_true",
|
||||
help="keep logs in database instead of deleting them",
|
||||
)
|
||||
|
||||
def handle(self, *args, age, keep, **options):
|
||||
date = datetime.date.today() - tz.timedelta(days=age)
|
||||
# FIXME: mysql support?
|
||||
logger.info('archive logs for %s and earlier', date)
|
||||
logger.info("archive logs for %s and earlier", date)
|
||||
count = LogArchiver().archive(Log.objects.filter(date__date__lte=date))
|
||||
logger.info('total log archived %d', count)
|
||||
logger.info("total log archived %d", count)
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""
|
||||
Manage diffusions using schedules, to update, clean up or check diffusions.
|
||||
"""Manage diffusions using schedules, to update, clean up or check diffusions.
|
||||
|
||||
A generated diffusion can be unconfirmed, that means that the user must confirm
|
||||
it by changing its type to "normal". The behaviour is controlled using
|
||||
|
@ -13,9 +12,9 @@ from django.core.management.base import BaseCommand
|
|||
from django.db import transaction
|
||||
from django.utils import timezone as tz
|
||||
|
||||
from aircox.models import Schedule, Diffusion
|
||||
from aircox.models import Diffusion, Schedule
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
class Actions:
|
||||
|
@ -26,20 +25,28 @@ class Actions:
|
|||
|
||||
def update(self):
|
||||
episodes, diffusions = [], []
|
||||
for schedule in Schedule.objects.filter(program__active=True,
|
||||
initial__isnull=True):
|
||||
for schedule in Schedule.objects.filter(
|
||||
program__active=True, initial__isnull=True
|
||||
):
|
||||
eps, diffs = schedule.diffusions_of_month(self.date)
|
||||
if eps:
|
||||
episodes += eps
|
||||
if diffs:
|
||||
diffusions += diffs
|
||||
|
||||
logger.info('[update] %s: %d episodes, %d diffusions and reruns',
|
||||
str(schedule), len(eps), len(diffs))
|
||||
logger.info(
|
||||
"[update] %s: %d episodes, %d diffusions and reruns",
|
||||
str(schedule),
|
||||
len(eps),
|
||||
len(diffs),
|
||||
)
|
||||
|
||||
with transaction.atomic():
|
||||
logger.info('[update] save %d episodes and %d diffusions',
|
||||
len(episodes), len(diffusions))
|
||||
logger.info(
|
||||
"[update] save %d episodes and %d diffusions",
|
||||
len(episodes),
|
||||
len(diffusions),
|
||||
)
|
||||
for episode in episodes:
|
||||
episode.save()
|
||||
for diffusion in diffusions:
|
||||
|
@ -48,9 +55,10 @@ class Actions:
|
|||
diffusion.save()
|
||||
|
||||
def clean(self):
|
||||
qs = Diffusion.objects.filter(type=Diffusion.TYPE_UNCONFIRMED,
|
||||
start__lt=self.date)
|
||||
logger.info('[clean] %d diffusions will be removed', qs.count())
|
||||
qs = Diffusion.objects.filter(
|
||||
type=Diffusion.TYPE_UNCONFIRMED, start__lt=self.date
|
||||
)
|
||||
logger.info("[clean] %d diffusions will be removed", qs.count())
|
||||
qs.delete()
|
||||
|
||||
|
||||
|
@ -61,45 +69,57 @@ class Command(BaseCommand):
|
|||
parser.formatter_class = RawTextHelpFormatter
|
||||
today = datetime.date.today()
|
||||
|
||||
group = parser.add_argument_group('action')
|
||||
group = parser.add_argument_group("action")
|
||||
group.add_argument(
|
||||
'-u', '--update', action='store_true',
|
||||
help='generate (unconfirmed) diffusions for the given month. '
|
||||
'These diffusions must be confirmed manually by changing '
|
||||
'their type to "normal"'
|
||||
"-u",
|
||||
"--update",
|
||||
action="store_true",
|
||||
help="generate (unconfirmed) diffusions for the given month. "
|
||||
"These diffusions must be confirmed manually by changing "
|
||||
'their type to "normal"',
|
||||
)
|
||||
group.add_argument(
|
||||
'-l', '--clean', action='store_true',
|
||||
help='remove unconfirmed diffusions older than the given month'
|
||||
"-l",
|
||||
"--clean",
|
||||
action="store_true",
|
||||
help="remove unconfirmed diffusions older than the given month",
|
||||
)
|
||||
|
||||
group = parser.add_argument_group('date')
|
||||
group = parser.add_argument_group("date")
|
||||
group.add_argument(
|
||||
'--year', type=int, default=today.year,
|
||||
help='used by update, default is today\'s year')
|
||||
"--year",
|
||||
type=int,
|
||||
default=today.year,
|
||||
help="used by update, default is today's year",
|
||||
)
|
||||
group.add_argument(
|
||||
'--month', type=int, default=today.month,
|
||||
help='used by update, default is today\'s month')
|
||||
"--month",
|
||||
type=int,
|
||||
default=today.month,
|
||||
help="used by update, default is today's month",
|
||||
)
|
||||
group.add_argument(
|
||||
'--next-month', action='store_true',
|
||||
help='set the date to the next month of given date'
|
||||
' (if next month from today'
|
||||
"--next-month",
|
||||
action="store_true",
|
||||
help="set the date to the next month of given date"
|
||||
" (if next month from today",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
date = datetime.date(year=options['year'], month=options['month'],
|
||||
day=1)
|
||||
if options.get('next_month'):
|
||||
month = options.get('month')
|
||||
date = datetime.date(
|
||||
year=options["year"], month=options["month"], day=1
|
||||
)
|
||||
if options.get("next_month"):
|
||||
month = options.get("month")
|
||||
date += tz.timedelta(days=28)
|
||||
if date.month == month:
|
||||
date += tz.timedelta(days=28)
|
||||
date = date.replace(day=1)
|
||||
|
||||
actions = Actions(date)
|
||||
if options.get('update'):
|
||||
if options.get("update"):
|
||||
actions.update()
|
||||
if options.get('clean'):
|
||||
if options.get("clean"):
|
||||
actions.clean()
|
||||
if options.get('check'):
|
||||
if options.get("check"):
|
||||
actions.check()
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""
|
||||
Import one or more playlist for the given sound. Attach it to the provided
|
||||
"""Import one or more playlist for the given sound. Attach it to the provided
|
||||
sound.
|
||||
|
||||
Playlists are in CSV format, where columns are separated with a
|
||||
|
@ -10,23 +9,22 @@ The order of the elements is: {settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS}
|
|||
If 'minutes' or 'seconds' are given, position will be expressed as timed
|
||||
position, instead of position in playlist.
|
||||
"""
|
||||
import os
|
||||
import csv
|
||||
import logging
|
||||
import os
|
||||
from argparse import RawTextHelpFormatter
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from aircox import settings
|
||||
from aircox.models import *
|
||||
from aircox.models import Sound, Track
|
||||
|
||||
__doc__ = __doc__.format(settings=settings)
|
||||
|
||||
__all__ = ('PlaylistImport', 'Command')
|
||||
__all__ = ("PlaylistImport", "Command")
|
||||
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
class PlaylistImport:
|
||||
|
@ -45,62 +43,74 @@ class PlaylistImport:
|
|||
|
||||
def run(self):
|
||||
self.read()
|
||||
if self.track_kwargs.get('sound') is not None:
|
||||
if self.track_kwargs.get("sound") is not None:
|
||||
self.make_playlist()
|
||||
|
||||
def read(self):
|
||||
if not os.path.exists(self.path):
|
||||
return True
|
||||
with open(self.path, 'r') as file:
|
||||
logger.info('start reading csv ' + self.path)
|
||||
self.data = list(csv.DictReader(
|
||||
(row for row in file
|
||||
if not (row.startswith('#') or row.startswith('\ufeff#'))
|
||||
and row.strip()),
|
||||
fieldnames=settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS,
|
||||
delimiter=settings.AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER,
|
||||
quotechar=settings.AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
|
||||
))
|
||||
with open(self.path, "r") as file:
|
||||
logger.info("start reading csv " + self.path)
|
||||
self.data = list(
|
||||
csv.DictReader(
|
||||
(
|
||||
row
|
||||
for row in file
|
||||
if not (
|
||||
row.startswith("#") or row.startswith("\ufeff#")
|
||||
)
|
||||
and row.strip()
|
||||
),
|
||||
fieldnames=settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS,
|
||||
delimiter=settings.AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER,
|
||||
quotechar=settings.AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
|
||||
)
|
||||
)
|
||||
|
||||
def make_playlist(self):
|
||||
"""Make a playlist from the read data, and return it.
|
||||
|
||||
If save is true, save it into the database
|
||||
"""
|
||||
Make a playlist from the read data, and return it. If save is
|
||||
true, save it into the database
|
||||
"""
|
||||
if self.track_kwargs.get('sound') is None:
|
||||
logger.error('related track\'s sound is missing. Skip import of ' +
|
||||
self.path + '.')
|
||||
if self.track_kwargs.get("sound") is None:
|
||||
logger.error(
|
||||
"related track's sound is missing. Skip import of "
|
||||
+ self.path
|
||||
+ "."
|
||||
)
|
||||
return
|
||||
|
||||
maps = settings.AIRCOX_IMPORT_PLAYLIST_CSV_COLS
|
||||
tracks = []
|
||||
|
||||
logger.info('parse csv file ' + self.path)
|
||||
has_timestamp = ('minutes' or 'seconds') in maps
|
||||
logger.info("parse csv file " + self.path)
|
||||
has_timestamp = ("minutes" or "seconds") in maps
|
||||
for index, line in enumerate(self.data):
|
||||
if ('title' or 'artist') not in line:
|
||||
if ("title" or "artist") not in line:
|
||||
return
|
||||
try:
|
||||
timestamp = int(line.get('minutes') or 0) * 60 + \
|
||||
int(line.get('seconds') or 0) \
|
||||
if has_timestamp else None
|
||||
timestamp = (
|
||||
int(line.get("minutes") or 0) * 60
|
||||
+ int(line.get("seconds") or 0)
|
||||
if has_timestamp
|
||||
else None
|
||||
)
|
||||
|
||||
track, created = Track.objects.get_or_create(
|
||||
title=line.get('title'),
|
||||
artist=line.get('artist'),
|
||||
title=line.get("title"),
|
||||
artist=line.get("artist"),
|
||||
position=index,
|
||||
**self.track_kwargs
|
||||
)
|
||||
track.timestamp = timestamp
|
||||
track.info = line.get('info')
|
||||
tags = line.get('tags')
|
||||
track.info = line.get("info")
|
||||
tags = line.get("tags")
|
||||
if tags:
|
||||
track.tags.add(*tags.lower().split(','))
|
||||
track.tags.add(*tags.lower().split(","))
|
||||
except Exception as err:
|
||||
logger.warning(
|
||||
'an error occured for track {index}, it may not '
|
||||
'have been saved: {err}'
|
||||
.format(index=index, err=err)
|
||||
"an error occured for track {index}, it may not "
|
||||
"have been saved: {err}".format(index=index, err=err)
|
||||
)
|
||||
continue
|
||||
|
||||
|
@ -116,33 +126,41 @@ class Command(BaseCommand):
|
|||
def add_arguments(self, parser):
|
||||
parser.formatter_class = RawTextHelpFormatter
|
||||
parser.add_argument(
|
||||
'path', metavar='PATH', type=str,
|
||||
help='path of the input playlist to read'
|
||||
"path",
|
||||
metavar="PATH",
|
||||
type=str,
|
||||
help="path of the input playlist to read",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--sound', '-s', type=str,
|
||||
help='generate a playlist for the sound of the given path. '
|
||||
'If not given, try to match a sound with the same path.'
|
||||
"--sound",
|
||||
"-s",
|
||||
type=str,
|
||||
help="generate a playlist for the sound of the given path. "
|
||||
"If not given, try to match a sound with the same path.",
|
||||
)
|
||||
|
||||
def handle(self, path, *args, **options):
|
||||
# FIXME: absolute/relative path of sounds vs given path
|
||||
if options.get('sound'):
|
||||
sound = Sound.objects.filter(file__icontains=options.get('sound'))\
|
||||
.first()
|
||||
if options.get("sound"):
|
||||
sound = Sound.objects.filter(
|
||||
file__icontains=options.get("sound")
|
||||
).first()
|
||||
else:
|
||||
path_, ext = os.path.splitext(path)
|
||||
sound = Sound.objects.filter(path__icontains=path_).first()
|
||||
|
||||
if not sound:
|
||||
logger.error('no sound found in the database for the path '
|
||||
'{path}'.format(path=path))
|
||||
logger.error(
|
||||
"no sound found in the database for the path "
|
||||
"{path}".format(path=path)
|
||||
)
|
||||
return
|
||||
|
||||
# FIXME: auto get sound.episode if any
|
||||
importer = PlaylistImport(path, sound=sound).run()
|
||||
for track in importer.tracks:
|
||||
logger.info('track #{pos} imported: {title}, by {artist}'.format(
|
||||
pos=track.position, title=track.title, artist=track.artist
|
||||
))
|
||||
|
||||
logger.info(
|
||||
"track #{pos} imported: {title}, by {artist}".format(
|
||||
pos=track.position, title=track.title, artist=track.artist
|
||||
)
|
||||
)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#! /usr/bin/env python3
|
||||
|
||||
"""
|
||||
Monitor sound files; For each program, check for:
|
||||
"""Monitor sound files; For each program, check for:
|
||||
|
||||
- new files;
|
||||
- deleted files;
|
||||
- differences between files and sound;
|
||||
|
@ -23,23 +23,22 @@ To check quality of files, call the command sound_quality_check using the
|
|||
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
||||
Sox (and soxi).
|
||||
"""
|
||||
from argparse import RawTextHelpFormatter
|
||||
import concurrent.futures as futures
|
||||
import atexit
|
||||
import concurrent.futures as futures
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
|
||||
from watchdog.observers import Observer
|
||||
from argparse import RawTextHelpFormatter
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from watchdog.observers import Observer
|
||||
|
||||
from aircox import settings
|
||||
from aircox.models import Program, Sound
|
||||
from aircox.management.sound_file import SoundFile
|
||||
from aircox.management.sound_monitor import MonitorHandler
|
||||
from aircox.models import Program, Sound
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
|
@ -47,39 +46,42 @@ class Command(BaseCommand):
|
|||
|
||||
def report(self, program=None, component=None, *content):
|
||||
if not component:
|
||||
logger.info('%s: %s', str(program),
|
||||
' '.join([str(c) for c in content]))
|
||||
logger.info(
|
||||
"%s: %s", str(program), " ".join([str(c) for c in content])
|
||||
)
|
||||
else:
|
||||
logger.info('%s, %s: %s', str(program), str(component),
|
||||
' '.join([str(c) for c in content]))
|
||||
logger.info(
|
||||
"%s, %s: %s",
|
||||
str(program),
|
||||
str(component),
|
||||
" ".join([str(c) for c in content]),
|
||||
)
|
||||
|
||||
def scan(self):
|
||||
"""
|
||||
For all programs, scan dirs
|
||||
"""
|
||||
logger.info('scan all programs...')
|
||||
"""For all programs, scan dirs."""
|
||||
logger.info("scan all programs...")
|
||||
programs = Program.objects.filter()
|
||||
|
||||
dirs = []
|
||||
for program in programs:
|
||||
logger.info('#%d %s', program.id, program.title)
|
||||
logger.info("#%d %s", program.id, program.title)
|
||||
self.scan_for_program(
|
||||
program, settings.AIRCOX_SOUND_ARCHIVES_SUBDIR,
|
||||
program,
|
||||
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR,
|
||||
type=Sound.TYPE_ARCHIVE,
|
||||
)
|
||||
self.scan_for_program(
|
||||
program, settings.AIRCOX_SOUND_EXCERPTS_SUBDIR,
|
||||
program,
|
||||
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR,
|
||||
type=Sound.TYPE_EXCERPT,
|
||||
)
|
||||
dirs.append(os.path.join(program.abspath))
|
||||
return dirs
|
||||
|
||||
def scan_for_program(self, program, subdir, **sound_kwargs):
|
||||
"""
|
||||
Scan a given directory that is associated to the given program, and
|
||||
update sounds information.
|
||||
"""
|
||||
logger.info('- %s/', subdir)
|
||||
"""Scan a given directory that is associated to the given program, and
|
||||
update sounds information."""
|
||||
logger.info("- %s/", subdir)
|
||||
if not program.ensure_dir(subdir):
|
||||
return
|
||||
|
||||
|
@ -97,37 +99,49 @@ class Command(BaseCommand):
|
|||
sounds.append(sound_file.sound.pk)
|
||||
|
||||
# sounds in db & unchecked
|
||||
sounds = Sound.objects.filter(file__startswith=subdir). \
|
||||
exclude(pk__in=sounds)
|
||||
sounds = Sound.objects.filter(file__startswith=subdir).exclude(
|
||||
pk__in=sounds
|
||||
)
|
||||
self.check_sounds(sounds, program=program)
|
||||
|
||||
def check_sounds(self, qs, **sync_kwargs):
|
||||
""" Only check for the sound existence or update """
|
||||
"""Only check for the sound existence or update."""
|
||||
# check files
|
||||
for sound in qs:
|
||||
if sound.check_on_file():
|
||||
SoundFile(sound.file.path).sync(sound=sound, **sync_kwargs)
|
||||
|
||||
def monitor(self):
|
||||
""" Run in monitor mode """
|
||||
"""Run in monitor mode."""
|
||||
with futures.ThreadPoolExecutor() as pool:
|
||||
archives_handler = MonitorHandler(
|
||||
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR, pool,
|
||||
type=Sound.TYPE_ARCHIVE)
|
||||
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR,
|
||||
pool,
|
||||
type=Sound.TYPE_ARCHIVE,
|
||||
)
|
||||
excerpts_handler = MonitorHandler(
|
||||
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR, pool,
|
||||
type=Sound.TYPE_EXCERPT)
|
||||
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR,
|
||||
pool,
|
||||
type=Sound.TYPE_EXCERPT,
|
||||
)
|
||||
|
||||
observer = Observer()
|
||||
observer.schedule(archives_handler, settings.AIRCOX_PROGRAMS_DIR_ABS,
|
||||
recursive=True)
|
||||
observer.schedule(excerpts_handler, settings.AIRCOX_PROGRAMS_DIR_ABS,
|
||||
recursive=True)
|
||||
observer.schedule(
|
||||
archives_handler,
|
||||
settings.AIRCOX_PROGRAMS_DIR_ABS,
|
||||
recursive=True,
|
||||
)
|
||||
observer.schedule(
|
||||
excerpts_handler,
|
||||
settings.AIRCOX_PROGRAMS_DIR_ABS,
|
||||
recursive=True,
|
||||
)
|
||||
observer.start()
|
||||
|
||||
def leave():
|
||||
observer.stop()
|
||||
observer.join()
|
||||
|
||||
atexit.register(leave)
|
||||
|
||||
while True:
|
||||
|
@ -136,25 +150,31 @@ class Command(BaseCommand):
|
|||
def add_arguments(self, parser):
|
||||
parser.formatter_class = RawTextHelpFormatter
|
||||
parser.add_argument(
|
||||
'-q', '--quality_check', action='store_true',
|
||||
help='Enable quality check using sound_quality_check on all '
|
||||
'sounds marqued as not good'
|
||||
"-q",
|
||||
"--quality_check",
|
||||
action="store_true",
|
||||
help="Enable quality check using sound_quality_check on all "
|
||||
"sounds marqued as not good",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-s', '--scan', action='store_true',
|
||||
help='Scan programs directories for changes, plus check for a '
|
||||
' matching diffusion on sounds that have not been yet assigned'
|
||||
"-s",
|
||||
"--scan",
|
||||
action="store_true",
|
||||
help="Scan programs directories for changes, plus check for a "
|
||||
" matching diffusion on sounds that have not been yet assigned",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-m', '--monitor', action='store_true',
|
||||
help='Run in monitor mode, watch for modification in the filesystem '
|
||||
'and react in consequence'
|
||||
"-m",
|
||||
"--monitor",
|
||||
action="store_true",
|
||||
help="Run in monitor mode, watch for modification in the "
|
||||
"filesystem and react in consequence",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if options.get('scan'):
|
||||
if options.get("scan"):
|
||||
self.scan()
|
||||
#if options.get('quality_check'):
|
||||
# if options.get('quality_check'):
|
||||
# self.check_quality(check=(not options.get('scan')))
|
||||
if options.get('monitor'):
|
||||
if options.get("monitor"):
|
||||
self.monitor()
@ -1,17 +1,15 @@
"""
Analyse and check files using Sox, prints good and bad files.
"""
"""Analyse and check files using Sox, prints good and bad files."""
import logging
from argparse import RawTextHelpFormatter

from django.core.management.base import BaseCommand, CommandError

from aircox.management.sound_stats import SoxStats, SoundStats
from aircox.management.sound_stats import SoundStats, SoxStats

logger = logging.getLogger('aircox.commands')
logger = logging.getLogger("aircox.commands")


class Command (BaseCommand):
class Command(BaseCommand):
help = __doc__
sounds = None
@ -19,46 +17,61 @@ class Command (BaseCommand):
|
|||
parser.formatter_class = RawTextHelpFormatter
|
||||
|
||||
parser.add_argument(
|
||||
'files', metavar='FILE', type=str, nargs='+',
|
||||
help='file(s) to analyse'
|
||||
"files",
|
||||
metavar="FILE",
|
||||
type=str,
|
||||
nargs="+",
|
||||
help="file(s) to analyse",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-s', '--sample_length', type=int, default=120,
|
||||
help='size of sample to analyse in seconds. If not set (or 0), does'
|
||||
' not analyse by sample',
|
||||
"-s",
|
||||
"--sample_length",
|
||||
type=int,
|
||||
default=120,
|
||||
help="size of sample to analyse in seconds. If not set (or 0), "
|
||||
"does not analyse by sample",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-a', '--attribute', type=str,
|
||||
help='attribute name to use to check, that can be:\n' +
|
||||
', '.join(['"{}"'.format(attr) for attr in SoxStats.attributes])
|
||||
"-a",
|
||||
"--attribute",
|
||||
type=str,
|
||||
help="attribute name to use to check, that can be:\n"
|
||||
+ ", ".join(['"{}"'.format(attr) for attr in SoxStats.attributes]),
|
||||
)
|
||||
parser.add_argument(
|
||||
'-r', '--range', type=float, nargs=2,
|
||||
help='range of minimal and maximal accepted value such as: '
|
||||
'--range min max'
|
||||
"-r",
|
||||
"--range",
|
||||
type=float,
|
||||
nargs=2,
|
||||
help="range of minimal and maximal accepted value such as: "
|
||||
"--range min max",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-i', '--resume', action='store_true',
|
||||
help='print a resume of good and bad files'
|
||||
"-i",
|
||||
"--resume",
|
||||
action="store_true",
|
||||
help="print a resume of good and bad files",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# parameters
|
||||
minmax = options.get('range')
|
||||
minmax = options.get("range")
|
||||
if not minmax:
|
||||
raise CommandError('no range specified')
|
||||
raise CommandError("no range specified")
|
||||
|
||||
attr = options.get('attribute')
|
||||
attr = options.get("attribute")
|
||||
if not attr:
|
||||
raise CommandError('no attribute specified')
|
||||
raise CommandError("no attribute specified")
|
||||
|
||||
# sound analyse and checks
|
||||
self.sounds = [SoundStats(path, options.get('sample_length'))
|
||||
for path in options.get('files')]
|
||||
self.sounds = [
|
||||
SoundStats(path, options.get("sample_length"))
|
||||
for path in options.get("files")
|
||||
]
|
||||
self.bad = []
|
||||
self.good = []
|
||||
for sound in self.sounds:
|
||||
logger.info('analyse ' + sound.path)
|
||||
logger.info("analyse " + sound.path)
|
||||
sound.analyse()
|
||||
sound.check(attr, minmax[0], minmax[1])
|
||||
if sound.bad:
|
||||
|
@ -67,8 +80,8 @@ class Command (BaseCommand):
|
|||
self.good.append(sound)
|
||||
|
||||
# resume
|
||||
if options.get('resume'):
|
||||
if options.get("resume"):
|
||||
for sound in self.good:
|
||||
logger.info('\033[92m+ %s\033[0m', sound.path)
|
||||
logger.info("\033[92m+ %s\033[0m", sound.path)
|
||||
for sound in self.bad:
|
||||
logger.info('\033[91m+ %s\033[0m', sound.path)
|
||||
logger.info("\033[91m+ %s\033[0m", sound.path)
|
||||
|
|
|
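For reference, a minimal sketch of how the quality-check command above could be driven from Python, assuming it is registered as `sound_quality_check` (the name the monitor command's docstring refers to); the file name and the accepted range are illustrative only:

# Hypothetical invocation; command name and values are assumptions.
from django.core.management import call_command

call_command(
    "sound_quality_check",
    "archives/20230401_myshow.ogg",  # positional FILE argument(s)
    attribute="RMS lev dB",          # one of SoxStats.attributes
    range=[-32.0, -8.0],             # accepted min/max for that attribute
    sample_length=120,               # analyse in 120 s windows
    resume=True,                     # print the good/bad summary
)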
@ -1,7 +1,5 @@
|
|||
#! /usr/bin/env python3
|
||||
"""
|
||||
Provide SoundFile which is used to link between database and file system.
|
||||
|
||||
"""Provide SoundFile which is used to link between database and file system.
|
||||
|
||||
File name
|
||||
=========
|
||||
|
@ -22,28 +20,27 @@ To check quality of files, call the command sound_quality_check using the
|
|||
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
||||
Sox (and soxi).
|
||||
"""
|
||||
from datetime import date
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from datetime import date
|
||||
|
||||
import mutagen
|
||||
|
||||
from django.conf import settings as conf
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from aircox import utils
|
||||
from aircox.models import Program, Sound, Track
|
||||
|
||||
from .commands.import_playlist import PlaylistImport
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
class SoundFile:
|
||||
"""
|
||||
Handle synchronisation between sounds on files and database.
|
||||
"""
|
||||
"""Handle synchronisation between sounds on files and database."""
|
||||
|
||||
path = None
|
||||
info = None
|
||||
path_info = None
|
||||
|
@ -54,18 +51,22 @@ class SoundFile:
|
|||
|
||||
@property
|
||||
def sound_path(self):
|
||||
""" Relative path name """
|
||||
return self.path.replace(conf.MEDIA_ROOT + '/', '')
|
||||
"""Relative path name."""
|
||||
return self.path.replace(conf.MEDIA_ROOT + "/", "")
|
||||
|
||||
@property
|
||||
def episode(self):
|
||||
return self.sound and self.sound.episode
|
||||
|
||||
def sync(self, sound=None, program=None, deleted=False, keep_deleted=False,
|
||||
**kwargs):
|
||||
"""
|
||||
Update related sound model and save it.
|
||||
"""
|
||||
def sync(
|
||||
self,
|
||||
sound=None,
|
||||
program=None,
|
||||
deleted=False,
|
||||
keep_deleted=False,
|
||||
**kwargs
|
||||
):
|
||||
"""Update related sound model and save it."""
|
||||
if deleted:
|
||||
return self._on_delete(self.path, keep_deleted)
|
||||
|
||||
|
@ -73,26 +74,27 @@ class SoundFile:
|
|||
if not program:
|
||||
program = Program.get_from_path(self.path)
|
||||
logger.debug('program from path "%s" -> %s', self.path, program)
|
||||
kwargs['program_id'] = program.pk
|
||||
kwargs["program_id"] = program.pk
|
||||
|
||||
if sound:
|
||||
created = False
|
||||
else:
|
||||
sound, created = Sound.objects.get_or_create(
|
||||
file=self.sound_path, defaults=kwargs)
|
||||
file=self.sound_path, defaults=kwargs
|
||||
)
|
||||
|
||||
self.sound = sound
|
||||
self.path_info = self.read_path(self.path)
|
||||
|
||||
sound.program = program
|
||||
if created or sound.check_on_file():
|
||||
sound.name = self.path_info.get('name')
|
||||
sound.name = self.path_info.get("name")
|
||||
self.info = self.read_file_info()
|
||||
if self.info is not None:
|
||||
sound.duration = utils.seconds_to_time(self.info.info.length)
|
||||
|
||||
# check for episode
|
||||
if sound.episode is None and 'year' in self.path_info:
|
||||
if sound.episode is None and "year" in self.path_info:
|
||||
sound.episode = self.find_episode(sound, self.path_info)
|
||||
sound.save()
|
||||
|
||||
|
@ -114,8 +116,9 @@ class SoundFile:
|
|||
Sound.objects.path(self.path).delete()
|
||||
|
||||
def read_path(self, path):
|
||||
"""
|
||||
Parse path name returning dictionary of extracted info. It can contain:
|
||||
"""Parse path name returning dictionary of extracted info. It can
|
||||
contain:
|
||||
|
||||
- `year`, `month`, `day`: diffusion date
|
||||
- `hour`, `minute`: diffusion time
|
||||
- `n`: sound arbitrary number (used for sound ordering)
|
||||
|
@ -126,29 +129,29 @@ class SoundFile:
|
|||
reg_match = self._path_re.search(basename)
|
||||
if reg_match:
|
||||
info = reg_match.groupdict()
|
||||
for k in ('year', 'month', 'day', 'hour', 'minute', 'n'):
|
||||
for k in ("year", "month", "day", "hour", "minute", "n"):
|
||||
if info.get(k) is not None:
|
||||
info[k] = int(info[k])
|
||||
|
||||
name = info.get('name')
|
||||
info['name'] = name and self._into_name(name) or basename
|
||||
name = info.get("name")
|
||||
info["name"] = name and self._into_name(name) or basename
|
||||
else:
|
||||
info = {'name': basename}
|
||||
info = {"name": basename}
|
||||
return info
|
||||
|
||||
_path_re = re.compile(
|
||||
'^(?P<year>[0-9]{4})(?P<month>[0-9]{2})(?P<day>[0-9]{2})'
|
||||
'(_(?P<hour>[0-9]{2})h(?P<minute>[0-9]{2}))?'
|
||||
'(_(?P<n>[0-9]+))?'
|
||||
'_?[ -]*(?P<name>.*)$'
|
||||
"^(?P<year>[0-9]{4})(?P<month>[0-9]{2})(?P<day>[0-9]{2})"
|
||||
"(_(?P<hour>[0-9]{2})h(?P<minute>[0-9]{2}))?"
|
||||
"(_(?P<n>[0-9]+))?"
|
||||
"_?[ -]*(?P<name>.*)$"
|
||||
)
|
||||
|
||||
def _into_name(self, name):
|
||||
name = name.replace('_', ' ')
|
||||
return ' '.join(r.capitalize() for r in name.split(' '))
|
||||
name = name.replace("_", " ")
|
||||
return " ".join(r.capitalize() for r in name.split(" "))
|
||||
|
||||
def read_file_info(self):
|
||||
""" Read file information and metadata. """
|
||||
"""Read file information and metadata."""
|
||||
try:
|
||||
if os.path.exists(self.path):
|
||||
return mutagen.File(self.path)
|
||||
|
@ -157,22 +160,21 @@ class SoundFile:
|
|||
return None
|
||||
|
||||
def find_episode(self, sound, path_info):
|
||||
"""
|
||||
For a given program, check if there is an initial diffusion
|
||||
to associate to, using the date info we have. Update self.sound
|
||||
and save it consequently.
|
||||
"""For a given program, check if there is an initial diffusion to
|
||||
associate to, using the date info we have. Update self.sound and save
|
||||
it consequently.
|
||||
|
||||
We only allow initial diffusion since there should be no
|
||||
rerun.
|
||||
We only allow initial diffusion since there should be no rerun.
|
||||
"""
|
||||
program, pi = sound.program, path_info
|
||||
if 'year' not in pi or not sound or sound.episode:
|
||||
if "year" not in pi or not sound or sound.episode:
|
||||
return None
|
||||
|
||||
year, month, day = pi.get('year'), pi.get('month'), pi.get('day')
|
||||
if pi.get('hour') is not None:
|
||||
at = tz.datetime(year, month, day, pi.get('hour', 0),
|
||||
pi.get('minute', 0))
|
||||
year, month, day = pi.get("year"), pi.get("month"), pi.get("day")
|
||||
if pi.get("hour") is not None:
|
||||
at = tz.datetime(
|
||||
year, month, day, pi.get("hour", 0), pi.get("minute", 0)
|
||||
)
|
||||
at = tz.get_current_timezone().localize(at)
|
||||
else:
|
||||
at = date(year, month, day)
|
||||
|
@ -181,13 +183,12 @@ class SoundFile:
|
|||
if not diffusion:
|
||||
return None
|
||||
|
||||
logger.debug('%s <--> %s', sound.file.name, str(diffusion.episode))
|
||||
logger.debug("%s <--> %s", sound.file.name, str(diffusion.episode))
|
||||
return diffusion.episode
|
||||
|
||||
def find_playlist(self, sound=None, use_meta=True):
|
||||
"""
|
||||
Find a playlist file corresponding to the sound path, such as:
|
||||
my_sound.ogg => my_sound.csv
|
||||
"""Find a playlist file corresponding to the sound path, such as:
|
||||
my_sound.ogg => my_sound.csv.
|
||||
|
||||
Use sound's file metadata if no corresponding playlist has been
|
||||
found and `use_meta` is True.
|
||||
|
@ -199,7 +200,7 @@ class SoundFile:
|
|||
|
||||
# import playlist
|
||||
path_noext, ext = os.path.splitext(self.sound.file.path)
|
||||
path = path_noext + '.csv'
|
||||
path = path_noext + ".csv"
|
||||
if os.path.exists(path):
|
||||
PlaylistImport(path, sound=sound).run()
|
||||
# use metadata
|
||||
|
@ -209,18 +210,27 @@ class SoundFile:
|
|||
if self.info and self.info.tags:
|
||||
tags = self.info.tags
|
||||
title, artist, album, year = tuple(
|
||||
t and ', '.join(t) for t in (
|
||||
tags.get(k) for k in ('title', 'artist', 'album',
|
||||
'year'))
|
||||
t and ", ".join(t)
|
||||
for t in (
|
||||
tags.get(k)
|
||||
for k in ("title", "artist", "album", "year")
|
||||
)
|
||||
)
|
||||
title = (
|
||||
title
|
||||
or (self.path_info and self.path_info.get("name"))
|
||||
or os.path.basename(path_noext)
|
||||
)
|
||||
info = (
|
||||
"{} ({})".format(album, year)
|
||||
if album and year
|
||||
else album or year or ""
|
||||
)
|
||||
track = Track(
|
||||
sound=sound,
|
||||
position=int(tags.get("tracknumber", 0)),
|
||||
title=title,
|
||||
artist=artist or _("unknown"),
|
||||
info=info,
|
||||
)
|
||||
title = title or (self.path_info and
|
||||
self.path_info.get('name')) or \
|
||||
os.path.basename(path_noext)
|
||||
info = '{} ({})'.format(album, year) if album and year else \
|
||||
album or year or ''
|
||||
track = Track(sound=sound,
|
||||
position=int(tags.get('tracknumber', 0)),
|
||||
title=title,
|
||||
artist=artist or _('unknown'),
|
||||
info=info)
|
||||
track.save()
|
||||
|
|
|
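As a side note, the `_path_re` pattern above encodes the expected file naming scheme; a standalone sketch of what `read_path()` extracts from it (the basename below is made up for illustration):

import re

# Same pattern as SoundFile._path_re above, reproduced for illustration.
path_re = re.compile(
    "^(?P<year>[0-9]{4})(?P<month>[0-9]{2})(?P<day>[0-9]{2})"
    "(_(?P<hour>[0-9]{2})h(?P<minute>[0-9]{2}))?"
    "(_(?P<n>[0-9]+))?"
    "_?[ -]*(?P<name>.*)$"
)

match = path_re.search("20230401_18h30_2_morning_show")  # hypothetical basename
print(match.groupdict())
# {'year': '2023', 'month': '04', 'day': '01', 'hour': '18', 'minute': '30',
#  'n': '2', 'name': 'morning_show'}
# read_path() then casts the numeric groups to int; _into_name() would turn
# "morning_show" into "Morning Show".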
@ -1,7 +1,7 @@
|
|||
#! /usr/bin/env python3
|
||||
|
||||
"""
|
||||
Monitor sound files; For each program, check for:
|
||||
"""Monitor sound files; For each program, check for:
|
||||
|
||||
- new files;
|
||||
- deleted files;
|
||||
- differences between files and sound;
|
||||
|
@ -23,9 +23,9 @@ To check quality of files, call the command sound_quality_check using the
|
|||
parameters given by the setting AIRCOX_SOUND_QUALITY. This script requires
|
||||
Sox (and soxi).
|
||||
"""
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from watchdog.events import PatternMatchingEventHandler
|
||||
|
||||
|
@ -34,12 +34,17 @@ from aircox.models import Sound
|
|||
|
||||
from .sound_file import SoundFile
|
||||
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
__all__ = ('NotifyHandler', 'CreateHandler', 'DeleteHandler',
|
||||
'MoveHandler', 'ModifiedHandler', 'MonitorHandler',)
|
||||
__all__ = (
|
||||
"NotifyHandler",
|
||||
"CreateHandler",
|
||||
"DeleteHandler",
|
||||
"MoveHandler",
|
||||
"ModifiedHandler",
|
||||
"MonitorHandler",
|
||||
)
|
||||
|
||||
|
||||
class NotifyHandler:
|
||||
|
@ -63,34 +68,34 @@ class NotifyHandler:
|
|||
|
||||
|
||||
class CreateHandler(NotifyHandler):
|
||||
log_msg = 'Sound file created: {sound_file.path}'
|
||||
log_msg = "Sound file created: {sound_file.path}"
|
||||
|
||||
|
||||
class DeleteHandler(NotifyHandler):
|
||||
log_msg = 'Sound file deleted: {sound_file.path}'
|
||||
log_msg = "Sound file deleted: {sound_file.path}"
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
kwargs['deleted'] = True
|
||||
kwargs["deleted"] = True
|
||||
return super().__call__(*args, **kwargs)
|
||||
|
||||
|
||||
class MoveHandler(NotifyHandler):
|
||||
log_msg = 'Sound file moved: {event.src_path} -> {event.dest_path}'
|
||||
log_msg = "Sound file moved: {event.src_path} -> {event.dest_path}"
|
||||
|
||||
def __call__(self, event, **kw):
|
||||
sound = Sound.objects.filter(file=event.src_path)
|
||||
# FIXME: this is wrong
|
||||
if sound:
|
||||
kw['sound'] = sound
|
||||
kw['path'] = event.src_path
|
||||
kw["sound"] = sound
|
||||
kw["path"] = event.src_path
|
||||
else:
|
||||
kw['path'] = event.dest_path
|
||||
kw["path"] = event.dest_path
|
||||
return super().__call__(event, **kw)
|
||||
|
||||
|
||||
class ModifiedHandler(NotifyHandler):
|
||||
timeout_delta = timedelta(seconds=30)
|
||||
log_msg = 'Sound file updated: {sound_file.path}'
|
||||
log_msg = "Sound file updated: {sound_file.path}"
|
||||
|
||||
def wait(self):
|
||||
# multiple call of this handler can be done consecutively, we block
|
||||
|
@ -108,9 +113,8 @@ class ModifiedHandler(NotifyHandler):
|
|||
|
||||
|
||||
class MonitorHandler(PatternMatchingEventHandler):
|
||||
"""
|
||||
Event handler for watchdog, in order to be used in monitoring.
|
||||
"""
|
||||
"""Event handler for watchdog, in order to be used in monitoring."""
|
||||
|
||||
pool = None
|
||||
jobs = {}
|
||||
|
||||
|
@ -118,35 +122,39 @@ class MonitorHandler(PatternMatchingEventHandler):
|
|||
"""
|
||||
:param str subdir: sub-directory in program dirs to monitor \
|
||||
(AIRCOX_SOUND_ARCHIVES_SUBDIR or AIRCOX_SOUND_EXCERPTS_SUBDIR);
|
||||
:param concurrent.futures.Executor pool: pool executing jobs on file change;
|
||||
:param concurrent.futures.Executor pool: pool executing jobs on file
|
||||
change;
|
||||
:param **sync_kw: kwargs passed to `SoundFile.sync`;
|
||||
"""
|
||||
self.subdir = subdir
|
||||
self.pool = pool
|
||||
self.sync_kw = sync_kw
|
||||
|
||||
patterns = ['*/{}/*{}'.format(self.subdir, ext)
|
||||
for ext in settings.AIRCOX_SOUND_FILE_EXT]
|
||||
patterns = [
|
||||
"*/{}/*{}".format(self.subdir, ext)
|
||||
for ext in settings.AIRCOX_SOUND_FILE_EXT
|
||||
]
|
||||
super().__init__(patterns=patterns, ignore_directories=True)
|
||||
|
||||
def on_created(self, event):
|
||||
self._submit(CreateHandler(), event, 'new', **self.sync_kw)
|
||||
self._submit(CreateHandler(), event, "new", **self.sync_kw)
|
||||
|
||||
def on_deleted(self, event):
|
||||
self._submit(DeleteHandler(), event, 'del')
|
||||
self._submit(DeleteHandler(), event, "del")
|
||||
|
||||
def on_moved(self, event):
|
||||
self._submit(MoveHandler(), event, 'mv', **self.sync_kw)
|
||||
self._submit(MoveHandler(), event, "mv", **self.sync_kw)
|
||||
|
||||
def on_modified(self, event):
|
||||
self._submit(ModifiedHandler(), event, 'up', **self.sync_kw)
|
||||
self._submit(ModifiedHandler(), event, "up", **self.sync_kw)
|
||||
|
||||
def _submit(self, handler, event, job_key_prefix, **kwargs):
|
||||
"""Send handler job to pool if not already running.
|
||||
|
||||
Return tuple with running job and boolean indicating if its a
|
||||
new one.
|
||||
"""
|
||||
Send handler job to pool if not already running.
|
||||
Return tuple with running job and boolean indicating if its a new one.
|
||||
"""
|
||||
key = job_key_prefix + ':' + event.src_path
|
||||
key = job_key_prefix + ":" + event.src_path
|
||||
job = self.jobs.get(key)
|
||||
if job and not job.future.done():
|
||||
job.ping()
|
||||
|
@ -158,5 +166,6 @@ class MonitorHandler(PatternMatchingEventHandler):
|
|||
def done(r):
|
||||
if self.jobs.get(key) is handler:
|
||||
del self.jobs[key]
|
||||
|
||||
handler.future.add_done_callback(done)
|
||||
return handler, True
|
||||
|
|
|
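The `_submit` logic above acts as a small debounce around the thread pool: repeated filesystem events for the same path re-use the pending job instead of spawning a new one. A reduced sketch of that pattern, independent of watchdog (names are illustrative):

import concurrent.futures as futures
import time

class DebouncedJobs:
    """Keep at most one running job per key; repeated submits only ping it."""
    def __init__(self, pool):
        self.pool = pool
        self.jobs = {}

    def submit(self, key, fn, *args):
        job = self.jobs.get(key)
        if job is not None and not job["future"].done():
            job["pings"] += 1          # analogous to NotifyHandler.ping()
            return job, False          # existing job, not a new one
        job = {"pings": 0}
        job["future"] = self.pool.submit(fn, *args)
        self.jobs[key] = job
        job["future"].add_done_callback(lambda fut: self.jobs.pop(key, None))
        return job, True

with futures.ThreadPoolExecutor() as pool:
    jobs = DebouncedJobs(pool)
    jobs.submit("up:/tmp/sound.ogg", time.sleep, 0.1)
    jobs.submit("up:/tmp/sound.ogg", time.sleep, 0.1)  # coalesced with the first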
@ -1,30 +1,31 @@
|
|||
"""
|
||||
Provide sound analysis class using Sox.
|
||||
"""
|
||||
"""Provide sound analysis class using Sox."""
|
||||
import logging
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger('aircox.commands')
|
||||
logger = logging.getLogger("aircox.commands")
|
||||
|
||||
|
||||
__all__ = ('SoxStats', 'SoundStats')
|
||||
__all__ = ("SoxStats", "SoundStats")
|
||||
|
||||
|
||||
class SoxStats:
|
||||
"""
|
||||
Run Sox process and parse output
|
||||
"""
|
||||
"""Run Sox process and parse output."""
|
||||
|
||||
attributes = [
|
||||
'DC offset', 'Min level', 'Max level',
|
||||
'Pk lev dB', 'RMS lev dB', 'RMS Pk dB',
|
||||
'RMS Tr dB', 'Flat factor', 'Length s',
|
||||
"DC offset",
|
||||
"Min level",
|
||||
"Max level",
|
||||
"Pk lev dB",
|
||||
"RMS lev dB",
|
||||
"RMS Pk dB",
|
||||
"RMS Tr dB",
|
||||
"Flat factor",
|
||||
"Length s",
|
||||
]
|
||||
|
||||
def __init__(self, path, **kwargs):
|
||||
"""
|
||||
If path is given, call analyse with path and kwargs
|
||||
"""
|
||||
"""If path is given, call analyse with path and kwargs."""
|
||||
self.values = {}
|
||||
if path:
|
||||
self.analyse(path, **kwargs)
|
||||
|
@ -34,82 +35,95 @@ class SoxStats:
|
|||
|
||||
def parse(self, output):
|
||||
for attr in self.attributes:
|
||||
value = re.search(attr + r'\s+(?P<value>\S+)', output)
|
||||
value = re.search(attr + r"\s+(?P<value>\S+)", output)
|
||||
value = value and value.groupdict()
|
||||
if value:
|
||||
try:
|
||||
value = float(value.get('value'))
|
||||
value = float(value.get("value"))
|
||||
except ValueError:
|
||||
value = None
|
||||
self.values[attr] = value
|
||||
self.values['length'] = self.values['Length s']
|
||||
self.values["length"] = self.values["Length s"]
|
||||
|
||||
def analyse(self, path, at=None, length=None):
|
||||
"""
|
||||
If at and length are given use them as excerpt to analyse.
|
||||
"""
|
||||
args = ['sox', path, '-n']
|
||||
"""If at and length are given use them as excerpt to analyse."""
|
||||
args = ["sox", path, "-n"]
|
||||
|
||||
if at is not None and length is not None:
|
||||
args += ['trim', str(at), str(length)]
|
||||
args += ["trim", str(at), str(length)]
|
||||
|
||||
args.append('stats')
|
||||
args.append("stats")
|
||||
|
||||
p = subprocess.Popen(args, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
p = subprocess.Popen(
|
||||
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
)
|
||||
# sox outputs to stderr (my god WHYYYY)
|
||||
out_, out = p.communicate()
|
||||
self.parse(str(out, encoding='utf-8'))
|
||||
self.parse(str(out, encoding="utf-8"))
|
||||
|
||||
|
||||
class SoundStats:
|
||||
path = None # file path
|
||||
sample_length = 120 # default sample length in seconds
|
||||
stats = None # list of samples statistics
|
||||
bad = None # list of bad samples
|
||||
good = None # list of good samples
|
||||
path = None # file path
|
||||
sample_length = 120 # default sample length in seconds
|
||||
stats = None # list of samples statistics
|
||||
bad = None # list of bad samples
|
||||
good = None # list of good samples
|
||||
|
||||
def __init__(self, path, sample_length=None):
|
||||
self.path = path
|
||||
self.sample_length = sample_length if sample_length is not None \
|
||||
else self.sample_length
|
||||
self.sample_length = (
|
||||
sample_length if sample_length is not None else self.sample_length
|
||||
)
|
||||
|
||||
def get_file_stats(self):
|
||||
return self.stats and self.stats[0]
|
||||
|
||||
def analyse(self):
|
||||
logger.debug('complete file analysis')
|
||||
logger.debug("complete file analysis")
|
||||
self.stats = [SoxStats(self.path)]
|
||||
position = 0
|
||||
length = self.stats[0].get('length')
|
||||
length = self.stats[0].get("length")
|
||||
|
||||
if not self.sample_length:
|
||||
return
|
||||
|
||||
logger.debug('start samples analysis...')
|
||||
logger.debug("start samples analysis...")
|
||||
while position < length:
|
||||
stats = SoxStats(self.path, at=position, length=self.sample_length)
|
||||
self.stats.append(stats)
|
||||
position += self.sample_length
|
||||
|
||||
def check(self, name, min_val, max_val):
|
||||
self.good = [index for index, stats in enumerate(self.stats)
|
||||
if min_val <= stats.get(name) <= max_val]
|
||||
self.bad = [index for index, stats in enumerate(self.stats)
|
||||
if index not in self.good]
|
||||
self.good = [
|
||||
index
|
||||
for index, stats in enumerate(self.stats)
|
||||
if min_val <= stats.get(name) <= max_val
|
||||
]
|
||||
self.bad = [
|
||||
index
|
||||
for index, stats in enumerate(self.stats)
|
||||
if index not in self.good
|
||||
]
|
||||
self.resume()
|
||||
|
||||
def resume(self):
|
||||
def view(array): return [
|
||||
'file' if index == 0 else
|
||||
'sample {} (at {} seconds)'.format(
|
||||
index, (index-1) * self.sample_length)
|
||||
for index in array
|
||||
]
|
||||
def view(array):
|
||||
return [
|
||||
"file"
|
||||
if index == 0
|
||||
else "sample {} (at {} seconds)".format(
|
||||
index, (index - 1) * self.sample_length
|
||||
)
|
||||
for index in array
|
||||
]
|
||||
|
||||
if self.good:
|
||||
logger.debug(self.path + ' -> good: \033[92m%s\033[0m',
|
||||
', '.join(view(self.good)))
|
||||
logger.debug(
|
||||
self.path + " -> good: \033[92m%s\033[0m",
|
||||
", ".join(view(self.good)),
|
||||
)
|
||||
if self.bad:
|
||||
logger.debug(self.path + ' -> bad: \033[91m%s\033[0m',
|
||||
', '.join(view(self.bad)))
|
||||
logger.debug(
|
||||
self.path + " -> bad: \033[91m%s\033[0m",
|
||||
", ".join(view(self.bad)),
|
||||
)
|
||||
|
|
|
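For context, `sox <file> -n stats` writes its report to stderr as "label  value" lines, and `parse()` above pulls one float per known attribute with a regex. A small sketch on canned output (the numbers are made up):

import re

attributes = ["DC offset", "Min level", "Max level", "Pk lev dB",
              "RMS lev dB", "RMS Pk dB", "RMS Tr dB", "Flat factor", "Length s"]

# Shortened, made-up excerpt of what `sox file.ogg -n stats` prints on stderr.
output = """
DC offset   0.000015
Pk lev dB      -0.63
RMS lev dB    -21.45
Length s     1803.52
"""

values = {}
for attr in attributes:
    found = re.search(attr + r"\s+(?P<value>\S+)", output)
    if found:
        try:
            values[attr] = float(found.group("value"))
        except ValueError:
            values[attr] = None
print(values.get("RMS lev dB"))  # -21.45, the kind of value checked against --range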
@ -5,13 +5,13 @@ from django.utils import timezone as tz
from .models import Station
from .utils import Redirect


__all__ = ['AircoxMiddleware']
__all__ = ("AircoxMiddleware",)


class AircoxMiddleware(object):
"""
Middleware used to get default info for the given website. Theses
"""Middleware used to get default info for the given website.

Theses
This middleware must be set after the middleware
'django.contrib.auth.middleware.AuthenticationMiddleware',
"""

@ -20,11 +20,11 @@ class AircoxMiddleware(object):
self.get_response = get_response

def get_station(self, request):
""" Return station for the provided request """
"""Return station for the provided request."""
expr = Q(default=True) | Q(hosts__contains=request.get_host())
# case = Case(When(hosts__contains=request.get_host(), then=Value(0)),
# When(default=True, then=Value(32)))
return Station.objects.filter(expr).order_by('default').first()
return Station.objects.filter(expr).order_by("default").first()
# .annotate(resolve_priority=case) \
# .order_by('resolve_priority').first()

@ -33,10 +33,10 @@ class AircoxMiddleware(object):
# required
timezone = None
try:
timezone = request.session.get('aircox.timezone')
timezone = request.session.get("aircox.timezone")
if timezone:
timezone = pytz.timezone(timezone)
except:
except Exception:
pass

if not timezone:
|
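One detail worth noting in get_station() above: ordering by "default" puts False before True, so a station whose hosts field matches the request host wins over the default=True fallback. A toy illustration of that sort order, independent of the ORM:

# Toy stand-ins for Station rows matched by Q(default=True) | Q(hosts__contains=host).
stations = [
    {"name": "default station", "default": True},
    {"name": "radio.example.org", "default": False},  # matched via hosts
]
# order_by("default"): False (0) sorts before True (1).
print(sorted(stations, key=lambda s: s["default"])[0]["name"])
# -> "radio.example.org"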
@ -1,12 +1,48 @@
|
|||
from .article import *
|
||||
from .page import *
|
||||
from .program import *
|
||||
from .episode import *
|
||||
from .log import *
|
||||
from .sound import *
|
||||
from .station import *
|
||||
from .user_settings import *
|
||||
|
||||
from . import signals
|
||||
from .article import Article
|
||||
from .episode import Diffusion, DiffusionQuerySet, Episode
|
||||
from .log import Log, LogArchiver, LogQuerySet
|
||||
from .page import Category, Comment, NavItem, Page, PageQuerySet, StaticPage
|
||||
from .program import (
|
||||
BaseRerun,
|
||||
BaseRerunQuerySet,
|
||||
Program,
|
||||
ProgramChildQuerySet,
|
||||
ProgramQuerySet,
|
||||
Schedule,
|
||||
Stream,
|
||||
)
|
||||
from .sound import Sound, SoundQuerySet, Track
|
||||
from .station import Port, Station, StationQuerySet
|
||||
from .user_settings import UserSettings
|
||||
|
||||
|
||||
__all__ = (
|
||||
"signals",
|
||||
"Article",
|
||||
"Episode",
|
||||
"Diffusion",
|
||||
"DiffusionQuerySet",
|
||||
"Log",
|
||||
"LogQuerySet",
|
||||
"LogArchiver",
|
||||
"Category",
|
||||
"PageQuerySet",
|
||||
"Page",
|
||||
"StaticPage",
|
||||
"Comment",
|
||||
"NavItem",
|
||||
"Program",
|
||||
"ProgramQuerySet",
|
||||
"Stream",
|
||||
"Schedule",
|
||||
"ProgramChildQuerySet",
|
||||
"BaseRerun",
|
||||
"BaseRerunQuerySet",
|
||||
"Sound",
|
||||
"SoundQuerySet",
|
||||
"Track",
|
||||
"Station",
|
||||
"StationQuerySet",
|
||||
"Port",
|
||||
"UserSettings",
|
||||
)
|
||||
|
|
|
@ -3,16 +3,14 @@ from django.utils.translation import gettext_lazy as _
|
|||
from .page import Page
|
||||
from .program import ProgramChildQuerySet
|
||||
|
||||
|
||||
__all__ = ('Article',)
|
||||
__all__ = ("Article",)
|
||||
|
||||
|
||||
class Article(Page):
|
||||
detail_url_name = 'article-detail'
|
||||
detail_url_name = "article-detail"
|
||||
|
||||
objects = ProgramChildQuerySet.as_manager()
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Article')
|
||||
verbose_name_plural = _('Articles')
|
||||
|
||||
verbose_name = _("Article")
|
||||
verbose_name_plural = _("Articles")
|
||||
|
|
|
@ -3,45 +3,51 @@ import datetime
|
|||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from easy_thumbnails.files import get_thumbnailer
|
||||
|
||||
from aircox import settings, utils
|
||||
from .program import ProgramChildQuerySet, \
|
||||
BaseRerun, BaseRerunQuerySet, Schedule
|
||||
|
||||
from .page import Page
|
||||
from .program import (
|
||||
BaseRerun,
|
||||
BaseRerunQuerySet,
|
||||
ProgramChildQuerySet,
|
||||
Schedule,
|
||||
)
|
||||
|
||||
|
||||
__all__ = ('Episode', 'Diffusion', 'DiffusionQuerySet')
|
||||
__all__ = ("Episode", "Diffusion", "DiffusionQuerySet")
|
||||
|
||||
|
||||
class Episode(Page):
|
||||
objects = ProgramChildQuerySet.as_manager()
|
||||
detail_url_name = 'episode-detail'
|
||||
item_template_name = 'aircox/widgets/episode_item.html'
|
||||
detail_url_name = "episode-detail"
|
||||
item_template_name = "aircox/widgets/episode_item.html"
|
||||
|
||||
@property
|
||||
def program(self):
|
||||
return getattr(self.parent, 'program', None)
|
||||
return getattr(self.parent, "program", None)
|
||||
|
||||
@cached_property
|
||||
def podcasts(self):
|
||||
""" Return serialized data about podcasts. """
|
||||
"""Return serialized data about podcasts."""
|
||||
from ..serializers import PodcastSerializer
|
||||
podcasts = [PodcastSerializer(s).data
|
||||
for s in self.sound_set.public().order_by('type')]
|
||||
|
||||
podcasts = [
|
||||
PodcastSerializer(s).data
|
||||
for s in self.sound_set.public().order_by("type")
|
||||
]
|
||||
if self.cover:
|
||||
options = {'size': (128, 128), 'crop': 'scale'}
|
||||
options = {"size": (128, 128), "crop": "scale"}
|
||||
cover = get_thumbnailer(self.cover).get_thumbnail(options).url
|
||||
else:
|
||||
cover = None
|
||||
|
||||
for index, podcast in enumerate(podcasts):
|
||||
podcasts[index]['cover'] = cover
|
||||
podcasts[index]['page_url'] = self.get_absolute_url()
|
||||
podcasts[index]['page_title'] = self.title
|
||||
podcasts[index]["cover"] = cover
|
||||
podcasts[index]["page_url"] = self.get_absolute_url()
|
||||
podcasts[index]["page_title"] = self.title
|
||||
return podcasts
|
||||
|
||||
@program.setter
|
||||
|
@ -49,8 +55,8 @@ class Episode(Page):
|
|||
self.parent = value
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Episode')
|
||||
verbose_name_plural = _('Episodes')
|
||||
verbose_name = _("Episode")
|
||||
verbose_name_plural = _("Episodes")
|
||||
|
||||
def get_absolute_url(self):
|
||||
if not self.is_published:
|
||||
|
@ -59,82 +65,89 @@ class Episode(Page):
|
|||
|
||||
def save(self, *args, **kwargs):
|
||||
if self.parent is None:
|
||||
raise ValueError('missing parent program')
|
||||
raise ValueError("missing parent program")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def get_default_title(cls, page, date):
|
||||
return settings.AIRCOX_EPISODE_TITLE.format(
|
||||
program=page,
|
||||
date=date.strftime(settings.AIRCOX_EPISODE_TITLE_DATE_FORMAT)
|
||||
date=date.strftime(settings.AIRCOX_EPISODE_TITLE_DATE_FORMAT),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_init_kwargs_from(cls, page, date, title=None, **kwargs):
|
||||
""" Get default Episode's title """
|
||||
title = settings.AIRCOX_EPISODE_TITLE.format(
|
||||
program=page,
|
||||
date=date.strftime(settings.AIRCOX_EPISODE_TITLE_DATE_FORMAT),
|
||||
) if title is None else title
|
||||
return super().get_init_kwargs_from(page, title=title, program=page,
|
||||
**kwargs)
|
||||
"""Get default Episode's title."""
|
||||
title = (
|
||||
settings.AIRCOX_EPISODE_TITLE.format(
|
||||
program=page,
|
||||
date=date.strftime(settings.AIRCOX_EPISODE_TITLE_DATE_FORMAT),
|
||||
)
|
||||
if title is None
|
||||
else title
|
||||
)
|
||||
return super().get_init_kwargs_from(
|
||||
page, title=title, program=page, **kwargs
|
||||
)
|
||||
|
||||
|
||||
class DiffusionQuerySet(BaseRerunQuerySet):
|
||||
def episode(self, episode=None, id=None):
|
||||
""" Diffusions for this episode """
|
||||
return self.filter(episode=episode) if id is None else \
|
||||
self.filter(episode__id=id)
|
||||
"""Diffusions for this episode."""
|
||||
return (
|
||||
self.filter(episode=episode)
|
||||
if id is None
|
||||
else self.filter(episode__id=id)
|
||||
)
|
||||
|
||||
def on_air(self):
|
||||
""" On air diffusions """
|
||||
"""On air diffusions."""
|
||||
return self.filter(type=Diffusion.TYPE_ON_AIR)
|
||||
|
||||
# TODO: rename to `datetime`
|
||||
def now(self, now=None, order=True):
|
||||
""" Diffusions occuring now """
|
||||
"""Diffusions occuring now."""
|
||||
now = now or tz.now()
|
||||
qs = self.filter(start__lte=now, end__gte=now).distinct()
|
||||
return qs.order_by('start') if order else qs
|
||||
return qs.order_by("start") if order else qs
|
||||
|
||||
def date(self, date=None, order=True):
|
||||
""" Diffusions occuring date. """
|
||||
"""Diffusions occuring date."""
|
||||
date = date or datetime.date.today()
|
||||
start = tz.datetime.combine(date, datetime.time())
|
||||
end = tz.datetime.combine(date, datetime.time(23, 59, 59, 999))
|
||||
# start = tz.get_current_timezone().localize(start)
|
||||
# end = tz.get_current_timezone().localize(end)
|
||||
qs = self.filter(start__range=(start, end))
|
||||
return qs.order_by('start') if order else qs
|
||||
return qs.order_by("start") if order else qs
|
||||
|
||||
def at(self, date, order=True):
|
||||
""" Return diffusions at specified date or datetime """
|
||||
return self.now(date, order) if isinstance(date, tz.datetime) else \
|
||||
self.date(date, order)
|
||||
"""Return diffusions at specified date or datetime."""
|
||||
return (
|
||||
self.now(date, order)
|
||||
if isinstance(date, tz.datetime)
|
||||
else self.date(date, order)
|
||||
)
|
||||
|
||||
def after(self, date=None):
|
||||
"""
|
||||
Return a queryset of diffusions that happen after the given
|
||||
date (default: today).
|
||||
"""
|
||||
"""Return a queryset of diffusions that happen after the given date
|
||||
(default: today)."""
|
||||
date = utils.date_or_default(date)
|
||||
if isinstance(date, tz.datetime):
|
||||
qs = self.filter(Q(start__gte=date) | Q(end__gte=date))
|
||||
else:
|
||||
qs = self.filter(Q(start__date__gte=date) | Q(end__date__gte=date))
|
||||
return qs.order_by('start')
|
||||
return qs.order_by("start")
|
||||
|
||||
def before(self, date=None):
|
||||
"""
|
||||
Return a queryset of diffusions that finish before the given
|
||||
date (default: today).
|
||||
"""
|
||||
"""Return a queryset of diffusions that finish before the given date
|
||||
(default: today)."""
|
||||
date = utils.date_or_default(date)
|
||||
if isinstance(date, tz.datetime):
|
||||
qs = self.filter(start__lt=date)
|
||||
else:
|
||||
qs = self.filter(start__date__lt=date)
|
||||
return qs.order_by('start')
|
||||
return qs.order_by("start")
|
||||
|
||||
def range(self, start, end):
|
||||
# FIXME can return dates that are out of range...
|
||||
|
@ -142,10 +155,9 @@ class DiffusionQuerySet(BaseRerunQuerySet):
|
|||
|
||||
|
||||
class Diffusion(BaseRerun):
|
||||
"""
|
||||
A Diffusion is an occurrence of a Program that is scheduled on the
|
||||
station's timetable. It can be a rerun of a previous diffusion. In such
|
||||
a case, use rerun's info instead of its own.
|
||||
"""A Diffusion is an occurrence of a Program that is scheduled on the
|
||||
station's timetable. It can be a rerun of a previous diffusion. In such a
|
||||
case, use rerun's info instead of its own.
|
||||
|
||||
A Diffusion without any rerun is named Episode (previously, a
|
||||
Diffusion was different from an Episode, but in the end, an
|
||||
|
@ -159,29 +171,37 @@ class Diffusion(BaseRerun):
|
|||
- cancel: the diffusion has been canceled
|
||||
- stop: the diffusion has been manually stopped
|
||||
"""
|
||||
|
||||
objects = DiffusionQuerySet.as_manager()
|
||||
|
||||
TYPE_ON_AIR = 0x00
|
||||
TYPE_UNCONFIRMED = 0x01
|
||||
TYPE_CANCEL = 0x02
|
||||
TYPE_CHOICES = (
|
||||
(TYPE_ON_AIR, _('on air')),
|
||||
(TYPE_UNCONFIRMED, _('not confirmed')),
|
||||
(TYPE_CANCEL, _('cancelled')),
|
||||
(TYPE_ON_AIR, _("on air")),
|
||||
(TYPE_UNCONFIRMED, _("not confirmed")),
|
||||
(TYPE_CANCEL, _("cancelled")),
|
||||
)
|
||||
|
||||
episode = models.ForeignKey(
|
||||
Episode, models.CASCADE, verbose_name=_('episode'),
|
||||
Episode,
|
||||
models.CASCADE,
|
||||
verbose_name=_("episode"),
|
||||
)
|
||||
schedule = models.ForeignKey(
|
||||
Schedule, models.CASCADE, verbose_name=_('schedule'),
|
||||
blank=True, null=True,
|
||||
Schedule,
|
||||
models.CASCADE,
|
||||
verbose_name=_("schedule"),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
type = models.SmallIntegerField(
|
||||
verbose_name=_('type'), default=TYPE_ON_AIR, choices=TYPE_CHOICES,
|
||||
verbose_name=_("type"),
|
||||
default=TYPE_ON_AIR,
|
||||
choices=TYPE_CHOICES,
|
||||
)
|
||||
start = models.DateTimeField(_('start'), db_index=True)
|
||||
end = models.DateTimeField(_('end'), db_index=True)
|
||||
start = models.DateTimeField(_("start"), db_index=True)
|
||||
end = models.DateTimeField(_("end"), db_index=True)
|
||||
# port = models.ForeignKey(
|
||||
# 'self',
|
||||
# verbose_name = _('port'),
|
||||
|
@ -190,33 +210,33 @@ class Diffusion(BaseRerun):
|
|||
# help_text = _('use this input port'),
|
||||
# )
|
||||
|
||||
item_template_name = 'aircox/widgets/diffusion_item.html'
|
||||
item_template_name = "aircox/widgets/diffusion_item.html"
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Diffusion')
|
||||
verbose_name_plural = _('Diffusions')
|
||||
verbose_name = _("Diffusion")
|
||||
verbose_name_plural = _("Diffusions")
|
||||
permissions = (
|
||||
('programming', _('edit the diffusions\' planification')),
|
||||
("programming", _("edit the diffusions' planification")),
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
str_ = '{episode} - {date}'.format(
|
||||
str_ = "{episode} - {date}".format(
|
||||
episode=self.episode and self.episode.title,
|
||||
date=self.local_start.strftime('%Y/%m/%d %H:%M%z'),
|
||||
date=self.local_start.strftime("%Y/%m/%d %H:%M%z"),
|
||||
)
|
||||
if self.initial:
|
||||
str_ += ' ({})'.format(_('rerun'))
|
||||
str_ += " ({})".format(_("rerun"))
|
||||
return str_
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
super().save(*args, **kwargs)
|
||||
if self.is_initial and self.episode != self._initial['episode']:
|
||||
if self.is_initial and self.episode != self._initial["episode"]:
|
||||
self.rerun_set.update(episode=self.episode, program=self.program)
|
||||
|
||||
#def save(self, no_check=False, *args, **kwargs):
|
||||
#if self.start != self._initial['start'] or \
|
||||
# self.end != self._initial['end']:
|
||||
# self.check_conflicts()
|
||||
# def save(self, no_check=False, *args, **kwargs):
|
||||
# if self.start != self._initial['start'] or \
|
||||
# self.end != self._initial['end']:
|
||||
# self.check_conflicts()
|
||||
|
||||
def save_rerun(self):
|
||||
self.episode = self.initial.episode
|
||||
|
@ -231,85 +251,96 @@ class Diffusion(BaseRerun):
|
|||
|
||||
@property
|
||||
def date(self):
|
||||
""" Return diffusion start as a date. """
|
||||
"""Return diffusion start as a date."""
|
||||
|
||||
return utils.cast_date(self.start)
|
||||
|
||||
@cached_property
|
||||
def local_start(self):
|
||||
"""
|
||||
Return a version of self.date that is localized to self.timezone;
|
||||
This is needed since datetime are stored as UTC date and we want
|
||||
to get it as local time.
|
||||
"""
|
||||
"""Return a version of self.date that is localized to self.timezone;
|
||||
This is needed since datetime are stored as UTC date and we want to get
|
||||
it as local time."""
|
||||
|
||||
return tz.localtime(self.start, tz.get_current_timezone())
|
||||
|
||||
@property
|
||||
def local_end(self):
|
||||
"""
|
||||
Return a version of self.date that is localized to self.timezone;
|
||||
This is needed since datetime are stored as UTC date and we want
|
||||
to get it as local time.
|
||||
"""
|
||||
"""Return a version of self.date that is localized to self.timezone;
|
||||
This is needed since datetime are stored as UTC date and we want to get
|
||||
it as local time."""
|
||||
|
||||
return tz.localtime(self.end, tz.get_current_timezone())
|
||||
|
||||
@property
|
||||
def is_now(self):
|
||||
""" True if diffusion is currently running """
|
||||
"""True if diffusion is currently running."""
|
||||
now = tz.now()
|
||||
return self.type == self.TYPE_ON_AIR and \
|
||||
self.start <= now and self.end >= now
|
||||
return (
|
||||
self.type == self.TYPE_ON_AIR
|
||||
and self.start <= now
|
||||
and self.end >= now
|
||||
)
|
||||
|
||||
@property
|
||||
def is_live(self):
|
||||
""" True if Diffusion is live (False if there are sounds files). """
|
||||
return self.type == self.TYPE_ON_AIR and \
|
||||
not self.episode.sound_set.archive().count()
|
||||
"""True if Diffusion is live (False if there are sounds files)."""
|
||||
return (
|
||||
self.type == self.TYPE_ON_AIR
|
||||
and not self.episode.sound_set.archive().count()
|
||||
)
|
||||
|
||||
def get_playlist(self, **types):
|
||||
"""
|
||||
Returns sounds as a playlist (list of *local* archive file path).
|
||||
"""Returns sounds as a playlist (list of *local* archive file path).
|
||||
|
||||
The given arguments are passed to ``get_sounds``.
|
||||
"""
|
||||
from .sound import Sound
|
||||
return list(self.get_sounds(**types)
|
||||
.filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
|
||||
.values_list('path', flat=True))
|
||||
|
||||
return list(
|
||||
self.get_sounds(**types)
|
||||
.filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
|
||||
.values_list("path", flat=True)
|
||||
)
|
||||
|
||||
def get_sounds(self, **types):
|
||||
"""
|
||||
Return a queryset of sounds related to this diffusion,
|
||||
ordered by type then path.
|
||||
"""Return a queryset of sounds related to this diffusion, ordered by
|
||||
type then path.
|
||||
|
||||
**types: filter on the given sound types name, as `archive=True`
|
||||
"""
|
||||
from .sound import Sound
|
||||
sounds = (self.initial or self).sound_set.order_by('type', 'path')
|
||||
_in = [getattr(Sound.Type, name)
|
||||
for name, value in types.items() if value]
|
||||
|
||||
sounds = (self.initial or self).sound_set.order_by("type", "path")
|
||||
_in = [
|
||||
getattr(Sound.Type, name) for name, value in types.items() if value
|
||||
]
|
||||
|
||||
return sounds.filter(type__in=_in)
|
||||
|
||||
def is_date_in_range(self, date=None):
|
||||
"""
|
||||
Return true if the given date is in the diffusion's start-end
|
||||
range.
|
||||
"""
|
||||
"""Return true if the given date is in the diffusion's start-end
|
||||
range."""
|
||||
date = date or tz.now()
|
||||
|
||||
return self.start < date < self.end
|
||||
|
||||
def get_conflicts(self):
|
||||
""" Return conflicting diffusions queryset """
|
||||
"""Return conflicting diffusions queryset."""
|
||||
|
||||
# conflicts=Diffusion.objects.filter(Q(start__lt=OuterRef('start'), end__gt=OuterRef('end')) | Q(start__gt=OuterRef('start'), start__lt=OuterRef('end')))
|
||||
# diffs= Diffusion.objects.annotate(conflict_with=Exists(conflicts)).filter(conflict_with=True)
|
||||
return Diffusion.objects.filter(
|
||||
Q(start__lt=self.start, end__gt=self.start) |
|
||||
Q(start__gt=self.start, start__lt=self.end)
|
||||
).exclude(pk=self.pk).distinct()
|
||||
# conflicts=Diffusion.objects.filter(
|
||||
# Q(start__lt=OuterRef('start'), end__gt=OuterRef('end')) |
|
||||
# Q(start__gt=OuterRef('start'), start__lt=OuterRef('end'))
|
||||
# )
|
||||
# diffs= Diffusion.objects.annotate(conflict_with=Exists(conflicts))
|
||||
# .filter(conflict_with=True)
|
||||
return (
|
||||
Diffusion.objects.filter(
|
||||
Q(start__lt=self.start, end__gt=self.start)
|
||||
| Q(start__gt=self.start, start__lt=self.end)
|
||||
)
|
||||
.exclude(pk=self.pk)
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def check_conflicts(self):
|
||||
conflicts = self.get_conflicts()
|
||||
|
@ -320,7 +351,7 @@ class Diffusion(BaseRerun):
|
|||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._initial = {
|
||||
'start': self.start,
|
||||
'end': self.end,
|
||||
'episode': getattr(self, 'episode', None),
|
||||
"start": self.start,
|
||||
"end": self.end,
|
||||
"episode": getattr(self, "episode", None),
|
||||
}
|
||||
|
|
|
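Taken together, the DiffusionQuerySet helpers above compose naturally; a hedged usage sketch (it assumes diffusions exist in the database, and `archive=True` follows the get_sounds() docstring):

import datetime
from aircox.models import Diffusion

# What is on air right now (on_air() + now() as defined above):
current = Diffusion.objects.on_air().now().first()

# Everything scheduled on a given day, ordered by start:
today = Diffusion.objects.at(datetime.date(2023, 4, 1))

# Upcoming on-air diffusions and their playlists of archive files:
for diff in Diffusion.objects.on_air().after()[:10]:
    paths = diff.get_playlist(archive=True)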
@ -1,32 +1,34 @@
|
|||
from collections import deque
|
||||
import datetime
|
||||
import gzip
|
||||
import logging
|
||||
import os
|
||||
from collections import deque
|
||||
|
||||
import yaml
|
||||
|
||||
from django.db import models
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from aircox import settings
|
||||
|
||||
from .episode import Diffusion
|
||||
from .sound import Sound, Track
|
||||
from .station import Station
|
||||
|
||||
|
||||
logger = logging.getLogger('aircox')
|
||||
logger = logging.getLogger("aircox")
|
||||
|
||||
|
||||
__all__ = ('Log', 'LogQuerySet', 'LogArchiver')
|
||||
__all__ = ("Log", "LogQuerySet", "LogArchiver")
|
||||
|
||||
|
||||
class LogQuerySet(models.QuerySet):
|
||||
def station(self, station=None, id=None):
|
||||
return self.filter(station=station) if id is None else \
|
||||
self.filter(station_id=id)
|
||||
return (
|
||||
self.filter(station=station)
|
||||
if id is None
|
||||
else self.filter(station_id=id)
|
||||
)
|
||||
|
||||
def date(self, date):
|
||||
start = tz.datetime.combine(date, datetime.time())
|
||||
|
@ -36,9 +38,11 @@ class LogQuerySet(models.QuerySet):
|
|||
# return self.filter(date__date=date)
|
||||
|
||||
def after(self, date):
|
||||
return self.filter(date__gte=date) \
|
||||
if isinstance(date, tz.datetime) else \
|
||||
self.filter(date__date__gte=date)
|
||||
return (
|
||||
self.filter(date__gte=date)
|
||||
if isinstance(date, tz.datetime)
|
||||
else self.filter(date__date__gte=date)
|
||||
)
|
||||
|
||||
def on_air(self):
|
||||
return self.filter(type=Log.TYPE_ON_AIR)
|
||||
|
@ -57,64 +61,80 @@ class LogQuerySet(models.QuerySet):
|
|||
|
||||
|
||||
class Log(models.Model):
|
||||
"""
|
||||
Log sounds and diffusions that are played on the station.
|
||||
"""Log sounds and diffusions that are played on the station.
|
||||
|
||||
This only remember what has been played on the outputs, not on each
|
||||
source; Source designate here which source is responsible of that.
|
||||
"""
|
||||
|
||||
TYPE_STOP = 0x00
|
||||
""" Source has been stopped, e.g. manually """
|
||||
"""Source has been stopped, e.g. manually."""
|
||||
# Rule: \/ diffusion != null \/ sound != null
|
||||
TYPE_START = 0x01
|
||||
""" Diffusion or sound has been request to be played. """
|
||||
"""Diffusion or sound has been request to be played."""
|
||||
TYPE_CANCEL = 0x02
|
||||
""" Diffusion has been canceled. """
|
||||
"""Diffusion has been canceled."""
|
||||
# Rule: \/ sound != null /\ track == null
|
||||
# \/ sound == null /\ track != null
|
||||
# \/ sound == null /\ track == null /\ comment = sound_path
|
||||
TYPE_ON_AIR = 0x03
|
||||
""" Sound or diffusion occured on air """
|
||||
"""Sound or diffusion occured on air."""
|
||||
TYPE_OTHER = 0x04
|
||||
""" Other log """
|
||||
"""Other log."""
|
||||
TYPE_CHOICES = (
|
||||
(TYPE_STOP, _('stop')), (TYPE_START, _('start')),
|
||||
(TYPE_CANCEL, _('cancelled')), (TYPE_ON_AIR, _('on air')),
|
||||
(TYPE_OTHER, _('other'))
|
||||
(TYPE_STOP, _("stop")),
|
||||
(TYPE_START, _("start")),
|
||||
(TYPE_CANCEL, _("cancelled")),
|
||||
(TYPE_ON_AIR, _("on air")),
|
||||
(TYPE_OTHER, _("other")),
|
||||
)
|
||||
|
||||
station = models.ForeignKey(
|
||||
Station, models.CASCADE,
|
||||
verbose_name=_('station'), help_text=_('related station'),
|
||||
Station,
|
||||
models.CASCADE,
|
||||
verbose_name=_("station"),
|
||||
help_text=_("related station"),
|
||||
)
|
||||
type = models.SmallIntegerField(_('type'), choices=TYPE_CHOICES)
|
||||
date = models.DateTimeField(_('date'), default=tz.now, db_index=True)
|
||||
type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
|
||||
date = models.DateTimeField(_("date"), default=tz.now, db_index=True)
|
||||
source = models.CharField(
|
||||
# we use a CharField to avoid loosing logs information if the
|
||||
# source is removed
|
||||
max_length=64, blank=True, null=True,
|
||||
verbose_name=_('source'),
|
||||
help_text=_('identifier of the source related to this log'),
|
||||
max_length=64,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("source"),
|
||||
help_text=_("identifier of the source related to this log"),
|
||||
)
|
||||
comment = models.CharField(
|
||||
max_length=512, blank=True, null=True,
|
||||
verbose_name=_('comment'),
|
||||
max_length=512,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("comment"),
|
||||
)
|
||||
sound = models.ForeignKey(
|
||||
Sound, models.SET_NULL,
|
||||
blank=True, null=True, db_index=True,
|
||||
verbose_name=_('Sound'),
|
||||
Sound,
|
||||
models.SET_NULL,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
verbose_name=_("Sound"),
|
||||
)
|
||||
track = models.ForeignKey(
|
||||
Track, models.SET_NULL,
|
||||
blank=True, null=True, db_index=True,
|
||||
verbose_name=_('Track'),
|
||||
Track,
|
||||
models.SET_NULL,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
verbose_name=_("Track"),
|
||||
)
|
||||
diffusion = models.ForeignKey(
|
||||
Diffusion, models.SET_NULL,
|
||||
blank=True, null=True, db_index=True,
|
||||
verbose_name=_('Diffusion'),
|
||||
Diffusion,
|
||||
models.SET_NULL,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
verbose_name=_("Diffusion"),
|
||||
)
|
||||
|
||||
objects = LogQuerySet.as_manager()
|
||||
|
@ -126,11 +146,9 @@ class Log(models.Model):
|
|||
# FIXME: required????
|
||||
@property
|
||||
def local_date(self):
|
||||
"""
|
||||
Return a version of self.date that is localized to self.timezone;
|
||||
This is needed since datetime are stored as UTC date and we want
|
||||
to get it as local time.
|
||||
"""
|
||||
"""Return a version of self.date that is localized to self.timezone;
|
||||
This is needed since datetime are stored as UTC date and we want to get
|
||||
it as local time."""
|
||||
return tz.localtime(self.date, tz.get_current_timezone())
|
||||
|
||||
# prepare for the future on crash + ease the use in merged lists with
|
||||
|
@ -140,13 +158,16 @@ class Log(models.Model):
|
|||
return self.date
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Log')
|
||||
verbose_name_plural = _('Logs')
|
||||
verbose_name = _("Log")
|
||||
verbose_name_plural = _("Logs")
|
||||
|
||||
def __str__(self):
|
||||
return '#{} ({}, {}, {})'.format(
|
||||
self.pk, self.get_type_display(),
|
||||
self.source, self.local_date.strftime('%Y/%m/%d %H:%M%z'))
|
||||
return "#{} ({}, {}, {})".format(
|
||||
self.pk,
|
||||
self.get_type_display(),
|
||||
self.source,
|
||||
self.local_date.strftime("%Y/%m/%d %H:%M%z"),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __list_append(cls, object_list, items):
|
||||
|
@ -154,15 +175,15 @@ class Log(models.Model):
|
|||
|
||||
@classmethod
|
||||
def merge_diffusions(cls, logs, diffs, count=None):
|
||||
"""
|
||||
Merge logs and diffusions together. `logs` can either be a queryset
|
||||
or a list ordered by `Log.date`.
|
||||
"""Merge logs and diffusions together.
|
||||
|
||||
`logs` can either be a queryset or a list ordered by `Log.date`.
|
||||
"""
|
||||
# TODO: limit count
|
||||
# FIXME: log may be iterable (in stats view)
|
||||
if isinstance(logs, models.QuerySet):
|
||||
logs = list(logs.order_by('-date'))
|
||||
diffs = deque(diffs.on_air().before().order_by('-start'))
|
||||
logs = list(logs.order_by("-date"))
|
||||
diffs = deque(diffs.on_air().before().order_by("-start"))
|
||||
object_list = []
|
||||
|
||||
while True:
|
||||
|
@ -177,8 +198,10 @@ class Log(models.Model):
|
|||
diff = diffs.popleft()
|
||||
|
||||
# - takes all logs after diff start
|
||||
index = next((i for i, v in enumerate(logs)
|
||||
if v.date <= diff.end), len(logs))
|
||||
index = next(
|
||||
(i for i, v in enumerate(logs) if v.date <= diff.end),
|
||||
len(logs),
|
||||
)
|
||||
if index is not None and index > 0:
|
||||
object_list += logs[:index]
|
||||
logs = logs[index:]
|
||||
|
@ -186,12 +209,14 @@ class Log(models.Model):
|
|||
if len(logs):
|
||||
# FIXME
|
||||
# - last log while diff is running
|
||||
#if logs[0].date > diff.start:
|
||||
# if logs[0].date > diff.start:
|
||||
# object_list.append(logs[0])
|
||||
|
||||
# - skips logs while diff is running
|
||||
index = next((i for i, v in enumerate(logs)
|
||||
if v.date < diff.start), len(logs))
|
||||
index = next(
|
||||
(i for i, v in enumerate(logs) if v.date < diff.start),
|
||||
len(logs),
|
||||
)
|
||||
if index is not None and index > 0:
|
||||
logs = logs[index:]
|
||||
|
||||
|
@ -203,18 +228,22 @@ class Log(models.Model):
|
|||
def print(self):
|
||||
r = []
|
||||
if self.diffusion:
|
||||
r.append('diff: ' + str(self.diffusion_id))
|
||||
r.append("diff: " + str(self.diffusion_id))
|
||||
if self.sound:
|
||||
r.append('sound: ' + str(self.sound_id))
|
||||
r.append("sound: " + str(self.sound_id))
|
||||
if self.track:
|
||||
r.append('track: ' + str(self.track_id))
|
||||
logger.info('log %s: %s%s', str(self), self.comment or '',
|
||||
' (' + ', '.join(r) + ')' if r else '')
|
||||
|
||||
r.append("track: " + str(self.track_id))
|
||||
logger.info(
|
||||
"log %s: %s%s",
|
||||
str(self),
|
||||
self.comment or "",
|
||||
" (" + ", ".join(r) + ")" if r else "",
|
||||
)
|
||||
|
||||
|
||||
class LogArchiver:
|
||||
""" Commodity class used to manage archives of logs. """
|
||||
"""Commodity class used to manage archives of logs."""
|
||||
|
||||
@cached_property
|
||||
def fields(self):
|
||||
return Log._meta.get_fields()
|
||||
|
@ -223,13 +252,14 @@ class LogArchiver:
|
|||
def get_path(station, date):
|
||||
return os.path.join(
|
||||
settings.AIRCOX_LOGS_ARCHIVES_DIR,
|
||||
'{}_{}.log.gz'.format(date.strftime("%Y%m%d"), station.pk)
|
||||
"{}_{}.log.gz".format(date.strftime("%Y%m%d"), station.pk),
|
||||
)
|
||||
|
||||
def archive(self, qs, keep=False):
|
||||
"""
|
||||
Archive logs of the given queryset. Delete archived logs if not
|
||||
`keep`. Return the count of archived logs
|
||||
"""Archive logs of the given queryset.
|
||||
|
||||
Delete archived logs if not `keep`. Return the count of archived
logs.
|
||||
"""
|
||||
if not qs.exists():
|
||||
return 0
|
||||
|
@ -242,8 +272,10 @@ class LogArchiver:
|
|||
# exists yet <3
|
||||
for (station, date), logs in logs.items():
|
||||
path = self.get_path(station, date)
|
||||
with gzip.open(path, 'ab') as archive:
|
||||
data = yaml.dump([self.serialize(l) for l in logs]).encode('utf8')
|
||||
with gzip.open(path, "ab") as archive:
|
||||
data = yaml.dump(
|
||||
[self.serialize(line) for line in logs]
|
||||
).encode("utf8")
|
||||
archive.write(data)
|
||||
|
||||
if not keep:
|
||||
|
@ -253,11 +285,9 @@ class LogArchiver:
|
|||
|
||||
@staticmethod
|
||||
def sort_logs(qs):
|
||||
"""
|
||||
Sort logs by station and date and return a dict of
|
||||
`{ (station,date): [logs] }`.
|
||||
"""
|
||||
qs = qs.order_by('date')
|
||||
"""Sort logs by station and date and return a dict of `{
|
||||
(station,date): [logs] }`."""
|
||||
qs = qs.order_by("date")
|
||||
logs = {}
|
||||
for log in qs:
|
||||
key = (log.station, log.date)
|
||||
|
@ -268,44 +298,45 @@ class LogArchiver:
|
|||
return logs
|
||||
|
||||
def serialize(self, log):
|
||||
""" Serialize log """
|
||||
return {i.attname: getattr(log, i.attname)
|
||||
for i in self.fields}
|
||||
"""Serialize log."""
|
||||
return {i.attname: getattr(log, i.attname) for i in self.fields}
|
||||
|
||||
def load(self, station, date):
|
||||
""" Load an archive returning logs in a list. """
|
||||
"""Load an archive returning logs in a list."""
|
||||
path = self.get_path(station, date)
|
||||
|
||||
if not os.path.exists(path):
|
||||
return []
|
||||
|
||||
with gzip.open(path, 'rb') as archive:
|
||||
with gzip.open(path, "rb") as archive:
|
||||
data = archive.read()
|
||||
logs = yaml.load(data)
|
||||
|
||||
# we need to preload diffusions, sounds and tracks
|
||||
rels = {
|
||||
'diffusion': self.get_relations(logs, Diffusion, 'diffusion'),
|
||||
'sound': self.get_relations(logs, Sound, 'sound'),
|
||||
'track': self.get_relations(logs, Track, 'track'),
|
||||
"diffusion": self.get_relations(logs, Diffusion, "diffusion"),
|
||||
"sound": self.get_relations(logs, Sound, "sound"),
|
||||
"track": self.get_relations(logs, Track, "track"),
|
||||
}
|
||||
|
||||
def rel_obj(log, attr):
|
||||
rel_id = log.get(attr + '_id')
|
||||
rel_id = log.get(attr + "_id")
|
||||
return rels[attr][rel_id] if rel_id else None
|
||||
|
||||
return [Log(diffusion=rel_obj(log, 'diffusion'),
|
||||
sound=rel_obj(log, 'sound'),
|
||||
track=rel_obj(log, 'track'),
|
||||
**log) for log in logs]
|
||||
return [
|
||||
Log(
|
||||
diffusion=rel_obj(log, "diffusion"),
|
||||
sound=rel_obj(log, "sound"),
|
||||
track=rel_obj(log, "track"),
|
||||
**log
|
||||
)
|
||||
for log in logs
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_relations(logs, model, attr):
|
||||
"""
|
||||
From a list of dict representing logs, retrieve related objects
|
||||
of the given type.
|
||||
"""
|
||||
attr_id = attr + '_id'
|
||||
"""From a list of dict representing logs, retrieve related objects of
|
||||
the given type."""
|
||||
attr_id = attr + "_id"
|
||||
pks = (log[attr_id] for log in logs if attr_id in log)
|
||||
return {rel.pk: rel for rel in model.objects.filter(pk__in=pks)}
|
||||
|
||||
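# --- illustrative usage sketch (not part of the commit) ----------------------
# How LogArchiver is meant to be used, assuming a Station instance and the
# AIRCOX_LOGS_ARCHIVES_DIR setting referenced above; the queryset filter and
# the `day` variable are examples only.
#
#   archiver = LogArchiver()
#   # dump the station's logs as gzipped YAML and delete them from the db
#   count = archiver.archive(Log.objects.filter(station=station), keep=False)
#   # later, read them back as a list of unsaved Log instances
#   logs = archiver.load(station, day)
# ------------------------------------------------------------------------------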
|
|
|
@@ -1,38 +1,42 @@
import re
|
||||
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.text import slugify
|
||||
from django.utils.html import format_html
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
import bleach
|
||||
from ckeditor_uploader.fields import RichTextUploadingField
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.html import format_html
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.text import slugify
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from filer.fields.image import FilerImageField
|
||||
from model_utils.managers import InheritanceQuerySet
|
||||
|
||||
from .station import Station
|
||||
|
||||
|
||||
__all__ = ('Category', 'PageQuerySet',
|
||||
'Page', 'StaticPage', 'Comment', 'NavItem')
|
||||
__all__ = (
|
||||
"Category",
|
||||
"PageQuerySet",
|
||||
"Page",
|
||||
"StaticPage",
|
||||
"Comment",
|
||||
"NavItem",
|
||||
)
|
||||
|
||||
|
||||
headline_re = re.compile(r'(<p>)?'
|
||||
r'(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))'
|
||||
r'(</p>)?')
|
||||
headline_re = re.compile(
|
||||
r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?"
|
||||
)
|
||||
|
||||
|
||||
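# Illustrative note (not part of the commit): headline_re feeds
# BasePage.headline below. It captures the first line, or the first ~140
# characters extended to the next period, of the bleach-cleaned content.
#
#   m = headline_re.search("First line of the article\nrest of the text")
#   m.groupdict()["headline"]   # -> "First line of the article\n"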
class Category(models.Model):
|
||||
title = models.CharField(_('title'), max_length=64)
|
||||
slug = models.SlugField(_('slug'), max_length=64, db_index=True)
|
||||
title = models.CharField(_("title"), max_length=64)
|
||||
slug = models.SlugField(_("slug"), max_length=64, db_index=True)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Category')
|
||||
verbose_name_plural = _('Categories')
|
||||
verbose_name = _("Category")
|
||||
verbose_name_plural = _("Categories")
|
||||
|
||||
def __str__(self):
|
||||
return self.title
|
||||
|
@ -49,68 +53,90 @@ class BasePageQuerySet(InheritanceQuerySet):
|
|||
return self.filter(status=Page.STATUS_TRASH)
|
||||
|
||||
def parent(self, parent=None, id=None):
|
||||
""" Return pages having this parent. """
|
||||
return self.filter(parent=parent) if id is None else \
|
||||
self.filter(parent__id=id)
|
||||
"""Return pages having this parent."""
|
||||
return (
|
||||
self.filter(parent=parent)
|
||||
if id is None
|
||||
else self.filter(parent__id=id)
|
||||
)
|
||||
|
||||
def search(self, q, search_content=True):
|
||||
if search_content:
|
||||
return self.filter(models.Q(title__icontains=q) | models.Q(content__icontains=q))
|
||||
return self.filter(
|
||||
models.Q(title__icontains=q) | models.Q(content__icontains=q)
|
||||
)
|
||||
return self.filter(title__icontains=q)
|
||||
|
||||
|
||||
class BasePage(models.Model):
|
||||
""" Base class for publishable content """
|
||||
"""Base class for publishable content."""
|
||||
|
||||
STATUS_DRAFT = 0x00
|
||||
STATUS_PUBLISHED = 0x10
|
||||
STATUS_TRASH = 0x20
|
||||
STATUS_CHOICES = (
|
||||
(STATUS_DRAFT, _('draft')),
|
||||
(STATUS_PUBLISHED, _('published')),
|
||||
(STATUS_TRASH, _('trash')),
|
||||
(STATUS_DRAFT, _("draft")),
|
||||
(STATUS_PUBLISHED, _("published")),
|
||||
(STATUS_TRASH, _("trash")),
|
||||
)
|
||||
|
||||
parent = models.ForeignKey('self', models.CASCADE, blank=True, null=True,
|
||||
db_index=True, related_name='child_set')
|
||||
parent = models.ForeignKey(
|
||||
"self",
|
||||
models.CASCADE,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
related_name="child_set",
|
||||
)
|
||||
title = models.CharField(max_length=100)
|
||||
slug = models.SlugField(_('slug'), max_length=120, blank=True, unique=True,
|
||||
db_index=True)
|
||||
slug = models.SlugField(
|
||||
_("slug"), max_length=120, blank=True, unique=True, db_index=True
|
||||
)
|
||||
status = models.PositiveSmallIntegerField(
|
||||
_('status'), default=STATUS_DRAFT, choices=STATUS_CHOICES,
|
||||
_("status"),
|
||||
default=STATUS_DRAFT,
|
||||
choices=STATUS_CHOICES,
|
||||
)
|
||||
cover = FilerImageField(
|
||||
on_delete=models.SET_NULL,
|
||||
verbose_name=_('cover'), null=True, blank=True,
|
||||
verbose_name=_("cover"),
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
content = RichTextUploadingField(
|
||||
_('content'), blank=True, null=True,
|
||||
_("content"),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
objects = BasePageQuerySet.as_manager()
|
||||
|
||||
detail_url_name = None
|
||||
item_template_name = 'aircox/widgets/page_item.html'
|
||||
item_template_name = "aircox/widgets/page_item.html"
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
def __str__(self):
|
||||
return '{}'.format(self.title or self.pk)
|
||||
return "{}".format(self.title or self.pk)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.title)[:100]
|
||||
count = Page.objects.filter(slug__startswith=self.slug).count()
|
||||
if count:
|
||||
self.slug += '-' + str(count)
|
||||
self.slug += "-" + str(count)
|
||||
|
||||
if self.parent and not self.cover:
|
||||
self.cover = self.parent.cover
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse(self.detail_url_name, kwargs={'slug': self.slug}) \
|
||||
if self.is_published else '#'
|
||||
return (
|
||||
reverse(self.detail_url_name, kwargs={"slug": self.slug})
|
||||
if self.is_published
|
||||
else "#"
|
||||
)
|
||||
|
||||
@property
|
||||
def is_draft(self):
|
||||
|
@ -133,15 +159,15 @@ class BasePage(models.Model):
|
|||
@cached_property
|
||||
def headline(self):
|
||||
if not self.content:
|
||||
return ''
|
||||
return ""
|
||||
content = bleach.clean(self.content, tags=[], strip=True)
|
||||
headline = headline_re.search(content)
|
||||
return mark_safe(headline.groupdict()['headline']) if headline else ''
|
||||
return mark_safe(headline.groupdict()["headline"]) if headline else ""
|
||||
|
||||
@classmethod
|
||||
def get_init_kwargs_from(cls, page, **kwargs):
|
||||
kwargs.setdefault('cover', page.cover)
|
||||
kwargs.setdefault('category', page.category)
|
||||
kwargs.setdefault("cover", page.cover)
|
||||
kwargs.setdefault("category", page.category)
|
||||
return kwargs
|
||||
|
||||
@classmethod
|
||||
|
@ -151,30 +177,39 @@ class BasePage(models.Model):
|
|||
|
||||
class PageQuerySet(BasePageQuerySet):
|
||||
def published(self):
|
||||
return self.filter(status=Page.STATUS_PUBLISHED,
|
||||
pub_date__lte=tz.now())
|
||||
return self.filter(
|
||||
status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now()
|
||||
)
|
||||
|
||||
|
||||
class Page(BasePage):
|
||||
""" Base Page model used for articles and other dated content. """
|
||||
"""Base Page model used for articles and other dated content."""
|
||||
|
||||
category = models.ForeignKey(
|
||||
Category, models.SET_NULL,
|
||||
verbose_name=_('category'), blank=True, null=True, db_index=True
|
||||
Category,
|
||||
models.SET_NULL,
|
||||
verbose_name=_("category"),
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
)
|
||||
pub_date = models.DateTimeField(
|
||||
_('publication date'), blank=True, null=True, db_index=True)
|
||||
_("publication date"), blank=True, null=True, db_index=True
|
||||
)
|
||||
featured = models.BooleanField(
|
||||
_('featured'), default=False,
|
||||
_("featured"),
|
||||
default=False,
|
||||
)
|
||||
allow_comments = models.BooleanField(
|
||||
_('allow comments'), default=True,
|
||||
_("allow comments"),
|
||||
default=True,
|
||||
)
|
||||
|
||||
objects = PageQuerySet.as_manager()
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Publication')
|
||||
verbose_name_plural = _('Publications')
|
||||
verbose_name = _("Publication")
|
||||
verbose_name_plural = _("Publications")
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self.is_published and self.pub_date is None:
|
||||
|
@ -188,8 +223,9 @@ class Page(BasePage):
|
|||
|
||||
|
||||
class StaticPage(BasePage):
|
||||
""" Static page that eventually can be attached to a specific view. """
|
||||
detail_url_name = 'static-page-detail'
|
||||
"""Static page that eventually can be attached to a specific view."""
|
||||
|
||||
detail_url_name = "static-page-detail"
|
||||
|
||||
ATTACH_TO_HOME = 0x00
|
||||
ATTACH_TO_DIFFUSIONS = 0x01
|
||||
|
@ -199,25 +235,28 @@ class StaticPage(BasePage):
|
|||
ATTACH_TO_ARTICLES = 0x05
|
||||
|
||||
ATTACH_TO_CHOICES = (
|
||||
(ATTACH_TO_HOME, _('Home page')),
|
||||
(ATTACH_TO_DIFFUSIONS, _('Diffusions page')),
|
||||
(ATTACH_TO_LOGS, _('Logs page')),
|
||||
(ATTACH_TO_PROGRAMS, _('Programs list')),
|
||||
(ATTACH_TO_EPISODES, _('Episodes list')),
|
||||
(ATTACH_TO_ARTICLES, _('Articles list')),
|
||||
(ATTACH_TO_HOME, _("Home page")),
|
||||
(ATTACH_TO_DIFFUSIONS, _("Diffusions page")),
|
||||
(ATTACH_TO_LOGS, _("Logs page")),
|
||||
(ATTACH_TO_PROGRAMS, _("Programs list")),
|
||||
(ATTACH_TO_EPISODES, _("Episodes list")),
|
||||
(ATTACH_TO_ARTICLES, _("Articles list")),
|
||||
)
|
||||
VIEWS = {
|
||||
ATTACH_TO_HOME: 'home',
|
||||
ATTACH_TO_DIFFUSIONS: 'diffusion-list',
|
||||
ATTACH_TO_LOGS: 'log-list',
|
||||
ATTACH_TO_PROGRAMS: 'program-list',
|
||||
ATTACH_TO_EPISODES: 'episode-list',
|
||||
ATTACH_TO_ARTICLES: 'article-list',
|
||||
ATTACH_TO_HOME: "home",
|
||||
ATTACH_TO_DIFFUSIONS: "diffusion-list",
|
||||
ATTACH_TO_LOGS: "log-list",
|
||||
ATTACH_TO_PROGRAMS: "program-list",
|
||||
ATTACH_TO_EPISODES: "episode-list",
|
||||
ATTACH_TO_ARTICLES: "article-list",
|
||||
}
|
||||
|
||||
attach_to = models.SmallIntegerField(
|
||||
_('attach to'), choices=ATTACH_TO_CHOICES, blank=True, null=True,
|
||||
help_text=_('display this page content to related element'),
|
||||
_("attach to"),
|
||||
choices=ATTACH_TO_CHOICES,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("display this page content to related element"),
|
||||
)
|
||||
|
||||
def get_absolute_url(self):
|
||||
|
@ -228,49 +267,65 @@ class StaticPage(BasePage):
|
|||
|
||||
class Comment(models.Model):
|
||||
page = models.ForeignKey(
|
||||
Page, models.CASCADE, verbose_name=_('related page'),
|
||||
Page,
|
||||
models.CASCADE,
|
||||
verbose_name=_("related page"),
|
||||
db_index=True,
|
||||
# TODO: allow_comment filter
|
||||
)
|
||||
nickname = models.CharField(_('nickname'), max_length=32)
|
||||
email = models.EmailField(_('email'), max_length=32)
|
||||
nickname = models.CharField(_("nickname"), max_length=32)
|
||||
email = models.EmailField(_("email"), max_length=32)
|
||||
date = models.DateTimeField(auto_now_add=True)
|
||||
content = models.TextField(_('content'), max_length=1024)
|
||||
content = models.TextField(_("content"), max_length=1024)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Comment')
|
||||
verbose_name_plural = _('Comments')
|
||||
verbose_name = _("Comment")
|
||||
verbose_name_plural = _("Comments")
|
||||
|
||||
|
||||
class NavItem(models.Model):
|
||||
""" Navigation menu items """
|
||||
"""Navigation menu items."""
|
||||
|
||||
station = models.ForeignKey(
|
||||
Station, models.CASCADE, verbose_name=_('station'))
|
||||
menu = models.SlugField(_('menu'), max_length=24)
|
||||
order = models.PositiveSmallIntegerField(_('order'))
|
||||
text = models.CharField(_('title'), max_length=64)
|
||||
url = models.CharField(_('url'), max_length=256, blank=True, null=True)
|
||||
page = models.ForeignKey(StaticPage, models.CASCADE, db_index=True,
|
||||
verbose_name=_('page'), blank=True, null=True)
|
||||
Station, models.CASCADE, verbose_name=_("station")
|
||||
)
|
||||
menu = models.SlugField(_("menu"), max_length=24)
|
||||
order = models.PositiveSmallIntegerField(_("order"))
|
||||
text = models.CharField(_("title"), max_length=64)
|
||||
url = models.CharField(_("url"), max_length=256, blank=True, null=True)
|
||||
page = models.ForeignKey(
|
||||
StaticPage,
|
||||
models.CASCADE,
|
||||
db_index=True,
|
||||
verbose_name=_("page"),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Menu item')
|
||||
verbose_name_plural = _('Menu items')
|
||||
ordering = ('order', 'pk')
|
||||
verbose_name = _("Menu item")
|
||||
verbose_name_plural = _("Menu items")
|
||||
ordering = ("order", "pk")
|
||||
|
||||
def get_url(self):
|
||||
return self.url if self.url else \
|
||||
self.page.get_absolute_url() if self.page else None
|
||||
return (
|
||||
self.url
|
||||
if self.url
|
||||
else self.page.get_absolute_url()
|
||||
if self.page
|
||||
else None
|
||||
)
|
||||
|
||||
def render(self, request, css_class='', active_class=''):
|
||||
def render(self, request, css_class="", active_class=""):
|
||||
url = self.get_url()
|
||||
if active_class and request.path.startswith(url):
|
||||
css_class += ' ' + active_class
|
||||
css_class += " " + active_class
|
||||
|
||||
if not url:
|
||||
return self.text
|
||||
elif not css_class:
|
||||
return format_html('<a href="{}">{}</a>', url, self.text)
|
||||
else:
|
||||
return format_html('<a href="{}" class="{}">{}</a>', url,
|
||||
css_class, self.text)
|
||||
|
||||
return format_html(
|
||||
'<a href="{}" class="{}">{}</a>', url, css_class, self.text
|
||||
)
|
||||
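# Illustrative sketch (not part of the commit): what render() produces for a
# hypothetical menu item; names and paths are examples only.
#
#   item = NavItem(text="Programs", url="/programs/")
#   item.render(request, css_class="nav-link", active_class="active")
#   # -> '<a href="/programs/" class="nav-link active">Programs</a>'
#   #    when request.path starts with "/programs/"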
|
|
|
@@ -1,9 +1,9 @@
import calendar
|
||||
from collections import OrderedDict
|
||||
from enum import IntEnum
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from collections import OrderedDict
|
||||
from enum import IntEnum
|
||||
|
||||
import pytz
|
||||
from django.conf import settings as conf
|
||||
|
@ -12,19 +12,26 @@ from django.db import models
|
|||
from django.db.models import F
|
||||
from django.db.models.functions import Concat, Substr
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from aircox import settings, utils
|
||||
|
||||
from .page import Page, PageQuerySet
|
||||
from .station import Station
|
||||
|
||||
|
||||
logger = logging.getLogger('aircox')
|
||||
logger = logging.getLogger("aircox")
|
||||
|
||||
|
||||
__all__ = ('Program', 'ProgramQuerySet', 'Stream', 'Schedule',
|
||||
'ProgramChildQuerySet', 'BaseRerun', 'BaseRerunQuerySet')
|
||||
__all__ = (
|
||||
"Program",
|
||||
"ProgramQuerySet",
|
||||
"Stream",
|
||||
"Schedule",
|
||||
"ProgramChildQuerySet",
|
||||
"BaseRerun",
|
||||
"BaseRerunQuerySet",
|
||||
)
|
||||
|
||||
|
||||
class ProgramQuerySet(PageQuerySet):
|
||||
|
@ -37,8 +44,7 @@ class ProgramQuerySet(PageQuerySet):
|
|||
|
||||
|
||||
class Program(Page):
|
||||
"""
|
||||
A Program can either be a Streamed or a Scheduled program.
|
||||
"""A Program can either be a Streamed or a Scheduled program.
|
||||
|
||||
A Streamed program is used to generate non-stop random playlists when there
is no scheduled diffusion. In such a case, a Stream is used to describe
|
||||
|
@ -49,32 +55,35 @@ class Program(Page):
|
|||
Renaming a Program renames the corresponding directory to match the new
name if it does not exist.
|
||||
"""
|
||||
|
||||
# explicit foreign key in order to avoid related name clashes
|
||||
station = models.ForeignKey(Station, models.CASCADE,
|
||||
verbose_name=_('station'))
|
||||
station = models.ForeignKey(
|
||||
Station, models.CASCADE, verbose_name=_("station")
|
||||
)
|
||||
active = models.BooleanField(
|
||||
_('active'),
|
||||
_("active"),
|
||||
default=True,
|
||||
help_text=_('if not checked this program is no longer active')
|
||||
help_text=_("if not checked this program is no longer active"),
|
||||
)
|
||||
sync = models.BooleanField(
|
||||
_('syncronise'),
|
||||
_("syncronise"),
|
||||
default=True,
|
||||
help_text=_('update later diffusions according to schedule changes')
|
||||
help_text=_("update later diffusions according to schedule changes"),
|
||||
)
|
||||
|
||||
objects = ProgramQuerySet.as_manager()
|
||||
detail_url_name = 'program-detail'
|
||||
detail_url_name = "program-detail"
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
""" Return program's directory path """
|
||||
return os.path.join(settings.AIRCOX_PROGRAMS_DIR,
|
||||
self.slug.replace('-', '_'))
|
||||
"""Return program's directory path."""
|
||||
return os.path.join(
|
||||
settings.AIRCOX_PROGRAMS_DIR, self.slug.replace("-", "_")
|
||||
)
|
||||
|
||||
@property
|
||||
def abspath(self):
|
||||
""" Return absolute path to program's dir """
|
||||
"""Return absolute path to program's dir."""
|
||||
return os.path.join(conf.MEDIA_ROOT, self.path)
|
||||
|
||||
@property
|
||||
|
@ -93,69 +102,88 @@ class Program(Page):
|
|||
|
||||
@classmethod
|
||||
def get_from_path(cl, path):
|
||||
"""
|
||||
Return a Program from the given path. We assume the path has been
|
||||
given in a previous time by this model (Program.path getter).
|
||||
"""Return a Program from the given path.
|
||||
|
||||
We assume the path was generated earlier by this
model (Program.path getter).
|
||||
"""
|
||||
if path.startswith(settings.AIRCOX_PROGRAMS_DIR_ABS):
|
||||
path = path.replace(settings.AIRCOX_PROGRAMS_DIR_ABS, '')
|
||||
while path[0] == '/':
|
||||
path = path.replace(settings.AIRCOX_PROGRAMS_DIR_ABS, "")
|
||||
while path[0] == "/":
|
||||
path = path[1:]
|
||||
path = path[:path.index('/')]
|
||||
return cl.objects.filter(slug=path.replace('_','-')).first()
|
||||
path = path[: path.index("/")]
|
||||
return cl.objects.filter(slug=path.replace("_", "-")).first()
|
||||
|
||||
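# Illustrative sketch (not part of the commit), assuming
# AIRCOX_PROGRAMS_DIR_ABS == "/srv/media/programs":
#
#   Program.get_from_path("/srv/media/programs/my_show/archives/ep_01.mp3")
#   # strips the prefix, keeps "my_show", and looks up the program whose
#   # slug is "my-show"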
def ensure_dir(self, subdir=None):
|
||||
"""Make sur the program's dir exists (and optionally subdir).
|
||||
|
||||
Return True if the dir (or subdir) exists.
|
||||
"""
|
||||
Make sur the program's dir exists (and optionally subdir). Return True
|
||||
if the dir (or subdir) exists.
|
||||
"""
|
||||
path = os.path.join(self.abspath, subdir) if subdir else \
|
||||
self.abspath
|
||||
path = os.path.join(self.abspath, subdir) if subdir else self.abspath
|
||||
os.makedirs(path, exist_ok=True)
|
||||
return os.path.exists(path)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Program')
|
||||
verbose_name_plural = _('Programs')
|
||||
verbose_name = _("Program")
|
||||
verbose_name_plural = _("Programs")
|
||||
|
||||
def __str__(self):
|
||||
return self.title
|
||||
|
||||
def save(self, *kargs, **kwargs):
|
||||
from .sound import Sound
|
||||
|
||||
super().save(*kargs, **kwargs)
|
||||
|
||||
# TODO: move in signals
|
||||
path_ = getattr(self, '__initial_path', None)
|
||||
path_ = getattr(self, "__initial_path", None)
|
||||
abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
|
||||
if path_ is not None and path_ != self.path and \
|
||||
os.path.exists(abspath) and not os.path.exists(self.abspath):
|
||||
logger.info('program #%s\'s dir changed to %s - update it.',
|
||||
self.id, self.title)
|
||||
if (
|
||||
path_ is not None
|
||||
and path_ != self.path
|
||||
and os.path.exists(abspath)
|
||||
and not os.path.exists(self.abspath)
|
||||
):
|
||||
logger.info(
|
||||
"program #%s's dir changed to %s - update it.",
|
||||
self.id,
|
||||
self.title,
|
||||
)
|
||||
|
||||
shutil.move(abspath, self.abspath)
|
||||
Sound.objects.filter(path__startswith=path_) \
|
||||
.update(file=Concat('file', Substr(F('file'), len(path_))))
|
||||
Sound.objects.filter(path__startswith=path_).update(
|
||||
file=Concat("file", Substr(F("file"), len(path_)))
|
||||
)
|
||||
|
||||
|
||||
class ProgramChildQuerySet(PageQuerySet):
|
||||
def station(self, station=None, id=None):
|
||||
return self.filter(parent__program__station=station) if id is None else \
|
||||
self.filter(parent__program__station__id=id)
|
||||
return (
|
||||
self.filter(parent__program__station=station)
|
||||
if id is None
|
||||
else self.filter(parent__program__station__id=id)
|
||||
)
|
||||
|
||||
def program(self, program=None, id=None):
|
||||
return self.parent(program, id)
|
||||
|
||||
|
||||
class BaseRerunQuerySet(models.QuerySet):
|
||||
""" Queryset for BaseRerun (sub)classes. """
|
||||
"""Queryset for BaseRerun (sub)classes."""
|
||||
|
||||
def station(self, station=None, id=None):
|
||||
return self.filter(program__station=station) if id is None else \
|
||||
self.filter(program__station__id=id)
|
||||
return (
|
||||
self.filter(program__station=station)
|
||||
if id is None
|
||||
else self.filter(program__station__id=id)
|
||||
)
|
||||
|
||||
def program(self, program=None, id=None):
|
||||
return self.filter(program=program) if id is None else \
|
||||
self.filter(program__id=id)
|
||||
return (
|
||||
self.filter(program=program)
|
||||
if id is None
|
||||
else self.filter(program__id=id)
|
||||
)
|
||||
|
||||
def rerun(self):
|
||||
return self.filter(initial__isnull=False)
|
||||
|
@ -165,19 +193,27 @@ class BaseRerunQuerySet(models.QuerySet):
|
|||
|
||||
|
||||
class BaseRerun(models.Model):
|
||||
"""Abstract model offering rerun facilities.
|
||||
|
||||
Assume `start` is a datetime field or attribute implemented by
|
||||
subclass.
|
||||
"""
|
||||
Abstract model offering rerun facilities. Assume `start` is a
|
||||
datetime field or attribute implemented by subclass.
|
||||
"""
|
||||
|
||||
program = models.ForeignKey(
|
||||
Program, models.CASCADE, db_index=True,
|
||||
verbose_name=_('related program'),
|
||||
Program,
|
||||
models.CASCADE,
|
||||
db_index=True,
|
||||
verbose_name=_("related program"),
|
||||
)
|
||||
initial = models.ForeignKey(
|
||||
'self', models.SET_NULL, related_name='rerun_set',
|
||||
verbose_name=_('rerun of'),
|
||||
limit_choices_to={'initial__isnull': True},
|
||||
blank=True, null=True, db_index=True,
|
||||
"self",
|
||||
models.SET_NULL,
|
||||
related_name="rerun_set",
|
||||
verbose_name=_("rerun of"),
|
||||
limit_choices_to={"initial__isnull": True},
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
)
|
||||
|
||||
objects = BaseRerunQuerySet.as_manager()
|
||||
|
@ -212,25 +248,27 @@ class BaseRerun(models.Model):
|
|||
return self.initial is not None
|
||||
|
||||
def get_initial(self):
|
||||
""" Return the initial schedule (self or initial) """
|
||||
"""Return the initial schedule (self or initial)"""
|
||||
return self if self.initial is None else self.initial.get_initial()
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
if self.initial is not None and self.initial.start >= self.start:
|
||||
raise ValidationError({
|
||||
'initial': _('rerun must happen after original')
|
||||
})
|
||||
raise ValidationError(
|
||||
{"initial": _("rerun must happen after original")}
|
||||
)
|
||||
|
||||
|
||||
# ? BIG FIXME: self.date is still used as datetime
|
||||
class Schedule(BaseRerun):
|
||||
"""A Schedule defines time slots of programs' diffusions.
|
||||
|
||||
It can be an initial run or a rerun (in such case it is linked to
|
||||
the related schedule).
|
||||
"""
|
||||
A Schedule defines time slots of programs' diffusions. It can be an initial
|
||||
run or a rerun (in such case it is linked to the related schedule).
|
||||
"""
|
||||
# Frequency for schedules. Basically, it is a mask of bits where each bit is
|
||||
# a week. Bits > rank 5 are used for special schedules.
|
||||
|
||||
# Frequency for schedules. Basically, it is a mask of bits where each bit
|
||||
# is a week. Bits > rank 5 are used for special schedules.
|
||||
# Important: the first week is always the first week where the weekday of
|
||||
# the schedule is present.
|
||||
# For ponctual programs, there is no need for a schedule, only a diffusion
|
||||
|
@ -247,45 +285,55 @@ class Schedule(BaseRerun):
|
|||
one_on_two = 0b100000
|
||||
|
||||
date = models.DateField(
|
||||
_('date'), help_text=_('date of the first diffusion'),
|
||||
_("date"),
|
||||
help_text=_("date of the first diffusion"),
|
||||
)
|
||||
time = models.TimeField(
|
||||
_('time'), help_text=_('start time'),
|
||||
_("time"),
|
||||
help_text=_("start time"),
|
||||
)
|
||||
timezone = models.CharField(
|
||||
_('timezone'),
|
||||
default=tz.get_current_timezone, max_length=100,
|
||||
_("timezone"),
|
||||
default=tz.get_current_timezone,
|
||||
max_length=100,
|
||||
choices=[(x, x) for x in pytz.all_timezones],
|
||||
help_text=_('timezone used for the date')
|
||||
help_text=_("timezone used for the date"),
|
||||
)
|
||||
duration = models.TimeField(
|
||||
_('duration'),
|
||||
help_text=_('regular duration'),
|
||||
_("duration"),
|
||||
help_text=_("regular duration"),
|
||||
)
|
||||
frequency = models.SmallIntegerField(
|
||||
_('frequency'),
|
||||
choices=[(int(y), {
|
||||
'ponctual': _('ponctual'),
|
||||
'first': _('1st {day} of the month'),
|
||||
'second': _('2nd {day} of the month'),
|
||||
'third': _('3rd {day} of the month'),
|
||||
'fourth': _('4th {day} of the month'),
|
||||
'last': _('last {day} of the month'),
|
||||
'first_and_third': _('1st and 3rd {day} of the month'),
|
||||
'second_and_fourth': _('2nd and 4th {day} of the month'),
|
||||
'every': _('{day}'),
|
||||
'one_on_two': _('one {day} on two'),
|
||||
}[x]) for x, y in Frequency.__members__.items()],
|
||||
_("frequency"),
|
||||
choices=[
|
||||
(
|
||||
int(y),
|
||||
{
|
||||
"ponctual": _("ponctual"),
|
||||
"first": _("1st {day} of the month"),
|
||||
"second": _("2nd {day} of the month"),
|
||||
"third": _("3rd {day} of the month"),
|
||||
"fourth": _("4th {day} of the month"),
|
||||
"last": _("last {day} of the month"),
|
||||
"first_and_third": _("1st and 3rd {day} of the month"),
|
||||
"second_and_fourth": _("2nd and 4th {day} of the month"),
|
||||
"every": _("{day}"),
|
||||
"one_on_two": _("one {day} on two"),
|
||||
}[x],
|
||||
)
|
||||
for x, y in Frequency.__members__.items()
|
||||
],
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Schedule')
|
||||
verbose_name_plural = _('Schedules')
|
||||
verbose_name = _("Schedule")
|
||||
verbose_name_plural = _("Schedules")
|
||||
|
||||
def __str__(self):
|
||||
return '{} - {}, {}'.format(
|
||||
self.program.title, self.get_frequency_verbose(),
|
||||
self.time.strftime('%H:%M')
|
||||
return "{} - {}, {}".format(
|
||||
self.program.title,
|
||||
self.get_frequency_verbose(),
|
||||
self.time.strftime("%H:%M"),
|
||||
)
|
||||
|
||||
def save_rerun(self, *args, **kwargs):
|
||||
|
@ -295,31 +343,35 @@ class Schedule(BaseRerun):
|
|||
|
||||
@cached_property
|
||||
def tz(self):
|
||||
""" Pytz timezone of the schedule. """
|
||||
"""Pytz timezone of the schedule."""
|
||||
import pytz
|
||||
|
||||
return pytz.timezone(self.timezone)
|
||||
|
||||
@cached_property
|
||||
def start(self):
|
||||
""" Datetime of the start (timezone unaware) """
|
||||
"""Datetime of the start (timezone unaware)"""
|
||||
return tz.datetime.combine(self.date, self.time)
|
||||
|
||||
@cached_property
|
||||
def end(self):
|
||||
""" Datetime of the end """
|
||||
"""Datetime of the end."""
|
||||
return self.start + utils.to_timedelta(self.duration)
|
||||
|
||||
def get_frequency_verbose(self):
|
||||
""" Return frequency formated for display """
|
||||
"""Return frequency formated for display."""
|
||||
from django.template.defaultfilters import date
|
||||
return self.get_frequency_display().format(
|
||||
day=date(self.date, 'l')
|
||||
).capitalize()
|
||||
|
||||
return (
|
||||
self.get_frequency_display()
|
||||
.format(day=date(self.date, "l"))
|
||||
.capitalize()
|
||||
)
|
||||
|
||||
# initial cached data
|
||||
__initial = None
|
||||
|
||||
def changed(self, fields=['date', 'duration', 'frequency', 'timezone']):
|
||||
def changed(self, fields=["date", "duration", "frequency", "timezone"]):
|
||||
initial = self._Schedule__initial
|
||||
|
||||
if not initial:
|
||||
|
@ -334,15 +386,13 @@ class Schedule(BaseRerun):
|
|||
return False
|
||||
|
||||
def normalize(self, date):
|
||||
"""
|
||||
Return a datetime set to schedule's time for the provided date,
|
||||
handling timezone (based on schedule's timezone).
|
||||
"""
|
||||
"""Return a datetime set to schedule's time for the provided date,
|
||||
handling timezone (based on schedule's timezone)."""
|
||||
date = tz.datetime.combine(date, self.time)
|
||||
return self.tz.normalize(self.tz.localize(date))
|
||||
|
||||
def dates_of_month(self, date):
|
||||
""" Return normalized diffusion dates of provided date's month. """
|
||||
"""Return normalized diffusion dates of provided date's month."""
|
||||
if self.frequency == Schedule.Frequency.ponctual:
|
||||
return []
|
||||
|
||||
|
@ -352,7 +402,8 @@ class Schedule(BaseRerun):
|
|||
# last of the month
|
||||
if freq == Schedule.Frequency.last:
|
||||
date = date.replace(
|
||||
day=calendar.monthrange(date.year, date.month)[1])
|
||||
day=calendar.monthrange(date.year, date.month)[1]
|
||||
)
|
||||
date_wday = date.weekday()
|
||||
|
||||
# end of month before the wanted weekday: move one week back
|
||||
|
@ -361,56 +412,72 @@ class Schedule(BaseRerun):
|
|||
date += tz.timedelta(days=sched_wday - date_wday)
|
||||
return [self.normalize(date)]
|
||||
|
||||
# move to the first day of the month that matches the schedule's weekday
|
||||
# check on SO#3284452 for the formula
|
||||
# move to the first day of the month that matches the schedule's
|
||||
# weekday. Check on SO#3284452 for the formula
|
||||
date_wday, month = date.weekday(), date.month
|
||||
date += tz.timedelta(days=(7 if date_wday > sched_wday else 0) -
|
||||
date_wday + sched_wday)
|
||||
date += tz.timedelta(
|
||||
days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday
|
||||
)
|
||||
|
||||
if freq == Schedule.Frequency.one_on_two:
|
||||
# - adjust date with modulo 14 (= 2 weeks in days)
|
||||
# - there are max 3 "weeks on two" per month
|
||||
if (date - self.date).days % 14:
|
||||
date += tz.timedelta(days=7)
|
||||
dates = (date + tz.timedelta(days=14*i) for i in range(0, 3))
|
||||
dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3))
|
||||
else:
|
||||
dates = (date + tz.timedelta(days=7*week) for week in range(0, 5)
|
||||
if freq & (0b1 << week))
|
||||
dates = (
|
||||
date + tz.timedelta(days=7 * week)
|
||||
for week in range(0, 5)
|
||||
if freq & (0b1 << week)
|
||||
)
|
||||
|
||||
return [self.normalize(date) for date in dates if date.month == month]
|
||||
|
||||
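# Illustrative note (not part of the commit): how the frequency bit mask
# drives the selection above. Bits 0..4 stand for the 1st..5th occurrence of
# the schedule's weekday in the month (the concrete Frequency values other
# than one_on_two are assumed here).
#
#   freq = 0b000101                                    # e.g. first_and_third
#   weeks = [w for w in range(5) if freq & (0b1 << w)]
#   # -> [0, 2]: keep the 1st and 3rd matching weekday, then normalize()
#   #    localizes each resulting date to the schedule's timezone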
|
||||
def _exclude_existing_date(self, dates):
|
||||
from .episode import Diffusion
|
||||
saved = set(Diffusion.objects.filter(start__in=dates)
|
||||
.values_list('start', flat=True))
|
||||
|
||||
saved = set(
|
||||
Diffusion.objects.filter(start__in=dates).values_list(
|
||||
"start", flat=True
|
||||
)
|
||||
)
|
||||
return [date for date in dates if date not in saved]
|
||||
|
||||
|
||||
def diffusions_of_month(self, date):
|
||||
"""
|
||||
Get episodes and diffusions for month of provided date, including
|
||||
"""Get episodes and diffusions for month of provided date, including
|
||||
reruns.
|
||||
|
||||
:returns: tuple([Episode], [Diffusion])
|
||||
"""
|
||||
from .episode import Diffusion, Episode
|
||||
if self.initial is not None or \
|
||||
self.frequency == Schedule.Frequency.ponctual:
|
||||
|
||||
if (
|
||||
self.initial is not None
|
||||
or self.frequency == Schedule.Frequency.ponctual
|
||||
):
|
||||
return [], []
|
||||
|
||||
# dates for self and reruns as (date, initial)
|
||||
reruns = [(rerun, rerun.date - self.date)
|
||||
for rerun in self.rerun_set.all()]
|
||||
reruns = [
|
||||
(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()
|
||||
]
|
||||
|
||||
dates = OrderedDict((date, None) for date in self.dates_of_month(date))
|
||||
dates.update([(rerun.normalize(date.date() + delta), date)
|
||||
for date in dates.keys() for rerun, delta in reruns])
|
||||
dates.update(
|
||||
[
|
||||
(rerun.normalize(date.date() + delta), date)
|
||||
for date in dates.keys()
|
||||
for rerun, delta in reruns
|
||||
]
|
||||
)
|
||||
|
||||
# remove dates corresponding to existing diffusions
|
||||
saved = set(Diffusion.objects.filter(start__in=dates.keys(),
|
||||
program=self.program,
|
||||
schedule=self)
|
||||
.values_list('start', flat=True))
|
||||
saved = set(
|
||||
Diffusion.objects.filter(
|
||||
start__in=dates.keys(), program=self.program, schedule=self
|
||||
).values_list("start", flat=True)
|
||||
)
|
||||
|
||||
# make diffs
|
||||
duration = utils.to_timedelta(self.duration)
|
||||
|
@ -430,8 +497,12 @@ class Schedule(BaseRerun):
|
|||
initial = diffusions[initial]
|
||||
|
||||
diffusions[date] = Diffusion(
|
||||
episode=episode, schedule=self, type=Diffusion.TYPE_ON_AIR,
|
||||
initial=initial, start=date, end=date+duration
|
||||
episode=episode,
|
||||
schedule=self,
|
||||
type=Diffusion.TYPE_ON_AIR,
|
||||
initial=initial,
|
||||
start=date,
|
||||
end=date + duration,
|
||||
)
|
||||
return episodes.values(), diffusions.values()
|
||||
|
||||
|
@ -440,36 +511,38 @@ class Schedule(BaseRerun):
|
|||
|
||||
# TODO/FIXME: use validators?
|
||||
if self.initial is not None and self.date > self.date:
|
||||
raise ValueError('initial must be later')
|
||||
raise ValueError("initial must be later")
|
||||
|
||||
|
||||
class Stream(models.Model):
|
||||
"""
|
||||
When there are no program scheduled, it is possible to play sounds
|
||||
in order to avoid blanks. A Stream is a Program that plays this role,
|
||||
and whose linked to a Stream.
|
||||
"""When there are no program scheduled, it is possible to play sounds in
|
||||
order to avoid blanks. A Stream is a Program that plays this role, and
|
||||
whose linked to a Stream.
|
||||
|
||||
All sounds that are marked as good and that are under the related
program's archive dir are eligible for the sound's selection.
|
||||
"""
|
||||
|
||||
program = models.ForeignKey(
|
||||
Program, models.CASCADE,
|
||||
verbose_name=_('related program'),
|
||||
Program,
|
||||
models.CASCADE,
|
||||
verbose_name=_("related program"),
|
||||
)
|
||||
delay = models.TimeField(
|
||||
_('delay'), blank=True, null=True,
|
||||
help_text=_('minimal delay between two sound plays')
|
||||
_("delay"),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("minimal delay between two sound plays"),
|
||||
)
|
||||
begin = models.TimeField(
|
||||
_('begin'), blank=True, null=True,
|
||||
help_text=_('used to define a time range this stream is '
|
||||
'played')
|
||||
_("begin"),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("used to define a time range this stream is " "played"),
|
||||
)
|
||||
end = models.TimeField(
|
||||
_('end'),
|
||||
blank=True, null=True,
|
||||
help_text=_('used to define a time range this stream is '
|
||||
'played')
|
||||
_("end"),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("used to define a time range this stream is " "played"),
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@@ -1,6 +1,4 @@
import pytz
|
||||
|
||||
from django.contrib.auth.models import User, Group, Permission
|
||||
from django.contrib.auth.models import Group, Permission, User
|
||||
from django.db import transaction
|
||||
from django.db.models import signals
|
||||
from django.dispatch import receiver
|
||||
|
@ -18,9 +16,7 @@ from . import Diffusion, Episode, Page, Program, Schedule
|
|||
#
|
||||
@receiver(signals.post_save, sender=User)
|
||||
def user_default_groups(sender, instance, created, *args, **kwargs):
|
||||
"""
|
||||
Set users to different default groups
|
||||
"""
|
||||
"""Set users to different default groups."""
|
||||
if not created or instance.is_superuser:
|
||||
return
|
||||
|
||||
|
@ -32,7 +28,8 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
|
|||
if created and permissions:
|
||||
for codename in permissions:
|
||||
permission = Permission.objects.filter(
|
||||
codename=codename).first()
|
||||
codename=codename
|
||||
).first()
|
||||
if permission:
|
||||
group.permissions.add(permission)
|
||||
group.save()
|
||||
|
@ -42,43 +39,40 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
|
|||
@receiver(signals.post_save, sender=Page)
|
||||
def page_post_save(sender, instance, created, *args, **kwargs):
|
||||
if not created and instance.cover:
|
||||
Page.objects.filter(parent=instance, cover__isnull=True) \
|
||||
.update(cover=instance.cover)
|
||||
Page.objects.filter(parent=instance, cover__isnull=True).update(
|
||||
cover=instance.cover
|
||||
)
|
||||
|
||||
|
||||
@receiver(signals.post_save, sender=Program)
|
||||
def program_post_save(sender, instance, created, *args, **kwargs):
|
||||
"""
|
||||
Clean-up later diffusions when a program becomes inactive
|
||||
"""
|
||||
"""Clean-up later diffusions when a program becomes inactive."""
|
||||
if not instance.active:
|
||||
Diffusion.object.program(instance).after(tz.now()).delete()
|
||||
Episode.object.parent(instance).filter(diffusion__isnull=True) \
|
||||
.delete()
|
||||
Episode.object.parent(instance).filter(diffusion__isnull=True).delete()
|
||||
|
||||
cover = getattr(instance, '__initial_cover', None)
|
||||
cover = getattr(instance, "__initial_cover", None)
|
||||
if cover is None and instance.cover is not None:
|
||||
Episode.objects.parent(instance) \
|
||||
.filter(cover__isnull=True) \
|
||||
.update(cover=instance.cover)
|
||||
|
||||
Episode.objects.parent(instance).filter(cover__isnull=True).update(
|
||||
cover=instance.cover
|
||||
)
|
||||
|
||||
|
||||
@receiver(signals.pre_save, sender=Schedule)
|
||||
def schedule_pre_save(sender, instance, *args, **kwargs):
|
||||
if getattr(instance, 'pk') is not None:
|
||||
if getattr(instance, "pk") is not None:
|
||||
instance._initial = Schedule.objects.get(pk=instance.pk)
|
||||
|
||||
|
||||
@receiver(signals.post_save, sender=Schedule)
|
||||
def schedule_post_save(sender, instance, created, *args, **kwargs):
|
||||
"""
|
||||
Handles Schedule's time, duration and timezone changes and update
|
||||
corresponding diffusions accordingly.
|
||||
"""
|
||||
initial = getattr(instance, '_initial', None)
|
||||
if not initial or ((instance.time, instance.duration, instance.timezone) ==
|
||||
(initial.time, initial.duration, initial.timezone)):
|
||||
"""Handles Schedule's time, duration and timezone changes and update
|
||||
corresponding diffusions accordingly."""
|
||||
initial = getattr(instance, "_initial", None)
|
||||
if not initial or (
|
||||
(instance.time, instance.duration, instance.timezone)
|
||||
== (initial.time, initial.duration, initial.timezone)
|
||||
):
|
||||
return
|
||||
|
||||
today = tz.datetime.today()
|
||||
|
@ -94,14 +88,15 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
|
|||
|
||||
@receiver(signals.pre_delete, sender=Schedule)
|
||||
def schedule_pre_delete(sender, instance, *args, **kwargs):
|
||||
""" Delete later corresponding diffusion to a changed schedule. """
|
||||
"""Delete later corresponding diffusion to a changed schedule."""
|
||||
Diffusion.objects.filter(schedule=instance).after(tz.now()).delete()
|
||||
Episode.objects.filter(diffusion__isnull=True, content__isnull=True,
|
||||
sound__isnull=True).delete()
|
||||
Episode.objects.filter(
|
||||
diffusion__isnull=True, content__isnull=True, sound__isnull=True
|
||||
).delete()
|
||||
|
||||
|
||||
@receiver(signals.post_delete, sender=Diffusion)
|
||||
def diffusion_post_delete(sender, instance, *args, **kwargs):
|
||||
Episode.objects.filter(diffusion__isnull=True, content__isnull=True,
|
||||
sound__isnull=True).delete()
|
||||
|
||||
|
||||
Episode.objects.filter(
|
||||
diffusion__isnull=True, content__isnull=True, sound__isnull=True
|
||||
).delete()
|
||||
|
|
|
@ -6,18 +6,17 @@ from django.db import models
|
|||
from django.db.models import Q
|
||||
from django.utils import timezone as tz
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from taggit.managers import TaggableManager
|
||||
|
||||
from aircox import settings
|
||||
from .program import Program
|
||||
|
||||
from .episode import Episode
|
||||
from .program import Program
|
||||
|
||||
logger = logging.getLogger("aircox")
|
||||
|
||||
|
||||
logger = logging.getLogger('aircox')
|
||||
|
||||
|
||||
__all__ = ('Sound', 'SoundQuerySet', 'Track')
|
||||
__all__ = ("Sound", "SoundQuerySet", "Track")
|
||||
|
||||
|
||||
class SoundQuerySet(models.QuerySet):
|
||||
|
@ -37,122 +36,150 @@ class SoundQuerySet(models.QuerySet):
|
|||
return self.exclude(type=Sound.TYPE_REMOVED)
|
||||
|
||||
def public(self):
|
||||
""" Return sounds available as podcasts """
|
||||
"""Return sounds available as podcasts."""
|
||||
return self.filter(is_public=True)
|
||||
|
||||
def downloadable(self):
|
||||
""" Return sounds available as podcasts """
|
||||
"""Return sounds available as podcasts."""
|
||||
return self.filter(is_downloadable=True)
|
||||
|
||||
def archive(self):
|
||||
""" Return sounds that are archives """
|
||||
"""Return sounds that are archives."""
|
||||
return self.filter(type=Sound.TYPE_ARCHIVE)
|
||||
|
||||
def path(self, paths):
|
||||
if isinstance(paths, str):
|
||||
return self.filter(file=paths.replace(conf.MEDIA_ROOT + '/', ''))
|
||||
return self.filter(file__in=(p.replace(conf.MEDIA_ROOT + '/', '')
|
||||
for p in paths))
|
||||
return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", ""))
|
||||
return self.filter(
|
||||
file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths)
|
||||
)
|
||||
|
||||
def playlist(self, archive=True, order_by=True):
|
||||
"""
|
||||
Return files absolute paths as a flat list (exclude sound without path).
|
||||
"""Return files absolute paths as a flat list (exclude sound without
|
||||
path).
|
||||
|
||||
If `order_by` is True, order by path.
|
||||
"""
|
||||
if archive:
|
||||
self = self.archive()
|
||||
if order_by:
|
||||
self = self.order_by('file')
|
||||
return [os.path.join(conf.MEDIA_ROOT, file) for file in self.filter(file__isnull=False) \
|
||||
.values_list('file', flat=True)]
|
||||
self = self.order_by("file")
|
||||
return [
|
||||
os.path.join(conf.MEDIA_ROOT, file)
|
||||
for file in self.filter(file__isnull=False).values_list(
|
||||
"file", flat=True
|
||||
)
|
||||
]
|
||||
|
||||
def search(self, query):
|
||||
return self.filter(
|
||||
Q(name__icontains=query) | Q(file__icontains=query) |
|
||||
Q(program__title__icontains=query) |
|
||||
Q(episode__title__icontains=query)
|
||||
Q(name__icontains=query)
|
||||
| Q(file__icontains=query)
|
||||
| Q(program__title__icontains=query)
|
||||
| Q(episode__title__icontains=query)
|
||||
)
|
||||
|
||||
|
||||
# TODO:
|
||||
# - provide a default name based on program and episode
|
||||
class Sound(models.Model):
|
||||
"""
|
||||
A Sound is the representation of a sound file that can be either an excerpt
|
||||
or a complete archive of the related diffusion.
|
||||
"""
|
||||
"""A Sound is the representation of a sound file that can be either an
|
||||
excerpt or a complete archive of the related diffusion."""
|
||||
|
||||
TYPE_OTHER = 0x00
|
||||
TYPE_ARCHIVE = 0x01
|
||||
TYPE_EXCERPT = 0x02
|
||||
TYPE_REMOVED = 0x03
|
||||
TYPE_CHOICES = (
|
||||
(TYPE_OTHER, _('other')), (TYPE_ARCHIVE, _('archive')),
|
||||
(TYPE_EXCERPT, _('excerpt')), (TYPE_REMOVED, _('removed'))
|
||||
(TYPE_OTHER, _("other")),
|
||||
(TYPE_ARCHIVE, _("archive")),
|
||||
(TYPE_EXCERPT, _("excerpt")),
|
||||
(TYPE_REMOVED, _("removed")),
|
||||
)
|
||||
|
||||
name = models.CharField(_('name'), max_length=64)
|
||||
name = models.CharField(_("name"), max_length=64)
|
||||
program = models.ForeignKey(
|
||||
Program, models.CASCADE, blank=True, # NOT NULL
|
||||
verbose_name=_('program'),
|
||||
help_text=_('program related to it'),
|
||||
Program,
|
||||
models.CASCADE,
|
||||
blank=True, # NOT NULL
|
||||
verbose_name=_("program"),
|
||||
help_text=_("program related to it"),
|
||||
db_index=True,
|
||||
)
|
||||
episode = models.ForeignKey(
|
||||
Episode, models.SET_NULL, blank=True, null=True,
|
||||
verbose_name=_('episode'),
|
||||
Episode,
|
||||
models.SET_NULL,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("episode"),
|
||||
db_index=True,
|
||||
)
|
||||
type = models.SmallIntegerField(_('type'), choices=TYPE_CHOICES)
|
||||
type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
|
||||
position = models.PositiveSmallIntegerField(
|
||||
_('order'), default=0, help_text=_('position in the playlist'),
|
||||
_("order"),
|
||||
default=0,
|
||||
help_text=_("position in the playlist"),
|
||||
)
|
||||
|
||||
def _upload_to(self, filename):
|
||||
subdir = settings.AIRCOX_SOUND_ARCHIVES_SUBDIR \
|
||||
if self.type == self.TYPE_ARCHIVE else \
|
||||
settings.AIRCOX_SOUND_EXCERPTS_SUBDIR
|
||||
subdir = (
|
||||
settings.AIRCOX_SOUND_ARCHIVES_SUBDIR
|
||||
if self.type == self.TYPE_ARCHIVE
|
||||
else settings.AIRCOX_SOUND_EXCERPTS_SUBDIR
|
||||
)
|
||||
return os.path.join(self.program.path, subdir, filename)
|
||||
|
||||
file = models.FileField(
|
||||
_('file'), upload_to=_upload_to, max_length=256,
|
||||
db_index=True, unique=True,
|
||||
_("file"),
|
||||
upload_to=_upload_to,
|
||||
max_length=256,
|
||||
db_index=True,
|
||||
unique=True,
|
||||
)
|
||||
duration = models.TimeField(
|
||||
_('duration'),
|
||||
blank=True, null=True,
|
||||
help_text=_('duration of the sound'),
|
||||
_("duration"),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("duration of the sound"),
|
||||
)
|
||||
mtime = models.DateTimeField(
|
||||
_('modification time'),
|
||||
blank=True, null=True,
|
||||
help_text=_('last modification date and time'),
|
||||
_("modification time"),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("last modification date and time"),
|
||||
)
|
||||
is_good_quality = models.BooleanField(
|
||||
_('good quality'), help_text=_('sound meets quality requirements'),
|
||||
blank=True, null=True
|
||||
_("good quality"),
|
||||
help_text=_("sound meets quality requirements"),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
is_public = models.BooleanField(
|
||||
_('public'), help_text=_('whether it is publicly available as podcast'),
|
||||
_("public"),
|
||||
help_text=_("whether it is publicly available as podcast"),
|
||||
default=False,
|
||||
)
|
||||
is_downloadable = models.BooleanField(
|
||||
_('downloadable'),
|
||||
help_text=_('whether it can be publicly downloaded by visitors (sound must be public)'),
|
||||
_("downloadable"),
|
||||
help_text=_(
|
||||
"whether it can be publicly downloaded by visitors (sound must be "
|
||||
"public)"
|
||||
),
|
||||
default=False,
|
||||
)
|
||||
|
||||
objects = SoundQuerySet.as_manager()
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Sound')
|
||||
verbose_name_plural = _('Sounds')
|
||||
verbose_name = _("Sound")
|
||||
verbose_name_plural = _("Sounds")
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return self.file and self.file.url
|
||||
|
||||
def __str__(self):
|
||||
return '/'.join(self.file.path.split('/')[-3:])
|
||||
return "/".join(self.file.path.split("/")[-3:])
|
||||
|
||||
def save(self, check=True, *args, **kwargs):
|
||||
if self.episode is not None and self.program is None:
|
||||
|
@ -166,29 +193,28 @@ class Sound(models.Model):
|
|||
|
||||
# TODO: rename get_file_mtime(self)
|
||||
def get_mtime(self):
|
||||
"""
|
||||
Get the last modification date from file
|
||||
"""
|
||||
"""Get the last modification date from file."""
|
||||
mtime = os.stat(self.file.path).st_mtime
|
||||
mtime = tz.datetime.fromtimestamp(mtime)
|
||||
mtime = mtime.replace(microsecond=0)
|
||||
return tz.make_aware(mtime, tz.get_current_timezone())
|
||||
|
||||
def file_exists(self):
|
||||
""" Return true if the file still exists. """
|
||||
"""Return true if the file still exists."""
|
||||
|
||||
return os.path.exists(self.file.path)
|
||||
|
||||
# TODO: rename to sync_fs()
|
||||
def check_on_file(self):
|
||||
"""
|
||||
Check sound file info again'st self, and update informations if
|
||||
needed (do not save). Return True if there was changes.
|
||||
"""Check sound file info again'st self, and update informations if
|
||||
needed (do not save).
|
||||
|
||||
Return True if there was changes.
|
||||
"""
|
||||
if not self.file_exists():
|
||||
if self.type == self.TYPE_REMOVED:
|
||||
return
|
||||
logger.debug('sound %s: has been removed', self.file.name)
|
||||
logger.debug("sound %s: has been removed", self.file.name)
|
||||
self.type = self.TYPE_REMOVED
|
||||
return True
|
||||
|
||||
|
@ -197,9 +223,11 @@ class Sound(models.Model):
|
|||
|
||||
if self.type == self.TYPE_REMOVED and self.program:
|
||||
changed = True
|
||||
self.type = self.TYPE_ARCHIVE \
|
||||
if self.file.name.startswith(self.program.archives_path) else \
|
||||
self.TYPE_EXCERPT
|
||||
self.type = (
|
||||
self.TYPE_ARCHIVE
|
||||
if self.file.name.startswith(self.program.archives_path)
|
||||
else self.TYPE_EXCERPT
|
||||
)
|
||||
|
||||
# check mtime -> reset quality if changed (assume file changed)
|
||||
mtime = self.get_mtime()
|
||||
|
@ -207,8 +235,10 @@ class Sound(models.Model):
|
|||
if self.mtime != mtime:
|
||||
self.mtime = mtime
|
||||
self.is_good_quality = None
|
||||
logger.debug('sound %s: m_time has changed. Reset quality info',
|
||||
self.file.name)
|
||||
logger.debug(
|
||||
"sound %s: m_time has changed. Reset quality info",
|
||||
self.file.name,
|
||||
)
|
||||
return True
|
||||
|
||||
return changed
|
||||
|
@ -218,7 +248,7 @@ class Sound(models.Model):
|
|||
# FIXME: later, remove date?
|
||||
name = os.path.basename(self.file.name)
|
||||
name = os.path.splitext(name)[0]
|
||||
self.name = name.replace('_', ' ').strip()
|
||||
self.name = name.replace("_", " ").strip()
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -226,53 +256,67 @@ class Sound(models.Model):
|
|||
|
||||
|
||||
class Track(models.Model):
|
||||
"""Track of a playlist of an object.
|
||||
|
||||
The position can either be expressed as the position in the playlist
|
||||
or as the moment in seconds it started.
|
||||
"""
|
||||
Track of a playlist of an object. The position can either be expressed
|
||||
as the position in the playlist or as the moment in seconds it started.
|
||||
"""
|
||||
|
||||
episode = models.ForeignKey(
|
||||
Episode, models.CASCADE, blank=True, null=True,
|
||||
verbose_name=_('episode'),
|
||||
Episode,
|
||||
models.CASCADE,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("episode"),
|
||||
)
|
||||
sound = models.ForeignKey(
|
||||
Sound, models.CASCADE, blank=True, null=True,
|
||||
verbose_name=_('sound'),
|
||||
Sound,
|
||||
models.CASCADE,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("sound"),
|
||||
)
|
||||
position = models.PositiveSmallIntegerField(
|
||||
_('order'), default=0, help_text=_('position in the playlist'),
|
||||
_("order"),
|
||||
default=0,
|
||||
help_text=_("position in the playlist"),
|
||||
)
|
||||
timestamp = models.PositiveSmallIntegerField(
|
||||
_('timestamp'),
|
||||
blank=True, null=True,
|
||||
help_text=_('position (in seconds)')
|
||||
_("timestamp"),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("position (in seconds)"),
|
||||
)
|
||||
title = models.CharField(_('title'), max_length=128)
|
||||
artist = models.CharField(_('artist'), max_length=128)
|
||||
album = models.CharField(_('album'), max_length=128, null=True, blank=True)
|
||||
tags = TaggableManager(verbose_name=_('tags'), blank=True)
|
||||
year = models.IntegerField(_('year'), blank=True, null=True)
|
||||
title = models.CharField(_("title"), max_length=128)
|
||||
artist = models.CharField(_("artist"), max_length=128)
|
||||
album = models.CharField(_("album"), max_length=128, null=True, blank=True)
|
||||
tags = TaggableManager(verbose_name=_("tags"), blank=True)
|
||||
year = models.IntegerField(_("year"), blank=True, null=True)
|
||||
# FIXME: remove?
|
||||
info = models.CharField(
|
||||
_('information'),
|
||||
_("information"),
|
||||
max_length=128,
|
||||
blank=True, null=True,
|
||||
help_text=_('additional informations about this track, such as '
|
||||
'the version, if is it a remix, features, etc.'),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_(
|
||||
"additional informations about this track, such as "
|
||||
"the version, if is it a remix, features, etc."
|
||||
),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Track')
|
||||
verbose_name_plural = _('Tracks')
|
||||
ordering = ('position',)
|
||||
verbose_name = _("Track")
|
||||
verbose_name_plural = _("Tracks")
|
||||
ordering = ("position",)
|
||||
|
||||
def __str__(self):
|
||||
return '{self.artist} -- {self.title} -- {self.position}'.format(
|
||||
self=self)
|
||||
return "{self.artist} -- {self.title} -- {self.position}".format(
|
||||
self=self
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
if (self.sound is None and self.episode is None) or \
(self.sound is not None and self.episode is not None):
raise ValueError('sound XOR episode is required')
if (self.sound is None and self.episode is None) or (
self.sound is not None and self.episode is not None
):
raise ValueError("sound XOR episode is required")
super().save(*args, **kwargs)
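Illustration (not part of the commit) of the constraint enforced by Track.save() above, assuming at least one Episode already exists:

    from aircox.models import Episode, Track

    episode = Episode.objects.first()
    Track(episode=episode, title="Intro", artist="Host", position=0).save()  # ok: exactly one of episode/sound
    Track(title="Orphan", artist="Host", position=1).save()
    # -> ValueError: sound XOR episode is required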
@ -1,25 +1,20 @@
|
|||
import os
|
||||
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from filer.fields.image import FilerImageField
|
||||
|
||||
from .. import settings
|
||||
|
||||
|
||||
__all__ = ('Station', 'StationQuerySet', 'Port')
|
||||
__all__ = ("Station", "StationQuerySet", "Port")
|
||||
|
||||
|
||||
class StationQuerySet(models.QuerySet):
|
||||
def default(self, station=None):
|
||||
"""
|
||||
Return station model instance, using defaults or
|
||||
given one.
|
||||
"""
|
||||
"""Return station model instance, using defaults or given one."""
|
||||
if station is None:
|
||||
return self.order_by('-default', 'pk').first()
|
||||
return self.order_by("-default", "pk").first()
|
||||
return self.filter(pk=station).first()
|
||||
|
||||
def active(self):
|
||||
|
@ -27,66 +22,79 @@ class StationQuerySet(models.QuerySet):
|
|||
|
||||
|
||||
class Station(models.Model):
|
||||
"""
|
||||
Represents a radio station, to which multiple programs are attached
|
||||
and that is used as the top object for everything.
|
||||
"""Represents a radio station, to which multiple programs are attached and
|
||||
that is used as the top object for everything.
|
||||
|
||||
A Station holds controllers for the audio stream generation too.
|
||||
Theses are set up when needed (at the first access to these elements)
|
||||
then cached.
|
||||
Theses are set up when needed (at the first access to these
|
||||
elements) then cached.
|
||||
"""
|
||||
name = models.CharField(_('name'), max_length=64)
|
||||
slug = models.SlugField(_('slug'), max_length=64, unique=True)
|
||||
|
||||
name = models.CharField(_("name"), max_length=64)
|
||||
slug = models.SlugField(_("slug"), max_length=64, unique=True)
|
||||
# FIXME: remove - should be decided only by Streamer controller + settings
|
||||
path = models.CharField(
|
||||
_('path'),
|
||||
help_text=_('path to the working directory'),
|
||||
_("path"),
|
||||
help_text=_("path to the working directory"),
|
||||
max_length=256,
|
||||
blank=True,
|
||||
)
|
||||
default = models.BooleanField(
|
||||
_('default station'),
|
||||
_("default station"),
|
||||
default=False,
|
||||
help_text=_('use this station as the main one.')
|
||||
help_text=_("use this station as the main one."),
|
||||
)
|
||||
active = models.BooleanField(
|
||||
_('active'),
|
||||
_("active"),
|
||||
default=True,
|
||||
help_text=_('whether this station is still active or not.')
|
||||
help_text=_("whether this station is still active or not."),
|
||||
)
|
||||
logo = FilerImageField(
|
||||
on_delete=models.SET_NULL, null=True, blank=True,
|
||||
verbose_name=_('Logo'),
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
verbose_name=_("Logo"),
|
||||
)
|
||||
hosts = models.TextField(
|
||||
_("website's urls"), max_length=512, null=True, blank=True,
|
||||
help_text=_('specify one url per line')
|
||||
_("website's urls"),
|
||||
max_length=512,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("specify one url per line"),
|
||||
)
|
||||
audio_streams = models.TextField(
|
||||
_("audio streams"), max_length=2048, null=True, blank=True,
|
||||
help_text=_("Audio streams urls used by station's player. One url "
|
||||
"a line.")
|
||||
_("audio streams"),
|
||||
max_length=2048,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_(
|
||||
"Audio streams urls used by station's player. One url " "a line."
|
||||
),
|
||||
)
|
||||
default_cover = FilerImageField(
|
||||
on_delete=models.SET_NULL,
|
||||
verbose_name=_('Default pages\' cover'), null=True, blank=True,
|
||||
related_name='+',
|
||||
verbose_name=_("Default pages' cover"),
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="+",
|
||||
)
|
||||
|
||||
objects = StationQuerySet.as_manager()
|
||||
|
||||
@cached_property
|
||||
def streams(self):
|
||||
""" Audio streams as list of urls. """
|
||||
return self.audio_streams.split('\n') if self.audio_streams else []
|
||||
"""Audio streams as list of urls."""
|
||||
return self.audio_streams.split("\n") if self.audio_streams else []
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def save(self, make_sources=True, *args, **kwargs):
|
||||
if not self.path:
|
||||
self.path = os.path.join(settings.AIRCOX_CONTROLLERS_WORKING_DIR,
|
||||
self.slug.replace('-', '_'))
|
||||
self.path = os.path.join(
|
||||
settings.AIRCOX_CONTROLLERS_WORKING_DIR,
|
||||
self.slug.replace("-", "_"),
|
||||
)
|
||||
|
||||
if self.default:
|
||||
qs = Station.objects.filter(default=True)
|
||||
|
@ -99,22 +107,20 @@ class Station(models.Model):
|
|||
|
||||
class PortQuerySet(models.QuerySet):
|
||||
def active(self, value=True):
|
||||
""" Active ports """
|
||||
"""Active ports."""
|
||||
return self.filter(active=value)
|
||||
|
||||
def output(self):
|
||||
""" Filter in output ports """
|
||||
"""Filter in output ports."""
|
||||
return self.filter(direction=Port.DIRECTION_OUTPUT)
|
||||
|
||||
def input(self):
|
||||
""" Fitler in input ports """
|
||||
"""Fitler in input ports."""
|
||||
return self.filter(direction=Port.DIRECTION_INPUT)
|
||||
|
||||
|
||||
class Port(models.Model):
|
||||
"""
|
||||
Represent an audio input/output for the audio stream
|
||||
generation.
|
||||
"""Represent an audio input/output for the audio stream generation.
|
||||
|
||||
You might want to take a look to LiquidSoap's documentation
|
||||
for the options available for each kind of input/output.
|
||||
|
@ -122,10 +128,13 @@ class Port(models.Model):
|
|||
Some port types may be not available depending on the
|
||||
direction of the port.
|
||||
"""
|
||||
|
||||
DIRECTION_INPUT = 0x00
|
||||
DIRECTION_OUTPUT = 0x01
|
||||
DIRECTION_CHOICES = ((DIRECTION_INPUT, _('input')),
|
||||
(DIRECTION_OUTPUT, _('output')))
|
||||
DIRECTION_CHOICES = (
|
||||
(DIRECTION_INPUT, _("input")),
|
||||
(DIRECTION_OUTPUT, _("output")),
|
||||
)
|
||||
|
||||
TYPE_JACK = 0x00
|
||||
TYPE_ALSA = 0x01
|
||||
|
@ -135,27 +144,34 @@ class Port(models.Model):
|
|||
TYPE_HTTPS = 0x05
|
||||
TYPE_FILE = 0x06
|
||||
TYPE_CHOICES = (
|
||||
(TYPE_JACK, 'jack'), (TYPE_ALSA, 'alsa'),
|
||||
(TYPE_PULSEAUDIO, 'pulseaudio'), (TYPE_ICECAST, 'icecast'),
|
||||
(TYPE_HTTP, 'http'), (TYPE_HTTPS, 'https'),
|
||||
(TYPE_FILE, _('file'))
|
||||
(TYPE_JACK, "jack"),
|
||||
(TYPE_ALSA, "alsa"),
|
||||
(TYPE_PULSEAUDIO, "pulseaudio"),
|
||||
(TYPE_ICECAST, "icecast"),
|
||||
(TYPE_HTTP, "http"),
|
||||
(TYPE_HTTPS, "https"),
|
||||
(TYPE_FILE, _("file")),
|
||||
)
|
||||
|
||||
station = models.ForeignKey(
|
||||
Station, models.CASCADE, verbose_name=_('station'))
|
||||
Station, models.CASCADE, verbose_name=_("station")
|
||||
)
|
||||
direction = models.SmallIntegerField(
|
||||
_('direction'), choices=DIRECTION_CHOICES)
|
||||
type = models.SmallIntegerField(_('type'), choices=TYPE_CHOICES)
|
||||
_("direction"), choices=DIRECTION_CHOICES
|
||||
)
|
||||
type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
|
||||
active = models.BooleanField(
|
||||
_('active'), default=True,
|
||||
help_text=_('this port is active')
|
||||
_("active"), default=True, help_text=_("this port is active")
|
||||
)
|
||||
settings = models.TextField(
|
||||
_('port settings'),
|
||||
help_text=_('list of comma separated params available; '
|
||||
'this is put in the output config file as raw code; '
|
||||
'plugin related'),
|
||||
blank=True, null=True
|
||||
_("port settings"),
|
||||
help_text=_(
|
||||
"list of comma separated params available; "
|
||||
"this is put in the output config file as raw code; "
|
||||
"plugin related"
|
||||
),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
objects = PortQuerySet.as_manager()
|
||||
|
@ -163,22 +179,17 @@ class Port(models.Model):
|
|||
def __str__(self):
|
||||
return "{direction}: {type} #{id}".format(
|
||||
direction=self.get_direction_display(),
|
||||
type=self.get_type_display(), id=self.pk or ''
|
||||
type=self.get_type_display(),
|
||||
id=self.pk or "",
|
||||
)
|
||||
|
||||
def is_valid_type(self):
|
||||
"""
|
||||
Return True if the type is available for the given direction.
|
||||
"""
|
||||
"""Return True if the type is available for the given direction."""
|
||||
|
||||
if self.direction == self.DIRECTION_INPUT:
|
||||
return self.type not in (
|
||||
self.TYPE_ICECAST, self.TYPE_FILE
|
||||
)
|
||||
return self.type not in (self.TYPE_ICECAST, self.TYPE_FILE)
|
||||
|
||||
return self.type not in (
|
||||
self.TYPE_HTTP, self.TYPE_HTTPS
|
||||
)
|
||||
return self.type not in (self.TYPE_HTTP, self.TYPE_HTTPS)
|
||||
|
||||
def save(self, *args, **kwargs):
if not self.is_valid_type():

@ -187,4 +198,3 @@ class Port(models.Model):
)

return super().save(*args, **kwargs)
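Sketch (illustrative only, not from the commit) of the direction/type validation the save() override above relies on; Station.objects.default() is the queryset helper shown earlier:

    from aircox.models import Port, Station

    station = Station.objects.default()
    port = Port(
        station=station,
        direction=Port.DIRECTION_INPUT,
        type=Port.TYPE_ICECAST,
    )
    port.is_valid_type()  # False: icecast is not accepted as an input
    # port.save() therefore refuses this combination (see the override above)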
@ -1,16 +1,20 @@
|
|||
from django.db import models
|
||||
from django.contrib.auth.models import User
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
__all__ = ("UserSettings",)
|
||||
|
||||
|
||||
class UserSettings(models.Model):
|
||||
"""
|
||||
Store user's settings.
|
||||
"""
|
||||
"""Store user's settings."""
|
||||
|
||||
user = models.OneToOneField(
|
||||
User, models.CASCADE, verbose_name=_('User'),
|
||||
related_name='aircox_settings')
|
||||
playlist_editor_columns = models.JSONField(
|
||||
_('Playlist Editor Columns'))
|
||||
User,
|
||||
models.CASCADE,
|
||||
verbose_name=_("User"),
|
||||
related_name="aircox_settings",
|
||||
)
|
||||
playlist_editor_columns = models.JSONField(_("Playlist Editor Columns"))
|
||||
playlist_editor_sep = models.CharField(
|
||||
_('Playlist Editor Separator'), max_length=16)
|
||||
_("Playlist Editor Separator"), max_length=16
|
||||
)
|
||||
|
|
|
@ -1,3 +1,12 @@
|
|||
from .log import *
|
||||
from .sound import *
|
||||
from .admin import *
|
||||
from .admin import TrackSerializer, UserSettingsSerializer
|
||||
from .log import LogInfo, LogInfoSerializer
|
||||
from .sound import PodcastSerializer, SoundSerializer
|
||||
|
||||
__all__ = (
|
||||
"TrackSerializer",
|
||||
"UserSettingsSerializer",
|
||||
"LogInfo",
|
||||
"LogInfoSerializer",
|
||||
"SoundSerializer",
|
||||
"PodcastSerializer",
|
||||
)
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
from rest_framework import serializers
|
||||
from taggit.serializers import TagListSerializerField, TaggitSerializer
|
||||
from taggit.serializers import TaggitSerializer, TagListSerializerField
|
||||
|
||||
from ..models import Track, UserSettings
|
||||
|
||||
|
||||
__all__ = ('TrackSerializer', 'UserSettingsSerializer')
|
||||
__all__ = ("TrackSerializer", "UserSettingsSerializer")
|
||||
|
||||
|
||||
class TrackSerializer(TaggitSerializer, serializers.ModelSerializer):
|
||||
|
@ -12,19 +11,29 @@ class TrackSerializer(TaggitSerializer, serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = Track
|
||||
fields = ('pk', 'artist', 'title', 'album', 'year', 'position',
|
||||
'info', 'tags', 'episode', 'sound', 'timestamp')
|
||||
fields = (
|
||||
"pk",
|
||||
"artist",
|
||||
"title",
|
||||
"album",
|
||||
"year",
|
||||
"position",
|
||||
"info",
|
||||
"tags",
|
||||
"episode",
|
||||
"sound",
|
||||
"timestamp",
|
||||
)
|
||||
|
||||
|
||||
class UserSettingsSerializer(serializers.ModelSerializer):
|
||||
# TODO: validate fields values (playlist_editor_columns at least)
|
||||
class Meta:
|
||||
model = UserSettings
|
||||
fields = ('playlist_editor_columns', 'playlist_editor_sep')
|
||||
fields = ("playlist_editor_columns", "playlist_editor_sep")
|
||||
|
||||
def create(self, validated_data):
user = self.context.get('user')
user = self.context.get("user")
if user:
validated_data['user_id'] = user.id
validated_data["user_id"] = user.id
return super().create(validated_data)
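Illustrative use of the ``user`` context handled by create() above (the request object and field values are placeholders):

    serializer = UserSettingsSerializer(
        data={
            "playlist_editor_columns": ["artist", "title", "tags"],
            "playlist_editor_sep": " -- ",
        },
        context={"user": request.user},
    )
    serializer.is_valid(raise_exception=True)
    serializer.save()  # create() fills user_id from the context user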
@ -2,14 +2,13 @@ from rest_framework import serializers
|
|||
|
||||
from ..models import Diffusion, Log
|
||||
|
||||
|
||||
__all__ = ('LogInfo', 'LogInfoSerializer')
|
||||
__all__ = ("LogInfo", "LogInfoSerializer")
|
||||
|
||||
|
||||
class LogInfo:
|
||||
obj = None
|
||||
start, end = None, None
|
||||
title, artist = '', ''
|
||||
title, artist = "", ""
|
||||
url, cover = None, None
|
||||
info = None
|
||||
|
||||
|
@ -20,17 +19,17 @@ class LogInfo:
|
|||
elif isinstance(obj, Log):
|
||||
self.from_log(obj)
|
||||
else:
|
||||
raise ValueError('`obj` must be a Diffusion or a Track Log.')
|
||||
raise ValueError("`obj` must be a Diffusion or a Track Log.")
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return 'track' if isinstance(self.obj, Log) else 'diffusion'
|
||||
return "track" if isinstance(self.obj, Log) else "diffusion"
|
||||
|
||||
def from_diffusion(self, obj):
|
||||
episode = obj.episode
|
||||
self.start, self.end = obj.start, obj.end
|
||||
self.title, self.url = episode.title, episode.get_absolute_url()
|
||||
self.cover = episode.cover and episode.cover.icons['64']
|
||||
self.cover = episode.cover and episode.cover.icons["64"]
|
||||
self.info = episode.category and episode.category.title
|
||||
self.obj = obj
|
||||
|
||||
|
|
|
@ -2,14 +2,27 @@ from rest_framework import serializers
|
|||
|
||||
from ..models import Sound
|
||||
|
||||
__all__ = ("SoundSerializer", "PodcastSerializer")
|
||||
|
||||
|
||||
class SoundSerializer(serializers.ModelSerializer):
|
||||
file = serializers.FileField(use_url=False)
|
||||
|
||||
class Meta:
|
||||
model = Sound
|
||||
fields = ['pk', 'name', 'program', 'episode', 'type', 'file',
|
||||
'duration', 'mtime', 'is_good_quality', 'is_public', 'url']
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"program",
|
||||
"episode",
|
||||
"type",
|
||||
"file",
|
||||
"duration",
|
||||
"mtime",
|
||||
"is_good_quality",
|
||||
"is_public",
|
||||
"url",
|
||||
]
|
||||
|
||||
|
||||
class PodcastSerializer(serializers.ModelSerializer):
|
||||
|
@ -17,5 +30,14 @@ class PodcastSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = Sound
|
||||
fields = ['pk', 'name', 'program', 'episode', 'type',
|
||||
'duration', 'mtime', 'url', 'is_downloadable']
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
"program",
|
||||
"episode",
|
||||
"type",
|
||||
"duration",
|
||||
"mtime",
|
||||
"url",
|
||||
"is_downloadable",
|
||||
]
|
||||
|
|
|
@ -2,61 +2,11 @@ import os
|
|||
|
||||
from django.conf import settings
|
||||
|
||||
# TODO:
|
||||
# - items() iteration
|
||||
# - sub-settings as values
|
||||
# - validate() settings
|
||||
# - Meta inner-class?
|
||||
# - custom settings class instead of default
|
||||
#class BaseSettings:
|
||||
# deprecated = set()
|
||||
#
|
||||
# def __init__(self, user_conf):
|
||||
# if user_conf:
|
||||
# for key, value in user_conf.items():
|
||||
# if not hasattr(self, key):
|
||||
# if key in self.deprecated:
|
||||
# raise ValueError('"{}" config is deprecated'.format(key))
|
||||
# else:
|
||||
# raise ValueError('"{}" is not a config value'.format(key))
|
||||
# setattr(self, key, value)
|
||||
#
|
||||
#
|
||||
#class Settings(BaseSettings):
|
||||
# default_user_groups = {
|
||||
#
|
||||
# }
|
||||
#
|
||||
# programs_dir = os.path.join(settings.MEDIA_ROOT, 'programs'),
|
||||
# """ Programs data directory. """
|
||||
# episode_title = '{program.title} - {date}'
|
||||
# """ Default episodes title. """
|
||||
# episode_title_date_format = '%-d %B %Y'
|
||||
# """ Date format used in episode title. """
|
||||
#
|
||||
# logs_archives_dir = os.path.join(settings.PROJECT_ROOT, 'logs/archives')
|
||||
# """ Directory where logs are saved once archived """
|
||||
# logs_archive_age = 30
|
||||
# """ Default age of log before being archived """
|
||||
#
|
||||
# sounds_default_dir = os.path.join(settings.MEDIA_ROOT, 'programs/defaults')
|
||||
# sound_archive_dir = 'archives'
|
||||
# sound_excerpt_dir = 'excerpts'
|
||||
# sound_quality = {
|
||||
# 'attribute': 'RMS lev dB',
|
||||
# 'range': (-18.0, -8.0),
|
||||
# 'sample_length': 120,
|
||||
# }
|
||||
# sound_ext = ('.ogg', '.flac', '.wav', '.mp3', '.opus')
|
||||
#
|
||||
# # TODO: move into aircox_streamer
|
||||
# streamer_working_dir = '/tmp/aircox'
|
||||
#
|
||||
#
|
||||
#
|
||||
|
||||
def ensure(key, default):
globals()[key] = getattr(settings, key, default)
value = getattr(settings, key, default)
globals()[key] = value
return value
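As a sketch of how ensure() is meant to be used (assuming a standard Django project settings module): any AIRCOX_* name defined in the project settings takes precedence over the default passed here, and the returned value can feed derived settings such as AIRCOX_PROGRAMS_DIR_ABS below.

    # project settings.py (hypothetical override)
    AIRCOX_EPISODE_TITLE = "{program.title} // {date}"
    AIRCOX_LOGS_ARCHIVES_AGE = 90

    # after Django setup, aircox.settings exposes the resolved values
    from aircox import settings as aircox_settings
    aircox_settings.AIRCOX_LOGS_ARCHIVES_AGE  # -> 90 instead of the default 60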
########################################################################
|
||||
|
@ -64,85 +14,101 @@ def ensure(key, default):
|
|||
########################################################################
|
||||
# group to assign to users at their creation, along with the permissions
|
||||
# to add to each group.
|
||||
ensure('AIRCOX_DEFAULT_USER_GROUPS', {
|
||||
'radio hosts': (
|
||||
# TODO include content_type in order to avoid clash with potential
|
||||
# extra applications
|
||||
|
||||
# aircox
|
||||
'change_program', 'change_episode', 'change_diffusion',
|
||||
'add_comment', 'change_comment', 'delete_comment',
|
||||
'add_article', 'change_article', 'delete_article',
|
||||
'change_sound',
|
||||
'add_track', 'change_track', 'delete_track',
|
||||
|
||||
# taggit
|
||||
'add_tag', 'change_tag', 'delete_tag',
|
||||
|
||||
# filer
|
||||
'add_folder', 'change_folder', 'delete_folder', 'can_use_directory_listing',
|
||||
'add_image', 'change_image', 'delete_image',
|
||||
),
|
||||
})
|
||||
ensure(
|
||||
"AIRCOX_DEFAULT_USER_GROUPS",
|
||||
{
|
||||
"radio hosts": (
|
||||
# TODO include content_type in order to avoid clash with potential
|
||||
# extra applications
|
||||
# aircox
|
||||
"change_program",
|
||||
"change_episode",
|
||||
"change_diffusion",
|
||||
"add_comment",
|
||||
"change_comment",
|
||||
"delete_comment",
|
||||
"add_article",
|
||||
"change_article",
|
||||
"delete_article",
|
||||
"change_sound",
|
||||
"add_track",
|
||||
"change_track",
|
||||
"delete_track",
|
||||
# taggit
|
||||
"add_tag",
|
||||
"change_tag",
|
||||
"delete_tag",
|
||||
# filer
|
||||
"add_folder",
|
||||
"change_folder",
|
||||
"delete_folder",
|
||||
"can_use_directory_listing",
|
||||
"add_image",
|
||||
"change_image",
|
||||
"delete_image",
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
# Directory for the programs data
|
||||
ensure('AIRCOX_PROGRAMS_DIR', 'programs')
|
||||
ensure('AIRCOX_PROGRAMS_DIR_ABS', os.path.join(settings.MEDIA_ROOT,
|
||||
AIRCOX_PROGRAMS_DIR))
|
||||
AIRCOX_PROGRAMS_DIR = ensure("AIRCOX_PROGRAMS_DIR", "programs")
|
||||
ensure(
|
||||
"AIRCOX_PROGRAMS_DIR_ABS",
|
||||
os.path.join(settings.MEDIA_ROOT, AIRCOX_PROGRAMS_DIR),
|
||||
)
|
||||
|
||||
|
||||
########################################################################
|
||||
# Programs & Episodes
|
||||
########################################################################
|
||||
# default title for episodes
|
||||
ensure('AIRCOX_EPISODE_TITLE', '{program.title} - {date}')
|
||||
ensure("AIRCOX_EPISODE_TITLE", "{program.title} - {date}")
|
||||
# date format in episode title (python's strftime)
|
||||
ensure('AIRCOX_EPISODE_TITLE_DATE_FORMAT', '%-d %B %Y')
|
||||
ensure("AIRCOX_EPISODE_TITLE_DATE_FORMAT", "%-d %B %Y")
|
||||
|
||||
########################################################################
|
||||
# Logs & Archives
|
||||
########################################################################
|
||||
# Directory where to save logs' archives
|
||||
ensure('AIRCOX_LOGS_ARCHIVES_DIR', os.path.join(settings.PROJECT_ROOT, 'logs/archives'))
|
||||
ensure(
|
||||
"AIRCOX_LOGS_ARCHIVES_DIR",
|
||||
os.path.join(settings.PROJECT_ROOT, "logs/archives"),
|
||||
)
|
||||
# In days, minimal age of a log before it is archived
|
||||
ensure('AIRCOX_LOGS_ARCHIVES_AGE', 60)
|
||||
ensure("AIRCOX_LOGS_ARCHIVES_AGE", 60)
|
||||
|
||||
|
||||
########################################################################
|
||||
# Sounds
|
||||
########################################################################
|
||||
# Sub directory used for the complete episode sounds
|
||||
ensure('AIRCOX_SOUND_ARCHIVES_SUBDIR', 'archives')
|
||||
ensure("AIRCOX_SOUND_ARCHIVES_SUBDIR", "archives")
|
||||
# Sub directory used for the excerpts of the episode
|
||||
ensure('AIRCOX_SOUND_EXCERPTS_SUBDIR', 'excerpts')
|
||||
ensure("AIRCOX_SOUND_EXCERPTS_SUBDIR", "excerpts")
|
||||
|
||||
# Quality attributes passed to sound_quality_check from sounds_monitor
|
||||
ensure('AIRCOX_SOUND_QUALITY', {
|
||||
'attribute': 'RMS lev dB',
|
||||
'range': (-18.0, -8.0),
|
||||
'sample_length': 120,
|
||||
}
|
||||
ensure(
|
||||
"AIRCOX_SOUND_QUALITY",
|
||||
{
|
||||
"attribute": "RMS lev dB",
|
||||
"range": (-18.0, -8.0),
|
||||
"sample_length": 120,
|
||||
},
|
||||
)
|
||||
|
||||
# Extension of sound files
|
||||
ensure(
|
||||
'AIRCOX_SOUND_FILE_EXT',
|
||||
('.ogg', '.flac', '.wav', '.mp3', '.opus')
|
||||
)
|
||||
ensure("AIRCOX_SOUND_FILE_EXT", (".ogg", ".flac", ".wav", ".mp3", ".opus"))
|
||||
|
||||
# Tag sounds as deleted instead of deleting them when file has been removed
|
||||
# from filesystem (sound monitoring)
|
||||
ensure(
|
||||
'AIRCOX_SOUND_KEEP_DELETED',
|
||||
False
|
||||
)
|
||||
ensure("AIRCOX_SOUND_KEEP_DELETED", False)
|
||||
|
||||
|
||||
########################################################################
|
||||
# Streamer & Controllers
|
||||
########################################################################
|
||||
# Controllers working directory
|
||||
ensure('AIRCOX_CONTROLLERS_WORKING_DIR', '/tmp/aircox')
|
||||
ensure("AIRCOX_CONTROLLERS_WORKING_DIR", "/tmp/aircox")
|
||||
|
||||
|
||||
########################################################################
|
||||
|
@ -150,12 +116,10 @@ ensure('AIRCOX_CONTROLLERS_WORKING_DIR', '/tmp/aircox')
|
|||
########################################################################
|
||||
# Columns for CSV file
|
||||
ensure(
|
||||
'AIRCOX_IMPORT_PLAYLIST_CSV_COLS',
|
||||
('artist', 'title', 'minutes', 'seconds', 'tags', 'info')
|
||||
"AIRCOX_IMPORT_PLAYLIST_CSV_COLS",
|
||||
("artist", "title", "minutes", "seconds", "tags", "info"),
|
||||
)
|
||||
# Column delimiter of csv text files
|
||||
ensure('AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER', ';')
|
||||
ensure("AIRCOX_IMPORT_PLAYLIST_CSV_DELIMITER", ";")
|
||||
# Text delimiter of csv text files
|
||||
ensure('AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE', '"')
|
||||
|
||||
|
||||
ensure("AIRCOX_IMPORT_PLAYLIST_CSV_TEXT_QUOTE", '"')
|
||||
|
|
|
@ -34,7 +34,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/************************************************************************/
|
||||
/******/ // The module cache
|
||||
/******/ var __webpack_module_cache__ = {};
|
||||
/******/
|
||||
/******/
|
||||
/******/ // The require function
|
||||
/******/ function __webpack_require__(moduleId) {
|
||||
/******/ // Check if module is in cache
|
||||
|
@ -48,20 +48,20 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ loaded: false,
|
||||
/******/ exports: {}
|
||||
/******/ };
|
||||
/******/
|
||||
/******/
|
||||
/******/ // Execute the module function
|
||||
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
||||
/******/
|
||||
/******/
|
||||
/******/ // Flag the module as loaded
|
||||
/******/ module.loaded = true;
|
||||
/******/
|
||||
/******/
|
||||
/******/ // Return the exports of the module
|
||||
/******/ return module.exports;
|
||||
/******/ }
|
||||
/******/
|
||||
/******/
|
||||
/******/ // expose the modules object (__webpack_modules__)
|
||||
/******/ __webpack_require__.m = __webpack_modules__;
|
||||
/******/
|
||||
/******/
|
||||
/************************************************************************/
|
||||
/******/ /* webpack/runtime/chunk loaded */
|
||||
/******/ !function() {
|
||||
|
@ -96,7 +96,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ return result;
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/compat get default export */
|
||||
/******/ !function() {
|
||||
/******/ // getDefaultExport function for compatibility with non-harmony modules
|
||||
|
@ -108,7 +108,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ return getter;
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/define property getters */
|
||||
/******/ !function() {
|
||||
/******/ // define getter functions for harmony exports
|
||||
|
@ -120,7 +120,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ }
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/global */
|
||||
/******/ !function() {
|
||||
/******/ __webpack_require__.g = (function() {
|
||||
|
@ -132,12 +132,12 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ }
|
||||
/******/ })();
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/hasOwnProperty shorthand */
|
||||
/******/ !function() {
|
||||
/******/ __webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/make namespace object */
|
||||
/******/ !function() {
|
||||
/******/ // define __esModule on exports
|
||||
|
@ -148,7 +148,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ Object.defineProperty(exports, '__esModule', { value: true });
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/node module decorator */
|
||||
/******/ !function() {
|
||||
/******/ __webpack_require__.nmd = function(module) {
|
||||
|
@ -157,30 +157,30 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ return module;
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/jsonp chunk loading */
|
||||
/******/ !function() {
|
||||
/******/ // no baseURI
|
||||
/******/
|
||||
/******/
|
||||
/******/ // object to store loaded and loading chunks
|
||||
/******/ // undefined = chunk not loaded, null = chunk preloaded/prefetched
|
||||
/******/ // [resolve, reject, Promise] = chunk loading, 0 = chunk loaded
|
||||
/******/ var installedChunks = {
|
||||
/******/ "admin": 0
|
||||
/******/ };
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no chunk on demand loading
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no prefetching
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no preloaded
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no HMR
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no HMR manifest
|
||||
/******/
|
||||
/******/
|
||||
/******/ __webpack_require__.O.j = function(chunkId) { return installedChunks[chunkId] === 0; };
|
||||
/******/
|
||||
/******/
|
||||
/******/ // install a JSONP callback for chunk loading
|
||||
/******/ var webpackJsonpCallback = function(parentChunkLoadingFunction, data) {
|
||||
/******/ var chunkIds = data[0];
|
||||
|
@ -207,19 +207,19 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
/******/ }
|
||||
/******/ return __webpack_require__.O(result);
|
||||
/******/ }
|
||||
/******/
|
||||
/******/
|
||||
/******/ var chunkLoadingGlobal = self["webpackChunkaircox_assets"] = self["webpackChunkaircox_assets"] || [];
|
||||
/******/ chunkLoadingGlobal.forEach(webpackJsonpCallback.bind(null, 0));
|
||||
/******/ chunkLoadingGlobal.push = webpackJsonpCallback.bind(null, chunkLoadingGlobal.push.bind(chunkLoadingGlobal));
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/************************************************************************/
|
||||
/******/
|
||||
/******/
|
||||
/******/ // startup
|
||||
/******/ // Load entry module and return exports
|
||||
/******/ // This entry module depends on other loaded chunks and execution need to be delayed
|
||||
/******/ var __webpack_exports__ = __webpack_require__.O(undefined, ["chunk-vendors","chunk-common"], function() { return __webpack_require__("./src/admin.js"); })
|
||||
/******/ __webpack_exports__ = __webpack_require__.O(__webpack_exports__);
|
||||
/******/
|
||||
/******/
|
||||
/******/ })()
|
||||
;
|
||||
;
|
||||
|
|
|
@ -819,4 +819,4 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpac
|
|||
|
||||
/***/ })
|
||||
|
||||
}]);
|
||||
}]);
|
||||
|
|
|
@ -842,4 +842,4 @@ eval("__webpack_require__.r(__webpack_exports__);\n// extracted by mini-css-extr
|
|||
|
||||
/***/ })
|
||||
|
||||
}]);
|
||||
}]);
|
||||
|
|
|
@ -24,7 +24,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/************************************************************************/
|
||||
/******/ // The module cache
|
||||
/******/ var __webpack_module_cache__ = {};
|
||||
/******/
|
||||
/******/
|
||||
/******/ // The require function
|
||||
/******/ function __webpack_require__(moduleId) {
|
||||
/******/ // Check if module is in cache
|
||||
|
@ -38,20 +38,20 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ loaded: false,
|
||||
/******/ exports: {}
|
||||
/******/ };
|
||||
/******/
|
||||
/******/
|
||||
/******/ // Execute the module function
|
||||
/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);
|
||||
/******/
|
||||
/******/
|
||||
/******/ // Flag the module as loaded
|
||||
/******/ module.loaded = true;
|
||||
/******/
|
||||
/******/
|
||||
/******/ // Return the exports of the module
|
||||
/******/ return module.exports;
|
||||
/******/ }
|
||||
/******/
|
||||
/******/
|
||||
/******/ // expose the modules object (__webpack_modules__)
|
||||
/******/ __webpack_require__.m = __webpack_modules__;
|
||||
/******/
|
||||
/******/
|
||||
/************************************************************************/
|
||||
/******/ /* webpack/runtime/chunk loaded */
|
||||
/******/ !function() {
|
||||
|
@ -86,7 +86,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ return result;
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/compat get default export */
|
||||
/******/ !function() {
|
||||
/******/ // getDefaultExport function for compatibility with non-harmony modules
|
||||
|
@ -98,7 +98,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ return getter;
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/define property getters */
|
||||
/******/ !function() {
|
||||
/******/ // define getter functions for harmony exports
|
||||
|
@ -110,7 +110,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ }
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/global */
|
||||
/******/ !function() {
|
||||
/******/ __webpack_require__.g = (function() {
|
||||
|
@ -122,12 +122,12 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ }
|
||||
/******/ })();
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/hasOwnProperty shorthand */
|
||||
/******/ !function() {
|
||||
/******/ __webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/make namespace object */
|
||||
/******/ !function() {
|
||||
/******/ // define __esModule on exports
|
||||
|
@ -138,7 +138,7 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ Object.defineProperty(exports, '__esModule', { value: true });
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/node module decorator */
|
||||
/******/ !function() {
|
||||
/******/ __webpack_require__.nmd = function(module) {
|
||||
|
@ -147,30 +147,30 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ return module;
|
||||
/******/ };
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/******/ /* webpack/runtime/jsonp chunk loading */
|
||||
/******/ !function() {
|
||||
/******/ // no baseURI
|
||||
/******/
|
||||
/******/
|
||||
/******/ // object to store loaded and loading chunks
|
||||
/******/ // undefined = chunk not loaded, null = chunk preloaded/prefetched
|
||||
/******/ // [resolve, reject, Promise] = chunk loading, 0 = chunk loaded
|
||||
/******/ var installedChunks = {
|
||||
/******/ "core": 0
|
||||
/******/ };
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no chunk on demand loading
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no prefetching
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no preloaded
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no HMR
|
||||
/******/
|
||||
/******/
|
||||
/******/ // no HMR manifest
|
||||
/******/
|
||||
/******/
|
||||
/******/ __webpack_require__.O.j = function(chunkId) { return installedChunks[chunkId] === 0; };
|
||||
/******/
|
||||
/******/
|
||||
/******/ // install a JSONP callback for chunk loading
|
||||
/******/ var webpackJsonpCallback = function(parentChunkLoadingFunction, data) {
|
||||
/******/ var chunkIds = data[0];
|
||||
|
@ -197,19 +197,19 @@ eval("__webpack_require__.r(__webpack_exports__);\n/* harmony import */ var _ind
|
|||
/******/ }
|
||||
/******/ return __webpack_require__.O(result);
|
||||
/******/ }
|
||||
/******/
|
||||
/******/
|
||||
/******/ var chunkLoadingGlobal = self["webpackChunkaircox_assets"] = self["webpackChunkaircox_assets"] || [];
|
||||
/******/ chunkLoadingGlobal.forEach(webpackJsonpCallback.bind(null, 0));
|
||||
/******/ chunkLoadingGlobal.push = webpackJsonpCallback.bind(null, chunkLoadingGlobal.push.bind(chunkLoadingGlobal));
|
||||
/******/ }();
|
||||
/******/
|
||||
/******/
|
||||
/************************************************************************/
|
||||
/******/
|
||||
/******/
|
||||
/******/ // startup
|
||||
/******/ // Load entry module and return exports
|
||||
/******/ // This entry module depends on other loaded chunks and execution need to be delayed
|
||||
/******/ var __webpack_exports__ = __webpack_require__.O(undefined, ["chunk-vendors","chunk-common"], function() { return __webpack_require__("./src/core.js"); })
|
||||
/******/ __webpack_exports__ = __webpack_require__.O(__webpack_exports__);
|
||||
/******/
|
||||
/******/
|
||||
/******/ })()
|
||||
;
|
||||
;
|
||||
|
|
|
@ -4,4 +4,3 @@
|
|||
<input type="datetime-local" value="choice.value" />
|
||||
{% endwith %}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
{% load i18n %}
|
||||
<h3>{% blocktranslate with filter_title=title %} By {{ filter_title }} {% endblocktranslate %}</h3>
|
||||
{% block content %}{% endblock %}
|
||||
|
||||
|
|
|
@ -42,4 +42,3 @@
|
|||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -17,5 +17,3 @@
|
|||
</div>
|
||||
{% endblock %}
|
||||
{% endif %}
|
||||
|
||||
|
||||
|
|
|
@ -80,5 +80,3 @@
|
|||
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
elm.setAttribute('v-pre', true)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
window.addEventListener('load', function() {
|
||||
{% block init-scripts %}
|
||||
aircox.init(null, {
|
||||
|
|
|
@ -6,5 +6,3 @@
|
|||
<img src="{% static "aircox/logo.png" %}"/>
|
||||
</a>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -3,4 +3,3 @@
|
|||
{% block content %}
|
||||
<div v-pre>{{ block.super }}</div>
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -92,4 +92,3 @@
|
|||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -28,4 +28,3 @@
|
|||
</section>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -164,5 +164,3 @@ Usefull context:
|
|||
<div id="player">{% include "aircox/widgets/player.html" %}</div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
|
|
|
@ -6,4 +6,3 @@
|
|||
—
|
||||
{{ station.name }}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -84,4 +84,3 @@
|
|||
{% endif %}
|
||||
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -27,4 +27,3 @@
|
|||
</section>
|
||||
{% endwith %}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -78,5 +78,3 @@
|
|||
</section>
|
||||
{{ block.super }}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -15,4 +15,3 @@
|
|||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -83,5 +83,3 @@
|
|||
{% endwith %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -27,4 +27,3 @@
|
|||
{% include "aircox/widgets/log_list.html" %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -88,6 +88,3 @@ Context:
|
|||
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -60,4 +60,3 @@
|
|||
</form>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -65,4 +65,3 @@
|
|||
</section>
|
||||
{{ block.super }}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -4,5 +4,3 @@
|
|||
{% blocktranslate %}Recently on {{ program }}{% endblocktranslate %}
|
||||
{% endwith %}
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -69,5 +69,3 @@ Context variables:
|
|||
{% block actions %}{% endblock %}
|
||||
</article>
|
||||
{% endif %}
|
||||
|
||||
|
||||
|
|
|
@ -44,5 +44,3 @@ An empty date results to a title or a separator
|
|||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
|
|
|
@ -8,4 +8,3 @@ Context:
|
|||
{% include "aircox/widgets/episode_item.html" %}
|
||||
{% endwith %}
|
||||
{% endwith %}
|
||||
|
||||
|
|
|
@ -19,4 +19,3 @@ Context:
|
|||
</tr>
|
||||
{% endfor %}
|
||||
</table>
|
||||
|
||||
|
|
|
@ -56,4 +56,3 @@ Context variables:
|
|||
</button>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -20,4 +20,3 @@ for design review.
|
|||
{% include "aircox/widgets/track_item.html" %}
|
||||
{% endwith %}
|
||||
{% endif %}
|
||||
|
||||
|
|
|
@ -28,4 +28,3 @@ Context:
|
|||
{% endfor %}
|
||||
</table>
|
||||
{% endwith %}
|
||||
|
||||
|
|
|
@ -3,4 +3,3 @@
|
|||
{% block card_title %}
|
||||
{% block title %}{{ block.super }}{% endblock %}
|
||||
{% endblock %}
|
||||
|
||||
|
|
|
@ -24,5 +24,3 @@ Context:
|
|||
</ul>
|
||||
</nav>
|
||||
{% endif %}
|
||||
|
||||
|
||||
|
|
|
@ -46,4 +46,3 @@ The audio player
|
|||
</template>
|
||||
</a-player>
|
||||
</div>
|
||||
|
||||
|
|
|
@ -15,4 +15,3 @@ List item for a podcast.
|
|||
:actions="['play']">
|
||||
</a-sound-item>
|
||||
</div>
|
||||
|
||||
|
|
|
@ -11,4 +11,3 @@ Context:
|
|||
— {{ object.artist }}
|
||||
{% if object.info %}(<i>{{ object.info }}</i>){% endif %}
|
||||
</span>
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import random
|
||||
import json
|
||||
import random
|
||||
|
||||
from django import template
|
||||
from django.contrib.admin.templatetags.admin_urls import admin_urlname
|
||||
|
@ -11,16 +11,16 @@ random.seed()
|
|||
register = template.Library()
|
||||
|
||||
|
||||
@register.filter(name='admin_url')
|
||||
@register.filter(name="admin_url")
|
||||
def do_admin_url(obj, arg, pass_id=True):
|
||||
""" Reverse admin url for object """
|
||||
"""Reverse admin url for object."""
|
||||
name = admin_urlname(obj._meta, arg)
|
||||
return reverse(name, args=(obj.id,)) if pass_id else reverse(name)
|
||||
|
||||
|
||||
@register.filter(name='get_tracks')
|
||||
@register.filter(name="get_tracks")
|
||||
def do_get_tracks(obj):
|
||||
""" Get a list of track for the provided log, diffusion, or episode """
|
||||
"""Get a list of track for the provided log, diffusion, or episode."""
|
||||
if isinstance(obj, Log):
|
||||
return (obj.track,)
|
||||
|
||||
|
@ -29,65 +29,71 @@ def do_get_tracks(obj):
|
|||
return obj.track_set.all()
|
||||
|
||||
|
||||
@register.simple_tag(name='has_perm', takes_context=True)
|
||||
@register.simple_tag(name="has_perm", takes_context=True)
|
||||
def do_has_perm(context, obj, perm, user=None):
|
||||
""" Return True if ``user.has_perm('[APP].[perm]_[MODEL]')`` """
|
||||
"""Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``"""
|
||||
if user is None:
|
||||
user = context['request'].user
|
||||
return user.has_perm('{}.{}_{}'.format(
|
||||
obj._meta.app_label, perm, obj._meta.model_name))
|
||||
user = context["request"].user
|
||||
return user.has_perm(
|
||||
"{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name)
|
||||
)
|
||||
|
||||
|
||||
@register.filter(name='is_diffusion')
|
||||
@register.filter(name="is_diffusion")
|
||||
def do_is_diffusion(obj):
|
||||
""" Return True if object is a Diffusion. """
|
||||
"""Return True if object is a Diffusion."""
|
||||
return isinstance(obj, Diffusion)
|
||||
|
||||
|
||||
@register.filter(name='json')
|
||||
@register.filter(name="json")
|
||||
def do_json(obj, fields=""):
|
||||
""" Return object as json """
|
||||
"""Return object as json."""
|
||||
if fields:
|
||||
obj = {k: getattr(obj, k, None)
|
||||
for k in ','.split(fields)}
|
||||
obj = {k: getattr(obj, k, None) for k in ",".split(fields)}
|
||||
return json.dumps(obj)
|
||||
|
||||
|
||||
@register.simple_tag(name='player_live_attr', takes_context=True)
|
||||
@register.simple_tag(name="player_live_attr", takes_context=True)
|
||||
def do_player_live_attr(context):
|
||||
""" Player 'live-args' attribute value """
|
||||
station = getattr(context['request'], 'station', None)
|
||||
return json.dumps({
|
||||
'url': reverse('api:live'),
|
||||
'src': station and station.audio_streams.split('\n')
|
||||
})
|
||||
"""Player 'live-args' attribute value."""
|
||||
station = getattr(context["request"], "station", None)
|
||||
return json.dumps(
|
||||
{
|
||||
"url": reverse("api:live"),
|
||||
"src": station and station.audio_streams.split("\n"),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@register.simple_tag(name='nav_items', takes_context=True)
|
||||
@register.simple_tag(name="nav_items", takes_context=True)
|
||||
def do_nav_items(context, menu, **kwargs):
|
||||
""" Render navigation items for the provided menu name. """
|
||||
station, request = context['station'], context['request']
|
||||
return [(item, item.render(request, **kwargs))
|
||||
for item in station.navitem_set.filter(menu=menu)]
|
||||
"""Render navigation items for the provided menu name."""
|
||||
station, request = context["station"], context["request"]
|
||||
return [
|
||||
(item, item.render(request, **kwargs))
|
||||
for item in station.navitem_set.filter(menu=menu)
|
||||
]
|
||||
|
||||
|
||||
@register.simple_tag(name='update_query')
@register.simple_tag(name="update_query")
def do_update_query(obj, **kwargs):
""" Replace provided querydict's values with **kwargs. """
"""Replace provided querydict's values with **kwargs."""
for k, v in kwargs.items():
if v is not None:
obj[k] = list(v) if hasattr(v, '__iter__') else [v]
obj[k] = list(v) if hasattr(v, "__iter__") else [v]
elif k in obj:
obj.pop(k)
return obj
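A minimal sketch of what this tag's helper does to a query mapping (a plain dict is used here for brevity; in templates it receives the request's QueryDict):

    params = {"page": ["2"], "tag": ["jazz"]}
    do_update_query(params, page=None, tag=["rock", "blues"])
    # -> {"tag": ["rock", "blues"]}: None removes a key, an iterable replaces it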
@register.filter(name='verbose_name')
@register.filter(name="verbose_name")
def do_verbose_name(obj, plural=False):
"""
Return model's verbose name (singular or plural) or `obj` if it is a
string (can act for default values).
"""
return obj if isinstance(obj, str) else \
obj._meta.verbose_name_plural if plural else \
obj._meta.verbose_name
"""Return model's verbose name (singular or plural) or `obj` if it is a
string (can act for default values)."""
return (
obj
if isinstance(obj, str)
else obj._meta.verbose_name_plural
if plural
else obj._meta.verbose_name
)
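Hedged usage sketch of this filter called directly in Python (the Diffusion import is only for illustration):

    from aircox.models import Diffusion

    do_verbose_name(Diffusion())               # the model's verbose_name
    do_verbose_name(Diffusion(), plural=True)  # the model's verbose_name_plural
    do_verbose_name("fallback label")          # strings are returned unchanged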
@ -1,63 +1,65 @@
|
|||
import json
|
||||
|
||||
from django import template
|
||||
from django.contrib import admin
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from aircox.serializers.admin import UserSettingsSerializer
|
||||
|
||||
|
||||
__all__ = ('register', 'do_get_admin_tools', 'do_track_inline_data')
|
||||
__all__ = ("register", "do_get_admin_tools", "do_track_inline_data")
|
||||
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.simple_tag(name='get_admin_tools')
|
||||
@register.simple_tag(name="get_admin_tools")
|
||||
def do_get_admin_tools():
|
||||
return admin.site.get_tools()
|
||||
|
||||
|
||||
@register.simple_tag(name='track_inline_data', takes_context=True)
|
||||
@register.simple_tag(name="track_inline_data", takes_context=True)
|
||||
def do_track_inline_data(context, formset):
|
||||
"""
|
||||
Return initial data for playlist editor as dict. Keys are:
|
||||
"""Return initial data for playlist editor as dict. Keys are:
|
||||
|
||||
- ``items``: list of items. Extra keys:
|
||||
- ``__error__``: dict of form fields errors
|
||||
- ``settings``: user's settings
|
||||
"""
|
||||
items = []
|
||||
for form in formset.forms:
|
||||
item = {name: form[name].value()
|
||||
for name in form.fields.keys()}
|
||||
item['__errors__'] = form.errors
|
||||
item = {name: form[name].value() for name in form.fields.keys()}
|
||||
item["__errors__"] = form.errors
|
||||
|
||||
# hack for playlist editor
|
||||
tags = item.get('tags')
|
||||
tags = item.get("tags")
|
||||
if tags and not isinstance(tags, str):
|
||||
item['tags'] = ', '.join(tag.name for tag in tags)
|
||||
item["tags"] = ", ".join(tag.name for tag in tags)
|
||||
items.append(item)
|
||||
|
||||
data = {"items": items}
|
||||
user = context['request'].user
|
||||
settings = getattr(user, 'aircox_settings', None)
|
||||
data['settings'] = settings and UserSettingsSerializer(settings).data
|
||||
user = context["request"].user
|
||||
settings = getattr(user, "aircox_settings", None)
|
||||
data["settings"] = settings and UserSettingsSerializer(settings).data
|
||||
source = json.dumps(data)
return source
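The JSON handed to the playlist editor therefore looks roughly like this (values are illustrative; "settings" is null when the user has no aircox_settings row):

    {
        "items": [
            {"artist": "A", "title": "T", "tags": "jazz, live", "__errors__": {}}
        ],
        "settings": {"playlist_editor_columns": ["artist", "title"], "playlist_editor_sep": " -- "}
    }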
track_inline_labels_ = {
|
||||
'artist': _('Artist'), 'album': _('Album'), 'title': _('Title'),
|
||||
'tags': _('Tags'), 'year': _('Year'),
|
||||
'save_settings': _('Save Settings'),
|
||||
'discard_changes': _('Discard changes'),
|
||||
'columns': _('Columns'),
|
||||
'add_track': _('Add a track'),
|
||||
'remove_track': _('Remove'),
|
||||
'timestamp': _('Timestamp'),
|
||||
"artist": _("Artist"),
|
||||
"album": _("Album"),
|
||||
"title": _("Title"),
|
||||
"tags": _("Tags"),
|
||||
"year": _("Year"),
|
||||
"save_settings": _("Save Settings"),
|
||||
"discard_changes": _("Discard changes"),
|
||||
"columns": _("Columns"),
|
||||
"add_track": _("Add a track"),
|
||||
"remove_track": _("Remove"),
|
||||
"timestamp": _("Timestamp"),
|
||||
}
|
||||
|
||||
|
||||
@register.simple_tag(name='track_inline_labels')
|
||||
@register.simple_tag(name="track_inline_labels")
|
||||
def do_track_inline_labels():
|
||||
""" Return labels for columns in playlist editor as dict """
|
||||
"""Return labels for columns in playlist editor as dict."""
|
||||
return json.dumps({k: str(v) for k, v in track_inline_labels_.items()})
|
||||
|
|
|
@ -1,2 +1,3 @@
|
|||
from .management import *
|
||||
from . import management
|
||||
|
||||
__all__ = ("management",)
|
||||
|
|
|
@ -1,2 +1,15 @@
|
|||
from .sound_file import *
|
||||
from .sound_monitor import *
|
||||
from .sound_file import SoundFileTestCase
|
||||
from .sound_monitor import (
|
||||
ModifiedHandlerTestCase,
|
||||
MonitorHandlerTestCase,
|
||||
MoveHandlerTestCase,
|
||||
NotifyHandlerTestCase,
|
||||
)
|
||||
|
||||
__all__ = (
|
||||
"SoundFileTestCase",
|
||||
"NotifyHandlerTestCase",
|
||||
"MoveHandlerTestCase",
|
||||
"ModifiedHandlerTestCase",
|
||||
"MonitorHandlerTestCase",
|
||||
)
|
||||
|
|
|
@ -7,30 +7,56 @@ from django.utils import timezone as tz
|
|||
from aircox import models
|
||||
from aircox.management.sound_file import SoundFile
|
||||
|
||||
|
||||
__all__ = ('SoundFileTestCase',)
|
||||
__all__ = ("SoundFileTestCase",)
|
||||
|
||||
|
||||
class SoundFileTestCase(TestCase):
|
||||
path_infos = {
|
||||
'test/20220101_10h13_1_sample_1.mp3': {
|
||||
'year': 2022, 'month': 1, 'day': 1, 'hour': 10, 'minute': 13,
|
||||
'n': 1, 'name': 'Sample 1'},
|
||||
'test/20220102_10h13_sample_2.mp3': {
|
||||
'year': 2022, 'month': 1, 'day': 2, 'hour': 10, 'minute': 13,
|
||||
'name': 'Sample 2'},
|
||||
'test/20220103_1_sample_3.mp3': {
|
||||
'year': 2022, 'month': 1, 'day': 3, 'n': 1, 'name': 'Sample 3'},
|
||||
'test/20220104_sample_4.mp3': {
|
||||
'year': 2022, 'month': 1, 'day': 4, 'name': 'Sample 4'},
|
||||
'test/20220105.mp3': {
|
||||
'year': 2022, 'month': 1, 'day': 5, 'name': '20220105'},
|
||||
"test/20220101_10h13_1_sample_1.mp3": {
|
||||
"year": 2022,
|
||||
"month": 1,
|
||||
"day": 1,
|
||||
"hour": 10,
|
||||
"minute": 13,
|
||||
"n": 1,
|
||||
"name": "Sample 1",
|
||||
},
|
||||
"test/20220102_10h13_sample_2.mp3": {
|
||||
"year": 2022,
|
||||
"month": 1,
|
||||
"day": 2,
|
||||
"hour": 10,
|
||||
"minute": 13,
|
||||
"name": "Sample 2",
|
||||
},
|
||||
"test/20220103_1_sample_3.mp3": {
|
||||
"year": 2022,
|
||||
"month": 1,
|
||||
"day": 3,
|
||||
"n": 1,
|
||||
"name": "Sample 3",
|
||||
},
|
||||
"test/20220104_sample_4.mp3": {
|
||||
"year": 2022,
|
||||
"month": 1,
|
||||
"day": 4,
|
||||
"name": "Sample 4",
|
||||
},
|
||||
"test/20220105.mp3": {
|
||||
"year": 2022,
|
||||
"month": 1,
|
||||
"day": 5,
|
||||
"name": "20220105",
|
||||
},
|
||||
}
|
||||
subdir_prefix = "test"
|
||||
sound_files = {
|
||||
k: r
|
||||
for k, r in (
|
||||
(path, SoundFile(conf.MEDIA_ROOT + "/" + path))
|
||||
for path in path_infos.keys()
|
||||
)
|
||||
}
|
||||
subdir_prefix = 'test'
|
||||
sound_files = {k: r for k, r in (
|
||||
(path, SoundFile(conf.MEDIA_ROOT + '/' + path))
|
||||
for path in path_infos.keys()
|
||||
)}
|
||||
|
||||
def test_sound_path(self):
|
||||
for path, sound_file in self.sound_files.items():
|
||||
|
@ -45,21 +71,25 @@ class SoundFileTestCase(TestCase):
|
|||
self.assertEqual(expected, result, "path: {}".format(path))
|
||||
|
||||
def _setup_diff(self, program, info):
|
||||
episode = models.Episode(program=program, title='test-episode')
|
||||
at = tz.datetime(**{
|
||||
k: info[k] for k in ('year', 'month', 'day', 'hour', 'minute')
|
||||
if info.get(k)
|
||||
})
|
||||
episode = models.Episode(program=program, title="test-episode")
|
||||
at = tz.datetime(
|
||||
**{
|
||||
k: info[k]
|
||||
for k in ("year", "month", "day", "hour", "minute")
|
||||
if info.get(k)
|
||||
}
|
||||
)
|
||||
at = tz.make_aware(at)
|
||||
diff = models.Diffusion(episode=episode, start=at,
|
||||
end=at+timedelta(hours=1))
|
||||
diff = models.Diffusion(
|
||||
episode=episode, start=at, end=at + timedelta(hours=1)
|
||||
)
|
||||
episode.save()
|
||||
diff.save()
|
||||
return diff
|
||||
|
||||
def test_find_episode(self):
|
||||
station = models.Station(name='test-station')
|
||||
program = models.Program(station=station, title='test')
|
||||
station = models.Station(name="test-station")
|
||||
program = models.Program(station=station, title="test")
|
||||
station.save()
|
||||
program.save()
|
||||
|
||||
|
|
|
@ -1,15 +1,21 @@
|
|||
import concurrent.futures as futures
|
||||
from datetime import datetime, timedelta
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from aircox.management.sound_monitor import \
|
||||
NotifyHandler, MoveHandler, ModifiedHandler, MonitorHandler
|
||||
from aircox.management.sound_monitor import (
|
||||
ModifiedHandler,
|
||||
MonitorHandler,
|
||||
NotifyHandler,
|
||||
)
|
||||
|
||||
|
||||
__all__ = ('NotifyHandlerTestCase', 'MoveHandlerTestCase',
|
||||
'ModifiedHandlerTestCase', 'MonitorHandlerTestCase',)
|
||||
__all__ = (
|
||||
"NotifyHandlerTestCase",
|
||||
"MoveHandlerTestCase",
|
||||
"ModifiedHandlerTestCase",
|
||||
"MonitorHandlerTestCase",
|
||||
)
|
||||
|
||||
|
||||
class FakeEvent:
|
||||
|
@ -57,21 +63,21 @@ class ModifiedHandlerTestCase(TestCase):
|
|||
class MonitorHandlerTestCase(TestCase):
|
||||
def setUp(self):
|
||||
pool = futures.ThreadPoolExecutor(2)
|
||||
self.monitor = MonitorHandler('archives', pool)
|
||||
self.monitor = MonitorHandler("archives", pool)
|
||||
|
||||
def test_submit_new_job(self):
|
||||
event = FakeEvent(src_path='dummy_src')
|
||||
event = FakeEvent(src_path="dummy_src")
|
||||
handler = NotifyHandler()
|
||||
result, _ = self.monitor._submit(handler, event, 'up')
|
||||
result, _ = self.monitor._submit(handler, event, "up")
|
||||
self.assertIs(handler, result)
|
||||
self.assertIsInstance(handler.future, futures.Future)
|
||||
self.monitor.pool.shutdown()
|
||||
|
||||
def test_submit_job_exists(self):
|
||||
event = FakeEvent(src_path='dummy_src')
|
||||
event = FakeEvent(src_path="dummy_src")
|
||||
|
||||
job_1, new_1 = self.monitor._submit(WaitHandler(), event, 'up')
|
||||
job_2, new_2 = self.monitor._submit(NotifyHandler(), event, 'up')
|
||||
job_1, new_1 = self.monitor._submit(WaitHandler(), event, "up")
|
||||
job_2, new_2 = self.monitor._submit(NotifyHandler(), event, "up")
|
||||
self.assertIs(job_1, job_2)
|
||||
self.assertTrue(new_1)
|
||||
self.assertFalse(new_2)
|
||||
|
|
|
@ -1,18 +1,18 @@
|
|||
import datetime
|
||||
import calendar
|
||||
import datetime
|
||||
import logging
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from django.test import TestCase
|
||||
from django.utils import timezone as tz
|
||||
|
||||
from aircox.models import *
|
||||
from aircox.models import Schedule
|
||||
|
||||
logger = logging.getLogger('aircox.test')
|
||||
logger.setLevel('INFO')
|
||||
logger = logging.getLogger("aircox.test")
|
||||
logger.setLevel("INFO")
|
||||
|
||||
|
||||
class ScheduleCheck (TestCase):
|
||||
class ScheduleCheck(TestCase):
|
||||
def setUp(self):
|
||||
self.schedules = [
|
||||
Schedule(
|
||||
|
@ -25,15 +25,16 @@ class ScheduleCheck (TestCase):
|
|||
|
||||
def test_frequencies(self):
|
||||
for schedule in self.schedules:
|
||||
logger.info('- test frequency %s' %
|
||||
schedule.get_frequency_display())
|
||||
logger.info(
|
||||
"- test frequency %s" % schedule.get_frequency_display()
|
||||
)
|
||||
date = schedule.date
|
||||
count = 24
|
||||
while count:
|
||||
logger.info('- month %(month)s/%(year)s' % {
|
||||
'month': date.month,
|
||||
'year': date.year
|
||||
})
|
||||
logger.info(
|
||||
"- month %(month)s/%(year)s"
|
||||
% {"month": date.month, "year": date.year}
|
||||
)
|
||||
count -= 1
|
||||
dates = schedule.dates_of_month(date)
|
||||
if schedule.frequency == schedule.Frequency.one_on_two:
|
||||
|
|
aircox/urls.py
@@ -1,18 +1,16 @@
from django.urls import include, path, register_converter
from django.utils.translation import gettext_lazy as _

from rest_framework.routers import DefaultRouter

from . import models, views, viewsets
from .converters import PagePathConverter, DateConverter, WeekConverter
from .converters import DateConverter, PagePathConverter, WeekConverter

__all__ = ["api", "urls"]

__all__ = ['api', 'urls']


register_converter(PagePathConverter, 'page_path')
register_converter(DateConverter, 'date')
register_converter(WeekConverter, 'week')
register_converter(PagePathConverter, "page_path")
register_converter(DateConverter, "date")
register_converter(WeekConverter, "week")


# urls = [

@@ -23,69 +21,92 @@ register_converter(WeekConverter, 'week')

router = DefaultRouter()
router.register('sound', viewsets.SoundViewSet, basename='sound')
router.register('track', viewsets.TrackROViewSet, basename='track')
router.register("sound", viewsets.SoundViewSet, basename="sound")
router.register("track", viewsets.TrackROViewSet, basename="track")


api = [
    path('logs/', views.LogListAPIView.as_view(), name='live'),
    path('user/settings/', viewsets.UserSettingsViewSet.as_view(
        {'get': 'retrieve', 'post': 'update', 'put': 'update'}),
        name='user-settings'),
    path("logs/", views.LogListAPIView.as_view(), name="live"),
    path(
        "user/settings/",
        viewsets.UserSettingsViewSet.as_view(
            {"get": "retrieve", "post": "update", "put": "update"}
        ),
        name="user-settings",
    ),
] + router.urls


urls = [
    path('', views.HomeView.as_view(), name='home'),
    path('api/', include((api, 'aircox'), namespace='api')),

    path("", views.HomeView.as_view(), name="home"),
    path("api/", include((api, "aircox"), namespace="api")),
    # path('', views.PageDetailView.as_view(model=models.Article),
    #      name='home'),
    path(_('articles/'),
         views.ArticleListView.as_view(model=models.Article),
         name='article-list'),
    path(_('articles/<slug:slug>/'),
         views.ArticleDetailView.as_view(),
         name='article-detail'),

    path(_('episodes/'),
         views.EpisodeListView.as_view(), name='episode-list'),
    path(_('episodes/<slug:slug>/'),
         views.EpisodeDetailView.as_view(), name='episode-detail'),
    path(_('week/'),
         views.DiffusionListView.as_view(), name='diffusion-list'),
    path(_('week/<date:date>/'),
         views.DiffusionListView.as_view(), name='diffusion-list'),

    path(_('logs/'), views.LogListView.as_view(), name='log-list'),
    path(_('logs/<date:date>/'), views.LogListView.as_view(), name='log-list'),
    path(
        _("articles/"),
        views.ArticleListView.as_view(model=models.Article),
        name="article-list",
    ),
    path(
        _("articles/<slug:slug>/"),
        views.ArticleDetailView.as_view(),
        name="article-detail",
    ),
    path(_("episodes/"), views.EpisodeListView.as_view(), name="episode-list"),
    path(
        _("episodes/<slug:slug>/"),
        views.EpisodeDetailView.as_view(),
        name="episode-detail",
    ),
    path(_("week/"), views.DiffusionListView.as_view(), name="diffusion-list"),
    path(
        _("week/<date:date>/"),
        views.DiffusionListView.as_view(),
        name="diffusion-list",
    ),
    path(_("logs/"), views.LogListView.as_view(), name="log-list"),
    path(_("logs/<date:date>/"), views.LogListView.as_view(), name="log-list"),
    # path('<page_path:path>', views.route_page, name='page'),

    path(_('publications/'),
         views.PageListView.as_view(model=models.Page), name='page-list'),

    path(_('pages/'), views.BasePageListView.as_view(
    path(
        _("publications/"),
        views.PageListView.as_view(model=models.Page),
        name="page-list",
    ),
    path(
        _("pages/"),
        views.BasePageListView.as_view(
            model=models.StaticPage,
            queryset=models.StaticPage.objects.filter(attach_to__isnull=True),
        ),
        name='static-page-list'
        name="static-page-list",
    ),
    path(_('pages/<slug:slug>/'), views.BasePageDetailView.as_view(
    path(
        _("pages/<slug:slug>/"),
        views.BasePageDetailView.as_view(
            model=models.StaticPage,
            queryset=models.StaticPage.objects.filter(attach_to__isnull=True),
        ),
        name='static-page-detail'
        name="static-page-detail",
    ),
    path(_("programs/"), views.ProgramListView.as_view(), name="program-list"),
    path(
        _("programs/<slug:slug>/"),
        views.ProgramDetailView.as_view(),
        name="program-detail",
    ),
    path(
        _("programs/<slug:parent_slug>/episodes/"),
        views.EpisodeListView.as_view(),
        name="episode-list",
    ),
    path(
        _("programs/<slug:parent_slug>/articles/"),
        views.ArticleListView.as_view(),
        name="article-list",
    ),
    path(
        _("programs/<slug:parent_slug>/publications/"),
        views.ProgramPageListView.as_view(),
        name="program-page-list",
    ),

    path(_('programs/'), views.ProgramListView.as_view(),
         name='program-list'),
    path(_('programs/<slug:slug>/'),
         views.ProgramDetailView.as_view(), name='program-detail'),
    path(_('programs/<slug:parent_slug>/episodes/'),
         views.EpisodeListView.as_view(), name='episode-list'),
    path(_('programs/<slug:parent_slug>/articles/'),
         views.ArticleListView.as_view(), name='article-list'),
    path(_('programs/<slug:parent_slug>/publications/'),
         views.ProgramPageListView.as_view(), name='program-page-list'),
]
@@ -1,44 +1,51 @@
import datetime

import django.utils.timezone as tz


__all__ = ['Redirect', 'redirect', 'date_range', 'cast_date',
           'date_or_default', 'to_timedelta', 'seconds_to_time']
__all__ = [
    "Redirect",
    "redirect",
    "date_range",
    "cast_date",
    "date_or_default",
    "to_timedelta",
    "seconds_to_time",
]


class Redirect(Exception):
    """ Redirect exception -- see `redirect()`. """
    """Redirect exception -- see `redirect()`."""

    def __init__(self, url):
        self.url = url


def redirect(url):
    """Raise a Redirect exception in order to response a redirection to client.

    AircoxMiddleware must be enabled.
    """
    Raise a Redirect exception in order to response a redirection to client.
    AircoxMiddleware must be enabled. """
    raise Redirect(url)


def str_to_date(value, sep='/'):
    """
    Return a date from the provided `value` string, formated as "yyyy/mm/dd"
def str_to_date(value, sep="/"):
    """Return a date from the provided `value` string, formated as "yyyy/mm/dd"
    (or "dd/mm/yyyy" if `reverse` is True).

    Raises ValueError for incorrect value format.
    """
    value = value.split(sep)[:3]
    if len(value) < 3:
        return ValueError('incorrect date format')
        return ValueError("incorrect date format")
    return datetime.date(int(value[0]), int(value[1]), int(value[2]))


def date_range(date, delta=None, **delta_kwargs):
    """
    Return a range of provided date such as `[date-delta, date+delta]`.
    """Return a range of provided date such as `[date-delta, date+delta]`.

    :param date: the reference date
    :param delta: timedelta
    :param \**delta_kwargs: timedelta init arguments
    :param **delta_kwargs: timedelta init arguments

    Return a datetime range for a given day, as:
    ```(date, 0:0:0:0; date, 23:59:59:999)```.

@@ -48,21 +55,26 @@ def date_range(date, delta=None, **delta_kwargs):

def cast_date(date, into=datetime.date):
    """
    Cast a given date into the provided class' instance. Make datetime
    aware of timezone.
    """Cast a given date into the provided class' instance.

    Make datetime aware of timezone.
    """
    date = into(date.year, date.month, date.day)
    return tz.make_aware(date) if issubclass(into, tz.datetime) else date


def date_or_default(date, into=None):
    """Return date if not None, otherwise return now.

    Cast result into provided type if any.
    """
    Return date if not None, otherwise return now. Cast result into provided
    type if any.
    """
    date = date if date is not None else datetime.date.today() \
        if into is not None and issubclass(into, datetime.date) else tz.now()
    date = (
        date
        if date is not None
        else datetime.date.today()
        if into is not None and issubclass(into, datetime.date)
        else tz.now()
    )

    if into is not None:
        date = cast_date(date, into)

@@ -73,30 +85,26 @@ def date_or_default(date, into=None):

def to_timedelta(time):
    """
    Transform a datetime or a time instance to a timedelta,
    only using time info
    """
    """Transform a datetime or a time instance to a timedelta, only using time
    info."""
    return datetime.timedelta(
        hours=time.hour,
        minutes=time.minute,
        seconds=time.second
        hours=time.hour, minutes=time.minute, seconds=time.second
    )


def to_seconds(time):
    """ Return total seconds for provided time """
    """Return total seconds for provided time."""
    return 3600 * time.hour + 60 * time.minute + time.second


def seconds_to_time(seconds):
    """
    Seconds to datetime.time
    """
    """Seconds to datetime.time."""
    seconds, microseconds = divmod(seconds, 1)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return datetime.time(hour=int(hours), minute=int(minutes), second=int(seconds),
                         microsecond=int(microseconds*100000))
    return datetime.time(
        hour=int(hours),
        minute=int(minutes),
        second=int(seconds),
        microsecond=int(microseconds * 100000),
    )
@@ -1,12 +1,40 @@
from . import admin
from .base import BaseView, BaseAPIView
from .home import HomeView

from .article import ArticleDetailView, ArticleListView
from .episode import EpisodeDetailView, EpisodeListView, DiffusionListView
from .log import LogListView, LogListAPIView
from .page import BasePageListView, BasePageDetailView, PageListView, PageDetailView
from .program import ProgramDetailView, ProgramListView, \
    ProgramPageDetailView, ProgramPageListView
from .base import BaseAPIView, BaseView
from .episode import DiffusionListView, EpisodeDetailView, EpisodeListView
from .home import HomeView
from .log import LogListAPIView, LogListView
from .page import (
    BasePageDetailView,
    BasePageListView,
    PageDetailView,
    PageListView,
)
from .program import (
    ProgramDetailView,
    ProgramListView,
    ProgramPageDetailView,
    ProgramPageListView,
)

__all__ = (
    "admin",
    "ArticleDetailView",
    "ArticleListView",
    "BaseAPIView",
    "BaseView",
    "DiffusionListView",
    "EpisodeDetailView",
    "EpisodeListView",
    "HomeView",
    "LogListAPIView",
    "LogListView",
    "BasePageDetailView",
    "BasePageListView",
    "PageDetailView",
    "PageListView",
    "ProgramDetailView",
    "ProgramListView",
    "ProgramPageDetailView",
    "ProgramPageListView",
)
@@ -3,17 +3,16 @@ from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.utils.translation import gettext_lazy as _
from django.views.generic import ListView

from .log import LogListView
from ..models.log import LogArchiver
from .log import LogListView


__all__ = ['AdminMixin', 'StatisticsView']
__all__ = ["AdminMixin", "StatisticsView"]


class AdminMixin(LoginRequiredMixin, UserPassesTestMixin):
    title = ''
    title = ""
    init_app = True
    """ If true, create vue app. """
    """If true, create vue app."""

    @property
    def station(self):

@@ -24,19 +23,23 @@ class AdminMixin(LoginRequiredMixin, UserPassesTestMixin):

    def get_context_data(self, **kwargs):
        kwargs.update(admin.site.each_context(self.request))
        kwargs.setdefault('title', self.title)
        kwargs.setdefault('station', self.station)
        kwargs.setdefault('init_app', self.init_app)
        kwargs.setdefault("title", self.title)
        kwargs.setdefault("station", self.station)
        kwargs.setdefault("init_app", self.init_app)
        return super().get_context_data(**kwargs)


class StatisticsView(AdminMixin, LogListView, ListView):
    template_name = 'admin/aircox/statistics.html'
    redirect_date_url = 'admin:tools-stats'
    title = _('Statistics')
    template_name = "admin/aircox/statistics.html"
    redirect_date_url = "admin:tools-stats"
    title = _("Statistics")
    date = None

    def get_object_list(self, logs, full=False):
        if not logs.exists():
            logs = LogArchiver().load(self.station, self.date) if self.date else []
            logs = (
                LogArchiver().load(self.station, self.date)
                if self.date
                else []
            )
        return super().get_object_list(logs, True)
@@ -1,8 +1,7 @@
from ..models import Article, Program, StaticPage
from .page import PageDetailView, PageListView


__all__ = ['ArticleDetailView', 'ArticleListView']
__all__ = ["ArticleDetailView", "ArticleListView"]


class ArticleDetailView(PageDetailView):

@@ -10,8 +9,11 @@ class ArticleDetailView(PageDetailView):
    model = Article

    def get_sidebar_queryset(self):
        qs = Article.objects.published().select_related('cover') \
                    .order_by('-pub_date')
        qs = (
            Article.objects.published()
            .select_related("cover")
            .order_by("-pub_date")
        )
        return qs


@@ -20,5 +22,3 @@ class ArticleListView(PageListView):
    has_headline = True
    parent_model = Program
    attach_to_value = StaticPage.ATTACH_TO_ARTICLES
@@ -1,19 +1,18 @@
from django.views.generic.base import TemplateResponseMixin, ContextMixin
from django.urls import reverse
from django.views.generic.base import ContextMixin, TemplateResponseMixin

from ..models import Page


__all__ = ('BaseView', 'BaseAPIView')
__all__ = ("BaseView", "BaseAPIView")


class BaseView(TemplateResponseMixin, ContextMixin):
    has_sidebar = True
    """ Show side navigation """
    """Show side navigation."""
    has_filters = False
    """ Show filters nav """
    """Show filters nav."""
    list_count = 5
    """ Item count for small lists displayed on page. """
    """Item count for small lists displayed on page."""

    @property
    def station(self):

@@ -23,37 +22,43 @@ class BaseView(TemplateResponseMixin, ContextMixin):
    # return super().get_queryset().station(self.station)

    def get_sidebar_queryset(self):
        """ Return a queryset of items to render on the side nav. """
        return Page.objects.select_subclasses().published() \
                   .order_by('-pub_date')
        """Return a queryset of items to render on the side nav."""
        return (
            Page.objects.select_subclasses().published().order_by("-pub_date")
        )

    def get_sidebar_url(self):
        return reverse('page-list')
        return reverse("page-list")

    def get_page(self):
        return None

    def get_context_data(self, **kwargs):
        kwargs.setdefault('station', self.station)
        kwargs.setdefault('page', self.get_page())
        kwargs.setdefault('has_filters', self.has_filters)
        kwargs.setdefault("station", self.station)
        kwargs.setdefault("page", self.get_page())
        kwargs.setdefault("has_filters", self.has_filters)

        has_sidebar = kwargs.setdefault('has_sidebar', self.has_sidebar)
        if has_sidebar and 'sidebar_object_list' not in kwargs:
        has_sidebar = kwargs.setdefault("has_sidebar", self.has_sidebar)
        if has_sidebar and "sidebar_object_list" not in kwargs:
            sidebar_object_list = self.get_sidebar_queryset()
            if sidebar_object_list is not None:
                kwargs['sidebar_object_list'] = sidebar_object_list[:self.list_count]
                kwargs['sidebar_list_url'] = self.get_sidebar_url()
                kwargs["sidebar_object_list"] = sidebar_object_list[
                    : self.list_count
                ]
                kwargs["sidebar_list_url"] = self.get_sidebar_url()

        if 'audio_streams' not in kwargs:
        if "audio_streams" not in kwargs:
            streams = self.station.audio_streams
            streams = streams and streams.split('\n')
            kwargs['audio_streams'] = streams
            streams = streams and streams.split("\n")
            kwargs["audio_streams"] = streams

        if 'model' not in kwargs:
            model = getattr(self, 'model', None) or \
                hasattr(self, 'object') and type(self.object)
            kwargs['model'] = model
        if "model" not in kwargs:
            model = (
                getattr(self, "model", None)
                or hasattr(self, "object")
                and type(self.object)
            )
            kwargs["model"] = model

        return super().get_context_data(**kwargs)

@@ -66,5 +71,3 @@ class BaseAPIView:

    def get_queryset(self):
        return super().get_queryset().station(self.station)
@@ -1,43 +1,45 @@
from collections import OrderedDict
import datetime

from django.utils.translation import gettext_lazy as _
from django.views.generic import ListView

from ..filters import EpisodeFilters
from ..models import Diffusion, Episode, Program, StaticPage, Sound
from ..models import Diffusion, Episode, Program, StaticPage
from .base import BaseView
from .program import ProgramPageDetailView
from .mixins import AttachedToMixin, GetDateMixin
from .page import PageListView
from .mixins import AttachedToMixin, GetDateMixin, ParentMixin
from .program import ProgramPageDetailView


__all__ = ['EpisodeDetailView', 'EpisodeListView', 'DiffusionListView', 'SoundListView']
__all__ = (
    "EpisodeDetailView",
    "EpisodeListView",
    "DiffusionListView",
)


class EpisodeDetailView(ProgramPageDetailView):
    model = Episode

    def get_context_data(self, **kwargs):
        if not 'tracks' in kwargs:
            kwargs['tracks'] = self.object.track_set.order_by('position')
        if "tracks" not in kwargs:
            kwargs["tracks"] = self.object.track_set.order_by("position")
        return super().get_context_data(**kwargs)


class EpisodeListView(PageListView):
    model = Episode
    filterset_class = EpisodeFilters
    item_template_name = 'aircox/widgets/episode_item.html'
    item_template_name = "aircox/widgets/episode_item.html"
    has_headline = True
    parent_model = Program
    attach_to_value = StaticPage.ATTACH_TO_EPISODES


class DiffusionListView(GetDateMixin, AttachedToMixin, BaseView, ListView):
    """ View for timetables """
    """View for timetables."""

    model = Diffusion
    has_filters = True
    redirect_date_url = 'diffusion-list'
    redirect_date_url = "diffusion-list"
    attach_to_value = StaticPage.ATTACH_TO_DIFFUSIONS

    def get_date(self):

@@ -45,10 +47,9 @@ class DiffusionListView(GetDateMixin, AttachedToMixin, BaseView, ListView):
        return date if date is not None else datetime.date.today()

    def get_queryset(self):
        return super().get_queryset().date(self.date).order_by('start')
        return super().get_queryset().date(self.date).order_by("start")

    def get_context_data(self, **kwargs):
        start = self.date - datetime.timedelta(days=self.date.weekday())
        dates = [start + datetime.timedelta(days=i) for i in range(0, 7)]
        return super().get_context_data(date=self.date, dates=dates, **kwargs)
@@ -1,19 +1,17 @@
from datetime import date

from django.utils.translation import gettext as _
from django.utils import timezone as tz
from django.views.generic import ListView

from ..models import Diffusion, Log, Page, StaticPage
from .base import BaseView
from .page import PageListView


class HomeView(BaseView, ListView):
    template_name = 'aircox/home.html'
    template_name = "aircox/home.html"
    model = Diffusion
    attach_to_value = StaticPage.ATTACH_TO_HOME
    queryset = Diffusion.objects.on_air().select_related('episode')
    queryset = Diffusion.objects.on_air().select_related("episode")
    logs_count = 5
    publications_count = 5
    has_filters = False

@@ -32,15 +30,16 @@ class HomeView(BaseView, ListView):
        current_diff = Diffusion.objects.on_air().now(now).first()
        next_diffs = Diffusion.objects.on_air().after(now)
        if current_diff:
            diffs = [current_diff] + list(next_diffs.exclude(pk=current_diff.pk)[:2])
            diffs = [current_diff] + list(
                next_diffs.exclude(pk=current_diff.pk)[:2]
            )
        else:
            diffs = next_diffs[:3]
        return diffs

    def get_last_publications(self):
        # note: with postgres db, possible to use distinct()
        qs = Page.objects.select_subclasses().published() \
               .order_by('-pub_date')
        qs = Page.objects.select_subclasses().published().order_by("-pub_date")
        parents = set()
        items = []
        for publication in qs:

@@ -54,8 +53,7 @@ class HomeView(BaseView, ListView):

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['logs'] = self.get_logs(context['object_list'])
        context['next_diffs'] = self.get_next_diffs()
        context['last_publications'] = self.get_last_publications()[:5]
        context["logs"] = self.get_logs(context["object_list"])
        context["next_diffs"] = self.get_next_diffs()
        context["last_publications"] = self.get_last_publications()[:5]
        return context
@@ -1,22 +1,17 @@
from collections import deque
import datetime

from django.utils import timezone as tz
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from django.views.generic import ListView
from django.utils import timezone as tz

from rest_framework.generics import ListAPIView
from rest_framework import viewsets
from rest_framework.decorators import action

from ..models import Diffusion, Log, StaticPage
from ..serializers import LogInfo, LogInfoSerializer
from .base import BaseView, BaseAPIView
from .mixins import GetDateMixin, AttachedToMixin
from .base import BaseAPIView, BaseView
from .mixins import AttachedToMixin, GetDateMixin


__all__ = ['LogListMixin', 'LogListView']
__all__ = ["LogListMixin", "LogListView"]


class LogListMixin(GetDateMixin):

@@ -32,21 +27,39 @@ class LogListMixin(GetDateMixin):
    def get_queryset(self):
        # only get logs for tracks: log for diffusion will be retrieved
        # by the diffusions' queryset.
        qs = super().get_queryset().on_air().filter(track__isnull=False) \
                    .filter(date__lte=tz.now())
        return qs.date(self.date) if self.date is not None else \
            qs.after(self.min_date) if self.min_date is not None else qs
        qs = (
            super()
            .get_queryset()
            .on_air()
            .filter(track__isnull=False)
            .filter(date__lte=tz.now())
        )
        return (
            qs.date(self.date)
            if self.date is not None
            else qs.after(self.min_date)
            if self.min_date is not None
            else qs
        )

    def get_diffusions_queryset(self):
        qs = Diffusion.objects.station(self.station).on_air() \
                      .filter(start__lte=tz.now())
        return qs.date(self.date) if self.date is not None else \
            qs.after(self.min_date) if self.min_date is not None else qs
        qs = (
            Diffusion.objects.station(self.station)
            .on_air()
            .filter(start__lte=tz.now())
        )
        return (
            qs.date(self.date)
            if self.date is not None
            else qs.after(self.min_date)
            if self.min_date is not None
            else qs
        )

    def get_object_list(self, logs, full=False):
        """
        Return diffusions merged to the provided logs iterable. If
        `full`, sort items by date without merging.
        """Return diffusions merged to the provided logs iterable.

        If `full`, sort items by date without merging.
        """
        diffs = self.get_diffusions_queryset()
        if self.request.user.is_staff and full:

@@ -55,11 +68,10 @@ class LogListMixin(GetDateMixin):


class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
    """
    Return list of logs for the provided date (from `kwargs` or
    `request.GET`, defaults to today).
    """
    redirect_date_url = 'log-list'
    """Return list of logs for the provided date (from `kwargs` or
    `request.GET`, defaults to today)."""

    redirect_date_url = "log-list"
    has_filters = True
    attach_to_value = StaticPage.ATTACH_TO_LOGS

@@ -72,24 +84,28 @@ class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
        # `super()...` must be called before updating kwargs, in order
        # to get `self.object_list`
        kwargs = super().get_context_data(**kwargs)
        kwargs.update({
            'date': self.date,
            'dates': (today - datetime.timedelta(days=i) for i in range(0, 7)),
            'object_list': self.get_object_list(self.object_list),
        })
        kwargs.update(
            {
                "date": self.date,
                "dates": (
                    today - datetime.timedelta(days=i) for i in range(0, 7)
                ),
                "object_list": self.get_object_list(self.object_list),
            }
        )
        return kwargs


# Logs are accessible through API only with this list view
class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
    """
    Return logs list, including diffusions. By default return logs of
    the last 30 minutes.
    """Return logs list, including diffusions. By default return logs of the
    last 30 minutes.

    Available GET parameters:
    - "date": return logs for a specified date (
    - "full": (staff user only) don't merge diffusion and logs
    """

    serializer_class = LogInfoSerializer
    queryset = Log.objects.all()

@@ -107,7 +123,7 @@ class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
        return [LogInfo(obj) for obj in super().get_object_list(logs, full)]

    def get_serializer(self, queryset, *args, **kwargs):
        full = bool(self.request.GET.get('full'))
        return super().get_serializer(self.get_object_list(queryset, full),
                                      *args, **kwargs)
        full = bool(self.request.GET.get("full"))
        return super().get_serializer(
            self.get_object_list(queryset, full), *args, **kwargs
        )
@@ -1,49 +1,54 @@
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse

from ..utils import str_to_date
from ..models import StaticPage
from ..utils import str_to_date


__all__ = ['GetDateMixin', 'ParentMixin', 'AttachedToMixin']
__all__ = ["GetDateMixin", "ParentMixin", "AttachedToMixin"]


class GetDateMixin:
    """
    Mixin offering utils to get date by `request.GET` or
    `kwargs['date']`
    """
    """Mixin offering utils to get date by `request.GET` or `kwargs['date']`"""

    date = None
    redirect_date_url = None

    def get_date(self):
        date = self.request.GET.get('date')
        return str_to_date(date, '-') if date is not None else \
            self.kwargs['date'] if 'date' in self.kwargs else None
        date = self.request.GET.get("date")
        return (
            str_to_date(date, "-")
            if date is not None
            else self.kwargs["date"]
            if "date" in self.kwargs
            else None
        )

    def get(self, *args, **kwargs):
        if self.redirect_date_url and self.request.GET.get('date'):
            return redirect(self.redirect_date_url,
                            date=self.request.GET['date'].replace('-', '/'))
        if self.redirect_date_url and self.request.GET.get("date"):
            return redirect(
                self.redirect_date_url,
                date=self.request.GET["date"].replace("-", "/"),
            )

        self.date = self.get_date()
        return super().get(*args, **kwargs)


class ParentMixin:
    """Optional parent page for a list view.

    Parent is fetched and passed to the template context when
    `parent_model` is provided (queryset is filtered by parent page in
    such case).
    """
    Optional parent page for a list view. Parent is fetched and passed to the
    template context when `parent_model` is provided (queryset is filtered by
    parent page in such case).
    """

    parent_model = None
    """ Parent model """
    parent_url_kwarg = 'parent_slug'
    """ Url lookup argument """
    parent_field = 'slug'
    """ Parent field for url lookup """
    """Parent model."""
    parent_url_kwarg = "parent_slug"
    """Url lookup argument."""
    parent_field = "slug"
    """Parent field for url lookup."""
    parent = None
    """ Parent page object """
    """Parent page object."""

    def get_parent(self, request, *args, **kwargs):
        if self.parent_model is None or self.parent_url_kwarg not in kwargs:

@@ -51,7 +56,8 @@ class ParentMixin:

        lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
        return get_object_or_404(
            self.parent_model.objects.select_related('cover'), **lookup)
            self.parent_model.objects.select_related("cover"), **lookup
        )

    def get(self, request, *args, **kwargs):
        self.parent = self.get_parent(request, *args, **kwargs)

@@ -63,32 +69,37 @@ class ParentMixin:
        return super().get_queryset()

    def get_context_data(self, **kwargs):
        self.parent = kwargs.setdefault('parent', self.parent)
        self.parent = kwargs.setdefault("parent", self.parent)
        if self.parent is not None:
            kwargs.setdefault('cover', self.parent.cover)
            kwargs.setdefault("cover", self.parent.cover)
        return super().get_context_data(**kwargs)


class AttachedToMixin:
    """ Mixin for views that can have a static page attached to it. """
    """Mixin for views that can have a static page attached to it."""

    attach_to_value = None
    """ Value of StaticPage.attach_to """
    """Value of StaticPage.attach_to."""

    def get_page(self):
        if self.attach_to_value is not None:
            return StaticPage.objects.filter(attach_to=self.attach_to_value) \
                             .published().first()
            return (
                StaticPage.objects.filter(attach_to=self.attach_to_value)
                .published()
                .first()
            )
        return super().get_page()


class FiltersMixin:
    """ Mixin integrating Django filters' filter set """
    """Mixin integrating Django filters' filter set."""

    filterset = None
    filterset_class = None

    def get_filterset(self, data, query):
        return self.filterset_class(data, query)

    def get_queryset(self):
        query = super().get_queryset()
        if self.filterset_class:

@@ -97,13 +108,12 @@ class FiltersMixin:
        return query

    def get_context_data(self, **kwargs):
        filterset = kwargs.setdefault('filterset', self.filterset)
        filterset = kwargs.setdefault("filterset", self.filterset)
        if filterset.is_valid():
            kwargs['filterset_data'] = filterset.form.cleaned_data
            kwargs["filterset_data"] = filterset.form.cleaned_data
        else:
            kwargs['filterset_data'] = {}
            kwargs["filterset_data"] = {}

        params = self.request.GET.copy()
        kwargs['get_params'] = params.pop('page', True) and params
        kwargs["get_params"] = params.pop("page", True) and params
        return super().get_context_data(**kwargs)
@@ -1,25 +1,28 @@
from django.http import Http404, HttpResponse
from django.utils.translation import gettext_lazy as _
from django.views.generic import DetailView, ListView

from honeypot.decorators import check_honeypot

from ..filters import PageFilters
from ..forms import CommentForm
from ..models import Category, Comment
from ..models import Comment
from ..utils import Redirect
from .base import BaseView
from .mixins import AttachedToMixin, FiltersMixin, ParentMixin


__all__ = ['BasePageListView', 'BasePageDetailView', 'PageDetailView', 'PageListView']
__all__ = [
    "BasePageListView",
    "BasePageDetailView",
    "PageDetailView",
    "PageListView",
]


class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
    """ Base view class for BasePage list. """
    template_name = 'aircox/basepage_list.html'
    item_template_name = 'aircox/widgets/page_item.html'
    """Base view class for BasePage list."""

    template_name = "aircox/basepage_list.html"
    item_template_name = "aircox/widgets/page_item.html"
    has_sidebar = True

    paginate_by = 30

@@ -29,35 +32,39 @@ class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
        return super().get(*args, **kwargs)

    def get_queryset(self):
        return super().get_queryset().select_subclasses().published() \
                      .select_related('cover')
        return (
            super()
            .get_queryset()
            .select_subclasses()
            .published()
            .select_related("cover")
        )

    def get_context_data(self, **kwargs):
        kwargs.setdefault('item_template_name', self.item_template_name)
        kwargs.setdefault('has_headline', self.has_headline)
        kwargs.setdefault("item_template_name", self.item_template_name)
        kwargs.setdefault("has_headline", self.has_headline)
        return super().get_context_data(**kwargs)


class BasePageDetailView(BaseView, DetailView):
    """ Base view class for BasePage. """
    template_name = 'aircox/basepage_detail.html'
    context_object_name = 'page'
    """Base view class for BasePage."""

    template_name = "aircox/basepage_detail.html"
    context_object_name = "page"
    has_filters = False

    def get_queryset(self):
        return super().get_queryset().select_related('cover')
        return super().get_queryset().select_related("cover")

    # This should not exists: it allows mapping not published pages
    # or it should be only used for trashed pages.
    def not_published_redirect(self, page):
        """
        When a page is not published, redirect to the returned url instead of an
        HTTP 404 code.
        """
        """When a page is not published, redirect to the returned url instead
        of an HTTP 404 code."""
        return None

    def get_object(self):
        if getattr(self, 'object', None):
        if getattr(self, "object", None):
            return self.object

        obj = super().get_object()

@@ -65,7 +72,7 @@ class BasePageDetailView(BaseView, DetailView):
            redirect_url = self.not_published_redirect(obj)
            if redirect_url:
                raise Redirect(redirect_url)
            raise Http404('%s not found' % self.model._meta.verbose_name)
            raise Http404("%s not found" % self.model._meta.verbose_name)
        return obj

    def get_page(self):

@@ -73,7 +80,8 @@ class BasePageDetailView(BaseView, DetailView):


class PageListView(FiltersMixin, BasePageListView):
    """ Page list view. """
    """Page list view."""

    filterset_class = PageFilters
    template_name = None
    has_filters = True

@@ -81,58 +89,65 @@ class PageListView(FiltersMixin, BasePageListView):
    filters = None

    def get_template_names(self):
        return super().get_template_names() + ['aircox/page_list.html']
        return super().get_template_names() + ["aircox/page_list.html"]

    def get_filterset(self, data, query):
        # FIXME: not the most efficient, cause join then split (in django filters)
        data['category__id__in'] = ','.join(data.getlist('category__id__in'))
        # FIXME: not the most efficient, cause join then split (django filters)
        data["category__id__in"] = ",".join(data.getlist("category__id__in"))
        return super().get_filterset(data, query)

    def get_queryset(self):
        qs = super().get_queryset().select_related('category') \
               .order_by('-pub_date')
        qs = (
            super()
            .get_queryset()
            .select_related("category")
            .order_by("-pub_date")
        )
        return qs

    def get_context_data(self, **kwargs):
        kwargs['categories'] = self.model.objects.published() \
            .filter(category__isnull=False) \
            .values_list('category__title', 'category__id') \
            .distinct()
        kwargs["categories"] = (
            self.model.objects.published()
            .filter(category__isnull=False)
            .values_list("category__title", "category__id")
            .distinct()
        )
        return super().get_context_data(**kwargs)


class PageDetailView(BasePageDetailView):
    """ Base view class for pages. """
    """Base view class for pages."""

    template_name = None
    context_object_name = 'page'
    context_object_name = "page"
    has_filters = False

    def get_template_names(self):
        return super().get_template_names() + ['aircox/page_detail.html']
        return super().get_template_names() + ["aircox/page_detail.html"]

    def get_queryset(self):
        return super().get_queryset().select_related('category')
        return super().get_queryset().select_related("category")

    def get_context_data(self, **kwargs):
        if self.object.allow_comments and not 'comment_form' in kwargs:
            kwargs['comment_form'] = CommentForm()
        kwargs['comments'] = Comment.objects.filter(page=self.object) \
                                    .order_by('-date')
        if self.object.allow_comments and "comment_form" not in kwargs:
            kwargs["comment_form"] = CommentForm()
        kwargs["comments"] = Comment.objects.filter(page=self.object).order_by(
            "-date"
        )
        return super().get_context_data(**kwargs)

    @classmethod
    def as_view(cls, *args, **kwargs):
        view = super(PageDetailView, cls).as_view(*args, **kwargs)
        return check_honeypot(view, field_name='website')
        return check_honeypot(view, field_name="website")

    def post(self, request, *args, **kwargs):
        self.object = self.get_object()
        if not self.object.allow_comments:
            return HttpResponse(_('comments are not allowed'), status=503)
            return HttpResponse(_("comments are not allowed"), status=503)

        form = CommentForm(request.POST)
        comment = form.save(commit=False)
        comment.page = self.object
        comment.save()
        return self.get(request, *args, **kwargs)