Compare commits
10 commits (e70a352dcd...7dec6a49ae)

SHA1:
7dec6a49ae
4689eb1d20
98bcb4fb00
fadbbcc762
1ced37c330
d59f3402cd
d3b5b90d7a
1730a8f54b
4d54af78be
387998365e
@@ -9,11 +9,14 @@ repos:
     rev: 23.1.0
     hooks:
       - id: black
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.292
+        args:
+          - --line-length=79
+          - --exclude="""\.git|\.__pycache__|venv|_build|buck-out|build|dist"""
+  - repo: https://github.com/PyCQA/flake8.git
+    rev: 6.0.0
     hooks:
-      - id: ruff
-        args: [--fix, --exit-non-zero-on-fix]
+      - id: flake8
+        exclude: ^instance/settings/|migrations/
   - repo: https://github.com/PyCQA/docformatter.git
     rev: v1.5.1
     hooks:
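Note on the hunk above: it drops the ruff hook, restores flake8 6.0.0, and pins black to 79-column lines, the width every later hunk in this compare is wrapped to. A minimal sketch of previewing that width through black's Python API (assumes black is installed; the sample line is borrowed from a later hunk):

    import black

    # Ask black to rewrap a long call at 79 columns, matching the
    # reformatting applied throughout this compare view.
    src = 'sound = Sound.objects.filter(file__icontains=options.get("sound")).first()\n'
    print(black.format_str(src, mode=black.Mode(line_length=79)))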
@@ -18,7 +18,9 @@ class EpisodeAdminForm(ModelForm):
 class EpisodeAdmin(SortableAdminBase, PageAdmin):
     form = EpisodeAdminForm
     list_display = PageAdmin.list_display
-    list_filter = tuple(f for f in PageAdmin.list_filter if f != "pub_date") + (
+    list_filter = tuple(
+        f for f in PageAdmin.list_filter if f != "pub_date"
+    ) + (
         "diffusion__start",
         "pub_date",
     )
@@ -14,9 +14,13 @@ class DateFieldFilter(filters.FieldListFilter):
 
     def __init__(self, field, request, params, model, model_admin, field_path):
         self.field_generic = f"{field_path}__"
-        self.date_params = {k: v for k, v in params.items() if k.startswith(self.field_generic)}
+        self.date_params = {
+            k: v for k, v in params.items() if k.startswith(self.field_generic)
+        }
 
-        exact_lookup = "date" if isinstance(field, models.DateTimeField) else "exact"
+        exact_lookup = (
+            "date" if isinstance(field, models.DateTimeField) else "exact"
+        )
 
         # links as: (label, param, input_type|None, value)
         self.links = [

@@ -25,11 +29,17 @@ class DateFieldFilter(filters.FieldListFilter):
             (_("Until"), self.field_generic + "lte", self.input_type),
         ]
         if field.null:
-            self.links.insert(0, (_("None"), self.field_generic + "isnull", None, "1"))
+            self.links.insert(
+                0, (_("None"), self.field_generic + "isnull", None, "1")
+            )
 
-        self.query_attrs = {k: v for k, v in request.GET.items() if k not in self.date_params}
+        self.query_attrs = {
+            k: v for k, v in request.GET.items() if k not in self.date_params
+        }
         self.query_string = urlencode(self.query_attrs)
-        super().__init__(field, request, params, model, model_admin, field_path)
+        super().__init__(
+            field, request, params, model, model_admin, field_path
+        )
 
     def expected_parameters(self):
         return [link[1] for link in self.links]

@@ -49,7 +59,11 @@ class DateFieldFilter(filters.FieldListFilter):
                 "value": value,
                 "type": link[2],
                 "query_attrs": self.query_attrs,
-                "query_string": urlencode({link[1]: value}) + "&" + self.query_string if value else self.query_string,
+                "query_string": urlencode({link[1]: value})
+                + "&"
+                + self.query_string
+                if value
+                else self.query_string,
             }

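In the rewrapped "query_string" expression above, the conditional still binds last: the urlencode concatenation only runs when value is truthy. A standalone check (names below are hypothetical stand-ins for the filter's state):

    from urllib.parse import urlencode

    query_string = "page=2"
    link_param = "pub_date__gte"

    for value in ("2023", ""):
        result = (
            urlencode({link_param: value}) + "&" + query_string
            if value
            else query_string
        )
        print(result)  # pub_date__gte=2023&page=2, then page=2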
@@ -50,7 +50,11 @@ class BasePageAdmin(admin.ModelAdmin):
     change_form_template = "admin/aircox/page_change_form.html"
 
     def cover_thumb(self, obj):
-        return mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"])) if obj.cover else ""
+        return (
+            mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"]))
+            if obj.cover
+            else ""
+        )
 
     def get_changeform_initial_data(self, request):
         data = super().get_changeform_initial_data(request)

@@ -61,7 +65,9 @@ class BasePageAdmin(admin.ModelAdmin):
     def _get_common_context(self, query, extra_context=None):
         extra_context = extra_context or {}
         parent = query.get("parent", None)
-        extra_context["parent"] = None if parent is None else Page.objects.get_subclass(id=parent)
+        extra_context["parent"] = (
+            None if parent is None else Page.objects.get_subclass(id=parent)
+        )
         return extra_context
 
     def render_change_form(self, request, context, *args, **kwargs):

@@ -88,7 +94,9 @@ class PageAdmin(BasePageAdmin):
     search_fields = BasePageAdmin.search_fields + ("category__title",)
     fieldsets = deepcopy(BasePageAdmin.fieldsets)
 
-    fieldsets[0][1]["fields"].insert(fieldsets[0][1]["fields"].index("slug") + 1, "category")
+    fieldsets[0][1]["fields"].insert(
+        fieldsets[0][1]["fields"].index("slug") + 1, "category"
+    )
     fieldsets[1][1]["fields"] += ("featured", "allow_comments")

@@ -38,7 +38,9 @@ class SoundInline(admin.TabularInline):
     max_num = 0
 
     def audio(self, obj):
-        return mark_safe('<audio src="{}" controls></audio>'.format(obj.file.url))
+        return mark_safe(
+            '<audio src="{}" controls></audio>'.format(obj.file.url)
+        )
 
     audio.short_description = _("Audio")

@@ -84,7 +86,13 @@ class SoundAdmin(SortableAdminBase, admin.ModelAdmin):
 
     def related(self, obj):
         # TODO: link to episode or program edit
-        return obj.episode.title if obj.episode else obj.program.title if obj.program else ""
+        return (
+            obj.episode.title
+            if obj.episode
+            else obj.program.title
+            if obj.program
+            else ""
+        )
 
     related.short_description = _("Program / Episode")

@@ -26,13 +26,21 @@ class AdminSite(admin.AdminSite):
         context.update(
             {
                 # all programs
-                "programs": models.Program.objects.active().values("pk", "title").order_by("title"),
+                "programs": models.Program.objects.active()
+                .values("pk", "title")
+                .order_by("title"),
                 # today's diffusions
-                "diffusions": models.Diffusion.objects.date().order_by("start").select_related("episode"),
+                "diffusions": models.Diffusion.objects.date()
+                .order_by("start")
+                .select_related("episode"),
                 # TODO: only for dashboard
                 # last comments
-                "comments": models.Comment.objects.order_by("-date").select_related("page")[0:10],
-                "latests": models.Page.objects.select_subclasses().order_by("-pub_date")[0:10],
+                "comments": models.Comment.objects.order_by(
+                    "-date"
+                ).select_related("page")[0:10],
+                "latests": models.Page.objects.select_subclasses().order_by(
+                    "-pub_date"
+                )[0:10],
             }
         )
         return context

@@ -61,7 +69,9 @@ class AdminSite(admin.AdminSite):
         return [(label, reverse(url)) for label, url in self.tools]
 
     def route_view(self, url, view, name, admin_view=True, label=None):
-        self.extra_urls.append(path(url, self.admin_view(view) if admin_view else view, name=name))
+        self.extra_urls.append(
+            path(url, self.admin_view(view) if admin_view else view, name=name)
+        )
 
         if label:
             self.tools.append((label, "admin:" + name))
@@ -22,7 +22,9 @@ class DiffusionMonitor:
 
     def update(self):
         episodes, diffusions = [], []
-        for schedule in Schedule.objects.filter(program__active=True, initial__isnull=True):
+        for schedule in Schedule.objects.filter(
+            program__active=True, initial__isnull=True
+        ):
             eps, diffs = schedule.diffusions_of_month(self.date)
             if eps:
                 episodes += eps
@@ -44,7 +44,9 @@ class LogArchiver:
         path = self.get_path(station, date)
         # FIXME: remove binary mode
         with gzip.open(path, "ab") as archive:
-            data = yaml.dump([self.serialize(line) for line in logs]).encode("utf8")
+            data = yaml.dump(
+                [self.serialize(line) for line in logs]
+            ).encode("utf8")
             archive.write(data)
 
         if not keep:

@@ -93,7 +95,10 @@ class LogArchiver:
 
         return [
             Log(
-                diffusion=rel_obj(log, "diffusion"), sound=rel_obj(log, "sound"), track=rel_obj(log, "track"), **log
+                diffusion=rel_obj(log, "diffusion"),
+                sound=rel_obj(log, "sound"),
+                track=rel_obj(log, "track"),
+                **log
             )
             for log in logs
         ]
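The archive write above dumps serialized log rows to YAML and appends them gzip-compressed. A self-contained sketch of the same pattern (hypothetical path and rows; requires PyYAML):

    import gzip

    import yaml

    path = "/tmp/2023-09-28.log.gz"
    rows = [{"type": "track", "date": "2023-09-28T11:07:00"}]
    with gzip.open(path, "ab") as archive:
        # Appending keeps earlier dumps intact; gzip members concatenate.
        archive.write(yaml.dump(rows).encode("utf8"))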
@@ -50,7 +50,14 @@ class PlaylistImport:
         logger.info("start reading csv " + self.path)
         self.data = list(
             csv.DictReader(
-                (row for row in file if not (row.startswith("#") or row.startswith("\ufeff#")) and row.strip()),
+                (
+                    row
+                    for row in file
+                    if not (
+                        row.startswith("#") or row.startswith("\ufeff#")
+                    )
+                    and row.strip()
+                ),
                 fieldnames=settings.IMPORT_PLAYLIST_CSV_COLS,
                 delimiter=settings.IMPORT_PLAYLIST_CSV_DELIMITER,
                 quotechar=settings.IMPORT_PLAYLIST_CSV_TEXT_QUOTE,

@@ -63,7 +70,11 @@ class PlaylistImport:
         If save is true, save it into the database
         """
         if self.track_kwargs.get("sound") is None:
-            logger.error("related track's sound is missing. Skip import of " + self.path + ".")
+            logger.error(
+                "related track's sound is missing. Skip import of "
+                + self.path
+                + "."
+            )
             return
 
         maps = settings.IMPORT_PLAYLIST_CSV_COLS

@@ -76,11 +87,17 @@ class PlaylistImport:
                 return
             try:
                 timestamp = (
-                    int(line.get("minutes") or 0) * 60 + int(line.get("seconds") or 0) if has_timestamp else None
+                    int(line.get("minutes") or 0) * 60
+                    + int(line.get("seconds") or 0)
+                    if has_timestamp
+                    else None
                 )
 
                 track, created = Track.objects.get_or_create(
-                    title=line.get("title"), artist=line.get("artist"), position=index, **self.track_kwargs
+                    title=line.get("title"),
+                    artist=line.get("artist"),
+                    position=index,
+                    **self.track_kwargs
                 )
                 track.timestamp = timestamp
                 track.info = line.get("info")
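The rewrapped timestamp expression above still evaluates minutes * 60 + seconds before the trailing if/else applies, and "or 0" guards empty CSV cells. A standalone check with a hypothetical parsed row:

    line = {"minutes": "1", "seconds": ""}
    has_timestamp = True
    timestamp = (
        int(line.get("minutes") or 0) * 60
        + int(line.get("seconds") or 0)
        if has_timestamp
        else None
    )
    assert timestamp == 60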
@@ -58,7 +58,14 @@ class SoundFile:
     def episode(self):
         return self.sound and self.sound.episode
 
-    def sync(self, sound=None, program=None, deleted=False, keep_deleted=False, **kwargs):
+    def sync(
+        self,
+        sound=None,
+        program=None,
+        deleted=False,
+        keep_deleted=False,
+        **kwargs
+    ):
         """Update related sound model and save it."""
         if deleted:
             return self._on_delete(self.path, keep_deleted)

@@ -72,7 +79,9 @@ class SoundFile:
         if sound:
             created = False
         else:
-            sound, created = Sound.objects.get_or_create(file=self.sound_path, defaults=kwargs)
+            sound, created = Sound.objects.get_or_create(
+                file=self.sound_path, defaults=kwargs
+            )
 
         self.sound = sound
         self.path_info = self.read_path(self.path)

@@ -163,7 +172,9 @@ class SoundFile:
 
         year, month, day = pi.get("year"), pi.get("month"), pi.get("day")
         if pi.get("hour") is not None:
-            at = tz.datetime(year, month, day, pi.get("hour", 0), pi.get("minute", 0))
+            at = tz.datetime(
+                year, month, day, pi.get("hour", 0), pi.get("minute", 0)
+            )
             at = tz.make_aware(at)
         else:
             at = date(year, month, day)

@@ -199,10 +210,22 @@ class SoundFile:
         if self.info and self.info.tags:
            tags = self.info.tags
            title, artist, album, year = tuple(
-                t and ", ".join(t) for t in (tags.get(k) for k in ("title", "artist", "album", "year"))
+                t and ", ".join(t)
+                for t in (
+                    tags.get(k)
+                    for k in ("title", "artist", "album", "year")
+                )
             )
-            title = title or (self.path_info and self.path_info.get("name")) or os.path.basename(path_noext)
-            info = "{} ({})".format(album, year) if album and year else album or year or ""
+            title = (
+                title
+                or (self.path_info and self.path_info.get("name"))
+                or os.path.basename(path_noext)
+            )
+            info = (
+                "{} ({})".format(album, year)
+                if album and year
+                else album or year or ""
+            )
             track = Track(
                 sound=sound,
                 position=int(tags.get("tracknumber", 0)),
@@ -155,7 +155,10 @@ class MonitorHandler(PatternMatchingEventHandler):
         self.jobs = jobs or {}
         self.sync_kw = sync_kw
 
-        patterns = ["*/{}/*{}".format(self.subdir, ext) for ext in settings.SOUND_FILE_EXT]
+        patterns = [
+            "*/{}/*{}".format(self.subdir, ext)
+            for ext in settings.SOUND_FILE_EXT
+        ]
         super().__init__(patterns=patterns, ignore_directories=True)
 
     def on_created(self, event):

@@ -199,7 +202,11 @@ class SoundMonitor:
 
     def report(self, program=None, component=None, *content, logger=logging):
         content = " ".join([str(c) for c in content])
-        logger.info(f"{program}: {content}" if not component else f"{program}, {component}: {content}")
+        logger.info(
+            f"{program}: {content}"
+            if not component
+            else f"{program}, {component}: {content}"
+        )
 
     def scan(self, logger=logging):
         """For all programs, scan dirs.

@@ -227,7 +234,9 @@ class SoundMonitor:
             dirs.append(program.abspath)
         return dirs
 
-    def scan_for_program(self, program, subdir, logger=logging, **sound_kwargs):
+    def scan_for_program(
+        self, program, subdir, logger=logging, **sound_kwargs
+    ):
         """Scan a given directory that is associated to the given program, and
         update sounds information."""
         logger.info("- %s/", subdir)

@@ -248,7 +257,9 @@ class SoundMonitor:
             sounds.append(sound_file.sound.pk)
 
         # sounds in db & unchecked
-        sounds = Sound.objects.filter(file__startswith=subdir).exclude(pk__in=sounds)
+        sounds = Sound.objects.filter(file__startswith=subdir).exclude(
+            pk__in=sounds
+        )
         self.check_sounds(sounds, program=program)
 
     def check_sounds(self, qs, **sync_kwargs):
@@ -38,7 +38,9 @@ class SoxStats:
             args += ["trim", str(at), str(length)]
         args.append("stats")
 
-        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        p = subprocess.Popen(
+            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
         # sox outputs to stderr (my god WHYYYY)
         out_, out = p.communicate()
         self.values = self.parse(str(out, encoding="utf-8"))

@@ -92,8 +94,16 @@ class SoundStats:
         position += self.sample_length
 
     def check(self, name, min_val, max_val):
-        self.good = [index for index, stats in enumerate(self.stats) if min_val <= stats.get(name) <= max_val]
-        self.bad = [index for index, stats in enumerate(self.stats) if index not in self.good]
+        self.good = [
+            index
+            for index, stats in enumerate(self.stats)
+            if min_val <= stats.get(name) <= max_val
+        ]
+        self.bad = [
+            index
+            for index, stats in enumerate(self.stats)
+            if index not in self.good
+        ]
         self.resume()
 
     def resume(self):

@@ -110,6 +120,10 @@ class SoundStats:
 
     def _view(self, array):
         return [
-            "file" if index == 0 else "sample {} (at {} seconds)".format(index, (index - 1) * self.sample_length)
+            "file"
+            if index == 0
+            else "sample {} (at {} seconds)".format(
+                index, (index - 1) * self.sample_length
+            )
             for index in array
         ]
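The split good/bad comprehensions above rely on Python's chained comparison, so min_val <= stats.get(name) <= max_val is a single range test. A minimal sketch with plain dicts standing in for SoxStats values (key and thresholds are hypothetical):

    stats = [{"RMS lev dB": -18.0}, {"RMS lev dB": -3.0}]
    good = [
        index
        for index, s in enumerate(stats)
        if -24.0 <= s.get("RMS lev dB") <= -6.0
    ]
    bad = [index for index, s in enumerate(stats) if index not in good]
    assert good == [0] and bad == [1]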
@@ -35,7 +35,11 @@ class WeekConverter:
         return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date()
 
     def to_url(self, value):
-        return value if isinstance(value, str) else "{:04d}/{:02d}".format(*value.isocalendar())
+        return (
+            value
+            if isinstance(value, str)
+            else "{:04d}/{:02d}".format(*value.isocalendar())
+        )
 
 
 class DateConverter:

@@ -48,4 +52,10 @@ class DateConverter:
         return datetime.date(int(value[0]), int(value[1]), int(value[2]))
 
     def to_url(self, value):
-        return value if isinstance(value, str) else "{:04d}/{:02d}/{:02d}".format(value.year, value.month, value.day)
+        return (
+            value
+            if isinstance(value, str)
+            else "{:04d}/{:02d}/{:02d}".format(
+                value.year, value.month, value.day
+            )
+        )
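Both converters above keep a string-or-date duality. For the week form, a round-trip sketch of to_python/to_url (plain stdlib; str.format ignores the trailing weekday that isocalendar() returns):

    import datetime

    week = "2023/39"
    d = datetime.datetime.strptime(week + "/1", "%G/%V/%u").date()
    assert d == datetime.date(2023, 9, 25)
    assert "{:04d}/{:02d}".format(*d.isocalendar()) == week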
@@ -19,7 +19,9 @@ class PageFilters(filters.FilterSet):
 
 
 class EpisodeFilters(PageFilters):
-    podcast = filters.BooleanFilter(method="podcast_filter", label=_("Podcast"))
+    podcast = filters.BooleanFilter(
+        method="podcast_filter", label=_("Podcast")
+    )
 
     class Meta:
         model = Episode
@@ -30,7 +30,8 @@ class Command(BaseCommand):
             "--age",
             type=int,
             default=settings.LOGS_ARCHIVES_AGE,
-            help="minimal age in days of logs to archive. Default is " "settings.LOGS_ARCHIVES_AGE",
+            help="minimal age in days of logs to archive. Default is "
+            "settings.LOGS_ARCHIVES_AGE",
         )
         group.add_argument(
             "-k",
@@ -55,11 +55,14 @@ class Command(BaseCommand):
         group.add_argument(
             "--next-month",
             action="store_true",
-            help="set the date to the next month of given date" " (if next month from today",
+            help="set the date to the next month of given date"
+            " (if next month from today",
         )
 
     def handle(self, *args, **options):
-        date = datetime.date(year=options["year"], month=options["month"], day=1)
+        date = datetime.date(
+            year=options["year"], month=options["month"], day=1
+        )
         if options.get("next_month"):
             month = options.get("month")
             date += tz.timedelta(days=28)
@@ -51,13 +51,18 @@ class Command(BaseCommand):
     def handle(self, path, *args, **options):
         # FIXME: absolute/relative path of sounds vs given path
         if options.get("sound"):
-            sound = Sound.objects.filter(file__icontains=options.get("sound")).first()
+            sound = Sound.objects.filter(
+                file__icontains=options.get("sound")
+            ).first()
         else:
             path_, ext = os.path.splitext(path)
             sound = Sound.objects.filter(path__icontains=path_).first()
 
         if not sound:
-            logger.error("no sound found in the database for the path " "{path}".format(path=path))
+            logger.error(
+                "no sound found in the database for the path "
+                "{path}".format(path=path)
+            )
             return
 
         # FIXME: auto get sound.episode if any
@@ -43,7 +43,8 @@ class Command(BaseCommand):
             "-q",
             "--quality_check",
             action="store_true",
-            help="Enable quality check using sound_quality_check on all " "sounds marqued as not good",
+            help="Enable quality check using sound_quality_check on all "
+            "sounds marqued as not good",
         )
         parser.add_argument(
             "-s",

@@ -56,7 +57,8 @@ class Command(BaseCommand):
             "-m",
             "--monitor",
             action="store_true",
-            help="Run in monitor mode, watch for modification in the " "filesystem and react in consequence",
+            help="Run in monitor mode, watch for modification in the "
+            "filesystem and react in consequence",
         )
 
     def handle(self, *args, **options):
@@ -28,7 +28,8 @@ class Command(BaseCommand):
             "--sample_length",
             type=int,
             default=120,
-            help="size of sample to analyse in seconds. If not set (or 0), " "does not analyse by sample",
+            help="size of sample to analyse in seconds. If not set (or 0), "
+            "does not analyse by sample",
         )
         parser.add_argument(
             "-a",

@@ -42,7 +43,8 @@ class Command(BaseCommand):
             "--range",
             type=float,
             nargs=2,
-            help="range of minimal and maximal accepted value such as: " "--range min max",
+            help="range of minimal and maximal accepted value such as: "
+            "--range min max",
         )
         parser.add_argument(
             "-i",

@@ -62,7 +64,10 @@ class Command(BaseCommand):
         raise CommandError("no attribute specified")
 
         # sound analyse and checks
-        self.sounds = [SoundStats(path, options.get("sample_length")) for path in options.get("files")]
+        self.sounds = [
+            SoundStats(path, options.get("sample_length"))
+            for path in options.get("files")
+        ]
         self.bad = []
         self.good = []
         for sound in self.sounds:
@@ -84,7 +84,9 @@ class Migration(migrations.Migration):
             options={
                 "verbose_name": "Diffusion",
                 "verbose_name_plural": "Diffusions",
-                "permissions": (("programming", "edit the diffusion's planification"),),
+                "permissions": (
+                    ("programming", "edit the diffusion's planification"),
+                ),
             },
         ),
         migrations.CreateModel(

@@ -123,16 +125,22 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "content",
-                    ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
+                    ckeditor.fields.RichTextField(
+                        blank=True, null=True, verbose_name="content"
+                    ),
                 ),
                 ("pub_date", models.DateTimeField(blank=True, null=True)),
                 (
                     "featured",
-                    models.BooleanField(default=False, verbose_name="featured"),
+                    models.BooleanField(
+                        default=False, verbose_name="featured"
+                    ),
                 ),
                 (
                     "allow_comments",
-                    models.BooleanField(default=True, verbose_name="allow comments"),
+                    models.BooleanField(
+                        default=True, verbose_name="allow comments"
+                    ),
                 ),
                 (
                     "category",

@@ -450,7 +458,9 @@ class Migration(migrations.Migration):
                 ("name", models.CharField(max_length=64, verbose_name="name")),
                 (
                     "slug",
-                    models.SlugField(max_length=64, unique=True, verbose_name="slug"),
+                    models.SlugField(
+                        max_length=64, unique=True, verbose_name="slug"
+                    ),
                 ),
                 (
                     "path",

@@ -556,7 +566,9 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "content",
-                    ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
+                    ckeditor.fields.RichTextField(
+                        blank=True, null=True, verbose_name="content"
+                    ),
                 ),
                 (
                     "view",

@@ -937,7 +949,9 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "time",
-                    models.TimeField(help_text="start time", verbose_name="time"),
+                    models.TimeField(
+                        help_text="start time", verbose_name="time"
+                    ),
                 ),
                 (
                     "timezone",

@@ -1629,7 +1643,9 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "duration",
-                    models.TimeField(help_text="regular duration", verbose_name="duration"),
+                    models.TimeField(
+                        help_text="regular duration", verbose_name="duration"
+                    ),
                 ),
                 (
                     "frequency",
@@ -33,7 +33,9 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="page",
             name="content",
-            field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
+            field=ckeditor_uploader.fields.RichTextUploadingField(
+                blank=True, null=True, verbose_name="content"
+            ),
         ),
         migrations.AlterField(
             model_name="sound",

@@ -50,6 +52,8 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="staticpage",
             name="content",
-            field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
+            field=ckeditor_uploader.fields.RichTextUploadingField(
+                blank=True, null=True, verbose_name="content"
+            ),
         ),
     ]
@@ -12,7 +12,9 @@ class Migration(migrations.Migration):
         migrations.AlterModelOptions(
             name="diffusion",
             options={
-                "permissions": (("programming", "edit the diffusions' planification"),),
+                "permissions": (
+                    ("programming", "edit the diffusions' planification"),
+                ),
                 "verbose_name": "Diffusion",
                 "verbose_name_plural": "Diffusions",
             },

@@ -20,7 +22,9 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="track",
             name="album",
-            field=models.CharField(default="", max_length=128, verbose_name="album"),
+            field=models.CharField(
+                default="", max_length=128, verbose_name="album"
+            ),
         ),
         migrations.AlterField(
             model_name="schedule",
@@ -12,6 +12,8 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="track",
             name="year",
-            field=models.IntegerField(blank=True, null=True, verbose_name="year"),
+            field=models.IntegerField(
+                blank=True, null=True, verbose_name="year"
+            ),
         ),
     ]
@@ -12,6 +12,8 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="track",
             name="album",
-            field=models.CharField(blank=True, max_length=128, null=True, verbose_name="album"),
+            field=models.CharField(
+                blank=True, max_length=128, null=True, verbose_name="album"
+            ),
         ),
     ]
@@ -30,7 +30,9 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "playlist_editor_sep",
-                    models.CharField(max_length=16, verbose_name="Playlist Editor Separator"),
+                    models.CharField(
+                        max_length=16, verbose_name="Playlist Editor Separator"
+                    ),
                 ),
                 (
                     "user",
@@ -1,675 +0,0 @@
-# Generated by Django 4.2.1 on 2023-09-28 11:07
-
-
-import aircox.models.schedule
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("aircox", "0012_alter_sound_file_alter_station_default"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="schedule",
-            name="timezone",
-            field=models.CharField(
-                choices=[
-                    ("Africa/Mogadishu", "Africa/Mogadishu"),
-                    ("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
-                    ("Asia/Baku", "Asia/Baku"),
-                    ("America/Thunder_Bay", "America/Thunder_Bay"),
-                    ("Etc/GMT-10", "Etc/GMT-10"),
-                    ("UTC", "UTC"),
-                    ("Europe/Uzhgorod", "Europe/Uzhgorod"),
-                    ("ROC", "ROC"),
-                    ("Asia/Seoul", "Asia/Seoul"),
-                    ("Europe/Moscow", "Europe/Moscow"),

[remainder of this deleted, auto-generated file, a list of several hundred further ("<zone>", "<zone>") timezone choices, is cut off in this compare view]
("America/Guyana", "America/Guyana"),
|
|
||||||
("Pacific/Palau", "Pacific/Palau"),
|
|
||||||
("America/Winnipeg", "America/Winnipeg"),
|
|
||||||
("UCT", "UCT"),
|
|
||||||
("Atlantic/Azores", "Atlantic/Azores"),
|
|
||||||
("Mexico/General", "Mexico/General"),
|
|
||||||
("Pacific/Nauru", "Pacific/Nauru"),
|
|
||||||
("Asia/Hebron", "Asia/Hebron"),
|
|
||||||
("Asia/Khandyga", "Asia/Khandyga"),
|
|
||||||
("Australia/Lord_Howe", "Australia/Lord_Howe"),
|
|
||||||
("Portugal", "Portugal"),
|
|
||||||
("Etc/GMT-7", "Etc/GMT-7"),
|
|
||||||
("ROK", "ROK"),
|
|
||||||
("Libya", "Libya"),
|
|
||||||
("Europe/Jersey", "Europe/Jersey"),
|
|
||||||
("Israel", "Israel"),
|
|
||||||
("Pacific/Wake", "Pacific/Wake"),
|
|
||||||
("Africa/Porto-Novo", "Africa/Porto-Novo"),
|
|
||||||
("Africa/Asmera", "Africa/Asmera"),
|
|
||||||
("America/Maceio", "America/Maceio"),
|
|
||||||
("Europe/Sarajevo", "Europe/Sarajevo"),
|
|
||||||
("US/East-Indiana", "US/East-Indiana"),
|
|
||||||
("America/Rainy_River", "America/Rainy_River"),
|
|
||||||
("Europe/Stockholm", "Europe/Stockholm"),
|
|
||||||
("America/Thule", "America/Thule"),
|
|
||||||
("Pacific/Enderbury", "Pacific/Enderbury"),
|
|
||||||
("Pacific/Truk", "Pacific/Truk"),
|
|
||||||
("Pacific/Ponape", "Pacific/Ponape"),
|
|
||||||
("America/St_Barthelemy", "America/St_Barthelemy"),
|
|
||||||
("Turkey", "Turkey"),
|
|
||||||
("Antarctica/Mawson", "Antarctica/Mawson"),
|
|
||||||
("Etc/GMT+0", "Etc/GMT+0"),
|
|
||||||
("Europe/Sofia", "Europe/Sofia"),
|
|
||||||
("Asia/Tbilisi", "Asia/Tbilisi"),
|
|
||||||
("Australia/ACT", "Australia/ACT"),
|
|
||||||
("Canada/Mountain", "Canada/Mountain"),
|
|
||||||
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
|
|
||||||
("Asia/Kashgar", "Asia/Kashgar"),
|
|
||||||
("Europe/Chisinau", "Europe/Chisinau"),
|
|
||||||
("Pacific/Efate", "Pacific/Efate"),
|
|
||||||
("Pacific/Norfolk", "Pacific/Norfolk"),
|
|
||||||
("America/Eirunepe", "America/Eirunepe"),
|
|
||||||
("Europe/Guernsey", "Europe/Guernsey"),
|
|
||||||
("Europe/Vaduz", "Europe/Vaduz"),
|
|
||||||
("US/Samoa", "US/Samoa"),
|
|
||||||
("Africa/Bangui", "Africa/Bangui"),
|
|
||||||
("GMT", "GMT"),
|
|
||||||
("Asia/Omsk", "Asia/Omsk"),
|
|
||||||
("America/Menominee", "America/Menominee"),
|
|
||||||
("America/Matamoros", "America/Matamoros"),
|
|
||||||
("Canada/Newfoundland", "Canada/Newfoundland"),
|
|
||||||
("Asia/Hong_Kong", "Asia/Hong_Kong"),
|
|
||||||
("America/Montserrat", "America/Montserrat"),
|
|
||||||
("Australia/Sydney", "Australia/Sydney"),
|
|
||||||
("Asia/Qyzylorda", "Asia/Qyzylorda"),
|
|
||||||
("Asia/Colombo", "Asia/Colombo"),
|
|
||||||
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
|
|
||||||
("Etc/GMT+1", "Etc/GMT+1"),
|
|
||||||
("Asia/Dacca", "Asia/Dacca"),
|
|
||||||
("America/Louisville", "America/Louisville"),
|
|
||||||
("Asia/Sakhalin", "Asia/Sakhalin"),
|
|
||||||
("Africa/Juba", "Africa/Juba"),
|
|
||||||
("Japan", "Japan"),
|
|
||||||
("America/Inuvik", "America/Inuvik"),
|
|
||||||
("America/Cayman", "America/Cayman"),
|
|
||||||
("Africa/Johannesburg", "Africa/Johannesburg"),
|
|
||||||
("Pacific/Honolulu", "Pacific/Honolulu"),
|
|
||||||
("Asia/Anadyr", "Asia/Anadyr"),
|
|
||||||
("America/Atikokan", "America/Atikokan"),
|
|
||||||
("Asia/Tomsk", "Asia/Tomsk"),
|
|
||||||
("Europe/Zaporozhye", "Europe/Zaporozhye"),
|
|
||||||
("Pacific/Saipan", "Pacific/Saipan"),
|
|
||||||
("America/Virgin", "America/Virgin"),
|
|
||||||
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
|
|
||||||
("Pacific/Easter", "Pacific/Easter"),
|
|
||||||
("Brazil/East", "Brazil/East"),
|
|
||||||
("Africa/Accra", "Africa/Accra"),
|
|
||||||
("America/Mexico_City", "America/Mexico_City"),
|
|
||||||
("Europe/Dublin", "Europe/Dublin"),
|
|
||||||
("America/Chicago", "America/Chicago"),
|
|
||||||
("Etc/GMT+3", "Etc/GMT+3"),
|
|
||||||
("Etc/GMT+5", "Etc/GMT+5"),
|
|
||||||
("America/Tortola", "America/Tortola"),
|
|
||||||
("Europe/Copenhagen", "Europe/Copenhagen"),
|
|
||||||
("Asia/Bahrain", "Asia/Bahrain"),
|
|
||||||
("Asia/Kuching", "Asia/Kuching"),
|
|
||||||
("EST", "EST"),
|
|
||||||
("Atlantic/Faeroe", "Atlantic/Faeroe"),
|
|
||||||
("America/Shiprock", "America/Shiprock"),
|
|
||||||
("Asia/Yerevan", "Asia/Yerevan"),
|
|
||||||
("Etc/GMT+10", "Etc/GMT+10"),
|
|
||||||
],
|
|
||||||
default=aircox.models.schedule.current_timezone_key,
|
|
||||||
help_text="timezone used for the date",
|
|
||||||
max_length=100,
|
|
||||||
verbose_name="timezone",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="station",
|
|
||||||
name="hosts",
|
|
||||||
field=models.TextField(
|
|
||||||
blank=True,
|
|
||||||
help_text="specify one domain per line, without 'http://' prefix",
|
|
||||||
max_length=512,
|
|
||||||
null=True,
|
|
||||||
verbose_name="website's urls",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
|
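The choices snapshot above pairs each IANA zone name with itself; the schedule.py hunk further down declares the field with `choices=[(x, x) for x in zoneinfo.available_timezones()]`, so this list is presumably what makemigrations froze from that expression at generation time. A minimal sketch of the same construction (the constant name is ours, not the project's):

import zoneinfo

# Each available IANA zone keyed to itself, used as both the stored value
# and the display label. available_timezones() returns a set, so the order
# varies between runs unless sorted explicitly.
TIMEZONE_CHOICES = sorted((name, name) for name in zoneinfo.available_timezones())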
@@ -19,7 +19,11 @@ __all__ = ("Diffusion", "DiffusionQuerySet")
 class DiffusionQuerySet(RerunQuerySet):
     def episode(self, episode=None, id=None):
         """Diffusions for this episode."""
-        return self.filter(episode=episode) if id is None else self.filter(episode__id=id)
+        return (
+            self.filter(episode=episode)
+            if id is None
+            else self.filter(episode__id=id)
+        )

     def on_air(self):
         """On air diffusions."""
@@ -36,7 +40,9 @@ class DiffusionQuerySet(RerunQuerySet):
         """Diffusions occuring date."""
         date = date or datetime.date.today()
         start = tz.make_aware(tz.datetime.combine(date, datetime.time()))
-        end = tz.make_aware(tz.datetime.combine(date, datetime.time(23, 59, 59, 999)))
+        end = tz.make_aware(
+            tz.datetime.combine(date, datetime.time(23, 59, 59, 999))
+        )
         # start = tz.get_current_timezone().localize(start)
         # end = tz.get_current_timezone().localize(end)
         qs = self.filter(start__range=(start, end))
@@ -44,7 +50,11 @@ class DiffusionQuerySet(RerunQuerySet):

     def at(self, date, order=True):
         """Return diffusions at specified date or datetime."""
-        return self.now(date, order) if isinstance(date, tz.datetime) else self.date(date, order)
+        return (
+            self.now(date, order)
+            if isinstance(date, tz.datetime)
+            else self.date(date, order)
+        )

     def after(self, date=None):
         """Return a queryset of diffusions that happen after the given date
@@ -132,7 +142,9 @@ class Diffusion(Rerun):
     class Meta:
         verbose_name = _("Diffusion")
         verbose_name_plural = _("Diffusions")
-        permissions = (("programming", _("edit the diffusions' planification")),)
+        permissions = (
+            ("programming", _("edit the diffusions' planification")),
+        )

     def __str__(self):
         str_ = "{episode} - {date}".format(
@@ -190,12 +202,19 @@ class Diffusion(Rerun):
     def is_now(self):
         """True if diffusion is currently running."""
         now = tz.now()
-        return self.type == self.TYPE_ON_AIR and self.start <= now and self.end >= now
+        return (
+            self.type == self.TYPE_ON_AIR
+            and self.start <= now
+            and self.end >= now
+        )

     @property
     def is_live(self):
         """True if Diffusion is live (False if there are sounds files)."""
-        return self.type == self.TYPE_ON_AIR and not self.episode.sound_set.archive().count()
+        return (
+            self.type == self.TYPE_ON_AIR
+            and not self.episode.sound_set.archive().count()
+        )

     def get_playlist(self, **types):
         """Returns sounds as a playlist (list of *local* archive file path).
@@ -205,7 +224,9 @@ class Diffusion(Rerun):
         from .sound import Sound

         return list(
-            self.get_sounds(**types).filter(path__isnull=False, type=Sound.TYPE_ARCHIVE).values_list("path", flat=True)
+            self.get_sounds(**types)
+            .filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
+            .values_list("path", flat=True)
         )

     def get_sounds(self, **types):
@@ -217,7 +238,9 @@ class Diffusion(Rerun):
         from .sound import Sound

         sounds = (self.initial or self).sound_set.order_by("type", "path")
-        _in = [getattr(Sound.Type, name) for name, value in types.items() if value]
+        _in = [
+            getattr(Sound.Type, name) for name, value in types.items() if value
+        ]

         return sounds.filter(type__in=_in)
@@ -239,7 +262,8 @@ class Diffusion(Rerun):
         # .filter(conflict_with=True)
         return (
             Diffusion.objects.filter(
-                Q(start__lt=self.start, end__gt=self.start) | Q(start__gt=self.start, start__lt=self.end)
+                Q(start__lt=self.start, end__gt=self.start)
+                | Q(start__gt=self.start, start__lt=self.end)
             )
             .exclude(pk=self.pk)
             .distinct()
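Split across lines, the conflicts filter reads as a two-sided overlap test: another diffusion conflicts either because it is still running when this one starts, or because it starts strictly inside this one. A plain-Python sketch of the predicate the two Q() objects encode (function and argument names are ours):

from datetime import datetime

def overlaps(a_start: datetime, a_end: datetime, b_start: datetime, b_end: datetime) -> bool:
    # First Q(): b started before a and was still running at a's start.
    # Second Q(): b starts strictly inside a.
    return (b_start < a_start < b_end) or (a_start < b_start < a_end)

Note that both comparisons are strict, so two diffusions starting at exactly the same instant would not be reported as a conflict by this query.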
@@ -24,7 +24,10 @@ class Episode(Page):
         """Return serialized data about podcasts."""
         from ..serializers import PodcastSerializer

-        podcasts = [PodcastSerializer(s).data for s in self.sound_set.public().order_by("type")]
+        podcasts = [
+            PodcastSerializer(s).data
+            for s in self.sound_set.public().order_by("type")
+        ]
         if self.cover:
             options = {"size": (128, 128), "crop": "scale"}
             cover = get_thumbnailer(self.cover).get_thumbnail(options).url
@@ -73,4 +76,6 @@ class Episode(Page):
             if title is None
             else title
         )
-        return super().get_init_kwargs_from(page, title=title, program=page, **kwargs)
+        return super().get_init_kwargs_from(
+            page, title=title, program=page, **kwargs
+        )
@@ -18,7 +18,11 @@ __all__ = ("Log", "LogQuerySet")

 class LogQuerySet(models.QuerySet):
     def station(self, station=None, id=None):
-        return self.filter(station=station) if id is None else self.filter(station_id=id)
+        return (
+            self.filter(station=station)
+            if id is None
+            else self.filter(station_id=id)
+        )

     def date(self, date):
         start = tz.datetime.combine(date, datetime.time())
@@ -28,7 +32,11 @@ class LogQuerySet(models.QuerySet):
         # return self.filter(date__date=date)

     def after(self, date):
-        return self.filter(date__gte=date) if isinstance(date, tz.datetime) else self.filter(date__date__gte=date)
+        return (
+            self.filter(date__gte=date)
+            if isinstance(date, tz.datetime)
+            else self.filter(date__date__gte=date)
+        )

     def on_air(self):
         return self.filter(type=Log.TYPE_ON_AIR)
@@ -25,7 +25,9 @@ __all__ = (
 )


-headline_re = re.compile(r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?")
+headline_re = re.compile(
+    r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?"
+)


 class Category(models.Model):
@@ -52,11 +54,17 @@ class BasePageQuerySet(InheritanceQuerySet):

     def parent(self, parent=None, id=None):
         """Return pages having this parent."""
-        return self.filter(parent=parent) if id is None else self.filter(parent__id=id)
+        return (
+            self.filter(parent=parent)
+            if id is None
+            else self.filter(parent__id=id)
+        )

     def search(self, q, search_content=True):
         if search_content:
-            return self.filter(models.Q(title__icontains=q) | models.Q(content__icontains=q))
+            return self.filter(
+                models.Q(title__icontains=q) | models.Q(content__icontains=q)
+            )
         return self.filter(title__icontains=q)


@@ -81,7 +89,9 @@ class BasePage(models.Model):
         related_name="child_set",
     )
     title = models.CharField(max_length=100)
-    slug = models.SlugField(_("slug"), max_length=120, blank=True, unique=True, db_index=True)
+    slug = models.SlugField(
+        _("slug"), max_length=120, blank=True, unique=True, db_index=True
+    )
     status = models.PositiveSmallIntegerField(
         _("status"),
         default=STATUS_DRAFT,
@@ -122,7 +132,11 @@ class BasePage(models.Model):
         super().save(*args, **kwargs)

     def get_absolute_url(self):
-        return reverse(self.detail_url_name, kwargs={"slug": self.slug}) if self.is_published else "#"
+        return (
+            reverse(self.detail_url_name, kwargs={"slug": self.slug})
+            if self.is_published
+            else "#"
+        )

     @property
     def is_draft(self):
@@ -163,7 +177,9 @@ class BasePage(models.Model):

 class PageQuerySet(BasePageQuerySet):
     def published(self):
-        return self.filter(status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now())
+        return self.filter(
+            status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now()
+        )


 class Page(BasePage):
@@ -177,7 +193,9 @@ class Page(BasePage):
         null=True,
         db_index=True,
     )
-    pub_date = models.DateTimeField(_("publication date"), blank=True, null=True, db_index=True)
+    pub_date = models.DateTimeField(
+        _("publication date"), blank=True, null=True, db_index=True
+    )
     featured = models.BooleanField(
         _("featured"),
         default=False,
@@ -278,7 +296,9 @@ class Comment(models.Model):
 class NavItem(models.Model):
     """Navigation menu items."""

-    station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
+    station = models.ForeignKey(
+        Station, models.CASCADE, verbose_name=_("station")
+    )
     menu = models.SlugField(_("menu"), max_length=24)
     order = models.PositiveSmallIntegerField(_("order"))
     text = models.CharField(_("title"), max_length=64)
@@ -298,7 +318,13 @@ class NavItem(models.Model):
         ordering = ("order", "pk")

     def get_url(self):
-        return self.url if self.url else self.page.get_absolute_url() if self.page else None
+        return (
+            self.url
+            if self.url
+            else self.page.get_absolute_url()
+            if self.page
+            else None
+        )

     def render(self, request, css_class="", active_class=""):
         url = self.get_url()
@@ -310,4 +336,6 @@ class NavItem(models.Model):
         elif not css_class:
             return format_html('<a href="{}">{}</a>', url, self.text)
         else:
-            return format_html('<a href="{}" class="{}">{}</a>', url, css_class, self.text)
+            return format_html(
+                '<a href="{}" class="{}">{}</a>', url, css_class, self.text
+            )
@@ -47,7 +47,9 @@ class Program(Page):
     """

     # explicit foreign key in order to avoid related name clashes
-    station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
+    station = models.ForeignKey(
+        Station, models.CASCADE, verbose_name=_("station")
+    )
     active = models.BooleanField(
         _("active"),
         default=True,
@@ -124,7 +126,12 @@ class Program(Page):
         # TODO: move in signals
         path_ = getattr(self, "__initial_path", None)
         abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
-        if path_ is not None and path_ != self.path and os.path.exists(abspath) and not os.path.exists(self.abspath):
+        if (
+            path_ is not None
+            and path_ != self.path
+            and os.path.exists(abspath)
+            and not os.path.exists(self.abspath)
+        ):
             logger.info(
                 "program #%s's dir changed to %s - update it.",
                 self.id,
@@ -132,7 +139,9 @@ class Program(Page):
             )

             shutil.move(abspath, self.abspath)
-            Sound.objects.filter(path__startswith=path_).update(file=Concat("file", Substr(F("file"), len(path_))))
+            Sound.objects.filter(path__startswith=path_).update(
+                file=Concat("file", Substr(F("file"), len(path_)))
+            )


 class ProgramChildQuerySet(PageQuerySet):
@@ -15,10 +15,18 @@ class RerunQuerySet(models.QuerySet):
     """Queryset for Rerun (sub)classes."""

     def station(self, station=None, id=None):
-        return self.filter(program__station=station) if id is None else self.filter(program__station__id=id)
+        return (
+            self.filter(program__station=station)
+            if id is None
+            else self.filter(program__station__id=id)
+        )

     def program(self, program=None, id=None):
-        return self.filter(program=program) if id is None else self.filter(program__id=id)
+        return (
+            self.filter(program=program)
+            if id is None
+            else self.filter(program__id=id)
+        )

     def rerun(self):
         return self.filter(initial__isnull=False)
@@ -70,8 +78,14 @@ class Rerun(models.Model):

     def clean(self):
         super().clean()
-        if hasattr(self, "start") and self.initial is not None and self.initial.start >= self.start:
-            raise ValidationError({"initial": _("rerun must happen after original")})
+        if (
+            hasattr(self, "start")
+            and self.initial is not None
+            and self.initial.start >= self.start
+        ):
+            raise ValidationError(
+                {"initial": _("rerun must happen after original")}
+            )

     def save_rerun(self):
         self.program = self.initial.program
@@ -14,10 +14,6 @@ from .rerun import Rerun
 __all__ = ("Schedule",)


-def current_timezone_key():
-    return tz.get_current_timezone().key
-
-
 # ? BIG FIXME: self.date is still used as datetime
 class Schedule(Rerun):
     """A Schedule defines time slots of programs' diffusions.
@@ -53,7 +49,7 @@ class Schedule(Rerun):
     )
     timezone = models.CharField(
         _("timezone"),
-        default=current_timezone_key,
+        default=lambda: tz.get_current_timezone().key,
         max_length=100,
         choices=[(x, x) for x in zoneinfo.available_timezones()],
         help_text=_("timezone used for the date"),
@@ -102,7 +98,11 @@ class Schedule(Rerun):
         """Return frequency formated for display."""
         from django.template.defaultfilters import date

-        return self._get_FIELD_display(self._meta.get_field("frequency")).format(day=date(self.date, "l")).capitalize()
+        return (
+            self._get_FIELD_display(self._meta.get_field("frequency"))
+            .format(day=date(self.date, "l"))
+            .capitalize()
+        )

     def normalize(self, date):
         """Return a datetime set to schedule's time for the provided date,
@@ -120,7 +120,9 @@ class Schedule(Rerun):

         # last of the month
         if freq == Schedule.Frequency.last:
-            date = date.replace(day=calendar.monthrange(date.year, date.month)[1])
+            date = date.replace(
+                day=calendar.monthrange(date.year, date.month)[1]
+            )
             date_wday = date.weekday()

             # end of month before the wanted weekday: move one week back
@@ -132,7 +134,9 @@ class Schedule(Rerun):
         # move to the first day of the month that matches the schedule's
         # weekday. Check on SO#3284452 for the formula
         date_wday, month = date.weekday(), date.month
-        date += tz.timedelta(days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday)
+        date += tz.timedelta(
+            days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday
+        )

         if freq == Schedule.Frequency.one_on_two:
             # - adjust date with modulo 14 (= 2 weeks in days)
@@ -141,7 +145,11 @@ class Schedule(Rerun):
             date += tz.timedelta(days=7)
             dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3))
         else:
-            dates = (date + tz.timedelta(days=7 * week) for week in range(0, 5) if freq & (0b1 << week))
+            dates = (
+                date + tz.timedelta(days=7 * week)
+                for week in range(0, 5)
+                if freq & (0b1 << week)
+            )

         return [self.normalize(date) for date in dates if date.month == month]

@@ -154,22 +162,29 @@ class Schedule(Rerun):
         from .diffusion import Diffusion
         from .episode import Episode

-        if self.initial is not None or self.frequency == Schedule.Frequency.ponctual:
+        if (
+            self.initial is not None
+            or self.frequency == Schedule.Frequency.ponctual
+        ):
             return [], []

         # dates for self and reruns as (date, initial)
-        reruns = [(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()]
+        reruns = [
+            (rerun, rerun.date - self.date) for rerun in self.rerun_set.all()
+        ]

         dates = {date: None for date in self.dates_of_month(date)}
         dates.update(
-            (rerun.normalize(date.date() + delta), date) for date in list(dates.keys()) for rerun, delta in reruns
+            (rerun.normalize(date.date() + delta), date)
+            for date in list(dates.keys())
+            for rerun, delta in reruns
         )

         # remove dates corresponding to existing diffusions
         saved = set(
-            Diffusion.objects.filter(start__in=dates.keys(), program=self.program, schedule=self).values_list(
-                "start", flat=True
-            )
+            Diffusion.objects.filter(
+                start__in=dates.keys(), program=self.program, schedule=self
+            ).values_list("start", flat=True)
         )

         # make diffs
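One caveat with the `default=lambda: tz.get_current_timezone().key` change: Django's migration serializer cannot serialize lambdas, so the next `makemigrations` run that touches this field should fail, and the migration earlier in this compare still references the named `aircox.models.schedule.current_timezone_key`. A serializable default stays a module-level callable; a sketch of the pattern being removed here:

from django.utils import timezone as tz

def current_timezone_key():
    # Module-level functions are importable by dotted path, which is what
    # the migration writer records; lambdas have no such path.
    return tz.get_current_timezone().key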
@@ -32,7 +32,9 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
         group, created = Group.objects.get_or_create(name=group_name)
         if created and permissions:
             for codename in permissions:
-                permission = Permission.objects.filter(codename=codename).first()
+                permission = Permission.objects.filter(
+                    codename=codename
+                ).first()
                 if permission:
                     group.permissions.add(permission)
             group.save()
@@ -42,7 +44,9 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
 @receiver(signals.post_save, sender=Page)
 def page_post_save(sender, instance, created, *args, **kwargs):
     if not created and instance.cover:
-        Page.objects.filter(parent=instance, cover__isnull=True).update(cover=instance.cover)
+        Page.objects.filter(parent=instance, cover__isnull=True).update(
+            cover=instance.cover
+        )


 @receiver(signals.post_save, sender=Program)
@@ -50,11 +54,15 @@ def program_post_save(sender, instance, created, *args, **kwargs):
     """Clean-up later diffusions when a program becomes inactive."""
     if not instance.active:
         Diffusion.objects.program(instance).after(tz.now()).delete()
-        Episode.objects.parent(instance).filter(diffusion__isnull=True).delete()
+        Episode.objects.parent(instance).filter(
+            diffusion__isnull=True
+        ).delete()

     cover = getattr(instance, "__initial_cover", None)
     if cover is None and instance.cover is not None:
-        Episode.objects.parent(instance).filter(cover__isnull=True).update(cover=instance.cover)
+        Episode.objects.parent(instance).filter(cover__isnull=True).update(
+            cover=instance.cover
+        )


 @receiver(signals.pre_save, sender=Schedule)
@@ -69,7 +77,8 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
     corresponding diffusions accordingly."""
     initial = getattr(instance, "_initial", None)
     if not initial or (
-        (instance.time, instance.duration, instance.timezone) == (initial.time, initial.duration, initial.timezone)
+        (instance.time, instance.duration, instance.timezone)
+        == (initial.time, initial.duration, initial.timezone)
     ):
         return

@@ -88,9 +97,13 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
 def schedule_pre_delete(sender, instance, *args, **kwargs):
     """Delete later corresponding diffusion to a changed schedule."""
     Diffusion.objects.filter(schedule=instance).after(tz.now()).delete()
-    Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()
+    Episode.objects.filter(
+        diffusion__isnull=True, content__isnull=True, sound__isnull=True
+    ).delete()


 @receiver(signals.post_delete, sender=Diffusion)
 def diffusion_post_delete(sender, instance, *args, **kwargs):
-    Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()
+    Episode.objects.filter(
+        diffusion__isnull=True, content__isnull=True, sound__isnull=True
+    ).delete()
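`schedule_pre_delete` and `diffusion_post_delete` now issue the same orphan cleanup: delete episodes that no longer have any diffusion, content, or sound attached. If a third caller ever appears, the criteria could live in one helper; a sketch (the helper name is ours, not the project's):

def _delete_orphan_episodes():
    # An episode is orphaned once nothing references it anymore:
    # no diffusion, no content, no sound.
    Episode.objects.filter(
        diffusion__isnull=True, content__isnull=True, sound__isnull=True
    ).delete()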
@@ -50,7 +50,9 @@ class SoundQuerySet(models.QuerySet):
     def path(self, paths):
         if isinstance(paths, str):
             return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", ""))
-        return self.filter(file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths))
+        return self.filter(
+            file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths)
+        )

     def playlist(self, archive=True, order_by=True):
         """Return files absolute paths as a flat list (exclude sound without
@@ -64,7 +66,9 @@ class SoundQuerySet(models.QuerySet):
             self = self.order_by("file")
         return [
             os.path.join(conf.MEDIA_ROOT, file)
-            for file in self.filter(file__isnull=False).values_list("file", flat=True)
+            for file in self.filter(file__isnull=False).values_list(
+                "file", flat=True
+            )
         ]

     def search(self, query):
@@ -118,7 +122,11 @@ class Sound(models.Model):
     )

     def _upload_to(self, filename):
-        subdir = settings.SOUND_ARCHIVES_SUBDIR if self.type == self.TYPE_ARCHIVE else settings.SOUND_EXCERPTS_SUBDIR
+        subdir = (
+            settings.SOUND_ARCHIVES_SUBDIR
+            if self.type == self.TYPE_ARCHIVE
+            else settings.SOUND_EXCERPTS_SUBDIR
+        )
         return os.path.join(self.program.path, subdir, filename)

     file = models.FileField(
@@ -153,7 +161,10 @@ class Sound(models.Model):
     )
     is_downloadable = models.BooleanField(
         _("downloadable"),
-        help_text=_("whether it can be publicly downloaded by visitors (sound must be " "public)"),
+        help_text=_(
+            "whether it can be publicly downloaded by visitors (sound must be "
+            "public)"
+        ),
         default=False,
     )

@@ -213,7 +224,9 @@ class Sound(models.Model):
         if self.type == self.TYPE_REMOVED and self.program:
             changed = True
             self.type = (
-                self.TYPE_ARCHIVE if self.file.name.startswith(self.program.archives_path) else self.TYPE_EXCERPT
+                self.TYPE_ARCHIVE
+                if self.file.name.startswith(self.program.archives_path)
+                else self.TYPE_EXCERPT
             )

         # check mtime -> reset quality if changed (assume file changed)
@@ -286,7 +299,8 @@ class Track(models.Model):
         blank=True,
         null=True,
         help_text=_(
-            "additional informations about this track, such as " "the version, if is it a remix, features, etc."
+            "additional informations about this track, such as "
+            "the version, if is it a remix, features, etc."
         ),
     )

@@ -296,9 +310,13 @@ class Track(models.Model):
         ordering = ("position",)

     def __str__(self):
-        return "{self.artist} -- {self.title} -- {self.position}".format(self=self)
+        return "{self.artist} -- {self.title} -- {self.position}".format(
+            self=self
+        )

     def save(self, *args, **kwargs):
-        if (self.sound is None and self.episode is None) or (self.sound is not None and self.episode is not None):
+        if (self.sound is None and self.episode is None) or (
+            self.sound is not None and self.episode is not None
+        ):
             raise ValueError("sound XOR episode is required")
         super().save(*args, **kwargs)
@@ -67,7 +67,9 @@ class Station(models.Model):
         max_length=2048,
         null=True,
         blank=True,
-        help_text=_("Audio streams urls used by station's player. One url " "a line."),
+        help_text=_(
+            "Audio streams urls used by station's player. One url " "a line."
+        ),
     )
     default_cover = FilerImageField(
         on_delete=models.SET_NULL,
@@ -151,10 +153,16 @@ class Port(models.Model):
         (TYPE_FILE, _("file")),
     )

-    station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
-    direction = models.SmallIntegerField(_("direction"), choices=DIRECTION_CHOICES)
+    station = models.ForeignKey(
+        Station, models.CASCADE, verbose_name=_("station")
+    )
+    direction = models.SmallIntegerField(
+        _("direction"), choices=DIRECTION_CHOICES
+    )
     type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
-    active = models.BooleanField(_("active"), default=True, help_text=_("this port is active"))
+    active = models.BooleanField(
+        _("active"), default=True, help_text=_("this port is active")
+    )
     settings = models.TextField(
         _("port settings"),
         help_text=_(
@@ -185,6 +193,8 @@ class Port(models.Model):

     def save(self, *args, **kwargs):
         if not self.is_valid_type():
-            raise ValueError("port type is not allowed with the given port direction")
+            raise ValueError(
+                "port type is not allowed with the given port direction"
+            )

         return super().save(*args, **kwargs)
@@ -15,4 +15,6 @@ class UserSettings(models.Model):
         related_name="aircox_settings",
     )
     playlist_editor_columns = models.JSONField(_("Playlist Editor Columns"))
-    playlist_editor_sep = models.CharField(_("Playlist Editor Separator"), max_length=16)
+    playlist_editor_sep = models.CharField(
+        _("Playlist Editor Separator"), max_length=16
+    )
@@ -34,7 +34,9 @@ def do_has_perm(context, obj, perm, user=None):
     """Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``"""
     if user is None:
         user = context["request"].user
-    return user.has_perm("{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name))
+    return user.has_perm(
+        "{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name)
+    )


 @register.filter(name="is_diffusion")
@@ -67,7 +69,10 @@ def do_player_live_attr(context):
 def do_nav_items(context, menu, **kwargs):
     """Render navigation items for the provided menu name."""
     station, request = context["station"], context["request"]
-    return [(item, item.render(request, **kwargs)) for item in station.navitem_set.filter(menu=menu)]
+    return [
+        (item, item.render(request, **kwargs))
+        for item in station.navitem_set.filter(menu=menu)
+    ]


 @register.simple_tag(name="update_query")
@@ -85,4 +90,10 @@ def do_update_query(obj, **kwargs):
 def do_verbose_name(obj, plural=False):
     """Return model's verbose name (singular or plural) or `obj` if it is a
     string (can act for default values)."""
-    return obj if isinstance(obj, str) else obj._meta.verbose_name_plural if plural else obj._meta.verbose_name
+    return (
+        obj
+        if isinstance(obj, str)
+        else obj._meta.verbose_name_plural
+        if plural
+        else obj._meta.verbose_name
+    )
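The re-wrapped `do_verbose_name` is a nested conditional expression; the new layout makes the grouping explicit, i.e. `obj if isinstance(obj, str) else (plural_name if plural else name)`. An equivalent statement-form spelling for comparison (a sketch, not the project's code):

def verbose_name(obj, plural=False):
    # Same logic as the template filter above, written as statements.
    if isinstance(obj, str):
        return obj
    if plural:
        return obj._meta.verbose_name_plural
    return obj._meta.verbose_name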
@@ -51,7 +51,9 @@ class WrapperMixin:
     ns = None
     ns_attr = None

-    def __init__(self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs):
+    def __init__(
+        self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs
+    ):
         self.target = target
         if ns:
             self.inject(ns, ns_attr)
@@ -85,7 +87,10 @@ class WrapperMixin:
         if self.target is ns_target:
             return
         elif self.target is not None and self.ns:
-            raise RuntimeError("self target already injected. It must be " "`release` before `inject`.")
+            raise RuntimeError(
+                "self target already injected. It must be "
+                "`release` before `inject`."
+            )

         self.target = ns_target
         setattr(ns, ns_attr, self.interface)
@@ -140,7 +145,9 @@ class SpoofMixin:
         traces = self.traces[name]
         if not isinstance(traces, list):
             traces = (traces,)
-        return tuple(self._get_trace(trace, args=args, kw=kw) for trace in traces)
+        return tuple(
+            self._get_trace(trace, args=args, kw=kw) for trace in traces
+        )

     def _get_trace(self, trace, args=False, kw=False):
         if (args and kw) or (not args and not kw):
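The test suites below lean on this wrapper's inject/release cycle: `Interface.inject(module, attr, funcs)` swaps a module attribute for a tracing spy (guarded by the RuntimeError above when a target is already injected), and `_irelease()` restores the original. A usage sketch inferred from the fixtures further down, not from Interface's own definition:

# Replace sound_stats.subprocess with a spy whose Popen returns a canned
# process, run the code under test, then restore the real module attribute.
subprocess = Interface.inject(
    sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
)
try:
    ...  # exercise code that shells out via sound_stats.subprocess
finally:
    subprocess._irelease()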
@@ -48,11 +48,15 @@ class TestDateFieldFilter:
     def test___init__(self, date_filter):
         assert date_filter.date_params == {"pub_date__lte": tomorrow}

-        date_filter.links = [(str(link[0]), *list(link[1:])) for link in date_filter.links]
+        date_filter.links = [
+            (str(link[0]), *list(link[1:])) for link in date_filter.links
+        ]
         assert date_filter.links == [
             (str(_("None")), "pub_date__isnull", None, "1"),
             (str(_("Exact")), "pub_date__date", date_filter.input_type),
             (str(_("Since")), "pub_date__gte", date_filter.input_type),
             (str(_("Until")), "pub_date__lte", date_filter.input_type),
         ]
-        assert date_filter.query_attrs == {"pub_date__gte": today.strftime("%Y-%m-%d")}
+        assert date_filter.query_attrs == {
+            "pub_date__gte": today.strftime("%Y-%m-%d")
+        }
@@ -30,7 +30,9 @@ def staff_user():

 @pytest.fixture
 def logger():
-    logger = Interface(logging, {"info": None, "debug": None, "error": None, "warning": None})
+    logger = Interface(
+        logging, {"info": None, "debug": None, "error": None, "warning": None}
+    )
     return logger


@@ -121,7 +123,10 @@ def schedules(sched_initials, sched_reruns):

 @pytest.fixture
 def episodes(programs):
-    return [baker.make(models.Episode, parent=program, cover=None) for program in programs]
+    return [
+        baker.make(models.Episode, parent=program, cover=None)
+        for program in programs
+    ]


 @pytest.fixture
@@ -153,7 +158,15 @@ def sound(program):

 @pytest.fixture
 def tracks(episode, sound):
-    items = [baker.prepare(models.Track, episode=episode, position=i, timestamp=i * 60) for i in range(0, 3)]
-    items += [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 3)]
+    items = [
+        baker.prepare(
+            models.Track, episode=episode, position=i, timestamp=i * 60
+        )
+        for i in range(0, 3)
+    ]
+    items += [
+        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
+        for i in range(0, 3)
+    ]
     models.Track.objects.bulk_create(items)
     return items
@@ -21,21 +21,30 @@ class TestDiffusion:
     def test_update(self, monitor, schedules, sched_initials, logger):
         monitor.update()

-        diffusions = models.Diffusion.objects.filter(schedule__in=sched_initials)
+        diffusions = models.Diffusion.objects.filter(
+            schedule__in=sched_initials
+        )
         by_date = {}
         for diff in diffusions:
             assert diff.episode_id
-            by_date.setdefault(diff.schedule_id, set()).add((diff.start, diff.end))
+            by_date.setdefault(diff.schedule_id, set()).add(
+                (diff.start, diff.end)
+            )

         for schedule in sched_initials:
             if schedule.pk not in by_date:
                 continue
             _, items = schedule.diffusions_of_month(now)
-            assert all((item.start, item.end) in by_date[schedule.pk] for item in items)
+            assert all(
+                (item.start, item.end) in by_date[schedule.pk]
+                for item in items
+            )

     @pytest.mark.django_db
     def test_clean(self, monitor, episode):
-        start = tz.make_aware(datetime.combine(monitor.date - timedelta(days=1), time(10, 20)))
+        start = tz.make_aware(
+            datetime.combine(monitor.date - timedelta(days=1), time(10, 20))
+        )
         diff = models.Diffusion(
             type=models.Diffusion.TYPE_UNCONFIRMED,
             episode=episode,
@@ -79,12 +79,16 @@ class TestLogArchiver:
     def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs):
         # before logs are deleted from db, get data
         sorted = archiver.sort_logs(logs_qs)
-        paths = {archiver.get_path(station, date) for station, date in sorted.keys()}
+        paths = {
+            archiver.get_path(station, date) for station, date in sorted.keys()
+        }

         count = archiver.archive(logs_qs, keep=False)
         assert count == len(logs)
         assert not logs_qs.count()
-        assert all(path in paths for path, *_ in gzip._traces("open", args=True))
+        assert all(
+            path in paths for path, *_ in gzip._traces("open", args=True)
+        )

         results = archiver.load_file("dummy path")
         assert results
@@ -100,4 +104,7 @@ class TestLogArchiver:

         assert sorted
         for (station, date), logs in sorted.items():
-            assert all(log.station == station and log.date.date() == date for log in logs)
+            assert all(
+                log.station == station and log.date.date() == date
+                for log in logs
+            )
@@ -53,7 +53,13 @@ def path_infos():

 @pytest.fixture
 def sound_files(path_infos):
-    return {k: r for k, r in ((path, SoundFile(conf.MEDIA_ROOT + "/" + path)) for path in path_infos.keys())}
+    return {
+        k: r
+        for k, r in (
+            (path, SoundFile(conf.MEDIA_ROOT + "/" + path))
+            for path in path_infos.keys()
+        )
+    }


 def test_sound_path(sound_files):
@@ -72,9 +78,17 @@ def test_read_path(path_infos, sound_files):

 def _setup_diff(program, info):
     episode = models.Episode(program=program, title="test-episode")
-    at = tz.datetime(**{k: info[k] for k in ("year", "month", "day", "hour", "minute") if info.get(k)})
+    at = tz.datetime(
+        **{
+            k: info[k]
+            for k in ("year", "month", "day", "hour", "minute")
+            if info.get(k)
+        }
+    )
     at = tz.make_aware(at)
-    diff = models.Diffusion(episode=episode, start=at, end=at + timedelta(hours=1))
+    diff = models.Diffusion(
+        episode=episode, start=at, end=at + timedelta(hours=1)
+    )
     episode.save()
     diff.save()
     return diff
@@ -92,7 +92,9 @@ class TestTask:
         task.log_msg = "--{event.src_path}--"
         sound_file = task(event, logger=logger, kw=13)
         assert sound_file._trace("sync", kw=True) == {"kw": 13}
-        assert logger._trace("info", args=True) == (task.log_msg.format(event=event),)
+        assert logger._trace("info", args=True) == (
+            task.log_msg.format(event=event),
+        )


 class TestDeleteTask:
@@ -123,7 +125,9 @@ class TestModifiedTask:
         datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now})

         def sleep(imeta, n):
-            datetime._imeta.funcs["now"] = modified_task.timestamp + tz.timedelta(hours=10)
+            datetime._imeta.funcs[
+                "now"
+            ] = modified_task.timestamp + tz.timedelta(hours=10)

         time = Interface.inject(sound_monitor, "time", {"sleep": sleep})
         modified_task.wait()
@@ -171,7 +175,9 @@ class TestMonitorHandler:

     def test__submit(self, monitor_handler, event):
         handler = Interface()
-        handler, created = monitor_handler._submit(handler, event, "prefix", kw=13)
+        handler, created = monitor_handler._submit(
+            handler, event, "prefix", kw=13
+        )
         assert created
         assert handler.future._trace("add_done_callback")
         assert monitor_handler.pool._trace("submit") == (
@@ -186,7 +192,9 @@ class TestMonitorHandler:
 @pytest.fixture
 def monitor_interfaces():
     items = {
-        "atexit": Interface.inject(sound_monitor, "atexit", {"register": None, "leave": None}),
+        "atexit": Interface.inject(
+            sound_monitor, "atexit", {"register": None, "leave": None}
+        ),
         "observer": Interface.inject(
             sound_monitor,
             "Observer",
@@ -38,8 +38,12 @@ sox_values = {

 @pytest.fixture
 def sox_interfaces():
-    process = Interface(None, {"communicate": ("", sox_output.encode("utf-8"))})
-    subprocess = Interface.inject(sound_stats, "subprocess", {"Popen": lambda *_, **__: process})
+    process = Interface(
+        None, {"communicate": ("", sox_output.encode("utf-8"))}
+    )
+    subprocess = Interface.inject(
+        sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
+    )
     yield {"process": process, "subprocess": subprocess}
     subprocess._irelease()
@@ -106,7 +110,9 @@ class TestSoundStats:

     def test_check(self, stats):
         good = [{"val": i} for i in range(0, 11)]
-        bad = [{"val": i} for i in range(-10, 0)] + [{"val": i} for i in range(11, 20)]
+        bad = [{"val": i} for i in range(-10, 0)] + [
+            {"val": i} for i in range(11, 20)
+        ]
         stats.stats = good + bad
         calls = {}
         stats.resume = lambda *_: calls.setdefault("resume", True)
@@ -12,7 +12,11 @@ class TestEpisode:

     @pytest.mark.django_db
     def test_podcasts(self, episode, podcasts):
-        podcasts = {podcast.pk: podcast for podcast in podcasts if podcast.episode == episode}
+        podcasts = {
+            podcast.pk: podcast
+            for podcast in podcasts
+            if podcast.episode == episode
+        }
         for data in episode.podcasts:
             podcast = podcasts[data["pk"]]
             assert data["name"] == podcast.name
@@ -12,28 +12,44 @@ class TestRerunQuerySet:
     @pytest.mark.django_db
     def test_station_by_obj(self, stations, schedules):
         for station in stations:
-            queryset = Schedule.objects.station(station).distinct().values_list("program__station", flat=True)
+            queryset = (
+                Schedule.objects.station(station)
+                .distinct()
+                .values_list("program__station", flat=True)
+            )
             assert queryset.count() == 1
             assert queryset.first() == station.pk

     @pytest.mark.django_db
     def test_station_by_id(self, stations, schedules):
         for station in stations:
-            queryset = Schedule.objects.station(id=station.pk).distinct().values_list("program__station", flat=True)
+            queryset = (
+                Schedule.objects.station(id=station.pk)
+                .distinct()
+                .values_list("program__station", flat=True)
+            )
             assert queryset.count() == 1
             assert queryset.first() == station.pk

     @pytest.mark.django_db
     def test_program_by_obj(self, programs, schedules):
         for program in programs:
-            queryset = Schedule.objects.program(program).distinct().values_list("program", flat=True)
+            queryset = (
+                Schedule.objects.program(program)
+                .distinct()
+                .values_list("program", flat=True)
+            )
             assert queryset.count() == 1
             assert queryset.first() == program.pk

     @pytest.mark.django_db
     def test_program_by_id(self, programs, schedules):
         for program in programs:
-            queryset = Schedule.objects.program(id=program.pk).distinct().values_list("program", flat=True)
+            queryset = (
+                Schedule.objects.program(id=program.pk)
+                .distinct()
+                .values_list("program", flat=True)
+            )
             assert queryset.count() == 1
             assert queryset.first() == program.pk

@@ -44,7 +60,11 @@ class TestRerunQuerySet:

     @pytest.mark.django_db
     def test_initial(self, schedules):
-        queryset = Schedule.objects.initial().distinct().values_list("initial", flat=True)
+        queryset = (
+            Schedule.objects.initial()
+            .distinct()
+            .values_list("initial", flat=True)
+        )
         assert queryset.count() == 1
         assert queryset.first() is None
@@ -49,7 +49,9 @@ class TestSchedule:

     @pytest.mark.django_db
     def test_dates_of_month_ponctual(self):
-        schedule = baker.prepare(Schedule, frequency=Schedule.Frequency.ponctual)
+        schedule = baker.prepare(
+            Schedule, frequency=Schedule.Frequency.ponctual
+        )
         at = schedule.date + relativedelta(months=4)
         assert schedule.dates_of_month(at) == []

@@ -57,7 +59,9 @@ class TestSchedule:
     @pytest.mark.parametrize("months", range(0, 25, 4))
     @pytest.mark.parametrize("hour", range(0, 24, 4))
     def test_dates_of_month_last(self, months, hour):
-        schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last)
+        schedule = baker.prepare(
+            Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last
+        )
         at = schedule.date + relativedelta(months=months)
         datetimes = schedule.dates_of_month(at)
         assert len(datetimes) == 1

@@ -69,7 +73,9 @@ class TestSchedule:
         at = date(at.year, at.month, month_info[1])
         if at.weekday() < schedule.date.weekday():
             at -= timedelta(days=7)
-        at += timedelta(days=schedule.date.weekday()) - timedelta(days=at.weekday())
+        at += timedelta(days=schedule.date.weekday()) - timedelta(
+            days=at.weekday()
+        )
         assert dt.date() == at

     # since the same method is used for first, second, etc. frequencies

@@ -78,7 +84,9 @@ class TestSchedule:
     @pytest.mark.parametrize("months", range(0, 25, 4))
     @pytest.mark.parametrize("hour", range(0, 24, 4))
     def test_dates_of_month_every(self, months, hour):
-        schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every)
+        schedule = baker.prepare(
+            Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every
+        )
         at = schedule.date + relativedelta(months=months)
         datetimes = schedule.dates_of_month(at)
         last = None

@@ -120,4 +128,8 @@ class TestSchedule:
         episodes, diffusions = schedule.diffusions_of_month(at)

         assert all(r.date in dates for r in episodes)
-        assert all((not r.initial or r.date in dates) and r.type == Diffusion.TYPE_ON_AIR for r in diffusions)
+        assert all(
+            (not r.initial or r.date in dates)
+            and r.type == Diffusion.TYPE_ON_AIR
+            for r in diffusions
+        )
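A worked sketch of the weekday arithmetic used in the hunk above; the dates are chosen here for illustration only:

    from datetime import date, timedelta

    sched_day = date(2023, 1, 2)   # a Monday, weekday() == 0
    at = date(2023, 1, 12)         # a Thursday, weekday() == 3
    # shift `at` onto the schedule's weekday within the same week
    at += timedelta(days=sched_day.weekday()) - timedelta(days=at.weekday())
    assert at == date(2023, 1, 9) and at.weekday() == sched_day.weekday()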
@@ -39,7 +39,8 @@ def test_user_default_groups():
     groups = Group.objects.filter(name__in=default_groups.keys())
     assert groups.exists()
     assert all(
-        set(group.permissions.all().values_list("codename", flat=True)) == set(default_groups[group.name])
+        set(group.permissions.all().values_list("codename", flat=True))
+        == set(default_groups[group.name])
         for group in groups
     )
     user_groups = set(user.groups.all().values_list("name", flat=True))

@@ -103,5 +104,7 @@ def test_schedule_pre_delete(sched, eps_diffs):
 @pytest.mark.django_db
 def test_diffusion_post_delete(eps_diffs):
     eps = eps_diffs[0][0]
-    Diffusion.objects.filter(id__in=[r.id for r in eps.diffusion_set.all()]).delete()
+    Diffusion.objects.filter(
+        id__in=[r.id for r in eps.diffusion_set.all()]
+    ).delete()
     assert Episode.objects.filter(id=eps.id).first() is None
@@ -29,7 +29,9 @@ def test_date_or_default():

 def test_to_timedelta():
     val = datetime(2023, 1, 10, hour=20, minute=10, second=1)
-    assert utils.to_timedelta(val) == timedelta(hours=20, minutes=10, seconds=1)
+    assert utils.to_timedelta(val) == timedelta(
+        hours=20, minutes=10, seconds=1
+    )


 def test_to_seconds():
@@ -23,12 +23,16 @@ class FakeView:

 @pytest.fixture
 def published_pages():
-    return baker.make(models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3)
+    return baker.make(
+        models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3
+    )


 @pytest.fixture
 def unpublished_pages():
-    return baker.make(models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3)
+    return baker.make(
+        models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3
+    )


 @pytest.fixture

@@ -96,7 +96,9 @@ class TestParentMixin:
     @pytest.mark.django_db
     def test_get_parent_raises_404(self, parent_mixin):
         with pytest.raises(Http404):
-            parent_mixin.get_parent(self.req, parent_slug="parent-invalid-slug")
+            parent_mixin.get_parent(
+                self.req, parent_slug="parent-invalid-slug"
+            )

     def test_get_parent_not_parent_model(self, parent_mixin):
         parent_mixin.parent_model = None
@@ -29,7 +29,9 @@ api = [
     path("logs/", views.LogListAPIView.as_view(), name="live"),
     path(
         "user/settings/",
-        viewsets.UserSettingsViewSet.as_view({"get": "retrieve", "post": "update", "put": "update"}),
+        viewsets.UserSettingsViewSet.as_view(
+            {"get": "retrieve", "post": "update", "put": "update"}
+        ),
         name="user-settings",
     ),
 ] + router.urls
@@ -72,7 +72,9 @@ def date_or_default(date, into=None):
 def to_timedelta(time):
     """Transform a datetime or a time instance to a timedelta, only using time
     info."""
-    return datetime.timedelta(hours=time.hour, minutes=time.minute, seconds=time.second)
+    return datetime.timedelta(
+        hours=time.hour, minutes=time.minute, seconds=time.second
+    )


 def to_seconds(time):
@@ -37,5 +37,9 @@ class StatisticsView(AdminMixin, LogListView, ListView):

     def get_object_list(self, logs, full=False):
         if not logs.exists():
-            logs = LogArchiver().load(self.station, self.date) if self.date else []
+            logs = (
+                LogArchiver().load(self.station, self.date)
+                if self.date
+                else []
+            )
         return super().get_object_list(logs, True)
@@ -9,7 +9,11 @@ class ArticleDetailView(PageDetailView):
     model = Article

     def get_sidebar_queryset(self):
-        qs = Article.objects.published().select_related("cover").order_by("-pub_date")
+        qs = (
+            Article.objects.published()
+            .select_related("cover")
+            .order_by("-pub_date")
+        )
         return qs
@@ -24,7 +24,9 @@ class BaseView(TemplateResponseMixin, ContextMixin):

     def get_sidebar_queryset(self):
         """Return a queryset of items to render on the side nav."""
-        return Page.objects.select_subclasses().published().order_by("-pub_date")
+        return (
+            Page.objects.select_subclasses().published().order_by("-pub_date")
+        )

     def get_sidebar_url(self):
         return reverse("page-list")

@@ -41,14 +43,20 @@ class BaseView(TemplateResponseMixin, ContextMixin):
         if has_sidebar and "sidebar_object_list" not in kwargs:
             sidebar_object_list = self.get_sidebar_queryset()
             if sidebar_object_list is not None:
-                kwargs["sidebar_object_list"] = sidebar_object_list[: self.list_count]
+                kwargs["sidebar_object_list"] = sidebar_object_list[
+                    : self.list_count
+                ]
                 kwargs["sidebar_list_url"] = self.get_sidebar_url()

         if "audio_streams" not in kwargs:
             kwargs["audio_streams"] = self.station.streams

         if "model" not in kwargs:
-            model = getattr(self, "model", None) or hasattr(self, "object") and type(self.object)
+            model = (
+                getattr(self, "model", None)
+                or hasattr(self, "object")
+                and type(self.object)
+            )
             kwargs["model"] = model

         return super().get_context_data(**kwargs)
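In the model fallback above, note that `and` binds tighter than `or`: the expression resolves to the view's `model` attribute when set, and otherwise to `type(self.object)` only when the view carries an `object`.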
@@ -30,7 +30,9 @@ class HomeView(BaseView, ListView):
         current_diff = Diffusion.objects.on_air().now(now).first()
         next_diffs = Diffusion.objects.on_air().after(now)
         if current_diff:
-            diffs = [current_diff] + list(next_diffs.exclude(pk=current_diff.pk)[:2])
+            diffs = [current_diff] + list(
+                next_diffs.exclude(pk=current_diff.pk)[:2]
+            )
         else:
             diffs = next_diffs[:3]
         return diffs
@@ -27,7 +27,13 @@ class LogListMixin(GetDateMixin):
     def get_queryset(self):
         # only get logs for tracks: log for diffusion will be retrieved
         # by the diffusions' queryset.
-        qs = super().get_queryset().on_air().filter(track__isnull=False).filter(date__lte=tz.now())
+        qs = (
+            super()
+            .get_queryset()
+            .on_air()
+            .filter(track__isnull=False)
+            .filter(date__lte=tz.now())
+        )
         return (
             qs.date(self.date)
             if self.date is not None

@@ -37,7 +43,11 @@ class LogListMixin(GetDateMixin):
         )

     def get_diffusions_queryset(self):
-        qs = Diffusion.objects.station(self.station).on_air().filter(start__lte=tz.now())
+        qs = (
+            Diffusion.objects.station(self.station)
+            .on_air()
+            .filter(start__lte=tz.now())
+        )
         return (
             qs.date(self.date)
             if self.date is not None

@@ -77,7 +87,9 @@ class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
         kwargs.update(
             {
                 "date": self.date,
-                "dates": (today - datetime.timedelta(days=i) for i in range(0, 7)),
+                "dates": (
+                    today - datetime.timedelta(days=i) for i in range(0, 7)
+                ),
                 "object_list": self.get_object_list(self.object_list),
             }
         )

@@ -112,4 +124,6 @@ class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):

     def get_serializer(self, queryset, *args, **kwargs):
         full = bool(self.request.GET.get("full"))
-        return super().get_serializer(self.get_object_list(queryset, full), *args, **kwargs)
+        return super().get_serializer(
+            self.get_object_list(queryset, full), *args, **kwargs
+        )
@@ -14,7 +14,13 @@ class GetDateMixin:

     def get_date(self):
         date = self.request.GET.get("date")
-        return str_to_date(date, "-") if date is not None else self.kwargs["date"] if "date" in self.kwargs else None
+        return (
+            str_to_date(date, "-")
+            if date is not None
+            else self.kwargs["date"]
+            if "date" in self.kwargs
+            else None
+        )

     def get(self, *args, **kwargs):
         if self.redirect_date_url and self.request.GET.get("date"):
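The chained conditional expression in get_date above is equivalent to the following control flow (a sketch for readability only; the names come from the mixin itself):

    def get_date(self):
        # query string first, then the URL kwarg, then None
        date = self.request.GET.get("date")
        if date is not None:
            return str_to_date(date, "-")
        if "date" in self.kwargs:
            return self.kwargs["date"]
        return None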
@@ -49,7 +55,9 @@ class ParentMixin:
             return

         lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
-        return get_object_or_404(self.parent_model.objects.select_related("cover"), **lookup)
+        return get_object_or_404(
+            self.parent_model.objects.select_related("cover"), **lookup
+        )

     def get(self, request, *args, **kwargs):
         self.parent = self.get_parent(request, *args, **kwargs)

@@ -75,7 +83,11 @@ class AttachedToMixin:

     def get_page(self):
         if self.attach_to_value is not None:
-            return StaticPage.objects.filter(attach_to=self.attach_to_value).published().first()
+            return (
+                StaticPage.objects.filter(attach_to=self.attach_to_value)
+                .published()
+                .first()
+            )
         return super().get_page()
@@ -32,7 +32,13 @@ class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
         return super().get(*args, **kwargs)

     def get_queryset(self):
-        return super().get_queryset().select_subclasses().published().select_related("cover")
+        return (
+            super()
+            .get_queryset()
+            .select_subclasses()
+            .published()
+            .select_related("cover")
+        )

     def get_context_data(self, **kwargs):
         kwargs.setdefault("item_template_name", self.item_template_name)

@@ -91,7 +97,12 @@ class PageListView(FiltersMixin, BasePageListView):
         return super().get_filterset(data, query)

     def get_queryset(self):
-        qs = super().get_queryset().select_related("category").order_by("-pub_date")
+        qs = (
+            super()
+            .get_queryset()
+            .select_related("category")
+            .order_by("-pub_date")
+        )
         return qs

     def get_context_data(self, **kwargs):

@@ -120,7 +131,9 @@ class PageDetailView(BasePageDetailView):
     def get_context_data(self, **kwargs):
         if self.object.allow_comments and "comment_form" not in kwargs:
             kwargs["comment_form"] = CommentForm()
-        kwargs["comments"] = Comment.objects.filter(page=self.object).order_by("-date")
+        kwargs["comments"] = Comment.objects.filter(page=self.object).order_by(
+            "-date"
+        )
         return super().get_context_data(**kwargs)

     @classmethod
@@ -12,7 +12,9 @@ class BaseProgramMixin:
         return self.object

     def get_sidebar_url(self):
-        return reverse("program-page-list", kwargs={"parent_slug": self.program.slug})
+        return reverse(
+            "program-page-list", kwargs={"parent_slug": self.program.slug}
+        )

     def get_context_data(self, **kwargs):
         self.program = self.get_program()
@@ -70,7 +70,9 @@ class UserSettingsViewSet(viewsets.ViewSet):
     permission_classes = [IsAuthenticated]

     def get_serializer(self, instance=None, **kwargs):
-        return self.serializer_class(instance=instance, context={"user": self.request.user}, **kwargs)
+        return self.serializer_class(
+            instance=instance, context={"user": self.request.user}, **kwargs
+        )

     @action(detail=False, methods=["GET"])
     def retrieve(self, request):
@@ -45,7 +45,9 @@ class Connector:
         if self.is_open:
             return 1

-        family = socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
+        family = (
+            socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
+        )
         try:
             self.socket = self.socket_class(family, socket.SOCK_STREAM)
             self.socket.connect(self.address)

@@ -76,7 +78,13 @@ class Connector:

             if data:
                 data = response_re.sub(r"\1", data).strip()
-                data = self.parse(data) if parse else self.parse_json(data) if parse_json else data
+                data = (
+                    self.parse(data)
+                    if parse
+                    else self.parse_json(data)
+                    if parse_json
+                    else data
+                )
             return data
         except Exception:
             self.close()
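The Connector above infers the socket family from the address type; a minimal standalone sketch of that rule (the helper name is illustrative only, not part of the code):

    import socket

    def family_for(address):
        # a str address is taken as a UNIX socket path; anything else
        # (e.g. a ("host", port) tuple) is treated as INET
        return socket.AF_UNIX if isinstance(address, str) else socket.AF_INET

    assert family_for("station.sock") == socket.AF_UNIX
    assert family_for(("127.0.0.1", 1234)) == socket.AF_INET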
@@ -62,7 +62,9 @@ class Monitor:

     def get_logs_queryset(self):
         """Return queryset to assign as `self.logs`"""
-        return self.station.log_set.select_related("diffusion", "sound", "track").order_by("-pk")
+        return self.station.log_set.select_related(
+            "diffusion", "sound", "track"
+        ).order_by("-pk")

     def init_last_sound_logs(self):
         """Retrieve last logs and initialize `last_sound_logs`"""

@@ -134,7 +136,12 @@ class Monitor:
         diff = None
         sound = Sound.objects.path(air_uri).first()
         if sound and sound.episode_id is not None:
-            diff = Diffusion.objects.episode(id=sound.episode_id).on_air().now(air_time).first()
+            diff = (
+                Diffusion.objects.episode(id=sound.episode_id)
+                .on_air()
+                .now(air_time)
+                .first()
+            )

         # log sound on air
         return self.log(

@@ -151,7 +158,9 @@ class Monitor:
         if log.diffusion:
             return

-        tracks = Track.objects.filter(sound_id=log.sound_id, timestamp__isnull=False).order_by("timestamp")
+        tracks = Track.objects.filter(
+            sound_id=log.sound_id, timestamp__isnull=False
+        ).order_by("timestamp")
         if not tracks.exists():
             return

@@ -208,7 +217,11 @@ class Monitor:

         dealer = self.streamer.dealer
         # start
-        if not dealer.queue and dealer.rid is None or dealer.remaining < self.delay.total_seconds():
+        if (
+            not dealer.queue
+            and dealer.rid is None
+            or dealer.remaining < self.delay.total_seconds()
+        ):
             self.start_diff(dealer, diff)
         # cancel
         elif diff.start < now - self.cancel_timeout:
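In the start condition above, `and` binds tighter than `or`, so it reads as `(not dealer.queue and dealer.rid is None) or dealer.remaining < self.delay.total_seconds()`: the diffusion starts when the dealer is idle, or when less time than the monitor delay remains on the current item.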
@@ -47,7 +47,9 @@ class Streamer:

         self.id = self.station.slug.replace("-", "_")
         self.path = os.path.join(station.path, "station.liq")
-        self.connector = connector or Connector(os.path.join(station.path, "station.sock"))
+        self.connector = connector or Connector(
+            os.path.join(station.path, "station.sock")
+        )
         self.init_sources()

     @property

@@ -89,7 +91,9 @@ class Streamer:
     def init_sources(self):
         streams = self.station.program_set.filter(stream__isnull=False)
         self.dealer = QueueSource(self, "dealer")
-        self.sources = [self.dealer] + [PlaylistSource(self, program=program) for program in streams]
+        self.sources = [self.dealer] + [
+            PlaylistSource(self, program=program) for program in streams
+        ]

     def make_config(self):
         """Make configuration files and directory (and sync sources)"""

@@ -124,7 +128,12 @@ class Streamer:
         self.source = next(
             iter(
                 sorted(
-                    (source for source in self.sources if source.request_status == "playing" and source.air_time),
+                    (
+                        source
+                        for source in self.sources
+                        if source.request_status == "playing"
+                        and source.air_time
+                    ),
                     key=lambda o: o.air_time,
                     reverse=True,
                 )

@@ -140,7 +149,11 @@ class Streamer:
         if not os.path.exists(self.socket_path):
             return

-        conns = [conn for conn in psutil.net_connections(kind="unix") if conn.laddr == self.socket_path]
+        conns = [
+            conn
+            for conn in psutil.net_connections(kind="unix")
+            if conn.laddr == self.socket_path
+        ]
         for conn in conns:
             if conn.pid is not None:
                 os.kill(conn.pid, signal.SIGKILL)
@@ -23,7 +23,9 @@ class Streamers:
     def reset(self, stations=Station.objects.active()):
         # FIXME: cf. TODO in aircox.controllers about model updates
         stations = stations.all()
-        self.streamers = {station.pk: self.streamer_class(station) for station in stations}
+        self.streamers = {
+            station.pk: self.streamer_class(station) for station in stations
+        }

     def fetch(self):
         """Call streamers fetch if timed-out."""
@@ -62,24 +62,42 @@ class Command(BaseCommand):
             "--station",
             type=str,
             action="append",
-            help="name of the station to monitor instead of monitoring " "all stations",
+            help="name of the station to monitor instead of monitoring "
+            "all stations",
         )
         group.add_argument(
             "-t",
             "--timeout",
             type=float,
             default=Monitor.cancel_timeout.total_seconds() / 60,
-            help="time to wait in MINUTES before canceling a diffusion that " "should have ran but did not. ",
+            help="time to wait in MINUTES before canceling a diffusion that "
+            "should have ran but did not. ",
         )
         # TODO: sync-timeout, cancel-timeout

-    def handle(self, *args, config=None, run=None, monitor=None, station=[], delay=1000, timeout=600, **options):
-        stations = Station.objects.filter(name__in=station) if station else Station.objects.all()
+    def handle(
+        self,
+        *args,
+        config=None,
+        run=None,
+        monitor=None,
+        station=[],
+        delay=1000,
+        timeout=600,
+        **options
+    ):
+        stations = (
+            Station.objects.filter(name__in=station)
+            if station
+            else Station.objects.all()
+        )
         streamers = [Streamer(station) for station in stations]

         for streamer in streamers:
             if not streamer.outputs:
-                raise RuntimeError("Streamer {} has no outputs".format(streamer.id))
+                raise RuntimeError(
+                    "Streamer {} has no outputs".format(streamer.id)
+                )
             if config:
                 streamer.make_config()
             if run:

@@ -88,7 +106,10 @@ class Command(BaseCommand):
         if monitor:
             delay = tz.timedelta(milliseconds=delay)
             timeout = tz.timedelta(minutes=timeout)
-            monitors = [Monitor(streamer, delay, cancel_timeout=timeout) for streamer in streamers]
+            monitors = [
+                Monitor(streamer, delay, cancel_timeout=timeout)
+                for streamer in streamers
+            ]

             while not run or streamer.is_running:
                 for monitor in monitors:
@@ -55,7 +55,9 @@ class FakeSocket:
         data = self.recv_data
         self.recv_data = self.recv_data[count:]
         data = data[:count]
-        return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"
+        return (
+            data.encode("utf-8") if isinstance(data, str) else data
+        ) or b"\nEND"

     def is_sent(self, data):
         """Return True if provided data have been sent."""

@@ -66,7 +68,9 @@ class FakeSocket:

 # -- models
 @pytest.fixture
 def station():
-    station = models.Station(name="test", path=working_dir, default=True, active=True)
+    station = models.Station(
+        name="test", path=working_dir, default=True, active=True
+    )
     station.save()
     return station

@@ -132,7 +136,9 @@ def program(station):

 @pytest.fixture
 def stream(program):
-    stream = models.Stream(program=program, begin=time(10, 12), end=time(12, 13))
+    stream = models.Stream(
+        program=program, begin=time(10, 12), end=time(12, 13)
+    )
     stream.save()
     return stream

@@ -223,7 +229,10 @@ def metadata_data(metadata_data_air_time):

 @pytest.fixture
 def metadata_string(metadata_data):
-    return "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"
+    return (
+        "\n".join(f"{key}={value}" for key, value in metadata_data.items())
+        + "\nEND"
+    )


 # -- streamers

@@ -276,7 +285,9 @@ class FakeQueueSource(FakeSource, controllers.QueueSource):
 @pytest.fixture
 def streamer(station, station_ports):
     streamer = FakeStreamer(station=station)
-    streamer.sources = [FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)]
+    streamer.sources = [
+        FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
+    ]
     streamer.dealer = FakeQueueSource(len(streamer.sources))
     streamer.sources.append(streamer.dealer)
     return streamer

@@ -286,8 +297,12 @@ def streamer(station, station_ports):
 def streamers(stations, stations_ports):
     streamers = controllers.Streamers(streamer_class=FakeStreamer)
     # avoid unecessary db calls
-    streamers.streamers = {station.pk: FakeStreamer(station=station) for station in stations}
+    streamers.streamers = {
+        station.pk: FakeStreamer(station=station) for station in stations
+    }
     for j, streamer in enumerate(streamers.values()):
-        streamer.sources = [FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)]
+        streamer.sources = [
+            FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
+        ]
         streamer.sources.append(FakeQueueSource(len(streamer.sources)))
     return streamers
@@ -16,7 +16,9 @@ class TestConnector:
         assert connector.is_open
         assert connector.socket.family == socket.AF_UNIX
         assert connector.socket.type == socket.SOCK_STREAM
-        assert connector.socket.address == os.path.join(working_dir, "test.sock")
+        assert connector.socket.address == os.path.join(
+            working_dir, "test.sock"
+        )
         connector.close()

     def test_open_af_inet(self, connector):
@@ -37,7 +37,9 @@ class TestBaseMetaData:
         assert metadata.validate_status("any") == "stopped"

     @pytest.mark.django_db
-    def test_validate_air_time(self, metadata, metadata_data, metadata_data_air_time):
+    def test_validate_air_time(
+        self, metadata, metadata_data, metadata_data_air_time
+    ):
         air_time = metadata_data["on_air"]
         result = metadata.validate_air_time(air_time)
         assert result == metadata_data_air_time
@@ -43,7 +43,10 @@ def source(monitor, streamer, sound, diffusion):

 @pytest.fixture
 def tracks(sound):
-    items = [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 4)]
+    items = [
+        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
+        for i in range(0, 4)
+    ]
     models.Track.objects.bulk_create(items)
     return items

@@ -175,7 +178,9 @@ class TestMonitor:
         assert all(log_by_track.count(track) for track in tracks)

     @pytest.mark.django_db(transaction=True)
-    def test_trace_tracks_returns_on_log_diffusion(self, monitor, log, diffusion, tracks):
+    def test_trace_tracks_returns_on_log_diffusion(
+        self, monitor, log, diffusion, tracks
+    ):
         log.diffusion = None
         monitor.trace_tracks(log)

@@ -205,7 +210,9 @@ class TestMonitor:
         assert not monitor.calls["cancel_diff"]

     @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff(self, monitor, streamer, diffusion, log):
+    def test_handle_diffusions_returns_on_diff(
+        self, monitor, streamer, diffusion, log
+    ):
         interface(
             monitor,
             {

@@ -225,7 +232,9 @@ class TestMonitor:
         assert not monitor.calls["cancel_diff"]

     @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff_log_exists(self, monitor, streamer, diffusion, log):
+    def test_handle_diffusions_returns_on_diff_log_exists(
+        self, monitor, streamer, diffusion, log
+    ):
         interface(
             monitor,
             {

@@ -255,7 +264,9 @@ class TestMonitor:
         streamer.dealer.queue = None
         streamer.dealer.rid = "13"
         streamer.dealer.remaining = monitor.delay.total_seconds() + 10
-        diffusion.start = tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
+        diffusion.start = (
+            tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
+        )
         diffusion.end = tz.now() + tz.timedelta(minutes=30)
         diffusion.save()

@@ -274,7 +285,9 @@ class TestMonitor:
         assert log.comment == "test"

     @pytest.mark.django_db(transaction=True)
-    def test_start_diff(self, monitor, diffusion, source, episode, sound, tracks):
+    def test_start_diff(
+        self, monitor, diffusion, source, episode, sound, tracks
+    ):
         result = {}
         monitor.log = lambda **kw: result.update(kw)

@@ -308,10 +321,17 @@ class TestMonitor:
         monitor.sync()

         assert monitor.sync_next >= now + monitor.sync_timeout
-        assert all(source.calls.get("sync") for source in monitor.streamer.playlists)
+        assert all(
+            source.calls.get("sync") for source in monitor.streamer.playlists
+        )

     @pytest.mark.django_db(transaction=True)
     def test_sync_timeout_not_reached_skip_sync(self, monitor):
-        monitor.sync_next = tz.now() + tz.timedelta(seconds=monitor.sync_timeout.total_seconds() + 20)
+        monitor.sync_next = tz.now() + tz.timedelta(
+            seconds=monitor.sync_timeout.total_seconds() + 20
+        )
         monitor.sync()
-        assert all(not source.calls.get("sync") for source in monitor.streamer.playlists)
+        assert all(
+            not source.calls.get("sync")
+            for source in monitor.streamer.playlists
+        )
@@ -67,7 +67,11 @@ class TestPlaylistSource:
     @pytest.mark.django_db
     def test_get_sound_queryset(self, playlist_source, sounds):
        query = playlist_source.get_sound_queryset()
-        assert all(r.program_id == playlist_source.program.pk and r.type == r.TYPE_ARCHIVE for r in query)
+        assert all(
+            r.program_id == playlist_source.program.pk
+            and r.type == r.TYPE_ARCHIVE
+            for r in query
+        )

     @pytest.mark.django_db
     def test_get_playlist(self, playlist_source, sounds):
@@ -110,7 +114,9 @@ class TestQueueSource:
     @pytest.mark.django_db
     def test_requests(self, queue_source, socket, metadata_string):
         queue_source.queue = [13, 14, 15]
-        socket.recv_data = [f"{metadata_string}\nEND" for _ in queue_source.queue]
+        socket.recv_data = [
+            f"{metadata_string}\nEND" for _ in queue_source.queue
+        ]

         requests = queue_source.requests

@@ -121,7 +127,10 @@ class TestQueueSource:
     def test_push(self, queue_source, socket):
         paths = ["/tmp/a", "/tmp/b"]
         queue_source.push(*paths)
-        assert all(socket.is_sent(f"{queue_source.id}_queue.push {path}") for path in paths)
+        assert all(
+            socket.is_sent(f"{queue_source.id}_queue.push {path}")
+            for path in paths
+        )

     @pytest.mark.django_db
     def test_fetch(self, queue_source, socket, metadata_string):
@@ -12,7 +12,9 @@ class TestStreamers:
     @pytest.fixture
     def test_reset(self, streamers, stations):
         streamers.reset()
-        assert all(streamers.streamers[station.pk] == station for station in stations)
+        assert all(
+            streamers.streamers[station.pk] == station for station in stations
+        )

     @pytest.fixture
     def test_fetch(self, streamers):
@@ -168,14 +168,18 @@ class TestQueueSourceViewSet:
         calls = {}
         sound = sounds[0]
         request = FakeRequest(station=station, data={"sound_id": sound.pk})
-        queue_source_viewset._run = lambda pk, func: calls.setdefault("_run", (pk, func))
+        queue_source_viewset._run = lambda pk, func: calls.setdefault(
+            "_run", (pk, func)
+        )
         result = queue_source_viewset.push(request, 13)
         assert "_run" in calls
         assert result[0] == 13
         assert callable(result[1])

     @pytest.mark.django_db
-    def test_push_missing_sound_in_request_post(self, queue_source_viewset, station):
+    def test_push_missing_sound_in_request_post(
+        self, queue_source_viewset, station
+    ):
         request = FakeRequest(station=station, data={})
         with pytest.raises(ValidationError):
             queue_source_viewset.push(request, 0)
@@ -73,7 +73,9 @@ class StreamerViewSet(ControllerViewSet):
         return Response(self.serialize(self.streamer))

     def list(self, request, pk=None):
-        return Response({"results": self.serialize(self.streamers.values(), many=True)})
+        return Response(
+            {"results": self.serialize(self.streamers.values(), many=True)}
+        )

     def dispatch(self, request, *args, pk=None, **kwargs):
         if pk is not None:

@@ -91,7 +93,9 @@ class SourceViewSet(ControllerViewSet):
         return (s for s in self.streamer.sources if isinstance(s, self.model))

     def get_source(self, pk):
-        source = next((source for source in self.get_sources() if source.id == pk), None)
+        source = next(
+            (source for source in self.get_sources() if source.id == pk), None
+        )
         if source is None:
             raise Http404("source `%s` not found" % pk)
         return source

@@ -101,7 +105,9 @@ class SourceViewSet(ControllerViewSet):
         return Response(self.serialize(source))

     def list(self, request):
-        return Response({"results": self.serialize(self.get_sources(), many=True)})
+        return Response(
+            {"results": self.serialize(self.get_sources(), many=True)}
+        )

     def _run(self, pk, action):
         source = self.object = self.get_source(pk)

@@ -144,5 +150,9 @@ class QueueSourceViewSet(SourceViewSet):
         if not request.data.get("sound_id"):
             raise ValidationError('missing "sound_id" POST data')

-        sound = get_object_or_404(self.get_sound_queryset(request), pk=request.data["sound_id"])
-        return self._run(pk, lambda s: s.push(sound.file.path) if sound.file.path else None)
+        sound = get_object_or_404(
+            self.get_sound_queryset(request), pk=request.data["sound_id"]
+        )
+        return self._run(
+            pk, lambda s: s.push(sound.file.path) if sound.file.path else None
+        )
@@ -10,7 +10,11 @@ sys.path.insert(1, os.path.dirname(os.path.realpath(__file__)))
 PROJECT_ROOT = os.path.abspath(__file__ + "/../../../")

 # DEBUG mode
-DEBUG = (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1)) if "AIRCOX_DEBUG" in os.environ else False
+DEBUG = (
+    (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1))
+    if "AIRCOX_DEBUG" in os.environ
+    else False
+)

 # Internationalization and timezones: thoses values may be set in order to
 # have correct translation and timezone.

@@ -70,7 +74,9 @@ try:
 except Exception:
     print(
         "Can not set locale {LC}. Is it available on you system? Hint: "
-        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
+        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(
+            LC=LANGUAGE_CODE
+        )
     )
     pass
@@ -44,6 +44,8 @@ try:
 except Exception:
     print(
         "Can not set locale {LC}. Is it available on you system? Hint: "
-        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
+        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(
+            LC=LANGUAGE_CODE
+        )
     )
     pass
@@ -28,6 +28,6 @@ urlpatterns = aircox.urls.urls + [
 ]

 if settings.DEBUG:
-    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(
-        settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
-    )
+    urlpatterns += static(
+        settings.STATIC_URL, document_root=settings.STATIC_ROOT
+    ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
83 pyproject.toml
@@ -1,83 +0,0 @@
-[project]
-name = "aircox"
-# version = "0.1"
-description = "Radio management platform and website"
-readme = "README.md"
-license = {text = "GPLv3"}
-requires-python = ">=3.8"
-
-authors = [
-    {name = "Thomas", email = "thomas@bkfox.net"},
-]
-
-classifiers = [
-    "Framework :: Django",
-    "Programming Language :: Python",
-    "Programming Language :: Python :: 3.11",
-]
-
-dynamic = ["version", "dependencies"]
-
-[project.urls]
-"Homepage" = "https://git.radiocampus.be/rc/aircox/"
-
-
-[build-system]
-requires = ["setuptools>=60", "setuptools-scm>=8.0", "wheel"]
-build-backend = "setuptools.build_meta"
-
-[tool.setuptools]
-include-package-data = true
-
-[tool.setuptools.packages.find]
-where = ["."]
-include = ["aircox*",]
-exclude = ["aircox*.tests*",]
-namespaces = false
-
-[tool.setuptools.dynamic]
-dependencies = {file = ["requirements.txt"]}
-
-[tool.setuptools_scm]
-
-[tool.pytest.ini_options]
-DJANGO_SETTINGS_MODULE = "instance.settings"
-python_files = ["tests.py", "test_*.py", "*_tests.py"]
-
-
-[tool.black]
-line-length = 120
-exclude = '''
-/(
-    \.egg
-  | \.git
-  | \.hg
-  | \.tox
-  | \._build
-  | \.build
-  | \.bulk-out
-  | \.dist
-  | \.__pycache__
-  | \.venv
-  | \.migrations
-  | \.static
-  | \.instance/settings
-)
-'''
-
-[tool.ruff]
-line-length = 120
-exclude = [
-    "egg",
-    "git",
-    "hg",
-    "tox",
-    "_build",
-    "build",
-    "dist",
-    "__pycache__",
-    "venv",
-    "*/migrations",
-    "static",
-    "instance/settings",
-]
4 pytest.ini Normal file
@@ -0,0 +1,4 @@
+[pytest]
+DJANGO_SETTINGS_MODULE = instance.settings
+# -- recommended but optional:
+python_files = tests.py test_*.py *_tests.py
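These values match the [tool.pytest.ini_options] table of the removed pyproject.toml, so test discovery and settings resolution behave the same. With pytest-django installed, the suite can then be run without exporting DJANGO_SETTINGS_MODULE by hand; a minimal programmatic sketch (the "aircox" target assumes the repository layout above):

    # assumes pytest and pytest-django are installed and the working
    # directory contains this pytest.ini
    import pytest

    pytest.main(["aircox"])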
37 setup.py Executable file
@@ -0,0 +1,37 @@
+from setuptools import find_packages, setup
+
+
+def to_rst(path):
+    try:
+        from pypandoc import convert
+
+        return convert(path, "rst")
+    except ImportError:
+        print("pypandoc module not found, can not convert Markdown to RST")
+        return open(path, "r").read()
+
+
+def to_array(path):
+    with open(path, "r") as file:
+        return [r for r in file.read().split("\n") if r]
+
+
+setup(
+    name="aircox",
+    version="0.9",
+    license="GPLv3",
+    author="bkfox",
+    description="Aircox is a radio programs manager including tools and cms",
+    long_description=to_rst("README.md"),
+    url="https://github.com/bkfox/aircox",
+    packages=find_packages(),
+    include_package_data=True,
+    install_requires=to_array("requirements.txt"),
+    classifiers=[
+        "Framework :: Django",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.2",
+        "Programming Language :: Python :: 3.3",
+    ],
+)
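Note that to_rst degrades gracefully: when pypandoc is not installed, the raw Markdown text is used as long_description, so building the package does not require pandoc.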