#132 | #121: backoffice / dev-1.0-121 #131

Merged
thomas merged 151 commits from dev-1.0-121 into develop-1.0 2024-04-28 20:02:14 +00:00
90 changed files with 1080 additions and 996 deletions
Showing only changes of commit 474016f776

.gitignore (vendored)
View File

@ -5,3 +5,6 @@ venv/
node_modules/ node_modules/
*.egg-info/ *.egg-info/
*.egg *.egg
db.sqlite3
instance/settings/settings.py

View File

@ -9,14 +9,11 @@ repos:
rev: 23.1.0 rev: 23.1.0
hooks: hooks:
- id: black - id: black
args: - repo: https://github.com/astral-sh/ruff-pre-commit
- --line-length=79 rev: v0.0.292
- --exclude="""\.git|\.__pycache__|venv|_build|buck-out|build|dist"""
- repo: https://github.com/PyCQA/flake8.git
rev: 6.0.0
hooks: hooks:
- id: flake8 - id: ruff
exclude: ^instance/settings/|migrations/ args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/PyCQA/docformatter.git - repo: https://github.com/PyCQA/docformatter.git
rev: v1.5.1 rev: v1.5.1
hooks: hooks:

View File

@ -1,10 +1,9 @@
![](/logo.png) ![](/logo.png)
Platform to manage a radio, schedules, website, and so on. We use the power of great tools like Django or Liquidsoap. A platform to manage radio schedules, website content, and more. It uses the power of great tools like Django or Liquidsoap.
This project is distributed under GPL version 3. More information in the LICENSE file, except for some files whose license is indicated inside source code. This project is distributed under GPL version 3. More information in the LICENSE file, except for some files whose license is indicated inside source code.
## Features ## Features
* **streams**: multiple random music streams when no program is played. We also can specify a time range and frequency for each; * **streams**: multiple random music streams when no program is played. We also can specify a time range and frequency for each;
* **diffusions**: generate diffusions time slot for programs that have schedule informations. Check for conflicts and rerun. * **diffusions**: generate diffusions time slot for programs that have schedule informations. Check for conflicts and rerun.
@ -15,7 +14,51 @@ This project is distributed under GPL version 3. More information in the LICENSE
* **cms**: content management system. * **cms**: content management system.
## Scripts ## Architecture and concepts
Aircox is divided into two main modules (a configuration sketch follows the list):
* `aircox`: the basics of Aircox (management of programs, diffusions, sounds, etc.) and the interface for managing a website built on Aircox elements (playlists, timetable, players on the website);
* `aircox_streamer`: interacts with the application generating the audio stream (Liquidsoap);
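As a rough illustration (a sketch only, not the project's actual settings file), both modules are regular Django applications that would be enabled alongside the usual Django apps:
```
# Illustrative excerpt of a Django settings module; the exact list and
# ordering of apps used by Aircox may differ from this sketch.
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    # Aircox applications
    "aircox",           # programs, diffusions, sounds, website management
    "aircox_streamer",  # control of the Liquidsoap audio streamer
]
```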
## Development setup
Start by installing a virtual environment and the Python dependencies:
```
virtualenv venv
source venv/bin/activate
pip install -r requirements.txt
pip install -r requirements_tests.txt
```
Then copy the default settings, generate a secret key, and initialize the database:
```
cp instance/settings/sample.py instance/settings/settings.py
python -c "from django.core.management.utils import get_random_secret_key; print('SECRET_KEY = \"%s\"' % get_random_secret_key())" >> instance/settings/settings.py
DJANGO_SETTINGS_MODULE=instance.settings.dev ./manage.py migrate
```
Finally, test and run the instance using the development settings, and point your browser to http://localhost:8000:
```
DJANGO_SETTINGS_MODULE=instance.settings.dev pytest
DJANGO_SETTINGS_MODULE=instance.settings.dev ./manage.py runserver
```
Before requesting a merge, enable pre-commit:
```
pip install pre-commit
pre-commit install
```
## Installation
Running Aircox in production involves:
* the Aircox modules and a running Django project;
* a supervisor for common tasks (sound monitoring, stream control, etc.) -- `supervisord`;
* a WSGI server and an HTTP server -- `gunicorn`, `nginx`;
* a database supported by Django (MySQL, SQLite, PostgreSQL). A minimal WSGI entry point is sketched below.
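For illustration only, a minimal WSGI entry point that Gunicorn could serve might look like the following sketch; the settings module path is an assumption and must match the settings file you created under `instance/settings/`.
```
# wsgi.py -- illustrative sketch of a minimal WSGI entry point.
# Assumption: the production settings live in instance/settings/settings.py;
# adjust DJANGO_SETTINGS_MODULE to your actual settings module.
import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "instance.settings.settings")

application = get_wsgi_application()
```
Gunicorn would then typically be launched as `gunicorn wsgi:application`, run under `supervisord` and proxied by `nginx`.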
### Scripts
Are included various configuration scripts that can be used to ease setup. They Are included various configuration scripts that can be used to ease setup. They
assume that the project is present in `/srv/apps/aircox`: assume that the project is present in `/srv/apps/aircox`:
@ -27,7 +70,6 @@ The scripts are written with a combination of `cron`, `supervisord`, `nginx`
and `gunicorn` in mind. and `gunicorn` in mind.
## Installation
### Dependencies ### Dependencies
For python dependencies take a peek at the `requirements.txt` file, plus For python dependencies take a peek at the `requirements.txt` file, plus
dependencies specific to Django (e.g. for database: `mysqlclient` for MySql dependencies specific to Django (e.g. for database: `mysqlclient` for MySql
@ -62,8 +104,8 @@ pip install -r requirements.txt
``` ```
### Configuration ### Configuration
You must write a settings.py file in the `instance` directory (you can just You must write a settings.py file in the `instance/settings` directory (you can just
copy and paste `instance/sample_settings.py`. There still is configuration copy and paste `instance/settings/sample.py`. There still is configuration
required in this file, check it in for more info. required in this file, check it in for more info.
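As a hedged illustration of what typically needs to be filled in (the repository's `instance/settings/sample.py` remains the authoritative template; the values below are placeholders, not the project's real configuration):
```
# Fragment of instance/settings/settings.py -- illustrative sketch only.
# Host name and database credentials below are placeholders.
SECRET_KEY = "replace-me"  # generate one, e.g. with get_random_secret_key()
ALLOWED_HOSTS = ["radio.example.org"]

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",  # or .mysql / .sqlite3
        "NAME": "aircox",
        "USER": "aircox",
        "PASSWORD": "change-me",
        "HOST": "localhost",
    }
}
```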

View File

@ -18,9 +18,7 @@ class EpisodeAdminForm(ModelForm):
class EpisodeAdmin(SortableAdminBase, PageAdmin): class EpisodeAdmin(SortableAdminBase, PageAdmin):
form = EpisodeAdminForm form = EpisodeAdminForm
list_display = PageAdmin.list_display list_display = PageAdmin.list_display
list_filter = tuple( list_filter = tuple(f for f in PageAdmin.list_filter if f != "pub_date") + (
f for f in PageAdmin.list_filter if f != "pub_date"
) + (
"diffusion__start", "diffusion__start",
"pub_date", "pub_date",
) )

View File

@ -14,13 +14,9 @@ class DateFieldFilter(filters.FieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path): def __init__(self, field, request, params, model, model_admin, field_path):
self.field_generic = f"{field_path}__" self.field_generic = f"{field_path}__"
self.date_params = { self.date_params = {k: v for k, v in params.items() if k.startswith(self.field_generic)}
k: v for k, v in params.items() if k.startswith(self.field_generic)
}
exact_lookup = ( exact_lookup = "date" if isinstance(field, models.DateTimeField) else "exact"
"date" if isinstance(field, models.DateTimeField) else "exact"
)
# links as: (label, param, input_type|None, value) # links as: (label, param, input_type|None, value)
self.links = [ self.links = [
@ -29,17 +25,11 @@ class DateFieldFilter(filters.FieldListFilter):
(_("Until"), self.field_generic + "lte", self.input_type), (_("Until"), self.field_generic + "lte", self.input_type),
] ]
if field.null: if field.null:
self.links.insert( self.links.insert(0, (_("None"), self.field_generic + "isnull", None, "1"))
0, (_("None"), self.field_generic + "isnull", None, "1")
)
self.query_attrs = { self.query_attrs = {k: v for k, v in request.GET.items() if k not in self.date_params}
k: v for k, v in request.GET.items() if k not in self.date_params
}
self.query_string = urlencode(self.query_attrs) self.query_string = urlencode(self.query_attrs)
super().__init__( super().__init__(field, request, params, model, model_admin, field_path)
field, request, params, model, model_admin, field_path
)
def expected_parameters(self): def expected_parameters(self):
return [link[1] for link in self.links] return [link[1] for link in self.links]
@ -59,11 +49,7 @@ class DateFieldFilter(filters.FieldListFilter):
"value": value, "value": value,
"type": link[2], "type": link[2],
"query_attrs": self.query_attrs, "query_attrs": self.query_attrs,
"query_string": urlencode({link[1]: value}) "query_string": urlencode({link[1]: value}) + "&" + self.query_string if value else self.query_string,
+ "&"
+ self.query_string
if value
else self.query_string,
} }

View File

@ -50,11 +50,7 @@ class BasePageAdmin(admin.ModelAdmin):
change_form_template = "admin/aircox/page_change_form.html" change_form_template = "admin/aircox/page_change_form.html"
def cover_thumb(self, obj): def cover_thumb(self, obj):
return ( return mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"])) if obj.cover else ""
mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"]))
if obj.cover
else ""
)
def get_changeform_initial_data(self, request): def get_changeform_initial_data(self, request):
data = super().get_changeform_initial_data(request) data = super().get_changeform_initial_data(request)
@ -65,9 +61,7 @@ class BasePageAdmin(admin.ModelAdmin):
def _get_common_context(self, query, extra_context=None): def _get_common_context(self, query, extra_context=None):
extra_context = extra_context or {} extra_context = extra_context or {}
parent = query.get("parent", None) parent = query.get("parent", None)
extra_context["parent"] = ( extra_context["parent"] = None if parent is None else Page.objects.get_subclass(id=parent)
None if parent is None else Page.objects.get_subclass(id=parent)
)
return extra_context return extra_context
def render_change_form(self, request, context, *args, **kwargs): def render_change_form(self, request, context, *args, **kwargs):
@ -94,9 +88,7 @@ class PageAdmin(BasePageAdmin):
search_fields = BasePageAdmin.search_fields + ("category__title",) search_fields = BasePageAdmin.search_fields + ("category__title",)
fieldsets = deepcopy(BasePageAdmin.fieldsets) fieldsets = deepcopy(BasePageAdmin.fieldsets)
fieldsets[0][1]["fields"].insert( fieldsets[0][1]["fields"].insert(fieldsets[0][1]["fields"].index("slug") + 1, "category")
fieldsets[0][1]["fields"].index("slug") + 1, "category"
)
fieldsets[1][1]["fields"] += ("featured", "allow_comments") fieldsets[1][1]["fields"] += ("featured", "allow_comments")

View File

@ -38,9 +38,7 @@ class SoundInline(admin.TabularInline):
max_num = 0 max_num = 0
def audio(self, obj): def audio(self, obj):
return mark_safe( return mark_safe('<audio src="{}" controls></audio>'.format(obj.file.url))
'<audio src="{}" controls></audio>'.format(obj.file.url)
)
audio.short_description = _("Audio") audio.short_description = _("Audio")
@ -86,13 +84,7 @@ class SoundAdmin(SortableAdminBase, admin.ModelAdmin):
def related(self, obj): def related(self, obj):
# TODO: link to episode or program edit # TODO: link to episode or program edit
return ( return obj.episode.title if obj.episode else obj.program.title if obj.program else ""
obj.episode.title
if obj.episode
else obj.program.title
if obj.program
else ""
)
related.short_description = _("Program / Episode") related.short_description = _("Program / Episode")

View File

@ -26,21 +26,13 @@ class AdminSite(admin.AdminSite):
context.update( context.update(
{ {
# all programs # all programs
"programs": models.Program.objects.active() "programs": models.Program.objects.active().values("pk", "title").order_by("title"),
.values("pk", "title")
.order_by("title"),
# today's diffusions # today's diffusions
"diffusions": models.Diffusion.objects.date() "diffusions": models.Diffusion.objects.date().order_by("start").select_related("episode"),
.order_by("start")
.select_related("episode"),
# TODO: only for dashboard # TODO: only for dashboard
# last comments # last comments
"comments": models.Comment.objects.order_by( "comments": models.Comment.objects.order_by("-date").select_related("page")[0:10],
"-date" "latests": models.Page.objects.select_subclasses().order_by("-pub_date")[0:10],
).select_related("page")[0:10],
"latests": models.Page.objects.select_subclasses().order_by(
"-pub_date"
)[0:10],
} }
) )
return context return context
@ -69,9 +61,7 @@ class AdminSite(admin.AdminSite):
return [(label, reverse(url)) for label, url in self.tools] return [(label, reverse(url)) for label, url in self.tools]
def route_view(self, url, view, name, admin_view=True, label=None): def route_view(self, url, view, name, admin_view=True, label=None):
self.extra_urls.append( self.extra_urls.append(path(url, self.admin_view(view) if admin_view else view, name=name))
path(url, self.admin_view(view) if admin_view else view, name=name)
)
if label: if label:
self.tools.append((label, "admin:" + name)) self.tools.append((label, "admin:" + name))

View File

@ -22,9 +22,7 @@ class DiffusionMonitor:
def update(self): def update(self):
episodes, diffusions = [], [] episodes, diffusions = [], []
for schedule in Schedule.objects.filter( for schedule in Schedule.objects.filter(program__active=True, initial__isnull=True):
program__active=True, initial__isnull=True
):
eps, diffs = schedule.diffusions_of_month(self.date) eps, diffs = schedule.diffusions_of_month(self.date)
if eps: if eps:
episodes += eps episodes += eps

View File

@ -44,9 +44,7 @@ class LogArchiver:
path = self.get_path(station, date) path = self.get_path(station, date)
# FIXME: remove binary mode # FIXME: remove binary mode
with gzip.open(path, "ab") as archive: with gzip.open(path, "ab") as archive:
data = yaml.dump( data = yaml.dump([self.serialize(line) for line in logs]).encode("utf8")
[self.serialize(line) for line in logs]
).encode("utf8")
archive.write(data) archive.write(data)
if not keep: if not keep:
@ -95,10 +93,7 @@ class LogArchiver:
return [ return [
Log( Log(
diffusion=rel_obj(log, "diffusion"), diffusion=rel_obj(log, "diffusion"), sound=rel_obj(log, "sound"), track=rel_obj(log, "track"), **log
sound=rel_obj(log, "sound"),
track=rel_obj(log, "track"),
**log
) )
for log in logs for log in logs
] ]

View File

@ -50,14 +50,7 @@ class PlaylistImport:
logger.info("start reading csv " + self.path) logger.info("start reading csv " + self.path)
self.data = list( self.data = list(
csv.DictReader( csv.DictReader(
( (row for row in file if not (row.startswith("#") or row.startswith("\ufeff#")) and row.strip()),
row
for row in file
if not (
row.startswith("#") or row.startswith("\ufeff#")
)
and row.strip()
),
fieldnames=settings.IMPORT_PLAYLIST_CSV_COLS, fieldnames=settings.IMPORT_PLAYLIST_CSV_COLS,
delimiter=settings.IMPORT_PLAYLIST_CSV_DELIMITER, delimiter=settings.IMPORT_PLAYLIST_CSV_DELIMITER,
quotechar=settings.IMPORT_PLAYLIST_CSV_TEXT_QUOTE, quotechar=settings.IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
@ -70,11 +63,7 @@ class PlaylistImport:
If save is true, save it into the database If save is true, save it into the database
""" """
if self.track_kwargs.get("sound") is None: if self.track_kwargs.get("sound") is None:
logger.error( logger.error("related track's sound is missing. Skip import of " + self.path + ".")
"related track's sound is missing. Skip import of "
+ self.path
+ "."
)
return return
maps = settings.IMPORT_PLAYLIST_CSV_COLS maps = settings.IMPORT_PLAYLIST_CSV_COLS
@ -87,17 +76,11 @@ class PlaylistImport:
return return
try: try:
timestamp = ( timestamp = (
int(line.get("minutes") or 0) * 60 int(line.get("minutes") or 0) * 60 + int(line.get("seconds") or 0) if has_timestamp else None
+ int(line.get("seconds") or 0)
if has_timestamp
else None
) )
track, created = Track.objects.get_or_create( track, created = Track.objects.get_or_create(
title=line.get("title"), title=line.get("title"), artist=line.get("artist"), position=index, **self.track_kwargs
artist=line.get("artist"),
position=index,
**self.track_kwargs
) )
track.timestamp = timestamp track.timestamp = timestamp
track.info = line.get("info") track.info = line.get("info")

View File

@ -58,14 +58,7 @@ class SoundFile:
def episode(self): def episode(self):
return self.sound and self.sound.episode return self.sound and self.sound.episode
def sync( def sync(self, sound=None, program=None, deleted=False, keep_deleted=False, **kwargs):
self,
sound=None,
program=None,
deleted=False,
keep_deleted=False,
**kwargs
):
"""Update related sound model and save it.""" """Update related sound model and save it."""
if deleted: if deleted:
return self._on_delete(self.path, keep_deleted) return self._on_delete(self.path, keep_deleted)
@ -79,9 +72,7 @@ class SoundFile:
if sound: if sound:
created = False created = False
else: else:
sound, created = Sound.objects.get_or_create( sound, created = Sound.objects.get_or_create(file=self.sound_path, defaults=kwargs)
file=self.sound_path, defaults=kwargs
)
self.sound = sound self.sound = sound
self.path_info = self.read_path(self.path) self.path_info = self.read_path(self.path)
@ -172,9 +163,7 @@ class SoundFile:
year, month, day = pi.get("year"), pi.get("month"), pi.get("day") year, month, day = pi.get("year"), pi.get("month"), pi.get("day")
if pi.get("hour") is not None: if pi.get("hour") is not None:
at = tz.datetime( at = tz.datetime(year, month, day, pi.get("hour", 0), pi.get("minute", 0))
year, month, day, pi.get("hour", 0), pi.get("minute", 0)
)
at = tz.make_aware(at) at = tz.make_aware(at)
else: else:
at = date(year, month, day) at = date(year, month, day)
@ -210,22 +199,10 @@ class SoundFile:
if self.info and self.info.tags: if self.info and self.info.tags:
tags = self.info.tags tags = self.info.tags
title, artist, album, year = tuple( title, artist, album, year = tuple(
t and ", ".join(t) t and ", ".join(t) for t in (tags.get(k) for k in ("title", "artist", "album", "year"))
for t in (
tags.get(k)
for k in ("title", "artist", "album", "year")
)
)
title = (
title
or (self.path_info and self.path_info.get("name"))
or os.path.basename(path_noext)
)
info = (
"{} ({})".format(album, year)
if album and year
else album or year or ""
) )
title = title or (self.path_info and self.path_info.get("name")) or os.path.basename(path_noext)
info = "{} ({})".format(album, year) if album and year else album or year or ""
track = Track( track = Track(
sound=sound, sound=sound,
position=int(tags.get("tracknumber", 0)), position=int(tags.get("tracknumber", 0)),

View File

@ -155,10 +155,7 @@ class MonitorHandler(PatternMatchingEventHandler):
self.jobs = jobs or {} self.jobs = jobs or {}
self.sync_kw = sync_kw self.sync_kw = sync_kw
patterns = [ patterns = ["*/{}/*{}".format(self.subdir, ext) for ext in settings.SOUND_FILE_EXT]
"*/{}/*{}".format(self.subdir, ext)
for ext in settings.SOUND_FILE_EXT
]
super().__init__(patterns=patterns, ignore_directories=True) super().__init__(patterns=patterns, ignore_directories=True)
def on_created(self, event): def on_created(self, event):
@ -202,11 +199,7 @@ class SoundMonitor:
def report(self, program=None, component=None, *content, logger=logging): def report(self, program=None, component=None, *content, logger=logging):
content = " ".join([str(c) for c in content]) content = " ".join([str(c) for c in content])
logger.info( logger.info(f"{program}: {content}" if not component else f"{program}, {component}: {content}")
f"{program}: {content}"
if not component
else f"{program}, {component}: {content}"
)
def scan(self, logger=logging): def scan(self, logger=logging):
"""For all programs, scan dirs. """For all programs, scan dirs.
@ -234,9 +227,7 @@ class SoundMonitor:
dirs.append(program.abspath) dirs.append(program.abspath)
return dirs return dirs
def scan_for_program( def scan_for_program(self, program, subdir, logger=logging, **sound_kwargs):
self, program, subdir, logger=logging, **sound_kwargs
):
"""Scan a given directory that is associated to the given program, and """Scan a given directory that is associated to the given program, and
update sounds information.""" update sounds information."""
logger.info("- %s/", subdir) logger.info("- %s/", subdir)
@ -257,9 +248,7 @@ class SoundMonitor:
sounds.append(sound_file.sound.pk) sounds.append(sound_file.sound.pk)
# sounds in db & unchecked # sounds in db & unchecked
sounds = Sound.objects.filter(file__startswith=subdir).exclude( sounds = Sound.objects.filter(file__startswith=subdir).exclude(pk__in=sounds)
pk__in=sounds
)
self.check_sounds(sounds, program=program) self.check_sounds(sounds, program=program)
def check_sounds(self, qs, **sync_kwargs): def check_sounds(self, qs, **sync_kwargs):

View File

@ -38,9 +38,7 @@ class SoxStats:
args += ["trim", str(at), str(length)] args += ["trim", str(at), str(length)]
args.append("stats") args.append("stats")
p = subprocess.Popen( p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
# sox outputs to stderr (my god WHYYYY) # sox outputs to stderr (my god WHYYYY)
out_, out = p.communicate() out_, out = p.communicate()
self.values = self.parse(str(out, encoding="utf-8")) self.values = self.parse(str(out, encoding="utf-8"))
@ -94,16 +92,8 @@ class SoundStats:
position += self.sample_length position += self.sample_length
def check(self, name, min_val, max_val): def check(self, name, min_val, max_val):
self.good = [ self.good = [index for index, stats in enumerate(self.stats) if min_val <= stats.get(name) <= max_val]
index self.bad = [index for index, stats in enumerate(self.stats) if index not in self.good]
for index, stats in enumerate(self.stats)
if min_val <= stats.get(name) <= max_val
]
self.bad = [
index
for index, stats in enumerate(self.stats)
if index not in self.good
]
self.resume() self.resume()
def resume(self): def resume(self):
@ -120,10 +110,6 @@ class SoundStats:
def _view(self, array): def _view(self, array):
return [ return [
"file" "file" if index == 0 else "sample {} (at {} seconds)".format(index, (index - 1) * self.sample_length)
if index == 0
else "sample {} (at {} seconds)".format(
index, (index - 1) * self.sample_length
)
for index in array for index in array
] ]

View File

@ -35,11 +35,7 @@ class WeekConverter:
return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date() return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date()
def to_url(self, value): def to_url(self, value):
return ( return value if isinstance(value, str) else "{:04d}/{:02d}".format(*value.isocalendar())
value
if isinstance(value, str)
else "{:04d}/{:02d}".format(*value.isocalendar())
)
class DateConverter: class DateConverter:
@ -52,10 +48,4 @@ class DateConverter:
return datetime.date(int(value[0]), int(value[1]), int(value[2])) return datetime.date(int(value[0]), int(value[1]), int(value[2]))
def to_url(self, value): def to_url(self, value):
return ( return value if isinstance(value, str) else "{:04d}/{:02d}/{:02d}".format(value.year, value.month, value.day)
value
if isinstance(value, str)
else "{:04d}/{:02d}/{:02d}".format(
value.year, value.month, value.day
)
)

View File

@ -19,9 +19,7 @@ class PageFilters(filters.FilterSet):
class EpisodeFilters(PageFilters): class EpisodeFilters(PageFilters):
podcast = filters.BooleanFilter( podcast = filters.BooleanFilter(method="podcast_filter", label=_("Podcast"))
method="podcast_filter", label=_("Podcast")
)
class Meta: class Meta:
model = Episode model = Episode

View File

@ -30,8 +30,7 @@ class Command(BaseCommand):
"--age", "--age",
type=int, type=int,
default=settings.LOGS_ARCHIVES_AGE, default=settings.LOGS_ARCHIVES_AGE,
help="minimal age in days of logs to archive. Default is " help="minimal age in days of logs to archive. Default is " "settings.LOGS_ARCHIVES_AGE",
"settings.LOGS_ARCHIVES_AGE",
) )
group.add_argument( group.add_argument(
"-k", "-k",

View File

@ -55,14 +55,11 @@ class Command(BaseCommand):
group.add_argument( group.add_argument(
"--next-month", "--next-month",
action="store_true", action="store_true",
help="set the date to the next month of given date" help="set the date to the next month of given date" " (if next month from today",
" (if next month from today",
) )
def handle(self, *args, **options): def handle(self, *args, **options):
date = datetime.date( date = datetime.date(year=options["year"], month=options["month"], day=1)
year=options["year"], month=options["month"], day=1
)
if options.get("next_month"): if options.get("next_month"):
month = options.get("month") month = options.get("month")
date += tz.timedelta(days=28) date += tz.timedelta(days=28)

View File

@ -51,18 +51,13 @@ class Command(BaseCommand):
def handle(self, path, *args, **options): def handle(self, path, *args, **options):
# FIXME: absolute/relative path of sounds vs given path # FIXME: absolute/relative path of sounds vs given path
if options.get("sound"): if options.get("sound"):
sound = Sound.objects.filter( sound = Sound.objects.filter(file__icontains=options.get("sound")).first()
file__icontains=options.get("sound")
).first()
else: else:
path_, ext = os.path.splitext(path) path_, ext = os.path.splitext(path)
sound = Sound.objects.filter(path__icontains=path_).first() sound = Sound.objects.filter(path__icontains=path_).first()
if not sound: if not sound:
logger.error( logger.error("no sound found in the database for the path " "{path}".format(path=path))
"no sound found in the database for the path "
"{path}".format(path=path)
)
return return
# FIXME: auto get sound.episode if any # FIXME: auto get sound.episode if any

View File

@ -1,5 +1,4 @@
#! /usr/bin/env python3 #! /usr/bin/env python3
# TODO: SoundMonitor class
"""Monitor sound files; For each program, check for: """Monitor sound files; For each program, check for:
@ -43,8 +42,7 @@ class Command(BaseCommand):
"-q", "-q",
"--quality_check", "--quality_check",
action="store_true", action="store_true",
help="Enable quality check using sound_quality_check on all " help="Enable quality check using sound_quality_check on all " "sounds marqued as not good",
"sounds marqued as not good",
) )
parser.add_argument( parser.add_argument(
"-s", "-s",
@ -57,15 +55,12 @@ class Command(BaseCommand):
"-m", "-m",
"--monitor", "--monitor",
action="store_true", action="store_true",
help="Run in monitor mode, watch for modification in the " help="Run in monitor mode, watch for modification in the " "filesystem and react in consequence",
"filesystem and react in consequence",
) )
def handle(self, *args, **options): def handle(self, *args, **options):
SoundMonitor() monitor = SoundMonitor()
if options.get("scan"): if options.get("scan"):
self.scan() monitor.scan()
# if options.get('quality_check'):
# self.check_quality(check=(not options.get('scan')))
if options.get("monitor"): if options.get("monitor"):
self.monitor() monitor.monitor()

View File

@ -28,8 +28,7 @@ class Command(BaseCommand):
"--sample_length", "--sample_length",
type=int, type=int,
default=120, default=120,
help="size of sample to analyse in seconds. If not set (or 0), " help="size of sample to analyse in seconds. If not set (or 0), " "does not analyse by sample",
"does not analyse by sample",
) )
parser.add_argument( parser.add_argument(
"-a", "-a",
@ -43,8 +42,7 @@ class Command(BaseCommand):
"--range", "--range",
type=float, type=float,
nargs=2, nargs=2,
help="range of minimal and maximal accepted value such as: " help="range of minimal and maximal accepted value such as: " "--range min max",
"--range min max",
) )
parser.add_argument( parser.add_argument(
"-i", "-i",
@ -64,10 +62,7 @@ class Command(BaseCommand):
raise CommandError("no attribute specified") raise CommandError("no attribute specified")
# sound analyse and checks # sound analyse and checks
self.sounds = [ self.sounds = [SoundStats(path, options.get("sample_length")) for path in options.get("files")]
SoundStats(path, options.get("sample_length"))
for path in options.get("files")
]
self.bad = [] self.bad = []
self.good = [] self.good = []
for sound in self.sounds: for sound in self.sounds:

View File

@ -84,9 +84,7 @@ class Migration(migrations.Migration):
options={ options={
"verbose_name": "Diffusion", "verbose_name": "Diffusion",
"verbose_name_plural": "Diffusions", "verbose_name_plural": "Diffusions",
"permissions": ( "permissions": (("programming", "edit the diffusion's planification"),),
("programming", "edit the diffusion's planification"),
),
}, },
), ),
migrations.CreateModel( migrations.CreateModel(
@ -125,22 +123,16 @@ class Migration(migrations.Migration):
), ),
( (
"content", "content",
ckeditor.fields.RichTextField( ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
blank=True, null=True, verbose_name="content"
),
), ),
("pub_date", models.DateTimeField(blank=True, null=True)), ("pub_date", models.DateTimeField(blank=True, null=True)),
( (
"featured", "featured",
models.BooleanField( models.BooleanField(default=False, verbose_name="featured"),
default=False, verbose_name="featured"
),
), ),
( (
"allow_comments", "allow_comments",
models.BooleanField( models.BooleanField(default=True, verbose_name="allow comments"),
default=True, verbose_name="allow comments"
),
), ),
( (
"category", "category",
@ -458,9 +450,7 @@ class Migration(migrations.Migration):
("name", models.CharField(max_length=64, verbose_name="name")), ("name", models.CharField(max_length=64, verbose_name="name")),
( (
"slug", "slug",
models.SlugField( models.SlugField(max_length=64, unique=True, verbose_name="slug"),
max_length=64, unique=True, verbose_name="slug"
),
), ),
( (
"path", "path",
@ -566,9 +556,7 @@ class Migration(migrations.Migration):
), ),
( (
"content", "content",
ckeditor.fields.RichTextField( ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
blank=True, null=True, verbose_name="content"
),
), ),
( (
"view", "view",
@ -949,9 +937,7 @@ class Migration(migrations.Migration):
), ),
( (
"time", "time",
models.TimeField( models.TimeField(help_text="start time", verbose_name="time"),
help_text="start time", verbose_name="time"
),
), ),
( (
"timezone", "timezone",
@ -1643,9 +1629,7 @@ class Migration(migrations.Migration):
), ),
( (
"duration", "duration",
models.TimeField( models.TimeField(help_text="regular duration", verbose_name="duration"),
help_text="regular duration", verbose_name="duration"
),
), ),
( (
"frequency", "frequency",

View File

@ -33,9 +33,7 @@ class Migration(migrations.Migration):
migrations.AlterField( migrations.AlterField(
model_name="page", model_name="page",
name="content", name="content",
field=ckeditor_uploader.fields.RichTextUploadingField( field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
blank=True, null=True, verbose_name="content"
),
), ),
migrations.AlterField( migrations.AlterField(
model_name="sound", model_name="sound",
@ -52,8 +50,6 @@ class Migration(migrations.Migration):
migrations.AlterField( migrations.AlterField(
model_name="staticpage", model_name="staticpage",
name="content", name="content",
field=ckeditor_uploader.fields.RichTextUploadingField( field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
blank=True, null=True, verbose_name="content"
),
), ),
] ]

View File

@ -12,9 +12,7 @@ class Migration(migrations.Migration):
migrations.AlterModelOptions( migrations.AlterModelOptions(
name="diffusion", name="diffusion",
options={ options={
"permissions": ( "permissions": (("programming", "edit the diffusions' planification"),),
("programming", "edit the diffusions' planification"),
),
"verbose_name": "Diffusion", "verbose_name": "Diffusion",
"verbose_name_plural": "Diffusions", "verbose_name_plural": "Diffusions",
}, },
@ -22,9 +20,7 @@ class Migration(migrations.Migration):
migrations.AddField( migrations.AddField(
model_name="track", model_name="track",
name="album", name="album",
field=models.CharField( field=models.CharField(default="", max_length=128, verbose_name="album"),
default="", max_length=128, verbose_name="album"
),
), ),
migrations.AlterField( migrations.AlterField(
model_name="schedule", model_name="schedule",

View File

@ -12,8 +12,6 @@ class Migration(migrations.Migration):
migrations.AddField( migrations.AddField(
model_name="track", model_name="track",
name="year", name="year",
field=models.IntegerField( field=models.IntegerField(blank=True, null=True, verbose_name="year"),
blank=True, null=True, verbose_name="year"
),
), ),
] ]

View File

@ -12,8 +12,6 @@ class Migration(migrations.Migration):
migrations.AlterField( migrations.AlterField(
model_name="track", model_name="track",
name="album", name="album",
field=models.CharField( field=models.CharField(blank=True, max_length=128, null=True, verbose_name="album"),
blank=True, max_length=128, null=True, verbose_name="album"
),
), ),
] ]

View File

@ -30,9 +30,7 @@ class Migration(migrations.Migration):
), ),
( (
"playlist_editor_sep", "playlist_editor_sep",
models.CharField( models.CharField(max_length=16, verbose_name="Playlist Editor Separator"),
max_length=16, verbose_name="Playlist Editor Separator"
),
), ),
( (
"user", "user",

View File

@ -0,0 +1,623 @@
# Generated by Django 4.2.5 on 2023-10-18 07:26
import aircox.models.schedule
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("aircox", "0013_alter_schedule_timezone_alter_station_hosts"),
]
operations = [
migrations.AlterField(
model_name="schedule",
name="timezone",
field=models.CharField(
choices=[
("Africa/Abidjan", "Africa/Abidjan"),
("Africa/Accra", "Africa/Accra"),
("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
("Africa/Algiers", "Africa/Algiers"),
("Africa/Asmara", "Africa/Asmara"),
("Africa/Asmera", "Africa/Asmera"),
("Africa/Bamako", "Africa/Bamako"),
("Africa/Bangui", "Africa/Bangui"),
("Africa/Banjul", "Africa/Banjul"),
("Africa/Bissau", "Africa/Bissau"),
("Africa/Blantyre", "Africa/Blantyre"),
("Africa/Brazzaville", "Africa/Brazzaville"),
("Africa/Bujumbura", "Africa/Bujumbura"),
("Africa/Cairo", "Africa/Cairo"),
("Africa/Casablanca", "Africa/Casablanca"),
("Africa/Ceuta", "Africa/Ceuta"),
("Africa/Conakry", "Africa/Conakry"),
("Africa/Dakar", "Africa/Dakar"),
("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"),
("Africa/Djibouti", "Africa/Djibouti"),
("Africa/Douala", "Africa/Douala"),
("Africa/El_Aaiun", "Africa/El_Aaiun"),
("Africa/Freetown", "Africa/Freetown"),
("Africa/Gaborone", "Africa/Gaborone"),
("Africa/Harare", "Africa/Harare"),
("Africa/Johannesburg", "Africa/Johannesburg"),
("Africa/Juba", "Africa/Juba"),
("Africa/Kampala", "Africa/Kampala"),
("Africa/Khartoum", "Africa/Khartoum"),
("Africa/Kigali", "Africa/Kigali"),
("Africa/Kinshasa", "Africa/Kinshasa"),
("Africa/Lagos", "Africa/Lagos"),
("Africa/Libreville", "Africa/Libreville"),
("Africa/Lome", "Africa/Lome"),
("Africa/Luanda", "Africa/Luanda"),
("Africa/Lubumbashi", "Africa/Lubumbashi"),
("Africa/Lusaka", "Africa/Lusaka"),
("Africa/Malabo", "Africa/Malabo"),
("Africa/Maputo", "Africa/Maputo"),
("Africa/Maseru", "Africa/Maseru"),
("Africa/Mbabane", "Africa/Mbabane"),
("Africa/Mogadishu", "Africa/Mogadishu"),
("Africa/Monrovia", "Africa/Monrovia"),
("Africa/Nairobi", "Africa/Nairobi"),
("Africa/Ndjamena", "Africa/Ndjamena"),
("Africa/Niamey", "Africa/Niamey"),
("Africa/Nouakchott", "Africa/Nouakchott"),
("Africa/Ouagadougou", "Africa/Ouagadougou"),
("Africa/Porto-Novo", "Africa/Porto-Novo"),
("Africa/Sao_Tome", "Africa/Sao_Tome"),
("Africa/Timbuktu", "Africa/Timbuktu"),
("Africa/Tripoli", "Africa/Tripoli"),
("Africa/Tunis", "Africa/Tunis"),
("Africa/Windhoek", "Africa/Windhoek"),
("America/Adak", "America/Adak"),
("America/Anchorage", "America/Anchorage"),
("America/Anguilla", "America/Anguilla"),
("America/Antigua", "America/Antigua"),
("America/Araguaina", "America/Araguaina"),
("America/Argentina/Buenos_Aires", "America/Argentina/Buenos_Aires"),
("America/Argentina/Catamarca", "America/Argentina/Catamarca"),
("America/Argentina/ComodRivadavia", "America/Argentina/ComodRivadavia"),
("America/Argentina/Cordoba", "America/Argentina/Cordoba"),
("America/Argentina/Jujuy", "America/Argentina/Jujuy"),
("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"),
("America/Argentina/Mendoza", "America/Argentina/Mendoza"),
("America/Argentina/Rio_Gallegos", "America/Argentina/Rio_Gallegos"),
("America/Argentina/Salta", "America/Argentina/Salta"),
("America/Argentina/San_Juan", "America/Argentina/San_Juan"),
("America/Argentina/San_Luis", "America/Argentina/San_Luis"),
("America/Argentina/Tucuman", "America/Argentina/Tucuman"),
("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"),
("America/Aruba", "America/Aruba"),
("America/Asuncion", "America/Asuncion"),
("America/Atikokan", "America/Atikokan"),
("America/Atka", "America/Atka"),
("America/Bahia", "America/Bahia"),
("America/Bahia_Banderas", "America/Bahia_Banderas"),
("America/Barbados", "America/Barbados"),
("America/Belem", "America/Belem"),
("America/Belize", "America/Belize"),
("America/Blanc-Sablon", "America/Blanc-Sablon"),
("America/Boa_Vista", "America/Boa_Vista"),
("America/Bogota", "America/Bogota"),
("America/Boise", "America/Boise"),
("America/Buenos_Aires", "America/Buenos_Aires"),
("America/Cambridge_Bay", "America/Cambridge_Bay"),
("America/Campo_Grande", "America/Campo_Grande"),
("America/Cancun", "America/Cancun"),
("America/Caracas", "America/Caracas"),
("America/Catamarca", "America/Catamarca"),
("America/Cayenne", "America/Cayenne"),
("America/Cayman", "America/Cayman"),
("America/Chicago", "America/Chicago"),
("America/Chihuahua", "America/Chihuahua"),
("America/Ciudad_Juarez", "America/Ciudad_Juarez"),
("America/Coral_Harbour", "America/Coral_Harbour"),
("America/Cordoba", "America/Cordoba"),
("America/Costa_Rica", "America/Costa_Rica"),
("America/Creston", "America/Creston"),
("America/Cuiaba", "America/Cuiaba"),
("America/Curacao", "America/Curacao"),
("America/Danmarkshavn", "America/Danmarkshavn"),
("America/Dawson", "America/Dawson"),
("America/Dawson_Creek", "America/Dawson_Creek"),
("America/Denver", "America/Denver"),
("America/Detroit", "America/Detroit"),
("America/Dominica", "America/Dominica"),
("America/Edmonton", "America/Edmonton"),
("America/Eirunepe", "America/Eirunepe"),
("America/El_Salvador", "America/El_Salvador"),
("America/Ensenada", "America/Ensenada"),
("America/Fort_Nelson", "America/Fort_Nelson"),
("America/Fort_Wayne", "America/Fort_Wayne"),
("America/Fortaleza", "America/Fortaleza"),
("America/Glace_Bay", "America/Glace_Bay"),
("America/Godthab", "America/Godthab"),
("America/Goose_Bay", "America/Goose_Bay"),
("America/Grand_Turk", "America/Grand_Turk"),
("America/Grenada", "America/Grenada"),
("America/Guadeloupe", "America/Guadeloupe"),
("America/Guatemala", "America/Guatemala"),
("America/Guayaquil", "America/Guayaquil"),
("America/Guyana", "America/Guyana"),
("America/Halifax", "America/Halifax"),
("America/Havana", "America/Havana"),
("America/Hermosillo", "America/Hermosillo"),
("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"),
("America/Indiana/Knox", "America/Indiana/Knox"),
("America/Indiana/Marengo", "America/Indiana/Marengo"),
("America/Indiana/Petersburg", "America/Indiana/Petersburg"),
("America/Indiana/Tell_City", "America/Indiana/Tell_City"),
("America/Indiana/Vevay", "America/Indiana/Vevay"),
("America/Indiana/Vincennes", "America/Indiana/Vincennes"),
("America/Indiana/Winamac", "America/Indiana/Winamac"),
("America/Indianapolis", "America/Indianapolis"),
("America/Inuvik", "America/Inuvik"),
("America/Iqaluit", "America/Iqaluit"),
("America/Jamaica", "America/Jamaica"),
("America/Jujuy", "America/Jujuy"),
("America/Juneau", "America/Juneau"),
("America/Kentucky/Louisville", "America/Kentucky/Louisville"),
("America/Kentucky/Monticello", "America/Kentucky/Monticello"),
("America/Knox_IN", "America/Knox_IN"),
("America/Kralendijk", "America/Kralendijk"),
("America/La_Paz", "America/La_Paz"),
("America/Lima", "America/Lima"),
("America/Los_Angeles", "America/Los_Angeles"),
("America/Louisville", "America/Louisville"),
("America/Lower_Princes", "America/Lower_Princes"),
("America/Maceio", "America/Maceio"),
("America/Managua", "America/Managua"),
("America/Manaus", "America/Manaus"),
("America/Marigot", "America/Marigot"),
("America/Martinique", "America/Martinique"),
("America/Matamoros", "America/Matamoros"),
("America/Mazatlan", "America/Mazatlan"),
("America/Mendoza", "America/Mendoza"),
("America/Menominee", "America/Menominee"),
("America/Merida", "America/Merida"),
("America/Metlakatla", "America/Metlakatla"),
("America/Mexico_City", "America/Mexico_City"),
("America/Miquelon", "America/Miquelon"),
("America/Moncton", "America/Moncton"),
("America/Monterrey", "America/Monterrey"),
("America/Montevideo", "America/Montevideo"),
("America/Montreal", "America/Montreal"),
("America/Montserrat", "America/Montserrat"),
("America/Nassau", "America/Nassau"),
("America/New_York", "America/New_York"),
("America/Nipigon", "America/Nipigon"),
("America/Nome", "America/Nome"),
("America/Noronha", "America/Noronha"),
("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"),
("America/North_Dakota/Center", "America/North_Dakota/Center"),
("America/North_Dakota/New_Salem", "America/North_Dakota/New_Salem"),
("America/Nuuk", "America/Nuuk"),
("America/Ojinaga", "America/Ojinaga"),
("America/Panama", "America/Panama"),
("America/Pangnirtung", "America/Pangnirtung"),
("America/Paramaribo", "America/Paramaribo"),
("America/Phoenix", "America/Phoenix"),
("America/Port-au-Prince", "America/Port-au-Prince"),
("America/Port_of_Spain", "America/Port_of_Spain"),
("America/Porto_Acre", "America/Porto_Acre"),
("America/Porto_Velho", "America/Porto_Velho"),
("America/Puerto_Rico", "America/Puerto_Rico"),
("America/Punta_Arenas", "America/Punta_Arenas"),
("America/Rainy_River", "America/Rainy_River"),
("America/Rankin_Inlet", "America/Rankin_Inlet"),
("America/Recife", "America/Recife"),
("America/Regina", "America/Regina"),
("America/Resolute", "America/Resolute"),
("America/Rio_Branco", "America/Rio_Branco"),
("America/Rosario", "America/Rosario"),
("America/Santa_Isabel", "America/Santa_Isabel"),
("America/Santarem", "America/Santarem"),
("America/Santiago", "America/Santiago"),
("America/Santo_Domingo", "America/Santo_Domingo"),
("America/Sao_Paulo", "America/Sao_Paulo"),
("America/Scoresbysund", "America/Scoresbysund"),
("America/Shiprock", "America/Shiprock"),
("America/Sitka", "America/Sitka"),
("America/St_Barthelemy", "America/St_Barthelemy"),
("America/St_Johns", "America/St_Johns"),
("America/St_Kitts", "America/St_Kitts"),
("America/St_Lucia", "America/St_Lucia"),
("America/St_Thomas", "America/St_Thomas"),
("America/St_Vincent", "America/St_Vincent"),
("America/Swift_Current", "America/Swift_Current"),
("America/Tegucigalpa", "America/Tegucigalpa"),
("America/Thule", "America/Thule"),
("America/Thunder_Bay", "America/Thunder_Bay"),
("America/Tijuana", "America/Tijuana"),
("America/Toronto", "America/Toronto"),
("America/Tortola", "America/Tortola"),
("America/Vancouver", "America/Vancouver"),
("America/Virgin", "America/Virgin"),
("America/Whitehorse", "America/Whitehorse"),
("America/Winnipeg", "America/Winnipeg"),
("America/Yakutat", "America/Yakutat"),
("America/Yellowknife", "America/Yellowknife"),
("Antarctica/Casey", "Antarctica/Casey"),
("Antarctica/Davis", "Antarctica/Davis"),
("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"),
("Antarctica/Macquarie", "Antarctica/Macquarie"),
("Antarctica/Mawson", "Antarctica/Mawson"),
("Antarctica/McMurdo", "Antarctica/McMurdo"),
("Antarctica/Palmer", "Antarctica/Palmer"),
("Antarctica/Rothera", "Antarctica/Rothera"),
("Antarctica/South_Pole", "Antarctica/South_Pole"),
("Antarctica/Syowa", "Antarctica/Syowa"),
("Antarctica/Troll", "Antarctica/Troll"),
("Antarctica/Vostok", "Antarctica/Vostok"),
("Arctic/Longyearbyen", "Arctic/Longyearbyen"),
("Asia/Aden", "Asia/Aden"),
("Asia/Almaty", "Asia/Almaty"),
("Asia/Amman", "Asia/Amman"),
("Asia/Anadyr", "Asia/Anadyr"),
("Asia/Aqtau", "Asia/Aqtau"),
("Asia/Aqtobe", "Asia/Aqtobe"),
("Asia/Ashgabat", "Asia/Ashgabat"),
("Asia/Ashkhabad", "Asia/Ashkhabad"),
("Asia/Atyrau", "Asia/Atyrau"),
("Asia/Baghdad", "Asia/Baghdad"),
("Asia/Bahrain", "Asia/Bahrain"),
("Asia/Baku", "Asia/Baku"),
("Asia/Bangkok", "Asia/Bangkok"),
("Asia/Barnaul", "Asia/Barnaul"),
("Asia/Beirut", "Asia/Beirut"),
("Asia/Bishkek", "Asia/Bishkek"),
("Asia/Brunei", "Asia/Brunei"),
("Asia/Calcutta", "Asia/Calcutta"),
("Asia/Chita", "Asia/Chita"),
("Asia/Choibalsan", "Asia/Choibalsan"),
("Asia/Chongqing", "Asia/Chongqing"),
("Asia/Chungking", "Asia/Chungking"),
("Asia/Colombo", "Asia/Colombo"),
("Asia/Dacca", "Asia/Dacca"),
("Asia/Damascus", "Asia/Damascus"),
("Asia/Dhaka", "Asia/Dhaka"),
("Asia/Dili", "Asia/Dili"),
("Asia/Dubai", "Asia/Dubai"),
("Asia/Dushanbe", "Asia/Dushanbe"),
("Asia/Famagusta", "Asia/Famagusta"),
("Asia/Gaza", "Asia/Gaza"),
("Asia/Harbin", "Asia/Harbin"),
("Asia/Hebron", "Asia/Hebron"),
("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"),
("Asia/Hong_Kong", "Asia/Hong_Kong"),
("Asia/Hovd", "Asia/Hovd"),
("Asia/Irkutsk", "Asia/Irkutsk"),
("Asia/Istanbul", "Asia/Istanbul"),
("Asia/Jakarta", "Asia/Jakarta"),
("Asia/Jayapura", "Asia/Jayapura"),
("Asia/Jerusalem", "Asia/Jerusalem"),
("Asia/Kabul", "Asia/Kabul"),
("Asia/Kamchatka", "Asia/Kamchatka"),
("Asia/Karachi", "Asia/Karachi"),
("Asia/Kashgar", "Asia/Kashgar"),
("Asia/Kathmandu", "Asia/Kathmandu"),
("Asia/Katmandu", "Asia/Katmandu"),
("Asia/Khandyga", "Asia/Khandyga"),
("Asia/Kolkata", "Asia/Kolkata"),
("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"),
("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"),
("Asia/Kuching", "Asia/Kuching"),
("Asia/Kuwait", "Asia/Kuwait"),
("Asia/Macao", "Asia/Macao"),
("Asia/Macau", "Asia/Macau"),
("Asia/Magadan", "Asia/Magadan"),
("Asia/Makassar", "Asia/Makassar"),
("Asia/Manila", "Asia/Manila"),
("Asia/Muscat", "Asia/Muscat"),
("Asia/Nicosia", "Asia/Nicosia"),
("Asia/Novokuznetsk", "Asia/Novokuznetsk"),
("Asia/Novosibirsk", "Asia/Novosibirsk"),
("Asia/Omsk", "Asia/Omsk"),
("Asia/Oral", "Asia/Oral"),
("Asia/Phnom_Penh", "Asia/Phnom_Penh"),
("Asia/Pontianak", "Asia/Pontianak"),
("Asia/Pyongyang", "Asia/Pyongyang"),
("Asia/Qatar", "Asia/Qatar"),
("Asia/Qostanay", "Asia/Qostanay"),
("Asia/Qyzylorda", "Asia/Qyzylorda"),
("Asia/Rangoon", "Asia/Rangoon"),
("Asia/Riyadh", "Asia/Riyadh"),
("Asia/Saigon", "Asia/Saigon"),
("Asia/Sakhalin", "Asia/Sakhalin"),
("Asia/Samarkand", "Asia/Samarkand"),
("Asia/Seoul", "Asia/Seoul"),
("Asia/Shanghai", "Asia/Shanghai"),
("Asia/Singapore", "Asia/Singapore"),
("Asia/Srednekolymsk", "Asia/Srednekolymsk"),
("Asia/Taipei", "Asia/Taipei"),
("Asia/Tashkent", "Asia/Tashkent"),
("Asia/Tbilisi", "Asia/Tbilisi"),
("Asia/Tehran", "Asia/Tehran"),
("Asia/Tel_Aviv", "Asia/Tel_Aviv"),
("Asia/Thimbu", "Asia/Thimbu"),
("Asia/Thimphu", "Asia/Thimphu"),
("Asia/Tokyo", "Asia/Tokyo"),
("Asia/Tomsk", "Asia/Tomsk"),
("Asia/Ujung_Pandang", "Asia/Ujung_Pandang"),
("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"),
("Asia/Ulan_Bator", "Asia/Ulan_Bator"),
("Asia/Urumqi", "Asia/Urumqi"),
("Asia/Ust-Nera", "Asia/Ust-Nera"),
("Asia/Vientiane", "Asia/Vientiane"),
("Asia/Vladivostok", "Asia/Vladivostok"),
("Asia/Yakutsk", "Asia/Yakutsk"),
("Asia/Yangon", "Asia/Yangon"),
("Asia/Yekaterinburg", "Asia/Yekaterinburg"),
("Asia/Yerevan", "Asia/Yerevan"),
("Atlantic/Azores", "Atlantic/Azores"),
("Atlantic/Bermuda", "Atlantic/Bermuda"),
("Atlantic/Canary", "Atlantic/Canary"),
("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"),
("Atlantic/Faeroe", "Atlantic/Faeroe"),
("Atlantic/Faroe", "Atlantic/Faroe"),
("Atlantic/Jan_Mayen", "Atlantic/Jan_Mayen"),
("Atlantic/Madeira", "Atlantic/Madeira"),
("Atlantic/Reykjavik", "Atlantic/Reykjavik"),
("Atlantic/South_Georgia", "Atlantic/South_Georgia"),
("Atlantic/St_Helena", "Atlantic/St_Helena"),
("Atlantic/Stanley", "Atlantic/Stanley"),
("Australia/ACT", "Australia/ACT"),
("Australia/Adelaide", "Australia/Adelaide"),
("Australia/Brisbane", "Australia/Brisbane"),
("Australia/Broken_Hill", "Australia/Broken_Hill"),
("Australia/Canberra", "Australia/Canberra"),
("Australia/Currie", "Australia/Currie"),
("Australia/Darwin", "Australia/Darwin"),
("Australia/Eucla", "Australia/Eucla"),
("Australia/Hobart", "Australia/Hobart"),
("Australia/LHI", "Australia/LHI"),
("Australia/Lindeman", "Australia/Lindeman"),
("Australia/Lord_Howe", "Australia/Lord_Howe"),
("Australia/Melbourne", "Australia/Melbourne"),
("Australia/NSW", "Australia/NSW"),
("Australia/North", "Australia/North"),
("Australia/Perth", "Australia/Perth"),
("Australia/Queensland", "Australia/Queensland"),
("Australia/South", "Australia/South"),
("Australia/Sydney", "Australia/Sydney"),
("Australia/Tasmania", "Australia/Tasmania"),
("Australia/Victoria", "Australia/Victoria"),
("Australia/West", "Australia/West"),
("Australia/Yancowinna", "Australia/Yancowinna"),
("Brazil/Acre", "Brazil/Acre"),
("Brazil/DeNoronha", "Brazil/DeNoronha"),
("Brazil/East", "Brazil/East"),
("Brazil/West", "Brazil/West"),
("CET", "CET"),
("CST6CDT", "CST6CDT"),
("Canada/Atlantic", "Canada/Atlantic"),
("Canada/Central", "Canada/Central"),
("Canada/Eastern", "Canada/Eastern"),
("Canada/Mountain", "Canada/Mountain"),
("Canada/Newfoundland", "Canada/Newfoundland"),
("Canada/Pacific", "Canada/Pacific"),
("Canada/Saskatchewan", "Canada/Saskatchewan"),
("Canada/Yukon", "Canada/Yukon"),
("Chile/Continental", "Chile/Continental"),
("Chile/EasterIsland", "Chile/EasterIsland"),
("Cuba", "Cuba"),
("EET", "EET"),
("EST", "EST"),
("EST5EDT", "EST5EDT"),
("Egypt", "Egypt"),
("Eire", "Eire"),
("Etc/GMT", "Etc/GMT"),
("Etc/GMT+0", "Etc/GMT+0"),
("Etc/GMT+1", "Etc/GMT+1"),
("Etc/GMT+10", "Etc/GMT+10"),
("Etc/GMT+11", "Etc/GMT+11"),
("Etc/GMT+12", "Etc/GMT+12"),
("Etc/GMT+2", "Etc/GMT+2"),
("Etc/GMT+3", "Etc/GMT+3"),
("Etc/GMT+4", "Etc/GMT+4"),
("Etc/GMT+5", "Etc/GMT+5"),
("Etc/GMT+6", "Etc/GMT+6"),
("Etc/GMT+7", "Etc/GMT+7"),
("Etc/GMT+8", "Etc/GMT+8"),
("Etc/GMT+9", "Etc/GMT+9"),
("Etc/GMT-0", "Etc/GMT-0"),
("Etc/GMT-1", "Etc/GMT-1"),
("Etc/GMT-10", "Etc/GMT-10"),
("Etc/GMT-11", "Etc/GMT-11"),
("Etc/GMT-12", "Etc/GMT-12"),
("Etc/GMT-13", "Etc/GMT-13"),
("Etc/GMT-14", "Etc/GMT-14"),
("Etc/GMT-2", "Etc/GMT-2"),
("Etc/GMT-3", "Etc/GMT-3"),
("Etc/GMT-4", "Etc/GMT-4"),
("Etc/GMT-5", "Etc/GMT-5"),
("Etc/GMT-6", "Etc/GMT-6"),
("Etc/GMT-7", "Etc/GMT-7"),
("Etc/GMT-8", "Etc/GMT-8"),
("Etc/GMT-9", "Etc/GMT-9"),
("Etc/GMT0", "Etc/GMT0"),
("Etc/Greenwich", "Etc/Greenwich"),
("Etc/UCT", "Etc/UCT"),
("Etc/UTC", "Etc/UTC"),
("Etc/Universal", "Etc/Universal"),
("Etc/Zulu", "Etc/Zulu"),
("Europe/Amsterdam", "Europe/Amsterdam"),
("Europe/Andorra", "Europe/Andorra"),
("Europe/Astrakhan", "Europe/Astrakhan"),
("Europe/Athens", "Europe/Athens"),
("Europe/Belfast", "Europe/Belfast"),
("Europe/Belgrade", "Europe/Belgrade"),
("Europe/Berlin", "Europe/Berlin"),
("Europe/Bratislava", "Europe/Bratislava"),
("Europe/Brussels", "Europe/Brussels"),
("Europe/Bucharest", "Europe/Bucharest"),
("Europe/Budapest", "Europe/Budapest"),
("Europe/Busingen", "Europe/Busingen"),
("Europe/Chisinau", "Europe/Chisinau"),
("Europe/Copenhagen", "Europe/Copenhagen"),
("Europe/Dublin", "Europe/Dublin"),
("Europe/Gibraltar", "Europe/Gibraltar"),
("Europe/Guernsey", "Europe/Guernsey"),
("Europe/Helsinki", "Europe/Helsinki"),
("Europe/Isle_of_Man", "Europe/Isle_of_Man"),
("Europe/Istanbul", "Europe/Istanbul"),
("Europe/Jersey", "Europe/Jersey"),
("Europe/Kaliningrad", "Europe/Kaliningrad"),
("Europe/Kiev", "Europe/Kiev"),
("Europe/Kirov", "Europe/Kirov"),
("Europe/Kyiv", "Europe/Kyiv"),
("Europe/Lisbon", "Europe/Lisbon"),
("Europe/Ljubljana", "Europe/Ljubljana"),
("Europe/London", "Europe/London"),
("Europe/Luxembourg", "Europe/Luxembourg"),
("Europe/Madrid", "Europe/Madrid"),
("Europe/Malta", "Europe/Malta"),
("Europe/Mariehamn", "Europe/Mariehamn"),
("Europe/Minsk", "Europe/Minsk"),
("Europe/Monaco", "Europe/Monaco"),
("Europe/Moscow", "Europe/Moscow"),
("Europe/Nicosia", "Europe/Nicosia"),
("Europe/Oslo", "Europe/Oslo"),
("Europe/Paris", "Europe/Paris"),
("Europe/Podgorica", "Europe/Podgorica"),
("Europe/Prague", "Europe/Prague"),
("Europe/Riga", "Europe/Riga"),
("Europe/Rome", "Europe/Rome"),
("Europe/Samara", "Europe/Samara"),
("Europe/San_Marino", "Europe/San_Marino"),
("Europe/Sarajevo", "Europe/Sarajevo"),
("Europe/Saratov", "Europe/Saratov"),
("Europe/Simferopol", "Europe/Simferopol"),
("Europe/Skopje", "Europe/Skopje"),
("Europe/Sofia", "Europe/Sofia"),
("Europe/Stockholm", "Europe/Stockholm"),
("Europe/Tallinn", "Europe/Tallinn"),
("Europe/Tirane", "Europe/Tirane"),
("Europe/Tiraspol", "Europe/Tiraspol"),
("Europe/Ulyanovsk", "Europe/Ulyanovsk"),
("Europe/Uzhgorod", "Europe/Uzhgorod"),
("Europe/Vaduz", "Europe/Vaduz"),
("Europe/Vatican", "Europe/Vatican"),
("Europe/Vienna", "Europe/Vienna"),
("Europe/Vilnius", "Europe/Vilnius"),
("Europe/Volgograd", "Europe/Volgograd"),
("Europe/Warsaw", "Europe/Warsaw"),
("Europe/Zagreb", "Europe/Zagreb"),
("Europe/Zaporozhye", "Europe/Zaporozhye"),
("Europe/Zurich", "Europe/Zurich"),
("Factory", "Factory"),
("GB", "GB"),
("GB-Eire", "GB-Eire"),
("GMT", "GMT"),
("GMT+0", "GMT+0"),
("GMT-0", "GMT-0"),
("GMT0", "GMT0"),
("Greenwich", "Greenwich"),
("HST", "HST"),
("Hongkong", "Hongkong"),
("Iceland", "Iceland"),
("Indian/Antananarivo", "Indian/Antananarivo"),
("Indian/Chagos", "Indian/Chagos"),
("Indian/Christmas", "Indian/Christmas"),
("Indian/Cocos", "Indian/Cocos"),
("Indian/Comoro", "Indian/Comoro"),
("Indian/Kerguelen", "Indian/Kerguelen"),
("Indian/Mahe", "Indian/Mahe"),
("Indian/Maldives", "Indian/Maldives"),
("Indian/Mauritius", "Indian/Mauritius"),
("Indian/Mayotte", "Indian/Mayotte"),
("Indian/Reunion", "Indian/Reunion"),
("Iran", "Iran"),
("Israel", "Israel"),
("Jamaica", "Jamaica"),
("Japan", "Japan"),
("Kwajalein", "Kwajalein"),
("Libya", "Libya"),
("MET", "MET"),
("MST", "MST"),
("MST7MDT", "MST7MDT"),
("Mexico/BajaNorte", "Mexico/BajaNorte"),
("Mexico/BajaSur", "Mexico/BajaSur"),
("Mexico/General", "Mexico/General"),
("NZ", "NZ"),
("NZ-CHAT", "NZ-CHAT"),
("Navajo", "Navajo"),
("PRC", "PRC"),
("PST8PDT", "PST8PDT"),
("Pacific/Apia", "Pacific/Apia"),
("Pacific/Auckland", "Pacific/Auckland"),
("Pacific/Bougainville", "Pacific/Bougainville"),
("Pacific/Chatham", "Pacific/Chatham"),
("Pacific/Chuuk", "Pacific/Chuuk"),
("Pacific/Easter", "Pacific/Easter"),
("Pacific/Efate", "Pacific/Efate"),
("Pacific/Enderbury", "Pacific/Enderbury"),
("Pacific/Fakaofo", "Pacific/Fakaofo"),
("Pacific/Fiji", "Pacific/Fiji"),
("Pacific/Funafuti", "Pacific/Funafuti"),
("Pacific/Galapagos", "Pacific/Galapagos"),
("Pacific/Gambier", "Pacific/Gambier"),
("Pacific/Guadalcanal", "Pacific/Guadalcanal"),
("Pacific/Guam", "Pacific/Guam"),
("Pacific/Honolulu", "Pacific/Honolulu"),
("Pacific/Johnston", "Pacific/Johnston"),
("Pacific/Kanton", "Pacific/Kanton"),
("Pacific/Kiritimati", "Pacific/Kiritimati"),
("Pacific/Kosrae", "Pacific/Kosrae"),
("Pacific/Kwajalein", "Pacific/Kwajalein"),
("Pacific/Majuro", "Pacific/Majuro"),
("Pacific/Marquesas", "Pacific/Marquesas"),
("Pacific/Midway", "Pacific/Midway"),
("Pacific/Nauru", "Pacific/Nauru"),
("Pacific/Niue", "Pacific/Niue"),
("Pacific/Norfolk", "Pacific/Norfolk"),
("Pacific/Noumea", "Pacific/Noumea"),
("Pacific/Pago_Pago", "Pacific/Pago_Pago"),
("Pacific/Palau", "Pacific/Palau"),
("Pacific/Pitcairn", "Pacific/Pitcairn"),
("Pacific/Pohnpei", "Pacific/Pohnpei"),
("Pacific/Ponape", "Pacific/Ponape"),
("Pacific/Port_Moresby", "Pacific/Port_Moresby"),
("Pacific/Rarotonga", "Pacific/Rarotonga"),
("Pacific/Saipan", "Pacific/Saipan"),
("Pacific/Samoa", "Pacific/Samoa"),
("Pacific/Tahiti", "Pacific/Tahiti"),
("Pacific/Tarawa", "Pacific/Tarawa"),
("Pacific/Tongatapu", "Pacific/Tongatapu"),
("Pacific/Truk", "Pacific/Truk"),
("Pacific/Wake", "Pacific/Wake"),
("Pacific/Wallis", "Pacific/Wallis"),
("Pacific/Yap", "Pacific/Yap"),
("Poland", "Poland"),
("Portugal", "Portugal"),
("ROC", "ROC"),
("ROK", "ROK"),
("Singapore", "Singapore"),
("Turkey", "Turkey"),
("UCT", "UCT"),
("US/Alaska", "US/Alaska"),
("US/Aleutian", "US/Aleutian"),
("US/Arizona", "US/Arizona"),
("US/Central", "US/Central"),
("US/East-Indiana", "US/East-Indiana"),
("US/Eastern", "US/Eastern"),
("US/Hawaii", "US/Hawaii"),
("US/Indiana-Starke", "US/Indiana-Starke"),
("US/Michigan", "US/Michigan"),
("US/Mountain", "US/Mountain"),
("US/Pacific", "US/Pacific"),
("US/Samoa", "US/Samoa"),
("UTC", "UTC"),
("Universal", "Universal"),
("W-SU", "W-SU"),
("WET", "WET"),
("Zulu", "Zulu"),
("localtime", "localtime"),
],
default=aircox.models.schedule.current_timezone_key,
help_text="timezone used for the date",
max_length=100,
verbose_name="timezone",
),
),
]

View File

@ -19,11 +19,7 @@ __all__ = ("Diffusion", "DiffusionQuerySet")
class DiffusionQuerySet(RerunQuerySet): class DiffusionQuerySet(RerunQuerySet):
def episode(self, episode=None, id=None): def episode(self, episode=None, id=None):
"""Diffusions for this episode.""" """Diffusions for this episode."""
return ( return self.filter(episode=episode) if id is None else self.filter(episode__id=id)
self.filter(episode=episode)
if id is None
else self.filter(episode__id=id)
)
def on_air(self): def on_air(self):
"""On air diffusions.""" """On air diffusions."""
@ -40,9 +36,7 @@ class DiffusionQuerySet(RerunQuerySet):
"""Diffusions occuring date.""" """Diffusions occuring date."""
date = date or datetime.date.today() date = date or datetime.date.today()
start = tz.make_aware(tz.datetime.combine(date, datetime.time())) start = tz.make_aware(tz.datetime.combine(date, datetime.time()))
end = tz.make_aware( end = tz.make_aware(tz.datetime.combine(date, datetime.time(23, 59, 59, 999)))
tz.datetime.combine(date, datetime.time(23, 59, 59, 999))
)
# start = tz.get_current_timezone().localize(start) # start = tz.get_current_timezone().localize(start)
# end = tz.get_current_timezone().localize(end) # end = tz.get_current_timezone().localize(end)
qs = self.filter(start__range=(start, end)) qs = self.filter(start__range=(start, end))
@ -50,11 +44,7 @@ class DiffusionQuerySet(RerunQuerySet):
def at(self, date, order=True): def at(self, date, order=True):
"""Return diffusions at specified date or datetime.""" """Return diffusions at specified date or datetime."""
return ( return self.now(date, order) if isinstance(date, tz.datetime) else self.date(date, order)
self.now(date, order)
if isinstance(date, tz.datetime)
else self.date(date, order)
)
def after(self, date=None): def after(self, date=None):
"""Return a queryset of diffusions that happen after the given date """Return a queryset of diffusions that happen after the given date
@ -140,9 +130,7 @@ class Diffusion(Rerun):
class Meta: class Meta:
verbose_name = _("Diffusion") verbose_name = _("Diffusion")
verbose_name_plural = _("Diffusions") verbose_name_plural = _("Diffusions")
permissions = ( permissions = (("programming", _("edit the diffusions' planification")),)
("programming", _("edit the diffusions' planification")),
)
def __str__(self): def __str__(self):
str_ = "{episode} - {date}".format( str_ = "{episode} - {date}".format(
@ -200,11 +188,7 @@ class Diffusion(Rerun):
def is_now(self): def is_now(self):
"""True if diffusion is currently running.""" """True if diffusion is currently running."""
now = tz.now() now = tz.now()
return ( return self.type == self.TYPE_ON_AIR and self.start <= now and self.end >= now
self.type == self.TYPE_ON_AIR
and self.start <= now
and self.end >= now
)
@property @property
def is_today(self): def is_today(self):
@ -214,10 +198,7 @@ class Diffusion(Rerun):
@property @property
def is_live(self): def is_live(self):
"""True if Diffusion is live (False if there are sounds files).""" """True if Diffusion is live (False if there are sounds files)."""
return ( return self.type == self.TYPE_ON_AIR and not self.episode.sound_set.archive().count()
self.type == self.TYPE_ON_AIR
and not self.episode.sound_set.archive().count()
)
def get_playlist(self, **types): def get_playlist(self, **types):
"""Returns sounds as a playlist (list of *local* archive file path). """Returns sounds as a playlist (list of *local* archive file path).
@ -227,9 +208,7 @@ class Diffusion(Rerun):
from .sound import Sound from .sound import Sound
return list( return list(
self.get_sounds(**types) self.get_sounds(**types).filter(path__isnull=False, type=Sound.TYPE_ARCHIVE).values_list("path", flat=True)
.filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
.values_list("path", flat=True)
) )
def get_sounds(self, **types): def get_sounds(self, **types):
@ -241,9 +220,7 @@ class Diffusion(Rerun):
from .sound import Sound from .sound import Sound
sounds = (self.initial or self).sound_set.order_by("type", "path") sounds = (self.initial or self).sound_set.order_by("type", "path")
_in = [ _in = [getattr(Sound.Type, name) for name, value in types.items() if value]
getattr(Sound.Type, name) for name, value in types.items() if value
]
return sounds.filter(type__in=_in) return sounds.filter(type__in=_in)
@ -265,8 +242,7 @@ class Diffusion(Rerun):
# .filter(conflict_with=True) # .filter(conflict_with=True)
return ( return (
Diffusion.objects.filter( Diffusion.objects.filter(
Q(start__lt=self.start, end__gt=self.start) Q(start__lt=self.start, end__gt=self.start) | Q(start__gt=self.start, start__lt=self.end)
| Q(start__gt=self.start, start__lt=self.end)
) )
.exclude(pk=self.pk) .exclude(pk=self.pk)
.distinct() .distinct()
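
The conflict lookup above combines two `Q` objects to catch any diffusion overlapping the current one. A sketch of the same interval-overlap filter in isolation, assuming a Django queryset and with illustrative names:

```
from django.db.models import Q

def overlapping(queryset, start, end):
    """Diffusions that overlap the [start, end] interval, in either direction."""
    # The model method additionally excludes the diffusion's own pk.
    return queryset.filter(
        Q(start__lt=start, end__gt=start)    # started earlier and still running at `start`
        | Q(start__gt=start, start__lt=end)  # starts somewhere inside the interval
    ).distinct()
```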

View File

@ -28,10 +28,7 @@ class Episode(Page):
"""Return serialized data about podcasts.""" """Return serialized data about podcasts."""
from ..serializers import PodcastSerializer from ..serializers import PodcastSerializer
podcasts = [ podcasts = [PodcastSerializer(s).data for s in self.sound_set.public().order_by("type")]
PodcastSerializer(s).data
for s in self.sound_set.public().order_by("type")
]
if self.cover: if self.cover:
options = {"size": (128, 128), "crop": "scale"} options = {"size": (128, 128), "crop": "scale"}
cover = get_thumbnailer(self.cover).get_thumbnail(options).url cover = get_thumbnailer(self.cover).get_thumbnail(options).url
@ -76,6 +73,4 @@ class Episode(Page):
if title is None if title is None
else title else title
) )
return super().get_init_kwargs_from( return super().get_init_kwargs_from(page, title=title, program=page, **kwargs)
page, title=title, program=page, **kwargs
)
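
The `podcasts` property above serializes public sounds and adds a scaled cover URL. A short sketch of the thumbnail part, assuming `easy_thumbnails` as used in the hunk (`cover` stands for any image field value):

```
from easy_thumbnails.files import get_thumbnailer

# Illustrative: 128x128 scaled thumbnail URL for a cover image.
options = {"size": (128, 128), "crop": "scale"}
cover_url = get_thumbnailer(cover).get_thumbnail(options).url
```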

View File

@ -19,11 +19,7 @@ __all__ = ("Log", "LogQuerySet")
class LogQuerySet(models.QuerySet): class LogQuerySet(models.QuerySet):
def station(self, station=None, id=None): def station(self, station=None, id=None):
return ( return self.filter(station=station) if id is None else self.filter(station_id=id)
self.filter(station=station)
if id is None
else self.filter(station_id=id)
)
def date(self, date): def date(self, date):
start = tz.datetime.combine(date, datetime.time()) start = tz.datetime.combine(date, datetime.time())
@ -33,11 +29,7 @@ class LogQuerySet(models.QuerySet):
# return self.filter(date__date=date) # return self.filter(date__date=date)
def after(self, date): def after(self, date):
return ( return self.filter(date__gte=date) if isinstance(date, tz.datetime) else self.filter(date__date__gte=date)
self.filter(date__gte=date)
if isinstance(date, tz.datetime)
else self.filter(date__date__gte=date)
)
def on_air(self): def on_air(self):
return self.filter(type=Log.TYPE_ON_AIR) return self.filter(type=Log.TYPE_ON_AIR)
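
`LogQuerySet.date()` builds day bounds from a plain `date`. A small self-contained sketch of the same idea with timezone-aware bounds (helper name is illustrative):

```
import datetime

from django.utils import timezone as tz

def day_bounds(date):
    """Return aware datetimes covering the whole calendar day `date`."""
    start = tz.make_aware(datetime.datetime.combine(date, datetime.time()))
    return start, start + datetime.timedelta(days=1)
```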

View File

@ -26,9 +26,7 @@ __all__ = (
) )
headline_re = re.compile( headline_re = re.compile(r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?")
r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?"
)
class Renderable: class Renderable:
@ -37,9 +35,7 @@ class Renderable:
def get_template_name(self, widget): def get_template_name(self, widget):
"""Return template name for the provided widget.""" """Return template name for the provided widget."""
return self.template_name.format( return self.template_name.format(prefix=self.template_prefix, widget=widget)
prefix=self.template_prefix, widget=widget
)
class Category(models.Model): class Category(models.Model):
@ -69,17 +65,11 @@ class BasePageQuerySet(InheritanceQuerySet):
def parent(self, parent=None, id=None): def parent(self, parent=None, id=None):
"""Return pages having this parent.""" """Return pages having this parent."""
return ( return self.filter(parent=parent) if id is None else self.filter(parent__id=id)
self.filter(parent=parent)
if id is None
else self.filter(parent__id=id)
)
def search(self, q, search_content=True): def search(self, q, search_content=True):
if search_content: if search_content:
return self.filter( return self.filter(models.Q(title__icontains=q) | models.Q(content__icontains=q))
models.Q(title__icontains=q) | models.Q(content__icontains=q)
)
return self.filter(title__icontains=q) return self.filter(title__icontains=q)
@ -104,9 +94,7 @@ class BasePage(Renderable, models.Model):
related_name="child_set", related_name="child_set",
) )
title = models.CharField(max_length=100) title = models.CharField(max_length=100)
slug = models.SlugField( slug = models.SlugField(_("slug"), max_length=120, blank=True, unique=True, db_index=True)
_("slug"), max_length=120, blank=True, unique=True, db_index=True
)
status = models.PositiveSmallIntegerField( status = models.PositiveSmallIntegerField(
_("status"), _("status"),
default=STATUS_DRAFT, default=STATUS_DRAFT,
@ -146,11 +134,7 @@ class BasePage(Renderable, models.Model):
super().save(*args, **kwargs) super().save(*args, **kwargs)
def get_absolute_url(self): def get_absolute_url(self):
return ( return reverse(self.detail_url_name, kwargs={"slug": self.slug}) if self.is_published else "#"
reverse(self.detail_url_name, kwargs={"slug": self.slug})
if self.is_published
else "#"
)
@property @property
def is_draft(self): def is_draft(self):
@ -191,9 +175,7 @@ class BasePage(Renderable, models.Model):
class PageQuerySet(BasePageQuerySet): class PageQuerySet(BasePageQuerySet):
def published(self): def published(self):
return self.filter( return self.filter(status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now())
status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now()
)
class Page(BasePage): class Page(BasePage):
@ -207,9 +189,7 @@ class Page(BasePage):
null=True, null=True,
db_index=True, db_index=True,
) )
pub_date = models.DateTimeField( pub_date = models.DateTimeField(_("publication date"), blank=True, null=True, db_index=True)
_("publication date"), blank=True, null=True, db_index=True
)
featured = models.BooleanField( featured = models.BooleanField(
_("featured"), _("featured"),
default=False, default=False,
@ -316,9 +296,7 @@ class Comment(Renderable, models.Model):
class NavItem(models.Model): class NavItem(models.Model):
"""Navigation menu items.""" """Navigation menu items."""
station = models.ForeignKey( station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
Station, models.CASCADE, verbose_name=_("station")
)
menu = models.SlugField(_("menu"), max_length=24) menu = models.SlugField(_("menu"), max_length=24)
order = models.PositiveSmallIntegerField(_("order")) order = models.PositiveSmallIntegerField(_("order"))
text = models.CharField(_("title"), max_length=64) text = models.CharField(_("title"), max_length=64)
@ -338,13 +316,7 @@ class NavItem(models.Model):
ordering = ("order", "pk") ordering = ("order", "pk")
def get_url(self): def get_url(self):
return ( return self.url if self.url else self.page.get_absolute_url() if self.page else None
self.url
if self.url
else self.page.get_absolute_url()
if self.page
else None
)
def render(self, request, css_class="", active_class=""): def render(self, request, css_class="", active_class=""):
url = self.get_url() url = self.get_url()
@ -356,6 +328,4 @@ class NavItem(models.Model):
elif not css_class: elif not css_class:
return format_html('<a href="{}">{}</a>', url, self.text) return format_html('<a href="{}">{}</a>', url, self.text)
else: else:
return format_html( return format_html('<a href="{}" class="{}">{}</a>', url, css_class, self.text)
'<a href="{}" class="{}">{}</a>', url, css_class, self.text
)
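
`get_absolute_url` now reads as a single conditional expression: published pages resolve through `reverse()`, drafts fall back to a dead link. Roughly, with illustrative names:

```
from django.urls import reverse

def absolute_url(page):
    # Draft pages deliberately resolve to "#" so templates can render them harmlessly.
    if not page.is_published:
        return "#"
    return reverse(page.detail_url_name, kwargs={"slug": page.slug})
```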

View File

@ -47,9 +47,7 @@ class Program(Page):
""" """
# explicit foreign key in order to avoid related name clashes # explicit foreign key in order to avoid related name clashes
station = models.ForeignKey( station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
Station, models.CASCADE, verbose_name=_("station")
)
active = models.BooleanField( active = models.BooleanField(
_("active"), _("active"),
default=True, default=True,
@ -126,12 +124,7 @@ class Program(Page):
# TODO: move in signals # TODO: move in signals
path_ = getattr(self, "__initial_path", None) path_ = getattr(self, "__initial_path", None)
abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_) abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
if ( if path_ is not None and path_ != self.path and os.path.exists(abspath) and not os.path.exists(self.abspath):
path_ is not None
and path_ != self.path
and os.path.exists(abspath)
and not os.path.exists(self.abspath)
):
logger.info( logger.info(
"program #%s's dir changed to %s - update it.", "program #%s's dir changed to %s - update it.",
self.id, self.id,
@ -139,9 +132,7 @@ class Program(Page):
) )
shutil.move(abspath, self.abspath) shutil.move(abspath, self.abspath)
Sound.objects.filter(path__startswith=path_).update( Sound.objects.filter(path__startswith=path_).update(file=Concat("file", Substr(F("file"), len(path_))))
file=Concat("file", Substr(F("file"), len(path_)))
)
class ProgramChildQuerySet(PageQuerySet): class ProgramChildQuerySet(PageQuerySet):

View File

@ -15,18 +15,10 @@ class RerunQuerySet(models.QuerySet):
"""Queryset for Rerun (sub)classes.""" """Queryset for Rerun (sub)classes."""
def station(self, station=None, id=None): def station(self, station=None, id=None):
return ( return self.filter(program__station=station) if id is None else self.filter(program__station__id=id)
self.filter(program__station=station)
if id is None
else self.filter(program__station__id=id)
)
def program(self, program=None, id=None): def program(self, program=None, id=None):
return ( return self.filter(program=program) if id is None else self.filter(program__id=id)
self.filter(program=program)
if id is None
else self.filter(program__id=id)
)
def rerun(self): def rerun(self):
return self.filter(initial__isnull=False) return self.filter(initial__isnull=False)
@ -78,14 +70,8 @@ class Rerun(models.Model):
def clean(self): def clean(self):
super().clean() super().clean()
if ( if hasattr(self, "start") and self.initial is not None and self.initial.start >= self.start:
hasattr(self, "start") raise ValidationError({"initial": _("rerun must happen after original")})
and self.initial is not None
and self.initial.start >= self.start
):
raise ValidationError(
{"initial": _("rerun must happen after original")}
)
def save_rerun(self): def save_rerun(self):
self.program = self.initial.program self.program = self.initial.program
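
`Rerun.clean()` keeps its validation but as a single condition. A minimal sketch of the rule it enforces (function name is illustrative):

```
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _

def validate_rerun(initial_start, rerun_start):
    """A rerun must be scheduled strictly after the diffusion it repeats."""
    if initial_start >= rerun_start:
        raise ValidationError({"initial": _("rerun must happen after original")})
```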

View File

@ -55,7 +55,7 @@ class Schedule(Rerun):
_("timezone"), _("timezone"),
default=current_timezone_key, default=current_timezone_key,
max_length=100, max_length=100,
choices=[(x, x) for x in zoneinfo.available_timezones()], choices=sorted([(x, x) for x in zoneinfo.available_timezones()]),
help_text=_("timezone used for the date"), help_text=_("timezone used for the date"),
) )
duration = models.TimeField( duration = models.TimeField(
@ -102,11 +102,7 @@ class Schedule(Rerun):
"""Return frequency formated for display.""" """Return frequency formated for display."""
from django.template.defaultfilters import date from django.template.defaultfilters import date
return ( return self._get_FIELD_display(self._meta.get_field("frequency")).format(day=date(self.date, "l")).capitalize()
self._get_FIELD_display(self._meta.get_field("frequency"))
.format(day=date(self.date, "l"))
.capitalize()
)
def normalize(self, date): def normalize(self, date):
"""Return a datetime set to schedule's time for the provided date, """Return a datetime set to schedule's time for the provided date,
@ -124,9 +120,7 @@ class Schedule(Rerun):
# last of the month # last of the month
if freq == Schedule.Frequency.last: if freq == Schedule.Frequency.last:
date = date.replace( date = date.replace(day=calendar.monthrange(date.year, date.month)[1])
day=calendar.monthrange(date.year, date.month)[1]
)
date_wday = date.weekday() date_wday = date.weekday()
# end of month before the wanted weekday: move one week back # end of month before the wanted weekday: move one week back
@ -138,9 +132,7 @@ class Schedule(Rerun):
# move to the first day of the month that matches the schedule's # move to the first day of the month that matches the schedule's
# weekday. Check on SO#3284452 for the formula # weekday. Check on SO#3284452 for the formula
date_wday, month = date.weekday(), date.month date_wday, month = date.weekday(), date.month
date += tz.timedelta( date += tz.timedelta(days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday)
days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday
)
if freq == Schedule.Frequency.one_on_two: if freq == Schedule.Frequency.one_on_two:
# - adjust date with modulo 14 (= 2 weeks in days) # - adjust date with modulo 14 (= 2 weeks in days)
@ -149,11 +141,7 @@ class Schedule(Rerun):
date += tz.timedelta(days=7) date += tz.timedelta(days=7)
dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3)) dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3))
else: else:
dates = ( dates = (date + tz.timedelta(days=7 * week) for week in range(0, 5) if freq & (0b1 << week))
date + tz.timedelta(days=7 * week)
for week in range(0, 5)
if freq & (0b1 << week)
)
return [self.normalize(date) for date in dates if date.month == month] return [self.normalize(date) for date in dates if date.month == month]
@ -166,29 +154,22 @@ class Schedule(Rerun):
from .diffusion import Diffusion from .diffusion import Diffusion
from .episode import Episode from .episode import Episode
if ( if self.initial is not None or self.frequency == Schedule.Frequency.ponctual:
self.initial is not None
or self.frequency == Schedule.Frequency.ponctual
):
return [], [] return [], []
# dates for self and reruns as (date, initial) # dates for self and reruns as (date, initial)
reruns = [ reruns = [(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()]
(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()
]
dates = {date: None for date in self.dates_of_month(date)} dates = {date: None for date in self.dates_of_month(date)}
dates.update( dates.update(
(rerun.normalize(date.date() + delta), date) (rerun.normalize(date.date() + delta), date) for date in list(dates.keys()) for rerun, delta in reruns
for date in list(dates.keys())
for rerun, delta in reruns
) )
# remove dates corresponding to existing diffusions # remove dates corresponding to existing diffusions
saved = set( saved = set(
Diffusion.objects.filter( Diffusion.objects.filter(start__in=dates.keys(), program=self.program, schedule=self).values_list(
start__in=dates.keys(), program=self.program, schedule=self "start", flat=True
).values_list("start", flat=True) )
) )
# make diffs # make diffs
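
The weekly branch of `dates_of_month` above treats `frequency` as a 5-bit mask, one bit per week of the month. A self-contained sketch of that selection with illustrative names:

```
import datetime

def weeks_from_mask(first_occurrence: datetime.date, freq: int):
    """Dates of the month selected by a 5-bit week mask (bit 0 = first week)."""
    return [
        first_occurrence + datetime.timedelta(days=7 * week)
        for week in range(5)
        if freq & (0b1 << week)
    ]

# e.g. freq == 0b01001 keeps the first and fourth occurrences of the weekday.
```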

View File

@ -32,9 +32,7 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
group, created = Group.objects.get_or_create(name=group_name) group, created = Group.objects.get_or_create(name=group_name)
if created and permissions: if created and permissions:
for codename in permissions: for codename in permissions:
permission = Permission.objects.filter( permission = Permission.objects.filter(codename=codename).first()
codename=codename
).first()
if permission: if permission:
group.permissions.add(permission) group.permissions.add(permission)
group.save() group.save()
@ -45,9 +43,7 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
def page_post_save(sender, instance, created, *args, **kwargs): def page_post_save(sender, instance, created, *args, **kwargs):
return return
if not created and instance.cover: if not created and instance.cover:
Page.objects.filter(parent=instance, cover__isnull=True).update( Page.objects.filter(parent=instance, cover__isnull=True).update(cover=instance.cover)
cover=instance.cover
)
@receiver(signals.post_save, sender=Program) @receiver(signals.post_save, sender=Program)
@ -55,15 +51,11 @@ def program_post_save(sender, instance, created, *args, **kwargs):
"""Clean-up later diffusions when a program becomes inactive.""" """Clean-up later diffusions when a program becomes inactive."""
if not instance.active: if not instance.active:
Diffusion.objects.program(instance).after(tz.now()).delete() Diffusion.objects.program(instance).after(tz.now()).delete()
Episode.objects.parent(instance).filter( Episode.objects.parent(instance).filter(diffusion__isnull=True).delete()
diffusion__isnull=True
).delete()
cover = getattr(instance, "__initial_cover", None) cover = getattr(instance, "__initial_cover", None)
if cover is None and instance.cover is not None: if cover is None and instance.cover is not None:
Episode.objects.parent(instance).filter(cover__isnull=True).update( Episode.objects.parent(instance).filter(cover__isnull=True).update(cover=instance.cover)
cover=instance.cover
)
@receiver(signals.pre_save, sender=Schedule) @receiver(signals.pre_save, sender=Schedule)
@ -79,8 +71,7 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
corresponding diffusions accordingly.""" corresponding diffusions accordingly."""
initial = getattr(instance, "_initial", None) initial = getattr(instance, "_initial", None)
if not initial or ( if not initial or (
(instance.time, instance.duration, instance.timezone) (instance.time, instance.duration, instance.timezone) == (initial.time, initial.duration, initial.timezone)
== (initial.time, initial.duration, initial.timezone)
): ):
return return
@ -99,13 +90,9 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
def schedule_pre_delete(sender, instance, *args, **kwargs): def schedule_pre_delete(sender, instance, *args, **kwargs):
"""Delete later corresponding diffusion to a changed schedule.""" """Delete later corresponding diffusion to a changed schedule."""
Diffusion.objects.filter(schedule=instance).after(tz.now()).delete() Diffusion.objects.filter(schedule=instance).after(tz.now()).delete()
Episode.objects.filter( Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()
diffusion__isnull=True, content__isnull=True, sound__isnull=True
).delete()
@receiver(signals.post_delete, sender=Diffusion) @receiver(signals.post_delete, sender=Diffusion)
def diffusion_post_delete(sender, instance, *args, **kwargs): def diffusion_post_delete(sender, instance, *args, **kwargs):
Episode.objects.filter( Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()
diffusion__isnull=True, content__isnull=True, sound__isnull=True
).delete()
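
`user_default_groups` above wires new users into groups and grants permissions by codename. A compact sketch of that bootstrap pattern (helper name is illustrative):

```
from django.contrib.auth.models import Group, Permission

def ensure_group(name, codenames):
    """Create the group on first use and attach the listed permissions."""
    group, created = Group.objects.get_or_create(name=name)
    if created and codenames:
        for codename in codenames:
            permission = Permission.objects.filter(codename=codename).first()
            if permission:
                group.permissions.add(permission)
        group.save()
    return group
```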

View File

@ -50,9 +50,7 @@ class SoundQuerySet(models.QuerySet):
def path(self, paths): def path(self, paths):
if isinstance(paths, str): if isinstance(paths, str):
return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", "")) return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", ""))
return self.filter( return self.filter(file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths))
file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths)
)
def playlist(self, archive=True, order_by=True): def playlist(self, archive=True, order_by=True):
"""Return files absolute paths as a flat list (exclude sound without """Return files absolute paths as a flat list (exclude sound without
@ -66,9 +64,7 @@ class SoundQuerySet(models.QuerySet):
self = self.order_by("file") self = self.order_by("file")
return [ return [
os.path.join(conf.MEDIA_ROOT, file) os.path.join(conf.MEDIA_ROOT, file)
for file in self.filter(file__isnull=False).values_list( for file in self.filter(file__isnull=False).values_list("file", flat=True)
"file", flat=True
)
] ]
def search(self, query): def search(self, query):
@ -122,11 +118,7 @@ class Sound(models.Model):
) )
def _upload_to(self, filename): def _upload_to(self, filename):
subdir = ( subdir = settings.SOUND_ARCHIVES_SUBDIR if self.type == self.TYPE_ARCHIVE else settings.SOUND_EXCERPTS_SUBDIR
settings.SOUND_ARCHIVES_SUBDIR
if self.type == self.TYPE_ARCHIVE
else settings.SOUND_EXCERPTS_SUBDIR
)
return os.path.join(self.program.path, subdir, filename) return os.path.join(self.program.path, subdir, filename)
file = models.FileField( file = models.FileField(
@ -161,10 +153,7 @@ class Sound(models.Model):
) )
is_downloadable = models.BooleanField( is_downloadable = models.BooleanField(
_("downloadable"), _("downloadable"),
help_text=_( help_text=_("whether it can be publicly downloaded by visitors (sound must be " "public)"),
"whether it can be publicly downloaded by visitors (sound must be "
"public)"
),
default=False, default=False,
) )
@ -224,9 +213,7 @@ class Sound(models.Model):
if self.type == self.TYPE_REMOVED and self.program: if self.type == self.TYPE_REMOVED and self.program:
changed = True changed = True
self.type = ( self.type = (
self.TYPE_ARCHIVE self.TYPE_ARCHIVE if self.file.name.startswith(self.program.archives_path) else self.TYPE_EXCERPT
if self.file.name.startswith(self.program.archives_path)
else self.TYPE_EXCERPT
) )
# check mtime -> reset quality if changed (assume file changed) # check mtime -> reset quality if changed (assume file changed)
@ -299,8 +286,7 @@ class Track(models.Model):
blank=True, blank=True,
null=True, null=True,
help_text=_( help_text=_(
"additional informations about this track, such as " "additional informations about this track, such as " "the version, if is it a remix, features, etc."
"the version, if is it a remix, features, etc."
), ),
) )
@ -310,13 +296,9 @@ class Track(models.Model):
ordering = ("position",) ordering = ("position",)
def __str__(self): def __str__(self):
return "{self.artist} -- {self.title} -- {self.position}".format( return "{self.artist} -- {self.title} -- {self.position}".format(self=self)
self=self
)
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
if (self.sound is None and self.episode is None) or ( if (self.sound is None and self.episode is None) or (self.sound is not None and self.episode is not None):
self.sound is not None and self.episode is not None
):
raise ValueError("sound XOR episode is required") raise ValueError("sound XOR episode is required")
super().save(*args, **kwargs) super().save(*args, **kwargs)
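
`Track.save()` insists that a track belongs to exactly one of `sound` or `episode`. The check boils down to an exclusive-or, sketched here with illustrative names:

```
def check_track_owner(sound, episode):
    """Exactly one of `sound` / `episode` must be set."""
    if (sound is None) == (episode is None):
        raise ValueError("sound XOR episode is required")
```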

View File

@ -67,9 +67,7 @@ class Station(models.Model):
max_length=2048, max_length=2048,
null=True, null=True,
blank=True, blank=True,
help_text=_( help_text=_("Audio streams urls used by station's player. One url " "a line."),
"Audio streams urls used by station's player. One url " "a line."
),
) )
default_cover = FilerImageField( default_cover = FilerImageField(
on_delete=models.SET_NULL, on_delete=models.SET_NULL,
@ -153,16 +151,10 @@ class Port(models.Model):
(TYPE_FILE, _("file")), (TYPE_FILE, _("file")),
) )
station = models.ForeignKey( station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
Station, models.CASCADE, verbose_name=_("station") direction = models.SmallIntegerField(_("direction"), choices=DIRECTION_CHOICES)
)
direction = models.SmallIntegerField(
_("direction"), choices=DIRECTION_CHOICES
)
type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES) type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
active = models.BooleanField( active = models.BooleanField(_("active"), default=True, help_text=_("this port is active"))
_("active"), default=True, help_text=_("this port is active")
)
settings = models.TextField( settings = models.TextField(
_("port settings"), _("port settings"),
help_text=_( help_text=_(
@ -193,8 +185,6 @@ class Port(models.Model):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
if not self.is_valid_type(): if not self.is_valid_type():
raise ValueError( raise ValueError("port type is not allowed with the given port direction")
"port type is not allowed with the given port direction"
)
return super().save(*args, **kwargs) return super().save(*args, **kwargs)

View File

@ -15,6 +15,4 @@ class UserSettings(models.Model):
related_name="aircox_settings", related_name="aircox_settings",
) )
playlist_editor_columns = models.JSONField(_("Playlist Editor Columns")) playlist_editor_columns = models.JSONField(_("Playlist Editor Columns"))
playlist_editor_sep = models.CharField( playlist_editor_sep = models.CharField(_("Playlist Editor Separator"), max_length=16)
_("Playlist Editor Separator"), max_length=16
)

View File

@ -52,9 +52,7 @@ def do_has_perm(context, obj, perm, user=None):
"""Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``""" """Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``"""
if user is None: if user is None:
user = context["request"].user user = context["request"].user
return user.has_perm( return user.has_perm("{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name))
"{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name)
)
@register.filter(name="is_diffusion") @register.filter(name="is_diffusion")
@ -87,10 +85,7 @@ def do_player_live_attr(context):
def do_nav_items(context, menu, **kwargs): def do_nav_items(context, menu, **kwargs):
"""Render navigation items for the provided menu name.""" """Render navigation items for the provided menu name."""
station, request = context["station"], context["request"] station, request = context["station"], context["request"]
return [ return [(item, item.render(request, **kwargs)) for item in station.navitem_set.filter(menu=menu)]
(item, item.render(request, **kwargs))
for item in station.navitem_set.filter(menu=menu)
]
@register.simple_tag(name="update_query") @register.simple_tag(name="update_query")
@ -108,10 +103,4 @@ def do_update_query(obj, **kwargs):
def do_verbose_name(obj, plural=False): def do_verbose_name(obj, plural=False):
"""Return model's verbose name (singular or plural) or `obj` if it is a """Return model's verbose name (singular or plural) or `obj` if it is a
string (can act for default values).""" string (can act for default values)."""
return ( return obj if isinstance(obj, str) else obj._meta.verbose_name_plural if plural else obj._meta.verbose_name
obj
if isinstance(obj, str)
else obj._meta.verbose_name_plural
if plural
else obj._meta.verbose_name
)
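
The `has_perm` tag builds the standard `"<app_label>.<perm>_<model>"` permission string from a model instance. In isolation, for any Django model instance:

```
def perm_code(obj, perm):
    """Permission codename, e.g. something like "aircox.change_program"."""
    meta = obj._meta
    return "{}.{}_{}".format(meta.app_label, perm, meta.model_name)

# usage sketch: user.has_perm(perm_code(program, "change"))
```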

View File

@ -51,9 +51,7 @@ class WrapperMixin:
ns = None ns = None
ns_attr = None ns_attr = None
def __init__( def __init__(self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs):
self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs
):
self.target = target self.target = target
if ns: if ns:
self.inject(ns, ns_attr) self.inject(ns, ns_attr)
@ -87,10 +85,7 @@ class WrapperMixin:
if self.target is ns_target: if self.target is ns_target:
return return
elif self.target is not None and self.ns: elif self.target is not None and self.ns:
raise RuntimeError( raise RuntimeError("self target already injected. It must be " "`release` before `inject`.")
"self target already injected. It must be "
"`release` before `inject`."
)
self.target = ns_target self.target = ns_target
setattr(ns, ns_attr, self.interface) setattr(ns, ns_attr, self.interface)
@ -145,9 +140,7 @@ class SpoofMixin:
traces = self.traces[name] traces = self.traces[name]
if not isinstance(traces, list): if not isinstance(traces, list):
traces = (traces,) traces = (traces,)
return tuple( return tuple(self._get_trace(trace, args=args, kw=kw) for trace in traces)
self._get_trace(trace, args=args, kw=kw) for trace in traces
)
def _get_trace(self, trace, args=False, kw=False): def _get_trace(self, trace, args=False, kw=False):
if (args and kw) or (not args and not kw): if (args and kw) or (not args and not kw):

View File

@ -48,15 +48,11 @@ class TestDateFieldFilter:
def test___init__(self, date_filter): def test___init__(self, date_filter):
assert date_filter.date_params == {"pub_date__lte": tomorrow} assert date_filter.date_params == {"pub_date__lte": tomorrow}
date_filter.links = [ date_filter.links = [(str(link[0]), *list(link[1:])) for link in date_filter.links]
(str(link[0]), *list(link[1:])) for link in date_filter.links
]
assert date_filter.links == [ assert date_filter.links == [
(str(_("None")), "pub_date__isnull", None, "1"), (str(_("None")), "pub_date__isnull", None, "1"),
(str(_("Exact")), "pub_date__date", date_filter.input_type), (str(_("Exact")), "pub_date__date", date_filter.input_type),
(str(_("Since")), "pub_date__gte", date_filter.input_type), (str(_("Since")), "pub_date__gte", date_filter.input_type),
(str(_("Until")), "pub_date__lte", date_filter.input_type), (str(_("Until")), "pub_date__lte", date_filter.input_type),
] ]
assert date_filter.query_attrs == { assert date_filter.query_attrs == {"pub_date__gte": today.strftime("%Y-%m-%d")}
"pub_date__gte": today.strftime("%Y-%m-%d")
}

View File

@ -30,9 +30,7 @@ def staff_user():
@pytest.fixture @pytest.fixture
def logger(): def logger():
logger = Interface( logger = Interface(logging, {"info": None, "debug": None, "error": None, "warning": None})
logging, {"info": None, "debug": None, "error": None, "warning": None}
)
return logger return logger
@ -123,10 +121,7 @@ def schedules(sched_initials, sched_reruns):
@pytest.fixture @pytest.fixture
def episodes(programs): def episodes(programs):
return [ return [baker.make(models.Episode, parent=program, cover=None) for program in programs]
baker.make(models.Episode, parent=program, cover=None)
for program in programs
]
@pytest.fixture @pytest.fixture
@ -158,15 +153,7 @@ def sound(program):
@pytest.fixture @pytest.fixture
def tracks(episode, sound): def tracks(episode, sound):
items = [ items = [baker.prepare(models.Track, episode=episode, position=i, timestamp=i * 60) for i in range(0, 3)]
baker.prepare( items += [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 3)]
models.Track, episode=episode, position=i, timestamp=i * 60
)
for i in range(0, 3)
]
items += [
baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
for i in range(0, 3)
]
models.Track.objects.bulk_create(items) models.Track.objects.bulk_create(items)
return items return items

View File

@ -21,30 +21,21 @@ class TestDiffusion:
def test_update(self, monitor, schedules, sched_initials, logger): def test_update(self, monitor, schedules, sched_initials, logger):
monitor.update() monitor.update()
diffusions = models.Diffusion.objects.filter( diffusions = models.Diffusion.objects.filter(schedule__in=sched_initials)
schedule__in=sched_initials
)
by_date = {} by_date = {}
for diff in diffusions: for diff in diffusions:
assert diff.episode_id assert diff.episode_id
by_date.setdefault(diff.schedule_id, set()).add( by_date.setdefault(diff.schedule_id, set()).add((diff.start, diff.end))
(diff.start, diff.end)
)
for schedule in sched_initials: for schedule in sched_initials:
if schedule.pk not in by_date: if schedule.pk not in by_date:
continue continue
_, items = schedule.diffusions_of_month(now) _, items = schedule.diffusions_of_month(now)
assert all( assert all((item.start, item.end) in by_date[schedule.pk] for item in items)
(item.start, item.end) in by_date[schedule.pk]
for item in items
)
@pytest.mark.django_db @pytest.mark.django_db
def test_clean(self, monitor, episode): def test_clean(self, monitor, episode):
start = tz.make_aware( start = tz.make_aware(datetime.combine(monitor.date - timedelta(days=1), time(10, 20)))
datetime.combine(monitor.date - timedelta(days=1), time(10, 20))
)
diff = models.Diffusion( diff = models.Diffusion(
type=models.Diffusion.TYPE_UNCONFIRMED, type=models.Diffusion.TYPE_UNCONFIRMED,
episode=episode, episode=episode,

View File

@ -79,16 +79,12 @@ class TestLogArchiver:
def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs): def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs):
# before logs are deleted from db, get data # before logs are deleted from db, get data
sorted = archiver.sort_logs(logs_qs) sorted = archiver.sort_logs(logs_qs)
paths = { paths = {archiver.get_path(station, date) for station, date in sorted.keys()}
archiver.get_path(station, date) for station, date in sorted.keys()
}
count = archiver.archive(logs_qs, keep=False) count = archiver.archive(logs_qs, keep=False)
assert count == len(logs) assert count == len(logs)
assert not logs_qs.count() assert not logs_qs.count()
assert all( assert all(path in paths for path, *_ in gzip._traces("open", args=True))
path in paths for path, *_ in gzip._traces("open", args=True)
)
results = archiver.load_file("dummy path") results = archiver.load_file("dummy path")
assert results assert results
@ -104,7 +100,4 @@ class TestLogArchiver:
assert sorted assert sorted
for (station, date), logs in sorted.items(): for (station, date), logs in sorted.items():
assert all( assert all(log.station == station and log.date.date() == date for log in logs)
log.station == station and log.date.date() == date
for log in logs
)

View File

@ -53,13 +53,7 @@ def path_infos():
@pytest.fixture @pytest.fixture
def sound_files(path_infos): def sound_files(path_infos):
return { return {k: r for k, r in ((path, SoundFile(conf.MEDIA_ROOT + "/" + path)) for path in path_infos.keys())}
k: r
for k, r in (
(path, SoundFile(conf.MEDIA_ROOT + "/" + path))
for path in path_infos.keys()
)
}
def test_sound_path(sound_files): def test_sound_path(sound_files):
@ -78,17 +72,9 @@ def test_read_path(path_infos, sound_files):
def _setup_diff(program, info): def _setup_diff(program, info):
episode = models.Episode(program=program, title="test-episode") episode = models.Episode(program=program, title="test-episode")
at = tz.datetime( at = tz.datetime(**{k: info[k] for k in ("year", "month", "day", "hour", "minute") if info.get(k)})
**{
k: info[k]
for k in ("year", "month", "day", "hour", "minute")
if info.get(k)
}
)
at = tz.make_aware(at) at = tz.make_aware(at)
diff = models.Diffusion( diff = models.Diffusion(episode=episode, start=at, end=at + timedelta(hours=1))
episode=episode, start=at, end=at + timedelta(hours=1)
)
episode.save() episode.save()
diff.save() diff.save()
return diff return diff

View File

@ -92,9 +92,7 @@ class TestTask:
task.log_msg = "--{event.src_path}--" task.log_msg = "--{event.src_path}--"
sound_file = task(event, logger=logger, kw=13) sound_file = task(event, logger=logger, kw=13)
assert sound_file._trace("sync", kw=True) == {"kw": 13} assert sound_file._trace("sync", kw=True) == {"kw": 13}
assert logger._trace("info", args=True) == ( assert logger._trace("info", args=True) == (task.log_msg.format(event=event),)
task.log_msg.format(event=event),
)
class TestDeleteTask: class TestDeleteTask:
@ -125,9 +123,7 @@ class TestModifiedTask:
datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now}) datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now})
def sleep(imeta, n): def sleep(imeta, n):
datetime._imeta.funcs[ datetime._imeta.funcs["now"] = modified_task.timestamp + tz.timedelta(hours=10)
"now"
] = modified_task.timestamp + tz.timedelta(hours=10)
time = Interface.inject(sound_monitor, "time", {"sleep": sleep}) time = Interface.inject(sound_monitor, "time", {"sleep": sleep})
modified_task.wait() modified_task.wait()
@ -175,9 +171,7 @@ class TestMonitorHandler:
def test__submit(self, monitor_handler, event): def test__submit(self, monitor_handler, event):
handler = Interface() handler = Interface()
handler, created = monitor_handler._submit( handler, created = monitor_handler._submit(handler, event, "prefix", kw=13)
handler, event, "prefix", kw=13
)
assert created assert created
assert handler.future._trace("add_done_callback") assert handler.future._trace("add_done_callback")
assert monitor_handler.pool._trace("submit") == ( assert monitor_handler.pool._trace("submit") == (
@ -192,9 +186,7 @@ class TestMonitorHandler:
@pytest.fixture @pytest.fixture
def monitor_interfaces(): def monitor_interfaces():
items = { items = {
"atexit": Interface.inject( "atexit": Interface.inject(sound_monitor, "atexit", {"register": None, "leave": None}),
sound_monitor, "atexit", {"register": None, "leave": None}
),
"observer": Interface.inject( "observer": Interface.inject(
sound_monitor, sound_monitor,
"Observer", "Observer",
@ -214,29 +206,48 @@ def monitor():
yield sound_monitor.SoundMonitor() yield sound_monitor.SoundMonitor()
class SoundMonitor: class TestSoundMonitor:
@pytest.mark.django_db
def test_report(self, monitor, program, logger): def test_report(self, monitor, program, logger):
monitor.report(program, "component", "content", logger=logger) monitor.report(program, "component", "content", logger=logger)
msg = f"{program}, component: content" msg = f"{program}, component: content"
assert logger._trace("info", args=True) == (msg,) assert logger._trace("info", args=True) == (msg,)
def test_scan(self, monitor, program, logger): @pytest.mark.django_db
def test_scan(self, monitor, programs, logger):
interface = Interface(None, {"scan_for_program": None}) interface = Interface(None, {"scan_for_program": None})
monitor.scan_for_program = interface.scan_for_program monitor.scan_for_program = interface.scan_for_program
dirs = monitor.scan(logger) dirs = monitor.scan(logger)
assert logger._traces("info") == ( assert logger._traces("info") == tuple(
"scan all programs...", [
f"#{program.id} {program.title}", (("scan all programs...",), {}),
]
+ [
((f"#{program.id} {program.title}",), {})
for program in programs
]
) )
assert dirs == [program.abspath] assert dirs == [program.abspath for program in programs]
assert interface._traces("scan_for_program") == ( traces = tuple(
((program, settings.SOUND_ARCHIVES_SUBDIR), {"logger": logger}), [
((program, settings.SOUND_EXCERPTS_SUBDIR), {"logger": logger}), [
) (
(program, settings.SOUND_ARCHIVES_SUBDIR),
{"logger": logger, "type": Sound.TYPE_ARCHIVE},
),
(
(program, settings.SOUND_EXCERPTS_SUBDIR),
{"logger": logger, "type": Sound.TYPE_EXCERPT},
),
]
for program in programs
]
) )
traces_flat = tuple([item for sublist in traces for item in sublist])
assert interface._traces("scan_for_program") == traces_flat
def test_monitor(self, monitor, monitor_interfaces, logger): def broken_test_monitor(self, monitor, monitor_interfaces, logger):
def sleep(*args, **kwargs): def sleep(*args, **kwargs):
monitor.stop() monitor.stop()

View File

@ -38,12 +38,8 @@ sox_values = {
@pytest.fixture @pytest.fixture
def sox_interfaces(): def sox_interfaces():
process = Interface( process = Interface(None, {"communicate": ("", sox_output.encode("utf-8"))})
None, {"communicate": ("", sox_output.encode("utf-8"))} subprocess = Interface.inject(sound_stats, "subprocess", {"Popen": lambda *_, **__: process})
)
subprocess = Interface.inject(
sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
)
yield {"process": process, "subprocess": subprocess} yield {"process": process, "subprocess": subprocess}
subprocess._irelease() subprocess._irelease()
@ -110,9 +106,7 @@ class TestSoundStats:
def test_check(self, stats): def test_check(self, stats):
good = [{"val": i} for i in range(0, 11)] good = [{"val": i} for i in range(0, 11)]
bad = [{"val": i} for i in range(-10, 0)] + [ bad = [{"val": i} for i in range(-10, 0)] + [{"val": i} for i in range(11, 20)]
{"val": i} for i in range(11, 20)
]
stats.stats = good + bad stats.stats = good + bad
calls = {} calls = {}
stats.resume = lambda *_: calls.setdefault("resume", True) stats.resume = lambda *_: calls.setdefault("resume", True)

View File

@ -0,0 +1,23 @@
import pytest
import os
from django.core.management import call_command
from django.conf import settings
wav = (
b"RIFF$\x00\x00\x00WAVEfmt \x10\x00\x00\x00\x01\x00\x02\x00D\xac\x00\x00"
b"\x10\xb1\x02\x00\x04\x00\x10\x00data\x00\x00\x00\x00"
)
@pytest.mark.django_db
def test_adding_a_sound(programs, fs):
p0 = programs[0]
assert len(p0.sound_set.all()) == 0
s0 = os.path.join(
settings.PROJECT_ROOT, "static/media/%s/archives/sound.wav" % p0.path
)
fs.create_file(s0, contents=wav)
call_command("sounds_monitor", "-s")
assert len(p0.sound_set.all()) == 1
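
The new test above leans on pyfakefs' `fs` fixture so no real file ever touches the disk. A minimal standalone sketch of that fixture in use, independent of the aircox models (path is illustrative):

```
import os

def test_fake_sound_file(fs):  # `fs` is provided by the pyfakefs pytest plugin
    path = "/srv/media/program/archives/sound.wav"
    fs.create_file(path, contents=b"RIFF")  # exists only in the fake filesystem
    assert os.path.exists(path)
    assert open(path, "rb").read().startswith(b"RIFF")
```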

View File

@ -12,11 +12,7 @@ class TestEpisode:
@pytest.mark.django_db @pytest.mark.django_db
def test_podcasts(self, episode, podcasts): def test_podcasts(self, episode, podcasts):
podcasts = { podcasts = {podcast.pk: podcast for podcast in podcasts if podcast.episode == episode}
podcast.pk: podcast
for podcast in podcasts
if podcast.episode == episode
}
for data in episode.podcasts: for data in episode.podcasts:
podcast = podcasts[data["pk"]] podcast = podcasts[data["pk"]]
assert data["name"] == podcast.name assert data["name"] == podcast.name

View File

@ -12,44 +12,28 @@ class TestRerunQuerySet:
@pytest.mark.django_db @pytest.mark.django_db
def test_station_by_obj(self, stations, schedules): def test_station_by_obj(self, stations, schedules):
for station in stations: for station in stations:
queryset = ( queryset = Schedule.objects.station(station).distinct().values_list("program__station", flat=True)
Schedule.objects.station(station)
.distinct()
.values_list("program__station", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == station.pk assert queryset.first() == station.pk
@pytest.mark.django_db @pytest.mark.django_db
def test_station_by_id(self, stations, schedules): def test_station_by_id(self, stations, schedules):
for station in stations: for station in stations:
queryset = ( queryset = Schedule.objects.station(id=station.pk).distinct().values_list("program__station", flat=True)
Schedule.objects.station(id=station.pk)
.distinct()
.values_list("program__station", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == station.pk assert queryset.first() == station.pk
@pytest.mark.django_db @pytest.mark.django_db
def test_program_by_obj(self, programs, schedules): def test_program_by_obj(self, programs, schedules):
for program in programs: for program in programs:
queryset = ( queryset = Schedule.objects.program(program).distinct().values_list("program", flat=True)
Schedule.objects.program(program)
.distinct()
.values_list("program", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == program.pk assert queryset.first() == program.pk
@pytest.mark.django_db @pytest.mark.django_db
def test_program_by_id(self, programs, schedules): def test_program_by_id(self, programs, schedules):
for program in programs: for program in programs:
queryset = ( queryset = Schedule.objects.program(id=program.pk).distinct().values_list("program", flat=True)
Schedule.objects.program(id=program.pk)
.distinct()
.values_list("program", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == program.pk assert queryset.first() == program.pk
@ -60,11 +44,7 @@ class TestRerunQuerySet:
@pytest.mark.django_db @pytest.mark.django_db
def test_initial(self, schedules): def test_initial(self, schedules):
queryset = ( queryset = Schedule.objects.initial().distinct().values_list("initial", flat=True)
Schedule.objects.initial()
.distinct()
.values_list("initial", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() is None assert queryset.first() is None

View File

@ -49,9 +49,7 @@ class TestSchedule:
@pytest.mark.django_db @pytest.mark.django_db
def test_dates_of_month_ponctual(self): def test_dates_of_month_ponctual(self):
schedule = baker.prepare( schedule = baker.prepare(Schedule, frequency=Schedule.Frequency.ponctual)
Schedule, frequency=Schedule.Frequency.ponctual
)
at = schedule.date + relativedelta(months=4) at = schedule.date + relativedelta(months=4)
assert schedule.dates_of_month(at) == [] assert schedule.dates_of_month(at) == []
@ -59,9 +57,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4)) @pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4)) @pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_last(self, months, hour): def test_dates_of_month_last(self, months, hour):
schedule = baker.prepare( schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last)
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last
)
at = schedule.date + relativedelta(months=months) at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at) datetimes = schedule.dates_of_month(at)
assert len(datetimes) == 1 assert len(datetimes) == 1
@ -73,9 +69,7 @@ class TestSchedule:
at = date(at.year, at.month, month_info[1]) at = date(at.year, at.month, month_info[1])
if at.weekday() < schedule.date.weekday(): if at.weekday() < schedule.date.weekday():
at -= timedelta(days=7) at -= timedelta(days=7)
at += timedelta(days=schedule.date.weekday()) - timedelta( at += timedelta(days=schedule.date.weekday()) - timedelta(days=at.weekday())
days=at.weekday()
)
assert dt.date() == at assert dt.date() == at
# since the same method is used for first, second, etc. frequencies # since the same method is used for first, second, etc. frequencies
@ -84,9 +78,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4)) @pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4)) @pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_every(self, months, hour): def test_dates_of_month_every(self, months, hour):
schedule = baker.prepare( schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every)
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every
)
at = schedule.date + relativedelta(months=months) at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at) datetimes = schedule.dates_of_month(at)
last = None last = None
@ -128,8 +120,4 @@ class TestSchedule:
episodes, diffusions = schedule.diffusions_of_month(at) episodes, diffusions = schedule.diffusions_of_month(at)
assert all(r.date in dates for r in episodes) assert all(r.date in dates for r in episodes)
assert all( assert all((not r.initial or r.date in dates) and r.type == Diffusion.TYPE_ON_AIR for r in diffusions)
(not r.initial or r.date in dates)
and r.type == Diffusion.TYPE_ON_AIR
for r in diffusions
)

View File

@ -39,8 +39,7 @@ def test_user_default_groups():
groups = Group.objects.filter(name__in=default_groups.keys()) groups = Group.objects.filter(name__in=default_groups.keys())
assert groups.exists() assert groups.exists()
assert all( assert all(
set(group.permissions.all().values_list("codename", flat=True)) set(group.permissions.all().values_list("codename", flat=True)) == set(default_groups[group.name])
== set(default_groups[group.name])
for group in groups for group in groups
) )
user_groups = set(user.groups.all().values_list("name", flat=True)) user_groups = set(user.groups.all().values_list("name", flat=True))
@ -104,7 +103,5 @@ def test_schedule_pre_delete(sched, eps_diffs):
@pytest.mark.django_db @pytest.mark.django_db
def test_diffusion_post_delete(eps_diffs): def test_diffusion_post_delete(eps_diffs):
eps = eps_diffs[0][0] eps = eps_diffs[0][0]
Diffusion.objects.filter( Diffusion.objects.filter(id__in=[r.id for r in eps.diffusion_set.all()]).delete()
id__in=[r.id for r in eps.diffusion_set.all()]
).delete()
assert Episode.objects.filter(id=eps.id).first() is None assert Episode.objects.filter(id=eps.id).first() is None

View File

@ -29,9 +29,7 @@ def test_date_or_default():
def test_to_timedelta(): def test_to_timedelta():
val = datetime(2023, 1, 10, hour=20, minute=10, second=1) val = datetime(2023, 1, 10, hour=20, minute=10, second=1)
assert utils.to_timedelta(val) == timedelta( assert utils.to_timedelta(val) == timedelta(hours=20, minutes=10, seconds=1)
hours=20, minutes=10, seconds=1
)
def test_to_seconds(): def test_to_seconds():

View File

@ -23,16 +23,12 @@ class FakeView:
@pytest.fixture @pytest.fixture
def published_pages(): def published_pages():
return baker.make( return baker.make(models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3)
models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3
)
@pytest.fixture @pytest.fixture
def unpublished_pages(): def unpublished_pages():
return baker.make( return baker.make(models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3)
models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3
)
@pytest.fixture @pytest.fixture

View File

@ -96,9 +96,7 @@ class TestParentMixin:
@pytest.mark.django_db @pytest.mark.django_db
def test_get_parent_raises_404(self, parent_mixin): def test_get_parent_raises_404(self, parent_mixin):
with pytest.raises(Http404): with pytest.raises(Http404):
parent_mixin.get_parent( parent_mixin.get_parent(self.req, parent_slug="parent-invalid-slug")
self.req, parent_slug="parent-invalid-slug"
)
def test_get_parent_not_parent_model(self, parent_mixin): def test_get_parent_not_parent_model(self, parent_mixin):
parent_mixin.parent_model = None parent_mixin.parent_model = None

View File

@ -29,9 +29,7 @@ api = [
path("logs/", views.LogListAPIView.as_view(), name="live"), path("logs/", views.LogListAPIView.as_view(), name="live"),
path( path(
"user/settings/", "user/settings/",
viewsets.UserSettingsViewSet.as_view( viewsets.UserSettingsViewSet.as_view({"get": "retrieve", "post": "update", "put": "update"}),
{"get": "retrieve", "post": "update", "put": "update"}
),
name="user-settings", name="user-settings",
), ),
] + router.urls ] + router.urls

View File

@ -72,9 +72,7 @@ def date_or_default(date, into=None):
def to_timedelta(time): def to_timedelta(time):
"""Transform a datetime or a time instance to a timedelta, only using time """Transform a datetime or a time instance to a timedelta, only using time
info.""" info."""
return datetime.timedelta( return datetime.timedelta(hours=time.hour, minutes=time.minute, seconds=time.second)
hours=time.hour, minutes=time.minute, seconds=time.second
)
def to_seconds(time): def to_seconds(time):
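
`to_timedelta` keeps only the time-of-day component; the test earlier in this diff asserts exactly this. A runnable version:

```
import datetime

def to_timedelta(time):
    """Timedelta built from the hour/minute/second of a time or datetime."""
    return datetime.timedelta(hours=time.hour, minutes=time.minute, seconds=time.second)

assert to_timedelta(datetime.datetime(2023, 1, 10, 20, 10, 1)) == datetime.timedelta(
    hours=20, minutes=10, seconds=1
)
```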

View File

@ -37,9 +37,5 @@ class StatisticsView(AdminMixin, LogListView, ListView):
def get_object_list(self, logs, full=False): def get_object_list(self, logs, full=False):
if not logs.exists(): if not logs.exists():
logs = ( logs = LogArchiver().load(self.station, self.date) if self.date else []
LogArchiver().load(self.station, self.date)
if self.date
else []
)
return super().get_object_list(logs, True) return super().get_object_list(logs, True)

View File

@ -8,11 +8,7 @@ class ArticleDetailView(PageDetailView):
model = Article model = Article
def get_sidebar_queryset(self): def get_sidebar_queryset(self):
qs = ( qs = Article.objects.published().select_related("cover").order_by("-pub_date")
Article.objects.published()
.select_related("cover")
.order_by("-pub_date")
)
return qs return qs

View File

@ -28,11 +28,7 @@ class BaseView(TemplateResponseMixin, ContextMixin):
kwargs["audio_streams"] = self.station.streams kwargs["audio_streams"] = self.station.streams
if "model" not in kwargs: if "model" not in kwargs:
model = ( model = getattr(self, "model", None) or hasattr(self, "object") and type(self.object)
getattr(self, "model", None)
or hasattr(self, "object")
and type(self.object)
)
kwargs["model"] = model kwargs["model"] = model
page = kwargs.get("page") page = kwargs.get("page")

View File

@ -31,9 +31,7 @@ class HomeView(AttachedToMixin, BaseView, ListView):
current_diff = query.now(now).first() current_diff = query.now(now).first()
next_diffs = query.after(now) next_diffs = query.after(now)
if current_diff: if current_diff:
diffs = [current_diff] + list( diffs = [current_diff] + list(next_diffs.exclude(pk=current_diff.pk)[:9])
next_diffs.exclude(pk=current_diff.pk)[:9]
)
else: else:
diffs = next_diffs[:10] diffs = next_diffs[:10]
return diffs return diffs

View File

@ -27,13 +27,7 @@ class LogListMixin(GetDateMixin):
def get_queryset(self): def get_queryset(self):
# only get logs for tracks: log for diffusion will be retrieved # only get logs for tracks: log for diffusion will be retrieved
# by the diffusions' queryset. # by the diffusions' queryset.
qs = ( qs = super().get_queryset().on_air().filter(track__isnull=False).filter(date__lte=tz.now())
super()
.get_queryset()
.on_air()
.filter(track__isnull=False)
.filter(date__lte=tz.now())
)
return ( return (
qs.date(self.date) qs.date(self.date)
if self.date is not None if self.date is not None
@ -43,11 +37,7 @@ class LogListMixin(GetDateMixin):
) )
def get_diffusions_queryset(self): def get_diffusions_queryset(self):
qs = ( qs = Diffusion.objects.station(self.station).on_air().filter(start__lte=tz.now())
Diffusion.objects.station(self.station)
.on_air()
.filter(start__lte=tz.now())
)
return ( return (
qs.date(self.date) qs.date(self.date)
if self.date is not None if self.date is not None
@ -86,9 +76,7 @@ class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
kwargs.update( kwargs.update(
{ {
"date": self.date, "date": self.date,
"dates": ( "dates": (today - datetime.timedelta(days=i) for i in range(0, 7)),
today - datetime.timedelta(days=i) for i in range(0, 7)
),
"object_list": self.get_object_list(self.object_list), "object_list": self.get_object_list(self.object_list),
} }
) )
@ -123,6 +111,4 @@ class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
def get_serializer(self, queryset, *args, **kwargs): def get_serializer(self, queryset, *args, **kwargs):
full = bool(self.request.GET.get("full")) full = bool(self.request.GET.get("full"))
return super().get_serializer( return super().get_serializer(self.get_object_list(queryset, full), *args, **kwargs)
self.get_object_list(queryset, full), *args, **kwargs
)

View File

@ -14,13 +14,7 @@ class GetDateMixin:
def get_date(self): def get_date(self):
date = self.request.GET.get("date") date = self.request.GET.get("date")
return ( return str_to_date(date, "-") if date is not None else self.kwargs["date"] if "date" in self.kwargs else None
str_to_date(date, "-")
if date is not None
else self.kwargs["date"]
if "date" in self.kwargs
else None
)
def get(self, *args, **kwargs): def get(self, *args, **kwargs):
if self.redirect_date_url and self.request.GET.get("date"): if self.redirect_date_url and self.request.GET.get("date"):
@ -55,9 +49,7 @@ class ParentMixin:
return return
lookup = {self.parent_field: kwargs[self.parent_url_kwarg]} lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
return get_object_or_404( return get_object_or_404(self.parent_model.objects.select_related("cover"), **lookup)
self.parent_model.objects.select_related("cover"), **lookup
)
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
self.parent = self.get_parent(request, *args, **kwargs) self.parent = self.get_parent(request, *args, **kwargs)
@ -83,11 +75,7 @@ class AttachedToMixin:
def get_page(self): def get_page(self):
if self.attach_to_value is not None: if self.attach_to_value is not None:
-            return (
-                StaticPage.objects.filter(attach_to=self.attach_to_value)
-                .published()
-                .first()
-            )
+            return StaticPage.objects.filter(attach_to=self.attach_to_value).published().first()
return super().get_page() return super().get_page()

View File

@ -30,13 +30,7 @@ class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
return super().get(*args, **kwargs) return super().get(*args, **kwargs)
def get_queryset(self): def get_queryset(self):
-        return (
-            super()
-            .get_queryset()
-            .select_subclasses()
-            .published()
-            .select_related("cover")
-        )
+        return super().get_queryset().select_subclasses().published().select_related("cover")
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
kwargs.setdefault("has_headline", self.has_headline) kwargs.setdefault("has_headline", self.has_headline)
@ -114,12 +108,7 @@ class PageListView(FiltersMixin, BasePageListView):
return super().get_filterset(data, query) return super().get_filterset(data, query)
def get_queryset(self): def get_queryset(self):
-        qs = (
-            super()
-            .get_queryset()
-            .select_related("category")
-            .order_by("-pub_date")
-        )
+        qs = super().get_queryset().select_related("category").order_by("-pub_date")
return qs return qs
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
@ -152,9 +141,7 @@ class PageDetailView(BasePageDetailView):
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
if self.object.allow_comments and "comment_form" not in kwargs: if self.object.allow_comments and "comment_form" not in kwargs:
kwargs["comment_form"] = CommentForm() kwargs["comment_form"] = CommentForm()
kwargs["comments"] = Comment.objects.filter(page=self.object).order_by( kwargs["comments"] = Comment.objects.filter(page=self.object).order_by("-date")
"-date"
)
if self.object.parent_subclass: if self.object.parent_subclass:
kwargs["parent"] = self.object.parent_subclass kwargs["parent"] = self.object.parent_subclass
return super().get_context_data(**kwargs) return super().get_context_data(**kwargs)

View File

@ -12,9 +12,7 @@ class BaseProgramMixin:
return self.object return self.object
def get_sidebar_url(self): def get_sidebar_url(self):
-        return reverse(
-            "program-page-list", kwargs={"parent_slug": self.program.slug}
-        )
+        return reverse("program-page-list", kwargs={"parent_slug": self.program.slug})
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
self.program = self.get_program() self.program = self.get_program()
@ -26,19 +24,9 @@ class ProgramDetailView(BaseProgramMixin, PageDetailView):
model = Program model = Program
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
-        episodes = (
-            Episode.objects.program(self.object)
-            .published()
-            .order_by("-pub_date")
-        )
-        articles = (
-            Article.objects.parent(self.object)
-            .published()
-            .order_by("-pub_date")
-        )
-        return super().get_context_data(
-            articles=articles, episodes=episodes, **kwargs
-        )
+        episodes = Episode.objects.program(self.object).published().order_by("-pub_date")
+        articles = Article.objects.parent(self.object).published().order_by("-pub_date")
+        return super().get_context_data(articles=articles, episodes=episodes, **kwargs)
class ProgramListView(PageListView): class ProgramListView(PageListView):

View File

@ -70,9 +70,7 @@ class UserSettingsViewSet(viewsets.ViewSet):
permission_classes = [IsAuthenticated] permission_classes = [IsAuthenticated]
def get_serializer(self, instance=None, **kwargs): def get_serializer(self, instance=None, **kwargs):
-        return self.serializer_class(
-            instance=instance, context={"user": self.request.user}, **kwargs
-        )
+        return self.serializer_class(instance=instance, context={"user": self.request.user}, **kwargs)
@action(detail=False, methods=["GET"]) @action(detail=False, methods=["GET"])
def retrieve(self, request): def retrieve(self, request):

View File

@ -45,9 +45,7 @@ class Connector:
if self.is_open: if self.is_open:
return 1 return 1
-        family = (
-            socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
-        )
+        family = socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
try: try:
self.socket = self.socket_class(family, socket.SOCK_STREAM) self.socket = self.socket_class(family, socket.SOCK_STREAM)
self.socket.connect(self.address) self.socket.connect(self.address)
@ -78,13 +76,7 @@ class Connector:
if data: if data:
data = response_re.sub(r"\1", data).strip() data = response_re.sub(r"\1", data).strip()
-                data = (
-                    self.parse(data)
-                    if parse
-                    else self.parse_json(data)
-                    if parse_json
-                    else data
-                )
+                data = self.parse(data) if parse else self.parse_json(data) if parse_json else data
return data return data
except Exception: except Exception:
self.close() self.close()

View File

@ -62,9 +62,7 @@ class Monitor:
def get_logs_queryset(self): def get_logs_queryset(self):
"""Return queryset to assign as `self.logs`""" """Return queryset to assign as `self.logs`"""
-        return self.station.log_set.select_related(
-            "diffusion", "sound", "track"
-        ).order_by("-pk")
+        return self.station.log_set.select_related("diffusion", "sound", "track").order_by("-pk")
def init_last_sound_logs(self): def init_last_sound_logs(self):
"""Retrieve last logs and initialize `last_sound_logs`""" """Retrieve last logs and initialize `last_sound_logs`"""
@ -136,12 +134,7 @@ class Monitor:
diff = None diff = None
sound = Sound.objects.path(air_uri).first() sound = Sound.objects.path(air_uri).first()
if sound and sound.episode_id is not None: if sound and sound.episode_id is not None:
-            diff = (
-                Diffusion.objects.episode(id=sound.episode_id)
-                .on_air()
-                .now(air_time)
-                .first()
-            )
+            diff = Diffusion.objects.episode(id=sound.episode_id).on_air().now(air_time).first()
# log sound on air # log sound on air
return self.log( return self.log(
@ -158,9 +151,7 @@ class Monitor:
if log.diffusion: if log.diffusion:
return return
-        tracks = Track.objects.filter(
-            sound_id=log.sound_id, timestamp__isnull=False
-        ).order_by("timestamp")
+        tracks = Track.objects.filter(sound_id=log.sound_id, timestamp__isnull=False).order_by("timestamp")
if not tracks.exists(): if not tracks.exists():
return return
@ -217,11 +208,7 @@ class Monitor:
dealer = self.streamer.dealer dealer = self.streamer.dealer
# start # start
-        if (
-            not dealer.queue
-            and dealer.rid is None
-            or dealer.remaining < self.delay.total_seconds()
-        ):
+        if not dealer.queue and dealer.rid is None or dealer.remaining < self.delay.total_seconds():
self.start_diff(dealer, diff) self.start_diff(dealer, diff)
# cancel # cancel
elif diff.start < now - self.cancel_timeout: elif diff.start < now - self.cancel_timeout:

View File

@ -47,9 +47,7 @@ class Streamer:
self.id = self.station.slug.replace("-", "_") self.id = self.station.slug.replace("-", "_")
self.path = os.path.join(station.path, "station.liq") self.path = os.path.join(station.path, "station.liq")
-        self.connector = connector or Connector(
-            os.path.join(station.path, "station.sock")
-        )
+        self.connector = connector or Connector(os.path.join(station.path, "station.sock"))
self.init_sources() self.init_sources()
@property @property
@ -91,9 +89,7 @@ class Streamer:
def init_sources(self): def init_sources(self):
streams = self.station.program_set.filter(stream__isnull=False) streams = self.station.program_set.filter(stream__isnull=False)
self.dealer = QueueSource(self, "dealer") self.dealer = QueueSource(self, "dealer")
-        self.sources = [self.dealer] + [
-            PlaylistSource(self, program=program) for program in streams
-        ]
+        self.sources = [self.dealer] + [PlaylistSource(self, program=program) for program in streams]
def make_config(self): def make_config(self):
"""Make configuration files and directory (and sync sources)""" """Make configuration files and directory (and sync sources)"""
@ -128,12 +124,7 @@ class Streamer:
self.source = next( self.source = next(
iter( iter(
sorted( sorted(
-                    (
-                        source
-                        for source in self.sources
-                        if source.request_status == "playing"
-                        and source.air_time
-                    ),
+                    (source for source in self.sources if source.request_status == "playing" and source.air_time),
key=lambda o: o.air_time, key=lambda o: o.air_time,
reverse=True, reverse=True,
) )
@ -149,11 +140,7 @@ class Streamer:
if not os.path.exists(self.socket_path): if not os.path.exists(self.socket_path):
return return
-        conns = [
-            conn
-            for conn in psutil.net_connections(kind="unix")
-            if conn.laddr == self.socket_path
-        ]
+        conns = [conn for conn in psutil.net_connections(kind="unix") if conn.laddr == self.socket_path]
for conn in conns: for conn in conns:
if conn.pid is not None: if conn.pid is not None:
os.kill(conn.pid, signal.SIGKILL) os.kill(conn.pid, signal.SIGKILL)

View File

@ -23,9 +23,7 @@ class Streamers:
def reset(self, stations=Station.objects.active()): def reset(self, stations=Station.objects.active()):
# FIXME: cf. TODO in aircox.controllers about model updates # FIXME: cf. TODO in aircox.controllers about model updates
stations = stations.all() stations = stations.all()
-        self.streamers = {
-            station.pk: self.streamer_class(station) for station in stations
-        }
+        self.streamers = {station.pk: self.streamer_class(station) for station in stations}
def fetch(self): def fetch(self):
"""Call streamers fetch if timed-out.""" """Call streamers fetch if timed-out."""

View File

@ -62,42 +62,24 @@ class Command(BaseCommand):
"--station", "--station",
type=str, type=str,
action="append", action="append",
help="name of the station to monitor instead of monitoring " help="name of the station to monitor instead of monitoring " "all stations",
"all stations",
) )
group.add_argument( group.add_argument(
"-t", "-t",
"--timeout", "--timeout",
type=float, type=float,
default=Monitor.cancel_timeout.total_seconds() / 60, default=Monitor.cancel_timeout.total_seconds() / 60,
help="time to wait in MINUTES before canceling a diffusion that " help="time to wait in MINUTES before canceling a diffusion that " "should have ran but did not. ",
"should have ran but did not. ",
) )
# TODO: sync-timeout, cancel-timeout # TODO: sync-timeout, cancel-timeout
-    def handle(
-        self,
-        *args,
-        config=None,
-        run=None,
-        monitor=None,
-        station=[],
-        delay=1000,
-        timeout=600,
-        **options
-    ):
-        stations = (
-            Station.objects.filter(name__in=station)
-            if station
-            else Station.objects.all()
-        )
+    def handle(self, *args, config=None, run=None, monitor=None, station=[], delay=1000, timeout=600, **options):
+        stations = Station.objects.filter(name__in=station) if station else Station.objects.all()
streamers = [Streamer(station) for station in stations] streamers = [Streamer(station) for station in stations]
for streamer in streamers: for streamer in streamers:
if not streamer.outputs: if not streamer.outputs:
-                raise RuntimeError(
-                    "Streamer {} has no outputs".format(streamer.id)
-                )
+                raise RuntimeError("Streamer {} has no outputs".format(streamer.id))
if config: if config:
streamer.make_config() streamer.make_config()
if run: if run:
@ -106,10 +88,7 @@ class Command(BaseCommand):
if monitor: if monitor:
delay = tz.timedelta(milliseconds=delay) delay = tz.timedelta(milliseconds=delay)
timeout = tz.timedelta(minutes=timeout) timeout = tz.timedelta(minutes=timeout)
-            monitors = [
-                Monitor(streamer, delay, cancel_timeout=timeout)
-                for streamer in streamers
-            ]
+            monitors = [Monitor(streamer, delay, cancel_timeout=timeout) for streamer in streamers]
while not run or streamer.is_running: while not run or streamer.is_running:
for monitor in monitors: for monitor in monitors:

View File

@ -55,9 +55,7 @@ class FakeSocket:
data = self.recv_data data = self.recv_data
self.recv_data = self.recv_data[count:] self.recv_data = self.recv_data[count:]
data = data[:count] data = data[:count]
-        return (
-            data.encode("utf-8") if isinstance(data, str) else data
-        ) or b"\nEND"
+        return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"
def is_sent(self, data): def is_sent(self, data):
"""Return True if provided data have been sent.""" """Return True if provided data have been sent."""
@ -68,9 +66,7 @@ class FakeSocket:
# -- models # -- models
@pytest.fixture @pytest.fixture
def station(): def station():
-    station = models.Station(
-        name="test", path=working_dir, default=True, active=True
-    )
+    station = models.Station(name="test", path=working_dir, default=True, active=True)
station.save() station.save()
return station return station
@ -136,9 +132,7 @@ def program(station):
@pytest.fixture @pytest.fixture
def stream(program): def stream(program):
-    stream = models.Stream(
-        program=program, begin=time(10, 12), end=time(12, 13)
-    )
+    stream = models.Stream(program=program, begin=time(10, 12), end=time(12, 13))
stream.save() stream.save()
return stream return stream
@ -229,10 +223,7 @@ def metadata_data(metadata_data_air_time):
@pytest.fixture @pytest.fixture
def metadata_string(metadata_data): def metadata_string(metadata_data):
return ( return "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"
"\n".join(f"{key}={value}" for key, value in metadata_data.items())
+ "\nEND"
)
# -- streamers # -- streamers
@ -285,9 +276,7 @@ class FakeQueueSource(FakeSource, controllers.QueueSource):
@pytest.fixture @pytest.fixture
def streamer(station, station_ports): def streamer(station, station_ports):
streamer = FakeStreamer(station=station) streamer = FakeStreamer(station=station)
-    streamer.sources = [
-        FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
-    ]
+    streamer.sources = [FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)]
streamer.dealer = FakeQueueSource(len(streamer.sources)) streamer.dealer = FakeQueueSource(len(streamer.sources))
streamer.sources.append(streamer.dealer) streamer.sources.append(streamer.dealer)
return streamer return streamer
@ -297,12 +286,8 @@ def streamer(station, station_ports):
def streamers(stations, stations_ports): def streamers(stations, stations_ports):
streamers = controllers.Streamers(streamer_class=FakeStreamer) streamers = controllers.Streamers(streamer_class=FakeStreamer)
# avoid unecessary db calls # avoid unecessary db calls
-    streamers.streamers = {
-        station.pk: FakeStreamer(station=station) for station in stations
-    }
+    streamers.streamers = {station.pk: FakeStreamer(station=station) for station in stations}
for j, streamer in enumerate(streamers.values()): for j, streamer in enumerate(streamers.values()):
-        streamer.sources = [
-            FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
-        ]
+        streamer.sources = [FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)]
streamer.sources.append(FakeQueueSource(len(streamer.sources))) streamer.sources.append(FakeQueueSource(len(streamer.sources)))
return streamers return streamers

View File

@ -16,9 +16,7 @@ class TestConnector:
assert connector.is_open assert connector.is_open
assert connector.socket.family == socket.AF_UNIX assert connector.socket.family == socket.AF_UNIX
assert connector.socket.type == socket.SOCK_STREAM assert connector.socket.type == socket.SOCK_STREAM
-        assert connector.socket.address == os.path.join(
-            working_dir, "test.sock"
-        )
+        assert connector.socket.address == os.path.join(working_dir, "test.sock")
connector.close() connector.close()
def test_open_af_inet(self, connector): def test_open_af_inet(self, connector):

View File

@ -37,9 +37,7 @@ class TestBaseMetaData:
assert metadata.validate_status("any") == "stopped" assert metadata.validate_status("any") == "stopped"
@pytest.mark.django_db @pytest.mark.django_db
-    def test_validate_air_time(
-        self, metadata, metadata_data, metadata_data_air_time
-    ):
+    def test_validate_air_time(self, metadata, metadata_data, metadata_data_air_time):
air_time = metadata_data["on_air"] air_time = metadata_data["on_air"]
result = metadata.validate_air_time(air_time) result = metadata.validate_air_time(air_time)
assert result == metadata_data_air_time assert result == metadata_data_air_time

View File

@ -43,10 +43,7 @@ def source(monitor, streamer, sound, diffusion):
@pytest.fixture @pytest.fixture
def tracks(sound): def tracks(sound):
-    items = [
-        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
-        for i in range(0, 4)
-    ]
+    items = [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 4)]
models.Track.objects.bulk_create(items) models.Track.objects.bulk_create(items)
return items return items
@ -178,9 +175,7 @@ class TestMonitor:
assert all(log_by_track.count(track) for track in tracks) assert all(log_by_track.count(track) for track in tracks)
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
-    def test_trace_tracks_returns_on_log_diffusion(
-        self, monitor, log, diffusion, tracks
-    ):
+    def test_trace_tracks_returns_on_log_diffusion(self, monitor, log, diffusion, tracks):
log.diffusion = None log.diffusion = None
monitor.trace_tracks(log) monitor.trace_tracks(log)
@ -210,9 +205,7 @@ class TestMonitor:
assert not monitor.calls["cancel_diff"] assert not monitor.calls["cancel_diff"]
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff(
-        self, monitor, streamer, diffusion, log
-    ):
+    def test_handle_diffusions_returns_on_diff(self, monitor, streamer, diffusion, log):
interface( interface(
monitor, monitor,
{ {
@ -232,9 +225,7 @@ class TestMonitor:
assert not monitor.calls["cancel_diff"] assert not monitor.calls["cancel_diff"]
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff_log_exists(
-        self, monitor, streamer, diffusion, log
-    ):
+    def test_handle_diffusions_returns_on_diff_log_exists(self, monitor, streamer, diffusion, log):
interface( interface(
monitor, monitor,
{ {
@ -264,9 +255,7 @@ class TestMonitor:
streamer.dealer.queue = None streamer.dealer.queue = None
streamer.dealer.rid = "13" streamer.dealer.rid = "13"
streamer.dealer.remaining = monitor.delay.total_seconds() + 10 streamer.dealer.remaining = monitor.delay.total_seconds() + 10
-        diffusion.start = (
-            tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
-        )
+        diffusion.start = tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
diffusion.end = tz.now() + tz.timedelta(minutes=30) diffusion.end = tz.now() + tz.timedelta(minutes=30)
diffusion.save() diffusion.save()
@ -285,9 +274,7 @@ class TestMonitor:
assert log.comment == "test" assert log.comment == "test"
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
-    def test_start_diff(
-        self, monitor, diffusion, source, episode, sound, tracks
-    ):
+    def test_start_diff(self, monitor, diffusion, source, episode, sound, tracks):
result = {} result = {}
monitor.log = lambda **kw: result.update(kw) monitor.log = lambda **kw: result.update(kw)
@ -321,17 +308,10 @@ class TestMonitor:
monitor.sync() monitor.sync()
assert monitor.sync_next >= now + monitor.sync_timeout assert monitor.sync_next >= now + monitor.sync_timeout
-        assert all(
-            source.calls.get("sync") for source in monitor.streamer.playlists
-        )
+        assert all(source.calls.get("sync") for source in monitor.streamer.playlists)
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
def test_sync_timeout_not_reached_skip_sync(self, monitor): def test_sync_timeout_not_reached_skip_sync(self, monitor):
-        monitor.sync_next = tz.now() + tz.timedelta(
-            seconds=monitor.sync_timeout.total_seconds() + 20
-        )
+        monitor.sync_next = tz.now() + tz.timedelta(seconds=monitor.sync_timeout.total_seconds() + 20)
monitor.sync() monitor.sync()
-        assert all(
-            not source.calls.get("sync")
-            for source in monitor.streamer.playlists
-        )
+        assert all(not source.calls.get("sync") for source in monitor.streamer.playlists)

View File

@ -67,11 +67,7 @@ class TestPlaylistSource:
@pytest.mark.django_db @pytest.mark.django_db
def test_get_sound_queryset(self, playlist_source, sounds): def test_get_sound_queryset(self, playlist_source, sounds):
query = playlist_source.get_sound_queryset() query = playlist_source.get_sound_queryset()
-        assert all(
-            r.program_id == playlist_source.program.pk
-            and r.type == r.TYPE_ARCHIVE
-            for r in query
-        )
+        assert all(r.program_id == playlist_source.program.pk and r.type == r.TYPE_ARCHIVE for r in query)
@pytest.mark.django_db @pytest.mark.django_db
def test_get_playlist(self, playlist_source, sounds): def test_get_playlist(self, playlist_source, sounds):
@ -114,9 +110,7 @@ class TestQueueSource:
@pytest.mark.django_db @pytest.mark.django_db
def test_requests(self, queue_source, socket, metadata_string): def test_requests(self, queue_source, socket, metadata_string):
queue_source.queue = [13, 14, 15] queue_source.queue = [13, 14, 15]
-        socket.recv_data = [
-            f"{metadata_string}\nEND" for _ in queue_source.queue
-        ]
+        socket.recv_data = [f"{metadata_string}\nEND" for _ in queue_source.queue]
requests = queue_source.requests requests = queue_source.requests
@ -127,10 +121,7 @@ class TestQueueSource:
def test_push(self, queue_source, socket): def test_push(self, queue_source, socket):
paths = ["/tmp/a", "/tmp/b"] paths = ["/tmp/a", "/tmp/b"]
queue_source.push(*paths) queue_source.push(*paths)
-        assert all(
-            socket.is_sent(f"{queue_source.id}_queue.push {path}")
-            for path in paths
-        )
+        assert all(socket.is_sent(f"{queue_source.id}_queue.push {path}") for path in paths)
@pytest.mark.django_db @pytest.mark.django_db
def test_fetch(self, queue_source, socket, metadata_string): def test_fetch(self, queue_source, socket, metadata_string):

View File

@ -12,9 +12,7 @@ class TestStreamers:
@pytest.fixture @pytest.fixture
def test_reset(self, streamers, stations): def test_reset(self, streamers, stations):
streamers.reset() streamers.reset()
-        assert all(
-            streamers.streamers[station.pk] == station for station in stations
-        )
+        assert all(streamers.streamers[station.pk] == station for station in stations)
@pytest.fixture @pytest.fixture
def test_fetch(self, streamers): def test_fetch(self, streamers):

View File

@ -168,18 +168,14 @@ class TestQueueSourceViewSet:
calls = {} calls = {}
sound = sounds[0] sound = sounds[0]
request = FakeRequest(station=station, data={"sound_id": sound.pk}) request = FakeRequest(station=station, data={"sound_id": sound.pk})
-        queue_source_viewset._run = lambda pk, func: calls.setdefault(
-            "_run", (pk, func)
-        )
+        queue_source_viewset._run = lambda pk, func: calls.setdefault("_run", (pk, func))
result = queue_source_viewset.push(request, 13) result = queue_source_viewset.push(request, 13)
assert "_run" in calls assert "_run" in calls
assert result[0] == 13 assert result[0] == 13
assert callable(result[1]) assert callable(result[1])
@pytest.mark.django_db @pytest.mark.django_db
-    def test_push_missing_sound_in_request_post(
-        self, queue_source_viewset, station
-    ):
+    def test_push_missing_sound_in_request_post(self, queue_source_viewset, station):
request = FakeRequest(station=station, data={}) request = FakeRequest(station=station, data={})
with pytest.raises(ValidationError): with pytest.raises(ValidationError):
queue_source_viewset.push(request, 0) queue_source_viewset.push(request, 0)

View File

@ -73,9 +73,7 @@ class StreamerViewSet(ControllerViewSet):
return Response(self.serialize(self.streamer)) return Response(self.serialize(self.streamer))
def list(self, request, pk=None): def list(self, request, pk=None):
return Response( return Response({"results": self.serialize(self.streamers.values(), many=True)})
{"results": self.serialize(self.streamers.values(), many=True)}
)
def dispatch(self, request, *args, pk=None, **kwargs): def dispatch(self, request, *args, pk=None, **kwargs):
if pk is not None: if pk is not None:
@ -93,9 +91,7 @@ class SourceViewSet(ControllerViewSet):
return (s for s in self.streamer.sources if isinstance(s, self.model)) return (s for s in self.streamer.sources if isinstance(s, self.model))
def get_source(self, pk): def get_source(self, pk):
-        source = next(
-            (source for source in self.get_sources() if source.id == pk), None
-        )
+        source = next((source for source in self.get_sources() if source.id == pk), None)
if source is None: if source is None:
raise Http404("source `%s` not found" % pk) raise Http404("source `%s` not found" % pk)
return source return source
@ -105,9 +101,7 @@ class SourceViewSet(ControllerViewSet):
return Response(self.serialize(source)) return Response(self.serialize(source))
def list(self, request): def list(self, request):
return Response( return Response({"results": self.serialize(self.get_sources(), many=True)})
{"results": self.serialize(self.get_sources(), many=True)}
)
def _run(self, pk, action): def _run(self, pk, action):
source = self.object = self.get_source(pk) source = self.object = self.get_source(pk)
@ -150,9 +144,5 @@ class QueueSourceViewSet(SourceViewSet):
if not request.data.get("sound_id"): if not request.data.get("sound_id"):
raise ValidationError('missing "sound_id" POST data') raise ValidationError('missing "sound_id" POST data')
-        sound = get_object_or_404(
-            self.get_sound_queryset(request), pk=request.data["sound_id"]
-        )
-        return self._run(
-            pk, lambda s: s.push(sound.file.path) if sound.file.path else None
-        )
+        sound = get_object_or_404(self.get_sound_queryset(request), pk=request.data["sound_id"])
+        return self._run(pk, lambda s: s.push(sound.file.path) if sound.file.path else None)

View File

@ -1,25 +0,0 @@
# General information
Aircox is a set of Django applications that aims to provide a radio management solution, and is
written in Python 3.5.
Running Aircox on production involves:
* Aircox modules and a running Django project;
* a supervisor for common tasks (sounds monitoring, stream control, etc.) -- `supervisord`;
* a wsgi and an HTTP server -- `gunicorn`, `nginx`;
* a database supported by Django (MySQL, SQLite, PostGresSQL);
# Architecture and concepts
Aircox is divided in three main modules:
* `programs`: basics of Aircox (programs, diffusions, sounds, etc. management);
* `controllers`: interact with application to generate audio stream (LiquidSoap);
* `cms`: create a website with Aircox elements (playlists, timetable, players on the website);
# Installation
# Configuration

View File

@ -10,11 +10,7 @@ sys.path.insert(1, os.path.dirname(os.path.realpath(__file__)))
PROJECT_ROOT = os.path.abspath(__file__ + "/../../../") PROJECT_ROOT = os.path.abspath(__file__ + "/../../../")
# DEBUG mode # DEBUG mode
-DEBUG = (
-    (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1))
-    if "AIRCOX_DEBUG" in os.environ
-    else False
-)
+DEBUG = (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1)) if "AIRCOX_DEBUG" in os.environ else False
# Internationalization and timezones: thoses values may be set in order to # Internationalization and timezones: thoses values may be set in order to
# have correct translation and timezone. # have correct translation and timezone.
@ -74,9 +70,7 @@ try:
except Exception: except Exception:
print( print(
"Can not set locale {LC}. Is it available on you system? Hint: " "Can not set locale {LC}. Is it available on you system? Hint: "
"Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format( "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
LC=LANGUAGE_CODE
)
) )
pass pass

View File

@ -7,6 +7,7 @@ try:
except ImportError: except ImportError:
pass pass
DEBUG = True
LOCALE_PATHS = ["aircox/locale", "aircox_streamer/locale"] LOCALE_PATHS = ["aircox/locale", "aircox_streamer/locale"]
@ -15,7 +16,7 @@ LOGGING = {
"disable_existing_loggers": False, "disable_existing_loggers": False,
"formatters": { "formatters": {
"timestamp": { "timestamp": {
"format": "{asctime} {levelname} {message}", "format": "{asctime} {module} {levelname} {message}",
"style": "{", "style": "{",
}, },
}, },
@ -26,6 +27,10 @@ LOGGING = {
}, },
}, },
"loggers": { "loggers": {
"root": {
"handlers": ["console"],
"level": os.getenv("DJANGO_LOG_LEVEL", "DEBUG"),
},
"aircox": { "aircox": {
"handlers": ["console"], "handlers": ["console"],
"level": os.getenv("DJANGO_LOG_LEVEL", "DEBUG"), "level": os.getenv("DJANGO_LOG_LEVEL", "DEBUG"),
@ -40,3 +45,9 @@ LOGGING = {
}, },
}, },
} }
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
}
}
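The added `root` logger and local-memory cache only apply to the development settings. A minimal sketch of how they behave once these settings are loaded (the cache key and value below are illustrative, not part of the commit):

```
import logging
from django.core.cache import cache

# any module-level logger now propagates to the console handler via "root"
logging.getLogger("some.module").debug("visible in the dev console")

# LocMemCache is per-process and in-memory only
cache.set("now-playing", {"title": "demo"}, timeout=30)
assert cache.get("now-playing")["title"] == "demo"
```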

View File

@ -10,6 +10,7 @@ For Django settings see:
https://docs.djangoproject.com/en/3.1/topics/settings/ https://docs.djangoproject.com/en/3.1/topics/settings/
https://docs.djangoproject.com/en/3.1/ref/settings/ https://docs.djangoproject.com/en/3.1/ref/settings/
""" """
from django.utils import timezone
from zoneinfo import ZoneInfo from zoneinfo import ZoneInfo
from .prod import * from .prod import *
@ -43,8 +44,6 @@ try:
except Exception: except Exception:
print( print(
"Can not set locale {LC}. Is it available on you system? Hint: " "Can not set locale {LC}. Is it available on you system? Hint: "
"Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format( "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
LC=LANGUAGE_CODE
)
) )
pass pass

View File

@ -28,6 +28,6 @@ urlpatterns = aircox.urls.urls + [
] ]
if settings.DEBUG: if settings.DEBUG:
-    urlpatterns += static(
-        settings.STATIC_URL, document_root=settings.STATIC_ROOT
-    ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
+    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(
+        settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
+    )

pyproject.toml (new file, 83 lines)
View File

@ -0,0 +1,83 @@
[project]
name = "aircox"
# version = "0.1"
description = "Radio management platform and website"
readme = "README.md"
license = {text = "GPLv3"}
requires-python = ">=3.8"
authors = [
{name = "Thomas", email = "thomas@bkfox.net"},
]
classifiers = [
"Framework :: Django",
"Programming Language :: Python",
"Programming Language :: Python :: 3.11",
]
dynamic = ["version", "dependencies"]
[project.urls]
"Homepage" = "https://git.radiocampus.be/rc/aircox/"
[build-system]
requires = ["setuptools>=60", "setuptools-scm>=8.0", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
include-package-data = true
[tool.setuptools.packages.find]
where = ["."]
include = ["aircox*",]
exclude = ["aircox*.tests*",]
namespaces = false
[tool.setuptools.dynamic]
dependencies = {file = ["requirements.txt"]}
[tool.setuptools_scm]
[tool.pytest.ini_options]
DJANGO_SETTINGS_MODULE = "instance.settings"
python_files = ["tests.py", "test_*.py", "*_tests.py"]
[tool.black]
line-length = 120
exclude = '''
/(
\.egg
| \.git
| \.hg
| \.tox
| \._build
| \.build
| \.bulk-out
| \.dist
| \.__pycache__
| \.venv
| \.migrations
| \.static
| \.instance/settings
)
'''
[tool.ruff]
line-length = 120
exclude = [
"egg",
"git",
"hg",
"tox",
"_build",
"build",
"dist",
"__pycache__",
"venv",
"*/migrations",
"static",
"instance/settings",
]
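A quick note on the metadata above: `dynamic = ["version", "dependencies"]` means the package version is derived by setuptools-scm from git tags at build time, and the dependency list is read from `requirements.txt`. A minimal sketch of checking the resolved version after an editable install (the printed value is hypothetical):

```
# pip install -e .
from importlib.metadata import version

print(version("aircox"))  # e.g. "1.0.dev121"
```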

View File

@ -1,4 +0,0 @@
[pytest]
DJANGO_SETTINGS_MODULE = instance.settings
# -- recommended but optional:
python_files = tests.py test_*.py *_tests.py

View File

@ -17,5 +17,5 @@ dateutils~=0.6
mutagen~=1.45 mutagen~=1.45
Pillow~=9.0 Pillow~=9.0
psutil~=5.9 psutil~=5.9
-PyYAML==5.4
+PyYAML==6.0.1
watchdog~=2.1 watchdog~=2.1

View File

@ -1,3 +1,4 @@
pytest~=7.2 pytest~=7.2
pytest-django~=4.5 pytest-django~=4.5
model_bakery~=1.10 model_bakery~=1.10
pyfakefs~=5.2

View File

@ -1,37 +0,0 @@
from setuptools import find_packages, setup
def to_rst(path):
try:
from pypandoc import convert
return convert(path, "rst")
except ImportError:
print("pypandoc module not found, can not convert Markdown to RST")
return open(path, "r").read()
def to_array(path):
with open(path, "r") as file:
return [r for r in file.read().split("\n") if r]
setup(
name="aircox",
version="0.9",
license="GPLv3",
author="bkfox",
description="Aircox is a radio programs manager including tools and cms",
long_description=to_rst("README.md"),
url="https://github.com/bkfox/aircox",
packages=find_packages(),
include_package_data=True,
install_requires=to_array("requirements.txt"),
classifiers=[
"Framework :: Django",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
],
)