Feat: packaging (#127)

- Add configuration files for packaging
- Pre-commit now uses ruff

Co-authored-by: bkfox <thomas bkfox net>
Reviewed-on: #127
Thomas Kairos 2023-10-11 10:58:34 +02:00
parent 5ea092dba6
commit f7a61fe6c0
82 changed files with 332 additions and 935 deletions


@@ -9,14 +9,11 @@ repos:
     rev: 23.1.0
     hooks:
       - id: black
-        args:
-          - --line-length=79
-          - --exclude="""\.git|\.__pycache__|venv|_build|buck-out|build|dist"""
-  - repo: https://github.com/PyCQA/flake8.git
-    rev: 6.0.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.0.292
     hooks:
-      - id: flake8
-        exclude: ^instance/settings/|migrations/
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
   - repo: https://github.com/PyCQA/docformatter.git
     rev: v1.5.1
     hooks:
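Taken together, this hunk drops the flake8 hook and the black/flake8 command-line arguments (line length, excludes) in favour of ruff's pre-commit hook. For reference, the affected part of .pre-commit-config.yaml should read roughly as follows after the change; this is a sketch assembled from the hunk above, so the black repository URL and the docformatter hook id (neither of which is visible in this excerpt) are assumptions:

  - repo: https://github.com/psf/black    # assumed URL; the diff only shows "rev: 23.1.0" and "id: black"
    rev: 23.1.0
    hooks:
      - id: black
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.0.292
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
  - repo: https://github.com/PyCQA/docformatter.git
    rev: v1.5.1
    hooks:
      - id: docformatter               # assumed id; only the repo and rev lines appear in the diff

The line-length and exclude settings previously passed to black and flake8 presumably move into the packaging configuration files added by this commit (for instance a [tool.ruff] section), which are not part of this excerpt.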


@@ -18,9 +18,7 @@ class EpisodeAdminForm(ModelForm):
 class EpisodeAdmin(SortableAdminBase, PageAdmin):
     form = EpisodeAdminForm
     list_display = PageAdmin.list_display
-    list_filter = tuple(
-        f for f in PageAdmin.list_filter if f != "pub_date"
-    ) + (
+    list_filter = tuple(f for f in PageAdmin.list_filter if f != "pub_date") + (
         "diffusion__start",
         "pub_date",
     )


@@ -14,13 +14,9 @@ class DateFieldFilter(filters.FieldListFilter):
     def __init__(self, field, request, params, model, model_admin, field_path):
         self.field_generic = f"{field_path}__"
-        self.date_params = {
-            k: v for k, v in params.items() if k.startswith(self.field_generic)
-        }
+        self.date_params = {k: v for k, v in params.items() if k.startswith(self.field_generic)}

-        exact_lookup = (
-            "date" if isinstance(field, models.DateTimeField) else "exact"
-        )
+        exact_lookup = "date" if isinstance(field, models.DateTimeField) else "exact"

         # links as: (label, param, input_type|None, value)
         self.links = [
@@ -29,17 +25,11 @@ class DateFieldFilter(filters.FieldListFilter):
             (_("Until"), self.field_generic + "lte", self.input_type),
         ]
         if field.null:
-            self.links.insert(
-                0, (_("None"), self.field_generic + "isnull", None, "1")
-            )
+            self.links.insert(0, (_("None"), self.field_generic + "isnull", None, "1"))

-        self.query_attrs = {
-            k: v for k, v in request.GET.items() if k not in self.date_params
-        }
+        self.query_attrs = {k: v for k, v in request.GET.items() if k not in self.date_params}
         self.query_string = urlencode(self.query_attrs)
-        super().__init__(
-            field, request, params, model, model_admin, field_path
-        )
+        super().__init__(field, request, params, model, model_admin, field_path)

     def expected_parameters(self):
         return [link[1] for link in self.links]
@@ -59,11 +49,7 @@ class DateFieldFilter(filters.FieldListFilter):
                 "value": value,
                 "type": link[2],
                 "query_attrs": self.query_attrs,
-                "query_string": urlencode({link[1]: value})
-                + "&"
-                + self.query_string
-                if value
-                else self.query_string,
+                "query_string": urlencode({link[1]: value}) + "&" + self.query_string if value else self.query_string,
             }


@@ -50,11 +50,7 @@ class BasePageAdmin(admin.ModelAdmin):
     change_form_template = "admin/aircox/page_change_form.html"

     def cover_thumb(self, obj):
-        return (
-            mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"]))
-            if obj.cover
-            else ""
-        )
+        return mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"])) if obj.cover else ""

     def get_changeform_initial_data(self, request):
         data = super().get_changeform_initial_data(request)
@@ -65,9 +61,7 @@ class BasePageAdmin(admin.ModelAdmin):
     def _get_common_context(self, query, extra_context=None):
         extra_context = extra_context or {}
         parent = query.get("parent", None)
-        extra_context["parent"] = (
-            None if parent is None else Page.objects.get_subclass(id=parent)
-        )
+        extra_context["parent"] = None if parent is None else Page.objects.get_subclass(id=parent)
         return extra_context

     def render_change_form(self, request, context, *args, **kwargs):
@@ -94,9 +88,7 @@ class PageAdmin(BasePageAdmin):
     search_fields = BasePageAdmin.search_fields + ("category__title",)
     fieldsets = deepcopy(BasePageAdmin.fieldsets)
-    fieldsets[0][1]["fields"].insert(
-        fieldsets[0][1]["fields"].index("slug") + 1, "category"
-    )
+    fieldsets[0][1]["fields"].insert(fieldsets[0][1]["fields"].index("slug") + 1, "category")
     fieldsets[1][1]["fields"] += ("featured", "allow_comments")


@@ -38,9 +38,7 @@ class SoundInline(admin.TabularInline):
     max_num = 0

     def audio(self, obj):
-        return mark_safe(
-            '<audio src="{}" controls></audio>'.format(obj.file.url)
-        )
+        return mark_safe('<audio src="{}" controls></audio>'.format(obj.file.url))

     audio.short_description = _("Audio")
@@ -86,13 +84,7 @@ class SoundAdmin(SortableAdminBase, admin.ModelAdmin):
     def related(self, obj):
         # TODO: link to episode or program edit
-        return (
-            obj.episode.title
-            if obj.episode
-            else obj.program.title
-            if obj.program
-            else ""
-        )
+        return obj.episode.title if obj.episode else obj.program.title if obj.program else ""

     related.short_description = _("Program / Episode")


@@ -26,21 +26,13 @@ class AdminSite(admin.AdminSite):
         context.update(
             {
                 # all programs
-                "programs": models.Program.objects.active()
-                .values("pk", "title")
-                .order_by("title"),
+                "programs": models.Program.objects.active().values("pk", "title").order_by("title"),
                 # today's diffusions
-                "diffusions": models.Diffusion.objects.date()
-                .order_by("start")
-                .select_related("episode"),
+                "diffusions": models.Diffusion.objects.date().order_by("start").select_related("episode"),
                 # TODO: only for dashboard
                 # last comments
-                "comments": models.Comment.objects.order_by(
-                    "-date"
-                ).select_related("page")[0:10],
-                "latests": models.Page.objects.select_subclasses().order_by(
-                    "-pub_date"
-                )[0:10],
+                "comments": models.Comment.objects.order_by("-date").select_related("page")[0:10],
+                "latests": models.Page.objects.select_subclasses().order_by("-pub_date")[0:10],
             }
         )
         return context
@@ -69,9 +61,7 @@ class AdminSite(admin.AdminSite):
         return [(label, reverse(url)) for label, url in self.tools]

     def route_view(self, url, view, name, admin_view=True, label=None):
-        self.extra_urls.append(
-            path(url, self.admin_view(view) if admin_view else view, name=name)
-        )
+        self.extra_urls.append(path(url, self.admin_view(view) if admin_view else view, name=name))
         if label:
             self.tools.append((label, "admin:" + name))


@@ -22,9 +22,7 @@ class DiffusionMonitor:
     def update(self):
         episodes, diffusions = [], []
-        for schedule in Schedule.objects.filter(
-            program__active=True, initial__isnull=True
-        ):
+        for schedule in Schedule.objects.filter(program__active=True, initial__isnull=True):
             eps, diffs = schedule.diffusions_of_month(self.date)
             if eps:
                 episodes += eps


@@ -44,9 +44,7 @@ class LogArchiver:
         path = self.get_path(station, date)
         # FIXME: remove binary mode
         with gzip.open(path, "ab") as archive:
-            data = yaml.dump(
-                [self.serialize(line) for line in logs]
-            ).encode("utf8")
+            data = yaml.dump([self.serialize(line) for line in logs]).encode("utf8")
             archive.write(data)

         if not keep:
@@ -95,10 +93,7 @@ class LogArchiver:
         return [
             Log(
-                diffusion=rel_obj(log, "diffusion"),
-                sound=rel_obj(log, "sound"),
-                track=rel_obj(log, "track"),
-                **log
+                diffusion=rel_obj(log, "diffusion"), sound=rel_obj(log, "sound"), track=rel_obj(log, "track"), **log
             )
             for log in logs
         ]


@@ -50,14 +50,7 @@ class PlaylistImport:
         logger.info("start reading csv " + self.path)
         self.data = list(
             csv.DictReader(
-                (
-                    row
-                    for row in file
-                    if not (
-                        row.startswith("#") or row.startswith("\ufeff#")
-                    )
-                    and row.strip()
-                ),
+                (row for row in file if not (row.startswith("#") or row.startswith("\ufeff#")) and row.strip()),
                 fieldnames=settings.IMPORT_PLAYLIST_CSV_COLS,
                 delimiter=settings.IMPORT_PLAYLIST_CSV_DELIMITER,
                 quotechar=settings.IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
@@ -70,11 +63,7 @@
         If save is true, save it into the database
         """
         if self.track_kwargs.get("sound") is None:
-            logger.error(
-                "related track's sound is missing. Skip import of "
-                + self.path
-                + "."
-            )
+            logger.error("related track's sound is missing. Skip import of " + self.path + ".")
             return

         maps = settings.IMPORT_PLAYLIST_CSV_COLS
@@ -87,17 +76,11 @@
                 return
             try:
                 timestamp = (
-                    int(line.get("minutes") or 0) * 60
-                    + int(line.get("seconds") or 0)
-                    if has_timestamp
-                    else None
+                    int(line.get("minutes") or 0) * 60 + int(line.get("seconds") or 0) if has_timestamp else None
                 )

                 track, created = Track.objects.get_or_create(
-                    title=line.get("title"),
-                    artist=line.get("artist"),
-                    position=index,
-                    **self.track_kwargs
+                    title=line.get("title"), artist=line.get("artist"), position=index, **self.track_kwargs
                 )
                 track.timestamp = timestamp
                 track.info = line.get("info")


@@ -58,14 +58,7 @@ class SoundFile:
     def episode(self):
         return self.sound and self.sound.episode

-    def sync(
-        self,
-        sound=None,
-        program=None,
-        deleted=False,
-        keep_deleted=False,
-        **kwargs
-    ):
+    def sync(self, sound=None, program=None, deleted=False, keep_deleted=False, **kwargs):
         """Update related sound model and save it."""
         if deleted:
             return self._on_delete(self.path, keep_deleted)
@@ -79,9 +72,7 @@
         if sound:
             created = False
         else:
-            sound, created = Sound.objects.get_or_create(
-                file=self.sound_path, defaults=kwargs
-            )
+            sound, created = Sound.objects.get_or_create(file=self.sound_path, defaults=kwargs)

         self.sound = sound
         self.path_info = self.read_path(self.path)
@@ -172,9 +163,7 @@
         year, month, day = pi.get("year"), pi.get("month"), pi.get("day")
         if pi.get("hour") is not None:
-            at = tz.datetime(
-                year, month, day, pi.get("hour", 0), pi.get("minute", 0)
-            )
+            at = tz.datetime(year, month, day, pi.get("hour", 0), pi.get("minute", 0))
             at = tz.make_aware(at)
         else:
             at = date(year, month, day)
@@ -210,22 +199,10 @@
         if self.info and self.info.tags:
             tags = self.info.tags
             title, artist, album, year = tuple(
-                t and ", ".join(t)
-                for t in (
-                    tags.get(k)
-                    for k in ("title", "artist", "album", "year")
-                )
-            )
-            title = (
-                title
-                or (self.path_info and self.path_info.get("name"))
-                or os.path.basename(path_noext)
-            )
-            info = (
-                "{} ({})".format(album, year)
-                if album and year
-                else album or year or ""
+                t and ", ".join(t) for t in (tags.get(k) for k in ("title", "artist", "album", "year"))
             )
+            title = title or (self.path_info and self.path_info.get("name")) or os.path.basename(path_noext)
+            info = "{} ({})".format(album, year) if album and year else album or year or ""
             track = Track(
                 sound=sound,
                 position=int(tags.get("tracknumber", 0)),


@@ -155,10 +155,7 @@ class MonitorHandler(PatternMatchingEventHandler):
         self.jobs = jobs or {}
         self.sync_kw = sync_kw

-        patterns = [
-            "*/{}/*{}".format(self.subdir, ext)
-            for ext in settings.SOUND_FILE_EXT
-        ]
+        patterns = ["*/{}/*{}".format(self.subdir, ext) for ext in settings.SOUND_FILE_EXT]
         super().__init__(patterns=patterns, ignore_directories=True)

     def on_created(self, event):
@@ -202,11 +199,7 @@ class SoundMonitor:
     def report(self, program=None, component=None, *content, logger=logging):
         content = " ".join([str(c) for c in content])
-        logger.info(
-            f"{program}: {content}"
-            if not component
-            else f"{program}, {component}: {content}"
-        )
+        logger.info(f"{program}: {content}" if not component else f"{program}, {component}: {content}")

     def scan(self, logger=logging):
         """For all programs, scan dirs.
@@ -234,9 +227,7 @@
             dirs.append(program.abspath)
         return dirs

-    def scan_for_program(
-        self, program, subdir, logger=logging, **sound_kwargs
-    ):
+    def scan_for_program(self, program, subdir, logger=logging, **sound_kwargs):
         """Scan a given directory that is associated to the given program, and
         update sounds information."""
         logger.info("- %s/", subdir)
@@ -257,9 +248,7 @@
             sounds.append(sound_file.sound.pk)

         # sounds in db & unchecked
-        sounds = Sound.objects.filter(file__startswith=subdir).exclude(
-            pk__in=sounds
-        )
+        sounds = Sound.objects.filter(file__startswith=subdir).exclude(pk__in=sounds)
         self.check_sounds(sounds, program=program)

     def check_sounds(self, qs, **sync_kwargs):


@@ -38,9 +38,7 @@ class SoxStats:
             args += ["trim", str(at), str(length)]
         args.append("stats")
-        p = subprocess.Popen(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        )
+        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         # sox outputs to stderr (my god WHYYYY)
         out_, out = p.communicate()
         self.values = self.parse(str(out, encoding="utf-8"))
@@ -94,16 +92,8 @@ class SoundStats:
             position += self.sample_length

     def check(self, name, min_val, max_val):
-        self.good = [
-            index
-            for index, stats in enumerate(self.stats)
-            if min_val <= stats.get(name) <= max_val
-        ]
-        self.bad = [
-            index
-            for index, stats in enumerate(self.stats)
-            if index not in self.good
-        ]
+        self.good = [index for index, stats in enumerate(self.stats) if min_val <= stats.get(name) <= max_val]
+        self.bad = [index for index, stats in enumerate(self.stats) if index not in self.good]
         self.resume()

     def resume(self):
@@ -120,10 +110,6 @@
     def _view(self, array):
         return [
-            "file"
-            if index == 0
-            else "sample {} (at {} seconds)".format(
-                index, (index - 1) * self.sample_length
-            )
+            "file" if index == 0 else "sample {} (at {} seconds)".format(index, (index - 1) * self.sample_length)
             for index in array
         ]


@@ -35,11 +35,7 @@ class WeekConverter:
         return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date()

     def to_url(self, value):
-        return (
-            value
-            if isinstance(value, str)
-            else "{:04d}/{:02d}".format(*value.isocalendar())
-        )
+        return value if isinstance(value, str) else "{:04d}/{:02d}".format(*value.isocalendar())


 class DateConverter:
@@ -52,10 +48,4 @@
         return datetime.date(int(value[0]), int(value[1]), int(value[2]))

     def to_url(self, value):
-        return (
-            value
-            if isinstance(value, str)
-            else "{:04d}/{:02d}/{:02d}".format(
-                value.year, value.month, value.day
-            )
-        )
+        return value if isinstance(value, str) else "{:04d}/{:02d}/{:02d}".format(value.year, value.month, value.day)


@@ -19,9 +19,7 @@ class PageFilters(filters.FilterSet):
 class EpisodeFilters(PageFilters):
-    podcast = filters.BooleanFilter(
-        method="podcast_filter", label=_("Podcast")
-    )
+    podcast = filters.BooleanFilter(method="podcast_filter", label=_("Podcast"))

     class Meta:
         model = Episode


@@ -30,8 +30,7 @@ class Command(BaseCommand):
             "--age",
             type=int,
             default=settings.LOGS_ARCHIVES_AGE,
-            help="minimal age in days of logs to archive. Default is "
-            "settings.LOGS_ARCHIVES_AGE",
+            help="minimal age in days of logs to archive. Default is " "settings.LOGS_ARCHIVES_AGE",
         )
         group.add_argument(
             "-k",


@@ -55,14 +55,11 @@ class Command(BaseCommand):
         group.add_argument(
             "--next-month",
             action="store_true",
-            help="set the date to the next month of given date"
-            " (if next month from today",
+            help="set the date to the next month of given date" " (if next month from today",
         )

     def handle(self, *args, **options):
-        date = datetime.date(
-            year=options["year"], month=options["month"], day=1
-        )
+        date = datetime.date(year=options["year"], month=options["month"], day=1)
         if options.get("next_month"):
             month = options.get("month")
             date += tz.timedelta(days=28)


@@ -51,18 +51,13 @@ class Command(BaseCommand):
     def handle(self, path, *args, **options):
         # FIXME: absolute/relative path of sounds vs given path
         if options.get("sound"):
-            sound = Sound.objects.filter(
-                file__icontains=options.get("sound")
-            ).first()
+            sound = Sound.objects.filter(file__icontains=options.get("sound")).first()
         else:
             path_, ext = os.path.splitext(path)
             sound = Sound.objects.filter(path__icontains=path_).first()

         if not sound:
-            logger.error(
-                "no sound found in the database for the path "
-                "{path}".format(path=path)
-            )
+            logger.error("no sound found in the database for the path " "{path}".format(path=path))
             return

         # FIXME: auto get sound.episode if any


@@ -43,8 +43,7 @@ class Command(BaseCommand):
             "-q",
             "--quality_check",
             action="store_true",
-            help="Enable quality check using sound_quality_check on all "
-            "sounds marqued as not good",
+            help="Enable quality check using sound_quality_check on all " "sounds marqued as not good",
         )
         parser.add_argument(
             "-s",
@@ -57,8 +56,7 @@
             "-m",
             "--monitor",
             action="store_true",
-            help="Run in monitor mode, watch for modification in the "
-            "filesystem and react in consequence",
+            help="Run in monitor mode, watch for modification in the " "filesystem and react in consequence",
         )

     def handle(self, *args, **options):


@@ -28,8 +28,7 @@ class Command(BaseCommand):
             "--sample_length",
             type=int,
             default=120,
-            help="size of sample to analyse in seconds. If not set (or 0), "
-            "does not analyse by sample",
+            help="size of sample to analyse in seconds. If not set (or 0), " "does not analyse by sample",
         )
         parser.add_argument(
             "-a",
@@ -43,8 +42,7 @@
             "--range",
             type=float,
             nargs=2,
-            help="range of minimal and maximal accepted value such as: "
-            "--range min max",
+            help="range of minimal and maximal accepted value such as: " "--range min max",
         )
         parser.add_argument(
             "-i",
@@ -64,10 +62,7 @@
             raise CommandError("no attribute specified")

         # sound analyse and checks
-        self.sounds = [
-            SoundStats(path, options.get("sample_length"))
-            for path in options.get("files")
-        ]
+        self.sounds = [SoundStats(path, options.get("sample_length")) for path in options.get("files")]
         self.bad = []
         self.good = []
         for sound in self.sounds:


@@ -84,9 +84,7 @@ class Migration(migrations.Migration):
             options={
                 "verbose_name": "Diffusion",
                 "verbose_name_plural": "Diffusions",
-                "permissions": (
-                    ("programming", "edit the diffusion's planification"),
-                ),
+                "permissions": (("programming", "edit the diffusion's planification"),),
             },
         ),
         migrations.CreateModel(
@@ -125,22 +123,16 @@
                 ),
                 (
                     "content",
-                    ckeditor.fields.RichTextField(
-                        blank=True, null=True, verbose_name="content"
-                    ),
+                    ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
                 ),
                 ("pub_date", models.DateTimeField(blank=True, null=True)),
                 (
                     "featured",
-                    models.BooleanField(
-                        default=False, verbose_name="featured"
-                    ),
+                    models.BooleanField(default=False, verbose_name="featured"),
                 ),
                 (
                     "allow_comments",
-                    models.BooleanField(
-                        default=True, verbose_name="allow comments"
-                    ),
+                    models.BooleanField(default=True, verbose_name="allow comments"),
                 ),
                 (
                     "category",
@@ -458,9 +450,7 @@
                 ("name", models.CharField(max_length=64, verbose_name="name")),
                 (
                     "slug",
-                    models.SlugField(
-                        max_length=64, unique=True, verbose_name="slug"
-                    ),
+                    models.SlugField(max_length=64, unique=True, verbose_name="slug"),
                 ),
                 (
                     "path",
@@ -566,9 +556,7 @@
                 ),
                 (
                     "content",
-                    ckeditor.fields.RichTextField(
-                        blank=True, null=True, verbose_name="content"
-                    ),
+                    ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
                 ),
                 (
                     "view",
@@ -949,9 +937,7 @@
                 ),
                 (
                     "time",
-                    models.TimeField(
-                        help_text="start time", verbose_name="time"
-                    ),
+                    models.TimeField(help_text="start time", verbose_name="time"),
                 ),
                 (
                     "timezone",
@@ -1643,9 +1629,7 @@
                 ),
                 (
                     "duration",
-                    models.TimeField(
-                        help_text="regular duration", verbose_name="duration"
-                    ),
+                    models.TimeField(help_text="regular duration", verbose_name="duration"),
                 ),
                 (
                     "frequency",


@@ -33,9 +33,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="page",
             name="content",
-            field=ckeditor_uploader.fields.RichTextUploadingField(
-                blank=True, null=True, verbose_name="content"
-            ),
+            field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
         ),
         migrations.AlterField(
             model_name="sound",
@@ -52,8 +50,6 @@
         migrations.AlterField(
             model_name="staticpage",
             name="content",
-            field=ckeditor_uploader.fields.RichTextUploadingField(
-                blank=True, null=True, verbose_name="content"
-            ),
+            field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
         ),
     ]


@@ -12,9 +12,7 @@ class Migration(migrations.Migration):
         migrations.AlterModelOptions(
             name="diffusion",
             options={
-                "permissions": (
-                    ("programming", "edit the diffusions' planification"),
-                ),
+                "permissions": (("programming", "edit the diffusions' planification"),),
                 "verbose_name": "Diffusion",
                 "verbose_name_plural": "Diffusions",
             },
@@ -22,9 +20,7 @@
         migrations.AddField(
             model_name="track",
             name="album",
-            field=models.CharField(
-                default="", max_length=128, verbose_name="album"
-            ),
+            field=models.CharField(default="", max_length=128, verbose_name="album"),
         ),
         migrations.AlterField(
             model_name="schedule",


@@ -12,8 +12,6 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="track",
             name="year",
-            field=models.IntegerField(
-                blank=True, null=True, verbose_name="year"
-            ),
+            field=models.IntegerField(blank=True, null=True, verbose_name="year"),
         ),
     ]


@@ -12,8 +12,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="track",
             name="album",
-            field=models.CharField(
-                blank=True, max_length=128, null=True, verbose_name="album"
-            ),
+            field=models.CharField(blank=True, max_length=128, null=True, verbose_name="album"),
         ),
     ]


@@ -30,9 +30,7 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "playlist_editor_sep",
-                    models.CharField(
-                        max_length=16, verbose_name="Playlist Editor Separator"
-                    ),
+                    models.CharField(max_length=16, verbose_name="Playlist Editor Separator"),
                 ),
                 (
                     "user",


@@ -19,11 +19,7 @@ __all__ = ("Diffusion", "DiffusionQuerySet")
 class DiffusionQuerySet(RerunQuerySet):
     def episode(self, episode=None, id=None):
         """Diffusions for this episode."""
-        return (
-            self.filter(episode=episode)
-            if id is None
-            else self.filter(episode__id=id)
-        )
+        return self.filter(episode=episode) if id is None else self.filter(episode__id=id)

     def on_air(self):
         """On air diffusions."""
@@ -40,9 +36,7 @@ class DiffusionQuerySet(RerunQuerySet):
         """Diffusions occuring date."""
         date = date or datetime.date.today()
         start = tz.make_aware(tz.datetime.combine(date, datetime.time()))
-        end = tz.make_aware(
-            tz.datetime.combine(date, datetime.time(23, 59, 59, 999))
-        )
+        end = tz.make_aware(tz.datetime.combine(date, datetime.time(23, 59, 59, 999)))
         # start = tz.get_current_timezone().localize(start)
         # end = tz.get_current_timezone().localize(end)
         qs = self.filter(start__range=(start, end))
@@ -50,11 +44,7 @@
     def at(self, date, order=True):
         """Return diffusions at specified date or datetime."""
-        return (
-            self.now(date, order)
-            if isinstance(date, tz.datetime)
-            else self.date(date, order)
-        )
+        return self.now(date, order) if isinstance(date, tz.datetime) else self.date(date, order)

     def after(self, date=None):
         """Return a queryset of diffusions that happen after the given date
@@ -142,9 +132,7 @@ class Diffusion(Rerun):
     class Meta:
         verbose_name = _("Diffusion")
         verbose_name_plural = _("Diffusions")
-        permissions = (
-            ("programming", _("edit the diffusions' planification")),
-        )
+        permissions = (("programming", _("edit the diffusions' planification")),)

     def __str__(self):
         str_ = "{episode} - {date}".format(
@@ -202,19 +190,12 @@
     def is_now(self):
         """True if diffusion is currently running."""
         now = tz.now()
-        return (
-            self.type == self.TYPE_ON_AIR
-            and self.start <= now
-            and self.end >= now
-        )
+        return self.type == self.TYPE_ON_AIR and self.start <= now and self.end >= now

     @property
     def is_live(self):
         """True if Diffusion is live (False if there are sounds files)."""
-        return (
-            self.type == self.TYPE_ON_AIR
-            and not self.episode.sound_set.archive().count()
-        )
+        return self.type == self.TYPE_ON_AIR and not self.episode.sound_set.archive().count()

     def get_playlist(self, **types):
         """Returns sounds as a playlist (list of *local* archive file path).
@@ -224,9 +205,7 @@
         from .sound import Sound

         return list(
-            self.get_sounds(**types)
-            .filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
-            .values_list("path", flat=True)
+            self.get_sounds(**types).filter(path__isnull=False, type=Sound.TYPE_ARCHIVE).values_list("path", flat=True)
         )

     def get_sounds(self, **types):
@@ -238,9 +217,7 @@
         from .sound import Sound

         sounds = (self.initial or self).sound_set.order_by("type", "path")
-        _in = [
-            getattr(Sound.Type, name) for name, value in types.items() if value
-        ]
+        _in = [getattr(Sound.Type, name) for name, value in types.items() if value]

         return sounds.filter(type__in=_in)
@@ -262,8 +239,7 @@
         # .filter(conflict_with=True)
         return (
             Diffusion.objects.filter(
-                Q(start__lt=self.start, end__gt=self.start)
-                | Q(start__gt=self.start, start__lt=self.end)
+                Q(start__lt=self.start, end__gt=self.start) | Q(start__gt=self.start, start__lt=self.end)
             )
             .exclude(pk=self.pk)
             .distinct()


@@ -24,10 +24,7 @@ class Episode(Page):
         """Return serialized data about podcasts."""
         from ..serializers import PodcastSerializer

-        podcasts = [
-            PodcastSerializer(s).data
-            for s in self.sound_set.public().order_by("type")
-        ]
+        podcasts = [PodcastSerializer(s).data for s in self.sound_set.public().order_by("type")]
         if self.cover:
             options = {"size": (128, 128), "crop": "scale"}
             cover = get_thumbnailer(self.cover).get_thumbnail(options).url
@@ -76,6 +73,4 @@
             if title is None
             else title
         )
-        return super().get_init_kwargs_from(
-            page, title=title, program=page, **kwargs
-        )
+        return super().get_init_kwargs_from(page, title=title, program=page, **kwargs)


@@ -18,11 +18,7 @@ __all__ = ("Log", "LogQuerySet")
 class LogQuerySet(models.QuerySet):
     def station(self, station=None, id=None):
-        return (
-            self.filter(station=station)
-            if id is None
-            else self.filter(station_id=id)
-        )
+        return self.filter(station=station) if id is None else self.filter(station_id=id)

     def date(self, date):
         start = tz.datetime.combine(date, datetime.time())
@@ -32,11 +28,7 @@
         # return self.filter(date__date=date)

     def after(self, date):
-        return (
-            self.filter(date__gte=date)
-            if isinstance(date, tz.datetime)
-            else self.filter(date__date__gte=date)
-        )
+        return self.filter(date__gte=date) if isinstance(date, tz.datetime) else self.filter(date__date__gte=date)

     def on_air(self):
         return self.filter(type=Log.TYPE_ON_AIR)


@@ -25,9 +25,7 @@ __all__ = (
 )

-headline_re = re.compile(
-    r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?"
-)
+headline_re = re.compile(r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?")


 class Category(models.Model):
@@ -54,17 +52,11 @@ class BasePageQuerySet(InheritanceQuerySet):
     def parent(self, parent=None, id=None):
         """Return pages having this parent."""
-        return (
-            self.filter(parent=parent)
-            if id is None
-            else self.filter(parent__id=id)
-        )
+        return self.filter(parent=parent) if id is None else self.filter(parent__id=id)

     def search(self, q, search_content=True):
         if search_content:
-            return self.filter(
-                models.Q(title__icontains=q) | models.Q(content__icontains=q)
-            )
+            return self.filter(models.Q(title__icontains=q) | models.Q(content__icontains=q))
         return self.filter(title__icontains=q)
@@ -89,9 +81,7 @@ class BasePage(models.Model):
         related_name="child_set",
     )
     title = models.CharField(max_length=100)
-    slug = models.SlugField(
-        _("slug"), max_length=120, blank=True, unique=True, db_index=True
-    )
+    slug = models.SlugField(_("slug"), max_length=120, blank=True, unique=True, db_index=True)
     status = models.PositiveSmallIntegerField(
         _("status"),
         default=STATUS_DRAFT,
@@ -132,11 +122,7 @@
         super().save(*args, **kwargs)

     def get_absolute_url(self):
-        return (
-            reverse(self.detail_url_name, kwargs={"slug": self.slug})
-            if self.is_published
-            else "#"
-        )
+        return reverse(self.detail_url_name, kwargs={"slug": self.slug}) if self.is_published else "#"

     @property
     def is_draft(self):
@@ -177,9 +163,7 @@
 class PageQuerySet(BasePageQuerySet):
     def published(self):
-        return self.filter(
-            status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now()
-        )
+        return self.filter(status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now())


 class Page(BasePage):
@@ -193,9 +177,7 @@ class Page(BasePage):
         null=True,
         db_index=True,
     )
-    pub_date = models.DateTimeField(
-        _("publication date"), blank=True, null=True, db_index=True
-    )
+    pub_date = models.DateTimeField(_("publication date"), blank=True, null=True, db_index=True)
     featured = models.BooleanField(
         _("featured"),
         default=False,
@@ -296,9 +278,7 @@ class Comment(models.Model):
 class NavItem(models.Model):
     """Navigation menu items."""

-    station = models.ForeignKey(
-        Station, models.CASCADE, verbose_name=_("station")
-    )
+    station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
     menu = models.SlugField(_("menu"), max_length=24)
     order = models.PositiveSmallIntegerField(_("order"))
     text = models.CharField(_("title"), max_length=64)
@@ -318,13 +298,7 @@ class NavItem(models.Model):
         ordering = ("order", "pk")

     def get_url(self):
-        return (
-            self.url
-            if self.url
-            else self.page.get_absolute_url()
-            if self.page
-            else None
-        )
+        return self.url if self.url else self.page.get_absolute_url() if self.page else None

     def render(self, request, css_class="", active_class=""):
         url = self.get_url()
@@ -336,6 +310,4 @@
         elif not css_class:
             return format_html('<a href="{}">{}</a>', url, self.text)
         else:
-            return format_html(
-                '<a href="{}" class="{}">{}</a>', url, css_class, self.text
-            )
+            return format_html('<a href="{}" class="{}">{}</a>', url, css_class, self.text)


@@ -47,9 +47,7 @@ class Program(Page):
     """

     # explicit foreign key in order to avoid related name clashes
-    station = models.ForeignKey(
-        Station, models.CASCADE, verbose_name=_("station")
-    )
+    station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
     active = models.BooleanField(
         _("active"),
         default=True,
@@ -126,12 +124,7 @@ class Program(Page):
         # TODO: move in signals
         path_ = getattr(self, "__initial_path", None)
         abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
-        if (
-            path_ is not None
-            and path_ != self.path
-            and os.path.exists(abspath)
-            and not os.path.exists(self.abspath)
-        ):
+        if path_ is not None and path_ != self.path and os.path.exists(abspath) and not os.path.exists(self.abspath):
             logger.info(
                 "program #%s's dir changed to %s - update it.",
                 self.id,
@@ -139,9 +132,7 @@
             )
             shutil.move(abspath, self.abspath)
-            Sound.objects.filter(path__startswith=path_).update(
-                file=Concat("file", Substr(F("file"), len(path_)))
-            )
+            Sound.objects.filter(path__startswith=path_).update(file=Concat("file", Substr(F("file"), len(path_))))


 class ProgramChildQuerySet(PageQuerySet):


@@ -15,18 +15,10 @@ class RerunQuerySet(models.QuerySet):
     """Queryset for Rerun (sub)classes."""

     def station(self, station=None, id=None):
-        return (
-            self.filter(program__station=station)
-            if id is None
-            else self.filter(program__station__id=id)
-        )
+        return self.filter(program__station=station) if id is None else self.filter(program__station__id=id)

     def program(self, program=None, id=None):
-        return (
-            self.filter(program=program)
-            if id is None
-            else self.filter(program__id=id)
-        )
+        return self.filter(program=program) if id is None else self.filter(program__id=id)

     def rerun(self):
         return self.filter(initial__isnull=False)
@@ -78,14 +70,8 @@
     def clean(self):
         super().clean()
-        if (
-            hasattr(self, "start")
-            and self.initial is not None
-            and self.initial.start >= self.start
-        ):
-            raise ValidationError(
-                {"initial": _("rerun must happen after original")}
-            )
+        if hasattr(self, "start") and self.initial is not None and self.initial.start >= self.start:
+            raise ValidationError({"initial": _("rerun must happen after original")})

     def save_rerun(self):
         self.program = self.initial.program


@@ -102,11 +102,7 @@ class Schedule(Rerun):
         """Return frequency formated for display."""
         from django.template.defaultfilters import date

-        return (
-            self._get_FIELD_display(self._meta.get_field("frequency"))
-            .format(day=date(self.date, "l"))
-            .capitalize()
-        )
+        return self._get_FIELD_display(self._meta.get_field("frequency")).format(day=date(self.date, "l")).capitalize()

     def normalize(self, date):
         """Return a datetime set to schedule's time for the provided date,
@@ -124,9 +120,7 @@
             # last of the month
             if freq == Schedule.Frequency.last:
-                date = date.replace(
-                    day=calendar.monthrange(date.year, date.month)[1]
-                )
+                date = date.replace(day=calendar.monthrange(date.year, date.month)[1])
                 date_wday = date.weekday()

                 # end of month before the wanted weekday: move one week back
@@ -138,9 +132,7 @@
             # move to the first day of the month that matches the schedule's
             # weekday. Check on SO#3284452 for the formula
             date_wday, month = date.weekday(), date.month
-            date += tz.timedelta(
-                days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday
-            )
+            date += tz.timedelta(days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday)

             if freq == Schedule.Frequency.one_on_two:
                 # - adjust date with modulo 14 (= 2 weeks in days)
@@ -149,11 +141,7 @@
                     date += tz.timedelta(days=7)
                 dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3))
             else:
-                dates = (
-                    date + tz.timedelta(days=7 * week)
-                    for week in range(0, 5)
-                    if freq & (0b1 << week)
-                )
+                dates = (date + tz.timedelta(days=7 * week) for week in range(0, 5) if freq & (0b1 << week))

             return [self.normalize(date) for date in dates if date.month == month]
@@ -166,29 +154,22 @@
         from .diffusion import Diffusion
         from .episode import Episode

-        if (
-            self.initial is not None
-            or self.frequency == Schedule.Frequency.ponctual
-        ):
+        if self.initial is not None or self.frequency == Schedule.Frequency.ponctual:
            return [], []

         # dates for self and reruns as (date, initial)
-        reruns = [
-            (rerun, rerun.date - self.date) for rerun in self.rerun_set.all()
-        ]
+        reruns = [(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()]

         dates = {date: None for date in self.dates_of_month(date)}
         dates.update(
-            (rerun.normalize(date.date() + delta), date)
-            for date in list(dates.keys())
-            for rerun, delta in reruns
+            (rerun.normalize(date.date() + delta), date) for date in list(dates.keys()) for rerun, delta in reruns
         )

         # remove dates corresponding to existing diffusions
         saved = set(
-            Diffusion.objects.filter(
-                start__in=dates.keys(), program=self.program, schedule=self
-            ).values_list("start", flat=True)
+            Diffusion.objects.filter(start__in=dates.keys(), program=self.program, schedule=self).values_list(
+                "start", flat=True
+            )
         )

         # make diffs


@@ -32,9 +32,7 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
         group, created = Group.objects.get_or_create(name=group_name)
         if created and permissions:
             for codename in permissions:
-                permission = Permission.objects.filter(
-                    codename=codename
-                ).first()
+                permission = Permission.objects.filter(codename=codename).first()
                 if permission:
                     group.permissions.add(permission)
             group.save()
@@ -44,9 +42,7 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
 @receiver(signals.post_save, sender=Page)
 def page_post_save(sender, instance, created, *args, **kwargs):
     if not created and instance.cover:
-        Page.objects.filter(parent=instance, cover__isnull=True).update(
-            cover=instance.cover
-        )
+        Page.objects.filter(parent=instance, cover__isnull=True).update(cover=instance.cover)


 @receiver(signals.post_save, sender=Program)
@@ -54,15 +50,11 @@ def program_post_save(sender, instance, created, *args, **kwargs):
     """Clean-up later diffusions when a program becomes inactive."""
     if not instance.active:
         Diffusion.objects.program(instance).after(tz.now()).delete()
-        Episode.objects.parent(instance).filter(
-            diffusion__isnull=True
-        ).delete()
+        Episode.objects.parent(instance).filter(diffusion__isnull=True).delete()

     cover = getattr(instance, "__initial_cover", None)
     if cover is None and instance.cover is not None:
-        Episode.objects.parent(instance).filter(cover__isnull=True).update(
-            cover=instance.cover
-        )
+        Episode.objects.parent(instance).filter(cover__isnull=True).update(cover=instance.cover)


 @receiver(signals.pre_save, sender=Schedule)
@@ -77,8 +69,7 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
     corresponding diffusions accordingly."""
     initial = getattr(instance, "_initial", None)
     if not initial or (
-        (instance.time, instance.duration, instance.timezone)
-        == (initial.time, initial.duration, initial.timezone)
+        (instance.time, instance.duration, instance.timezone) == (initial.time, initial.duration, initial.timezone)
     ):
         return
@@ -97,13 +88,9 @@
 def schedule_pre_delete(sender, instance, *args, **kwargs):
     """Delete later corresponding diffusion to a changed schedule."""
     Diffusion.objects.filter(schedule=instance).after(tz.now()).delete()
-    Episode.objects.filter(
-        diffusion__isnull=True, content__isnull=True, sound__isnull=True
-    ).delete()
+    Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()


 @receiver(signals.post_delete, sender=Diffusion)
 def diffusion_post_delete(sender, instance, *args, **kwargs):
-    Episode.objects.filter(
-        diffusion__isnull=True, content__isnull=True, sound__isnull=True
-    ).delete()
+    Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()


@@ -50,9 +50,7 @@ class SoundQuerySet(models.QuerySet):
     def path(self, paths):
         if isinstance(paths, str):
             return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", ""))
-        return self.filter(
-            file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths)
-        )
+        return self.filter(file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths))

     def playlist(self, archive=True, order_by=True):
         """Return files absolute paths as a flat list (exclude sound without
@@ -66,9 +64,7 @@
             self = self.order_by("file")
         return [
             os.path.join(conf.MEDIA_ROOT, file)
-            for file in self.filter(file__isnull=False).values_list(
-                "file", flat=True
-            )
+            for file in self.filter(file__isnull=False).values_list("file", flat=True)
         ]

     def search(self, query):
@@ -122,11 +118,7 @@ class Sound(models.Model):
     )

     def _upload_to(self, filename):
-        subdir = (
-            settings.SOUND_ARCHIVES_SUBDIR
-            if self.type == self.TYPE_ARCHIVE
-            else settings.SOUND_EXCERPTS_SUBDIR
-        )
+        subdir = settings.SOUND_ARCHIVES_SUBDIR if self.type == self.TYPE_ARCHIVE else settings.SOUND_EXCERPTS_SUBDIR
         return os.path.join(self.program.path, subdir, filename)

     file = models.FileField(
@@ -161,10 +153,7 @@
     )
     is_downloadable = models.BooleanField(
         _("downloadable"),
-        help_text=_(
-            "whether it can be publicly downloaded by visitors (sound must be "
-            "public)"
-        ),
+        help_text=_("whether it can be publicly downloaded by visitors (sound must be " "public)"),
         default=False,
     )
@@ -224,9 +213,7 @@
         if self.type == self.TYPE_REMOVED and self.program:
             changed = True
             self.type = (
-                self.TYPE_ARCHIVE
-                if self.file.name.startswith(self.program.archives_path)
-                else self.TYPE_EXCERPT
+                self.TYPE_ARCHIVE if self.file.name.startswith(self.program.archives_path) else self.TYPE_EXCERPT
             )

         # check mtime -> reset quality if changed (assume file changed)
@@ -299,8 +286,7 @@ class Track(models.Model):
         blank=True,
         null=True,
         help_text=_(
-            "additional informations about this track, such as "
-            "the version, if is it a remix, features, etc."
+            "additional informations about this track, such as " "the version, if is it a remix, features, etc."
         ),
     )
@@ -310,13 +296,9 @@
         ordering = ("position",)

     def __str__(self):
-        return "{self.artist} -- {self.title} -- {self.position}".format(
-            self=self
-        )
+        return "{self.artist} -- {self.title} -- {self.position}".format(self=self)

     def save(self, *args, **kwargs):
-        if (self.sound is None and self.episode is None) or (
-            self.sound is not None and self.episode is not None
-        ):
+        if (self.sound is None and self.episode is None) or (self.sound is not None and self.episode is not None):
             raise ValueError("sound XOR episode is required")
         super().save(*args, **kwargs)


@@ -67,9 +67,7 @@ class Station(models.Model):
         max_length=2048,
         null=True,
         blank=True,
-        help_text=_(
-            "Audio streams urls used by station's player. One url " "a line."
-        ),
+        help_text=_("Audio streams urls used by station's player. One url " "a line."),
     )
     default_cover = FilerImageField(
         on_delete=models.SET_NULL,
@@ -153,16 +151,10 @@ class Port(models.Model):
         (TYPE_FILE, _("file")),
     )

-    station = models.ForeignKey(
-        Station, models.CASCADE, verbose_name=_("station")
-    )
-    direction = models.SmallIntegerField(
-        _("direction"), choices=DIRECTION_CHOICES
-    )
+    station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
+    direction = models.SmallIntegerField(_("direction"), choices=DIRECTION_CHOICES)
     type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
-    active = models.BooleanField(
-        _("active"), default=True, help_text=_("this port is active")
-    )
+    active = models.BooleanField(_("active"), default=True, help_text=_("this port is active"))
     settings = models.TextField(
         _("port settings"),
         help_text=_(
@@ -193,8 +185,6 @@
     def save(self, *args, **kwargs):
         if not self.is_valid_type():
-            raise ValueError(
-                "port type is not allowed with the given port direction"
-            )
+            raise ValueError("port type is not allowed with the given port direction")
         return super().save(*args, **kwargs)


@@ -15,6 +15,4 @@ class UserSettings(models.Model):
         related_name="aircox_settings",
     )
     playlist_editor_columns = models.JSONField(_("Playlist Editor Columns"))
-    playlist_editor_sep = models.CharField(
-        _("Playlist Editor Separator"), max_length=16
-    )
+    playlist_editor_sep = models.CharField(_("Playlist Editor Separator"), max_length=16)


@@ -34,9 +34,7 @@ def do_has_perm(context, obj, perm, user=None):
     """Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``"""
     if user is None:
         user = context["request"].user
-    return user.has_perm(
-        "{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name)
-    )
+    return user.has_perm("{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name))


 @register.filter(name="is_diffusion")
@@ -69,10 +67,7 @@ def do_player_live_attr(context):
 def do_nav_items(context, menu, **kwargs):
     """Render navigation items for the provided menu name."""
     station, request = context["station"], context["request"]
-    return [
-        (item, item.render(request, **kwargs))
-        for item in station.navitem_set.filter(menu=menu)
-    ]
+    return [(item, item.render(request, **kwargs)) for item in station.navitem_set.filter(menu=menu)]


 @register.simple_tag(name="update_query")
@@ -90,10 +85,4 @@ def do_update_query(obj, **kwargs):
 def do_verbose_name(obj, plural=False):
     """Return model's verbose name (singular or plural) or `obj` if it is a
     string (can act for default values)."""
-    return (
-        obj
-        if isinstance(obj, str)
-        else obj._meta.verbose_name_plural
-        if plural
-        else obj._meta.verbose_name
-    )
+    return obj if isinstance(obj, str) else obj._meta.verbose_name_plural if plural else obj._meta.verbose_name


@@ -51,9 +51,7 @@ class WrapperMixin:
     ns = None
     ns_attr = None

-    def __init__(
-        self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs
-    ):
+    def __init__(self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs):
         self.target = target
         if ns:
             self.inject(ns, ns_attr)
@@ -87,10 +85,7 @@ class WrapperMixin:
         if self.target is ns_target:
             return
         elif self.target is not None and self.ns:
-            raise RuntimeError(
-                "self target already injected. It must be "
-                "`release` before `inject`."
-            )
+            raise RuntimeError("self target already injected. It must be " "`release` before `inject`.")

         self.target = ns_target
         setattr(ns, ns_attr, self.interface)
@@ -145,9 +140,7 @@ class SpoofMixin:
         traces = self.traces[name]
         if not isinstance(traces, list):
             traces = (traces,)
-        return tuple(
-            self._get_trace(trace, args=args, kw=kw) for trace in traces
-        )
+        return tuple(self._get_trace(trace, args=args, kw=kw) for trace in traces)

     def _get_trace(self, trace, args=False, kw=False):
         if (args and kw) or (not args and not kw):

View File

@ -48,15 +48,11 @@ class TestDateFieldFilter:
def test___init__(self, date_filter): def test___init__(self, date_filter):
assert date_filter.date_params == {"pub_date__lte": tomorrow} assert date_filter.date_params == {"pub_date__lte": tomorrow}
date_filter.links = [ date_filter.links = [(str(link[0]), *list(link[1:])) for link in date_filter.links]
(str(link[0]), *list(link[1:])) for link in date_filter.links
]
assert date_filter.links == [ assert date_filter.links == [
(str(_("None")), "pub_date__isnull", None, "1"), (str(_("None")), "pub_date__isnull", None, "1"),
(str(_("Exact")), "pub_date__date", date_filter.input_type), (str(_("Exact")), "pub_date__date", date_filter.input_type),
(str(_("Since")), "pub_date__gte", date_filter.input_type), (str(_("Since")), "pub_date__gte", date_filter.input_type),
(str(_("Until")), "pub_date__lte", date_filter.input_type), (str(_("Until")), "pub_date__lte", date_filter.input_type),
] ]
assert date_filter.query_attrs == { assert date_filter.query_attrs == {"pub_date__gte": today.strftime("%Y-%m-%d")}
"pub_date__gte": today.strftime("%Y-%m-%d")
}

View File

@ -30,9 +30,7 @@ def staff_user():
@pytest.fixture @pytest.fixture
def logger(): def logger():
logger = Interface( logger = Interface(logging, {"info": None, "debug": None, "error": None, "warning": None})
logging, {"info": None, "debug": None, "error": None, "warning": None}
)
return logger return logger
@ -123,10 +121,7 @@ def schedules(sched_initials, sched_reruns):
@pytest.fixture @pytest.fixture
def episodes(programs): def episodes(programs):
return [ return [baker.make(models.Episode, parent=program, cover=None) for program in programs]
baker.make(models.Episode, parent=program, cover=None)
for program in programs
]
@pytest.fixture @pytest.fixture
@ -158,15 +153,7 @@ def sound(program):
@pytest.fixture @pytest.fixture
def tracks(episode, sound): def tracks(episode, sound):
items = [ items = [baker.prepare(models.Track, episode=episode, position=i, timestamp=i * 60) for i in range(0, 3)]
baker.prepare(
models.Track, episode=episode, position=i, timestamp=i * 60
)
for i in range(0, 3)
]
items += [ items += [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 3)]
baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
for i in range(0, 3)
]
models.Track.objects.bulk_create(items) models.Track.objects.bulk_create(items)
return items return items

View File

@ -21,30 +21,21 @@ class TestDiffusion:
def test_update(self, monitor, schedules, sched_initials, logger): def test_update(self, monitor, schedules, sched_initials, logger):
monitor.update() monitor.update()
diffusions = models.Diffusion.objects.filter( diffusions = models.Diffusion.objects.filter(schedule__in=sched_initials)
schedule__in=sched_initials
)
by_date = {} by_date = {}
for diff in diffusions: for diff in diffusions:
assert diff.episode_id assert diff.episode_id
by_date.setdefault(diff.schedule_id, set()).add( by_date.setdefault(diff.schedule_id, set()).add((diff.start, diff.end))
(diff.start, diff.end)
)
for schedule in sched_initials: for schedule in sched_initials:
if schedule.pk not in by_date: if schedule.pk not in by_date:
continue continue
_, items = schedule.diffusions_of_month(now) _, items = schedule.diffusions_of_month(now)
assert all( assert all((item.start, item.end) in by_date[schedule.pk] for item in items)
(item.start, item.end) in by_date[schedule.pk]
for item in items
)
@pytest.mark.django_db @pytest.mark.django_db
def test_clean(self, monitor, episode): def test_clean(self, monitor, episode):
start = tz.make_aware( start = tz.make_aware(datetime.combine(monitor.date - timedelta(days=1), time(10, 20)))
datetime.combine(monitor.date - timedelta(days=1), time(10, 20))
)
diff = models.Diffusion( diff = models.Diffusion(
type=models.Diffusion.TYPE_UNCONFIRMED, type=models.Diffusion.TYPE_UNCONFIRMED,
episode=episode, episode=episode,

View File

@ -79,16 +79,12 @@ class TestLogArchiver:
def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs): def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs):
# before logs are deleted from db, get data # before logs are deleted from db, get data
sorted = archiver.sort_logs(logs_qs) sorted = archiver.sort_logs(logs_qs)
paths = { paths = {archiver.get_path(station, date) for station, date in sorted.keys()}
archiver.get_path(station, date) for station, date in sorted.keys()
}
count = archiver.archive(logs_qs, keep=False) count = archiver.archive(logs_qs, keep=False)
assert count == len(logs) assert count == len(logs)
assert not logs_qs.count() assert not logs_qs.count()
assert all( assert all(path in paths for path, *_ in gzip._traces("open", args=True))
path in paths for path, *_ in gzip._traces("open", args=True)
)
results = archiver.load_file("dummy path") results = archiver.load_file("dummy path")
assert results assert results
@ -104,7 +100,4 @@ class TestLogArchiver:
assert sorted assert sorted
for (station, date), logs in sorted.items(): for (station, date), logs in sorted.items():
assert all( assert all(log.station == station and log.date.date() == date for log in logs)
log.station == station and log.date.date() == date
for log in logs
)

View File

@ -53,13 +53,7 @@ def path_infos():
@pytest.fixture @pytest.fixture
def sound_files(path_infos): def sound_files(path_infos):
return { return {k: r for k, r in ((path, SoundFile(conf.MEDIA_ROOT + "/" + path)) for path in path_infos.keys())}
k: r
for k, r in (
(path, SoundFile(conf.MEDIA_ROOT + "/" + path))
for path in path_infos.keys()
)
}
def test_sound_path(sound_files): def test_sound_path(sound_files):
@ -78,17 +72,9 @@ def test_read_path(path_infos, sound_files):
def _setup_diff(program, info): def _setup_diff(program, info):
episode = models.Episode(program=program, title="test-episode") episode = models.Episode(program=program, title="test-episode")
at = tz.datetime( at = tz.datetime(**{k: info[k] for k in ("year", "month", "day", "hour", "minute") if info.get(k)})
**{
k: info[k]
for k in ("year", "month", "day", "hour", "minute")
if info.get(k)
}
)
at = tz.make_aware(at) at = tz.make_aware(at)
diff = models.Diffusion( diff = models.Diffusion(episode=episode, start=at, end=at + timedelta(hours=1))
episode=episode, start=at, end=at + timedelta(hours=1)
)
episode.save() episode.save()
diff.save() diff.save()
return diff return diff

View File

@ -92,9 +92,7 @@ class TestTask:
task.log_msg = "--{event.src_path}--" task.log_msg = "--{event.src_path}--"
sound_file = task(event, logger=logger, kw=13) sound_file = task(event, logger=logger, kw=13)
assert sound_file._trace("sync", kw=True) == {"kw": 13} assert sound_file._trace("sync", kw=True) == {"kw": 13}
assert logger._trace("info", args=True) == ( assert logger._trace("info", args=True) == (task.log_msg.format(event=event),)
task.log_msg.format(event=event),
)
class TestDeleteTask: class TestDeleteTask:
@ -125,9 +123,7 @@ class TestModifiedTask:
datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now}) datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now})
def sleep(imeta, n): def sleep(imeta, n):
datetime._imeta.funcs[ datetime._imeta.funcs["now"] = modified_task.timestamp + tz.timedelta(hours=10)
"now"
] = modified_task.timestamp + tz.timedelta(hours=10)
time = Interface.inject(sound_monitor, "time", {"sleep": sleep}) time = Interface.inject(sound_monitor, "time", {"sleep": sleep})
modified_task.wait() modified_task.wait()
@ -175,9 +171,7 @@ class TestMonitorHandler:
def test__submit(self, monitor_handler, event): def test__submit(self, monitor_handler, event):
handler = Interface() handler = Interface()
handler, created = monitor_handler._submit( handler, created = monitor_handler._submit(handler, event, "prefix", kw=13)
handler, event, "prefix", kw=13
)
assert created assert created
assert handler.future._trace("add_done_callback") assert handler.future._trace("add_done_callback")
assert monitor_handler.pool._trace("submit") == ( assert monitor_handler.pool._trace("submit") == (
@ -192,9 +186,7 @@ class TestMonitorHandler:
@pytest.fixture @pytest.fixture
def monitor_interfaces(): def monitor_interfaces():
items = { items = {
"atexit": Interface.inject( "atexit": Interface.inject(sound_monitor, "atexit", {"register": None, "leave": None}),
sound_monitor, "atexit", {"register": None, "leave": None}
),
"observer": Interface.inject( "observer": Interface.inject(
sound_monitor, sound_monitor,
"Observer", "Observer",

View File

@ -38,12 +38,8 @@ sox_values = {
@pytest.fixture @pytest.fixture
def sox_interfaces(): def sox_interfaces():
process = Interface( process = Interface(None, {"communicate": ("", sox_output.encode("utf-8"))})
None, {"communicate": ("", sox_output.encode("utf-8"))}
)
subprocess = Interface.inject( subprocess = Interface.inject(sound_stats, "subprocess", {"Popen": lambda *_, **__: process})
sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
)
yield {"process": process, "subprocess": subprocess} yield {"process": process, "subprocess": subprocess}
subprocess._irelease() subprocess._irelease()
@ -110,9 +106,7 @@ class TestSoundStats:
def test_check(self, stats): def test_check(self, stats):
good = [{"val": i} for i in range(0, 11)] good = [{"val": i} for i in range(0, 11)]
bad = [{"val": i} for i in range(-10, 0)] + [ bad = [{"val": i} for i in range(-10, 0)] + [{"val": i} for i in range(11, 20)]
{"val": i} for i in range(11, 20)
]
stats.stats = good + bad stats.stats = good + bad
calls = {} calls = {}
stats.resume = lambda *_: calls.setdefault("resume", True) stats.resume = lambda *_: calls.setdefault("resume", True)

View File

@ -12,11 +12,7 @@ class TestEpisode:
@pytest.mark.django_db @pytest.mark.django_db
def test_podcasts(self, episode, podcasts): def test_podcasts(self, episode, podcasts):
podcasts = { podcasts = {podcast.pk: podcast for podcast in podcasts if podcast.episode == episode}
podcast.pk: podcast
for podcast in podcasts
if podcast.episode == episode
}
for data in episode.podcasts: for data in episode.podcasts:
podcast = podcasts[data["pk"]] podcast = podcasts[data["pk"]]
assert data["name"] == podcast.name assert data["name"] == podcast.name

View File

@ -12,44 +12,28 @@ class TestRerunQuerySet:
@pytest.mark.django_db @pytest.mark.django_db
def test_station_by_obj(self, stations, schedules): def test_station_by_obj(self, stations, schedules):
for station in stations: for station in stations:
queryset = ( queryset = Schedule.objects.station(station).distinct().values_list("program__station", flat=True)
Schedule.objects.station(station)
.distinct()
.values_list("program__station", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == station.pk assert queryset.first() == station.pk
@pytest.mark.django_db @pytest.mark.django_db
def test_station_by_id(self, stations, schedules): def test_station_by_id(self, stations, schedules):
for station in stations: for station in stations:
queryset = ( queryset = Schedule.objects.station(id=station.pk).distinct().values_list("program__station", flat=True)
Schedule.objects.station(id=station.pk)
.distinct()
.values_list("program__station", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == station.pk assert queryset.first() == station.pk
@pytest.mark.django_db @pytest.mark.django_db
def test_program_by_obj(self, programs, schedules): def test_program_by_obj(self, programs, schedules):
for program in programs: for program in programs:
queryset = ( queryset = Schedule.objects.program(program).distinct().values_list("program", flat=True)
Schedule.objects.program(program)
.distinct()
.values_list("program", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == program.pk assert queryset.first() == program.pk
@pytest.mark.django_db @pytest.mark.django_db
def test_program_by_id(self, programs, schedules): def test_program_by_id(self, programs, schedules):
for program in programs: for program in programs:
queryset = ( queryset = Schedule.objects.program(id=program.pk).distinct().values_list("program", flat=True)
Schedule.objects.program(id=program.pk)
.distinct()
.values_list("program", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() == program.pk assert queryset.first() == program.pk
@ -60,11 +44,7 @@ class TestRerunQuerySet:
@pytest.mark.django_db @pytest.mark.django_db
def test_initial(self, schedules): def test_initial(self, schedules):
queryset = ( queryset = Schedule.objects.initial().distinct().values_list("initial", flat=True)
Schedule.objects.initial()
.distinct()
.values_list("initial", flat=True)
)
assert queryset.count() == 1 assert queryset.count() == 1
assert queryset.first() is None assert queryset.first() is None

View File

@ -49,9 +49,7 @@ class TestSchedule:
@pytest.mark.django_db @pytest.mark.django_db
def test_dates_of_month_ponctual(self): def test_dates_of_month_ponctual(self):
schedule = baker.prepare( schedule = baker.prepare(Schedule, frequency=Schedule.Frequency.ponctual)
Schedule, frequency=Schedule.Frequency.ponctual
)
at = schedule.date + relativedelta(months=4) at = schedule.date + relativedelta(months=4)
assert schedule.dates_of_month(at) == [] assert schedule.dates_of_month(at) == []
@ -59,9 +57,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4)) @pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4)) @pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_last(self, months, hour): def test_dates_of_month_last(self, months, hour):
schedule = baker.prepare( schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last)
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last
)
at = schedule.date + relativedelta(months=months) at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at) datetimes = schedule.dates_of_month(at)
assert len(datetimes) == 1 assert len(datetimes) == 1
@ -73,9 +69,7 @@ class TestSchedule:
at = date(at.year, at.month, month_info[1]) at = date(at.year, at.month, month_info[1])
if at.weekday() < schedule.date.weekday(): if at.weekday() < schedule.date.weekday():
at -= timedelta(days=7) at -= timedelta(days=7)
at += timedelta(days=schedule.date.weekday()) - timedelta( at += timedelta(days=schedule.date.weekday()) - timedelta(days=at.weekday())
days=at.weekday()
)
assert dt.date() == at assert dt.date() == at
# since the same method is used for first, second, etc. frequencies # since the same method is used for first, second, etc. frequencies
@ -84,9 +78,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4)) @pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4)) @pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_every(self, months, hour): def test_dates_of_month_every(self, months, hour):
schedule = baker.prepare( schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every)
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every
)
at = schedule.date + relativedelta(months=months) at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at) datetimes = schedule.dates_of_month(at)
last = None last = None
@ -128,8 +120,4 @@ class TestSchedule:
episodes, diffusions = schedule.diffusions_of_month(at) episodes, diffusions = schedule.diffusions_of_month(at)
assert all(r.date in dates for r in episodes) assert all(r.date in dates for r in episodes)
assert all( assert all((not r.initial or r.date in dates) and r.type == Diffusion.TYPE_ON_AIR for r in diffusions)
(not r.initial or r.date in dates)
and r.type == Diffusion.TYPE_ON_AIR
for r in diffusions
)

View File

@ -39,8 +39,7 @@ def test_user_default_groups():
groups = Group.objects.filter(name__in=default_groups.keys()) groups = Group.objects.filter(name__in=default_groups.keys())
assert groups.exists() assert groups.exists()
assert all( assert all(
set(group.permissions.all().values_list("codename", flat=True)) set(group.permissions.all().values_list("codename", flat=True)) == set(default_groups[group.name])
== set(default_groups[group.name])
for group in groups for group in groups
) )
user_groups = set(user.groups.all().values_list("name", flat=True)) user_groups = set(user.groups.all().values_list("name", flat=True))
@ -104,7 +103,5 @@ def test_schedule_pre_delete(sched, eps_diffs):
@pytest.mark.django_db @pytest.mark.django_db
def test_diffusion_post_delete(eps_diffs): def test_diffusion_post_delete(eps_diffs):
eps = eps_diffs[0][0] eps = eps_diffs[0][0]
Diffusion.objects.filter( Diffusion.objects.filter(id__in=[r.id for r in eps.diffusion_set.all()]).delete()
id__in=[r.id for r in eps.diffusion_set.all()]
).delete()
assert Episode.objects.filter(id=eps.id).first() is None assert Episode.objects.filter(id=eps.id).first() is None

View File

@ -29,9 +29,7 @@ def test_date_or_default():
def test_to_timedelta(): def test_to_timedelta():
val = datetime(2023, 1, 10, hour=20, minute=10, second=1) val = datetime(2023, 1, 10, hour=20, minute=10, second=1)
assert utils.to_timedelta(val) == timedelta( assert utils.to_timedelta(val) == timedelta(hours=20, minutes=10, seconds=1)
hours=20, minutes=10, seconds=1
)
def test_to_seconds(): def test_to_seconds():

View File

@ -23,16 +23,12 @@ class FakeView:
@pytest.fixture @pytest.fixture
def published_pages(): def published_pages():
return baker.make( return baker.make(models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3)
models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3
)
@pytest.fixture @pytest.fixture
def unpublished_pages(): def unpublished_pages():
return baker.make( return baker.make(models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3)
models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3
)
@pytest.fixture @pytest.fixture

View File

@ -96,9 +96,7 @@ class TestParentMixin:
@pytest.mark.django_db @pytest.mark.django_db
def test_get_parent_raises_404(self, parent_mixin): def test_get_parent_raises_404(self, parent_mixin):
with pytest.raises(Http404): with pytest.raises(Http404):
parent_mixin.get_parent( parent_mixin.get_parent(self.req, parent_slug="parent-invalid-slug")
self.req, parent_slug="parent-invalid-slug"
)
def test_get_parent_not_parent_model(self, parent_mixin): def test_get_parent_not_parent_model(self, parent_mixin):
parent_mixin.parent_model = None parent_mixin.parent_model = None

View File

@ -29,9 +29,7 @@ api = [
path("logs/", views.LogListAPIView.as_view(), name="live"), path("logs/", views.LogListAPIView.as_view(), name="live"),
path( path(
"user/settings/", "user/settings/",
viewsets.UserSettingsViewSet.as_view( viewsets.UserSettingsViewSet.as_view({"get": "retrieve", "post": "update", "put": "update"}),
{"get": "retrieve", "post": "update", "put": "update"}
),
name="user-settings", name="user-settings",
), ),
] + router.urls ] + router.urls

View File

@ -72,9 +72,7 @@ def date_or_default(date, into=None):
def to_timedelta(time): def to_timedelta(time):
"""Transform a datetime or a time instance to a timedelta, only using time """Transform a datetime or a time instance to a timedelta, only using time
info.""" info."""
return datetime.timedelta( return datetime.timedelta(hours=time.hour, minutes=time.minute, seconds=time.second)
hours=time.hour, minutes=time.minute, seconds=time.second
)
def to_seconds(time): def to_seconds(time):
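The joined to_timedelta call above keeps only the time-of-day components of its argument. A standalone restatement of that behaviour (illustrative only, not part of this commit):

from datetime import datetime, time, timedelta

def time_of_day(value):
    # same idea as to_timedelta: ignore the date part, keep hour/minute/second
    return timedelta(hours=value.hour, minutes=value.minute, seconds=value.second)

assert time_of_day(time(20, 10, 1)) == timedelta(hours=20, minutes=10, seconds=1)
assert time_of_day(datetime(2023, 1, 10, 20, 10, 1)) == timedelta(hours=20, minutes=10, seconds=1)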

View File

@ -37,9 +37,5 @@ class StatisticsView(AdminMixin, LogListView, ListView):
def get_object_list(self, logs, full=False): def get_object_list(self, logs, full=False):
if not logs.exists(): if not logs.exists():
logs = ( logs = LogArchiver().load(self.station, self.date) if self.date else []
LogArchiver().load(self.station, self.date)
if self.date
else []
)
return super().get_object_list(logs, True) return super().get_object_list(logs, True)

View File

@ -9,11 +9,7 @@ class ArticleDetailView(PageDetailView):
model = Article model = Article
def get_sidebar_queryset(self): def get_sidebar_queryset(self):
qs = ( qs = Article.objects.published().select_related("cover").order_by("-pub_date")
Article.objects.published()
.select_related("cover")
.order_by("-pub_date")
)
return qs return qs

View File

@ -24,9 +24,7 @@ class BaseView(TemplateResponseMixin, ContextMixin):
def get_sidebar_queryset(self): def get_sidebar_queryset(self):
"""Return a queryset of items to render on the side nav.""" """Return a queryset of items to render on the side nav."""
return ( return Page.objects.select_subclasses().published().order_by("-pub_date")
Page.objects.select_subclasses().published().order_by("-pub_date")
)
def get_sidebar_url(self): def get_sidebar_url(self):
return reverse("page-list") return reverse("page-list")
@ -43,20 +41,14 @@ class BaseView(TemplateResponseMixin, ContextMixin):
if has_sidebar and "sidebar_object_list" not in kwargs: if has_sidebar and "sidebar_object_list" not in kwargs:
sidebar_object_list = self.get_sidebar_queryset() sidebar_object_list = self.get_sidebar_queryset()
if sidebar_object_list is not None: if sidebar_object_list is not None:
kwargs["sidebar_object_list"] = sidebar_object_list[ kwargs["sidebar_object_list"] = sidebar_object_list[: self.list_count]
: self.list_count
]
kwargs["sidebar_list_url"] = self.get_sidebar_url() kwargs["sidebar_list_url"] = self.get_sidebar_url()
if "audio_streams" not in kwargs: if "audio_streams" not in kwargs:
kwargs["audio_streams"] = self.station.streams kwargs["audio_streams"] = self.station.streams
if "model" not in kwargs: if "model" not in kwargs:
model = ( model = getattr(self, "model", None) or hasattr(self, "object") and type(self.object)
getattr(self, "model", None)
or hasattr(self, "object")
and type(self.object)
)
kwargs["model"] = model kwargs["model"] = model
return super().get_context_data(**kwargs) return super().get_context_data(**kwargs)

View File

@ -30,9 +30,7 @@ class HomeView(BaseView, ListView):
current_diff = Diffusion.objects.on_air().now(now).first() current_diff = Diffusion.objects.on_air().now(now).first()
next_diffs = Diffusion.objects.on_air().after(now) next_diffs = Diffusion.objects.on_air().after(now)
if current_diff: if current_diff:
diffs = [current_diff] + list( diffs = [current_diff] + list(next_diffs.exclude(pk=current_diff.pk)[:2])
next_diffs.exclude(pk=current_diff.pk)[:2]
)
else: else:
diffs = next_diffs[:3] diffs = next_diffs[:3]
return diffs return diffs

View File

@ -27,13 +27,7 @@ class LogListMixin(GetDateMixin):
def get_queryset(self): def get_queryset(self):
# only get logs for tracks: log for diffusion will be retrieved # only get logs for tracks: log for diffusion will be retrieved
# by the diffusions' queryset. # by the diffusions' queryset.
qs = ( qs = super().get_queryset().on_air().filter(track__isnull=False).filter(date__lte=tz.now())
super()
.get_queryset()
.on_air()
.filter(track__isnull=False)
.filter(date__lte=tz.now())
)
return ( return (
qs.date(self.date) qs.date(self.date)
if self.date is not None if self.date is not None
@ -43,11 +37,7 @@ class LogListMixin(GetDateMixin):
) )
def get_diffusions_queryset(self): def get_diffusions_queryset(self):
qs = ( qs = Diffusion.objects.station(self.station).on_air().filter(start__lte=tz.now())
Diffusion.objects.station(self.station)
.on_air()
.filter(start__lte=tz.now())
)
return ( return (
qs.date(self.date) qs.date(self.date)
if self.date is not None if self.date is not None
@ -87,9 +77,7 @@ class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
kwargs.update( kwargs.update(
{ {
"date": self.date, "date": self.date,
"dates": ( "dates": (today - datetime.timedelta(days=i) for i in range(0, 7)),
today - datetime.timedelta(days=i) for i in range(0, 7)
),
"object_list": self.get_object_list(self.object_list), "object_list": self.get_object_list(self.object_list),
} }
) )
@ -124,6 +112,4 @@ class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):
def get_serializer(self, queryset, *args, **kwargs): def get_serializer(self, queryset, *args, **kwargs):
full = bool(self.request.GET.get("full")) full = bool(self.request.GET.get("full"))
return super().get_serializer( return super().get_serializer(self.get_object_list(queryset, full), *args, **kwargs)
self.get_object_list(queryset, full), *args, **kwargs
)

View File

@ -14,13 +14,7 @@ class GetDateMixin:
def get_date(self): def get_date(self):
date = self.request.GET.get("date") date = self.request.GET.get("date")
return ( return str_to_date(date, "-") if date is not None else self.kwargs["date"] if "date" in self.kwargs else None
str_to_date(date, "-")
if date is not None
else self.kwargs["date"]
if "date" in self.kwargs
else None
)
def get(self, *args, **kwargs): def get(self, *args, **kwargs):
if self.redirect_date_url and self.request.GET.get("date"): if self.redirect_date_url and self.request.GET.get("date"):
@ -55,9 +49,7 @@ class ParentMixin:
return return
lookup = {self.parent_field: kwargs[self.parent_url_kwarg]} lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
return get_object_or_404( return get_object_or_404(self.parent_model.objects.select_related("cover"), **lookup)
self.parent_model.objects.select_related("cover"), **lookup
)
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
self.parent = self.get_parent(request, *args, **kwargs) self.parent = self.get_parent(request, *args, **kwargs)
@ -83,11 +75,7 @@ class AttachedToMixin:
def get_page(self): def get_page(self):
if self.attach_to_value is not None: if self.attach_to_value is not None:
return ( return StaticPage.objects.filter(attach_to=self.attach_to_value).published().first()
StaticPage.objects.filter(attach_to=self.attach_to_value)
.published()
.first()
)
return super().get_page() return super().get_page()

View File

@ -32,13 +32,7 @@ class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
return super().get(*args, **kwargs) return super().get(*args, **kwargs)
def get_queryset(self): def get_queryset(self):
return ( return super().get_queryset().select_subclasses().published().select_related("cover")
super()
.get_queryset()
.select_subclasses()
.published()
.select_related("cover")
)
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
kwargs.setdefault("item_template_name", self.item_template_name) kwargs.setdefault("item_template_name", self.item_template_name)
@ -97,12 +91,7 @@ class PageListView(FiltersMixin, BasePageListView):
return super().get_filterset(data, query) return super().get_filterset(data, query)
def get_queryset(self): def get_queryset(self):
qs = ( qs = super().get_queryset().select_related("category").order_by("-pub_date")
super()
.get_queryset()
.select_related("category")
.order_by("-pub_date")
)
return qs return qs
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
@ -131,9 +120,7 @@ class PageDetailView(BasePageDetailView):
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
if self.object.allow_comments and "comment_form" not in kwargs: if self.object.allow_comments and "comment_form" not in kwargs:
kwargs["comment_form"] = CommentForm() kwargs["comment_form"] = CommentForm()
kwargs["comments"] = Comment.objects.filter(page=self.object).order_by( kwargs["comments"] = Comment.objects.filter(page=self.object).order_by("-date")
"-date"
)
return super().get_context_data(**kwargs) return super().get_context_data(**kwargs)
@classmethod @classmethod

View File

@ -12,9 +12,7 @@ class BaseProgramMixin:
return self.object return self.object
def get_sidebar_url(self): def get_sidebar_url(self):
return reverse( return reverse("program-page-list", kwargs={"parent_slug": self.program.slug})
"program-page-list", kwargs={"parent_slug": self.program.slug}
)
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
self.program = self.get_program() self.program = self.get_program()

View File

@ -70,9 +70,7 @@ class UserSettingsViewSet(viewsets.ViewSet):
permission_classes = [IsAuthenticated] permission_classes = [IsAuthenticated]
def get_serializer(self, instance=None, **kwargs): def get_serializer(self, instance=None, **kwargs):
return self.serializer_class( return self.serializer_class(instance=instance, context={"user": self.request.user}, **kwargs)
instance=instance, context={"user": self.request.user}, **kwargs
)
@action(detail=False, methods=["GET"]) @action(detail=False, methods=["GET"])
def retrieve(self, request): def retrieve(self, request):

View File

@ -45,9 +45,7 @@ class Connector:
if self.is_open: if self.is_open:
return 1 return 1
family = ( family = socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
)
try: try:
self.socket = self.socket_class(family, socket.SOCK_STREAM) self.socket = self.socket_class(family, socket.SOCK_STREAM)
self.socket.connect(self.address) self.socket.connect(self.address)
@ -78,13 +76,7 @@ class Connector:
if data: if data:
data = response_re.sub(r"\1", data).strip() data = response_re.sub(r"\1", data).strip()
data = ( data = self.parse(data) if parse else self.parse_json(data) if parse_json else data
self.parse(data)
if parse
else self.parse_json(data)
if parse_json
else data
)
return data return data
except Exception: except Exception:
self.close() self.close()
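Connector.open now selects the socket family in a single expression: a string address is taken as a unix socket path, anything else as an inet (host, port) pair. A minimal sketch under that assumption (illustrative only, not part of this commit):

import socket

def socket_family(address):
    # plain string -> unix domain socket path; tuple -> TCP address
    # (AF_UNIX is only available on POSIX platforms)
    return socket.AF_UNIX if isinstance(address, str) else socket.AF_INET

assert socket_family("/tmp/station.sock") == socket.AF_UNIX
assert socket_family(("127.0.0.1", 1234)) == socket.AF_INET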

View File

@ -62,9 +62,7 @@ class Monitor:
def get_logs_queryset(self): def get_logs_queryset(self):
"""Return queryset to assign as `self.logs`""" """Return queryset to assign as `self.logs`"""
return self.station.log_set.select_related( return self.station.log_set.select_related("diffusion", "sound", "track").order_by("-pk")
"diffusion", "sound", "track"
).order_by("-pk")
def init_last_sound_logs(self): def init_last_sound_logs(self):
"""Retrieve last logs and initialize `last_sound_logs`""" """Retrieve last logs and initialize `last_sound_logs`"""
@ -136,12 +134,7 @@ class Monitor:
diff = None diff = None
sound = Sound.objects.path(air_uri).first() sound = Sound.objects.path(air_uri).first()
if sound and sound.episode_id is not None: if sound and sound.episode_id is not None:
diff = ( diff = Diffusion.objects.episode(id=sound.episode_id).on_air().now(air_time).first()
Diffusion.objects.episode(id=sound.episode_id)
.on_air()
.now(air_time)
.first()
)
# log sound on air # log sound on air
return self.log( return self.log(
@ -158,9 +151,7 @@ class Monitor:
if log.diffusion: if log.diffusion:
return return
tracks = Track.objects.filter( tracks = Track.objects.filter(sound_id=log.sound_id, timestamp__isnull=False).order_by("timestamp")
sound_id=log.sound_id, timestamp__isnull=False
).order_by("timestamp")
if not tracks.exists(): if not tracks.exists():
return return
@ -217,11 +208,7 @@ class Monitor:
dealer = self.streamer.dealer dealer = self.streamer.dealer
# start # start
if ( if not dealer.queue and dealer.rid is None or dealer.remaining < self.delay.total_seconds():
not dealer.queue
and dealer.rid is None
or dealer.remaining < self.delay.total_seconds()
):
self.start_diff(dealer, diff) self.start_diff(dealer, diff)
# cancel # cancel
elif diff.start < now - self.cancel_timeout: elif diff.start < now - self.cancel_timeout:
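The joined condition in handle_diffusions drops the wrapping parentheses; because `and` binds tighter than `or`, the grouping is the same as before. A small precedence sketch (illustrative only, not part of this commit; the helper name is made up):

def should_start(queue, rid, remaining, delay_seconds):
    # equivalent to: (not queue and rid is None) or remaining < delay_seconds
    return not queue and rid is None or remaining < delay_seconds

assert should_start([], None, 100.0, 10.0) is True       # nothing queued yet
assert should_start(["item"], "13", 5.0, 10.0) is True   # almost out of time
assert should_start(["item"], "13", 100.0, 10.0) is False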

View File

@ -47,9 +47,7 @@ class Streamer:
self.id = self.station.slug.replace("-", "_") self.id = self.station.slug.replace("-", "_")
self.path = os.path.join(station.path, "station.liq") self.path = os.path.join(station.path, "station.liq")
self.connector = connector or Connector( self.connector = connector or Connector(os.path.join(station.path, "station.sock"))
os.path.join(station.path, "station.sock")
)
self.init_sources() self.init_sources()
@property @property
@ -91,9 +89,7 @@ class Streamer:
def init_sources(self): def init_sources(self):
streams = self.station.program_set.filter(stream__isnull=False) streams = self.station.program_set.filter(stream__isnull=False)
self.dealer = QueueSource(self, "dealer") self.dealer = QueueSource(self, "dealer")
self.sources = [self.dealer] + [ self.sources = [self.dealer] + [PlaylistSource(self, program=program) for program in streams]
PlaylistSource(self, program=program) for program in streams
]
def make_config(self): def make_config(self):
"""Make configuration files and directory (and sync sources)""" """Make configuration files and directory (and sync sources)"""
@ -128,12 +124,7 @@ class Streamer:
self.source = next( self.source = next(
iter( iter(
sorted( sorted(
( (source for source in self.sources if source.request_status == "playing" and source.air_time),
source
for source in self.sources
if source.request_status == "playing"
and source.air_time
),
key=lambda o: o.air_time, key=lambda o: o.air_time,
reverse=True, reverse=True,
) )
@ -149,11 +140,7 @@ class Streamer:
if not os.path.exists(self.socket_path): if not os.path.exists(self.socket_path):
return return
conns = [ conns = [conn for conn in psutil.net_connections(kind="unix") if conn.laddr == self.socket_path]
conn
for conn in psutil.net_connections(kind="unix")
if conn.laddr == self.socket_path
]
for conn in conns: for conn in conns:
if conn.pid is not None: if conn.pid is not None:
os.kill(conn.pid, signal.SIGKILL) os.kill(conn.pid, signal.SIGKILL)

View File

@ -23,9 +23,7 @@ class Streamers:
def reset(self, stations=Station.objects.active()): def reset(self, stations=Station.objects.active()):
# FIXME: cf. TODO in aircox.controllers about model updates # FIXME: cf. TODO in aircox.controllers about model updates
stations = stations.all() stations = stations.all()
self.streamers = { self.streamers = {station.pk: self.streamer_class(station) for station in stations}
station.pk: self.streamer_class(station) for station in stations
}
def fetch(self): def fetch(self):
"""Call streamers fetch if timed-out.""" """Call streamers fetch if timed-out."""

View File

@ -62,42 +62,24 @@ class Command(BaseCommand):
"--station", "--station",
type=str, type=str,
action="append", action="append",
help="name of the station to monitor instead of monitoring " help="name of the station to monitor instead of monitoring " "all stations",
"all stations",
) )
group.add_argument( group.add_argument(
"-t", "-t",
"--timeout", "--timeout",
type=float, type=float,
default=Monitor.cancel_timeout.total_seconds() / 60, default=Monitor.cancel_timeout.total_seconds() / 60,
help="time to wait in MINUTES before canceling a diffusion that " help="time to wait in MINUTES before canceling a diffusion that " "should have ran but did not. ",
"should have ran but did not. ",
) )
# TODO: sync-timeout, cancel-timeout # TODO: sync-timeout, cancel-timeout
def handle( def handle(self, *args, config=None, run=None, monitor=None, station=[], delay=1000, timeout=600, **options):
self,
*args,
config=None,
run=None,
monitor=None,
station=[],
delay=1000,
timeout=600,
**options
):
stations = ( stations = Station.objects.filter(name__in=station) if station else Station.objects.all()
Station.objects.filter(name__in=station)
if station
else Station.objects.all()
)
streamers = [Streamer(station) for station in stations] streamers = [Streamer(station) for station in stations]
for streamer in streamers: for streamer in streamers:
if not streamer.outputs: if not streamer.outputs:
raise RuntimeError( raise RuntimeError("Streamer {} has no outputs".format(streamer.id))
"Streamer {} has no outputs".format(streamer.id)
)
if config: if config:
streamer.make_config() streamer.make_config()
if run: if run:
@ -106,10 +88,7 @@ class Command(BaseCommand):
if monitor: if monitor:
delay = tz.timedelta(milliseconds=delay) delay = tz.timedelta(milliseconds=delay)
timeout = tz.timedelta(minutes=timeout) timeout = tz.timedelta(minutes=timeout)
monitors = [ monitors = [Monitor(streamer, delay, cancel_timeout=timeout) for streamer in streamers]
Monitor(streamer, delay, cancel_timeout=timeout)
for streamer in streamers
]
while not run or streamer.is_running: while not run or streamer.is_running:
for monitor in monitors: for monitor in monitors:

View File

@ -55,9 +55,7 @@ class FakeSocket:
data = self.recv_data data = self.recv_data
self.recv_data = self.recv_data[count:] self.recv_data = self.recv_data[count:]
data = data[:count] data = data[:count]
return ( return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"
data.encode("utf-8") if isinstance(data, str) else data
) or b"\nEND"
def is_sent(self, data): def is_sent(self, data):
"""Return True if provided data have been sent.""" """Return True if provided data have been sent."""
@ -68,9 +66,7 @@ class FakeSocket:
# -- models # -- models
@pytest.fixture @pytest.fixture
def station(): def station():
station = models.Station( station = models.Station(name="test", path=working_dir, default=True, active=True)
name="test", path=working_dir, default=True, active=True
)
station.save() station.save()
return station return station
@ -136,9 +132,7 @@ def program(station):
@pytest.fixture @pytest.fixture
def stream(program): def stream(program):
stream = models.Stream( stream = models.Stream(program=program, begin=time(10, 12), end=time(12, 13))
program=program, begin=time(10, 12), end=time(12, 13)
)
stream.save() stream.save()
return stream return stream
@ -229,10 +223,7 @@ def metadata_data(metadata_data_air_time):
@pytest.fixture @pytest.fixture
def metadata_string(metadata_data): def metadata_string(metadata_data):
return ( return "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"
"\n".join(f"{key}={value}" for key, value in metadata_data.items())
+ "\nEND"
)
# -- streamers # -- streamers
@ -285,9 +276,7 @@ class FakeQueueSource(FakeSource, controllers.QueueSource):
@pytest.fixture @pytest.fixture
def streamer(station, station_ports): def streamer(station, station_ports):
streamer = FakeStreamer(station=station) streamer = FakeStreamer(station=station)
streamer.sources = [ streamer.sources = [FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)]
FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
]
streamer.dealer = FakeQueueSource(len(streamer.sources)) streamer.dealer = FakeQueueSource(len(streamer.sources))
streamer.sources.append(streamer.dealer) streamer.sources.append(streamer.dealer)
return streamer return streamer
@ -297,12 +286,8 @@ def streamer(station, station_ports):
def streamers(stations, stations_ports): def streamers(stations, stations_ports):
streamers = controllers.Streamers(streamer_class=FakeStreamer) streamers = controllers.Streamers(streamer_class=FakeStreamer)
# avoid unecessary db calls # avoid unecessary db calls
streamers.streamers = { streamers.streamers = {station.pk: FakeStreamer(station=station) for station in stations}
station.pk: FakeStreamer(station=station) for station in stations
}
for j, streamer in enumerate(streamers.values()): for j, streamer in enumerate(streamers.values()):
streamer.sources = [ streamer.sources = [FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)]
FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
]
streamer.sources.append(FakeQueueSource(len(streamer.sources))) streamer.sources.append(FakeQueueSource(len(streamer.sources)))
return streamers return streamers

View File

@ -16,9 +16,7 @@ class TestConnector:
assert connector.is_open assert connector.is_open
assert connector.socket.family == socket.AF_UNIX assert connector.socket.family == socket.AF_UNIX
assert connector.socket.type == socket.SOCK_STREAM assert connector.socket.type == socket.SOCK_STREAM
assert connector.socket.address == os.path.join( assert connector.socket.address == os.path.join(working_dir, "test.sock")
working_dir, "test.sock"
)
connector.close() connector.close()
def test_open_af_inet(self, connector): def test_open_af_inet(self, connector):

View File

@ -37,9 +37,7 @@ class TestBaseMetaData:
assert metadata.validate_status("any") == "stopped" assert metadata.validate_status("any") == "stopped"
@pytest.mark.django_db @pytest.mark.django_db
def test_validate_air_time( def test_validate_air_time(self, metadata, metadata_data, metadata_data_air_time):
self, metadata, metadata_data, metadata_data_air_time
):
air_time = metadata_data["on_air"] air_time = metadata_data["on_air"]
result = metadata.validate_air_time(air_time) result = metadata.validate_air_time(air_time)
assert result == metadata_data_air_time assert result == metadata_data_air_time

View File

@ -43,10 +43,7 @@ def source(monitor, streamer, sound, diffusion):
@pytest.fixture @pytest.fixture
def tracks(sound): def tracks(sound):
items = [ items = [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 4)]
baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
for i in range(0, 4)
]
models.Track.objects.bulk_create(items) models.Track.objects.bulk_create(items)
return items return items
@ -178,9 +175,7 @@ class TestMonitor:
assert all(log_by_track.count(track) for track in tracks) assert all(log_by_track.count(track) for track in tracks)
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
def test_trace_tracks_returns_on_log_diffusion( def test_trace_tracks_returns_on_log_diffusion(self, monitor, log, diffusion, tracks):
self, monitor, log, diffusion, tracks
):
log.diffusion = None log.diffusion = None
monitor.trace_tracks(log) monitor.trace_tracks(log)
@ -210,9 +205,7 @@ class TestMonitor:
assert not monitor.calls["cancel_diff"] assert not monitor.calls["cancel_diff"]
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
def test_handle_diffusions_returns_on_diff( def test_handle_diffusions_returns_on_diff(self, monitor, streamer, diffusion, log):
self, monitor, streamer, diffusion, log
):
interface( interface(
monitor, monitor,
{ {
@ -232,9 +225,7 @@ class TestMonitor:
assert not monitor.calls["cancel_diff"] assert not monitor.calls["cancel_diff"]
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
def test_handle_diffusions_returns_on_diff_log_exists( def test_handle_diffusions_returns_on_diff_log_exists(self, monitor, streamer, diffusion, log):
self, monitor, streamer, diffusion, log
):
interface( interface(
monitor, monitor,
{ {
@ -264,9 +255,7 @@ class TestMonitor:
streamer.dealer.queue = None streamer.dealer.queue = None
streamer.dealer.rid = "13" streamer.dealer.rid = "13"
streamer.dealer.remaining = monitor.delay.total_seconds() + 10 streamer.dealer.remaining = monitor.delay.total_seconds() + 10
diffusion.start = ( diffusion.start = tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
)
diffusion.end = tz.now() + tz.timedelta(minutes=30) diffusion.end = tz.now() + tz.timedelta(minutes=30)
diffusion.save() diffusion.save()
@ -285,9 +274,7 @@ class TestMonitor:
assert log.comment == "test" assert log.comment == "test"
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
def test_start_diff( def test_start_diff(self, monitor, diffusion, source, episode, sound, tracks):
self, monitor, diffusion, source, episode, sound, tracks
):
result = {} result = {}
monitor.log = lambda **kw: result.update(kw) monitor.log = lambda **kw: result.update(kw)
@ -321,17 +308,10 @@ class TestMonitor:
monitor.sync() monitor.sync()
assert monitor.sync_next >= now + monitor.sync_timeout assert monitor.sync_next >= now + monitor.sync_timeout
assert all( assert all(source.calls.get("sync") for source in monitor.streamer.playlists)
source.calls.get("sync") for source in monitor.streamer.playlists
)
@pytest.mark.django_db(transaction=True) @pytest.mark.django_db(transaction=True)
def test_sync_timeout_not_reached_skip_sync(self, monitor): def test_sync_timeout_not_reached_skip_sync(self, monitor):
monitor.sync_next = tz.now() + tz.timedelta( monitor.sync_next = tz.now() + tz.timedelta(seconds=monitor.sync_timeout.total_seconds() + 20)
seconds=monitor.sync_timeout.total_seconds() + 20
)
monitor.sync() monitor.sync()
assert all( assert all(not source.calls.get("sync") for source in monitor.streamer.playlists)
not source.calls.get("sync")
for source in monitor.streamer.playlists
)

View File

@ -67,11 +67,7 @@ class TestPlaylistSource:
@pytest.mark.django_db @pytest.mark.django_db
def test_get_sound_queryset(self, playlist_source, sounds): def test_get_sound_queryset(self, playlist_source, sounds):
query = playlist_source.get_sound_queryset() query = playlist_source.get_sound_queryset()
assert all( assert all(r.program_id == playlist_source.program.pk and r.type == r.TYPE_ARCHIVE for r in query)
r.program_id == playlist_source.program.pk
and r.type == r.TYPE_ARCHIVE
for r in query
)
@pytest.mark.django_db @pytest.mark.django_db
def test_get_playlist(self, playlist_source, sounds): def test_get_playlist(self, playlist_source, sounds):
@ -114,9 +110,7 @@ class TestQueueSource:
@pytest.mark.django_db @pytest.mark.django_db
def test_requests(self, queue_source, socket, metadata_string): def test_requests(self, queue_source, socket, metadata_string):
queue_source.queue = [13, 14, 15] queue_source.queue = [13, 14, 15]
socket.recv_data = [ socket.recv_data = [f"{metadata_string}\nEND" for _ in queue_source.queue]
f"{metadata_string}\nEND" for _ in queue_source.queue
]
requests = queue_source.requests requests = queue_source.requests
@ -127,10 +121,7 @@ class TestQueueSource:
def test_push(self, queue_source, socket): def test_push(self, queue_source, socket):
paths = ["/tmp/a", "/tmp/b"] paths = ["/tmp/a", "/tmp/b"]
queue_source.push(*paths) queue_source.push(*paths)
assert all( assert all(socket.is_sent(f"{queue_source.id}_queue.push {path}") for path in paths)
socket.is_sent(f"{queue_source.id}_queue.push {path}")
for path in paths
)
@pytest.mark.django_db @pytest.mark.django_db
def test_fetch(self, queue_source, socket, metadata_string): def test_fetch(self, queue_source, socket, metadata_string):

View File

@ -12,9 +12,7 @@ class TestStreamers:
@pytest.fixture @pytest.fixture
def test_reset(self, streamers, stations): def test_reset(self, streamers, stations):
streamers.reset() streamers.reset()
assert all( assert all(streamers.streamers[station.pk] == station for station in stations)
streamers.streamers[station.pk] == station for station in stations
)
@pytest.fixture @pytest.fixture
def test_fetch(self, streamers): def test_fetch(self, streamers):

View File

@ -168,18 +168,14 @@ class TestQueueSourceViewSet:
calls = {} calls = {}
sound = sounds[0] sound = sounds[0]
request = FakeRequest(station=station, data={"sound_id": sound.pk}) request = FakeRequest(station=station, data={"sound_id": sound.pk})
queue_source_viewset._run = lambda pk, func: calls.setdefault( queue_source_viewset._run = lambda pk, func: calls.setdefault("_run", (pk, func))
"_run", (pk, func)
)
result = queue_source_viewset.push(request, 13) result = queue_source_viewset.push(request, 13)
assert "_run" in calls assert "_run" in calls
assert result[0] == 13 assert result[0] == 13
assert callable(result[1]) assert callable(result[1])
@pytest.mark.django_db @pytest.mark.django_db
def test_push_missing_sound_in_request_post( def test_push_missing_sound_in_request_post(self, queue_source_viewset, station):
self, queue_source_viewset, station
):
request = FakeRequest(station=station, data={}) request = FakeRequest(station=station, data={})
with pytest.raises(ValidationError): with pytest.raises(ValidationError):
queue_source_viewset.push(request, 0) queue_source_viewset.push(request, 0)

View File

@ -73,9 +73,7 @@ class StreamerViewSet(ControllerViewSet):
return Response(self.serialize(self.streamer)) return Response(self.serialize(self.streamer))
def list(self, request, pk=None): def list(self, request, pk=None):
return Response( return Response({"results": self.serialize(self.streamers.values(), many=True)})
{"results": self.serialize(self.streamers.values(), many=True)}
)
def dispatch(self, request, *args, pk=None, **kwargs): def dispatch(self, request, *args, pk=None, **kwargs):
if pk is not None: if pk is not None:
@ -93,9 +91,7 @@ class SourceViewSet(ControllerViewSet):
return (s for s in self.streamer.sources if isinstance(s, self.model)) return (s for s in self.streamer.sources if isinstance(s, self.model))
def get_source(self, pk): def get_source(self, pk):
source = next( source = next((source for source in self.get_sources() if source.id == pk), None)
(source for source in self.get_sources() if source.id == pk), None
)
if source is None: if source is None:
raise Http404("source `%s` not found" % pk) raise Http404("source `%s` not found" % pk)
return source return source
@ -105,9 +101,7 @@ class SourceViewSet(ControllerViewSet):
return Response(self.serialize(source)) return Response(self.serialize(source))
def list(self, request): def list(self, request):
return Response( return Response({"results": self.serialize(self.get_sources(), many=True)})
{"results": self.serialize(self.get_sources(), many=True)}
)
def _run(self, pk, action): def _run(self, pk, action):
source = self.object = self.get_source(pk) source = self.object = self.get_source(pk)
@ -150,9 +144,5 @@ class QueueSourceViewSet(SourceViewSet):
if not request.data.get("sound_id"): if not request.data.get("sound_id"):
raise ValidationError('missing "sound_id" POST data') raise ValidationError('missing "sound_id" POST data')
sound = get_object_or_404( sound = get_object_or_404(self.get_sound_queryset(request), pk=request.data["sound_id"])
self.get_sound_queryset(request), pk=request.data["sound_id"]
)
return self._run( return self._run(pk, lambda s: s.push(sound.file.path) if sound.file.path else None)
pk, lambda s: s.push(sound.file.path) if sound.file.path else None
)

View File

@ -10,11 +10,7 @@ sys.path.insert(1, os.path.dirname(os.path.realpath(__file__)))
PROJECT_ROOT = os.path.abspath(__file__ + "/../../../") PROJECT_ROOT = os.path.abspath(__file__ + "/../../../")
# DEBUG mode # DEBUG mode
DEBUG = ( DEBUG = (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1)) if "AIRCOX_DEBUG" in os.environ else False
(os.environ["AIRCOX_DEBUG"].lower() in ("true", 1))
if "AIRCOX_DEBUG" in os.environ
else False
)
# Internationalization and timezones: thoses values may be set in order to # Internationalization and timezones: thoses values may be set in order to
# have correct translation and timezone. # have correct translation and timezone.
@ -74,9 +70,7 @@ try:
except Exception: except Exception:
print( print(
"Can not set locale {LC}. Is it available on you system? Hint: " "Can not set locale {LC}. Is it available on you system? Hint: "
"Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format( "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
LC=LANGUAGE_CODE
)
) )
pass pass

View File

@ -43,8 +43,6 @@ try:
except Exception: except Exception:
print( print(
"Can not set locale {LC}. Is it available on you system? Hint: " "Can not set locale {LC}. Is it available on you system? Hint: "
"Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format( "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
LC=LANGUAGE_CODE
)
) )
pass pass

View File

@ -28,6 +28,6 @@ urlpatterns = aircox.urls.urls + [
] ]
if settings.DEBUG: if settings.DEBUG:
urlpatterns += static( urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(
settings.STATIC_URL, document_root=settings.STATIC_ROOT settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) )

pyproject.toml (new file, 83 lines)
View File

@ -0,0 +1,83 @@
[project]
name = "aircox"
# version = "0.1"
description = "Radio management platform and website"
readme = "README.md"
license = {text = "GPLv3"}
requires-python = ">=3.8"
authors = [
{name = "Thomas", email = "thomas@bkfox.net"},
]
classifiers = [
"Framework :: Django",
"Programming Language :: Python",
"Programming Language :: Python :: 3.11",
]
dynamic = ["version", "dependencies"]
[project.urls]
"Homepage" = "https://git.radiocampus.be/rc/aircox/"
[build-system]
requires = ["setuptools>=60", "setuptools-scm>=8.0", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
include-package-data = true
[tool.setuptools.packages.find]
where = ["."]
include = ["aircox*", "instance"]
exclude = ["aircox*.tests*", "instance.settings.settings"]
namespaces = false
[tool.setuptools.dynamic]
dependencies = {file = ["requirements.txt"]}
[tool.setuptools_scm]
[tool.pytest.ini_options]
DJANGO_SETTINGS_MODULE = "instance.settings"
python_files = ["tests.py", "test_*.py", "*_tests.py"]
[tool.black]
line-length = 120
exclude = '''
/(
\.egg
| \.git
| \.hg
| \.tox
| \._build
| \.build
| \.bulk-out
| \.dist
| \.__pycache__
| \.venv
| \.migrations
| \.static
| \.instance/settings
)
'''
[tool.ruff]
line-length = 120
exclude = [
"egg",
"git",
"hg",
"tox",
"_build",
"build",
"dist",
"__pycache__",
"venv",
"*/migrations",
"static",
"instance/settings",
]
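Black and ruff are both configured with a 120-character line length above. A quick consistency check, assuming Python 3.11+ for the standard-library TOML parser (illustrative only, not part of this commit):

import tomllib

with open("pyproject.toml", "rb") as fh:
    config = tomllib.load(fh)

# both formatters should agree on the new line length
assert config["tool"]["black"]["line-length"] == 120
assert config["tool"]["ruff"]["line-length"] == 120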

View File

@ -1,4 +0,0 @@
[pytest]
DJANGO_SETTINGS_MODULE = instance.settings
# -- recommended but optional:
python_files = tests.py test_*.py *_tests.py

View File

@ -1,37 +0,0 @@
from setuptools import find_packages, setup
def to_rst(path):
try:
from pypandoc import convert
return convert(path, "rst")
except ImportError:
print("pypandoc module not found, can not convert Markdown to RST")
return open(path, "r").read()
def to_array(path):
with open(path, "r") as file:
return [r for r in file.read().split("\n") if r]
setup(
name="aircox",
version="0.9",
license="GPLv3",
author="bkfox",
description="Aircox is a radio programs manager including tools and cms",
long_description=to_rst("README.md"),
url="https://github.com/bkfox/aircox",
packages=find_packages(),
include_package_data=True,
install_requires=to_array("requirements.txt"),
classifiers=[
"Framework :: Django",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
],
)