merge develop-1.0
commit 474016f776
.gitignore (vendored, 3 changes)

@@ -5,3 +5,6 @@ venv/
 node_modules/
 *.egg-info/
 *.egg
+
+db.sqlite3
+instance/settings/settings.py

@@ -9,14 +9,11 @@ repos:
     rev: 23.1.0
     hooks:
       - id: black
-        args:
-          - --line-length=79
-          - --exclude="""\.git|\.__pycache__|venv|_build|buck-out|build|dist"""
-  - repo: https://github.com/PyCQA/flake8.git
-    rev: 6.0.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.0.292
     hooks:
-      - id: flake8
-        exclude: ^instance/settings/|migrations/
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
   - repo: https://github.com/PyCQA/docformatter.git
     rev: v1.5.1
     hooks:

README.md (54 changes)

@@ -1,10 +1,9 @@

-Platform to manage a radio, schedules, website, and so on. We use the power of great tools like Django or Liquidsoap.
+A platform to manage radio schedules, website content, and more. It uses the power of great tools like Django or Liquidsoap.

 This project is distributed under GPL version 3. More information in the LICENSE file, except for some files whose license is indicated inside source code.

 ## Features
 * **streams**: multiple random music streams when no program is played. We also can specify a time range and frequency for each;
 * **diffusions**: generate diffusions time slot for programs that have schedule informations. Check for conflicts and rerun.

@@ -15,7 +14,51 @@ This project is distributed under GPL version 3. More information in the LICENSE
 * **cms**: content management system.

-## Scripts
+## Architecture and concepts
+Aircox is divided in two main modules:
+* `aircox`: basics of Aircox (programs, diffusions, sounds, etc. management); interface for managing a website with Aircox elements (playlists, timetable, players on the website);
+* `aircox_streamer`: interact with application to generate audio stream (LiquidSoap);
+
+## Development setup
+Start installing a virtual environment :
+
+```
+virtualenv venv
+source venv/bin/activate
+pip install -r requirements.txt
+pip install -r requirements_tests.txt
+```
+
+Then copy the default settings and initiate the database :
+
+```
+cp instance/settings/sample.py instance/settings/settings.py
+python -c "from django.core.management.utils import get_random_secret_key; print('SECRET_KEY = \"%s\"' % get_random_secret_key())" >> instance/settings/settings.py
+DJANGO_SETTINGS_MODULE=instance.settings.dev ./manage.py migrate
+```
+
+Finally test and run the instance using development settings, and point your browser to http://localhost:8000 :
+
+```
+DJANGO_SETTINGS_MODULE=instance.settings.dev pytest
+DJANGO_SETTINGS_MODULE=instance.settings.dev ./manage.py runserver
+```
+
+Before requesting a merge, enable pre-commit :
+
+```
+pip install pre-commit
+pre-commit install
+```
+
+## Installation
+Running Aircox on production involves:
+* Aircox modules and a running Django project;
+* a supervisor for common tasks (sounds monitoring, stream control, etc.) -- `supervisord`;
+* a wsgi and an HTTP server -- `gunicorn`, `nginx`;
+* a database supported by Django (MySQL, SQLite, PostGresSQL);
+
+### Scripts
 Are included various configuration scripts that can be used to ease setup. They
 assume that the project is present in `/srv/apps/aircox`:

@@ -27,7 +70,6 @@ The scripts are written with a combination of `cron`, `supervisord`, `nginx`
 and `gunicorn` in mind.


-## Installation
 ### Dependencies
 For python dependencies take a peek at the `requirements.txt` file, plus
 dependencies specific to Django (e.g. for database: `mysqlclient` for MySql

@@ -62,8 +104,8 @@ pip install -r requirements.txt
 ```

 ### Configuration
-You must write a settings.py file in the `instance` directory (you can just
-copy and paste `instance/sample_settings.py`. There still is configuration
+You must write a settings.py file in the `instance/settings` directory (you can just
+copy and paste `instance/settings/sample.py`. There still is configuration
 required in this file, check it in for more info.

@@ -18,9 +18,7 @@ class EpisodeAdminForm(ModelForm):
 class EpisodeAdmin(SortableAdminBase, PageAdmin):
     form = EpisodeAdminForm
     list_display = PageAdmin.list_display
-    list_filter = tuple(
-        f for f in PageAdmin.list_filter if f != "pub_date"
-    ) + (
+    list_filter = tuple(f for f in PageAdmin.list_filter if f != "pub_date") + (
         "diffusion__start",
         "pub_date",
     )

@@ -14,13 +14,9 @@ class DateFieldFilter(filters.FieldListFilter):

     def __init__(self, field, request, params, model, model_admin, field_path):
         self.field_generic = f"{field_path}__"
-        self.date_params = {
-            k: v for k, v in params.items() if k.startswith(self.field_generic)
-        }
+        self.date_params = {k: v for k, v in params.items() if k.startswith(self.field_generic)}

-        exact_lookup = (
-            "date" if isinstance(field, models.DateTimeField) else "exact"
-        )
+        exact_lookup = "date" if isinstance(field, models.DateTimeField) else "exact"

         # links as: (label, param, input_type|None, value)
         self.links = [

@@ -29,17 +25,11 @@ class DateFieldFilter(filters.FieldListFilter):
             (_("Until"), self.field_generic + "lte", self.input_type),
         ]
         if field.null:
-            self.links.insert(
-                0, (_("None"), self.field_generic + "isnull", None, "1")
-            )
+            self.links.insert(0, (_("None"), self.field_generic + "isnull", None, "1"))

-        self.query_attrs = {
-            k: v for k, v in request.GET.items() if k not in self.date_params
-        }
+        self.query_attrs = {k: v for k, v in request.GET.items() if k not in self.date_params}
         self.query_string = urlencode(self.query_attrs)
-        super().__init__(
-            field, request, params, model, model_admin, field_path
-        )
+        super().__init__(field, request, params, model, model_admin, field_path)

     def expected_parameters(self):
         return [link[1] for link in self.links]

@@ -59,11 +49,7 @@ class DateFieldFilter(filters.FieldListFilter):
             "value": value,
             "type": link[2],
             "query_attrs": self.query_attrs,
-            "query_string": urlencode({link[1]: value})
-            + "&"
-            + self.query_string
-            if value
-            else self.query_string,
+            "query_string": urlencode({link[1]: value}) + "&" + self.query_string if value else self.query_string,
         }
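
A side note on the `query_string` one-liner above: the active filter parameter is urlencoded and prepended to the remaining query string. A minimal sketch with illustrative parameter names:

```
from urllib.parse import urlencode

# Remaining query string, then the active filter glued in front of it.
query_string = urlencode({"page": "2"})
link = urlencode({"pub_date__gte": "2023-01-01"}) + "&" + query_string
print(link)  # pub_date__gte=2023-01-01&page=2
```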
@@ -50,11 +50,7 @@ class BasePageAdmin(admin.ModelAdmin):
     change_form_template = "admin/aircox/page_change_form.html"

     def cover_thumb(self, obj):
-        return (
-            mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"]))
-            if obj.cover
-            else ""
-        )
+        return mark_safe('<img src="{}"/>'.format(obj.cover.icons["64"])) if obj.cover else ""

     def get_changeform_initial_data(self, request):
         data = super().get_changeform_initial_data(request)

@@ -65,9 +61,7 @@ class BasePageAdmin(admin.ModelAdmin):
     def _get_common_context(self, query, extra_context=None):
         extra_context = extra_context or {}
         parent = query.get("parent", None)
-        extra_context["parent"] = (
-            None if parent is None else Page.objects.get_subclass(id=parent)
-        )
+        extra_context["parent"] = None if parent is None else Page.objects.get_subclass(id=parent)
         return extra_context

     def render_change_form(self, request, context, *args, **kwargs):

@@ -94,9 +88,7 @@ class PageAdmin(BasePageAdmin):
     search_fields = BasePageAdmin.search_fields + ("category__title",)
     fieldsets = deepcopy(BasePageAdmin.fieldsets)

-    fieldsets[0][1]["fields"].insert(
-        fieldsets[0][1]["fields"].index("slug") + 1, "category"
-    )
+    fieldsets[0][1]["fields"].insert(fieldsets[0][1]["fields"].index("slug") + 1, "category")
     fieldsets[1][1]["fields"] += ("featured", "allow_comments")

@@ -38,9 +38,7 @@ class SoundInline(admin.TabularInline):
     max_num = 0

     def audio(self, obj):
-        return mark_safe(
-            '<audio src="{}" controls></audio>'.format(obj.file.url)
-        )
+        return mark_safe('<audio src="{}" controls></audio>'.format(obj.file.url))

     audio.short_description = _("Audio")

@@ -86,13 +84,7 @@ class SoundAdmin(SortableAdminBase, admin.ModelAdmin):

     def related(self, obj):
         # TODO: link to episode or program edit
-        return (
-            obj.episode.title
-            if obj.episode
-            else obj.program.title
-            if obj.program
-            else ""
-        )
+        return obj.episode.title if obj.episode else obj.program.title if obj.program else ""

     related.short_description = _("Program / Episode")

@@ -26,21 +26,13 @@ class AdminSite(admin.AdminSite):
         context.update(
             {
                 # all programs
-                "programs": models.Program.objects.active()
-                .values("pk", "title")
-                .order_by("title"),
+                "programs": models.Program.objects.active().values("pk", "title").order_by("title"),
                 # today's diffusions
-                "diffusions": models.Diffusion.objects.date()
-                .order_by("start")
-                .select_related("episode"),
+                "diffusions": models.Diffusion.objects.date().order_by("start").select_related("episode"),
                 # TODO: only for dashboard
                 # last comments
-                "comments": models.Comment.objects.order_by(
-                    "-date"
-                ).select_related("page")[0:10],
-                "latests": models.Page.objects.select_subclasses().order_by(
-                    "-pub_date"
-                )[0:10],
+                "comments": models.Comment.objects.order_by("-date").select_related("page")[0:10],
+                "latests": models.Page.objects.select_subclasses().order_by("-pub_date")[0:10],
             }
         )
         return context

@@ -69,9 +61,7 @@ class AdminSite(admin.AdminSite):
         return [(label, reverse(url)) for label, url in self.tools]

     def route_view(self, url, view, name, admin_view=True, label=None):
-        self.extra_urls.append(
-            path(url, self.admin_view(view) if admin_view else view, name=name)
-        )
+        self.extra_urls.append(path(url, self.admin_view(view) if admin_view else view, name=name))

         if label:
             self.tools.append((label, "admin:" + name))

@@ -22,9 +22,7 @@ class DiffusionMonitor:

     def update(self):
         episodes, diffusions = [], []
-        for schedule in Schedule.objects.filter(
-            program__active=True, initial__isnull=True
-        ):
+        for schedule in Schedule.objects.filter(program__active=True, initial__isnull=True):
             eps, diffs = schedule.diffusions_of_month(self.date)
             if eps:
                 episodes += eps

@@ -44,9 +44,7 @@ class LogArchiver:
         path = self.get_path(station, date)
         # FIXME: remove binary mode
         with gzip.open(path, "ab") as archive:
-            data = yaml.dump(
-                [self.serialize(line) for line in logs]
-            ).encode("utf8")
+            data = yaml.dump([self.serialize(line) for line in logs]).encode("utf8")
             archive.write(data)

         if not keep:
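
A side note on the `gzip.open(path, "ab")` pattern above: each append-mode write starts a new gzip member, and the module transparently reads members back as one stream. A minimal sketch (the path is illustrative):

```
import gzip

path = "/tmp/logs.yaml.gz"
with gzip.open(path, "ab") as archive:  # first member
    archive.write(b"- first\n")
with gzip.open(path, "ab") as archive:  # second member, appended
    archive.write(b"- second\n")
with gzip.open(path, "rb") as archive:  # members read back concatenated
    assert archive.read() == b"- first\n- second\n"
```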

@@ -95,10 +93,7 @@ class LogArchiver:

         return [
             Log(
-                diffusion=rel_obj(log, "diffusion"),
-                sound=rel_obj(log, "sound"),
-                track=rel_obj(log, "track"),
-                **log
+                diffusion=rel_obj(log, "diffusion"), sound=rel_obj(log, "sound"), track=rel_obj(log, "track"), **log
             )
             for log in logs
         ]

@@ -50,14 +50,7 @@ class PlaylistImport:
         logger.info("start reading csv " + self.path)
         self.data = list(
             csv.DictReader(
-                (
-                    row
-                    for row in file
-                    if not (
-                        row.startswith("#") or row.startswith("\ufeff#")
-                    )
-                    and row.strip()
-                ),
+                (row for row in file if not (row.startswith("#") or row.startswith("\ufeff#")) and row.strip()),
                 fieldnames=settings.IMPORT_PLAYLIST_CSV_COLS,
                 delimiter=settings.IMPORT_PLAYLIST_CSV_DELIMITER,
                 quotechar=settings.IMPORT_PLAYLIST_CSV_TEXT_QUOTE,
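
For context, the generator above works because `csv.DictReader` accepts any iterable of lines, so comment rows (plain or BOM-prefixed) and blank rows can be dropped before parsing. A self-contained sketch — the column names and delimiter are illustrative, not Aircox's actual `IMPORT_PLAYLIST_CSV_*` settings:

```
import csv
import io

raw = io.StringIO("\ufeff# a BOM-prefixed comment\n1;Some Artist;Some Title\n")
# Skip comment and blank rows before handing lines to the CSV parser.
rows = (row for row in raw if not (row.startswith("#") or row.startswith("\ufeff#")) and row.strip())
reader = csv.DictReader(rows, fieldnames=("position", "artist", "title"), delimiter=";")
print(list(reader))  # [{'position': '1', 'artist': 'Some Artist', 'title': 'Some Title'}]
```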

@@ -70,11 +63,7 @@ class PlaylistImport:
        If save is true, save it into the database
        """
        if self.track_kwargs.get("sound") is None:
-            logger.error(
-                "related track's sound is missing. Skip import of "
-                + self.path
-                + "."
-            )
+            logger.error("related track's sound is missing. Skip import of " + self.path + ".")
            return

        maps = settings.IMPORT_PLAYLIST_CSV_COLS

@@ -87,17 +76,11 @@ class PlaylistImport:
             return
         try:
             timestamp = (
-                int(line.get("minutes") or 0) * 60
-                + int(line.get("seconds") or 0)
-                if has_timestamp
-                else None
+                int(line.get("minutes") or 0) * 60 + int(line.get("seconds") or 0) if has_timestamp else None
             )

             track, created = Track.objects.get_or_create(
-                title=line.get("title"),
-                artist=line.get("artist"),
-                position=index,
-                **self.track_kwargs
+                title=line.get("title"), artist=line.get("artist"), position=index, **self.track_kwargs
             )
             track.timestamp = timestamp
             track.info = line.get("info")

@@ -58,14 +58,7 @@ class SoundFile:
     def episode(self):
         return self.sound and self.sound.episode

-    def sync(
-        self,
-        sound=None,
-        program=None,
-        deleted=False,
-        keep_deleted=False,
-        **kwargs
-    ):
+    def sync(self, sound=None, program=None, deleted=False, keep_deleted=False, **kwargs):
         """Update related sound model and save it."""
         if deleted:
             return self._on_delete(self.path, keep_deleted)

@@ -79,9 +72,7 @@ class SoundFile:
         if sound:
             created = False
         else:
-            sound, created = Sound.objects.get_or_create(
-                file=self.sound_path, defaults=kwargs
-            )
+            sound, created = Sound.objects.get_or_create(file=self.sound_path, defaults=kwargs)

         self.sound = sound
         self.path_info = self.read_path(self.path)

@@ -172,9 +163,7 @@ class SoundFile:

         year, month, day = pi.get("year"), pi.get("month"), pi.get("day")
         if pi.get("hour") is not None:
-            at = tz.datetime(
-                year, month, day, pi.get("hour", 0), pi.get("minute", 0)
-            )
+            at = tz.datetime(year, month, day, pi.get("hour", 0), pi.get("minute", 0))
             at = tz.make_aware(at)
         else:
             at = date(year, month, day)

@@ -210,22 +199,10 @@ class SoundFile:
         if self.info and self.info.tags:
             tags = self.info.tags
             title, artist, album, year = tuple(
-                t and ", ".join(t)
-                for t in (
-                    tags.get(k)
-                    for k in ("title", "artist", "album", "year")
-                )
+                t and ", ".join(t) for t in (tags.get(k) for k in ("title", "artist", "album", "year"))
             )
-            title = (
-                title
-                or (self.path_info and self.path_info.get("name"))
-                or os.path.basename(path_noext)
-            )
-            info = (
-                "{} ({})".format(album, year)
-                if album and year
-                else album or year or ""
-            )
+            title = title or (self.path_info and self.path_info.get("name")) or os.path.basename(path_noext)
+            info = "{} ({})".format(album, year) if album and year else album or year or ""
             track = Track(
                 sound=sound,
                 position=int(tags.get("tracknumber", 0)),

@@ -155,10 +155,7 @@ class MonitorHandler(PatternMatchingEventHandler):
         self.jobs = jobs or {}
         self.sync_kw = sync_kw

-        patterns = [
-            "*/{}/*{}".format(self.subdir, ext)
-            for ext in settings.SOUND_FILE_EXT
-        ]
+        patterns = ["*/{}/*{}".format(self.subdir, ext) for ext in settings.SOUND_FILE_EXT]
         super().__init__(patterns=patterns, ignore_directories=True)

     def on_created(self, event):

@@ -202,11 +199,7 @@ class SoundMonitor:

     def report(self, program=None, component=None, *content, logger=logging):
         content = " ".join([str(c) for c in content])
-        logger.info(
-            f"{program}: {content}"
-            if not component
-            else f"{program}, {component}: {content}"
-        )
+        logger.info(f"{program}: {content}" if not component else f"{program}, {component}: {content}")

     def scan(self, logger=logging):
         """For all programs, scan dirs.

@@ -234,9 +227,7 @@ class SoundMonitor:
             dirs.append(program.abspath)
         return dirs

-    def scan_for_program(
-        self, program, subdir, logger=logging, **sound_kwargs
-    ):
+    def scan_for_program(self, program, subdir, logger=logging, **sound_kwargs):
         """Scan a given directory that is associated to the given program, and
         update sounds information."""
         logger.info("- %s/", subdir)

@@ -257,9 +248,7 @@ class SoundMonitor:
             sounds.append(sound_file.sound.pk)

         # sounds in db & unchecked
-        sounds = Sound.objects.filter(file__startswith=subdir).exclude(
-            pk__in=sounds
-        )
+        sounds = Sound.objects.filter(file__startswith=subdir).exclude(pk__in=sounds)
         self.check_sounds(sounds, program=program)

     def check_sounds(self, qs, **sync_kwargs):

@@ -38,9 +38,7 @@ class SoxStats:
         args += ["trim", str(at), str(length)]
         args.append("stats")

-        p = subprocess.Popen(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        )
+        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         # sox outputs to stderr (my god WHYYYY)
         out_, out = p.communicate()
         self.values = self.parse(str(out, encoding="utf-8"))
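
A side note on the stderr complaint above: sox's `stats` effect really does print its report to stderr, so the useful text is the second value returned by `communicate()`. A standalone sketch (file name illustrative):

```
import subprocess

p = subprocess.Popen(
    ["sox", "sample.wav", "-n", "trim", "0", "120", "stats"],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)
_, err = p.communicate()  # returns (stdout, stderr); the report is in stderr
print(err.decode("utf-8"))
```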

@@ -94,16 +92,8 @@ class SoundStats:
             position += self.sample_length

     def check(self, name, min_val, max_val):
-        self.good = [
-            index
-            for index, stats in enumerate(self.stats)
-            if min_val <= stats.get(name) <= max_val
-        ]
-        self.bad = [
-            index
-            for index, stats in enumerate(self.stats)
-            if index not in self.good
-        ]
+        self.good = [index for index, stats in enumerate(self.stats) if min_val <= stats.get(name) <= max_val]
+        self.bad = [index for index, stats in enumerate(self.stats) if index not in self.good]
         self.resume()

     def resume(self):

@@ -120,10 +110,6 @@ class SoundStats:

     def _view(self, array):
         return [
-            "file"
-            if index == 0
-            else "sample {} (at {} seconds)".format(
-                index, (index - 1) * self.sample_length
-            )
+            "file" if index == 0 else "sample {} (at {} seconds)".format(index, (index - 1) * self.sample_length)
             for index in array
         ]

@@ -35,11 +35,7 @@ class WeekConverter:
         return datetime.datetime.strptime(value + "/1", "%G/%V/%u").date()

     def to_url(self, value):
-        return (
-            value
-            if isinstance(value, str)
-            else "{:04d}/{:02d}".format(*value.isocalendar())
-        )
+        return value if isinstance(value, str) else "{:04d}/{:02d}".format(*value.isocalendar())


 class DateConverter:

@@ -52,10 +48,4 @@ class DateConverter:
         return datetime.date(int(value[0]), int(value[1]), int(value[2]))

     def to_url(self, value):
-        return (
-            value
-            if isinstance(value, str)
-            else "{:04d}/{:02d}/{:02d}".format(
-                value.year, value.month, value.day
-            )
-        )
+        return value if isinstance(value, str) else "{:04d}/{:02d}/{:02d}".format(value.year, value.month, value.day)
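
A round-trip sketch of the week converter above: `%G/%V/%u` parses an ISO year/week/weekday, `isocalendar()` returns `(year, week, weekday)`, and `str.format()` ignores the surplus weekday argument:

```
import datetime

monday = datetime.datetime.strptime("2023/42" + "/1", "%G/%V/%u").date()
assert monday == datetime.date(2023, 10, 16)
# The two-slot format string drops isocalendar()'s trailing weekday.
assert "{:04d}/{:02d}".format(*monday.isocalendar()) == "2023/42"
```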
@@ -19,9 +19,7 @@ class PageFilters(filters.FilterSet):


 class EpisodeFilters(PageFilters):
-    podcast = filters.BooleanFilter(
-        method="podcast_filter", label=_("Podcast")
-    )
+    podcast = filters.BooleanFilter(method="podcast_filter", label=_("Podcast"))

     class Meta:
         model = Episode

@@ -30,8 +30,7 @@ class Command(BaseCommand):
             "--age",
             type=int,
             default=settings.LOGS_ARCHIVES_AGE,
-            help="minimal age in days of logs to archive. Default is "
-            "settings.LOGS_ARCHIVES_AGE",
+            help="minimal age in days of logs to archive. Default is " "settings.LOGS_ARCHIVES_AGE",
         )
         group.add_argument(
             "-k",
@@ -55,14 +55,11 @@ class Command(BaseCommand):
         group.add_argument(
             "--next-month",
             action="store_true",
-            help="set the date to the next month of given date"
-            " (if next month from today",
+            help="set the date to the next month of given date" " (if next month from today",
         )

     def handle(self, *args, **options):
-        date = datetime.date(
-            year=options["year"], month=options["month"], day=1
-        )
+        date = datetime.date(year=options["year"], month=options["month"], day=1)
         if options.get("next_month"):
             month = options.get("month")
             date += tz.timedelta(days=28)

@@ -51,18 +51,13 @@ class Command(BaseCommand):
     def handle(self, path, *args, **options):
         # FIXME: absolute/relative path of sounds vs given path
         if options.get("sound"):
-            sound = Sound.objects.filter(
-                file__icontains=options.get("sound")
-            ).first()
+            sound = Sound.objects.filter(file__icontains=options.get("sound")).first()
         else:
             path_, ext = os.path.splitext(path)
             sound = Sound.objects.filter(path__icontains=path_).first()

         if not sound:
-            logger.error(
-                "no sound found in the database for the path "
-                "{path}".format(path=path)
-            )
+            logger.error("no sound found in the database for the path " "{path}".format(path=path))
             return

         # FIXME: auto get sound.episode if any

@@ -1,5 +1,4 @@
 #! /usr/bin/env python3
-# TODO: SoundMonitor class

 """Monitor sound files; For each program, check for:

@@ -43,8 +42,7 @@ class Command(BaseCommand):
             "-q",
             "--quality_check",
             action="store_true",
-            help="Enable quality check using sound_quality_check on all "
-            "sounds marqued as not good",
+            help="Enable quality check using sound_quality_check on all " "sounds marqued as not good",
         )
         parser.add_argument(
             "-s",

@@ -57,15 +55,12 @@ class Command(BaseCommand):
             "-m",
             "--monitor",
             action="store_true",
-            help="Run in monitor mode, watch for modification in the "
-            "filesystem and react in consequence",
+            help="Run in monitor mode, watch for modification in the " "filesystem and react in consequence",
         )

     def handle(self, *args, **options):
-        SoundMonitor()
+        monitor = SoundMonitor()
         if options.get("scan"):
-            self.scan()
-            # if options.get('quality_check'):
-            #     self.check_quality(check=(not options.get('scan')))
+            monitor.scan()
         if options.get("monitor"):
-            self.monitor()
+            monitor.monitor()

@@ -28,8 +28,7 @@ class Command(BaseCommand):
             "--sample_length",
             type=int,
             default=120,
-            help="size of sample to analyse in seconds. If not set (or 0), "
-            "does not analyse by sample",
+            help="size of sample to analyse in seconds. If not set (or 0), " "does not analyse by sample",
         )
         parser.add_argument(
             "-a",

@@ -43,8 +42,7 @@ class Command(BaseCommand):
             "--range",
             type=float,
             nargs=2,
-            help="range of minimal and maximal accepted value such as: "
-            "--range min max",
+            help="range of minimal and maximal accepted value such as: " "--range min max",
         )
         parser.add_argument(
             "-i",

@@ -64,10 +62,7 @@ class Command(BaseCommand):
             raise CommandError("no attribute specified")

         # sound analyse and checks
-        self.sounds = [
-            SoundStats(path, options.get("sample_length"))
-            for path in options.get("files")
-        ]
+        self.sounds = [SoundStats(path, options.get("sample_length")) for path in options.get("files")]
         self.bad = []
         self.good = []
         for sound in self.sounds:

@@ -84,9 +84,7 @@ class Migration(migrations.Migration):
             options={
                 "verbose_name": "Diffusion",
                 "verbose_name_plural": "Diffusions",
-                "permissions": (
-                    ("programming", "edit the diffusion's planification"),
-                ),
+                "permissions": (("programming", "edit the diffusion's planification"),),
             },
         ),
         migrations.CreateModel(

@@ -125,22 +123,16 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "content",
-                    ckeditor.fields.RichTextField(
-                        blank=True, null=True, verbose_name="content"
-                    ),
+                    ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
                 ),
                 ("pub_date", models.DateTimeField(blank=True, null=True)),
                 (
                     "featured",
-                    models.BooleanField(
-                        default=False, verbose_name="featured"
-                    ),
+                    models.BooleanField(default=False, verbose_name="featured"),
                 ),
                 (
                     "allow_comments",
-                    models.BooleanField(
-                        default=True, verbose_name="allow comments"
-                    ),
+                    models.BooleanField(default=True, verbose_name="allow comments"),
                 ),
                 (
                     "category",

@@ -458,9 +450,7 @@ class Migration(migrations.Migration):
                 ("name", models.CharField(max_length=64, verbose_name="name")),
                 (
                     "slug",
-                    models.SlugField(
-                        max_length=64, unique=True, verbose_name="slug"
-                    ),
+                    models.SlugField(max_length=64, unique=True, verbose_name="slug"),
                 ),
                 (
                     "path",

@@ -566,9 +556,7 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "content",
-                    ckeditor.fields.RichTextField(
-                        blank=True, null=True, verbose_name="content"
-                    ),
+                    ckeditor.fields.RichTextField(blank=True, null=True, verbose_name="content"),
                 ),
                 (
                     "view",

@@ -949,9 +937,7 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "time",
-                    models.TimeField(
-                        help_text="start time", verbose_name="time"
-                    ),
+                    models.TimeField(help_text="start time", verbose_name="time"),
                 ),
                 (
                     "timezone",

@@ -1643,9 +1629,7 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "duration",
-                    models.TimeField(
-                        help_text="regular duration", verbose_name="duration"
-                    ),
+                    models.TimeField(help_text="regular duration", verbose_name="duration"),
                 ),
                 (
                     "frequency",

@@ -33,9 +33,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="page",
             name="content",
-            field=ckeditor_uploader.fields.RichTextUploadingField(
-                blank=True, null=True, verbose_name="content"
-            ),
+            field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
         ),
         migrations.AlterField(
             model_name="sound",

@@ -52,8 +50,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="staticpage",
             name="content",
-            field=ckeditor_uploader.fields.RichTextUploadingField(
-                blank=True, null=True, verbose_name="content"
-            ),
+            field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name="content"),
         ),
     ]

@@ -12,9 +12,7 @@ class Migration(migrations.Migration):
         migrations.AlterModelOptions(
             name="diffusion",
             options={
-                "permissions": (
-                    ("programming", "edit the diffusions' planification"),
-                ),
+                "permissions": (("programming", "edit the diffusions' planification"),),
                 "verbose_name": "Diffusion",
                 "verbose_name_plural": "Diffusions",
             },

@@ -22,9 +20,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="track",
             name="album",
-            field=models.CharField(
-                default="", max_length=128, verbose_name="album"
-            ),
+            field=models.CharField(default="", max_length=128, verbose_name="album"),
         ),
         migrations.AlterField(
             model_name="schedule",

@@ -12,8 +12,6 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="track",
             name="year",
-            field=models.IntegerField(
-                blank=True, null=True, verbose_name="year"
-            ),
+            field=models.IntegerField(blank=True, null=True, verbose_name="year"),
         ),
     ]

@@ -12,8 +12,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name="track",
             name="album",
-            field=models.CharField(
-                blank=True, max_length=128, null=True, verbose_name="album"
-            ),
+            field=models.CharField(blank=True, max_length=128, null=True, verbose_name="album"),
         ),
     ]

@@ -30,9 +30,7 @@ class Migration(migrations.Migration):
                 ),
                 (
                     "playlist_editor_sep",
-                    models.CharField(
-                        max_length=16, verbose_name="Playlist Editor Separator"
-                    ),
+                    models.CharField(max_length=16, verbose_name="Playlist Editor Separator"),
                 ),
                 (
                     "user",

aircox/migrations/0014_alter_schedule_timezone.py (new file, +623)

@@ -0,0 +1,623 @@
+# Generated by Django 4.2.5 on 2023-10-18 07:26
+
+import aircox.models.schedule
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("aircox", "0013_alter_schedule_timezone_alter_station_hosts"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="schedule",
+            name="timezone",
+            field=models.CharField(
+                choices=[
+                    ("Africa/Abidjan", "Africa/Abidjan"),
+                    ("Africa/Accra", "Africa/Accra"),
+                    ("Africa/Addis_Ababa", "Africa/Addis_Ababa"),
+                    ("Africa/Algiers", "Africa/Algiers"),
+                    ("Africa/Asmara", "Africa/Asmara"),
+                    ("Africa/Asmera", "Africa/Asmera"),
+                    ("Africa/Bamako", "Africa/Bamako"),
+                    ("Africa/Bangui", "Africa/Bangui"),
+                    ("Africa/Banjul", "Africa/Banjul"),
+                    ("Africa/Bissau", "Africa/Bissau"),
+                    ("Africa/Blantyre", "Africa/Blantyre"),
+                    ("Africa/Brazzaville", "Africa/Brazzaville"),
[… the choices list continues alphabetically with one ("<key>", "<key>") pair per tz database entry — Africa, America, Antarctica, Arctic, Asia, Atlantic, Australia, Brazil, Canada, Chile, Etc/GMT±N, Europe, Indian, Mexico, Pacific, US, plus legacy aliases such as GMT, UTC, Zulu — roughly 580 further entries omitted here …]
+                    ("W-SU", "W-SU"),
+                    ("WET", "WET"),
+                    ("Zulu", "Zulu"),
+                    ("localtime", "localtime"),
+                ],
+                default=aircox.models.schedule.current_timezone_key,
+                help_text="timezone used for the date",
+                max_length=100,
+                verbose_name="timezone",
+            ),
+        ),
+    ]
|
|
@@ -19,11 +19,7 @@ __all__ = ("Diffusion", "DiffusionQuerySet")
class DiffusionQuerySet(RerunQuerySet):
def episode(self, episode=None, id=None):
"""Diffusions for this episode."""
return (
self.filter(episode=episode)
if id is None
else self.filter(episode__id=id)
)
return self.filter(episode=episode) if id is None else self.filter(episode__id=id)

def on_air(self):
"""On air diffusions."""

@@ -40,9 +36,7 @@ class DiffusionQuerySet(RerunQuerySet):
"""Diffusions occuring date."""
date = date or datetime.date.today()
start = tz.make_aware(tz.datetime.combine(date, datetime.time()))
end = tz.make_aware(
tz.datetime.combine(date, datetime.time(23, 59, 59, 999))
)
end = tz.make_aware(tz.datetime.combine(date, datetime.time(23, 59, 59, 999)))
# start = tz.get_current_timezone().localize(start)
# end = tz.get_current_timezone().localize(end)
qs = self.filter(start__range=(start, end))

@@ -50,11 +44,7 @@ class DiffusionQuerySet(RerunQuerySet):

def at(self, date, order=True):
"""Return diffusions at specified date or datetime."""
return (
self.now(date, order)
if isinstance(date, tz.datetime)
else self.date(date, order)
)
return self.now(date, order) if isinstance(date, tz.datetime) else self.date(date, order)

def after(self, date=None):
"""Return a queryset of diffusions that happen after the given date

@@ -140,9 +130,7 @@ class Diffusion(Rerun):
class Meta:
verbose_name = _("Diffusion")
verbose_name_plural = _("Diffusions")
permissions = (
("programming", _("edit the diffusions' planification")),
)
permissions = (("programming", _("edit the diffusions' planification")),)

def __str__(self):
str_ = "{episode} - {date}".format(

@@ -200,11 +188,7 @@ class Diffusion(Rerun):
def is_now(self):
"""True if diffusion is currently running."""
now = tz.now()
return (
self.type == self.TYPE_ON_AIR
and self.start <= now
and self.end >= now
)
return self.type == self.TYPE_ON_AIR and self.start <= now and self.end >= now

@property
def is_today(self):

@@ -214,10 +198,7 @@ class Diffusion(Rerun):
@property
def is_live(self):
"""True if Diffusion is live (False if there are sounds files)."""
return (
self.type == self.TYPE_ON_AIR
and not self.episode.sound_set.archive().count()
)
return self.type == self.TYPE_ON_AIR and not self.episode.sound_set.archive().count()

def get_playlist(self, **types):
"""Returns sounds as a playlist (list of *local* archive file path).

@@ -227,9 +208,7 @@ class Diffusion(Rerun):
from .sound import Sound

return list(
self.get_sounds(**types)
.filter(path__isnull=False, type=Sound.TYPE_ARCHIVE)
.values_list("path", flat=True)
self.get_sounds(**types).filter(path__isnull=False, type=Sound.TYPE_ARCHIVE).values_list("path", flat=True)
)

def get_sounds(self, **types):

@@ -241,9 +220,7 @@ class Diffusion(Rerun):
from .sound import Sound

sounds = (self.initial or self).sound_set.order_by("type", "path")
_in = [
getattr(Sound.Type, name) for name, value in types.items() if value
]
_in = [getattr(Sound.Type, name) for name, value in types.items() if value]

return sounds.filter(type__in=_in)

@@ -265,8 +242,7 @@ class Diffusion(Rerun):
# .filter(conflict_with=True)
return (
Diffusion.objects.filter(
Q(start__lt=self.start, end__gt=self.start)
| Q(start__gt=self.start, start__lt=self.end)
Q(start__lt=self.start, end__gt=self.start) | Q(start__gt=self.start, start__lt=self.end)
)
.exclude(pk=self.pk)
.distinct()

@@ -28,10 +28,7 @@ class Episode(Page):
"""Return serialized data about podcasts."""
from ..serializers import PodcastSerializer

podcasts = [
PodcastSerializer(s).data
for s in self.sound_set.public().order_by("type")
]
podcasts = [PodcastSerializer(s).data for s in self.sound_set.public().order_by("type")]
if self.cover:
options = {"size": (128, 128), "crop": "scale"}
cover = get_thumbnailer(self.cover).get_thumbnail(options).url

@@ -76,6 +73,4 @@ class Episode(Page):
if title is None
else title
)
return super().get_init_kwargs_from(
page, title=title, program=page, **kwargs
)
return super().get_init_kwargs_from(page, title=title, program=page, **kwargs)

@@ -19,11 +19,7 @@ __all__ = ("Log", "LogQuerySet")

class LogQuerySet(models.QuerySet):
def station(self, station=None, id=None):
return (
self.filter(station=station)
if id is None
else self.filter(station_id=id)
)
return self.filter(station=station) if id is None else self.filter(station_id=id)

def date(self, date):
start = tz.datetime.combine(date, datetime.time())

@@ -33,11 +29,7 @@ class LogQuerySet(models.QuerySet):
# return self.filter(date__date=date)

def after(self, date):
return (
self.filter(date__gte=date)
if isinstance(date, tz.datetime)
else self.filter(date__date__gte=date)
)
return self.filter(date__gte=date) if isinstance(date, tz.datetime) else self.filter(date__date__gte=date)

def on_air(self):
return self.filter(type=Log.TYPE_ON_AIR)
@@ -26,9 +26,7 @@ __all__ = (
)


headline_re = re.compile(
r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?"
)
headline_re = re.compile(r"(<p>)?" r"(?P<headline>[^\n]{1,140}(\n|[^\.]*?\.))" r"(</p>)?")


class Renderable:

@@ -37,9 +35,7 @@ class Renderable:

def get_template_name(self, widget):
"""Return template name for the provided widget."""
return self.template_name.format(
prefix=self.template_prefix, widget=widget
)
return self.template_name.format(prefix=self.template_prefix, widget=widget)


class Category(models.Model):

@@ -69,17 +65,11 @@ class BasePageQuerySet(InheritanceQuerySet):

def parent(self, parent=None, id=None):
"""Return pages having this parent."""
return (
self.filter(parent=parent)
if id is None
else self.filter(parent__id=id)
)
return self.filter(parent=parent) if id is None else self.filter(parent__id=id)

def search(self, q, search_content=True):
if search_content:
return self.filter(
models.Q(title__icontains=q) | models.Q(content__icontains=q)
)
return self.filter(models.Q(title__icontains=q) | models.Q(content__icontains=q))
return self.filter(title__icontains=q)


@@ -104,9 +94,7 @@ class BasePage(Renderable, models.Model):
related_name="child_set",
)
title = models.CharField(max_length=100)
slug = models.SlugField(
_("slug"), max_length=120, blank=True, unique=True, db_index=True
)
slug = models.SlugField(_("slug"), max_length=120, blank=True, unique=True, db_index=True)
status = models.PositiveSmallIntegerField(
_("status"),
default=STATUS_DRAFT,

@@ -146,11 +134,7 @@ class BasePage(Renderable, models.Model):
super().save(*args, **kwargs)

def get_absolute_url(self):
return (
reverse(self.detail_url_name, kwargs={"slug": self.slug})
if self.is_published
else "#"
)
return reverse(self.detail_url_name, kwargs={"slug": self.slug}) if self.is_published else "#"

@property
def is_draft(self):

@@ -191,9 +175,7 @@ class BasePage(Renderable, models.Model):

class PageQuerySet(BasePageQuerySet):
def published(self):
return self.filter(
status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now()
)
return self.filter(status=Page.STATUS_PUBLISHED, pub_date__lte=tz.now())


class Page(BasePage):

@@ -207,9 +189,7 @@ class Page(BasePage):
null=True,
db_index=True,
)
pub_date = models.DateTimeField(
_("publication date"), blank=True, null=True, db_index=True
)
pub_date = models.DateTimeField(_("publication date"), blank=True, null=True, db_index=True)
featured = models.BooleanField(
_("featured"),
default=False,

@@ -316,9 +296,7 @@ class Comment(Renderable, models.Model):
class NavItem(models.Model):
"""Navigation menu items."""

station = models.ForeignKey(
Station, models.CASCADE, verbose_name=_("station")
)
station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
menu = models.SlugField(_("menu"), max_length=24)
order = models.PositiveSmallIntegerField(_("order"))
text = models.CharField(_("title"), max_length=64)

@@ -338,13 +316,7 @@ class NavItem(models.Model):
ordering = ("order", "pk")

def get_url(self):
return (
self.url
if self.url
else self.page.get_absolute_url()
if self.page
else None
)
return self.url if self.url else self.page.get_absolute_url() if self.page else None

def render(self, request, css_class="", active_class=""):
url = self.get_url()

@@ -356,6 +328,4 @@ class NavItem(models.Model):
elif not css_class:
return format_html('<a href="{}">{}</a>', url, self.text)
else:
return format_html(
'<a href="{}" class="{}">{}</a>', url, css_class, self.text
)
return format_html('<a href="{}" class="{}">{}</a>', url, css_class, self.text)
@@ -47,9 +47,7 @@ class Program(Page):
"""

# explicit foreign key in order to avoid related name clashes
station = models.ForeignKey(
Station, models.CASCADE, verbose_name=_("station")
)
station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
active = models.BooleanField(
_("active"),
default=True,

@@ -126,12 +124,7 @@ class Program(Page):
# TODO: move in signals
path_ = getattr(self, "__initial_path", None)
abspath = path_ and os.path.join(conf.MEDIA_ROOT, path_)
if (
path_ is not None
and path_ != self.path
and os.path.exists(abspath)
and not os.path.exists(self.abspath)
):
if path_ is not None and path_ != self.path and os.path.exists(abspath) and not os.path.exists(self.abspath):
logger.info(
"program #%s's dir changed to %s - update it.",
self.id,

@@ -139,9 +132,7 @@ class Program(Page):
)

shutil.move(abspath, self.abspath)
Sound.objects.filter(path__startswith=path_).update(
file=Concat("file", Substr(F("file"), len(path_)))
)
Sound.objects.filter(path__startswith=path_).update(file=Concat("file", Substr(F("file"), len(path_))))


class ProgramChildQuerySet(PageQuerySet):

@@ -15,18 +15,10 @@ class RerunQuerySet(models.QuerySet):
"""Queryset for Rerun (sub)classes."""

def station(self, station=None, id=None):
return (
self.filter(program__station=station)
if id is None
else self.filter(program__station__id=id)
)
return self.filter(program__station=station) if id is None else self.filter(program__station__id=id)

def program(self, program=None, id=None):
return (
self.filter(program=program)
if id is None
else self.filter(program__id=id)
)
return self.filter(program=program) if id is None else self.filter(program__id=id)

def rerun(self):
return self.filter(initial__isnull=False)

@@ -78,14 +70,8 @@ class Rerun(models.Model):

def clean(self):
super().clean()
if (
hasattr(self, "start")
and self.initial is not None
and self.initial.start >= self.start
):
raise ValidationError(
{"initial": _("rerun must happen after original")}
)
if hasattr(self, "start") and self.initial is not None and self.initial.start >= self.start:
raise ValidationError({"initial": _("rerun must happen after original")})

def save_rerun(self):
self.program = self.initial.program

@@ -55,7 +55,7 @@ class Schedule(Rerun):
_("timezone"),
default=current_timezone_key,
max_length=100,
choices=[(x, x) for x in zoneinfo.available_timezones()],
choices=sorted([(x, x) for x in zoneinfo.available_timezones()]),
help_text=_("timezone used for the date"),
)
duration = models.TimeField(

@@ -102,11 +102,7 @@ class Schedule(Rerun):
"""Return frequency formated for display."""
from django.template.defaultfilters import date

return (
self._get_FIELD_display(self._meta.get_field("frequency"))
.format(day=date(self.date, "l"))
.capitalize()
)
return self._get_FIELD_display(self._meta.get_field("frequency")).format(day=date(self.date, "l")).capitalize()

def normalize(self, date):
"""Return a datetime set to schedule's time for the provided date,

@@ -124,9 +120,7 @@ class Schedule(Rerun):

# last of the month
if freq == Schedule.Frequency.last:
date = date.replace(
day=calendar.monthrange(date.year, date.month)[1]
)
date = date.replace(day=calendar.monthrange(date.year, date.month)[1])
date_wday = date.weekday()

# end of month before the wanted weekday: move one week back

@@ -138,9 +132,7 @@ class Schedule(Rerun):
# move to the first day of the month that matches the schedule's
# weekday. Check on SO#3284452 for the formula
date_wday, month = date.weekday(), date.month
date += tz.timedelta(
days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday
)
date += tz.timedelta(days=(7 if date_wday > sched_wday else 0) - date_wday + sched_wday)

if freq == Schedule.Frequency.one_on_two:
# - adjust date with modulo 14 (= 2 weeks in days)

@@ -149,11 +141,7 @@ class Schedule(Rerun):
date += tz.timedelta(days=7)
dates = (date + tz.timedelta(days=14 * i) for i in range(0, 3))
else:
dates = (
date + tz.timedelta(days=7 * week)
for week in range(0, 5)
if freq & (0b1 << week)
)
dates = (date + tz.timedelta(days=7 * week) for week in range(0, 5) if freq & (0b1 << week))

return [self.normalize(date) for date in dates if date.month == month]

@@ -166,29 +154,22 @@ class Schedule(Rerun):
from .diffusion import Diffusion
from .episode import Episode

if (
self.initial is not None
or self.frequency == Schedule.Frequency.ponctual
):
if self.initial is not None or self.frequency == Schedule.Frequency.ponctual:
return [], []

# dates for self and reruns as (date, initial)
reruns = [
(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()
]
reruns = [(rerun, rerun.date - self.date) for rerun in self.rerun_set.all()]

dates = {date: None for date in self.dates_of_month(date)}
dates.update(
(rerun.normalize(date.date() + delta), date)
for date in list(dates.keys())
for rerun, delta in reruns
(rerun.normalize(date.date() + delta), date) for date in list(dates.keys()) for rerun, delta in reruns
)

# remove dates corresponding to existing diffusions
saved = set(
Diffusion.objects.filter(
start__in=dates.keys(), program=self.program, schedule=self
).values_list("start", flat=True)
Diffusion.objects.filter(start__in=dates.keys(), program=self.program, schedule=self).values_list(
"start", flat=True
)
)

# make diffs
@@ -32,9 +32,7 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
group, created = Group.objects.get_or_create(name=group_name)
if created and permissions:
for codename in permissions:
permission = Permission.objects.filter(
codename=codename
).first()
permission = Permission.objects.filter(codename=codename).first()
if permission:
group.permissions.add(permission)
group.save()

@@ -45,9 +43,7 @@ def user_default_groups(sender, instance, created, *args, **kwargs):
def page_post_save(sender, instance, created, *args, **kwargs):
return
if not created and instance.cover:
Page.objects.filter(parent=instance, cover__isnull=True).update(
cover=instance.cover
)
Page.objects.filter(parent=instance, cover__isnull=True).update(cover=instance.cover)


@receiver(signals.post_save, sender=Program)

@@ -55,15 +51,11 @@ def program_post_save(sender, instance, created, *args, **kwargs):
"""Clean-up later diffusions when a program becomes inactive."""
if not instance.active:
Diffusion.objects.program(instance).after(tz.now()).delete()
Episode.objects.parent(instance).filter(
diffusion__isnull=True
).delete()
Episode.objects.parent(instance).filter(diffusion__isnull=True).delete()

cover = getattr(instance, "__initial_cover", None)
if cover is None and instance.cover is not None:
Episode.objects.parent(instance).filter(cover__isnull=True).update(
cover=instance.cover
)
Episode.objects.parent(instance).filter(cover__isnull=True).update(cover=instance.cover)


@receiver(signals.pre_save, sender=Schedule)

@@ -79,8 +71,7 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
corresponding diffusions accordingly."""
initial = getattr(instance, "_initial", None)
if not initial or (
(instance.time, instance.duration, instance.timezone)
== (initial.time, initial.duration, initial.timezone)
(instance.time, instance.duration, instance.timezone) == (initial.time, initial.duration, initial.timezone)
):
return

@@ -99,13 +90,9 @@ def schedule_post_save(sender, instance, created, *args, **kwargs):
def schedule_pre_delete(sender, instance, *args, **kwargs):
"""Delete later corresponding diffusion to a changed schedule."""
Diffusion.objects.filter(schedule=instance).after(tz.now()).delete()
Episode.objects.filter(
diffusion__isnull=True, content__isnull=True, sound__isnull=True
).delete()
Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()


@receiver(signals.post_delete, sender=Diffusion)
def diffusion_post_delete(sender, instance, *args, **kwargs):
Episode.objects.filter(
diffusion__isnull=True, content__isnull=True, sound__isnull=True
).delete()
Episode.objects.filter(diffusion__isnull=True, content__isnull=True, sound__isnull=True).delete()

@@ -50,9 +50,7 @@ class SoundQuerySet(models.QuerySet):
def path(self, paths):
if isinstance(paths, str):
return self.filter(file=paths.replace(conf.MEDIA_ROOT + "/", ""))
return self.filter(
file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths)
)
return self.filter(file__in=(p.replace(conf.MEDIA_ROOT + "/", "") for p in paths))

def playlist(self, archive=True, order_by=True):
"""Return files absolute paths as a flat list (exclude sound without

@@ -66,9 +64,7 @@ class SoundQuerySet(models.QuerySet):
self = self.order_by("file")
return [
os.path.join(conf.MEDIA_ROOT, file)
for file in self.filter(file__isnull=False).values_list(
"file", flat=True
)
for file in self.filter(file__isnull=False).values_list("file", flat=True)
]

def search(self, query):

@@ -122,11 +118,7 @@ class Sound(models.Model):
)

def _upload_to(self, filename):
subdir = (
settings.SOUND_ARCHIVES_SUBDIR
if self.type == self.TYPE_ARCHIVE
else settings.SOUND_EXCERPTS_SUBDIR
)
subdir = settings.SOUND_ARCHIVES_SUBDIR if self.type == self.TYPE_ARCHIVE else settings.SOUND_EXCERPTS_SUBDIR
return os.path.join(self.program.path, subdir, filename)

file = models.FileField(

@@ -161,10 +153,7 @@ class Sound(models.Model):
)
is_downloadable = models.BooleanField(
_("downloadable"),
help_text=_(
"whether it can be publicly downloaded by visitors (sound must be "
"public)"
),
help_text=_("whether it can be publicly downloaded by visitors (sound must be " "public)"),
default=False,
)

@@ -224,9 +213,7 @@ class Sound(models.Model):
if self.type == self.TYPE_REMOVED and self.program:
changed = True
self.type = (
self.TYPE_ARCHIVE
if self.file.name.startswith(self.program.archives_path)
else self.TYPE_EXCERPT
self.TYPE_ARCHIVE if self.file.name.startswith(self.program.archives_path) else self.TYPE_EXCERPT
)

# check mtime -> reset quality if changed (assume file changed)

@@ -299,8 +286,7 @@ class Track(models.Model):
blank=True,
null=True,
help_text=_(
"additional informations about this track, such as "
"the version, if is it a remix, features, etc."
"additional informations about this track, such as " "the version, if is it a remix, features, etc."
),
)

@@ -310,13 +296,9 @@ class Track(models.Model):
ordering = ("position",)

def __str__(self):
return "{self.artist} -- {self.title} -- {self.position}".format(
self=self
)
return "{self.artist} -- {self.title} -- {self.position}".format(self=self)

def save(self, *args, **kwargs):
if (self.sound is None and self.episode is None) or (
self.sound is not None and self.episode is not None
):
if (self.sound is None and self.episode is None) or (self.sound is not None and self.episode is not None):
raise ValueError("sound XOR episode is required")
super().save(*args, **kwargs)
@@ -67,9 +67,7 @@ class Station(models.Model):
max_length=2048,
null=True,
blank=True,
help_text=_(
"Audio streams urls used by station's player. One url " "a line."
),
help_text=_("Audio streams urls used by station's player. One url " "a line."),
)
default_cover = FilerImageField(
on_delete=models.SET_NULL,

@@ -153,16 +151,10 @@ class Port(models.Model):
(TYPE_FILE, _("file")),
)

station = models.ForeignKey(
Station, models.CASCADE, verbose_name=_("station")
)
direction = models.SmallIntegerField(
_("direction"), choices=DIRECTION_CHOICES
)
station = models.ForeignKey(Station, models.CASCADE, verbose_name=_("station"))
direction = models.SmallIntegerField(_("direction"), choices=DIRECTION_CHOICES)
type = models.SmallIntegerField(_("type"), choices=TYPE_CHOICES)
active = models.BooleanField(
_("active"), default=True, help_text=_("this port is active")
)
active = models.BooleanField(_("active"), default=True, help_text=_("this port is active"))
settings = models.TextField(
_("port settings"),
help_text=_(

@@ -193,8 +185,6 @@ class Port(models.Model):

def save(self, *args, **kwargs):
if not self.is_valid_type():
raise ValueError(
"port type is not allowed with the given port direction"
)
raise ValueError("port type is not allowed with the given port direction")

return super().save(*args, **kwargs)

@@ -15,6 +15,4 @@ class UserSettings(models.Model):
related_name="aircox_settings",
)
playlist_editor_columns = models.JSONField(_("Playlist Editor Columns"))
playlist_editor_sep = models.CharField(
_("Playlist Editor Separator"), max_length=16
)
playlist_editor_sep = models.CharField(_("Playlist Editor Separator"), max_length=16)

@@ -52,9 +52,7 @@ def do_has_perm(context, obj, perm, user=None):
"""Return True if ``user.has_perm('[APP].[perm]_[MODEL]')``"""
if user is None:
user = context["request"].user
return user.has_perm(
"{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name)
)
return user.has_perm("{}.{}_{}".format(obj._meta.app_label, perm, obj._meta.model_name))


@register.filter(name="is_diffusion")

@@ -87,10 +85,7 @@ def do_player_live_attr(context):
def do_nav_items(context, menu, **kwargs):
"""Render navigation items for the provided menu name."""
station, request = context["station"], context["request"]
return [
(item, item.render(request, **kwargs))
for item in station.navitem_set.filter(menu=menu)
]
return [(item, item.render(request, **kwargs)) for item in station.navitem_set.filter(menu=menu)]


@register.simple_tag(name="update_query")

@@ -108,10 +103,4 @@ def do_update_query(obj, **kwargs):
def do_verbose_name(obj, plural=False):
"""Return model's verbose name (singular or plural) or `obj` if it is a
string (can act for default values)."""
return (
obj
if isinstance(obj, str)
else obj._meta.verbose_name_plural
if plural
else obj._meta.verbose_name
)
return obj if isinstance(obj, str) else obj._meta.verbose_name_plural if plural else obj._meta.verbose_name
@@ -51,9 +51,7 @@ class WrapperMixin:
ns = None
ns_attr = None

def __init__(
self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs
):
def __init__(self, target=None, ns=None, ns_attr=None, type_interface=None, **kwargs):
self.target = target
if ns:
self.inject(ns, ns_attr)

@@ -87,10 +85,7 @@ class WrapperMixin:
if self.target is ns_target:
return
elif self.target is not None and self.ns:
raise RuntimeError(
"self target already injected. It must be "
"`release` before `inject`."
)
raise RuntimeError("self target already injected. It must be " "`release` before `inject`.")

self.target = ns_target
setattr(ns, ns_attr, self.interface)

@@ -145,9 +140,7 @@ class SpoofMixin:
traces = self.traces[name]
if not isinstance(traces, list):
traces = (traces,)
return tuple(
self._get_trace(trace, args=args, kw=kw) for trace in traces
)
return tuple(self._get_trace(trace, args=args, kw=kw) for trace in traces)

def _get_trace(self, trace, args=False, kw=False):
if (args and kw) or (not args and not kw):

@@ -48,15 +48,11 @@ class TestDateFieldFilter:
def test___init__(self, date_filter):
assert date_filter.date_params == {"pub_date__lte": tomorrow}

date_filter.links = [
(str(link[0]), *list(link[1:])) for link in date_filter.links
]
date_filter.links = [(str(link[0]), *list(link[1:])) for link in date_filter.links]
assert date_filter.links == [
(str(_("None")), "pub_date__isnull", None, "1"),
(str(_("Exact")), "pub_date__date", date_filter.input_type),
(str(_("Since")), "pub_date__gte", date_filter.input_type),
(str(_("Until")), "pub_date__lte", date_filter.input_type),
]
assert date_filter.query_attrs == {
"pub_date__gte": today.strftime("%Y-%m-%d")
}
assert date_filter.query_attrs == {"pub_date__gte": today.strftime("%Y-%m-%d")}

@@ -30,9 +30,7 @@ def staff_user():

@pytest.fixture
def logger():
logger = Interface(
logging, {"info": None, "debug": None, "error": None, "warning": None}
)
logger = Interface(logging, {"info": None, "debug": None, "error": None, "warning": None})
return logger


@@ -123,10 +121,7 @@ def schedules(sched_initials, sched_reruns):

@pytest.fixture
def episodes(programs):
return [
baker.make(models.Episode, parent=program, cover=None)
for program in programs
]
return [baker.make(models.Episode, parent=program, cover=None) for program in programs]


@pytest.fixture

@@ -158,15 +153,7 @@ def sound(program):

@pytest.fixture
def tracks(episode, sound):
items = [
baker.prepare(
models.Track, episode=episode, position=i, timestamp=i * 60
)
for i in range(0, 3)
]
items += [
baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
for i in range(0, 3)
]
items = [baker.prepare(models.Track, episode=episode, position=i, timestamp=i * 60) for i in range(0, 3)]
items += [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 3)]
models.Track.objects.bulk_create(items)
return items
@@ -21,30 +21,21 @@ class TestDiffusion:
def test_update(self, monitor, schedules, sched_initials, logger):
monitor.update()

diffusions = models.Diffusion.objects.filter(
schedule__in=sched_initials
)
diffusions = models.Diffusion.objects.filter(schedule__in=sched_initials)
by_date = {}
for diff in diffusions:
assert diff.episode_id
by_date.setdefault(diff.schedule_id, set()).add(
(diff.start, diff.end)
)
by_date.setdefault(diff.schedule_id, set()).add((diff.start, diff.end))

for schedule in sched_initials:
if schedule.pk not in by_date:
continue
_, items = schedule.diffusions_of_month(now)
assert all(
(item.start, item.end) in by_date[schedule.pk]
for item in items
)
assert all((item.start, item.end) in by_date[schedule.pk] for item in items)

@pytest.mark.django_db
def test_clean(self, monitor, episode):
start = tz.make_aware(
datetime.combine(monitor.date - timedelta(days=1), time(10, 20))
)
start = tz.make_aware(datetime.combine(monitor.date - timedelta(days=1), time(10, 20)))
diff = models.Diffusion(
type=models.Diffusion.TYPE_UNCONFIRMED,
episode=episode,

@@ -79,16 +79,12 @@ class TestLogArchiver:
def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs):
# before logs are deleted from db, get data
sorted = archiver.sort_logs(logs_qs)
paths = {
archiver.get_path(station, date) for station, date in sorted.keys()
}
paths = {archiver.get_path(station, date) for station, date in sorted.keys()}

count = archiver.archive(logs_qs, keep=False)
assert count == len(logs)
assert not logs_qs.count()
assert all(
path in paths for path, *_ in gzip._traces("open", args=True)
)
assert all(path in paths for path, *_ in gzip._traces("open", args=True))

results = archiver.load_file("dummy path")
assert results

@@ -104,7 +100,4 @@ class TestLogArchiver:

assert sorted
for (station, date), logs in sorted.items():
assert all(
log.station == station and log.date.date() == date
for log in logs
)
assert all(log.station == station and log.date.date() == date for log in logs)

@@ -53,13 +53,7 @@ def path_infos():

@pytest.fixture
def sound_files(path_infos):
return {
k: r
for k, r in (
(path, SoundFile(conf.MEDIA_ROOT + "/" + path))
for path in path_infos.keys()
)
}
return {k: r for k, r in ((path, SoundFile(conf.MEDIA_ROOT + "/" + path)) for path in path_infos.keys())}


def test_sound_path(sound_files):

@@ -78,17 +72,9 @@ def test_read_path(path_infos, sound_files):

def _setup_diff(program, info):
episode = models.Episode(program=program, title="test-episode")
at = tz.datetime(
**{
k: info[k]
for k in ("year", "month", "day", "hour", "minute")
if info.get(k)
}
)
at = tz.datetime(**{k: info[k] for k in ("year", "month", "day", "hour", "minute") if info.get(k)})
at = tz.make_aware(at)
diff = models.Diffusion(
episode=episode, start=at, end=at + timedelta(hours=1)
)
diff = models.Diffusion(episode=episode, start=at, end=at + timedelta(hours=1))
episode.save()
diff.save()
return diff
@@ -92,9 +92,7 @@ class TestTask:
task.log_msg = "--{event.src_path}--"
sound_file = task(event, logger=logger, kw=13)
assert sound_file._trace("sync", kw=True) == {"kw": 13}
assert logger._trace("info", args=True) == (
task.log_msg.format(event=event),
)
assert logger._trace("info", args=True) == (task.log_msg.format(event=event),)


class TestDeleteTask:

@@ -125,9 +123,7 @@ class TestModifiedTask:
datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now})

def sleep(imeta, n):
datetime._imeta.funcs[
"now"
] = modified_task.timestamp + tz.timedelta(hours=10)
datetime._imeta.funcs["now"] = modified_task.timestamp + tz.timedelta(hours=10)

time = Interface.inject(sound_monitor, "time", {"sleep": sleep})
modified_task.wait()

@@ -175,9 +171,7 @@ class TestMonitorHandler:

def test__submit(self, monitor_handler, event):
handler = Interface()
handler, created = monitor_handler._submit(
handler, event, "prefix", kw=13
)
handler, created = monitor_handler._submit(handler, event, "prefix", kw=13)
assert created
assert handler.future._trace("add_done_callback")
assert monitor_handler.pool._trace("submit") == (

@@ -192,9 +186,7 @@ class TestMonitorHandler:
@pytest.fixture
def monitor_interfaces():
items = {
"atexit": Interface.inject(
sound_monitor, "atexit", {"register": None, "leave": None}
),
"atexit": Interface.inject(sound_monitor, "atexit", {"register": None, "leave": None}),
"observer": Interface.inject(
sound_monitor,
"Observer",

@@ -214,29 +206,48 @@ def monitor():
yield sound_monitor.SoundMonitor()


class SoundMonitor:
class TestSoundMonitor:
@pytest.mark.django_db
def test_report(self, monitor, program, logger):
monitor.report(program, "component", "content", logger=logger)
msg = f"{program}, component: content"
assert logger._trace("info", args=True) == (msg,)

def test_scan(self, monitor, program, logger):
@pytest.mark.django_db
def test_scan(self, monitor, programs, logger):
interface = Interface(None, {"scan_for_program": None})
monitor.scan_for_program = interface.scan_for_program
dirs = monitor.scan(logger)

assert logger._traces("info") == (
"scan all programs...",
f"#{program.id} {program.title}",
assert logger._traces("info") == tuple(
[
(("scan all programs...",), {}),
]
+ [
((f"#{program.id} {program.title}",), {})
for program in programs
]
)
assert dirs == [program.abspath]
assert interface._traces("scan_for_program") == (
((program, settings.SOUND_ARCHIVES_SUBDIR), {"logger": logger})(
(program, settings.SOUND_EXCERPTS_SUBDIR), {"logger": logger}
)
assert dirs == [program.abspath for program in programs]
traces = tuple(
[
[
(
(program, settings.SOUND_ARCHIVES_SUBDIR),
{"logger": logger, "type": Sound.TYPE_ARCHIVE},
),
(
(program, settings.SOUND_EXCERPTS_SUBDIR),
{"logger": logger, "type": Sound.TYPE_EXCERPT},
),
]
for program in programs
]
)
traces_flat = tuple([item for sublist in traces for item in sublist])
assert interface._traces("scan_for_program") == traces_flat

def test_monitor(self, monitor, monitor_interfaces, logger):
def broken_test_monitor(self, monitor, monitor_interfaces, logger):
def sleep(*args, **kwargs):
monitor.stop()
@@ -38,12 +38,8 @@ sox_values = {

@pytest.fixture
def sox_interfaces():
process = Interface(
None, {"communicate": ("", sox_output.encode("utf-8"))}
)
subprocess = Interface.inject(
sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
)
process = Interface(None, {"communicate": ("", sox_output.encode("utf-8"))})
subprocess = Interface.inject(sound_stats, "subprocess", {"Popen": lambda *_, **__: process})
yield {"process": process, "subprocess": subprocess}
subprocess._irelease()

@@ -110,9 +106,7 @@ class TestSoundStats:

def test_check(self, stats):
good = [{"val": i} for i in range(0, 11)]
bad = [{"val": i} for i in range(-10, 0)] + [
{"val": i} for i in range(11, 20)
]
bad = [{"val": i} for i in range(-10, 0)] + [{"val": i} for i in range(11, 20)]
stats.stats = good + bad
calls = {}
stats.resume = lambda *_: calls.setdefault("resume", True)

23
aircox/tests/management/test_sounds_monitor.py
Normal file

@@ -0,0 +1,23 @@
import pytest
import os

from django.core.management import call_command
from django.conf import settings

wav = (
b"RIFF$\x00\x00\x00WAVEfmt \x10\x00\x00\x00\x01\x00\x02\x00D\xac\x00\x00"
b"\x10\xb1\x02\x00\x04\x00\x10\x00data\x00\x00\x00\x00"
)


@pytest.mark.django_db
def test_adding_a_sound(programs, fs):
p0 = programs[0]
assert len(p0.sound_set.all()) == 0

s0 = os.path.join(
settings.PROJECT_ROOT, "static/media/%s/archives/sound.wav" % p0.path
)
fs.create_file(s0, contents=wav)
call_command("sounds_monitor", "-s")
assert len(p0.sound_set.all()) == 1
@@ -12,11 +12,7 @@ class TestEpisode:

@pytest.mark.django_db
def test_podcasts(self, episode, podcasts):
podcasts = {
podcast.pk: podcast
for podcast in podcasts
if podcast.episode == episode
}
podcasts = {podcast.pk: podcast for podcast in podcasts if podcast.episode == episode}
for data in episode.podcasts:
podcast = podcasts[data["pk"]]
assert data["name"] == podcast.name

@@ -12,44 +12,28 @@ class TestRerunQuerySet:
@pytest.mark.django_db
def test_station_by_obj(self, stations, schedules):
for station in stations:
queryset = (
Schedule.objects.station(station)
.distinct()
.values_list("program__station", flat=True)
)
queryset = Schedule.objects.station(station).distinct().values_list("program__station", flat=True)
assert queryset.count() == 1
assert queryset.first() == station.pk

@pytest.mark.django_db
def test_station_by_id(self, stations, schedules):
for station in stations:
queryset = (
Schedule.objects.station(id=station.pk)
.distinct()
.values_list("program__station", flat=True)
)
queryset = Schedule.objects.station(id=station.pk).distinct().values_list("program__station", flat=True)
assert queryset.count() == 1
assert queryset.first() == station.pk

@pytest.mark.django_db
def test_program_by_obj(self, programs, schedules):
for program in programs:
queryset = (
Schedule.objects.program(program)
.distinct()
.values_list("program", flat=True)
)
queryset = Schedule.objects.program(program).distinct().values_list("program", flat=True)
assert queryset.count() == 1
assert queryset.first() == program.pk

@pytest.mark.django_db
def test_program_by_id(self, programs, schedules):
for program in programs:
queryset = (
Schedule.objects.program(id=program.pk)
.distinct()
.values_list("program", flat=True)
)
queryset = Schedule.objects.program(id=program.pk).distinct().values_list("program", flat=True)
assert queryset.count() == 1
assert queryset.first() == program.pk

@@ -60,11 +44,7 @@ class TestRerunQuerySet:

@pytest.mark.django_db
def test_initial(self, schedules):
queryset = (
Schedule.objects.initial()
.distinct()
.values_list("initial", flat=True)
)
queryset = Schedule.objects.initial().distinct().values_list("initial", flat=True)
assert queryset.count() == 1
assert queryset.first() is None

@@ -49,9 +49,7 @@ class TestSchedule:

@pytest.mark.django_db
def test_dates_of_month_ponctual(self):
schedule = baker.prepare(
Schedule, frequency=Schedule.Frequency.ponctual
)
schedule = baker.prepare(Schedule, frequency=Schedule.Frequency.ponctual)
at = schedule.date + relativedelta(months=4)
assert schedule.dates_of_month(at) == []

@@ -59,9 +57,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_last(self, months, hour):
schedule = baker.prepare(
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last
)
schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last)
at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at)
assert len(datetimes) == 1

@@ -73,9 +69,7 @@ class TestSchedule:
at = date(at.year, at.month, month_info[1])
if at.weekday() < schedule.date.weekday():
at -= timedelta(days=7)
at += timedelta(days=schedule.date.weekday()) - timedelta(
days=at.weekday()
)
at += timedelta(days=schedule.date.weekday()) - timedelta(days=at.weekday())
assert dt.date() == at

# since the same method is used for first, second, etc. frequencies

@@ -84,9 +78,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_every(self, months, hour):
schedule = baker.prepare(
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every
)
schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every)
at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at)
last = None

@@ -128,8 +120,4 @@ class TestSchedule:
episodes, diffusions = schedule.diffusions_of_month(at)

assert all(r.date in dates for r in episodes)
assert all(
(not r.initial or r.date in dates)
and r.type == Diffusion.TYPE_ON_AIR
for r in diffusions
)
assert all((not r.initial or r.date in dates) and r.type == Diffusion.TYPE_ON_AIR for r in diffusions)
@@ -39,8 +39,7 @@ def test_user_default_groups():
groups = Group.objects.filter(name__in=default_groups.keys())
assert groups.exists()
assert all(
set(group.permissions.all().values_list("codename", flat=True))
== set(default_groups[group.name])
set(group.permissions.all().values_list("codename", flat=True)) == set(default_groups[group.name])
for group in groups
)
user_groups = set(user.groups.all().values_list("name", flat=True))

@@ -104,7 +103,5 @@ def test_schedule_pre_delete(sched, eps_diffs):
@pytest.mark.django_db
def test_diffusion_post_delete(eps_diffs):
eps = eps_diffs[0][0]
Diffusion.objects.filter(
id__in=[r.id for r in eps.diffusion_set.all()]
).delete()
Diffusion.objects.filter(id__in=[r.id for r in eps.diffusion_set.all()]).delete()
assert Episode.objects.filter(id=eps.id).first() is None

@@ -29,9 +29,7 @@ def test_date_or_default():

def test_to_timedelta():
val = datetime(2023, 1, 10, hour=20, minute=10, second=1)
assert utils.to_timedelta(val) == timedelta(
hours=20, minutes=10, seconds=1
)
assert utils.to_timedelta(val) == timedelta(hours=20, minutes=10, seconds=1)


def test_to_seconds():

@@ -23,16 +23,12 @@ class FakeView:

@pytest.fixture
def published_pages():
return baker.make(
models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3
)
return baker.make(models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3)


@pytest.fixture
def unpublished_pages():
return baker.make(
models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3
)
return baker.make(models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3)


@pytest.fixture
@@ -96,9 +96,7 @@ class TestParentMixin:
@pytest.mark.django_db
def test_get_parent_raises_404(self, parent_mixin):
with pytest.raises(Http404):
parent_mixin.get_parent(
self.req, parent_slug="parent-invalid-slug"
)
parent_mixin.get_parent(self.req, parent_slug="parent-invalid-slug")

def test_get_parent_not_parent_model(self, parent_mixin):
parent_mixin.parent_model = None

@@ -29,9 +29,7 @@ api = [
path("logs/", views.LogListAPIView.as_view(), name="live"),
path(
"user/settings/",
viewsets.UserSettingsViewSet.as_view(
{"get": "retrieve", "post": "update", "put": "update"}
),
viewsets.UserSettingsViewSet.as_view({"get": "retrieve", "post": "update", "put": "update"}),
name="user-settings",
),
] + router.urls

@@ -72,9 +72,7 @@ def date_or_default(date, into=None):
def to_timedelta(time):
"""Transform a datetime or a time instance to a timedelta, only using time
info."""
return datetime.timedelta(
hours=time.hour, minutes=time.minute, seconds=time.second
)
return datetime.timedelta(hours=time.hour, minutes=time.minute, seconds=time.second)


def to_seconds(time):

@@ -37,9 +37,5 @@ class StatisticsView(AdminMixin, LogListView, ListView):

def get_object_list(self, logs, full=False):
if not logs.exists():
logs = (
LogArchiver().load(self.station, self.date)
if self.date
else []
)
logs = LogArchiver().load(self.station, self.date) if self.date else []
return super().get_object_list(logs, True)

@@ -8,11 +8,7 @@ class ArticleDetailView(PageDetailView):
model = Article

def get_sidebar_queryset(self):
qs = (
Article.objects.published()
.select_related("cover")
.order_by("-pub_date")
)
qs = Article.objects.published().select_related("cover").order_by("-pub_date")
return qs

@@ -28,11 +28,7 @@ class BaseView(TemplateResponseMixin, ContextMixin):
kwargs["audio_streams"] = self.station.streams

if "model" not in kwargs:
model = (
getattr(self, "model", None)
or hasattr(self, "object")
and type(self.object)
)
model = getattr(self, "model", None) or hasattr(self, "object") and type(self.object)
kwargs["model"] = model

page = kwargs.get("page")

@@ -31,9 +31,7 @@ class HomeView(AttachedToMixin, BaseView, ListView):
current_diff = query.now(now).first()
next_diffs = query.after(now)
if current_diff:
diffs = [current_diff] + list(
next_diffs.exclude(pk=current_diff.pk)[:9]
)
diffs = [current_diff] + list(next_diffs.exclude(pk=current_diff.pk)[:9])
else:
diffs = next_diffs[:10]
return diffs
@@ -27,13 +27,7 @@ class LogListMixin(GetDateMixin):
def get_queryset(self):
# only get logs for tracks: log for diffusion will be retrieved
# by the diffusions' queryset.
qs = (
super()
.get_queryset()
.on_air()
.filter(track__isnull=False)
.filter(date__lte=tz.now())
)
qs = super().get_queryset().on_air().filter(track__isnull=False).filter(date__lte=tz.now())
return (
qs.date(self.date)
if self.date is not None

@@ -43,11 +37,7 @@ class LogListMixin(GetDateMixin):
)

def get_diffusions_queryset(self):
qs = (
Diffusion.objects.station(self.station)
.on_air()
.filter(start__lte=tz.now())
)
qs = Diffusion.objects.station(self.station).on_air().filter(start__lte=tz.now())
return (
qs.date(self.date)
if self.date is not None

@@ -86,9 +76,7 @@ class LogListView(AttachedToMixin, BaseView, LogListMixin, ListView):
kwargs.update(
{
"date": self.date,
"dates": (
today - datetime.timedelta(days=i) for i in range(0, 7)
),
"dates": (today - datetime.timedelta(days=i) for i in range(0, 7)),
"object_list": self.get_object_list(self.object_list),
}
)

@@ -123,6 +111,4 @@ class LogListAPIView(LogListMixin, BaseAPIView, ListAPIView):

def get_serializer(self, queryset, *args, **kwargs):
full = bool(self.request.GET.get("full"))
return super().get_serializer(
self.get_object_list(queryset, full), *args, **kwargs
)
return super().get_serializer(self.get_object_list(queryset, full), *args, **kwargs)

@@ -14,13 +14,7 @@ class GetDateMixin:

def get_date(self):
date = self.request.GET.get("date")
return (
str_to_date(date, "-")
if date is not None
else self.kwargs["date"]
if "date" in self.kwargs
else None
)
return str_to_date(date, "-") if date is not None else self.kwargs["date"] if "date" in self.kwargs else None

def get(self, *args, **kwargs):
if self.redirect_date_url and self.request.GET.get("date"):

@@ -55,9 +49,7 @@ class ParentMixin:
return

lookup = {self.parent_field: kwargs[self.parent_url_kwarg]}
return get_object_or_404(
self.parent_model.objects.select_related("cover"), **lookup
)
return get_object_or_404(self.parent_model.objects.select_related("cover"), **lookup)

def get(self, request, *args, **kwargs):
self.parent = self.get_parent(request, *args, **kwargs)

@@ -83,11 +75,7 @@ class AttachedToMixin:

def get_page(self):
if self.attach_to_value is not None:
return (
StaticPage.objects.filter(attach_to=self.attach_to_value)
.published()
.first()
)
return StaticPage.objects.filter(attach_to=self.attach_to_value).published().first()
return super().get_page()
@ -30,13 +30,7 @@ class BasePageListView(AttachedToMixin, ParentMixin, BaseView, ListView):
|
|||
return super().get(*args, **kwargs)
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
super()
|
||||
.get_queryset()
|
||||
.select_subclasses()
|
||||
.published()
|
||||
.select_related("cover")
|
||||
)
|
||||
return super().get_queryset().select_subclasses().published().select_related("cover")
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
kwargs.setdefault("has_headline", self.has_headline)
|
||||
|
@ -114,12 +108,7 @@ class PageListView(FiltersMixin, BasePageListView):
|
|||
return super().get_filterset(data, query)
|
||||
|
||||
def get_queryset(self):
|
||||
qs = (
|
||||
super()
|
||||
.get_queryset()
|
||||
.select_related("category")
|
||||
.order_by("-pub_date")
|
||||
)
|
||||
qs = super().get_queryset().select_related("category").order_by("-pub_date")
|
||||
return qs
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
|
@ -152,9 +141,7 @@ class PageDetailView(BasePageDetailView):
|
|||
def get_context_data(self, **kwargs):
|
||||
if self.object.allow_comments and "comment_form" not in kwargs:
|
||||
kwargs["comment_form"] = CommentForm()
|
||||
kwargs["comments"] = Comment.objects.filter(page=self.object).order_by(
|
||||
"-date"
|
||||
)
|
||||
kwargs["comments"] = Comment.objects.filter(page=self.object).order_by("-date")
|
||||
if self.object.parent_subclass:
|
||||
kwargs["parent"] = self.object.parent_subclass
|
||||
return super().get_context_data(**kwargs)
|
||||
|
|
|
@ -12,9 +12,7 @@ class BaseProgramMixin:
|
|||
return self.object
|
||||
|
||||
def get_sidebar_url(self):
|
||||
return reverse(
|
||||
"program-page-list", kwargs={"parent_slug": self.program.slug}
|
||||
)
|
||||
return reverse("program-page-list", kwargs={"parent_slug": self.program.slug})
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
self.program = self.get_program()
|
||||
|
@@ -26,19 +24,9 @@ class ProgramDetailView(BaseProgramMixin, PageDetailView):
     model = Program

     def get_context_data(self, **kwargs):
-        episodes = (
-            Episode.objects.program(self.object)
-            .published()
-            .order_by("-pub_date")
-        )
-        articles = (
-            Article.objects.parent(self.object)
-            .published()
-            .order_by("-pub_date")
-        )
-        return super().get_context_data(
-            articles=articles, episodes=episodes, **kwargs
-        )
+        episodes = Episode.objects.program(self.object).published().order_by("-pub_date")
+        articles = Article.objects.parent(self.object).published().order_by("-pub_date")
+        return super().get_context_data(articles=articles, episodes=episodes, **kwargs)


 class ProgramListView(PageListView):
@@ -70,9 +70,7 @@ class UserSettingsViewSet(viewsets.ViewSet):
     permission_classes = [IsAuthenticated]

     def get_serializer(self, instance=None, **kwargs):
-        return self.serializer_class(
-            instance=instance, context={"user": self.request.user}, **kwargs
-        )
+        return self.serializer_class(instance=instance, context={"user": self.request.user}, **kwargs)

     @action(detail=False, methods=["GET"])
     def retrieve(self, request):
@@ -45,9 +45,7 @@ class Connector:
         if self.is_open:
             return 1

-        family = (
-            socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
-        )
+        family = socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
         try:
             self.socket = self.socket_class(family, socket.SOCK_STREAM)
             self.socket.connect(self.address)
@@ -78,13 +76,7 @@ class Connector:

             if data:
                 data = response_re.sub(r"\1", data).strip()
-                data = (
-                    self.parse(data)
-                    if parse
-                    else self.parse_json(data)
-                    if parse_json
-                    else data
-                )
+                data = self.parse(data) if parse else self.parse_json(data) if parse_json else data
             return data
         except Exception:
             self.close()
@@ -62,9 +62,7 @@ class Monitor:

     def get_logs_queryset(self):
         """Return queryset to assign as `self.logs`"""
-        return self.station.log_set.select_related(
-            "diffusion", "sound", "track"
-        ).order_by("-pk")
+        return self.station.log_set.select_related("diffusion", "sound", "track").order_by("-pk")

     def init_last_sound_logs(self):
         """Retrieve last logs and initialize `last_sound_logs`"""
@@ -136,12 +134,7 @@ class Monitor:
         diff = None
         sound = Sound.objects.path(air_uri).first()
         if sound and sound.episode_id is not None:
-            diff = (
-                Diffusion.objects.episode(id=sound.episode_id)
-                .on_air()
-                .now(air_time)
-                .first()
-            )
+            diff = Diffusion.objects.episode(id=sound.episode_id).on_air().now(air_time).first()

         # log sound on air
         return self.log(
@@ -158,9 +151,7 @@ class Monitor:
         if log.diffusion:
             return

-        tracks = Track.objects.filter(
-            sound_id=log.sound_id, timestamp__isnull=False
-        ).order_by("timestamp")
+        tracks = Track.objects.filter(sound_id=log.sound_id, timestamp__isnull=False).order_by("timestamp")
         if not tracks.exists():
             return

@@ -217,11 +208,7 @@ class Monitor:

         dealer = self.streamer.dealer
         # start
-        if (
-            not dealer.queue
-            and dealer.rid is None
-            or dealer.remaining < self.delay.total_seconds()
-        ):
+        if not dealer.queue and dealer.rid is None or dealer.remaining < self.delay.total_seconds():
             self.start_diff(dealer, diff)
         # cancel
         elif diff.start < now - self.cancel_timeout:
@@ -47,9 +47,7 @@ class Streamer:

         self.id = self.station.slug.replace("-", "_")
         self.path = os.path.join(station.path, "station.liq")
-        self.connector = connector or Connector(
-            os.path.join(station.path, "station.sock")
-        )
+        self.connector = connector or Connector(os.path.join(station.path, "station.sock"))
         self.init_sources()

     @property
@@ -91,9 +89,7 @@ class Streamer:
     def init_sources(self):
         streams = self.station.program_set.filter(stream__isnull=False)
         self.dealer = QueueSource(self, "dealer")
-        self.sources = [self.dealer] + [
-            PlaylistSource(self, program=program) for program in streams
-        ]
+        self.sources = [self.dealer] + [PlaylistSource(self, program=program) for program in streams]

     def make_config(self):
         """Make configuration files and directory (and sync sources)"""
@@ -128,12 +124,7 @@ class Streamer:
         self.source = next(
             iter(
                 sorted(
-                    (
-                        source
-                        for source in self.sources
-                        if source.request_status == "playing"
-                        and source.air_time
-                    ),
+                    (source for source in self.sources if source.request_status == "playing" and source.air_time),
                     key=lambda o: o.air_time,
                     reverse=True,
                 )
@@ -149,11 +140,7 @@ class Streamer:
         if not os.path.exists(self.socket_path):
             return

-        conns = [
-            conn
-            for conn in psutil.net_connections(kind="unix")
-            if conn.laddr == self.socket_path
-        ]
+        conns = [conn for conn in psutil.net_connections(kind="unix") if conn.laddr == self.socket_path]
         for conn in conns:
             if conn.pid is not None:
                 os.kill(conn.pid, signal.SIGKILL)
@@ -23,9 +23,7 @@ class Streamers:
     def reset(self, stations=Station.objects.active()):
         # FIXME: cf. TODO in aircox.controllers about model updates
        stations = stations.all()
-        self.streamers = {
-            station.pk: self.streamer_class(station) for station in stations
-        }
+        self.streamers = {station.pk: self.streamer_class(station) for station in stations}

     def fetch(self):
         """Call streamers fetch if timed-out."""
@@ -62,42 +62,24 @@ class Command(BaseCommand):
             "--station",
             type=str,
             action="append",
-            help="name of the station to monitor instead of monitoring "
-            "all stations",
+            help="name of the station to monitor instead of monitoring " "all stations",
         )
         group.add_argument(
             "-t",
             "--timeout",
             type=float,
             default=Monitor.cancel_timeout.total_seconds() / 60,
-            help="time to wait in MINUTES before canceling a diffusion that "
-            "should have ran but did not. ",
+            help="time to wait in MINUTES before canceling a diffusion that " "should have ran but did not. ",
         )
         # TODO: sync-timeout, cancel-timeout

-    def handle(
-        self,
-        *args,
-        config=None,
-        run=None,
-        monitor=None,
-        station=[],
-        delay=1000,
-        timeout=600,
-        **options
-    ):
-        stations = (
-            Station.objects.filter(name__in=station)
-            if station
-            else Station.objects.all()
-        )
+    def handle(self, *args, config=None, run=None, monitor=None, station=[], delay=1000, timeout=600, **options):
+        stations = Station.objects.filter(name__in=station) if station else Station.objects.all()
         streamers = [Streamer(station) for station in stations]

         for streamer in streamers:
             if not streamer.outputs:
-                raise RuntimeError(
-                    "Streamer {} has no outputs".format(streamer.id)
-                )
+                raise RuntimeError("Streamer {} has no outputs".format(streamer.id))
             if config:
                 streamer.make_config()
             if run:
@@ -106,10 +88,7 @@ class Command(BaseCommand):
         if monitor:
             delay = tz.timedelta(milliseconds=delay)
             timeout = tz.timedelta(minutes=timeout)
-            monitors = [
-                Monitor(streamer, delay, cancel_timeout=timeout)
-                for streamer in streamers
-            ]
+            monitors = [Monitor(streamer, delay, cancel_timeout=timeout) for streamer in streamers]

             while not run or streamer.is_running:
                 for monitor in monitors:
@@ -55,9 +55,7 @@ class FakeSocket:
         data = self.recv_data
         self.recv_data = self.recv_data[count:]
         data = data[:count]
-        return (
-            data.encode("utf-8") if isinstance(data, str) else data
-        ) or b"\nEND"
+        return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"

     def is_sent(self, data):
         """Return True if provided data have been sent."""
@@ -68,9 +66,7 @@ class FakeSocket:
 # -- models
 @pytest.fixture
 def station():
-    station = models.Station(
-        name="test", path=working_dir, default=True, active=True
-    )
+    station = models.Station(name="test", path=working_dir, default=True, active=True)
     station.save()
     return station

@@ -136,9 +132,7 @@ def program(station):

 @pytest.fixture
 def stream(program):
-    stream = models.Stream(
-        program=program, begin=time(10, 12), end=time(12, 13)
-    )
+    stream = models.Stream(program=program, begin=time(10, 12), end=time(12, 13))
     stream.save()
     return stream

@@ -229,10 +223,7 @@ def metadata_data(metadata_data_air_time):

 @pytest.fixture
 def metadata_string(metadata_data):
-    return (
-        "\n".join(f"{key}={value}" for key, value in metadata_data.items())
-        + "\nEND"
-    )
+    return "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"


 # -- streamers
@@ -285,9 +276,7 @@ class FakeQueueSource(FakeSource, controllers.QueueSource):
 @pytest.fixture
 def streamer(station, station_ports):
     streamer = FakeStreamer(station=station)
-    streamer.sources = [
-        FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
-    ]
+    streamer.sources = [FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)]
     streamer.dealer = FakeQueueSource(len(streamer.sources))
     streamer.sources.append(streamer.dealer)
     return streamer
@@ -297,12 +286,8 @@ def streamer(station, station_ports):
 def streamers(stations, stations_ports):
     streamers = controllers.Streamers(streamer_class=FakeStreamer)
     # avoid unecessary db calls
-    streamers.streamers = {
-        station.pk: FakeStreamer(station=station) for station in stations
-    }
+    streamers.streamers = {station.pk: FakeStreamer(station=station) for station in stations}
     for j, streamer in enumerate(streamers.values()):
-        streamer.sources = [
-            FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
-        ]
+        streamer.sources = [FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)]
         streamer.sources.append(FakeQueueSource(len(streamer.sources)))
     return streamers
@@ -16,9 +16,7 @@ class TestConnector:
         assert connector.is_open
         assert connector.socket.family == socket.AF_UNIX
         assert connector.socket.type == socket.SOCK_STREAM
-        assert connector.socket.address == os.path.join(
-            working_dir, "test.sock"
-        )
+        assert connector.socket.address == os.path.join(working_dir, "test.sock")
         connector.close()

     def test_open_af_inet(self, connector):
@@ -37,9 +37,7 @@ class TestBaseMetaData:
         assert metadata.validate_status("any") == "stopped"

     @pytest.mark.django_db
-    def test_validate_air_time(
-        self, metadata, metadata_data, metadata_data_air_time
-    ):
+    def test_validate_air_time(self, metadata, metadata_data, metadata_data_air_time):
         air_time = metadata_data["on_air"]
         result = metadata.validate_air_time(air_time)
         assert result == metadata_data_air_time
@@ -43,10 +43,7 @@ def source(monitor, streamer, sound, diffusion):

 @pytest.fixture
 def tracks(sound):
-    items = [
-        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
-        for i in range(0, 4)
-    ]
+    items = [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 4)]
     models.Track.objects.bulk_create(items)
     return items

@@ -178,9 +175,7 @@ class TestMonitor:
         assert all(log_by_track.count(track) for track in tracks)

     @pytest.mark.django_db(transaction=True)
-    def test_trace_tracks_returns_on_log_diffusion(
-        self, monitor, log, diffusion, tracks
-    ):
+    def test_trace_tracks_returns_on_log_diffusion(self, monitor, log, diffusion, tracks):
         log.diffusion = None
         monitor.trace_tracks(log)

@@ -210,9 +205,7 @@ class TestMonitor:
         assert not monitor.calls["cancel_diff"]

     @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff(
-        self, monitor, streamer, diffusion, log
-    ):
+    def test_handle_diffusions_returns_on_diff(self, monitor, streamer, diffusion, log):
         interface(
             monitor,
             {
@@ -232,9 +225,7 @@ class TestMonitor:
         assert not monitor.calls["cancel_diff"]

     @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff_log_exists(
-        self, monitor, streamer, diffusion, log
-    ):
+    def test_handle_diffusions_returns_on_diff_log_exists(self, monitor, streamer, diffusion, log):
         interface(
             monitor,
             {
@@ -264,9 +255,7 @@ class TestMonitor:
         streamer.dealer.queue = None
         streamer.dealer.rid = "13"
         streamer.dealer.remaining = monitor.delay.total_seconds() + 10
-        diffusion.start = (
-            tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
-        )
+        diffusion.start = tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
         diffusion.end = tz.now() + tz.timedelta(minutes=30)
         diffusion.save()

@@ -285,9 +274,7 @@ class TestMonitor:
         assert log.comment == "test"

     @pytest.mark.django_db(transaction=True)
-    def test_start_diff(
-        self, monitor, diffusion, source, episode, sound, tracks
-    ):
+    def test_start_diff(self, monitor, diffusion, source, episode, sound, tracks):
         result = {}
         monitor.log = lambda **kw: result.update(kw)

@@ -321,17 +308,10 @@ class TestMonitor:
         monitor.sync()

         assert monitor.sync_next >= now + monitor.sync_timeout
-        assert all(
-            source.calls.get("sync") for source in monitor.streamer.playlists
-        )
+        assert all(source.calls.get("sync") for source in monitor.streamer.playlists)

     @pytest.mark.django_db(transaction=True)
     def test_sync_timeout_not_reached_skip_sync(self, monitor):
-        monitor.sync_next = tz.now() + tz.timedelta(
-            seconds=monitor.sync_timeout.total_seconds() + 20
-        )
+        monitor.sync_next = tz.now() + tz.timedelta(seconds=monitor.sync_timeout.total_seconds() + 20)
         monitor.sync()
-        assert all(
-            not source.calls.get("sync")
-            for source in monitor.streamer.playlists
-        )
+        assert all(not source.calls.get("sync") for source in monitor.streamer.playlists)
@@ -67,11 +67,7 @@ class TestPlaylistSource:
     @pytest.mark.django_db
     def test_get_sound_queryset(self, playlist_source, sounds):
         query = playlist_source.get_sound_queryset()
-        assert all(
-            r.program_id == playlist_source.program.pk
-            and r.type == r.TYPE_ARCHIVE
-            for r in query
-        )
+        assert all(r.program_id == playlist_source.program.pk and r.type == r.TYPE_ARCHIVE for r in query)

     @pytest.mark.django_db
     def test_get_playlist(self, playlist_source, sounds):
@@ -114,9 +110,7 @@ class TestQueueSource:
     @pytest.mark.django_db
     def test_requests(self, queue_source, socket, metadata_string):
         queue_source.queue = [13, 14, 15]
-        socket.recv_data = [
-            f"{metadata_string}\nEND" for _ in queue_source.queue
-        ]
+        socket.recv_data = [f"{metadata_string}\nEND" for _ in queue_source.queue]

         requests = queue_source.requests

@@ -127,10 +121,7 @@ class TestQueueSource:
     def test_push(self, queue_source, socket):
         paths = ["/tmp/a", "/tmp/b"]
         queue_source.push(*paths)
-        assert all(
-            socket.is_sent(f"{queue_source.id}_queue.push {path}")
-            for path in paths
-        )
+        assert all(socket.is_sent(f"{queue_source.id}_queue.push {path}") for path in paths)

     @pytest.mark.django_db
     def test_fetch(self, queue_source, socket, metadata_string):
@@ -12,9 +12,7 @@ class TestStreamers:
     @pytest.fixture
     def test_reset(self, streamers, stations):
         streamers.reset()
-        assert all(
-            streamers.streamers[station.pk] == station for station in stations
-        )
+        assert all(streamers.streamers[station.pk] == station for station in stations)

     @pytest.fixture
     def test_fetch(self, streamers):
@@ -168,18 +168,14 @@ class TestQueueSourceViewSet:
         calls = {}
         sound = sounds[0]
         request = FakeRequest(station=station, data={"sound_id": sound.pk})
-        queue_source_viewset._run = lambda pk, func: calls.setdefault(
-            "_run", (pk, func)
-        )
+        queue_source_viewset._run = lambda pk, func: calls.setdefault("_run", (pk, func))
         result = queue_source_viewset.push(request, 13)
         assert "_run" in calls
         assert result[0] == 13
         assert callable(result[1])

     @pytest.mark.django_db
-    def test_push_missing_sound_in_request_post(
-        self, queue_source_viewset, station
-    ):
+    def test_push_missing_sound_in_request_post(self, queue_source_viewset, station):
         request = FakeRequest(station=station, data={})
         with pytest.raises(ValidationError):
             queue_source_viewset.push(request, 0)
@@ -73,9 +73,7 @@ class StreamerViewSet(ControllerViewSet):
         return Response(self.serialize(self.streamer))

     def list(self, request, pk=None):
-        return Response(
-            {"results": self.serialize(self.streamers.values(), many=True)}
-        )
+        return Response({"results": self.serialize(self.streamers.values(), many=True)})

     def dispatch(self, request, *args, pk=None, **kwargs):
         if pk is not None:
@@ -93,9 +91,7 @@ class SourceViewSet(ControllerViewSet):
         return (s for s in self.streamer.sources if isinstance(s, self.model))

     def get_source(self, pk):
-        source = next(
-            (source for source in self.get_sources() if source.id == pk), None
-        )
+        source = next((source for source in self.get_sources() if source.id == pk), None)
         if source is None:
             raise Http404("source `%s` not found" % pk)
         return source
@@ -105,9 +101,7 @@ class SourceViewSet(ControllerViewSet):
         return Response(self.serialize(source))

     def list(self, request):
-        return Response(
-            {"results": self.serialize(self.get_sources(), many=True)}
-        )
+        return Response({"results": self.serialize(self.get_sources(), many=True)})

     def _run(self, pk, action):
         source = self.object = self.get_source(pk)
@@ -150,9 +144,5 @@ class QueueSourceViewSet(SourceViewSet):
         if not request.data.get("sound_id"):
             raise ValidationError('missing "sound_id" POST data')

-        sound = get_object_or_404(
-            self.get_sound_queryset(request), pk=request.data["sound_id"]
-        )
-        return self._run(
-            pk, lambda s: s.push(sound.file.path) if sound.file.path else None
-        )
+        sound = get_object_or_404(self.get_sound_queryset(request), pk=request.data["sound_id"])
+        return self._run(pk, lambda s: s.push(sound.file.path) if sound.file.path else None)
@@ -1,25 +0,0 @@
-
-# General information
-Aircox is a set of Django applications that aims to provide a radio management solution, and is
-written in Python 3.5.
-
-Running Aircox on production involves:
-* Aircox modules and a running Django project;
-* a supervisor for common tasks (sounds monitoring, stream control, etc.) -- `supervisord`;
-* a wsgi and an HTTP server -- `gunicorn`, `nginx`;
-* a database supported by Django (MySQL, SQLite, PostGresSQL);
-
-# Architecture and concepts
-Aircox is divided in three main modules:
-* `programs`: basics of Aircox (programs, diffusions, sounds, etc. management);
-* `controllers`: interact with application to generate audio stream (LiquidSoap);
-* `cms`: create a website with Aircox elements (playlists, timetable, players on the website);
-
-
-
-
-
-# Installation
-
-
-# Configuration
@@ -10,11 +10,7 @@ sys.path.insert(1, os.path.dirname(os.path.realpath(__file__)))
 PROJECT_ROOT = os.path.abspath(__file__ + "/../../../")

 # DEBUG mode
-DEBUG = (
-    (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1))
-    if "AIRCOX_DEBUG" in os.environ
-    else False
-)
+DEBUG = (os.environ["AIRCOX_DEBUG"].lower() in ("true", 1)) if "AIRCOX_DEBUG" in os.environ else False

 # Internationalization and timezones: thoses values may be set in order to
 # have correct translation and timezone.
@@ -74,9 +70,7 @@ try:
 except Exception:
     print(
         "Can not set locale {LC}. Is it available on you system? Hint: "
-        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(
-            LC=LANGUAGE_CODE
-        )
+        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
     )
     pass

@@ -7,6 +7,7 @@ try:
 except ImportError:
     pass

 DEBUG = True

+LOCALE_PATHS = ["aircox/locale", "aircox_streamer/locale"]

@@ -15,7 +16,7 @@ LOGGING = {
     "disable_existing_loggers": False,
     "formatters": {
         "timestamp": {
-            "format": "{asctime} {levelname} {message}",
+            "format": "{asctime} {module} {levelname} {message}",
             "style": "{",
         },
     },
@@ -26,6 +27,10 @@ LOGGING = {
         },
     },
     "loggers": {
+        "root": {
+            "handlers": ["console"],
+            "level": os.getenv("DJANGO_LOG_LEVEL", "DEBUG"),
+        },
         "aircox": {
             "handlers": ["console"],
             "level": os.getenv("DJANGO_LOG_LEVEL", "DEBUG"),
@@ -40,3 +45,9 @@ LOGGING = {
         },
     },
 }
+
+CACHES = {
+    "default": {
+        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+    }
+}
@@ -10,6 +10,7 @@ For Django settings see:
 https://docs.djangoproject.com/en/3.1/topics/settings/
 https://docs.djangoproject.com/en/3.1/ref/settings/
 """
 from django.utils import timezone
+from zoneinfo import ZoneInfo
 from .prod import *

@@ -43,8 +44,6 @@ try:
 except Exception:
     print(
         "Can not set locale {LC}. Is it available on you system? Hint: "
-        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(
-            LC=LANGUAGE_CODE
-        )
+        "Check /etc/locale.gen and rerun locale-gen as sudo if needed.".format(LC=LANGUAGE_CODE)
     )
     pass
@@ -28,6 +28,6 @@ urlpatterns = aircox.urls.urls + [
 ]

 if settings.DEBUG:
-    urlpatterns += static(
-        settings.STATIC_URL, document_root=settings.STATIC_ROOT
-    ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
+    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(
+        settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
+    )
pyproject.toml (new file, +83)
@@ -0,0 +1,83 @@
+[project]
+name = "aircox"
+# version = "0.1"
+description = "Radio management platform and website"
+readme = "README.md"
+license = {text = "GPLv3"}
+requires-python = ">=3.8"
+
+authors = [
+    {name = "Thomas", email = "thomas@bkfox.net"},
+]
+
+classifiers = [
+    "Framework :: Django",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3.11",
+]
+
+dynamic = ["version", "dependencies"]
+
+[project.urls]
+"Homepage" = "https://git.radiocampus.be/rc/aircox/"
+
+
+[build-system]
+requires = ["setuptools>=60", "setuptools-scm>=8.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools]
+include-package-data = true
+
+[tool.setuptools.packages.find]
+where = ["."]
+include = ["aircox*",]
+exclude = ["aircox*.tests*",]
+namespaces = false
+
+[tool.setuptools.dynamic]
+dependencies = {file = ["requirements.txt"]}
+
+[tool.setuptools_scm]
+
+[tool.pytest.ini_options]
+DJANGO_SETTINGS_MODULE = "instance.settings"
+python_files = ["tests.py", "test_*.py", "*_tests.py"]
+
+
+[tool.black]
+line-length = 120
+exclude = '''
+/(
+    \.egg
+  | \.git
+  | \.hg
+  | \.tox
+  | \._build
+  | \.build
+  | \.bulk-out
+  | \.dist
+  | \.__pycache__
+  | \.venv
+  | \.migrations
+  | \.static
+  | \.instance/settings
+)
+'''
+
+[tool.ruff]
+line-length = 120
+exclude = [
+    "egg",
+    "git",
+    "hg",
+    "tox",
+    "_build",
+    "build",
+    "dist",
+    "__pycache__",
+    "venv",
+    "*/migrations",
+    "static",
+    "instance/settings",
+]
@@ -1,4 +0,0 @@
-[pytest]
-DJANGO_SETTINGS_MODULE = instance.settings
-# -- recommended but optional:
-python_files = tests.py test_*.py *_tests.py
@@ -17,5 +17,5 @@ dateutils~=0.6
 mutagen~=1.45
 Pillow~=9.0
 psutil~=5.9
-PyYAML==5.4
+PyYAML==6.0.1
 watchdog~=2.1
@@ -1,3 +1,4 @@
 pytest~=7.2
 pytest-django~=4.5
 model_bakery~=1.10
+pyfakefs~=5.2
setup.py (deleted, -37)
@@ -1,37 +0,0 @@
-from setuptools import find_packages, setup
-
-
-def to_rst(path):
-    try:
-        from pypandoc import convert
-
-        return convert(path, "rst")
-    except ImportError:
-        print("pypandoc module not found, can not convert Markdown to RST")
-        return open(path, "r").read()
-
-
-def to_array(path):
-    with open(path, "r") as file:
-        return [r for r in file.read().split("\n") if r]
-
-
-setup(
-    name="aircox",
-    version="0.9",
-    license="GPLv3",
-    author="bkfox",
-    description="Aircox is a radio programs manager including tools and cms",
-    long_description=to_rst("README.md"),
-    url="https://github.com/bkfox/aircox",
-    packages=find_packages(),
-    include_package_data=True,
-    install_requires=to_array("requirements.txt"),
-    classifiers=[
-        "Framework :: Django",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.2",
-        "Programming Language :: Python :: 3.3",
-    ],
-)