Feat: packaging (#127)

- Add configuration files for packaging
- Precommit now uses ruff

Co-authored-by: bkfox <thomas bkfox net>
Reviewed-on: rc/aircox#127
This commit is contained in:
Thomas Kairos
2023-10-11 10:58:34 +02:00
parent 5ea092dba6
commit f7a61fe6c0
82 changed files with 332 additions and 935 deletions

View File

@@ -48,15 +48,11 @@ class TestDateFieldFilter:
def test___init__(self, date_filter):
assert date_filter.date_params == {"pub_date__lte": tomorrow}
date_filter.links = [
(str(link[0]), *list(link[1:])) for link in date_filter.links
]
date_filter.links = [(str(link[0]), *list(link[1:])) for link in date_filter.links]
assert date_filter.links == [
(str(_("None")), "pub_date__isnull", None, "1"),
(str(_("Exact")), "pub_date__date", date_filter.input_type),
(str(_("Since")), "pub_date__gte", date_filter.input_type),
(str(_("Until")), "pub_date__lte", date_filter.input_type),
]
assert date_filter.query_attrs == {
"pub_date__gte": today.strftime("%Y-%m-%d")
}
assert date_filter.query_attrs == {"pub_date__gte": today.strftime("%Y-%m-%d")}

View File

@@ -30,9 +30,7 @@ def staff_user():
@pytest.fixture
def logger():
logger = Interface(
logging, {"info": None, "debug": None, "error": None, "warning": None}
)
logger = Interface(logging, {"info": None, "debug": None, "error": None, "warning": None})
return logger
@@ -123,10 +121,7 @@ def schedules(sched_initials, sched_reruns):
@pytest.fixture
def episodes(programs):
return [
baker.make(models.Episode, parent=program, cover=None)
for program in programs
]
return [baker.make(models.Episode, parent=program, cover=None) for program in programs]
@pytest.fixture
@@ -158,15 +153,7 @@ def sound(program):
@pytest.fixture
def tracks(episode, sound):
items = [
baker.prepare(
models.Track, episode=episode, position=i, timestamp=i * 60
)
for i in range(0, 3)
]
items += [
baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
for i in range(0, 3)
]
items = [baker.prepare(models.Track, episode=episode, position=i, timestamp=i * 60) for i in range(0, 3)]
items += [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 3)]
models.Track.objects.bulk_create(items)
return items

View File

@@ -21,30 +21,21 @@ class TestDiffusion:
def test_update(self, monitor, schedules, sched_initials, logger):
monitor.update()
diffusions = models.Diffusion.objects.filter(
schedule__in=sched_initials
)
diffusions = models.Diffusion.objects.filter(schedule__in=sched_initials)
by_date = {}
for diff in diffusions:
assert diff.episode_id
by_date.setdefault(diff.schedule_id, set()).add(
(diff.start, diff.end)
)
by_date.setdefault(diff.schedule_id, set()).add((diff.start, diff.end))
for schedule in sched_initials:
if schedule.pk not in by_date:
continue
_, items = schedule.diffusions_of_month(now)
assert all(
(item.start, item.end) in by_date[schedule.pk]
for item in items
)
assert all((item.start, item.end) in by_date[schedule.pk] for item in items)
@pytest.mark.django_db
def test_clean(self, monitor, episode):
start = tz.make_aware(
datetime.combine(monitor.date - timedelta(days=1), time(10, 20))
)
start = tz.make_aware(datetime.combine(monitor.date - timedelta(days=1), time(10, 20)))
diff = models.Diffusion(
type=models.Diffusion.TYPE_UNCONFIRMED,
episode=episode,

View File

@@ -79,16 +79,12 @@ class TestLogArchiver:
def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs):
# before logs are deleted from db, get data
sorted = archiver.sort_logs(logs_qs)
paths = {
archiver.get_path(station, date) for station, date in sorted.keys()
}
paths = {archiver.get_path(station, date) for station, date in sorted.keys()}
count = archiver.archive(logs_qs, keep=False)
assert count == len(logs)
assert not logs_qs.count()
assert all(
path in paths for path, *_ in gzip._traces("open", args=True)
)
assert all(path in paths for path, *_ in gzip._traces("open", args=True))
results = archiver.load_file("dummy path")
assert results
@@ -104,7 +100,4 @@ class TestLogArchiver:
assert sorted
for (station, date), logs in sorted.items():
assert all(
log.station == station and log.date.date() == date
for log in logs
)
assert all(log.station == station and log.date.date() == date for log in logs)

View File

@@ -53,13 +53,7 @@ def path_infos():
@pytest.fixture
def sound_files(path_infos):
return {
k: r
for k, r in (
(path, SoundFile(conf.MEDIA_ROOT + "/" + path))
for path in path_infos.keys()
)
}
return {k: r for k, r in ((path, SoundFile(conf.MEDIA_ROOT + "/" + path)) for path in path_infos.keys())}
def test_sound_path(sound_files):
@@ -78,17 +72,9 @@ def test_read_path(path_infos, sound_files):
def _setup_diff(program, info):
episode = models.Episode(program=program, title="test-episode")
at = tz.datetime(
**{
k: info[k]
for k in ("year", "month", "day", "hour", "minute")
if info.get(k)
}
)
at = tz.datetime(**{k: info[k] for k in ("year", "month", "day", "hour", "minute") if info.get(k)})
at = tz.make_aware(at)
diff = models.Diffusion(
episode=episode, start=at, end=at + timedelta(hours=1)
)
diff = models.Diffusion(episode=episode, start=at, end=at + timedelta(hours=1))
episode.save()
diff.save()
return diff

View File

@@ -92,9 +92,7 @@ class TestTask:
task.log_msg = "--{event.src_path}--"
sound_file = task(event, logger=logger, kw=13)
assert sound_file._trace("sync", kw=True) == {"kw": 13}
assert logger._trace("info", args=True) == (
task.log_msg.format(event=event),
)
assert logger._trace("info", args=True) == (task.log_msg.format(event=event),)
class TestDeleteTask:
@@ -125,9 +123,7 @@ class TestModifiedTask:
datetime = Interface.inject(sound_monitor, "datetime", {"now": dt_now})
def sleep(imeta, n):
datetime._imeta.funcs[
"now"
] = modified_task.timestamp + tz.timedelta(hours=10)
datetime._imeta.funcs["now"] = modified_task.timestamp + tz.timedelta(hours=10)
time = Interface.inject(sound_monitor, "time", {"sleep": sleep})
modified_task.wait()
@@ -175,9 +171,7 @@ class TestMonitorHandler:
def test__submit(self, monitor_handler, event):
handler = Interface()
handler, created = monitor_handler._submit(
handler, event, "prefix", kw=13
)
handler, created = monitor_handler._submit(handler, event, "prefix", kw=13)
assert created
assert handler.future._trace("add_done_callback")
assert monitor_handler.pool._trace("submit") == (
@@ -192,9 +186,7 @@ class TestMonitorHandler:
@pytest.fixture
def monitor_interfaces():
items = {
"atexit": Interface.inject(
sound_monitor, "atexit", {"register": None, "leave": None}
),
"atexit": Interface.inject(sound_monitor, "atexit", {"register": None, "leave": None}),
"observer": Interface.inject(
sound_monitor,
"Observer",

View File

@@ -38,12 +38,8 @@ sox_values = {
@pytest.fixture
def sox_interfaces():
process = Interface(
None, {"communicate": ("", sox_output.encode("utf-8"))}
)
subprocess = Interface.inject(
sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
)
process = Interface(None, {"communicate": ("", sox_output.encode("utf-8"))})
subprocess = Interface.inject(sound_stats, "subprocess", {"Popen": lambda *_, **__: process})
yield {"process": process, "subprocess": subprocess}
subprocess._irelease()
@@ -110,9 +106,7 @@ class TestSoundStats:
def test_check(self, stats):
good = [{"val": i} for i in range(0, 11)]
bad = [{"val": i} for i in range(-10, 0)] + [
{"val": i} for i in range(11, 20)
]
bad = [{"val": i} for i in range(-10, 0)] + [{"val": i} for i in range(11, 20)]
stats.stats = good + bad
calls = {}
stats.resume = lambda *_: calls.setdefault("resume", True)

View File

@@ -12,11 +12,7 @@ class TestEpisode:
@pytest.mark.django_db
def test_podcasts(self, episode, podcasts):
podcasts = {
podcast.pk: podcast
for podcast in podcasts
if podcast.episode == episode
}
podcasts = {podcast.pk: podcast for podcast in podcasts if podcast.episode == episode}
for data in episode.podcasts:
podcast = podcasts[data["pk"]]
assert data["name"] == podcast.name

View File

@@ -12,44 +12,28 @@ class TestRerunQuerySet:
@pytest.mark.django_db
def test_station_by_obj(self, stations, schedules):
for station in stations:
queryset = (
Schedule.objects.station(station)
.distinct()
.values_list("program__station", flat=True)
)
queryset = Schedule.objects.station(station).distinct().values_list("program__station", flat=True)
assert queryset.count() == 1
assert queryset.first() == station.pk
@pytest.mark.django_db
def test_station_by_id(self, stations, schedules):
for station in stations:
queryset = (
Schedule.objects.station(id=station.pk)
.distinct()
.values_list("program__station", flat=True)
)
queryset = Schedule.objects.station(id=station.pk).distinct().values_list("program__station", flat=True)
assert queryset.count() == 1
assert queryset.first() == station.pk
@pytest.mark.django_db
def test_program_by_obj(self, programs, schedules):
for program in programs:
queryset = (
Schedule.objects.program(program)
.distinct()
.values_list("program", flat=True)
)
queryset = Schedule.objects.program(program).distinct().values_list("program", flat=True)
assert queryset.count() == 1
assert queryset.first() == program.pk
@pytest.mark.django_db
def test_program_by_id(self, programs, schedules):
for program in programs:
queryset = (
Schedule.objects.program(id=program.pk)
.distinct()
.values_list("program", flat=True)
)
queryset = Schedule.objects.program(id=program.pk).distinct().values_list("program", flat=True)
assert queryset.count() == 1
assert queryset.first() == program.pk
@@ -60,11 +44,7 @@ class TestRerunQuerySet:
@pytest.mark.django_db
def test_initial(self, schedules):
queryset = (
Schedule.objects.initial()
.distinct()
.values_list("initial", flat=True)
)
queryset = Schedule.objects.initial().distinct().values_list("initial", flat=True)
assert queryset.count() == 1
assert queryset.first() is None

View File

@@ -49,9 +49,7 @@ class TestSchedule:
@pytest.mark.django_db
def test_dates_of_month_ponctual(self):
schedule = baker.prepare(
Schedule, frequency=Schedule.Frequency.ponctual
)
schedule = baker.prepare(Schedule, frequency=Schedule.Frequency.ponctual)
at = schedule.date + relativedelta(months=4)
assert schedule.dates_of_month(at) == []
@@ -59,9 +57,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_last(self, months, hour):
schedule = baker.prepare(
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last
)
schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.last)
at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at)
assert len(datetimes) == 1
@@ -73,9 +69,7 @@ class TestSchedule:
at = date(at.year, at.month, month_info[1])
if at.weekday() < schedule.date.weekday():
at -= timedelta(days=7)
at += timedelta(days=schedule.date.weekday()) - timedelta(
days=at.weekday()
)
at += timedelta(days=schedule.date.weekday()) - timedelta(days=at.weekday())
assert dt.date() == at
# since the same method is used for first, second, etc. frequencies
@@ -84,9 +78,7 @@ class TestSchedule:
@pytest.mark.parametrize("months", range(0, 25, 4))
@pytest.mark.parametrize("hour", range(0, 24, 4))
def test_dates_of_month_every(self, months, hour):
schedule = baker.prepare(
Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every
)
schedule = baker.prepare(Schedule, time=time(hour, 00), frequency=Schedule.Frequency.every)
at = schedule.date + relativedelta(months=months)
datetimes = schedule.dates_of_month(at)
last = None
@@ -128,8 +120,4 @@ class TestSchedule:
episodes, diffusions = schedule.diffusions_of_month(at)
assert all(r.date in dates for r in episodes)
assert all(
(not r.initial or r.date in dates)
and r.type == Diffusion.TYPE_ON_AIR
for r in diffusions
)
assert all((not r.initial or r.date in dates) and r.type == Diffusion.TYPE_ON_AIR for r in diffusions)

View File

@@ -39,8 +39,7 @@ def test_user_default_groups():
groups = Group.objects.filter(name__in=default_groups.keys())
assert groups.exists()
assert all(
set(group.permissions.all().values_list("codename", flat=True))
== set(default_groups[group.name])
set(group.permissions.all().values_list("codename", flat=True)) == set(default_groups[group.name])
for group in groups
)
user_groups = set(user.groups.all().values_list("name", flat=True))
@@ -104,7 +103,5 @@ def test_schedule_pre_delete(sched, eps_diffs):
@pytest.mark.django_db
def test_diffusion_post_delete(eps_diffs):
eps = eps_diffs[0][0]
Diffusion.objects.filter(
id__in=[r.id for r in eps.diffusion_set.all()]
).delete()
Diffusion.objects.filter(id__in=[r.id for r in eps.diffusion_set.all()]).delete()
assert Episode.objects.filter(id=eps.id).first() is None

View File

@@ -29,9 +29,7 @@ def test_date_or_default():
def test_to_timedelta():
val = datetime(2023, 1, 10, hour=20, minute=10, second=1)
assert utils.to_timedelta(val) == timedelta(
hours=20, minutes=10, seconds=1
)
assert utils.to_timedelta(val) == timedelta(hours=20, minutes=10, seconds=1)
def test_to_seconds():

View File

@@ -23,16 +23,12 @@ class FakeView:
@pytest.fixture
def published_pages():
return baker.make(
models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3
)
return baker.make(models.Page, status=models.StaticPage.STATUS_PUBLISHED, _quantity=3)
@pytest.fixture
def unpublished_pages():
return baker.make(
models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3
)
return baker.make(models.Page, status=models.StaticPage.STATUS_DRAFT, _quantity=3)
@pytest.fixture

View File

@@ -96,9 +96,7 @@ class TestParentMixin:
@pytest.mark.django_db
def test_get_parent_raises_404(self, parent_mixin):
with pytest.raises(Http404):
parent_mixin.get_parent(
self.req, parent_slug="parent-invalid-slug"
)
parent_mixin.get_parent(self.req, parent_slug="parent-invalid-slug")
def test_get_parent_not_parent_model(self, parent_mixin):
parent_mixin.parent_model = None