Feat: packaging (#127)

- Add configuration files for packaging
- Pre-commit now uses ruff (see the configuration sketch below)
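
The packaging and lint configuration files added by this commit are not shown in the hunks below. As an illustration only, a minimal pyproject.toml along these lines would be consistent with the commit message and with the longer lines visible in the diff; the project metadata, version, and exact line-length value are assumptions, not taken from the commit.

```toml
# Hypothetical sketch of a pyproject.toml for this kind of change.
# Names, versions and the line-length value are assumptions.
[build-system]
requires = ["setuptools>=61"]
build-backend = "setuptools.build_meta"

[project]
name = "aircox"            # assumed from the repository name
version = "0.1.0"          # placeholder
requires-python = ">=3.10" # assumed

[tool.ruff]
line-length = 120          # a longer limit would explain the joined lines in the hunks below
```

On the pre-commit side, the usual way to switch to ruff is to point .pre-commit-config.yaml at the ruff pre-commit hooks (https://github.com/astral-sh/ruff-pre-commit) in place of separate flake8/isort/black hooks; the hook revision actually pinned by this commit is not visible here.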

Co-authored-by: bkfox <thomas bkfox net>
Reviewed-on: rc/aircox#127
Thomas Kairos
2023-10-11 10:58:34 +02:00
parent 5ea092dba6
commit f7a61fe6c0
82 changed files with 332 additions and 935 deletions

View File

@ -45,9 +45,7 @@ class Connector:
if self.is_open:
return 1
family = (
socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
)
family = socket.AF_UNIX if isinstance(self.address, str) else socket.AF_INET
try:
self.socket = self.socket_class(family, socket.SOCK_STREAM)
self.socket.connect(self.address)
@ -78,13 +76,7 @@ class Connector:
if data:
data = response_re.sub(r"\1", data).strip()
data = (
self.parse(data)
if parse
else self.parse_json(data)
if parse_json
else data
)
data = self.parse(data) if parse else self.parse_json(data) if parse_json else data
return data
except Exception:
self.close()

View File

@ -62,9 +62,7 @@ class Monitor:
def get_logs_queryset(self):
"""Return queryset to assign as `self.logs`"""
return self.station.log_set.select_related(
"diffusion", "sound", "track"
).order_by("-pk")
return self.station.log_set.select_related("diffusion", "sound", "track").order_by("-pk")
def init_last_sound_logs(self):
"""Retrieve last logs and initialize `last_sound_logs`"""
@ -136,12 +134,7 @@ class Monitor:
diff = None
sound = Sound.objects.path(air_uri).first()
if sound and sound.episode_id is not None:
diff = (
Diffusion.objects.episode(id=sound.episode_id)
.on_air()
.now(air_time)
.first()
)
diff = Diffusion.objects.episode(id=sound.episode_id).on_air().now(air_time).first()
# log sound on air
return self.log(
@ -158,9 +151,7 @@ class Monitor:
if log.diffusion:
return
tracks = Track.objects.filter(
sound_id=log.sound_id, timestamp__isnull=False
).order_by("timestamp")
tracks = Track.objects.filter(sound_id=log.sound_id, timestamp__isnull=False).order_by("timestamp")
if not tracks.exists():
return
@ -217,11 +208,7 @@ class Monitor:
dealer = self.streamer.dealer
# start
if (
not dealer.queue
and dealer.rid is None
or dealer.remaining < self.delay.total_seconds()
):
if not dealer.queue and dealer.rid is None or dealer.remaining < self.delay.total_seconds():
self.start_diff(dealer, diff)
# cancel
elif diff.start < now - self.cancel_timeout:

View File

@ -47,9 +47,7 @@ class Streamer:
self.id = self.station.slug.replace("-", "_")
self.path = os.path.join(station.path, "station.liq")
self.connector = connector or Connector(
os.path.join(station.path, "station.sock")
)
self.connector = connector or Connector(os.path.join(station.path, "station.sock"))
self.init_sources()
@property
@ -91,9 +89,7 @@ class Streamer:
def init_sources(self):
streams = self.station.program_set.filter(stream__isnull=False)
self.dealer = QueueSource(self, "dealer")
self.sources = [self.dealer] + [
PlaylistSource(self, program=program) for program in streams
]
self.sources = [self.dealer] + [PlaylistSource(self, program=program) for program in streams]
def make_config(self):
"""Make configuration files and directory (and sync sources)"""
@ -128,12 +124,7 @@ class Streamer:
self.source = next(
iter(
sorted(
(
source
for source in self.sources
if source.request_status == "playing"
and source.air_time
),
(source for source in self.sources if source.request_status == "playing" and source.air_time),
key=lambda o: o.air_time,
reverse=True,
)
@ -149,11 +140,7 @@ class Streamer:
if not os.path.exists(self.socket_path):
return
conns = [
conn
for conn in psutil.net_connections(kind="unix")
if conn.laddr == self.socket_path
]
conns = [conn for conn in psutil.net_connections(kind="unix") if conn.laddr == self.socket_path]
for conn in conns:
if conn.pid is not None:
os.kill(conn.pid, signal.SIGKILL)

View File

@ -23,9 +23,7 @@ class Streamers:
def reset(self, stations=Station.objects.active()):
# FIXME: cf. TODO in aircox.controllers about model updates
stations = stations.all()
self.streamers = {
station.pk: self.streamer_class(station) for station in stations
}
self.streamers = {station.pk: self.streamer_class(station) for station in stations}
def fetch(self):
"""Call streamers fetch if timed-out."""

View File

@ -62,42 +62,24 @@ class Command(BaseCommand):
"--station",
type=str,
action="append",
help="name of the station to monitor instead of monitoring "
"all stations",
help="name of the station to monitor instead of monitoring " "all stations",
)
group.add_argument(
"-t",
"--timeout",
type=float,
default=Monitor.cancel_timeout.total_seconds() / 60,
help="time to wait in MINUTES before canceling a diffusion that "
"should have ran but did not. ",
help="time to wait in MINUTES before canceling a diffusion that " "should have ran but did not. ",
)
# TODO: sync-timeout, cancel-timeout
def handle(
self,
*args,
config=None,
run=None,
monitor=None,
station=[],
delay=1000,
timeout=600,
**options
):
stations = (
Station.objects.filter(name__in=station)
if station
else Station.objects.all()
)
def handle(self, *args, config=None, run=None, monitor=None, station=[], delay=1000, timeout=600, **options):
stations = Station.objects.filter(name__in=station) if station else Station.objects.all()
streamers = [Streamer(station) for station in stations]
for streamer in streamers:
if not streamer.outputs:
raise RuntimeError(
"Streamer {} has no outputs".format(streamer.id)
)
raise RuntimeError("Streamer {} has no outputs".format(streamer.id))
if config:
streamer.make_config()
if run:
@ -106,10 +88,7 @@ class Command(BaseCommand):
if monitor:
delay = tz.timedelta(milliseconds=delay)
timeout = tz.timedelta(minutes=timeout)
monitors = [
Monitor(streamer, delay, cancel_timeout=timeout)
for streamer in streamers
]
monitors = [Monitor(streamer, delay, cancel_timeout=timeout) for streamer in streamers]
while not run or streamer.is_running:
for monitor in monitors:

View File

@ -55,9 +55,7 @@ class FakeSocket:
data = self.recv_data
self.recv_data = self.recv_data[count:]
data = data[:count]
return (
data.encode("utf-8") if isinstance(data, str) else data
) or b"\nEND"
return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"
def is_sent(self, data):
"""Return True if provided data have been sent."""
@ -68,9 +66,7 @@ class FakeSocket:
# -- models
@pytest.fixture
def station():
station = models.Station(
name="test", path=working_dir, default=True, active=True
)
station = models.Station(name="test", path=working_dir, default=True, active=True)
station.save()
return station
@ -136,9 +132,7 @@ def program(station):
@pytest.fixture
def stream(program):
stream = models.Stream(
program=program, begin=time(10, 12), end=time(12, 13)
)
stream = models.Stream(program=program, begin=time(10, 12), end=time(12, 13))
stream.save()
return stream
@ -229,10 +223,7 @@ def metadata_data(metadata_data_air_time):
@pytest.fixture
def metadata_string(metadata_data):
return (
"\n".join(f"{key}={value}" for key, value in metadata_data.items())
+ "\nEND"
)
return "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"
# -- streamers
@ -285,9 +276,7 @@ class FakeQueueSource(FakeSource, controllers.QueueSource):
@pytest.fixture
def streamer(station, station_ports):
streamer = FakeStreamer(station=station)
streamer.sources = [
FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
]
streamer.sources = [FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)]
streamer.dealer = FakeQueueSource(len(streamer.sources))
streamer.sources.append(streamer.dealer)
return streamer
@ -297,12 +286,8 @@ def streamer(station, station_ports):
def streamers(stations, stations_ports):
streamers = controllers.Streamers(streamer_class=FakeStreamer)
# avoid unnecessary db calls
streamers.streamers = {
station.pk: FakeStreamer(station=station) for station in stations
}
streamers.streamers = {station.pk: FakeStreamer(station=station) for station in stations}
for j, streamer in enumerate(streamers.values()):
streamer.sources = [
FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
]
streamer.sources = [FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)]
streamer.sources.append(FakeQueueSource(len(streamer.sources)))
return streamers

View File

@ -16,9 +16,7 @@ class TestConnector:
assert connector.is_open
assert connector.socket.family == socket.AF_UNIX
assert connector.socket.type == socket.SOCK_STREAM
assert connector.socket.address == os.path.join(
working_dir, "test.sock"
)
assert connector.socket.address == os.path.join(working_dir, "test.sock")
connector.close()
def test_open_af_inet(self, connector):

View File

@ -37,9 +37,7 @@ class TestBaseMetaData:
assert metadata.validate_status("any") == "stopped"
@pytest.mark.django_db
def test_validate_air_time(
self, metadata, metadata_data, metadata_data_air_time
):
def test_validate_air_time(self, metadata, metadata_data, metadata_data_air_time):
air_time = metadata_data["on_air"]
result = metadata.validate_air_time(air_time)
assert result == metadata_data_air_time

View File

@ -43,10 +43,7 @@ def source(monitor, streamer, sound, diffusion):
@pytest.fixture
def tracks(sound):
items = [
baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
for i in range(0, 4)
]
items = [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 4)]
models.Track.objects.bulk_create(items)
return items
@ -178,9 +175,7 @@ class TestMonitor:
assert all(log_by_track.count(track) for track in tracks)
@pytest.mark.django_db(transaction=True)
def test_trace_tracks_returns_on_log_diffusion(
self, monitor, log, diffusion, tracks
):
def test_trace_tracks_returns_on_log_diffusion(self, monitor, log, diffusion, tracks):
log.diffusion = None
monitor.trace_tracks(log)
@ -210,9 +205,7 @@ class TestMonitor:
assert not monitor.calls["cancel_diff"]
@pytest.mark.django_db(transaction=True)
def test_handle_diffusions_returns_on_diff(
self, monitor, streamer, diffusion, log
):
def test_handle_diffusions_returns_on_diff(self, monitor, streamer, diffusion, log):
interface(
monitor,
{
@ -232,9 +225,7 @@ class TestMonitor:
assert not monitor.calls["cancel_diff"]
@pytest.mark.django_db(transaction=True)
def test_handle_diffusions_returns_on_diff_log_exists(
self, monitor, streamer, diffusion, log
):
def test_handle_diffusions_returns_on_diff_log_exists(self, monitor, streamer, diffusion, log):
interface(
monitor,
{
@ -264,9 +255,7 @@ class TestMonitor:
streamer.dealer.queue = None
streamer.dealer.rid = "13"
streamer.dealer.remaining = monitor.delay.total_seconds() + 10
diffusion.start = (
tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
)
diffusion.start = tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
diffusion.end = tz.now() + tz.timedelta(minutes=30)
diffusion.save()
@ -285,9 +274,7 @@ class TestMonitor:
assert log.comment == "test"
@pytest.mark.django_db(transaction=True)
def test_start_diff(
self, monitor, diffusion, source, episode, sound, tracks
):
def test_start_diff(self, monitor, diffusion, source, episode, sound, tracks):
result = {}
monitor.log = lambda **kw: result.update(kw)
@ -321,17 +308,10 @@ class TestMonitor:
monitor.sync()
assert monitor.sync_next >= now + monitor.sync_timeout
assert all(
source.calls.get("sync") for source in monitor.streamer.playlists
)
assert all(source.calls.get("sync") for source in monitor.streamer.playlists)
@pytest.mark.django_db(transaction=True)
def test_sync_timeout_not_reached_skip_sync(self, monitor):
monitor.sync_next = tz.now() + tz.timedelta(
seconds=monitor.sync_timeout.total_seconds() + 20
)
monitor.sync_next = tz.now() + tz.timedelta(seconds=monitor.sync_timeout.total_seconds() + 20)
monitor.sync()
assert all(
not source.calls.get("sync")
for source in monitor.streamer.playlists
)
assert all(not source.calls.get("sync") for source in monitor.streamer.playlists)

View File

@ -67,11 +67,7 @@ class TestPlaylistSource:
@pytest.mark.django_db
def test_get_sound_queryset(self, playlist_source, sounds):
query = playlist_source.get_sound_queryset()
assert all(
r.program_id == playlist_source.program.pk
and r.type == r.TYPE_ARCHIVE
for r in query
)
assert all(r.program_id == playlist_source.program.pk and r.type == r.TYPE_ARCHIVE for r in query)
@pytest.mark.django_db
def test_get_playlist(self, playlist_source, sounds):
@ -114,9 +110,7 @@ class TestQueueSource:
@pytest.mark.django_db
def test_requests(self, queue_source, socket, metadata_string):
queue_source.queue = [13, 14, 15]
socket.recv_data = [
f"{metadata_string}\nEND" for _ in queue_source.queue
]
socket.recv_data = [f"{metadata_string}\nEND" for _ in queue_source.queue]
requests = queue_source.requests
@ -127,10 +121,7 @@ class TestQueueSource:
def test_push(self, queue_source, socket):
paths = ["/tmp/a", "/tmp/b"]
queue_source.push(*paths)
assert all(
socket.is_sent(f"{queue_source.id}_queue.push {path}")
for path in paths
)
assert all(socket.is_sent(f"{queue_source.id}_queue.push {path}") for path in paths)
@pytest.mark.django_db
def test_fetch(self, queue_source, socket, metadata_string):

View File

@ -12,9 +12,7 @@ class TestStreamers:
@pytest.fixture
def test_reset(self, streamers, stations):
streamers.reset()
assert all(
streamers.streamers[station.pk] == station for station in stations
)
assert all(streamers.streamers[station.pk] == station for station in stations)
@pytest.fixture
def test_fetch(self, streamers):

View File

@ -168,18 +168,14 @@ class TestQueueSourceViewSet:
calls = {}
sound = sounds[0]
request = FakeRequest(station=station, data={"sound_id": sound.pk})
queue_source_viewset._run = lambda pk, func: calls.setdefault(
"_run", (pk, func)
)
queue_source_viewset._run = lambda pk, func: calls.setdefault("_run", (pk, func))
result = queue_source_viewset.push(request, 13)
assert "_run" in calls
assert result[0] == 13
assert callable(result[1])
@pytest.mark.django_db
def test_push_missing_sound_in_request_post(
self, queue_source_viewset, station
):
def test_push_missing_sound_in_request_post(self, queue_source_viewset, station):
request = FakeRequest(station=station, data={})
with pytest.raises(ValidationError):
queue_source_viewset.push(request, 0)

View File

@ -73,9 +73,7 @@ class StreamerViewSet(ControllerViewSet):
return Response(self.serialize(self.streamer))
def list(self, request, pk=None):
return Response(
{"results": self.serialize(self.streamers.values(), many=True)}
)
return Response({"results": self.serialize(self.streamers.values(), many=True)})
def dispatch(self, request, *args, pk=None, **kwargs):
if pk is not None:
@ -93,9 +91,7 @@ class SourceViewSet(ControllerViewSet):
return (s for s in self.streamer.sources if isinstance(s, self.model))
def get_source(self, pk):
source = next(
(source for source in self.get_sources() if source.id == pk), None
)
source = next((source for source in self.get_sources() if source.id == pk), None)
if source is None:
raise Http404("source `%s` not found" % pk)
return source
@ -105,9 +101,7 @@ class SourceViewSet(ControllerViewSet):
return Response(self.serialize(source))
def list(self, request):
return Response(
{"results": self.serialize(self.get_sources(), many=True)}
)
return Response({"results": self.serialize(self.get_sources(), many=True)})
def _run(self, pk, action):
source = self.object = self.get_source(pk)
@ -150,9 +144,5 @@ class QueueSourceViewSet(SourceViewSet):
if not request.data.get("sound_id"):
raise ValidationError('missing "sound_id" POST data')
sound = get_object_or_404(
self.get_sound_queryset(request), pk=request.data["sound_id"]
)
return self._run(
pk, lambda s: s.push(sound.file.path) if sound.file.path else None
)
sound = get_object_or_404(self.get_sound_queryset(request), pk=request.data["sound_id"])
return self._run(pk, lambda s: s.push(sound.file.path) if sound.file.path else None)