Feat: packaging (#127)
- Add configuration files for packaging
- Precommit now uses ruff

Co-authored-by: bkfox <thomas bkfox net>
Reviewed-on: rc/aircox#127
@@ -55,9 +55,7 @@ class FakeSocket:
         data = self.recv_data
         self.recv_data = self.recv_data[count:]
         data = data[:count]
-        return (
-            data.encode("utf-8") if isinstance(data, str) else data
-        ) or b"\nEND"
+        return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"
 
     def is_sent(self, data):
         """Return True if provided data have been sent."""
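For readers skimming the diff: the collapsed return above means FakeSocket.recv() hands back the next slice of recv_data as bytes and falls back to b"\nEND" once the buffer is drained, mimicking a terminated control-socket response. A minimal, self-contained sketch of that contract (DemoSocket and its constructor are hypothetical; only the recv() body is taken from the hunk above):

    # Hypothetical stand-alone illustration of the recv() behaviour shown above.
    class DemoSocket:
        def __init__(self, recv_data=""):
            self.recv_data = recv_data  # assumed attribute, matching the diff context

        def recv(self, count=1024):
            data = self.recv_data
            self.recv_data = self.recv_data[count:]
            data = data[:count]
            return (data.encode("utf-8") if isinstance(data, str) else data) or b"\nEND"

    sock = DemoSocket("key=value\nEND")
    assert sock.recv(9) == b"key=value"    # first slice of the buffered response
    assert sock.recv(1024) == b"\nEND"     # rest of the buffer
    assert sock.recv(1024) == b"\nEND"     # empty buffer falls back to the sentinel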
@@ -68,9 +66,7 @@ class FakeSocket:
 # -- models
 @pytest.fixture
 def station():
-    station = models.Station(
-        name="test", path=working_dir, default=True, active=True
-    )
+    station = models.Station(name="test", path=working_dir, default=True, active=True)
     station.save()
     return station
 
@@ -136,9 +132,7 @@ def program(station):
 
 @pytest.fixture
 def stream(program):
-    stream = models.Stream(
-        program=program, begin=time(10, 12), end=time(12, 13)
-    )
+    stream = models.Stream(program=program, begin=time(10, 12), end=time(12, 13))
     stream.save()
     return stream
 
@@ -229,10 +223,7 @@ def metadata_data(metadata_data_air_time):
 
 @pytest.fixture
 def metadata_string(metadata_data):
-    return (
-        "\n".join(f"{key}={value}" for key, value in metadata_data.items())
-        + "\nEND"
-    )
+    return "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"
 
 
 # -- streamers
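The collapsed metadata_string fixture above builds the newline-separated key=value block, terminated by END, that the fake socket replies with. A small worked example with hypothetical sample values (the keys are placeholders, not taken from the metadata_data fixture):

    # Hypothetical sample; shows the string format built by the fixture above.
    metadata_data = {"rid": "13", "status": "playing"}
    metadata_string = "\n".join(f"{key}={value}" for key, value in metadata_data.items()) + "\nEND"
    assert metadata_string == "rid=13\nstatus=playing\nEND"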
@@ -285,9 +276,7 @@ class FakeQueueSource(FakeSource, controllers.QueueSource):
 @pytest.fixture
 def streamer(station, station_ports):
     streamer = FakeStreamer(station=station)
-    streamer.sources = [
-        FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
-    ]
+    streamer.sources = [FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)]
     streamer.dealer = FakeQueueSource(len(streamer.sources))
     streamer.sources.append(streamer.dealer)
     return streamer
@@ -297,12 +286,8 @@ def streamer(station, station_ports):
 def streamers(stations, stations_ports):
     streamers = controllers.Streamers(streamer_class=FakeStreamer)
     # avoid unecessary db calls
-    streamers.streamers = {
-        station.pk: FakeStreamer(station=station) for station in stations
-    }
+    streamers.streamers = {station.pk: FakeStreamer(station=station) for station in stations}
     for j, streamer in enumerate(streamers.values()):
-        streamer.sources = [
-            FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
-        ]
+        streamer.sources = [FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)]
         streamer.sources.append(FakeQueueSource(len(streamer.sources)))
     return streamers
@@ -16,9 +16,7 @@ class TestConnector:
         assert connector.is_open
         assert connector.socket.family == socket.AF_UNIX
         assert connector.socket.type == socket.SOCK_STREAM
-        assert connector.socket.address == os.path.join(
-            working_dir, "test.sock"
-        )
+        assert connector.socket.address == os.path.join(working_dir, "test.sock")
         connector.close()
 
     def test_open_af_inet(self, connector):
@@ -37,9 +37,7 @@ class TestBaseMetaData:
         assert metadata.validate_status("any") == "stopped"
 
     @pytest.mark.django_db
-    def test_validate_air_time(
-        self, metadata, metadata_data, metadata_data_air_time
-    ):
+    def test_validate_air_time(self, metadata, metadata_data, metadata_data_air_time):
         air_time = metadata_data["on_air"]
         result = metadata.validate_air_time(air_time)
         assert result == metadata_data_air_time
@@ -43,10 +43,7 @@ def source(monitor, streamer, sound, diffusion):
 
 @pytest.fixture
 def tracks(sound):
-    items = [
-        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
-        for i in range(0, 4)
-    ]
+    items = [baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60) for i in range(0, 4)]
    models.Track.objects.bulk_create(items)
    return items
 
@@ -178,9 +175,7 @@ class TestMonitor:
         assert all(log_by_track.count(track) for track in tracks)
 
     @pytest.mark.django_db(transaction=True)
-    def test_trace_tracks_returns_on_log_diffusion(
-        self, monitor, log, diffusion, tracks
-    ):
+    def test_trace_tracks_returns_on_log_diffusion(self, monitor, log, diffusion, tracks):
         log.diffusion = None
         monitor.trace_tracks(log)
 
@@ -210,9 +205,7 @@ class TestMonitor:
         assert not monitor.calls["cancel_diff"]
 
     @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff(
-        self, monitor, streamer, diffusion, log
-    ):
+    def test_handle_diffusions_returns_on_diff(self, monitor, streamer, diffusion, log):
         interface(
             monitor,
             {
@@ -232,9 +225,7 @@ class TestMonitor:
         assert not monitor.calls["cancel_diff"]
 
     @pytest.mark.django_db(transaction=True)
-    def test_handle_diffusions_returns_on_diff_log_exists(
-        self, monitor, streamer, diffusion, log
-    ):
+    def test_handle_diffusions_returns_on_diff_log_exists(self, monitor, streamer, diffusion, log):
         interface(
             monitor,
             {
@@ -264,9 +255,7 @@ class TestMonitor:
         streamer.dealer.queue = None
         streamer.dealer.rid = "13"
         streamer.dealer.remaining = monitor.delay.total_seconds() + 10
-        diffusion.start = (
-            tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
-        )
+        diffusion.start = tz.now() - monitor.cancel_timeout - tz.timedelta(seconds=30)
         diffusion.end = tz.now() + tz.timedelta(minutes=30)
         diffusion.save()
 
@@ -285,9 +274,7 @@ class TestMonitor:
         assert log.comment == "test"
 
     @pytest.mark.django_db(transaction=True)
-    def test_start_diff(
-        self, monitor, diffusion, source, episode, sound, tracks
-    ):
+    def test_start_diff(self, monitor, diffusion, source, episode, sound, tracks):
         result = {}
         monitor.log = lambda **kw: result.update(kw)
 
@@ -321,17 +308,10 @@ class TestMonitor:
         monitor.sync()
 
         assert monitor.sync_next >= now + monitor.sync_timeout
-        assert all(
-            source.calls.get("sync") for source in monitor.streamer.playlists
-        )
+        assert all(source.calls.get("sync") for source in monitor.streamer.playlists)
 
     @pytest.mark.django_db(transaction=True)
     def test_sync_timeout_not_reached_skip_sync(self, monitor):
-        monitor.sync_next = tz.now() + tz.timedelta(
-            seconds=monitor.sync_timeout.total_seconds() + 20
-        )
+        monitor.sync_next = tz.now() + tz.timedelta(seconds=monitor.sync_timeout.total_seconds() + 20)
         monitor.sync()
-        assert all(
-            not source.calls.get("sync")
-            for source in monitor.streamer.playlists
-        )
+        assert all(not source.calls.get("sync") for source in monitor.streamer.playlists)
@@ -67,11 +67,7 @@ class TestPlaylistSource:
     @pytest.mark.django_db
     def test_get_sound_queryset(self, playlist_source, sounds):
         query = playlist_source.get_sound_queryset()
-        assert all(
-            r.program_id == playlist_source.program.pk
-            and r.type == r.TYPE_ARCHIVE
-            for r in query
-        )
+        assert all(r.program_id == playlist_source.program.pk and r.type == r.TYPE_ARCHIVE for r in query)
 
     @pytest.mark.django_db
     def test_get_playlist(self, playlist_source, sounds):
@@ -114,9 +110,7 @@ class TestQueueSource:
     @pytest.mark.django_db
     def test_requests(self, queue_source, socket, metadata_string):
         queue_source.queue = [13, 14, 15]
-        socket.recv_data = [
-            f"{metadata_string}\nEND" for _ in queue_source.queue
-        ]
+        socket.recv_data = [f"{metadata_string}\nEND" for _ in queue_source.queue]
 
         requests = queue_source.requests
 
@@ -127,10 +121,7 @@ class TestQueueSource:
     def test_push(self, queue_source, socket):
         paths = ["/tmp/a", "/tmp/b"]
         queue_source.push(*paths)
-        assert all(
-            socket.is_sent(f"{queue_source.id}_queue.push {path}")
-            for path in paths
-        )
+        assert all(socket.is_sent(f"{queue_source.id}_queue.push {path}") for path in paths)
 
     @pytest.mark.django_db
     def test_fetch(self, queue_source, socket, metadata_string):
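The collapsed assertion in test_push above checks the command string written to the fake socket for each queued path. With a hypothetical source id, the formatted commands look like this (queue_id stands in for queue_source.id; all values are illustrative only):

    # Hypothetical values illustrating the command format asserted above.
    queue_id = "dealer"
    paths = ["/tmp/a", "/tmp/b"]
    commands = [f"{queue_id}_queue.push {path}" for path in paths]
    assert commands == ["dealer_queue.push /tmp/a", "dealer_queue.push /tmp/b"]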
@@ -12,9 +12,7 @@ class TestStreamers:
     @pytest.fixture
    def test_reset(self, streamers, stations):
         streamers.reset()
-        assert all(
-            streamers.streamers[station.pk] == station for station in stations
-        )
+        assert all(streamers.streamers[station.pk] == station for station in stations)
 
     @pytest.fixture
     def test_fetch(self, streamers):
@@ -168,18 +168,14 @@ class TestQueueSourceViewSet:
         calls = {}
         sound = sounds[0]
         request = FakeRequest(station=station, data={"sound_id": sound.pk})
-        queue_source_viewset._run = lambda pk, func: calls.setdefault(
-            "_run", (pk, func)
-        )
+        queue_source_viewset._run = lambda pk, func: calls.setdefault("_run", (pk, func))
         result = queue_source_viewset.push(request, 13)
         assert "_run" in calls
         assert result[0] == 13
         assert callable(result[1])
 
     @pytest.mark.django_db
-    def test_push_missing_sound_in_request_post(
-        self, queue_source_viewset, station
-    ):
+    def test_push_missing_sound_in_request_post(self, queue_source_viewset, station):
         request = FakeRequest(station=station, data={})
         with pytest.raises(ValidationError):
             queue_source_viewset.push(request, 0)