forked from rc/aircox
- Writes tests for aircox streamer application
- Add test utilities in aircox

Co-authored-by: bkfox <thomas bkfox net>
Reviewed-on: rc/aircox#110

aircox_streamer/tests/__init__.py (new file, 0 lines)

aircox_streamer/tests/conftest.py (new file, 337 lines)
@@ -0,0 +1,337 @@
import itertools
import os

from datetime import datetime, time
import tzlocal

import pytest
from model_bakery import baker

from aircox import models
from aircox_streamer import controllers
from aircox_streamer.connector import Connector


local_tz = tzlocal.get_localzone()


working_dir = os.path.join(os.path.dirname(__file__), "working_dir")


def interface_wrap(obj, attr, value):
    if not isinstance(getattr(obj, "calls", None), dict):
        obj.calls = {}
    obj.calls[attr] = None

    def wrapper(*a, **kw):
        call = obj.calls.get(attr)
        if call is None:
            obj.calls[attr] = (a, kw)
        elif isinstance(call, tuple):
            obj.calls[attr] = [call, (a, kw)]
        else:
            call.append((a, kw))
        return value

    setattr(obj, attr, wrapper)


def interface(obj, funcs):
    """Override provided object's functions using a dict of funcs, as
    ``{func_name: return_value}``.

    Attribute ``obj.calls`` is a dict with all calls done using those
    methods, as ``{func_name: (args, kwargs)}``.
    """
    for attr, value in funcs.items():
        interface_wrap(obj, attr, value)


class FakeSocket:
    FAILING_ADDRESS = -1
    """Connecting to this address fails."""

    family, type, address = None, None, None
    sent_data = None
    """List of data that have been passed to ``send[all]``."""
    recv_data = None
    """Response data to return on ``recv``."""

    def __init__(self, family, type):
        self.family = family
        self.type = type
        self.sent_data = []
        self.recv_data = ""

    def connect(self, address):
        if address == self.FAILING_ADDRESS:
            raise RuntimeError("invalid connection")
        self.address = address

    def close(self):
        pass

    def sendall(self, data):
        self.sent_data.append(data.decode())

    def recv(self, count):
        if isinstance(self.recv_data, list):
            if len(self.recv_data):
                data, self.recv_data = self.recv_data[0], self.recv_data[1:]
            else:
                data = ""
        else:
            data = self.recv_data
            self.recv_data = self.recv_data[count:]
        data = data[:count]
        return (
            data.encode("utf-8") if isinstance(data, str) else data
        ) or b"\nEND"

    def is_sent(self, data):
        """Return True if provided data have been sent."""
        # use [:-1] because the connector adds "\n" to sent data
        return any(r for r in self.sent_data if r == data or r[:-1] == data)


# -- models
@pytest.fixture
def station():
    station = models.Station(
        name="test", path=working_dir, default=True, active=True
    )
    station.save()
    return station


@pytest.fixture
def stations(station):
    objs = [
        models.Station(
            name=f"test-{i}",
            slug=f"test-{i}",
            path=working_dir,
            default=(i == 0),
            active=True,
        )
        for i in range(0, 3)
    ]
    models.Station.objects.bulk_create(objs)
    return [station] + objs


@pytest.fixture
def station_ports(station):
    return _stations_ports(station)


@pytest.fixture
def stations_ports(stations):
    return _stations_ports(*stations)


def _stations_ports(*stations):
    items = list(
        itertools.chain(
            *[
                (
                    models.Port(
                        station=station,
                        direction=models.Port.DIRECTION_INPUT,
                        type=models.Port.TYPE_HTTP,
                        active=True,
                    ),
                    models.Port(
                        station=station,
                        direction=models.Port.DIRECTION_OUTPUT,
                        type=models.Port.TYPE_FILE,
                        active=True,
                    ),
                )
                for station in stations
            ]
        )
    )
    models.Port.objects.bulk_create(items)
    return items


@pytest.fixture
def program(station):
    program = models.Program(title="test", station=station)
    program.save()
    return program


@pytest.fixture
def stream(program):
    stream = models.Stream(
        program=program, begin=time(10, 12), end=time(12, 13)
    )
    stream.save()
    return stream


@pytest.fixture
def episode(program):
    return baker.make(models.Episode, title="test episode", program=program)


@pytest.fixture
def sound(program, episode):
    sound = models.Sound(
        program=program,
        episode=episode,
        name="sound",
        type=models.Sound.TYPE_ARCHIVE,
        position=0,
        file="sound.mp3",
    )
    sound.save(check=False)
    return sound


@pytest.fixture
def sounds(program):
    items = [
        models.Sound(
            name=f"sound {i}",
            program=program,
            type=models.Sound.TYPE_ARCHIVE,
            position=i,
            file=f"sound-{i}.mp3",
        )
        for i in range(0, 3)
    ]
    models.Sound.objects.bulk_create(items)
    return items


# -- connectors
@pytest.fixture
def connector():
    obj = Connector(os.path.join(working_dir, "test.sock"))
    obj.socket_class = FakeSocket
    yield obj
    obj.close()


@pytest.fixture
def fail_connector():
    obj = Connector(FakeSocket.FAILING_ADDRESS)
    obj.socket_class = FakeSocket
    yield obj
    obj.close()


@pytest.fixture
def controller(station, connector):
    connector.open()
    return controllers.Streamer(station, connector)


@pytest.fixture
def socket(controller):
    return controller.connector.socket


# -- metadata
@pytest.fixture
def metadata(controller):
    return controllers.Metadata(controller, 1)


@pytest.fixture
def metadata_data_air_time():
    return local_tz.localize(datetime(2023, 5, 1, 12, 10, 5))


@pytest.fixture
def metadata_data(metadata_data_air_time):
    return {
        "rid": 1,
        "initial_uri": "request_uri",
        "on_air": metadata_data_air_time.strftime("%Y/%m/%d %H:%M:%S"),
        "status": "playing",
    }


@pytest.fixture
def metadata_string(metadata_data):
    return (
        "\n".join(f"{key}={value}" for key, value in metadata_data.items())
        + "\nEND"
    )


# -- streamers
class FakeStreamer(controllers.Streamer):
    calls = {}
    is_ready = False

    def __init__(self, **kwargs):
        self.__dict__.update(**kwargs)

    def fetch(self):
        self.calls["fetch"] = True


class FakeSource(controllers.Source):
    def __init__(self, id, *args, **kwargs):
        self.id = id
        self.args = args
        self.kwargs = kwargs
        self.calls = {}

    def fetch(self):
        self.calls["sync"] = True

    def sync(self):
        self.calls["sync"] = True

    def push(self, *path):
        self.calls["push"] = path
        return path

    def skip(self):
        self.calls["skip"] = True

    def restart(self):
        self.calls["restart"] = True

    def seek(self, c):
        self.calls["seek"] = c


class FakePlaylist(FakeSource, controllers.PlaylistSource):
    pass


class FakeQueueSource(FakeSource, controllers.QueueSource):
    pass


@pytest.fixture
def streamer(station, station_ports):
    streamer = FakeStreamer(station=station)
    streamer.sources = [
        FakePlaylist(i, uri=f"source-{i}") for i in range(0, 3)
    ]
    streamer.sources.append(FakeQueueSource(len(streamer.sources)))
    return streamer


@pytest.fixture
def streamers(stations, stations_ports):
    streamers = controllers.Streamers(streamer_class=FakeStreamer)
    # avoid unnecessary db calls
    streamers.streamers = {
        station.pk: FakeStreamer(station=station) for station in stations
    }
    for j, streamer in enumerate(streamers.values()):
        streamer.sources = [
            FakePlaylist(i, uri=f"source-{j}-{i}") for i in range(0, 3)
        ]
        streamer.sources.append(FakeQueueSource(len(streamer.sources)))
    return streamers
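
Note on the `interface` helper above: it replaces the named attributes of an object with wrappers that record their call arguments in `obj.calls` and return the configured value. A minimal usage sketch, assuming `interface` from conftest.py is in scope; the `Dummy` class is hypothetical and only for illustration:

class Dummy:
    def ping(self):
        return "real"


dummy = Dummy()
interface(dummy, {"ping": "fake"})

assert dummy.ping() == "fake"           # wrapper returns the configured value
assert dummy.calls["ping"] == ((), {})  # single call stored as (args, kwargs)

dummy.ping(1, x=2)
# from the second call on, the entry becomes a list of (args, kwargs) tuples
assert dummy.calls["ping"] == [((), {}), ((1,), {"x": 2})]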

aircox_streamer/tests/fake_modules/__init__.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import atexit as o_atexit
import subprocess as o_subprocess
import psutil as o_psutil

from . import atexit, subprocess, psutil

modules = [
    (o_atexit, atexit, {}),
    (o_subprocess, subprocess, {}),
    (o_psutil, psutil, {}),
]


def init_mappings():
    for original, spoof, mapping in modules:
        if mapping:
            continue
        mapping.update(
            {
                attr: (getattr(original, attr, None), spoofed)
                for attr, spoofed in vars(spoof).items()
                if not attr.startswith("_") and hasattr(original, attr)
            }
        )


def setup():
    for original, spoof, mappings in modules:
        for attr, (orig, spoofed) in mappings.items():
            setattr(original, attr, spoofed)


def setdown():
    for original, spoof, mappings in modules:
        for attr, (orig, spoofed) in mappings.items():
            setattr(original, attr, orig)


init_mappings()
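
The fake_modules package swaps spoofed attributes into the real `atexit`, `subprocess` and `psutil` modules for the duration of a test: `init_mappings()` records, for every public name the real and fake modules share, the (original, spoof) pair; `setup()` installs the spoofs and `setdown()` restores the originals. A minimal sketch of the intended pattern, assuming the tests package is importable as `aircox_streamer.tests`; the assertions only illustrate the swap and are not part of the commit:

import subprocess as real_subprocess

from aircox_streamer.tests import fake_modules
from aircox_streamer.tests.fake_modules import subprocess as fake_subprocess

fake_modules.setup()
try:
    # the real module now exposes the spoofed Popen
    assert real_subprocess.Popen is fake_subprocess.Popen
    process = real_subprocess.Popen(["liquidsoap", "-v"])
    assert isinstance(process, fake_subprocess.FakeProcess)
finally:
    # restore the original attributes
    fake_modules.setdown()
    assert real_subprocess.Popen is not fake_subprocess.Popen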

aircox_streamer/tests/fake_modules/atexit.py (new file, 10 lines)
@@ -0,0 +1,10 @@
registered = []
"""Items registered by register()"""


def register(func, *args, **kwargs):
    registered.append(func)


def unregister(func):
    registered.remove(func)

aircox_streamer/tests/fake_modules/psutil.py (new file, 15 lines)
@@ -0,0 +1,15 @@
"""Spoof psutil module in order to run and check tests."""


class FakeNetConnection:
    def __init__(self, laddr, pid=None):
        self.laddr = laddr
        self.pid = pid


def net_connections(*args, **kwargs):
    return net_connections.result


net_connections.result = []
"""Result value of net_connections call."""
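
The spoofed `net_connections` reads its return value from the module-level `net_connections.result` attribute, so a test primes that attribute before exercising code that scans for the streamer socket. A minimal sketch; the socket path and pid are illustrative values only:

from aircox_streamer.tests.fake_modules import psutil as fake_psutil

# make the spoof report one connection bound to the streamer socket
fake_psutil.net_connections.result = [
    fake_psutil.FakeNetConnection("/tmp/test.sock", pid=1234),
]
assert fake_psutil.net_connections()[0].laddr == "/tmp/test.sock"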

aircox_streamer/tests/fake_modules/subprocess.py (new file, 39 lines)
@@ -0,0 +1,39 @@
"""Spoof subprocess module in order to run and check tests. Resulting
values of method calls are set inside the `fixtures` module."""

STDOUT = 1
STDERR = 2
STDIN = 3


class FakeProcess:
    args = None
    kwargs = None
    """Kwargs passed to Popen."""
    killed = False
    """kill() has been called."""
    waited = False
    """wait() has been called."""
    polled = False
    """poll() has been called."""
    poll_result = None
    """Result of poll() method."""

    def __init__(self, args=[], kwargs={}):
        self.pid = -13
        self.args = args
        self.kwargs = kwargs

    def kill(self):
        self.killed = True

    def wait(self):
        self.waited = True

    def poll(self):
        self.polled = True
        return self.poll_result


def Popen(args, **kwargs):
    return FakeProcess(args, kwargs)

aircox_streamer/tests/test_connector.py (new file, 70 lines)
@@ -0,0 +1,70 @@
import json
import os
import socket

from .conftest import working_dir


class TestConnector:
    payload = "non_value_info\n" 'a="value_1"\n' 'b="value_b"\n' "END"
    """Test payload."""
    payload_data = {"a": "value_1", "b": "value_b"}
    """Resulting data of payload."""

    def test_open(self, connector):
        assert connector.open() == 0
        assert connector.is_open
        assert connector.socket.family == socket.AF_UNIX
        assert connector.socket.type == socket.SOCK_STREAM
        assert connector.socket.address == os.path.join(
            working_dir, "test.sock"
        )
        connector.close()

    def test_open_af_inet(self, connector):
        address = ("test", 30)
        connector.address = address
        assert connector.open() == 0
        assert connector.is_open
        assert connector.socket.family == socket.AF_INET
        assert connector.socket.type == socket.SOCK_STREAM
        assert connector.socket.address == address

    def test_open_is_already_open(self, connector):
        connector.open()
        assert connector.open() == 1

    def test_open_failure(self, fail_connector):
        assert fail_connector.open() == -1
        assert fail_connector.socket is None  # close() called

    def test_close(self, connector):
        connector.open()
        assert connector.socket is not None
        connector.close()
        assert connector.socket is None

    def test_send(self, connector):
        connector.open()
        connector.socket.recv_data = self.payload
        result = connector.send("fake_action", parse=True)
        assert result == self.payload_data

    def test_send_open_failure(self, fail_connector):
        assert fail_connector.send("fake_action", parse=True) is None

    def test_parse(self, connector):
        result = connector.parse(self.payload)
        assert result == self.payload_data

    def test_parse_json(self, connector):
        # include case where json string is surrounded by '"'
        dumps = '"' + json.dumps(self.payload_data) + '"'
        result = connector.parse_json(dumps)
        assert result == self.payload_data

    def test_parse_json_empty_value(self, connector):
        assert connector.parse_json('""') is None

    def test_parse_json_failure(self, connector):
        assert connector.parse_json("-- invalid json string --") is None

aircox_streamer/tests/test_controllers_metadata.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import pytest

from aircox_streamer.controllers import Metadata


class TestBaseMetaData:
    @pytest.mark.django_db
    def test_is_playing(self, metadata):
        metadata.status = "playing"
        assert metadata.is_playing

    @pytest.mark.django_db
    def test_is_playing_false(self, metadata):
        metadata.status = "other"
        assert not metadata.is_playing

    @pytest.mark.django_db
    def test_fetch(self, controller, metadata, metadata_data, metadata_string):
        controller.connector.socket.recv_data = metadata_string
        metadata.fetch()
        assert metadata.uri == metadata_data["initial_uri"]

    @pytest.mark.django_db
    def test_validate_status_playing(self, controller, metadata):
        controller.source = metadata
        assert metadata.validate_status("playing") == "playing"

    @pytest.mark.django_db
    def test_validate_status_paused(self, controller, metadata):
        controller.source = Metadata(controller, metadata.rid + 1)
        assert metadata.validate_status("playing") == "paused"

    @pytest.mark.django_db
    def test_validate_status_stopped(self, controller, metadata):
        controller.source = Metadata(controller, 2)
        assert metadata.validate_status("") == "stopped"
        assert metadata.validate_status("any") == "stopped"

    @pytest.mark.django_db
    def test_validate_air_time(
        self, metadata, metadata_data, metadata_data_air_time
    ):
        air_time = metadata_data["on_air"]
        result = metadata.validate_air_time(air_time)
        assert result == metadata_data_air_time

    @pytest.mark.django_db
    def test_validate_air_time_none(self, metadata):
        assert metadata.validate_air_time("") is None

    @pytest.mark.django_db
    def test_validate(self, metadata, metadata_data, metadata_data_air_time):
        metadata.validate(metadata_data)
        assert metadata.uri == metadata_data["initial_uri"]
        assert metadata.air_time == metadata_data_air_time
        # controller.source != metadata + status = "playing"
        # => status == "paused"
        assert metadata.status == "paused"
        assert metadata.request_status == "playing"

aircox_streamer/tests/test_controllers_monitor.py (new file, 251 lines)
@@ -0,0 +1,251 @@
from django.utils import timezone as tz

import pytest
from model_bakery import baker

from aircox import models
from aircox.test import interface
from aircox_streamer import controllers


@pytest.fixture
def monitor(streamer):
    streamer.calls = {}
    return controllers.Monitor(
        streamer,
        tz.timedelta(seconds=10),
        cancel_timeout=tz.timedelta(minutes=10),
        sync_timeout=tz.timedelta(minutes=5),
    )


@pytest.fixture
def diffusion(program, episode):
    return baker.make(
        models.Diffusion,
        program=program,
        episode=episode,
        start=tz.now() - tz.timedelta(minutes=10),
        end=tz.now() + tz.timedelta(minutes=30),
        schedule=None,
        type=models.Diffusion.TYPE_ON_AIR,
    )


@pytest.fixture
def source(monitor, streamer, sound, diffusion):
    source = next(monitor.streamer.playlists)
    source.uri = sound.file.path
    source.episode_id = sound.episode_id
    source.air_time = diffusion.start + tz.timedelta(seconds=10)
    return source


@pytest.fixture
def tracks(sound):
    items = [
        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
        for i in range(0, 4)
    ]
    models.Track.objects.bulk_create(items)
    return items


@pytest.fixture
def log(station, source, sound):
    return baker.make(
        models.Log,
        station=station,
        type=models.Log.TYPE_START,
        sound=sound,
        source=source.id,
    )


class TestMonitor:
    @pytest.mark.django_db(transaction=True)
    def test_last_diff_start(self, monitor):
        pass

    @pytest.mark.django_db(transaction=True)
    def test___init__(self, monitor):
        assert isinstance(monitor.logs, models.LogQuerySet)
        assert isinstance(monitor.last_sound_logs, dict)

    @pytest.mark.django_db(transaction=True)
    def test_get_logs_queryset(self, monitor, station, sounds):
        query = monitor.get_logs_queryset()
        assert all(log.station_id == station.pk for log in query)

    @pytest.mark.django_db(transaction=True)
    def test_init_last_sound_logs(self, monitor, source, log):
        monitor.init_last_sound_logs()
        assert monitor.last_sound_logs[source.id] == log

    @pytest.mark.django_db(transaction=True)
    def test_monitor(self, monitor, source, log, sound):
        monitor.streamer.is_ready = True
        monitor.streamer.source = source
        interface(
            monitor,
            {
                "trace_sound": log,
                "trace_tracks": None,
                "handle_diffusions": None,
                "sync": None,
            },
        )

        monitor.monitor()
        assert monitor.streamer.calls.get("fetch")
        assert monitor.calls["trace_sound"] == ((source,), {})
        assert monitor.calls["trace_tracks"] == ((log,), {})
        assert monitor.calls["handle_diffusions"]
        assert monitor.calls["sync"]

    @pytest.mark.django_db(transaction=True)
    def test_monitor_streamer_not_ready(self, monitor):
        monitor.streamer.is_ready = False
        interface(
            monitor,
            {
                "trace_sound": log,
                "trace_tracks": None,
                "handle_diffusions": None,
                "sync": None,
            },
        )

        monitor.monitor()
        assert not monitor.streamer.calls.get("fetch")
        assert monitor.calls["trace_sound"] is None
        assert monitor.calls["trace_tracks"] is None
        assert not monitor.calls["handle_diffusions"]
        assert not monitor.calls["sync"]

    @pytest.mark.django_db(transaction=True)
    def test_monitor_no_source_uri(self, monitor, source, log):
        source.uri = None
        monitor.streamer.is_ready = True
        monitor.streamer.source = source
        interface(
            monitor,
            {
                "trace_sound": log,
                "trace_tracks": None,
                "handle_diffusions": None,
                "sync": None,
            },
        )

        monitor.monitor()
        assert monitor.streamer.calls.get("fetch")
        assert monitor.calls["trace_sound"] is None
        assert monitor.calls["trace_tracks"] is None
        assert monitor.calls["handle_diffusions"]
        assert monitor.calls["sync"]

    @pytest.mark.django_db(transaction=True)
    def test_trace_sound(self, monitor, diffusion, source, sound):
        monitor.last_sound_logs[source.id] = None

        result = monitor.trace_sound(source)
        assert result.type == models.Log.TYPE_ON_AIR
        assert result.source == source.id
        assert result.sound == sound
        assert result.diffusion == diffusion

    @pytest.mark.django_db(transaction=True)
    def test_trace_sound_returns_last_log(self, monitor, source, sound, log):
        log.sound = sound
        monitor.last_sound_logs[source.id] = log

        result = monitor.trace_sound(source)
        assert result == log

    @pytest.mark.django_db(transaction=True)
    def test_trace_tracks(self, monitor, log, tracks):
        interface(monitor, {"log": None})
        for track in tracks:
            log.date = tz.now() - tz.timedelta(seconds=track.timestamp + 5)
            monitor.trace_tracks(log)

        assert monitor.calls["log"]
        log_by_track = [call[1].get("track") for call in monitor.calls["log"]]
        # only one call of log
        assert all(log_by_track.count(track) for track in tracks)

    @pytest.mark.django_db(transaction=True)
    def test_trace_tracks_returns_on_log_diffusion(
        self, monitor, log, diffusion, tracks
    ):
        log.diffusion = None
        monitor.trace_tracks(log)

    @pytest.mark.django_db(transaction=True)
    def test_trace_tracks_returns_on_no_tracks_exists(self, monitor, log):
        log.diffusion = None
        monitor.trace_tracks(log)

    @pytest.mark.django_db(transaction=True)
    def test_handle_diffusions(self, monitor):
        pass

    @pytest.mark.django_db(transaction=True)
    def test_log(self, monitor, source):
        log = monitor.log("source", type=models.Log.TYPE_START, comment="test")
        assert log.source == "source"
        assert log.type == models.Log.TYPE_START
        assert log.comment == "test"

    @pytest.mark.django_db(transaction=True)
    def test_start_diff(
        self, monitor, diffusion, source, episode, sound, tracks
    ):
        result = {}
        monitor.log = lambda **kw: result.update(kw)

        monitor.start_diff(source, diffusion)
        assert source.calls["push"] == (sound.file.path,)
        assert result == {
            "type": models.Log.TYPE_START,
            "source": source.id,
            "diffusion": diffusion,
            "comment": str(diffusion),
        }

    @pytest.mark.django_db(transaction=True)
    def test_cancel_diff(self, monitor, source, diffusion):
        result = {}
        monitor.log = lambda **kw: result.update(kw)

        monitor.cancel_diff(source, diffusion)
        assert diffusion.type == models.Log.TYPE_CANCEL
        assert result == {
            "type": models.Log.TYPE_CANCEL,
            "source": source.id,
            "diffusion": diffusion,
            "comment": str(diffusion),
        }

    @pytest.mark.django_db(transaction=True)
    def test_sync(self, monitor):
        now = tz.now()
        monitor.sync_next = now - tz.timedelta(minutes=1)
        monitor.sync()

        assert monitor.sync_next >= now + monitor.sync_timeout
        assert all(
            source.calls.get("sync") for source in monitor.streamer.playlists
        )

    @pytest.mark.django_db(transaction=True)
    def test_sync_timeout_not_reached_skip_sync(self, monitor):
        monitor.sync_next = tz.now() + tz.timedelta(
            seconds=monitor.sync_timeout.total_seconds() + 20
        )
        monitor.sync()
        assert all(
            not source.calls.get("sync")
            for source in monitor.streamer.playlists
        )

aircox_streamer/tests/test_controllers_sources.py (new file, 146 lines)
@@ -0,0 +1,146 @@
import os

import pytest

from aircox_streamer.controllers import (
    Source,
    PlaylistSource,
    QueueSource,
    Request,
)


@pytest.fixture
def source(controller):
    return Source(controller, 13)


@pytest.fixture
def playlist_source(controller, program):
    return PlaylistSource(controller, 14, program)


@pytest.fixture
def queue_source(controller):
    return QueueSource(controller, 15)


class TestSource:
    @pytest.mark.django_db
    def test_station(self, source, station):
        assert source.station == station

    @pytest.mark.django_db
    def test_fetch(self, socket, source, metadata_string):
        remaining = 3.12
        socket.recv_data = [
            f"{remaining} END",
            metadata_string,
        ]

        source.fetch()
        assert socket.is_sent(f"{source.id}.remaining")
        assert socket.is_sent(f"{source.id}.get")

        assert source.remaining == remaining
        assert source.request_status

    @pytest.mark.django_db
    def test_skip(self, socket, source):
        socket.recv_data = "\nEND"
        source.skip()
        assert socket.is_sent(f"{source.id}.skip\n")

    @pytest.mark.django_db
    def test_restart(self, socket, source):
        source.restart()
        prefix = f"{source.id}.seek"
        assert any(r for r in socket.sent_data if r.startswith(prefix))

    @pytest.mark.django_db
    def test_seek(self, socket, source):
        source.seek(10)
        assert socket.is_sent(f"{source.id}.seek 10")


class TestPlaylistSource:
    @pytest.mark.django_db
    def test_get_sound_queryset(self, playlist_source, sounds):
        query = playlist_source.get_sound_queryset()
        assert all(
            r.program_id == playlist_source.program.pk
            and r.type == r.TYPE_ARCHIVE
            for r in query
        )

    @pytest.mark.django_db
    def test_get_playlist(self, playlist_source, sounds):
        expected = {r.file.path for r in sounds}
        query = playlist_source.get_playlist()
        assert all(r in expected for r in query)

    @pytest.mark.django_db
    def test_write_playlist(self, playlist_source):
        playlist = ["/tmp/a", "/tmp/b"]
        playlist_source.write_playlist(playlist)
        with open(playlist_source.path, "r") as file:
            result = file.read()
        os.remove(playlist_source.path)

        assert result == "\n".join(playlist)

    @pytest.mark.django_db
    def test_stream(self, playlist_source, stream):
        result = playlist_source.stream()
        assert result == {
            "begin": stream.begin.strftime("%Hh%M"),
            "end": stream.end.strftime("%Hh%M"),
            "delay": 0,
        }

    @pytest.mark.django_db
    def test_sync(self, playlist_source):
        # spoof method
        playlist = ["/tmp/a", "/tmp/b"]
        written_playlist = []
        playlist_source.get_playlist = lambda: playlist
        playlist_source.write_playlist = lambda p: written_playlist.extend(p)

        playlist_source.sync()
        assert written_playlist == playlist


class TestQueueSource:
    @pytest.mark.django_db
    def test_requests(self, queue_source, socket, metadata_string):
        queue_source.queue = [13, 14, 15]
        socket.recv_data = [
            f"{metadata_string}\nEND" for _ in queue_source.queue
        ]

        requests = queue_source.requests

        assert all(isinstance(r, Request) for r in requests)
        assert all(r.uri for r in requests)

    @pytest.mark.django_db
    def test_push(self, queue_source, socket):
        paths = ["/tmp/a", "/tmp/b"]
        queue_source.push(*paths)
        assert all(
            socket.is_sent(f"{queue_source.id}_queue.push {path}")
            for path in paths
        )

    @pytest.mark.django_db
    def test_fetch(self, queue_source, socket, metadata_string):
        queue = ["13", "14", "15"]
        socket.recv_data = [
            # Source fetch remaining & metadata
            "13 END",
            metadata_string,
            " ".join(queue) + "\nEND",
        ]
        queue_source.fetch()
        assert queue_source.uri
        assert queue_source.queue == queue

aircox_streamer/tests/test_controllers_streamer.py (new file, 150 lines)
@@ -0,0 +1,150 @@
import os

import pytest

from aircox_streamer import controllers
from . import fake_modules
from .fake_modules import atexit, subprocess, psutil


class FakeSource:
    synced = False

    def sync(self):
        self.synced = True


@pytest.fixture
def streamer(station, connector, station_ports, stream):
    fake_modules.setup()
    streamer = controllers.Streamer(station, connector)
    psutil.net_connections.result = [
        psutil.FakeNetConnection(streamer.socket_path, None),
    ]
    yield streamer
    fake_modules.setdown()


class TestStreamer:
    @pytest.mark.django_db
    def test_socket_path(self, streamer):
        assert streamer.socket_path == streamer.connector.address

    @pytest.mark.django_db
    def test_is_ready(self, streamer, socket):
        socket.recv_data = "item 1\nEND"
        assert streamer.is_ready

    @pytest.mark.django_db
    def test_is_ready_false(self, streamer, socket):
        socket.recv_data = ""
        assert not streamer.is_ready

    @pytest.mark.django_db
    def test_is_running(self, streamer):
        streamer.process = subprocess.FakeProcess()
        streamer.process.poll_result = None
        assert streamer.is_running

    @pytest.mark.django_db
    def test_is_running_no_process(self, streamer):
        streamer.process = None
        assert not streamer.is_running

    @pytest.mark.django_db
    def test_is_running_process_died(self, streamer):
        process = subprocess.FakeProcess()
        process.poll_result = 1
        streamer.process = process
        assert not streamer.is_running
        assert streamer.process is None
        assert process.polled

    @pytest.mark.django_db
    def test_playlists(self, streamer, program):
        result = list(streamer.playlists)
        assert len(result) == 1

        result = result[0]
        assert isinstance(result, controllers.PlaylistSource)
        assert result.program == program

    @pytest.mark.django_db
    def test_queues(self, streamer):
        result = list(streamer.queues)
        assert len(result) == 1
        assert result[0] == streamer.dealer

    @pytest.mark.django_db
    def test_init_sources(self, streamer, program):
        streamer.init_sources()
        assert isinstance(streamer.dealer, controllers.QueueSource)
        # one for dealer, one for program
        assert len(streamer.sources) == 2
        assert streamer.sources[1].program == program

    @pytest.mark.django_db
    def test_make_config(self, streamer):
        streamer.make_config()
        assert os.path.exists(streamer.path)

    @pytest.mark.django_db
    def test_sync(self, streamer):
        streamer.sources = [FakeSource(), FakeSource()]
        streamer.sync()
        assert all(source.synced for source in streamer.sources)

    @pytest.mark.django_db
    def test_fetch(self, streamer):
        pass

    @pytest.mark.django_db
    def test_get_process_args(self, streamer):
        assert streamer.get_process_args() == [
            "liquidsoap",
            "-v",
            streamer.path,
        ]

    @pytest.mark.django_db
    def test_check_zombie_process(self, streamer):
        with open(streamer.socket_path, "w+") as file:
            file.write("data")
        # This test is incomplete, but we can not go further because os module
        # is not spoofed (too much work) to check if os.kill is called.
        streamer.check_zombie_process()

    @pytest.mark.django_db
    def test_check_zombie_process_no_socket(self, streamer):
        if os.path.exists(streamer.socket_path):
            os.remove(streamer.socket_path)
        streamer.check_zombie_process()

    @pytest.mark.django_db
    def test_run_process(self, streamer):
        if os.path.exists(streamer.socket_path):
            os.remove(streamer.socket_path)
        streamer.run_process()
        process = streamer.process

        assert process.args == streamer.get_process_args()
        assert streamer.kill_process in atexit.registered

    @pytest.mark.django_db
    def test_kill_process(self, streamer):
        streamer.run_process()
        process = streamer.process
        streamer.kill_process()

        assert process.killed
        assert streamer.process is None
        assert streamer.kill_process not in atexit.registered

    @pytest.mark.django_db
    def test_wait_process(self, streamer):
        process = subprocess.FakeProcess()
        streamer.process = process
        streamer.wait_process()

        assert process.waited
        assert streamer.process is None

aircox_streamer/tests/test_controllers_streamers.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from datetime import timedelta

from django.utils import timezone as tz
import pytest


class TestStreamers:
    @pytest.mark.django_db
    def test___init__(self, streamers):
        assert isinstance(streamers.timeout, timedelta)

    @pytest.mark.django_db
    def test_reset(self, streamers, stations):
        streamers.reset()
        assert all(
            streamers.streamers[station.pk] == station for station in stations
        )

    @pytest.mark.django_db
    def test_fetch(self, streamers):
        streamers.next_date = tz.now() - tz.timedelta(seconds=30)
        streamers.streamers = None

        now = tz.now()
        streamers.fetch()

        assert all(streamer.calls.get("fetch") for streamer in streamers)
        assert streamers.next_date > now

    @pytest.mark.django_db
    def test_fetch_timeout_not_reached(self, streamers):
        next_date = tz.now() + tz.timedelta(seconds=30)
        streamers.next_date = next_date

        streamers.fetch()
        assert all(not streamer.calls.get("fetch") for streamer in streamers)
        assert streamers.next_date == next_date

aircox_streamer/tests/test_viewsets.py (new file, 185 lines)
@@ -0,0 +1,185 @@
import pytest

from django.http import Http404

from rest_framework.exceptions import ValidationError
from aircox_streamer.viewsets import (
    ControllerViewSet,
    SourceViewSet,
    StreamerViewSet,
    QueueSourceViewSet,
)


class FakeSerializer:
    def __init__(self, instance, *args, **kwargs):
        self.instance = instance
        self.data = {"instance": self.instance}
        self.args = args
        self.kwargs = kwargs


class FakeRequest:
    def __init__(self, **kwargs):
        self.__dict__.update(**kwargs)


@pytest.fixture
def controller_viewset(streamers, station):
    return ControllerViewSet(
        streamers=streamers,
        streamer=streamers[station.pk],
        serializer_class=FakeSerializer,
    )


@pytest.fixture
def streamer_viewset(streamers, station):
    return StreamerViewSet(
        streamers=streamers,
        streamer=streamers[station.pk],
        serializer_class=FakeSerializer,
    )


@pytest.fixture
def source_viewset(streamers, station):
    return SourceViewSet(
        streamers=streamers,
        streamer=streamers[station.pk],
        serializer_class=FakeSerializer,
    )


@pytest.fixture
def queue_source_viewset(streamers, station):
    return QueueSourceViewSet(
        streamers=streamers,
        streamer=streamers[station.pk],
        serializer_class=FakeSerializer,
    )


class TestControllerViewSet:
    @pytest.mark.django_db
    def test_get_streamer(self, controller_viewset, stations):
        station = stations[0]
        streamer = controller_viewset.get_streamer(station.pk)
        assert streamer.station.pk == station.pk
        assert streamer.calls.get("fetch")

    @pytest.mark.django_db
    def test_get_streamer_station_not_found(self, controller_viewset):
        controller_viewset.streamers.streamers = {}
        with pytest.raises(Http404):
            controller_viewset.get_streamer(1)

    @pytest.mark.django_db
    def test_get_serializer(self, controller_viewset):
        controller_viewset.object = {"object": "value"}
        serializer = controller_viewset.get_serializer(test=True)
        assert serializer.kwargs["test"]
        assert serializer.instance == controller_viewset.object

    @pytest.mark.django_db
    def test_serialize(self, controller_viewset):
        instance = {}
        data = controller_viewset.serialize(instance, test=True)
        assert data == {"instance": instance}


class TestStreamerViewSet:
    @pytest.mark.django_db
    def test_retrieve(self, streamer_viewset):
        streamer_viewset.streamer = {"streamer": "test"}
        resp = streamer_viewset.retrieve(None, None)
        assert resp.data == {"instance": streamer_viewset.streamer}

    @pytest.mark.django_db
    def test_list(self, streamer_viewset):
        streamers = {"a": 1, "b": 2}
        streamer_viewset.streamers.streamers = streamers
        resp = streamer_viewset.list(None)
        assert set(resp.data["results"]["instance"]) == set(streamers.values())


class TestSourceViewSet:
    @pytest.mark.django_db
    def test_get_sources(self, source_viewset, streamers):
        source_viewset.streamer.sources.append(45)
        sources = source_viewset.get_sources()
        assert 45 not in set(sources)

    @pytest.mark.django_db
    def test_get_source(self, source_viewset):
        source = source_viewset.get_source(1)
        assert source.id == 1

    @pytest.mark.django_db
    def test_get_source_not_found(self, source_viewset):
        with pytest.raises(Http404):
            source_viewset.get_source(1000)

    @pytest.mark.django_db
    def test_retrieve(self, source_viewset, station):
        resp = source_viewset.retrieve(None, 0)
        source = source_viewset.streamers[station.pk].sources[0]
        # FakeSerializer is used, so resp.data exposes the instance directly
        assert resp.data["instance"] == source

    @pytest.mark.django_db
    def test_list(self, source_viewset, station):
        sources = source_viewset.streamers[station.pk].sources
        resp = source_viewset.list(None)
        assert list(resp.data["results"]["instance"]) == sources

    @pytest.mark.django_db
    def test__run(self, source_viewset):
        calls = {}

        def action(x):
            return calls.setdefault("action", True)

        source_viewset._run(0, action)
        assert calls.get("action")

    @pytest.mark.django_db
    def test_all_api_source_actions(self, source_viewset, station):
        source = source_viewset.streamers[station.pk].sources[0]
        request = FakeRequest(POST={"seek": 1})
        source_viewset.get_source = lambda x: source

        for action in ("sync", "skip", "restart", "seek"):
            func = getattr(source_viewset, action)
            func(request, 1)
            assert source.calls.get(action)


class TestQueueSourceViewSet:
    @pytest.mark.django_db
    def test_get_sound_queryset(self, queue_source_viewset, station, sounds):
        ids = {sound.pk for sound in sounds}
        request = FakeRequest(station=station)
        query = queue_source_viewset.get_sound_queryset(request)
        assert set(query.values_list("pk", flat=True)) == ids

    @pytest.mark.django_db
    def test_push(self, queue_source_viewset, station, sounds):
        calls = {}
        sound = sounds[0]
        request = FakeRequest(station=station, data={"sound_id": sound.pk})
        queue_source_viewset._run = lambda pk, func: calls.setdefault(
            "_run", (pk, func)
        )
        result = queue_source_viewset.push(request, 13)
        assert "_run" in calls
        assert result[0] == 13
        assert callable(result[1])

    @pytest.mark.django_db
    def test_push_missing_sound_in_request_post(
        self, queue_source_viewset, station
    ):
        request = FakeRequest(station=station, data={})
        with pytest.raises(ValidationError):
            queue_source_viewset.push(request, 0)

aircox_streamer/tests/working_dir/keepme.txt (new file, 0 lines)