@@ -42,6 +42,7 @@ class LogArchiver:
         # exists yet <3
         for (station, date), logs in logs.items():
             path = self.get_path(station, date)
+            # FIXME: remove binary mode
             with gzip.open(path, "ab") as archive:
                 data = yaml.dump(
                     [self.serialize(line) for line in logs]
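A quick aside on the append mode used in this hunk: each gzip.open(path, "ab") call adds a new gzip member to the same file, and Python's gzip module reads the members back as one stream. A minimal runnable sketch (temporary path and dummy payloads, not project code):

    import gzip, os, tempfile

    path = os.path.join(tempfile.mkdtemp(), "logs.gz")
    for chunk in (b"first\n", b"second\n"):
        # each append creates an additional gzip member in the same file
        with gzip.open(path, "ab") as fd:
            fd.write(chunk)
    with gzip.open(path, "rb") as fd:
        assert fd.read() == b"first\nsecond\n"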
@@ -60,11 +61,8 @@ class LogArchiver:
         qs = qs.order_by("date")
         logs = {}
         for log in qs:
-            key = (log.station, log.date)
-            if key not in logs:
-                logs[key] = [log]
-            else:
-                logs[key].append(log)
+            key = (log.station, log.date.date())
+            logs.setdefault(key, []).append(log)
         return logs
 
     def serialize(self, log):
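A minimal sketch of the grouping idiom this hunk switches to: dict.setdefault returns the existing list or installs a new one, so the previous if/else branch is unnecessary (plain tuples stand in for Log rows):

    rows = [("a", 1, "x"), ("a", 1, "y"), ("b", 2, "z")]
    logs = {}
    for station, day, item in rows:
        logs.setdefault((station, day), []).append(item)
    assert logs == {("a", 1): ["x", "y"], ("b", 2): ["z"]}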
@@ -73,13 +71,13 @@ class LogArchiver:
 
     def load(self, station, date):
         """Load an archive returning logs in a list."""
-        from aircox.models import Log
-
         path = self.get_path(station, date)
 
         if not os.path.exists(path):
             return []
+        return self.load_file(path)
 
+    def load_file(self, path):
         with gzip.open(path, "rb") as archive:
             data = archive.read()
             logs = yaml.load(data)
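A hedged side note on the unchanged yaml.load(data) call kept by this hunk: recent PyYAML releases require an explicit Loader, so a safer spelling would be something like the following (dummy data, not project code):

    import yaml

    data = b"- {id: 1}\n- {id: 2}\n"
    # explicit Loader (or simply yaml.safe_load(data))
    logs = yaml.load(data, Loader=yaml.SafeLoader)
    assert logs == [{"id": 1}, {"id": 2}]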
@@ -110,5 +108,5 @@ class LogArchiver:
         """From a list of dict representing logs, retrieve related objects of
         the given type."""
         attr_id = attr + "_id"
-        pks = (log[attr_id] for log in logs if attr_id in log)
+        pks = {log[attr_id] for log in logs if attr_id in log}
         return {rel.pk: rel for rel in model.objects.filter(pk__in=pks)}
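The switch from a generator to a set comprehension deduplicates the ids before they reach pk__in; a tiny sketch with plain dicts:

    logs = [{"track_id": 1}, {"track_id": 1}, {"diffusion_id": 2}]
    pks = {log["track_id"] for log in logs if "track_id" in log}
    assert pks == {1}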
@@ -24,16 +24,30 @@ class SoxStats:
         "Length s",
     ]
 
-    def __init__(self, path, **kwargs):
+    values = None
+
+    def __init__(self, path=None, **kwargs):
         """If path is given, call analyse with path and kwargs."""
-        self.values = {}
         if path:
             self.analyse(path, **kwargs)
 
-    def get(self, attr):
-        return self.values.get(attr)
+    def analyse(self, path, at=None, length=None):
+        """If at and length are given use them as excerpt to analyse."""
+        args = ["sox", path, "-n"]
+        if at is not None and length is not None:
+            args += ["trim", str(at), str(length)]
+        args.append("stats")
+
+        p = subprocess.Popen(
+            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
+        # sox outputs to stderr (my god WHYYYY)
+        out_, out = p.communicate()
+        self.values = self.parse(str(out, encoding="utf-8"))
 
     def parse(self, output):
+        """Parse sox output, setting values from it."""
+        values = {}
         for attr in self.attributes:
             value = re.search(attr + r"\s+(?P<value>\S+)", output)
             value = value and value.groupdict()
@@ -42,24 +56,12 @@ class SoxStats:
                 value = float(value.get("value"))
             except ValueError:
                 value = None
-            self.values[attr] = value
-        self.values["length"] = self.values["Length s"]
+            values[attr] = value
+        values["length"] = values.pop("Length s", None)
+        return values
 
-    def analyse(self, path, at=None, length=None):
-        """If at and length are given use them as excerpt to analyse."""
-        args = ["sox", path, "-n"]
-
-        if at is not None and length is not None:
-            args += ["trim", str(at), str(length)]
-
-        args.append("stats")
-
-        p = subprocess.Popen(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
-        )
-        # sox outputs to stderr (my god WHYYYY)
-        out_, out = p.communicate()
-        self.parse(str(out, encoding="utf-8"))
+    def get(self, attr):
+        return self.values.get(attr)
 
 
 class SoundStats:
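A hedged usage sketch of the refactored SoxStats: the constructor now takes an optional path, analyse stores the parsed dict on values, and get reads from it. It needs the sox binary on PATH, and "sample.wav" is a placeholder file:

    from aircox.controllers.sound_stats import SoxStats

    # runs: sox sample.wav -n trim 0 120 stats
    stats = SoxStats("sample.wav", at=0, length=120)
    print(stats.get("length"), stats.get("RMS lev dB"))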
@@ -3,7 +3,7 @@ from collections import namedtuple
 import inspect
 
 
-__all__ = ("interface", "Interface")
+__all__ = ("interface", "Interface", "File")
 
 
 def interface(obj, funcs):
@@ -233,6 +233,7 @@ class Interface:
     def _irelease(self):
         """Shortcut to `self._imeta.release`."""
         self._imeta.release()
+        self._imeta.reset()
 
     def _trace(self, *args, **kw):
         """Shortcut to `self._imeta.get_trace`."""
@@ -266,3 +267,23 @@ class Interface:
     def __str__(self):
         iface = super().__str__()
         return f"{iface}::{self._imeta.target}"
+
+
+class File:
+    def __init__(self, data=""):
+        self.data = data
+
+    def read(self):
+        return self.data
+
+    def write(self, data):
+        self.data += data
+
+    def close(self):
+        self.data = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *_, **__):
+        pass
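The new File helper is a small in-memory stand-in for file objects (the gzip fixture in the log archiver tests below injects one in place of gzip.open); its behaviour in isolation:

    f = File(data=b"")
    with f as fd:
        fd.write(b"payload")       # write() appends to .data
    assert f.read() == b"payload"  # __exit__ does not clear it; close() would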
@@ -115,3 +115,19 @@ def podcasts(episodes):
 @pytest.fixture
 def sound(program):
     return baker.make(models.Sound, file="tmp/test.wav", program=program)
+
+
+@pytest.fixture
+def tracks(episode, sound):
+    items = [
+        baker.prepare(
+            models.Track, episode=episode, position=i, timestamp=i * 60
+        )
+        for i in range(0, 3)
+    ]
+    items += [
+        baker.prepare(models.Track, sound=sound, position=i, timestamp=i * 60)
+        for i in range(0, 3)
+    ]
+    models.Track.objects.bulk_create(items)
+    return items
aircox/tests/controllers/playlist.csv (new file, 3 lines)
@@ -0,0 +1,3 @@
+Artist 1;Title 1;1;0;tag1,tag12;info1
+Artist 2;Title 2;2;1;tag2,tag12;info2
+Artist 3;Title 3;3;2;;
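For reference, the semicolon-separated columns map onto the field names used by the tests below; a runnable sketch of that mapping (the actual reader inside PlaylistImport may differ):

    import csv, io

    fields = ("artist", "title", "minutes", "seconds", "tags", "info")
    row = "Artist 1;Title 1;1;0;tag1,tag12;info1"
    record = dict(zip(fields, next(csv.reader(io.StringIO(row), delimiter=";"))))
    assert record["artist"] == "Artist 1" and record["tags"] == "tag1,tag12"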
@@ -1,37 +1,110 @@
-import pytest
+from django.utils import timezone as tz
 
-from aircox.controllers.log_archiver import LogArchiver
+import pytest
+from model_bakery import baker
+
+from aircox import models
+from aircox.test import Interface, File
+from aircox.controllers import log_archiver
 
 
 @pytest.fixture
-def log_archiver():
-    return LogArchiver()
+def diffusions(episodes):
+    items = [
+        baker.prepare(
+            models.Diffusion,
+            program=episode.program,
+            episode=episode,
+            type=models.Diffusion.TYPE_ON_AIR,
+        )
+        for episode in episodes
+    ]
+    models.Diffusion.objects.bulk_create(items)
+    return items
+
+
+@pytest.fixture
+def logs(diffusions, sound, tracks):
+    now = tz.now()
+    station = diffusions[0].program.station
+    items = [
+        models.Log(
+            station=diffusion.program.station,
+            type=models.Log.TYPE_START,
+            date=now + tz.timedelta(hours=-10, minutes=i),
+            source="13",
+            diffusion=diffusion,
+        )
+        for i, diffusion in enumerate(diffusions)
+    ]
+    items += [
+        models.Log(
+            station=station,
+            type=models.Log.TYPE_ON_AIR,
+            date=now + tz.timedelta(hours=-9, minutes=i),
+            source="14",
+            track=track,
+            sound=track.sound,
+        )
+        for i, track in enumerate(tracks)
+    ]
+    models.Log.objects.bulk_create(items)
+    return items
+
+
+@pytest.fixture
+def logs_qs(logs):
+    return models.Log.objects.filter(pk__in=(r.pk for r in logs))
+
+
+@pytest.fixture
+def file():
+    return File(data=b"")
+
+
+@pytest.fixture
+def gzip(file):
+    gzip = Interface.inject(log_archiver, "gzip", {"open": file})
+    yield gzip
+    gzip._irelease()
+
+
+@pytest.fixture
+def archiver():
+    return log_archiver.LogArchiver()
 
 
 class TestLogArchiver:
-    def test_get_path(self):
-        pass
-
-    def test_archive(self):
-        pass
-
-    def test_archive_no_qs(self):
-        pass
+    @pytest.mark.django_db
+    def test_archive_then_load_file(self, archiver, file, gzip, logs, logs_qs):
+        # before logs are deleted from db, get data
+        sorted = archiver.sort_logs(logs_qs)
+        paths = {
+            archiver.get_path(station, date) for station, date in sorted.keys()
+        }
+
+        count = archiver.archive(logs_qs, keep=False)
+        assert count == len(logs)
+        assert not logs_qs.count()
+        assert all(
+            path in paths for path, *_ in gzip._traces("open", args=True)
+        )
+
+        results = archiver.load_file("dummy path")
+        assert results
 
-    def test_archive_not_keep(self):
-        pass
+    @pytest.mark.django_db
+    def test_archive_no_qs(self, archiver):
+        count = archiver.archive(models.Log.objects.none())
+        assert not count
 
-    def test_sort_log(self):
-        pass
-
-    def test_serialize(self):
-        pass
-
-    def test_load(self):
-        pass
-
-    def test_load_file_not_exists(self):
-        pass
-
-    def test_get_relations(self):
-        pass
+    @pytest.mark.django_db
+    def test_sort_log(self, archiver, logs_qs):
+        sorted = archiver.sort_logs(logs_qs)
+
+        assert sorted
+        for (station, date), logs in sorted.items():
+            assert all(
+                log.station == station and log.date.date() == date
+                for log in logs
+            )
@@ -1,22 +1,64 @@
+import os
 import pytest
 
-from aircox.controller.playlist_import import PlaylistImport
+from aircox.test import Interface
+from aircox.controllers import playlist_import
 
 
+csv_data = [
+    {
+        "artist": "Artist 1",
+        "title": "Title 1",
+        "minutes": "1",
+        "seconds": "0",
+        "tags": "tag1,tag12",
+        "info": "info1",
+    },
+    {
+        "artist": "Artist 2",
+        "title": "Title 2",
+        "minutes": "2",
+        "seconds": "1",
+        "tags": "tag2,tag12",
+        "info": "info2",
+    },
+    {
+        "artist": "Artist 3",
+        "title": "Title 3",
+        "minutes": "3",
+        "seconds": "2",
+        "tags": "",
+        "info": "",
+    },
+]
+
+
 @pytest.fixture
-def playlist_import():
-    return PlaylistImport()
+def importer(sound):
+    path = os.path.join(os.path.dirname(__file__), "playlist.csv")
+    return playlist_import.PlaylistImport(path, sound=sound)
 
 
 class TestPlaylistImport:
-    def test_reset(self):
-        pass
+    @pytest.mark.django_db
+    def test_run(self, importer):
+        iface = Interface(None, {"read": None, "make_playlist": None})
+        importer.read = iface.read
+        importer.make_playlist = iface.make_playlist
+        importer.run()
+        assert iface._trace("read")
+        assert iface._trace("make_playlist")
 
-    def test_run(self):
-        pass
+    @pytest.mark.django_db
+    def test_read(self, importer):
+        importer.read()
+        assert importer.data == csv_data
 
-    def test_read(self):
-        pass
-
-    def make_playlist(self):
-        pass
+    @pytest.mark.django_db
+    def test_make_playlist(self, importer, sound):
+        importer.data = csv_data
+        importer.make_playlist()
+        track_artists = sound.track_set.all().values_list("artist", flat=True)
+        csv_artists = {r["artist"] for r in csv_data}
+        assert set(track_artists) == csv_artists
+        # TODO: check other values
@@ -1,30 +1,75 @@
+import subprocess
+
 import pytest
 
-from aircox.controllers.sound_stats import SoxStats, SoundStats
+from aircox.test import Interface
+from aircox.controllers import sound_stats
+
+
+sox_output = """
+DC offset 0.000000\n
+Min level 0.000000\n
+Max level 0.000000\n
+Pk lev dB -inf\n
+RMS lev dB -inf\n
+RMS Pk dB -inf\n
+RMS Tr dB -inf\n
+Crest factor 1.00\n
+Flat factor 179.37\n
+Pk count 1.86G\n
+Bit-depth 0/0\n
+Num samples 930M\n
+Length s 19383.312\n
+Scale max 1.000000\n
+Window s 0.050\n
+"""
+sox_values = {
+    "DC offset": 0.0,
+    "Min level": 0.0,
+    "Max level": 0.0,
+    "Pk lev dB": float("-inf"),
+    "RMS lev dB": float("-inf"),
+    "RMS Pk dB": float("-inf"),
+    "RMS Tr dB": float("-inf"),
+    "Flat factor": 179.37,
+    "length": 19383.312,
+}
 
 
 @pytest.fixture
-def sox_stats():
-    return SoxStats()
+def sox_interfaces():
+    process = Interface(
+        None, {"communicate": ("", sox_output.encode("utf-8"))}
+    )
+    subprocess = Interface.inject(
+        sound_stats, "subprocess", {"Popen": lambda *_, **__: process}
+    )
+    yield {"process": process, "subprocess": subprocess}
+    subprocess._irelease()
 
 
 @pytest.fixture
-def sound_stats():
-    return SoundStats()
+def sox_stats(sox_interfaces):
+    return sound_stats.SoxStats()
+
+
+@pytest.fixture
+def stats():
+    return sound_stats.SoundStats()
 
 
 class TestSoxStats:
-    def test___init__(self):
-        pass
+    def test_parse(self, sox_stats):
+        values = sox_stats.parse(sox_output)
+        assert values == sox_values
 
-    def test_get(self):
-        pass
-
-    def test_parse(self):
-        pass
-
-    def test_analyse(self):
-        pass
+    def test_analyse(self, sox_stats, sox_interfaces):
+        sox_stats.analyse("fake_path", 1, 2)
+        assert sox_interfaces["subprocess"]._trace("Popen") == (
+            (["sox", "fake_path", "-n", "trim", "1", "2", "stats"],),
+            {"stdout": subprocess.PIPE, "stderr": subprocess.PIPE},
+        )
+        assert sox_stats.values == sox_values
 
 
 class TestSoundStats:
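A runnable sketch of the parsing path exercised by test_parse above: SoxStats.parse picks the first non-space token after each attribute name, and float() accepts "-inf" as well as plain numbers (dummy output, not project code):

    import re

    output = "Pk lev dB -inf\nLength s 19383.312\n"
    for attr in ("Pk lev dB", "Length s"):
        match = re.search(attr + r"\s+(?P<value>\S+)", output)
        print(attr, float(match.groupdict()["value"]))
    # Pk lev dB -inf
    # Length s 19383.312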