include logs in stats; fix bug

parent 280864768b
commit 0ecaa63663
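Overview of the changes in this commit: Monitor resets `on_air` when the source metadata date cannot be parsed; LogManager gets zero-padded archive file names, a `_get_rel_objects` helper and a `load_archive` that rebuilds Log instances with their related objects preloaded; StatisticsView falls back to archived logs when no live logs exist for the requested date; GenericMenu.page_of and TodayMenu guard against menu items that have no `page` attribute.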
@@ -145,6 +145,7 @@ class Monitor:

             is_diff = log.date != on_air
         except:
+            on_air = None
             is_diff = log.source != current_source.id or \
                       (log.sound and log.sound.path != current_sound)

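The added `on_air = None` is the bug fix named in the commit message: when the metadata date cannot be parsed, `on_air` would otherwise keep a stale or undefined value, and the `date = on_air or tz.now()` call in the next hunk relies on it being falsy. A minimal sketch of the pattern; only the `except` branch mirrors the diff, the parsing step and the function wrapper are assumptions:

    from django.utils import timezone as tz

    def check_on_air(raw_date, log, current_source, current_sound):
        # sketch only: the real Monitor obtains and parses the metadata differently
        try:
            on_air = tz.make_aware(tz.datetime.fromisoformat(raw_date))
            is_diff = log.date != on_air
        except:
            # added by this commit: guarantee a clean fallback value so that
            # `date = on_air or tz.now()` later degrades to "now"
            on_air = None
            is_diff = log.source != current_source.id or \
                      (log.sound and log.sound.path != current_sound)
        return on_air, is_diff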
@@ -159,12 +160,11 @@ class Monitor:
         if archives.filter(pk = sound.pk).exists():
             diff = last_diff.diffusion

-
         # log sound on air
         log = self.log(
             type = Log.Type.on_air,
             source = current_source.id,
             date = on_air or tz.now(),
             sound = sound,
             diffusion = diff,
             # if sound is removed, we keep sound path info
@@ -181,7 +181,6 @@ class Monitor:
         Log tracks for the given sound log (for streamed programs).
         Called by self.trace
         """
-        # TODO take restart in account
         tracks = Track.objects.get_for(object = log.sound) \
                       .filter(in_seconds = True)
         if not tracks.exists():
@@ -1242,12 +1242,28 @@ class LogManager(models.Manager):
         # of retrieving archive when it changes
         return os.path.join(
             settings.AIRCOX_LOGS_ARCHIVES_DIR,
-            # FIXME: number format
-            '{}{}{}_{}.log.gz'.format(
-                date.year, date.month, date.day, station.pk
-            )
+            '{}_{}.log.gz'.format(date.strftime("%Y%m%d"), station.pk)
         )

+    @staticmethod
+    def _get_rel_objects(logs, type, attr):
+        """
+        From a list of dict representing logs, retrieve related objects
+        of the given type.
+
+        Example: _get_rel_objects([{..},..], Diffusion, 'diffusion')
+        """
+        attr_id = attr + '_id'
+        return {
+            rel.pk: rel
+            for rel in type.objects.filter(
+                pk__in = (
+                    log[attr_id]
+                    for log in logs if attr_id in log
+                )
+            )
+        }
+
     def load_archive(self, station, date):
         """
         Return archived logs for a specific date as a list
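The removed `# FIXME: number format` marks the bug being fixed: building the archive name from bare `date.year`, `date.month` and `date.day` drops the zero padding, so different dates can collide on the same file name and the names do not sort chronologically; `strftime("%Y%m%d")` always yields eight digits. Illustration (the station pk `1` is a placeholder):

    import datetime

    d1 = datetime.date(2017, 1, 23)
    d2 = datetime.date(2017, 12, 3)

    # old formatting: no zero padding, both dates collide on the same file name
    old1 = '{}{}{}_{}.log.gz'.format(d1.year, d1.month, d1.day, 1)   # '2017123_1.log.gz'
    old2 = '{}{}{}_{}.log.gz'.format(d2.year, d2.month, d2.day, 1)   # '2017123_1.log.gz' as well

    # new formatting: always eight digits, unambiguous and sortable
    new1 = '{}_{}.log.gz'.format(d1.strftime("%Y%m%d"), 1)           # '20170123_1.log.gz'
    new2 = '{}_{}.log.gz'.format(d2.strftime("%Y%m%d"), 1)           # '20171203_1.log.gz'

The new `_get_rel_objects` helper added in the same hunk maps primary keys to model instances with a single query per related model; `load_archive` relies on it in the next hunk.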
@@ -1262,7 +1278,28 @@ class LogManager(models.Manager):
         with gzip.open(path, 'rb') as archive:
             data = archive.read()
             logs = yaml.load(data)
-            return logs
+
+            # we need to preload diffusions, sounds and tracks
+            # we get them all at once, in order to reduce db calls
+            rels = {
+                'diffusion': self._get_rel_objects(logs, Diffusion, 'diffusion'),
+                'sound': self._get_rel_objects(logs, Sound, 'sound'),
+                'track': self._get_rel_objects(logs, Track, 'track'),
+            }
+
+            def rel_obj(log, attr):
+                attr_id = attr + '_id'
+                rel_id = log.get(attr + '_id')
+                return rels[attr][rel_id] if rel_id else None
+
+            # make logs
+            return [
+                Log(diffusion = rel_obj(log, 'diffusion'),
+                    sound = rel_obj(log, 'sound'),
+                    track = rel_obj(log, 'track'),
+                    **log)
+                for log in logs
+            ]

     def make_archive(self, station, date, force = False, keep = False):
         """
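With this change `load_archive` no longer returns the raw YAML dicts but unsaved `Log` instances whose `diffusion`, `sound` and `track` relations are resolved through three bulk queries instead of one lookup per log. A hypothetical consumption sketch; the import path and the placeholder station/date are assumptions, only the `load_archive` call itself appears in the diff:

    import datetime
    from aircox import models

    station = models.Station.objects.first()   # placeholder station
    date = datetime.date(2017, 3, 5)           # day to load from the archive

    # a plain list of unsaved Log objects, not a queryset
    logs = models.Log.objects.load_archive(station, date)
    for log in logs:
        # related objects were attached in bulk while loading, so these
        # accesses do not trigger one query per log
        print(log.date, log.diffusion, log.sound, log.track)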
@@ -201,6 +201,9 @@ class StatisticsView(View,TemplateResponseMixin,LoginRequiredMixin):
         qs = station.raw_on_air(date = date) \
                     .prefetch_related('diffusion', 'sound', 'track',
                                       'track__tags')
+        if not qs.exists():
+            qs = models.Log.objects.load_archive(station, date)
+
         sound_log = None
         for log in qs:
             rel = None
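This statistics view hunk is where the commit message's "include logs in stats" lands: when no on-air logs exist in the database for the requested day, the gzip archive is loaded instead. One caveat worth noting (a reading of the diff, not something stated in it): after the fallback `qs` is a plain list rather than a queryset, so only plain iteration, as in the context lines that follow, works for both branches.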
@@ -225,7 +225,7 @@ class GenericMenu(Menu):

     @staticmethod
     def page_of(item):
-        return item.page
+        return hasattr(item, 'page') and item.page

     def page_url(self, item):
         page = self.page_of(item)
@@ -317,8 +317,9 @@ class TodayMenu(GenericMenu):

         attrs = {}

-        qs = PageRevision.objects.filter(page = item.page)
-        if qs.count():
+        qs = hasattr(item, 'page') and \
+                PageRevision.objects.filter(page = item.page)
+        if qs and qs.count():
             headline = qs.latest('created_at').content_json
             headline = json.loads(headline).get('headline')
             attrs['title'] = headline
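Both menu fixes replace direct attribute access with a guarded expression, so items that carry no `page` (the exact item types are not visible in this diff) yield a falsy value instead of raising AttributeError. A small standalone illustration; `DummyItem` is a placeholder, not an aircox class:

    class DummyItem:
        pass

    item = DummyItem()                      # an item without a `page` attribute

    # old behaviour: item.page raises AttributeError
    # new behaviour: the guard short-circuits to False
    page = hasattr(item, 'page') and item.page
    print(page)                             # False -> callers can simply test `if page:`

In `TodayMenu` the same guard makes `qs` either a queryset or `False`, which is why the condition gains the extra `qs and` before `qs.count()`.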