Added caching of the releases of recordings
@@ -28,7 +28,7 @@ import json
 import django_rq

 from fetcher import musicbrainz as mb
-from utils import get_redis_connection, parse_date
+from utils import get_redis_connection, parse_date, pretty_print_json

 _log = logging.getLogger('fetcher_jobs')
 _log.addHandler(logging.NullHandler())
@@ -74,7 +74,7 @@ def load_entities_of_recording(recording):
     mbid = recording.get('id')

     with get_redis_connection() as redis:
-        if f'recording:{mbid}' not in redis:
+        if f'recording:{mbid}' in redis:
             # The only way a recording gets added to the cache is through its release,
             # so if it is already stored, its release will be too
             return
@@ -82,14 +82,18 @@ def load_entities_of_recording(recording):
         offset = 0
         while True:
             releases = mb.browse_releases({'recording': mbid},
-                                          includes=['recordings', 'artists'],
+                                          includes=['recordings', 'artist-credits'],
                                           limit=100, offset=offset)
+            pretty_print_json(releases)

-            for release in releases:
+            # The only useful thing to do here is to store how many releases the
+            # recording appears on
+            redis.set(f'recording:{mbid}:release:count', releases.get('release_count'))
+            for release in releases.get('releases'):
                 load_entities_of_release.delay(release.get('id'))

             offset += 100
-            if offset > releases.get('release-count'):
+            if offset > releases.get('release_count'):
                 break

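The loop in this hunk pages through the MusicBrainz browse endpoint 100 releases at a time and stops once the offset has moved past the reported total. Below is a minimal sketch of that paging pattern on its own, separate from the patch; `browse` is a hypothetical stand-in for `mb.browse_releases` and is only assumed to return a dict with a `releases` list and a `release_count` total.

def browse_all_releases(browse, batch=100):
    """Yield every release from a paged browse endpoint, batch by batch."""
    offset = 0
    while True:
        page = browse(limit=batch, offset=offset)
        yield from page.get('releases', [])
        offset += batch
        # Same stop condition as the job: once the offset has passed the
        # total reported by the API, there is nothing left to fetch.
        if offset > page.get('release_count', 0):
            break

The key name matters here: the hunk also switches the stop check from releases.get('release-count') to releases.get('release_count'), the same key the new count line stores under recording:<mbid>:release:count.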
@@ -104,7 +108,7 @@ def load_entities_of_release(release):
         # Only load the release if it is not already stored
         if f'release:{mbid}' not in redis:
             if isinstance(release, str):
-                release = mb.get_release_by_mbid(mbid, ['recordings', 'artists'])
+                release = mb.get_release_by_mbid(mbid, ['recordings', 'artist-credits'])
             redis.set(f'release:{mbid}', json.dumps(release))
             _log.info('Release %s was stored in the cache', mbid)
         else:
@@ -135,12 +139,18 @@ def load_entities_of_release(release):
             redis.zadd(f'release:{mbid}:media', {json.dumps(media): media['position']})
             for track in raw_media.get('tracks', []):
                 recording_key = f'release:{mbid}:media:{media.get("position")}:recordings'

                 recording = track.get('recording')
                 recording_id = recording.get('id')
+                recording_artist = redis.get(f'release:{mbid}:artist')

                 redis.set(f'{recording_key}:count', media.get('track_count'))
                 redis.zadd(recording_key, {recording_id: track.get("position")})
                 redis.set(f'recording:{recording_id}', json.dumps(recording))
-                redis.set(f'recording:{recording_id}:release', mbid)
+                redis.set(f'recording:{recording_id}:artist', recording_artist)
+
+                release_date = parse_date(release.get('release_events', [{}])[0].get('date'))
+                redis.zadd(f'recording:{recording_id}:release', {mbid: release_date.timestamp()})


 @django_rq.job
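The added redis.zadd(f'recording:{recording_id}:release', {mbid: release_date.timestamp()}) call keys each release mbid by its release date, so the sorted set keeps a recording's releases in chronological order. A small illustration of that scoring, separate from the patch; the real `parse_date` comes from `utils`, and the stand-in below only assumes it maps a (possibly partial) MusicBrainz date string to a `datetime`.

from datetime import datetime, timezone

def parse_date(value):
    """Hypothetical stand-in for utils.parse_date: accept full or partial
    dates and fall back to the epoch when nothing parses."""
    for fmt in ('%Y-%m-%d', '%Y-%m', '%Y'):
        try:
            return datetime.strptime(value or '', fmt).replace(tzinfo=timezone.utc)
        except ValueError:
            continue
    return datetime(1970, 1, 1, tzinfo=timezone.utc)

# Earlier releases get smaller scores, so a ZRANGE over the set returns them first.
assert parse_date('1969-09-26').timestamp() < parse_date('1987-03').timestamp()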
@@ -385,6 +385,9 @@ def get_artist_of_release(mbid, limit, page):
         if key in redis:
             _log.debug('The artist of release %s was found in redis', mbid)
             mb_artist = get_artist(redis.get(key))
+            # TODO it looks like there is a bug here: mb_artist is loaded with get_artist,
+            # which means it has already been mapped, but it gets mapped again at the end
+            # of the function; that is bound to cause errors either way

         if mb_artist is None:
             _log.debug('The artist of release %s was not found in redis', mbid)
@@ -488,15 +491,46 @@ def get_recordings_of_release(mbid):

 def get_release_of_recording(mbid, limit, page):
     """Gets the release of a recording, including its artist credits"""
-    mb_releases = mb.browse_releases(params={'recording': mbid}, includes=['artists-credits'],
-                                     limit=limit, offset=limit * (page - 1))
+    _log.info('Fetching the releases of recording %s', mbid)
+
+    releases = []
+    offset = limit * (page - 1)
+    total = 0
+
+    with get_redis_connection() as redis:
+        _log.debug('Trying to get the releases of recording %s from redis', mbid)
+        releases_key = f'recording:{mbid}:release'
+        if releases_key in redis or f'{releases_key}:count' in redis:
+            if redis.zcard(releases_key) == int(redis.get(f'{releases_key}:count')):
+                release_mbids = redis.zrange(releases_key, offset, offset + limit)
+                if redis.exists(*[f'release:{mbid}' for mbid in release_mbids]):
+                    total = redis.zcard(releases_key)
+                    releases = [get_release(mbid) for mbid in release_mbids]
+                else:
+                    _log.debug('Not all releases of recording %s are stored', mbid)
+            else:
+                _log.debug('There are not as many releases of recording %s stored as '
+                           'there should be', mbid)
+        else:
+            _log.debug('Neither %s nor %s:count is in redis', releases_key, releases_key)
+
+    if len(releases) == 0:
+        _log.debug('The releases of recording %s were not loaded in redis, falling back '
+                   'to musicbrainz', mbid)
+        mb_releases_browse = mb.browse_releases(params={'recording': mbid},
+                                                includes=['artist-credits'],
+                                                limit=limit, offset=limit * (page - 1))

-    if 'error' in mb_releases:
-        return mb_releases
+        if 'error' in mb_releases_browse:
+            _log.debug('Error while browsing the releases of recording %s: %s',
+                       mbid, mb_releases_browse)
+            return mb_releases_browse
+
+        releases = [map_release(release) for release in mb_releases_browse.get('releases')]
+        jobs.load_entities_of_recording.delay(mbid)

     return {
-        'paginate': paginate(mb_releases.get('release_count', 0), limit, page),
-        'releases': [map_release(release) for release in mb_releases.get('releases')]
+        'paginate': paginate(total, limit, page),
+        'releases': releases
     }
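Together with the write side in the earlier hunks, the new get_release_of_recording only trusts the cache when it is complete: the recording:<mbid>:release sorted set must hold as many members as recording:<mbid>:release:count announces and every referenced release:<mbid> blob must exist; otherwise it falls back to MusicBrainz and queues load_entities_of_recording so later requests can be served from the cache. Below is a condensed, read-only sketch of that check against redis-py, separate from the patch; key names are the ones used in the diff, while get_release, mapping, and pagination are left out.

import json

import redis

r = redis.Redis(decode_responses=True)

def cached_releases(recording_mbid, limit, page):
    """Return the cached releases of a recording, or None on a cache miss."""
    key = f'recording:{recording_mbid}:release'
    expected = r.get(f'{key}:count')
    # The cache is only usable once the background job has stored every release
    if expected is None or r.zcard(key) != int(expected):
        return None
    offset = limit * (page - 1)
    release_mbids = r.zrange(key, offset, offset + limit)  # members are scored by release date
    if not all(r.exists(f'release:{m}') for m in release_mbids):
        return None
    return [json.loads(r.get(f'release:{m}')) for m in release_mbids]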