Continue the cache separation
fetcher/cache.py
@@ -54,15 +54,15 @@ def get_discs_of_artist(mbid, limit, offset):
             jobs.load_artist_on_cache.delay(mbid)
             return None, 0
 
-        release_ids = redis.zrange(key_releases, offset, limit)
+        release_ids = redis.zrange(key_releases, offset, offset + limit)
         keys = [f'release_group:{mbid}' for mbid in release_ids]
         if redis.exists(*keys) != len(keys):
             _log.debug('Aun no se cargan todas las release_groups del artista %s', mbid)
             jobs.load_artist_on_cache.delay(mbid)
-            return None, None
+            return None, 0
 
         _log.info('Se encontraron los discos en redis')
-        return [json.loads(disc) for disc in redis.mget(keys)]
+        return [json.loads(disc) for disc in redis.mget(keys)], count
 
 
 def get_artist_of_disc(mbid):
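For reference, redis-py's zrange treats both the start and stop index as inclusive, which is what the paging arithmetic above relies on. A standalone sketch of that behavior (the demo key name and client setup here are made up, not part of this repo):

# Sketch only; assumes a plain redis-py client with decode_responses=True.
# ZRANGE stop is inclusive: zrange(key, offset, offset + limit) returns up to
# limit + 1 members, while offset + limit - 1 returns exactly limit members.
import redis

r = redis.Redis(decode_responses=True)
r.zadd('demo:releases', {f'id-{i}': i for i in range(10)})

page = r.zrange('demo:releases', 0, 0 + 5)            # 6 members (inclusive stop)
exact_page = r.zrange('demo:releases', 0, 0 + 5 - 1)  # 5 members
print(len(page), len(exact_page))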
@@ -77,3 +77,168 @@ def get_artist_of_disc(mbid):
 
         _log.debug('Se encontro el artista')
         return json.loads(redis.get(f'artist:{artist_id}'))
+
+
+def get_release(mbid):
+    _log.info('Intentando obtener la release %s en redis', mbid)
+
+    with get_redis_connection() as redis:
+        release = redis.get(f'release:{mbid}')
+        if not release:
+            _log.info('La release no estaba en redis')
+            jobs.load_entities_of_release.delay(mbid)
+            return None
+
+        return json.loads(release)
+
+
+def get_releases_of_disc(mbid, limit, offset):
+    _log.info('Intentando obtener las releases del disco %s', mbid)
+
+    with get_redis_connection() as redis:
+        key_releases = f'release_group:{mbid}:releases'
+
+        if key_releases not in redis:
+            _log.debug('%s no existe en redis', key_releases)
+            jobs.load_entities_of_release_group.delay(mbid)
+            return None, 0
+
+        count = int(redis.get(f'{key_releases}:count'))
+        if count != redis.zcard(key_releases):
+            _log.debug('La cantidad de releases del disco %s no coincide con el total', mbid)
+            jobs.load_entities_of_release_group.delay(mbid)
+            return None, 0
+
+        release_ids = redis.zrange(key_releases, offset, offset + limit)
+        keys = [f'release:{mbid}' for mbid in release_ids]
+        if redis.exists(*keys) != len(keys):
+            _log.debug('Aun no se cargan todas las releases del disco %s', mbid)
+            jobs.load_entities_of_release_group.delay(mbid)
+            return None, 0
+
+        _log.info('Se encontraron las releases en redis')
+        return [json.loads(disc) for disc in redis.mget(keys)], count
+
+
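The getters that return an (items, count) pair are presumably consumed by a paginated endpoint, with None signalling a cache miss that has already enqueued a loader job. A minimal sketch of such a caller (the function name, return shape, and status codes here are hypothetical, not taken from this repo):

# Hypothetical caller of the cache module; fetch_disc_releases is illustrative only.
from fetcher import cache

def fetch_disc_releases(mbid, limit=25, offset=0):
    releases, count = cache.get_releases_of_disc(mbid, limit, offset)
    if releases is None:
        # Cache miss: a Celery job was enqueued; answer with an "in progress"
        # status so the client can retry later.
        return {'status': 'loading'}, 202
    return {'releases': releases, 'total': count, 'offset': offset}, 200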
+def get_artist_of_release(mbid):
+    _log.info('Intentando obtener al artista de la release %s en redis', mbid)
+
+    with get_redis_connection() as redis:
+        artist_id = redis.get(f'release:{mbid}:artist')
+        if not artist_id:
+            _log.debug('No se encontro el artista')
+            jobs.load_entities_of_release.delay(mbid)
+            return None
+
+        artist = redis.get(f'artist:{artist_id}')
+        if not artist:
+            _log.debug('El artista aun no se carga en redis')
+            return None
+
+        _log.debug('Se encontro el artista')
+        return json.loads(artist)
+
+
+def get_recording(mbid):
+    _log.info('Intentando obtener la grabacion %s en redis', mbid)
+
+    with get_redis_connection() as redis:
+        recording = redis.get(f'recording:{mbid}')
+        if not recording:
+            _log.info('La grabacion no estaba en redis')
+            jobs.load_entities_of_recording.delay(mbid)
+            return None
+
+        return json.loads(recording)
+
+
+def get_recordings_of_release(mbid):
+    _log.info('Intentando obtener las grabaciones de la release %s', mbid)
+
+    with get_redis_connection() as redis:
+        medias_key = f'release:{mbid}:media'
+
+        if medias_key not in redis:
+            _log.debug('%s no existe en redis', medias_key)
+            jobs.load_entities_of_release.delay(mbid)
+            return None
+
+        media_count = int(redis.get(f'{medias_key}:count'))
+        if redis.zcard(medias_key) != media_count:
+            _log.debug('La cantidad de medias de la release %s no coincide con el total', mbid)
+            jobs.load_entities_of_release.delay(mbid)
+            return None
+
+        medias = [json.loads(media) for media in redis.zrange(medias_key, 0, -1)]
+        for media in medias:
+            recordings_key = f'{medias_key}:{media.get("position")}:recordings'
+            if recordings_key not in redis:
+                _log.debug('%s no existe en redis', recordings_key)
+                jobs.load_entities_of_release.delay(mbid)
+                return None
+
+            recordings_count = int(redis.get(f'{recordings_key}:count'))
+            if redis.zcard(recordings_key) != recordings_count:
+                _log.debug('La cantidad de recordings de la media no coincide con el total en la release %s', mbid)
+                jobs.load_entities_of_release.delay(mbid)
+                return None
+
+            recording_ids = redis.zrange(recordings_key, 0, -1)
+            keys = [f'recording:{mbid}' for mbid in recording_ids]
+            if redis.exists(*keys) != len(keys):
+                _log.debug('No estan todos los recordings almacenados')
+                jobs.load_entities_of_release.delay(mbid)
+                return None
+
+            media['recordings'] = []
+            for key in keys:
+                recording = json.loads(redis.get(key))
+                media['recordings'].append(recording)
+
+        return medias
+
+
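get_recordings_of_release only reads Redis; the key layout it expects (a sorted set of media JSON blobs, a count key per sorted set, and one sorted set of recording ids per media position) is presumably written by jobs.load_entities_of_release. A rough sketch of that assumed layout, with made-up ids rather than real MBIDs:

# Assumed key layout (illustrative values only); the real data is written by
# the Celery loader jobs, this just mirrors what the reader above expects.
import json
import redis

r = redis.Redis(decode_responses=True)
release = 'rel-123'

# One media per member of the sorted set, stored as JSON with its position.
r.zadd(f'release:{release}:media', {json.dumps({'position': 1, 'format': 'CD'}): 1})
r.set(f'release:{release}:media:count', 1)

# Recording ids for media position 1, plus the expected total.
r.zadd(f'release:{release}:media:1:recordings', {'rec-1': 1, 'rec-2': 2})
r.set(f'release:{release}:media:1:recordings:count', 2)

# Each recording is cached under its own key as JSON.
r.set('recording:rec-1', json.dumps({'title': 'Track 1'}))
r.set('recording:rec-2', json.dumps({'title': 'Track 2'}))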
+def get_releases_of_recording(mbid, limit, offset):
+    _log.info('Intentando obtener la release de la recording %s', mbid)
+
+    with get_redis_connection() as redis:
+        releases_key = f'recording:{mbid}:release'
+        if releases_key not in redis or f'{releases_key}:count' not in redis:
+            _log.debug('No existe %s en redis', releases_key)
+            jobs.load_entities_of_recording.delay(mbid)
+            return None, 0
+
+        count = int(redis.get(f'{releases_key}:count'))
+        if redis.zcard(releases_key) != count:
+            _log.debug('No estan almacenadas todas las keys de las releases')
+            jobs.load_entities_of_recording.delay(mbid)
+            return None, 0
+
+        release_ids = redis.zrange(releases_key, offset, offset + limit)
+        keys = [f'release:{mbid}' for mbid in release_ids]
+        if redis.exists(*keys) != len(keys):
+            _log.debug('No estan todas las releases')
+            jobs.load_entities_of_recording.delay(mbid)
+            return None, 0
+
+        _log.debug('Se encontraron las releases en redis')
+        return [json.loads(release) for release in redis.mget(keys)], count
+
+
+def get_artist_of_recording(mbid):
+    _log.info('Intentando obtener el artista de la grabacion %s desde redis', mbid)
+
+    with get_redis_connection() as redis:
+        artist_id = redis.get(f'recording:{mbid}:artist')
+        if not artist_id:
+            _log.debug('El artista no se encuentra en redis')
+            jobs.load_entities_of_recording.delay(mbid)
+            return None
+
+        artist = redis.get(f'artist:{artist_id}')
+        if not artist:
+            _log.debug('El artista aun no se carga en redis')
+            return None
+
+        _log.debug('Se encontro el artista')
+        return json.loads(artist)
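Every getter in this diff follows the same contract: on missing or incomplete data it enqueues the matching Celery loader and returns None (plus a zero count where pagination applies), so callers are expected to come back later. A compact sketch of that retry pattern from the consumer side (the helper name, attempt count, and interval are arbitrary choices, not part of the repo):

# Hypothetical polling helper around the cache-miss contract.
import time

from fetcher import cache

def get_recording_with_retry(mbid, attempts=5, interval=1.0):
    for _ in range(attempts):
        recording = cache.get_recording(mbid)
        if recording is not None:
            return recording
        # A miss has already scheduled jobs.load_entities_of_recording.delay(mbid),
        # so just wait for the worker to fill the cache before trying again.
        time.sleep(interval)
    return None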