Working on leveraging the cache for the relations
@@ -6,6 +6,7 @@ import django_rq
 from django_redis import get_redis_connection

 from fetcher import musicbrainz as mb
+from utils import make_key

 _log = logging.getLogger('fetcher_jobs')
 _log.addHandler(logging.NullHandler())
@@ -19,12 +20,12 @@ def load_release_cover_art(release):
     if isinstance(release, dict):
         mbid = release.get('id')

-    if cache.ttl(f'release_cover_art_{mbid}') == -1:
+    if cache.ttl(make_key(f'release_cover_art_{mbid}')) == -1:
         _log.info('El cover art ya estaba guardado')
         return

     cover_art = mb.get_release_cover_art(mbid)
-    cache.set(f'release_cover_art_{mbid}', json.dumps(cover_art))
+    cache.set(make_key(f'release_cover_art_{mbid}'), json.dumps(cover_art))
     _log.info('Cover art de release %s almacenado en cache', mbid)


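The guard above treats "TTL == -1" as "already cached": in raw Redis, the TTL command returns -1 for a key that exists with no expiration and -2 for a missing key, so a value stored without a timeout makes the next run of the job a no-op. A minimal sketch of that check done directly against the raw connection, assuming redis-py 3+ (where ttl() returns that integer); the helper name is illustrative:

    from django_redis import get_redis_connection

    from utils import make_key


    def cover_art_already_cached(mbid):
        """True when the cover-art key exists and never expires (raw TTL == -1)."""
        raw_cache = get_redis_connection("default")
        return raw_cache.ttl(make_key(f'release_cover_art_{mbid}')) == -1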
@@ -36,12 +37,12 @@ def load_release_group_cover_art(release_group):
     if isinstance(release_group, dict):
         mbid = release_group.get('id')

-    if cache.ttl(f'release_group_cover_art_{mbid}') == -1:
+    if cache.ttl(make_key(f'release_group_cover_art_{mbid}')) == -1:
         _log.info('El cover art ya estaba guardado')
         return

     cover_art = mb.get_release_group_cover_art(mbid)
-    cache.set(f'release_group_cover_art_{mbid}', json.dumps(cover_art))
+    cache.set(make_key(f'release_group_cover_art_{mbid}'), json.dumps(cover_art))
     _log.info('Cover art de release group %s almacenado en cache', mbid)


@@ -53,14 +54,14 @@ def load_entities_of_recording(recording):
     if isinstance(recording, dict):
         mbid = recording.get('id')

-    if cache.ttl(f'recording_{mbid}') == -1:
+    if cache.ttl(make_key(f'recording_{mbid}')) == -1:
         _log.info('La recording ya se habia procesado anteriormente')
         return

     if isinstance(recording, str):
         recording = mb.get_recording_by_mbid(mbid)

-    cache.set(f'recording_{mbid}', json.dumps(recording))
+    cache.set(make_key(f'recording_{mbid}'), json.dumps(recording))
     _log.info('Recording %s fue almacenada correctamente', mbid)


@@ -72,14 +73,14 @@ def load_entities_of_release(release):
     if isinstance(release, dict):
         mbid = release.get('id')

-    if cache.ttl(f'release_{mbid}') == -1:
+    if cache.ttl(make_key(f'release_{mbid}')) == -1:
         _log.info('La release ya se habia procesado anteriormente')
         return

     if isinstance(release, str):
         release = mb.get_release_by_mbid(mbid)

-    cache.set(f'release_{mbid}', json.dumps(release))
+    cache.set(make_key(f'release_{mbid}'), json.dumps(release))
     _log.info('Release %s fue almacenada en cache', mbid)

     load_release_cover_art.delay(release)
@@ -103,14 +104,14 @@ def load_entities_of_release_group(release_group):
     if isinstance(release_group, dict):
         mbid = release_group.get('id')

-    if cache.ttl(f'release_group_{mbid}') == -1:
+    if cache.ttl(make_key(f'release_group_{mbid}')) == -1:
         _log.info('La release group ya se habia procesado anteriormente')
         return

     if isinstance(release_group, str):
         release_group = mb.get_release_group_by_mbid(mbid)

-    cache.set(f'release_group_{mbid}', json.dumps(release_group))
+    cache.set(make_key(f'release_group_{mbid}'), json.dumps(release_group))
     _log.info('Release Group %s almacenado en cache', mbid)

     load_release_group_cover_art.delay(release_group)
@@ -134,20 +135,24 @@ def load_artist_on_cache(artist):
     if isinstance(artist, dict):
         mbid = artist.get('id')

-    if cache.ttl(f'artist_{mbid}') == -1:
+    if cache.ttl(make_key(f'artist_{mbid}')) == -1:
         _log.info('El artista ya se había procesado anteriormente')
         return

     if isinstance(artist, str):
         artist = mb.get_artist_by_mbid(mbid, includes=['tags'])

-    cache.set(f'artist_{mbid}', json.dumps(artist))
+    cache.set(make_key(f'artist_{mbid}'), json.dumps(artist))
     _log.info('Artista %s almacenado en cache', mbid)

     offset = 0
     while True:
         release_groups = mb.browse_release_groups({'artist': mbid}, limit=100, offset=offset)
+        if cache.ttl(make_key(f'artist_{mbid}:release_group_count')) != -1:
+            cache.set(make_key(f'artist_{mbid}:release_group_count'),
+                      release_groups.get('release_group_count'))
         for release_group in release_groups.get('release_groups'):
+            cache.rpush(make_key(f'artist_{mbid}:release_groups'), release_group.get('id'))
             load_entities_of_release_group.delay(release_group)

         offset += 100
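Besides caching each entity, the loop now mirrors the artist's discography into two extra keys: artist_<mbid>:release_group_count and a Redis list artist_<mbid>:release_groups filled with RPUSH. List commands are not part of Django's core cache API, which is why the read side further down goes through the raw client from get_redis_connection. A minimal sketch of writing and reading that list with the raw client; the helper names are illustrative:

    from django_redis import get_redis_connection

    from utils import make_key


    def remember_release_group(artist_mbid, release_group_id):
        # Append one release-group id to the artist's Redis list.
        raw_cache = get_redis_connection("default")
        raw_cache.rpush(make_key(f'artist_{artist_mbid}:release_groups'), release_group_id)


    def cached_release_group_ids(artist_mbid):
        # LRANGE returns bytes, so decode before reusing the ids as cache keys.
        raw_cache = get_redis_connection("default")
        raw_ids = raw_cache.lrange(make_key(f'artist_{artist_mbid}:release_groups'), 0, -1)
        return [raw_id.decode('utf-8') for raw_id in raw_ids]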
@@ -8,10 +8,12 @@ import json
 from math import ceil

 from django.core.cache import cache
+from django_redis import get_redis_connection
 from country_list import countries_for_language

 import fetcher.musicbrainz as mb
 from fetcher import jobs
+from utils import make_key


 ###
@@ -182,17 +184,34 @@ def get_disc(mbid):
     return disc


-def get_discs_of_artist(mbid, limit, page):
-    """Obtiene los discos de un artista desde musicbrainz incluyendo los creditos al artista"""
-    mb_discs = mb.browse_release_groups(params={'artist': mbid}, includes=['artist-credits'],
-                                        limit=limit, offset=limit * (page - 1))
-
-    if 'error' in mb_discs:
-        return mb_discs
+def get_discs_of_artist(mbid):
+    """Obtiene los discos de un artista desde musicbrainz incluyendo"""
+
+    mb_discs = []
+    with get_redis_connection("default") as raw_cache:
+        if raw_cache.exists(make_key(f'artist_{mbid}:release_group_count')) == 1:
+            mb_discs_ids = raw_cache.lrange(make_key(f'artist_{mbid}:release_groups'), 0, -1)
+            mb_discs = [get_disc(str(mbid, 'utf-8')) for mbid in mb_discs_ids]
+
+    if len(mb_discs) == 0:
+        jobs.load_artist_on_cache.delay(mbid)
+
+        offset = 0
+        while True:
+            mb_discs_browse = mb.browse_release_groups(params={'artist': mbid},
+                                                       limit=100, offset=offset)
+            if 'error' in mb_discs_browse:
+                return mb_discs_browse
+
+            mb_discs.extend(*mb_discs_browse.get('release_groups'))
+
+            offset += 100
+            if offset > mb_discs_browse.get('release_group_count'):
+                break

     return {
-        'paginate': paginate(mb_discs['release_group_count'], limit, page),
-        'discs': [map_disc(disc) for disc in mb_discs['release_groups']]
+        'discs': [map_disc(disc) for disc in mb_discs]
     }


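Taken together with the jobs above, this gives a cache-aside read path: once load_artist_on_cache has run, the discography is served from Redis alone; when the list is missing, the view enqueues the job and falls back to browsing MusicBrainz directly so the next request can hit the cache. A rough map of the keys involved (mbids are placeholders, the prefix comes from make_key):

    # Keys written by load_artist_on_cache (prefixed by make_key -> "DJANGO_SERVER:1:"):
    #   artist_<mbid>                       -> JSON artist document
    #   artist_<mbid>:release_group_count   -> count reported by MusicBrainz
    #   artist_<mbid>:release_groups        -> Redis list of release-group mbids (RPUSH)
    #   release_group_<mbid>                -> JSON release group, one per list entry
    #   release_group_cover_art_<mbid>      -> JSON cover-art document
    #
    # Read path in get_discs_of_artist:
    #   EXISTS artist_<mbid>:release_group_count == 1
    #   LRANGE artist_<mbid>:release_groups 0 -1  -> get_disc(id) for each id
    #   otherwise: jobs.load_artist_on_cache.delay(mbid) + direct browse_release_groups()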
@@ -60,15 +60,8 @@ def search_disc(request):

 @api_view(['GET'])
 def get_discs_of_artist(request, mbid):
-    """ Obtiene los discos de un artista dado el mbid del artista
-
-    Como los datos son paginables la query puede contener per_page y page para definir cuantos
-    elementos mostrar por pagina y que pagina mostrar.
-    """
-    limit = int(request.GET.get('per_page', 10))
-    page = int(request.GET.get('page', 1))
-
-    return Response(medium.get_discs_of_artist(mbid, limit, page))
+    """ Obtiene todos los discos de un artista dado el mbid del artista"""
+    return Response(medium.get_discs_of_artist(mbid))


 @api_view(['GET'])
@@ -86,7 +86,8 @@ CACHES = {
         "OPTIONS": {
             "CLIENT_CLASS": "django_redis.client.DefaultClient",
         },
-        "KEY_PREFIX": "DJANGO_SERVER"
+        "KEY_PREFIX": "DJANGO_SERVER",
+        "KEY_FUNCTION": "utils.make_key"
     }
 }

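KEY_FUNCTION is a standard Django cache setting: a dotted path to a callable with the signature (key, key_prefix, version), and every key passed to the cache goes through it, so the prefix scheme lives in one place instead of at each call site. With this configuration a plain cache.set call lands under the composed key; a small sketch with a placeholder mbid:

    from django.core.cache import cache

    # With KEY_PREFIX = "DJANGO_SERVER" and KEY_FUNCTION = "utils.make_key", this value
    # is stored in Redis under "DJANGO_SERVER:1:recording_abc"; timeout=None stores it
    # without an expiration, which is what the TTL-based guards in the jobs rely on.
    cache.set('recording_abc', '{"id": "recording_abc"}', timeout=None)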
@@ -4,6 +4,7 @@ import logging
 import functools

 from django.http import JsonResponse
+from django.conf import settings

 from pygments import highlight
 from pygments.lexers import JsonLexer  # pylint: disable=no-name-in-module
@@ -76,3 +77,11 @@ def sanitize_keys(dictionary):
         new_key = key.replace('-', '_')
         replace_key(dictionary, key, new_key)
     return dictionary
+
+
+def make_key(key, key_prefix=None, version=1):
+    """Key generation function for cache"""
+    if key_prefix is None:
+        key_prefix = settings.CACHES.get('default').get('KEY_PREFIX')
+
+    return ':'.join([key_prefix, str(version), key])
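make_key matches the KEY_FUNCTION signature above and is also what the raw get_redis_connection paths call explicitly, so code that bypasses the cache API still computes the same '<prefix>:<version>:<key>' layout. A quick check of the composition, with a placeholder mbid:

    from utils import make_key

    # Explicit call, as used before the raw get_redis_connection() reads above.
    assert make_key('artist_123:release_groups',
                    key_prefix='DJANGO_SERVER', version=1) == 'DJANGO_SERVER:1:artist_123:release_groups'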