Cache ingestion/invalidation/deletion functions now take a single string parameter instead of a parameter tuple that had to be hashed, which was slow

This commit is contained in:
Sumner Evans
2020-05-15 16:59:10 -06:00
parent 10c968ea44
commit eb59fa4adf
7 changed files with 201 additions and 202 deletions

View File

@@ -1,4 +1,6 @@
import abc
import hashlib
import json
from dataclasses import dataclass
from datetime import timedelta
from enum import Enum
@@ -103,6 +105,22 @@ class AlbumSearchQuery:
year_range: Tuple[int, int] = (2010, 2020)
genre: Genre = _Genre("Rock")
def strhash(self) -> str:
"""
Returns a deterministic hash of the query as a string.
>>> query = AlbumSearchQuery(
... AlbumSearchQuery.Type.YEAR_RANGE, year_range=(2018, 2019)
... )
>>> query.strhash()
'a6571bb7be65984c6627f545cab9fc767fce6d07'
"""
return hashlib.sha1(
bytes(
json.dumps((self.type.value, self.year_range, self.genre.name)), "utf8"
)
).hexdigest()
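# A usage sketch (hypothetical variables): call sites pass this hash as the
# cache parameter string, as the get_albums changes later in this diff do.
query = AlbumSearchQuery(AlbumSearchQuery.Type.YEAR_RANGE, year_range=(2018, 2019))
param = query.strhash()  # stored in CacheInfo.parameter under the ALBUMS key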
class CacheMissError(Exception):
"""
@@ -725,25 +743,25 @@ class CachingAdapter(Adapter):
SONG_FILE_PERMANENT = "song_file_permanent"
@abc.abstractmethod
def ingest_new_data(
self, data_key: CachedDataKey, params: Tuple[Any, ...], data: Any
):
def ingest_new_data(self, data_key: CachedDataKey, param: Optional[str], data: Any):
"""
This function will be called after the fallback, ground-truth adapter returns
new data. This normally will happen if this adapter has a cache miss or if the
UI forces retrieval from the ground-truth adapter.
:param data_key: the type of data to be ingested.
:param params: the parameters that uniquely identify the data to be ingested.
For example, with playlist details, this will be a tuple containing a single
element: the playlist ID. If that playlist ID is requested again, the
adapter should service that request, but it should not service a request for
a different playlist ID.
:param param: a string that uniquely identifies the data to be ingested. For
example, with playlist details, this will be the playlist ID. If that
playlist ID is requested again, the adapter should service that request, but
it should not service a request for a different playlist ID.
For the playlist list, this will be ``None`` since there are no parameters to
that request.
:param data: the data that was returned by the ground truth adapter.
"""
@abc.abstractmethod
def invalidate_data(self, data_key: CachedDataKey, params: Tuple[Any, ...]):
def invalidate_data(self, data_key: CachedDataKey, param: Optional[str]):
"""
This function will be called if the adapter should invalidate some of its data.
This should not destroy the invalidated data. If invalid data is requested, a
@@ -752,12 +770,14 @@ class CachingAdapter(Adapter):
:param data_key: the type of data to be invalidated.
:param param: the string that uniquely identifies the data to be invalidated.
For example, with playlist details, this will be a tuple containing a single
element: the playlist ID.
For example, with playlist details, this will be the playlist ID.
For the playlist list, this will be ``None`` since there are no parameters to
that request.
"""
@abc.abstractmethod
def delete_data(self, data_key: CachedDataKey, params: Tuple[Any, ...]):
def delete_data(self, data_key: CachedDataKey, param: Optional[str]):
"""
This function will be called if the adapter should delete some of its data.
This should destroy the data. If the deleted data is requested, a
@@ -765,8 +785,10 @@ class CachingAdapter(Adapter):
:param data_key: the type of data to be deleted.
:param param: the string that uniquely identifies the data to be deleted.
For example, with playlist details, this will be a tuple containing a single
element: the playlist ID.
For example, with playlist details, this will be the playlist ID.
For the playlist list, this will be ``None`` since there are no parameters to
that request.
"""
# Cache-Specific Methods

View File

@@ -8,7 +8,6 @@ from typing import Any, cast, Dict, Optional, Sequence, Set, Tuple, Union
from peewee import fn
from sublime import util
from sublime.adapters import api_objects as API
from . import models
@@ -146,7 +145,7 @@ class FilesystemAdapter(CachingAdapter):
# raise a CacheMissError with the partial data.
cache_info = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == cache_key,
models.CacheInfo.params_hash == util.params_hash(id),
models.CacheInfo.parameter == id,
models.CacheInfo.valid == True, # noqa: 712
*cache_where_clause,
)
@@ -202,7 +201,7 @@ class FilesystemAdapter(CachingAdapter):
def get_cover_art_uri(self, cover_art_id: str, scheme: str) -> str:
cover_art = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == CachingAdapter.CachedDataKey.COVER_ART_FILE,
models.CacheInfo.params_hash == util.params_hash(cover_art_id),
models.CacheInfo.parameter == cover_art_id,
)
if cover_art:
filename = self.cover_art_dir.joinpath(str(cover_art.file_hash))
@@ -281,7 +280,7 @@ class FilesystemAdapter(CachingAdapter):
# not, cache miss.
if not models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == CachingAdapter.CachedDataKey.ALBUMS,
models.CacheInfo.params_hash == util.params_hash(query),
models.CacheInfo.parameter == query.strhash(),
models.CacheInfo.valid == True, # noqa: 712
):
raise CacheMissError(partial_data=sql_query)
@@ -334,42 +333,37 @@ class FilesystemAdapter(CachingAdapter):
# Data Ingestion Methods
# ==================================================================================
def ingest_new_data(
self,
data_key: CachingAdapter.CachedDataKey,
params: Tuple[Any, ...],
data: Any,
self, data_key: CachingAdapter.CachedDataKey, param: Optional[str], data: Any,
):
assert self.is_cache, "FilesystemAdapter is not in cache mode!"
# Wrap the actual ingestion function in a database lock, and an atomic
# transaction.
with self.db_write_lock, models.database.atomic():
self._do_ingest_new_data(data_key, params, data)
self._do_ingest_new_data(data_key, param, data)
def invalidate_data(
self, function: CachingAdapter.CachedDataKey, params: Tuple[Any, ...]
self, function: CachingAdapter.CachedDataKey, param: Optional[str]
):
assert self.is_cache, "FilesystemAdapter is not in cache mode!"
# Wrap the actual invalidation function in a database lock, and an atomic
# transaction.
with self.db_write_lock, models.database.atomic():
self._do_invalidate_data(function, params)
self._do_invalidate_data(function, param)
def delete_data(
self, function: CachingAdapter.CachedDataKey, params: Tuple[Any, ...]
):
def delete_data(self, function: CachingAdapter.CachedDataKey, param: Optional[str]):
assert self.is_cache, "FilesystemAdapter is not in cache mode!"
# Wrap the actual deletion function in a database lock, and an atomic
# transaction.
with self.db_write_lock, models.database.atomic():
self._do_delete_data(function, params)
self._do_delete_data(function, param)
def _do_ingest_new_data(
self,
data_key: CachingAdapter.CachedDataKey,
params: Tuple[Any, ...],
param: Optional[str],
data: Any,
partial: bool = False,
) -> Any:
@@ -379,11 +373,10 @@ class FilesystemAdapter(CachingAdapter):
# TODO: refactor to be a recursive function like invalidate_data?
params_hash = util.params_hash(*params)
cache_info_extra: Dict[str, Any] = {}
logging.debug(
f"_do_ingest_new_data params={params} params_hash={params_hash} data_key={data_key} data={data}" # noqa: 502
f"_do_ingest_new_data param={param} data_key={data_key} data={data}"
)
KEYS = CachingAdapter.CachedDataKey
@@ -407,12 +400,12 @@ class FilesystemAdapter(CachingAdapter):
if hasattr(c, "children"): # directory
directory_data["directory_children"].append(
self._do_ingest_new_data(
KEYS.DIRECTORY, (c.id,), c, partial=True
KEYS.DIRECTORY, c.id, c, partial=True
)
)
else:
directory_data["song_children"].append(
self._do_ingest_new_data(KEYS.SONG, (c.id,), c)
self._do_ingest_new_data(KEYS.SONG, c.id, c)
)
directory, created = models.Directory.get_or_create(
@@ -459,7 +452,7 @@ class FilesystemAdapter(CachingAdapter):
ingest_song_data(s, fill_album=False) for s in api_album.songs or []
],
"_cover_art": self._do_ingest_new_data(
KEYS.COVER_ART_FILE, params=(api_album.cover_art,), data=None,
KEYS.COVER_ART_FILE, api_album.cover_art, data=None,
)
if api_album.cover_art
else None,
@@ -505,9 +498,7 @@ class FilesystemAdapter(CachingAdapter):
for a in api_artist.albums or []
],
"_artist_image_url": self._do_ingest_new_data(
KEYS.COVER_ART_FILE,
params=(api_artist.artist_image_url,),
data=None,
KEYS.COVER_ART_FILE, api_artist.artist_image_url, data=None,
)
if api_artist.artist_image_url
else None,
@@ -540,12 +531,12 @@ class FilesystemAdapter(CachingAdapter):
"artist": ingest_artist_data(ar) if (ar := api_song.artist) else None,
"album": ingest_album_data(al) if (al := api_song.album) else None,
"_cover_art": self._do_ingest_new_data(
KEYS.COVER_ART_FILE, params=(api_song.cover_art,), data=None,
KEYS.COVER_ART_FILE, api_song.cover_art, data=None,
)
if api_song.cover_art
else None,
"file": self._do_ingest_new_data(
KEYS.SONG_FILE, params=(api_song.id,), data=(api_song.path, None)
KEYS.SONG_FILE, api_song.id, data=(api_song.path, None)
)
if api_song.path
else None,
@@ -575,7 +566,7 @@ class FilesystemAdapter(CachingAdapter):
"owner": getattr(api_playlist, "owner", None),
"public": getattr(api_playlist, "public", None),
"songs": [
self._do_ingest_new_data(KEYS.SONG, (s.id,), s)
self._do_ingest_new_data(KEYS.SONG, s.id, s)
for s in (
cast(API.PlaylistDetails, api_playlist).songs
if hasattr(api_playlist, "songs")
@@ -583,7 +574,7 @@ class FilesystemAdapter(CachingAdapter):
)
],
"_cover_art": self._do_ingest_new_data(
KEYS.COVER_ART_FILE, (api_playlist.cover_art,), None
KEYS.COVER_ART_FILE, api_playlist.cover_art, None
)
if api_playlist.cover_art
else None,
@@ -630,7 +621,7 @@ class FilesystemAdapter(CachingAdapter):
).execute()
elif data_key == KEYS.COVER_ART_FILE:
cache_info_extra["file_id"] = params[0]
cache_info_extra["file_id"] = param
if data is not None:
file_hash = compute_file_hash(data)
@@ -682,7 +673,7 @@ class FilesystemAdapter(CachingAdapter):
return_val = ingest_song_data(data)
elif data_key == KEYS.SONG_FILE:
cache_info_extra["file_id"] = params[0]
cache_info_extra["file_id"] = param
elif data_key == KEYS.SONG_FILE_PERMANENT:
data_key = KEYS.SONG_FILE
@@ -692,10 +683,10 @@ class FilesystemAdapter(CachingAdapter):
now = datetime.now()
cache_info, cache_info_created = models.CacheInfo.get_or_create(
cache_key=data_key,
params_hash=params_hash,
parameter=param,
defaults={
"cache_key": data_key,
"params_hash": params_hash,
"parameter": param,
"last_ingestion_time": now,
# If it's partial data, then set it to be invalid so it will only be
# used in the event that the ground truth adapter can't service the
@@ -736,57 +727,47 @@ class FilesystemAdapter(CachingAdapter):
return return_val if return_val is not None else cache_info
def _do_invalidate_data(
self, data_key: CachingAdapter.CachedDataKey, params: Tuple[Any, ...],
self, data_key: CachingAdapter.CachedDataKey, param: Optional[str],
):
params_hash = util.params_hash(*params)
logging.debug(
f"_do_invalidate_data params={params} params_hash={params_hash} data_key={data_key}" # noqa: 502
)
logging.debug(f"_do_invalidate_data param={param} data_key={data_key}")
models.CacheInfo.update({"valid": False}).where(
models.CacheInfo.cache_key == data_key,
models.CacheInfo.params_hash == params_hash,
models.CacheInfo.cache_key == data_key, models.CacheInfo.parameter == param
).execute()
cover_art_cache_key = CachingAdapter.CachedDataKey.COVER_ART_FILE
if data_key == CachingAdapter.CachedDataKey.ALBUM:
album = models.Album.get_or_none(models.Album.id == params[0])
album = models.Album.get_or_none(models.Album.id == param)
if album:
self._do_invalidate_data(cover_art_cache_key, (album.cover_art,))
self._do_invalidate_data(cover_art_cache_key, album.cover_art)
elif data_key == CachingAdapter.CachedDataKey.ARTIST:
# Invalidate the corresponding cover art.
if artist := models.Artist.get_or_none(models.Artist.id == params[0]):
self._do_invalidate_data(
cover_art_cache_key, (artist.artist_image_url,)
)
if artist := models.Artist.get_or_none(models.Artist.id == param):
self._do_invalidate_data(cover_art_cache_key, artist.artist_image_url)
for album in artist.albums or []:
self._do_invalidate_data(
CachingAdapter.CachedDataKey.ALBUM, (album.id,)
CachingAdapter.CachedDataKey.ALBUM, album.id
)
elif data_key == CachingAdapter.CachedDataKey.PLAYLIST_DETAILS:
# Invalidate the corresponding cover art.
if playlist := models.Playlist.get_or_none(models.Playlist.id == params[0]):
self._do_invalidate_data(cover_art_cache_key, (playlist.cover_art,))
if playlist := models.Playlist.get_or_none(models.Playlist.id == param):
self._do_invalidate_data(cover_art_cache_key, playlist.cover_art)
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE:
# Invalidate the corresponding cover art.
if song := models.Song.get_or_none(models.Song.id == params[0]):
if song := models.Song.get_or_none(models.Song.id == param):
self._do_invalidate_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE, (song.cover_art,)
CachingAdapter.CachedDataKey.COVER_ART_FILE, song.cover_art
)
def _do_delete_data(
self, data_key: CachingAdapter.CachedDataKey, params: Tuple[Any, ...],
self, data_key: CachingAdapter.CachedDataKey, param: Optional[str]
):
params_hash = util.params_hash(*params)
logging.debug(
f"_do_delete_data params={params} params_hash={params_hash} data_key={data_key}" # noqa: 502
)
logging.debug(f"_do_delete_data param={param} data_key={data_key}")
cache_info = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == data_key,
models.CacheInfo.params_hash == params_hash,
models.CacheInfo.cache_key == data_key, models.CacheInfo.parameter == param,
)
if data_key == CachingAdapter.CachedDataKey.COVER_ART_FILE:
@@ -797,10 +778,10 @@ class FilesystemAdapter(CachingAdapter):
elif data_key == CachingAdapter.CachedDataKey.PLAYLIST_DETAILS:
# Delete the playlist and corresponding cover art.
if playlist := models.Playlist.get_or_none(models.Playlist.id == params[0]):
if playlist := models.Playlist.get_or_none(models.Playlist.id == param):
if cover_art := playlist.cover_art:
self._do_delete_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE, (cover_art,),
CachingAdapter.CachedDataKey.COVER_ART_FILE, cover_art
)
playlist.delete_instance()

View File

@@ -32,12 +32,12 @@ class CacheInfo(BaseModel):
id = AutoField()
valid = BooleanField(default=False)
cache_key = CacheConstantsField()
params_hash = TextField()
parameter = TextField(null=True, default="")
# TODO (#2) actually use this for cache expiry.
last_ingestion_time = TzDateTimeField(null=False)
class Meta:
indexes = ((("cache_key", "params_hash"), True),)
indexes = ((("cache_key", "parameter"), True),)
# Used for cached files.
file_id = TextField(null=True)
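# With the new column, lookups key on the unique (cache_key, parameter) pair
# using plain string comparison instead of a computed hash. A sketch, assuming
# CachingAdapter is importable here:
CacheInfo.get_or_none(
    CacheInfo.cache_key == CachingAdapter.CachedDataKey.PLAYLIST_DETAILS,
    CacheInfo.parameter == "1",
)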

View File

@@ -1,3 +1,4 @@
import hashlib
import logging
import tempfile
import threading
@@ -16,7 +17,6 @@ from typing import (
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
@@ -24,7 +24,6 @@ from typing import (
import requests
from sublime import util
from sublime.config import AppConfiguration
from .adapter_base import (
@@ -149,7 +148,7 @@ class Result(Generic[T]):
class AdapterManager:
available_adapters: Set[Any] = {FilesystemAdapter, SubsonicAdapter}
current_download_hashes: Set[str] = set()
current_download_uris: Set[str] = set()
download_set_lock = threading.Lock()
executor: ThreadPoolExecutor = ThreadPoolExecutor()
download_executor: ThreadPoolExecutor = ThreadPoolExecutor()
@@ -286,7 +285,10 @@ class AdapterManager:
@staticmethod
def _create_ground_truth_result(
function_name: str, *args, before_download: Callable[[], None] = None, **kwargs
function_name: str,
*params: Any,
before_download: Callable[[], None] = None,
**kwargs,
) -> Result:
"""
Creates a Result using the given ``function_name`` on the ground truth adapter.
@@ -296,14 +298,13 @@ class AdapterManager:
assert AdapterManager._instance
if before_download:
before_download()
return getattr(
AdapterManager._instance.ground_truth_adapter, function_name
)(*args, **kwargs)
fn = getattr(AdapterManager._instance.ground_truth_adapter, function_name)
return fn(*params, **kwargs)
return Result(future_fn)
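# A usage sketch (hypothetical): the single cache parameter is forwarded
# positionally to the ground-truth adapter method.
result = AdapterManager._create_ground_truth_result("get_playlist_details", "1")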
@staticmethod
def _create_download_fn(uri: str, params_hash: str) -> Callable[[], str]:
def _create_download_fn(uri: str) -> Callable[[], str]:
"""
Create a function to download the given URI to a temporary file, and return the
filename. The returned function will spin-loop if the resource is already being
@@ -313,14 +314,14 @@ class AdapterManager:
def download_fn() -> str:
assert AdapterManager._instance
download_tmp_filename = AdapterManager._instance.download_path.joinpath(
params_hash
hashlib.sha1(bytes(uri, "utf8")).hexdigest()
)
resource_downloading = False
with AdapterManager.download_set_lock:
if params_hash in AdapterManager.current_download_hashes:
if uri in AdapterManager.current_download_uris:
resource_downloading = True
AdapterManager.current_download_hashes.add(params_hash)
AdapterManager.current_download_uris.add(uri)
# TODO (#122): figure out how to retry if the other request failed.
if resource_downloading:
@@ -330,7 +331,7 @@ class AdapterManager:
# it has completed. Then, just return the path to the
# resource.
t = 0.0
while params_hash in AdapterManager.current_download_hashes and t < 20:
while uri in AdapterManager.current_download_uris and t < 20:
sleep(0.2)
t += 0.2
# TODO (#122): handle the timeout
@@ -350,7 +351,7 @@ class AdapterManager:
finally:
# Always release the download set lock, even if there's an error.
with AdapterManager.download_set_lock:
AdapterManager.current_download_hashes.discard(params_hash)
AdapterManager.current_download_uris.discard(uri)
logging.info(f"{uri} downloaded. Returning.")
return str(download_tmp_filename)
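# A usage sketch (``uri`` is hypothetical here): callers wrap the returned
# closure in a Result so the download runs on the download executor, mirroring
# get_cover_art_uri below.
future = Result(AdapterManager._create_download_fn(uri), is_download=True)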
@@ -359,7 +360,7 @@ class AdapterManager:
@staticmethod
def _create_caching_done_callback(
cache_key: CachingAdapter.CachedDataKey, params: Tuple[Any, ...]
cache_key: CachingAdapter.CachedDataKey, param: Optional[str]
) -> Callable[[Result], None]:
"""
Create a function to let the caching_adapter ingest new data.
@@ -372,7 +373,7 @@ class AdapterManager:
assert AdapterManager._instance
assert AdapterManager._instance.caching_adapter
AdapterManager._instance.caching_adapter.ingest_new_data(
cache_key, params, f.result(),
cache_key, param, f.result(),
)
return future_finished
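# A usage sketch mirroring the wiring later in this diff: ingest the playlist
# details into the cache once the ground-truth Result resolves.
result.add_done_callback(
    AdapterManager._create_caching_done_callback(
        CachingAdapter.CachedDataKey.PLAYLIST_DETAILS, "1"
    )
)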
@@ -409,7 +410,7 @@ class AdapterManager:
@staticmethod
def _get_from_cache_or_ground_truth(
function_name: str,
*args: Any,
param: Optional[str],
cache_key: CachingAdapter.CachedDataKey = None,
before_download: Callable[[], None] = None,
use_ground_truth_adapter: bool = False,
@@ -421,8 +422,8 @@ class AdapterManager:
Get data from one of the adapters.
:param function_name: The function to call on the adapter.
:param args: The arguments to pass to the adapter function (also used for the
cache parameter tuple to uniquely identify the request).
:param param: The parameter to pass to the adapter function (also used for the
cache parameter to uniquely identify the request).
:param cache_key: The cache key to use to invalidate caches and ingest caches.
:param before_download: Function to call before doing a network request.
:param allow_download: Whether or not to allow a network request to retrieve the
@@ -439,7 +440,7 @@ class AdapterManager:
assert (caching_adapter := AdapterManager._instance.caching_adapter)
try:
logging.info(f"END: {function_name}: serving from cache")
return Result(getattr(caching_adapter, function_name)(*args, **kwargs))
return Result(getattr(caching_adapter, function_name)(param, **kwargs))
except CacheMissError as e:
partial_data = e.partial_data
logging.info(f"Cache Miss on {function_name}.")
@@ -451,7 +452,7 @@ class AdapterManager:
and AdapterManager._instance.caching_adapter
and use_ground_truth_adapter
):
AdapterManager._instance.caching_adapter.invalidate_data(cache_key, args)
AdapterManager._instance.caching_adapter.invalidate_data(cache_key, param)
# TODO (#188): don't short circuit if not allow_download because it could be the
# filesystem adapter.
@@ -463,13 +464,16 @@ class AdapterManager:
raise Exception(f"No adapters can service {function_name} at the moment.")
result: Result[AdapterManager.R] = AdapterManager._create_ground_truth_result(
function_name, *args, before_download=before_download, **kwargs,
function_name,
*((param,) if param is not None else ()),
before_download=before_download,
**kwargs,
)
if AdapterManager._instance.caching_adapter:
if cache_key:
result.add_done_callback(
AdapterManager._create_caching_done_callback(cache_key, args)
AdapterManager._create_caching_done_callback(cache_key, param)
)
if on_result_finished:
@@ -552,6 +556,7 @@ class AdapterManager:
) -> Result[Sequence[Playlist]]:
return AdapterManager._get_from_cache_or_ground_truth(
"get_playlists",
None,
cache_key=CachingAdapter.CachedDataKey.PLAYLISTS,
before_download=before_download,
use_ground_truth_adapter=force,
@@ -584,12 +589,12 @@ class AdapterManager:
if playlist := f.result():
AdapterManager._instance.caching_adapter.ingest_new_data(
CachingAdapter.CachedDataKey.PLAYLIST_DETAILS,
(playlist.id,),
playlist.id,
playlist,
)
else:
AdapterManager._instance.caching_adapter.invalidate_data(
CachingAdapter.CachedDataKey.PLAYLISTS, ()
CachingAdapter.CachedDataKey.PLAYLISTS, None
)
return AdapterManager._get_from_cache_or_ground_truth(
@@ -631,7 +636,7 @@ class AdapterManager:
if AdapterManager._instance.caching_adapter:
AdapterManager._instance.caching_adapter.delete_data(
CachingAdapter.CachedDataKey.PLAYLIST_DETAILS, (playlist_id,)
CachingAdapter.CachedDataKey.PLAYLIST_DETAILS, playlist_id
)
# TODO (#189): allow this to take a set of schemes and unify with
@@ -688,7 +693,7 @@ class AdapterManager:
if AdapterManager._instance.caching_adapter and force:
AdapterManager._instance.caching_adapter.invalidate_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE, (cover_art_id,)
CachingAdapter.CachedDataKey.COVER_ART_FILE, cover_art_id
)
if not AdapterManager._ground_truth_can_do("get_cover_art_uri"):
@@ -703,7 +708,6 @@ class AdapterManager:
AdapterManager._instance.ground_truth_adapter.get_cover_art_uri(
cover_art_id, AdapterManager._get_scheme()
),
util.params_hash("cover_art", cover_art_id),
),
is_download=True,
default_value=existing_cover_art_filename,
@@ -712,7 +716,7 @@ class AdapterManager:
if AdapterManager._instance.caching_adapter:
future.add_done_callback(
AdapterManager._create_caching_done_callback(
CachingAdapter.CachedDataKey.COVER_ART_FILE, (cover_art_id,),
CachingAdapter.CachedDataKey.COVER_ART_FILE, cover_art_id
)
)
@@ -798,11 +802,10 @@ class AdapterManager:
AdapterManager._instance.ground_truth_adapter.get_song_uri(
song_id, AdapterManager._get_scheme()
),
util.params_hash("song", song_id),
)()
AdapterManager._instance.caching_adapter.ingest_new_data(
CachingAdapter.CachedDataKey.SONG_FILE,
(song_id,),
song_id,
(None, song_tmp_filename),
)
on_song_download_complete(song_id)
@@ -868,7 +871,7 @@ class AdapterManager:
for song_id in song_ids:
song = AdapterManager.get_song_details(song_id).result()
AdapterManager._instance.caching_adapter.delete_data(
CachingAdapter.CachedDataKey.SONG_FILE, (song.id,)
CachingAdapter.CachedDataKey.SONG_FILE, song.id
)
on_song_delete(song_id)
@@ -892,6 +895,7 @@ class AdapterManager:
def get_genres(force: bool = False) -> Result[Sequence[Genre]]:
return AdapterManager._get_from_cache_or_ground_truth(
"get_genres",
None,
use_ground_truth_adapter=force,
cache_key=CachingAdapter.CachedDataKey.GENRES,
)
@@ -909,6 +913,7 @@ class AdapterManager:
return AdapterManager.sort_by_ignored_articles(
AdapterManager._get_from_cache_or_ground_truth(
"get_artists",
None,
use_ground_truth_adapter=force,
before_download=before_download,
cache_key=CachingAdapter.CachedDataKey.ARTISTS,
@@ -921,11 +926,13 @@ class AdapterManager:
@staticmethod
def _get_ignored_articles(use_ground_truth_adapter: bool) -> Set[str]:
# TODO get this at first startup.
if not AdapterManager._any_adapter_can_do("get_ignored_articles"):
return set()
try:
return AdapterManager._get_from_cache_or_ground_truth(
"get_ignored_articles",
None,
use_ground_truth_adapter=use_ground_truth_adapter,
cache_key=CachingAdapter.CachedDataKey.IGNORED_ARTICLES,
).result()
@@ -970,7 +977,7 @@ class AdapterManager:
if artist := f.result():
for album in artist.albums or []:
AdapterManager._instance.caching_adapter.invalidate_data(
CachingAdapter.CachedDataKey.ALBUM, (album.id,)
CachingAdapter.CachedDataKey.ALBUM, album.id
)
return AdapterManager._get_from_cache_or_ground_truth(
@@ -991,7 +998,7 @@ class AdapterManager:
) -> Result[Sequence[Album]]:
return AdapterManager._get_from_cache_or_ground_truth(
"get_albums",
query,
query.strhash(),
cache_key=CachingAdapter.CachedDataKey.ALBUMS,
before_download=before_download,
use_ground_truth_adapter=force,
@@ -1042,7 +1049,7 @@ class AdapterManager:
if play_queue := f.result():
for song in play_queue.songs:
AdapterManager._instance.caching_adapter.ingest_new_data(
CachingAdapter.CachedDataKey.SONG, (song.id,), song
CachingAdapter.CachedDataKey.SONG, song.id, song
)
future.add_done_callback(future_finished)
@@ -1133,7 +1140,7 @@ class AdapterManager:
if AdapterManager._instance.caching_adapter:
AdapterManager._instance.caching_adapter.ingest_new_data(
CachingAdapter.CachedDataKey.SEARCH_RESULTS,
(),
None,
ground_truth_search_results,
)
@@ -1155,7 +1162,7 @@ class AdapterManager:
if not AdapterManager._instance.caching_adapter:
return SongCacheStatus.NOT_CACHED
if util.params_hash("song", song.id) in AdapterManager.current_download_hashes:
if song.id in AdapterManager.current_download_uris:
return SongCacheStatus.DOWNLOADING
return AdapterManager._instance.caching_adapter.get_cached_status(song)

View File

@@ -103,6 +103,8 @@ def esc(string: Optional[str]) -> str:
"""
>>> esc("test & <a href='ohea' target='_blank'>test</a>")
"test &amp; <a href='ohea'>test</a>"
>>> esc(None)
''
"""
if string is None:
return ""
@@ -112,6 +114,9 @@ def esc(string: Optional[str]) -> str:
def dot_join(*items: Any) -> str:
"""
Joins the given items with a dot character (•). Filters out ``None`` values.
>>> dot_join(None, "foo", "bar", None, "baz")
'foo • bar • baz'
"""
return "".join(map(str, filter(lambda x: x is not None, items)))
@@ -127,6 +132,16 @@ def get_cached_status_icon(song: Song) -> str:
def _parse_diff_location(location: str) -> Tuple:
"""
Parses a diff location as returned by deepdiff.
>>> _parse_diff_location("root[22]")
('22',)
>>> _parse_diff_location("root[22][4]")
('22', '4')
>>> _parse_diff_location("root[22].foo")
('22', 'foo')
"""
match = re.match(r"root\[(\d*)\](?:\[(\d*)\]|\.(.*))?", location)
return tuple(g for g in cast(Match, match).groups() if g is not None)
@@ -161,6 +176,7 @@ def diff_model_store(store_to_edit: Any, new_store: Iterable[Any]):
The approach here is that if there are any differences, we refresh the
entire list, because it is too hard to do in-place editing.
"""
# TODO: figure out if there's a way to do editing.
old_store = store_to_edit[:]
diff = DeepDiff(old_store, new_store)

View File

@@ -1,14 +0,0 @@
import hashlib
import json
from typing import Any
from sublime.adapters import AlbumSearchQuery
def params_hash(*params: Any) -> str:
# TODO determine if we ever have more than one parameter.
# Special handling for AlbumSearchQuery objects.
# TODO figure out if I can optimize this
if len(params) > 0 and isinstance(params[0], AlbumSearchQuery):
params = (hash(params[0]), *params[1:])
return hashlib.sha1(bytes(json.dumps(params), "utf8")).hexdigest()
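The removal makes sense given the cost profile: every cache operation paid a
json.dumps plus a SHA-1 over the parameter tuple, while the new
CacheInfo.parameter column compares plain strings. A rough measurement sketch
(hypothetical key; absolute numbers vary by machine):

import timeit
timeit.timeit(lambda: params_hash("playlist", "1"), number=10_000)  # dumps + sha1 each call
timeit.timeit(lambda: "playlist:1" == "playlist:1", number=10_000)  # plain string compare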

View File

@@ -124,7 +124,7 @@ def test_caching_get_playlists(cache_adapter: FilesystemAdapter):
cache_adapter.get_playlists()
# Ingest an empty list (for example, no playlists added to the server yet).
cache_adapter.ingest_new_data(KEYS.PLAYLISTS, (), [])
cache_adapter.ingest_new_data(KEYS.PLAYLISTS, None, [])
# After the first cache miss of get_playlists, even if an empty list is
# returned, the next one should not be a cache miss.
@@ -133,7 +133,7 @@ def test_caching_get_playlists(cache_adapter: FilesystemAdapter):
# Ingest two playlists.
cache_adapter.ingest_new_data(
KEYS.PLAYLISTS,
(),
None,
[
SubsonicAPI.Playlist("1", "test1", comment="comment"),
SubsonicAPI.Playlist("2", "test2"),
@@ -152,7 +152,7 @@ def test_caching_get_playlists(cache_adapter: FilesystemAdapter):
# Ingest a new playlist list with one of them deleted.
cache_adapter.ingest_new_data(
KEYS.PLAYLISTS,
(),
None,
[
SubsonicAPI.Playlist("1", "test1", comment="comment"),
SubsonicAPI.Playlist("3", "test3"),
@@ -188,7 +188,7 @@ def test_caching_get_playlist_details(cache_adapter: FilesystemAdapter):
# Simulate the playlist being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS,
("1",),
"1",
SubsonicAPI.PlaylistWithSongs("1", "test1", songs=MOCK_SUBSONIC_SONGS[:2]),
)
@@ -202,7 +202,7 @@ def test_caching_get_playlist_details(cache_adapter: FilesystemAdapter):
# "Force refresh" the playlist and add a new song (duplicate).
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS,
("1",),
"1",
SubsonicAPI.PlaylistWithSongs("1", "foo", songs=MOCK_SUBSONIC_SONGS),
)
@@ -233,7 +233,7 @@ def test_caching_get_playlist_then_details(cache_adapter: FilesystemAdapter):
# Ingest a list of playlists (like the sidebar, without songs)
cache_adapter.ingest_new_data(
KEYS.PLAYLISTS,
(),
None,
[SubsonicAPI.Playlist("1", "test1"), SubsonicAPI.Playlist("2", "test2")],
)
@@ -249,12 +249,12 @@ def test_caching_get_playlist_then_details(cache_adapter: FilesystemAdapter):
# Simulate getting playlist details for id=1, then id=2
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS, ("1",), SubsonicAPI.PlaylistWithSongs("1", "test1"),
KEYS.PLAYLIST_DETAILS, "1", SubsonicAPI.PlaylistWithSongs("1", "test1"),
)
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS,
("2",),
"2",
SubsonicAPI.PlaylistWithSongs("2", "test2", songs=MOCK_SUBSONIC_SONGS),
)
@@ -270,9 +270,7 @@ def test_cache_cover_art(cache_adapter: FilesystemAdapter):
cache_adapter.get_cover_art_uri("pl_test1", "file")
# After ingesting the data, reading from the cache should give the exact same file.
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("pl_test1",), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(KEYS.COVER_ART_FILE, "pl_test1", MOCK_ALBUM_ART)
with open(cache_adapter.get_cover_art_uri("pl_test1", "file"), "rb") as cached:
with open(MOCK_ALBUM_ART, "rb") as expected:
assert cached.read() == expected.read()
@@ -281,27 +279,27 @@ def test_cache_cover_art(cache_adapter: FilesystemAdapter):
def test_invalidate_playlist(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
KEYS.PLAYLISTS,
(),
None,
[SubsonicAPI.Playlist("1", "test1"), SubsonicAPI.Playlist("2", "test2")],
)
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("pl_test1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, "pl_test1", MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS,
("2",),
"2",
SubsonicAPI.PlaylistWithSongs("2", "test2", cover_art="pl_2", songs=[]),
)
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("pl_2",), MOCK_ALBUM_ART2,
KEYS.COVER_ART_FILE, "pl_2", MOCK_ALBUM_ART2,
)
stale_uri_1 = cache_adapter.get_cover_art_uri("pl_test1", "file")
stale_uri_2 = cache_adapter.get_cover_art_uri("pl_2", "file")
cache_adapter.invalidate_data(KEYS.PLAYLISTS, ())
cache_adapter.invalidate_data(KEYS.PLAYLIST_DETAILS, ("2",))
cache_adapter.invalidate_data(KEYS.COVER_ART_FILE, ("pl_test1",))
cache_adapter.invalidate_data(KEYS.PLAYLISTS, None)
cache_adapter.invalidate_data(KEYS.PLAYLIST_DETAILS, "2")
cache_adapter.invalidate_data(KEYS.COVER_ART_FILE, "pl_test1")
# After invalidating the data, it should cache miss, but still have the old, stale,
# data.
@@ -336,16 +334,16 @@ def test_invalidate_playlist(cache_adapter: FilesystemAdapter):
def test_invalidate_song_file(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(KEYS.SONG, ("2",), MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG, "2", MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, "1", MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("s1", "song"), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, "s1", MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("1",), (None, MOCK_SONG_FILE))
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("2",), (None, MOCK_SONG_FILE2))
cache_adapter.ingest_new_data(KEYS.SONG_FILE, "1", (None, MOCK_SONG_FILE))
cache_adapter.ingest_new_data(KEYS.SONG_FILE, "2", (None, MOCK_SONG_FILE2))
cache_adapter.invalidate_data(KEYS.SONG_FILE, ("1",))
cache_adapter.invalidate_data(KEYS.COVER_ART_FILE, ("s1", "song"))
cache_adapter.invalidate_data(KEYS.SONG_FILE, "1")
cache_adapter.invalidate_data(KEYS.COVER_ART_FILE, "s1")
with pytest.raises(CacheMissError):
cache_adapter.get_song_uri("1", "file")
@@ -358,13 +356,13 @@ def test_invalidate_song_file(cache_adapter: FilesystemAdapter):
def test_malformed_song_path(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG, ("2",), MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, "1", MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG, "2", MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(
KEYS.SONG_FILE, ("1",), ("/malformed/path", MOCK_SONG_FILE)
KEYS.SONG_FILE, "1", ("/malformed/path", MOCK_SONG_FILE)
)
cache_adapter.ingest_new_data(
KEYS.SONG_FILE, ("2",), ("fine/path/song2.mp3", MOCK_SONG_FILE2)
KEYS.SONG_FILE, "2", ("fine/path/song2.mp3", MOCK_SONG_FILE2)
)
song_uri = cache_adapter.get_song_uri("1", "file")
@@ -375,36 +373,31 @@ def test_malformed_song_path(cache_adapter: FilesystemAdapter):
def test_get_cached_status(cache_adapter: FilesystemAdapter):
print('ohea1')
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG, "1", MOCK_SUBSONIC_SONGS[1])
assert (
cache_adapter.get_cached_status(cache_adapter.get_song_details("1"))
== SongCacheStatus.NOT_CACHED
)
print('ohea2')
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("1",), (None, MOCK_SONG_FILE))
cache_adapter.ingest_new_data(KEYS.SONG_FILE, "1", (None, MOCK_SONG_FILE))
assert (
cache_adapter.get_cached_status(cache_adapter.get_song_details("1"))
== SongCacheStatus.CACHED
)
print('ohea3')
cache_adapter.ingest_new_data(KEYS.SONG_FILE_PERMANENT, ("1",), None)
cache_adapter.ingest_new_data(KEYS.SONG_FILE_PERMANENT, "1", None)
assert (
cache_adapter.get_cached_status(cache_adapter.get_song_details("1"))
== SongCacheStatus.PERMANENTLY_CACHED
)
print('ohea4')
cache_adapter.invalidate_data(KEYS.SONG_FILE, ("1",))
cache_adapter.invalidate_data(KEYS.SONG_FILE, "1")
assert (
cache_adapter.get_cached_status(cache_adapter.get_song_details("1"))
== SongCacheStatus.CACHED_STALE
)
print('ohea5')
cache_adapter.delete_data(KEYS.SONG_FILE, ("1",))
cache_adapter.delete_data(KEYS.SONG_FILE, "1")
assert (
cache_adapter.get_cached_status(cache_adapter.get_song_details("1"))
== SongCacheStatus.NOT_CACHED
@@ -414,20 +407,20 @@ def test_get_cached_status(cache_adapter: FilesystemAdapter):
def test_delete_playlists(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS,
("1",),
"1",
SubsonicAPI.PlaylistWithSongs("1", "test1", cover_art="pl_1", songs=[]),
)
cache_adapter.ingest_new_data(
KEYS.PLAYLIST_DETAILS,
("2",),
"2",
SubsonicAPI.PlaylistWithSongs("2", "test1", cover_art="pl_2", songs=[]),
)
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("pl_1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, "pl_1", MOCK_ALBUM_ART,
)
# Deleting a playlist should get rid of it entirely.
cache_adapter.delete_data(KEYS.PLAYLIST_DETAILS, ("2",))
cache_adapter.delete_data(KEYS.PLAYLIST_DETAILS, "2")
try:
cache_adapter.get_playlist_details("2")
assert 0, "DID NOT raise CacheMissError"
@@ -435,7 +428,7 @@ def test_delete_playlists(cache_adapter: FilesystemAdapter):
assert e.partial_data is None
# Deleting a playlist with associated cover art should get rid of the cover art too.
cache_adapter.delete_data(KEYS.PLAYLIST_DETAILS, ("1",))
cache_adapter.delete_data(KEYS.PLAYLIST_DETAILS, "1")
try:
cache_adapter.get_cover_art_uri("pl_1", "file")
assert 0, "DID NOT raise CacheMissError"
@@ -454,17 +447,17 @@ def test_delete_playlists(cache_adapter: FilesystemAdapter):
def test_delete_song_data(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("1",), (None, MOCK_SONG_FILE))
cache_adapter.ingest_new_data(KEYS.SONG, "1", MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG_FILE, "1", (None, MOCK_SONG_FILE))
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("s1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, "s1", MOCK_ALBUM_ART,
)
music_file_path = cache_adapter.get_song_uri("1", "file")
cover_art_path = cache_adapter.get_cover_art_uri("s1", "file")
cache_adapter.delete_data(KEYS.SONG_FILE, ("1",))
cache_adapter.delete_data(KEYS.COVER_ART_FILE, ("s1",))
cache_adapter.delete_data(KEYS.SONG_FILE, "1")
cache_adapter.delete_data(KEYS.COVER_ART_FILE, "s1")
assert not Path(music_file_path).exists()
assert not Path(cover_art_path).exists()
@@ -486,8 +479,8 @@ def test_caching_get_genres(cache_adapter: FilesystemAdapter):
with pytest.raises(CacheMissError):
cache_adapter.get_genres()
cache_adapter.ingest_new_data(KEYS.SONG, ("2",), MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG, "2", MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, "1", MOCK_SUBSONIC_SONGS[1])
# Getting genres now should look at what's on the songs. This should cache miss, but
# still give some data.
@@ -500,7 +493,7 @@ def test_caching_get_genres(cache_adapter: FilesystemAdapter):
# After we ingest the actual list, it should be returned instead.
cache_adapter.ingest_new_data(
KEYS.GENRES,
(),
None,
[
SubsonicAPI.Genre("Bar", 10, 20),
SubsonicAPI.Genre("Baz", 10, 20),
@@ -515,7 +508,7 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
cache_adapter.get_song_details("1")
# Simulate the song details being retrieved from Subsonic.
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG, "1", MOCK_SUBSONIC_SONGS[1])
song = cache_adapter.get_song_details("1")
assert song.id == "1"
@@ -531,7 +524,7 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
# "Force refresh" the song details
cache_adapter.ingest_new_data(
KEYS.SONG,
("1",),
"1",
SubsonicAPI.Song(
"1",
title="Song 1",
@@ -564,7 +557,7 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
def test_caching_less_info(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
KEYS.SONG,
("1",),
"1",
SubsonicAPI.Song(
"1",
title="Song 1",
@@ -580,7 +573,7 @@ def test_caching_less_info(cache_adapter: FilesystemAdapter):
)
cache_adapter.ingest_new_data(
KEYS.SONG,
("1",),
"1",
SubsonicAPI.Song(
"1",
title="Song 1",
@@ -603,7 +596,7 @@ def test_caching_get_artists(cache_adapter: FilesystemAdapter):
# Ingest artists.
cache_adapter.ingest_new_data(
KEYS.ARTISTS,
(),
None,
[
SubsonicAPI.ArtistAndArtistInfo("1", "test1", album_count=3, albums=[]),
SubsonicAPI.ArtistAndArtistInfo("2", "test2", album_count=4),
@@ -618,7 +611,7 @@ def test_caching_get_artists(cache_adapter: FilesystemAdapter):
# Ingest a new artists list with one of them deleted.
cache_adapter.ingest_new_data(
KEYS.ARTISTS,
(),
None,
[
SubsonicAPI.ArtistAndArtistInfo("1", "test1", album_count=3),
SubsonicAPI.ArtistAndArtistInfo("3", "test3", album_count=8),
@@ -637,12 +630,12 @@ def test_caching_get_ignored_articles(cache_adapter: FilesystemAdapter):
cache_adapter.get_ignored_articles()
# Ingest ignored_articles.
cache_adapter.ingest_new_data(KEYS.IGNORED_ARTICLES, (), {"Foo", "Bar"})
cache_adapter.ingest_new_data(KEYS.IGNORED_ARTICLES, None, {"Foo", "Bar"})
artists = cache_adapter.get_ignored_articles()
assert {"Foo", "Bar"} == artists
# Ingest a new ignored articles set with one of them replaced.
cache_adapter.ingest_new_data(KEYS.IGNORED_ARTICLES, (), {"Foo", "Baz"})
cache_adapter.ingest_new_data(KEYS.IGNORED_ARTICLES, None, {"Foo", "Baz"})
artists = cache_adapter.get_ignored_articles()
assert {"Foo", "Baz"} == artists
@@ -654,7 +647,7 @@ def test_caching_get_artist(cache_adapter: FilesystemAdapter):
# Simulate the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.ARTIST,
("1",),
"1",
SubsonicAPI.ArtistAndArtistInfo(
"1",
"Bar",
@@ -689,7 +682,7 @@ def test_caching_get_artist(cache_adapter: FilesystemAdapter):
# Simulate "force refreshing" the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.ARTIST,
("1",),
"1",
SubsonicAPI.ArtistAndArtistInfo(
"1",
"Foo",
@@ -735,7 +728,7 @@ def test_caching_get_album(cache_adapter: FilesystemAdapter):
# Simulate the album details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.ALBUM,
("a1",),
"a1",
SubsonicAPI.Album(
"a1",
"foo",
@@ -770,7 +763,7 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
# Simulate the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.ARTIST,
("artist1",),
"artist1",
SubsonicAPI.ArtistAndArtistInfo(
"artist1",
"Bar",
@@ -790,23 +783,17 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
)
cache_adapter.ingest_new_data(
KEYS.ALBUM,
("1",),
"1",
SubsonicAPI.Album("1", "Foo", artist_id="artist1", cover_art="1"),
)
cache_adapter.ingest_new_data(
KEYS.ALBUM,
("2",),
"2",
SubsonicAPI.Album("2", "Bar", artist_id="artist1", cover_art="2"),
)
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("image",), MOCK_ALBUM_ART3,
)
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("1",), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
KEYS.COVER_ART_FILE, ("2",), MOCK_ALBUM_ART2,
)
cache_adapter.ingest_new_data(KEYS.COVER_ART_FILE, "image", MOCK_ALBUM_ART3)
cache_adapter.ingest_new_data(KEYS.COVER_ART_FILE, "1", MOCK_ALBUM_ART)
cache_adapter.ingest_new_data(KEYS.COVER_ART_FILE, "2", MOCK_ALBUM_ART2)
stale_artist = cache_adapter.get_artist("artist1")
stale_album_1 = cache_adapter.get_album("1")
@@ -815,7 +802,7 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
stale_cover_art_1 = cache_adapter.get_cover_art_uri("1", "file")
stale_cover_art_2 = cache_adapter.get_cover_art_uri("2", "file")
cache_adapter.invalidate_data(KEYS.ARTIST, ("artist1",))
cache_adapter.invalidate_data(KEYS.ARTIST, "artist1")
# Test the cascade of cache invalidations.
try:
@@ -869,7 +856,7 @@ def test_get_music_directory(cache_adapter: FilesystemAdapter):
# Simulate the directory details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.DIRECTORY,
(dir_id,),
dir_id,
SubsonicAPI.Directory(
dir_id,
title="foo",