Migrate get_directory to Adapter system

This commit is contained in:
Sumner Evans
2020-05-13 21:55:41 -06:00
parent 14ffb75063
commit d54dcdbd8b
21 changed files with 715 additions and 871 deletions

View File

@@ -1,10 +1,10 @@
from .adapter_base import (
Adapter,
AlbumSearchQuery,
CacheMissError,
CachingAdapter,
ConfigParamDescriptor,
SongCacheStatus,
AlbumSearchQuery,
)
from .manager import AdapterManager, Result, SearchResult

View File

@@ -18,6 +18,7 @@ from typing import (
from .api_objects import (
Album,
Artist,
Directory,
Genre,
Playlist,
PlaylistDetails,
@@ -398,7 +399,15 @@ class Adapter(abc.ABC):
"""
return False
# Misc
# Browse directories
@property
def can_get_directory(self) -> bool:
    """
    Whether the adapter currently supports :class:`get_directory`.

    Adapters that can browse a directory tree should override this (or set
    it as a class attribute) to ``True``.
    """
    return False
# Genres
@property
def can_get_genres(self) -> bool:
"""
@@ -594,6 +603,21 @@ class Adapter(abc.ABC):
"""
raise self._check_can_error("get_album")
def get_directory(self, directory_id: str) -> Directory:
    """
    Return a Directory object representing the song files and directories in the
    given directory. This may not make sense for your adapter (for example, if
    there's no actual underlying filesystem). In that case, make sure to set
    :class:`can_get_directory` to ``False``.

    :param directory_id: The directory to retrieve. If the special value ``"root"``
        is given, the adapter should list all of the directories at the root of the
        filesystem tree.
    :returns: A :class:`sublime.adapter.api_objects.Directory` object whose
        ``children`` are the :class:`sublime.adapter.api_objects.Directory` and
        :class:`sublime.adapter.api_objects.Song` objects in the given directory.
    """
    raise self._check_can_error("get_directory")
def get_genres(self) -> Sequence[Genre]:
"""
Get a list of the genres known to the adapter.
@@ -683,6 +707,7 @@ class CachingAdapter(Adapter):
ARTIST = "artist"
ARTISTS = "artists"
COVER_ART_FILE = "cover_art_file"
DIRECTORY = "directory"
GENRES = "genres"
IGNORED_ARTICLES = "ignored_articles"
PLAYLIST_DETAILS = "get_playlist_details"

View File

@@ -16,6 +16,7 @@ from typing import (
Optional,
Sequence,
TypeVar,
Union,
)
from fuzzywuzzy import fuzz
@@ -67,10 +68,10 @@ class Directory(abc.ABC):
id: str
title: Optional[str]
parent: Optional["Directory"]
children: Sequence[Union["Directory", "Song"]]
class Song(abc.ABC):
# TODO make these cross-reference the corresponding Album / Artist / Directory
id: str
title: str
parent: Directory
@@ -97,7 +98,7 @@ class Song(abc.ABC):
created: Optional[datetime]
starred: Optional[datetime]
type: Optional[MediaType]
# TODO trim down, make another data structure for directory?
# TODO trim down
# TODO remove distinction between Playlist and PlaylistDetails

View File

@@ -1,3 +1,4 @@
import hashlib
import logging
import shutil
import threading
@@ -84,6 +85,7 @@ class FilesystemAdapter(CachingAdapter):
can_get_albums = True
can_get_album = True
can_get_ignored_articles = True
can_get_directory = True
can_get_genres = True
can_search = True
@@ -113,7 +115,8 @@ class FilesystemAdapter(CachingAdapter):
# Determine if the adapter has ingested data for this key before, and if
# not, cache miss.
if not models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == cache_key
models.CacheInfo.valid == True, # noqa: 712
models.CacheInfo.cache_key == cache_key,
):
raise CacheMissError(partial_data=result)
return result
@@ -134,49 +137,27 @@ class FilesystemAdapter(CachingAdapter):
cache_info = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == cache_key,
models.CacheInfo.params_hash == util.params_hash(id),
models.CacheInfo.valid == True, # noqa: 712
)
if not cache_info:
raise CacheMissError(partial_data=obj)
return obj
def _get_download_filename(
self,
filename: Path,
params: Tuple[Any],
cache_key: CachingAdapter.CachedDataKey,
) -> str:
if not filename.exists():
# Handle the case that this is the ground truth adapter.
if self.is_cache:
raise CacheMissError()
else:
raise Exception(f"File for {cache_key} {params} does not exist.")
if not self.is_cache:
return str(filename)
# If we haven't ingested data for this file before, or it's been invalidated,
# raise a CacheMissError with the filename.
cache_info = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == cache_key,
models.CacheInfo.params_hash == util.params_hash(*params),
)
if not cache_info:
raise CacheMissError(partial_data=str(filename))
return str(filename)
# Data Retrieval Methods
# ==================================================================================
def get_cached_status(self, song: API.Song) -> SongCacheStatus:
song = models.Song.get_or_none(models.Song.id == song.id)
if not song:
song_model = models.Song.get_or_none(models.Song.id == song.id)
if not song_model:
return SongCacheStatus.NOT_CACHED
cache_path = self.music_dir.joinpath(song.path)
if cache_path.exists():
# TODO check if path is permanently cached
return SongCacheStatus.CACHED
try:
file = song_model.file
if file.valid and self.music_dir.joinpath(file.file_hash).exists():
# TODO check if path is permanently cached
return SongCacheStatus.CACHED
except Exception:
pass
return SongCacheStatus.NOT_CACHED
@@ -193,14 +174,17 @@ class FilesystemAdapter(CachingAdapter):
)
def get_cover_art_uri(self, cover_art_id: str, scheme: str) -> str:
# TODO cache by the content of the file (need to see if cover art ID is
# duplicated a lot)?
params_hash = util.params_hash(cover_art_id)
return self._get_download_filename(
self.cover_art_dir.joinpath(params_hash),
(cover_art_id,),
CachingAdapter.CachedDataKey.COVER_ART_FILE,
cover_art = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == CachingAdapter.CachedDataKey.COVER_ART_FILE,
models.CacheInfo.params_hash == util.params_hash(cover_art_id),
)
if cover_art:
filename = self.cover_art_dir.joinpath(str(cover_art.file_hash))
if cover_art.valid and filename.exists():
return str(filename)
raise CacheMissError(partial_data=str(filename))
raise CacheMissError()
def get_song_uri(self, song_id: str, scheme: str, stream: bool = False) -> str:
song = models.Song.get_or_none(models.Song.id == song_id)
@@ -210,11 +194,17 @@ class FilesystemAdapter(CachingAdapter):
else:
raise Exception(f"Song {song_id} does not exist.")
return self._get_download_filename(
self.music_dir.joinpath(song.path),
(song_id,),
CachingAdapter.CachedDataKey.SONG_FILE,
)
try:
if (song_file := song.file) and (
filename := self.music_dir.joinpath(str(song_file.file_hash))
):
if song_file.valid and filename.exists():
return str(filename)
raise CacheMissError(partial_data=str(filename))
except models.CacheInfo.DoesNotExist:
pass
raise CacheMissError()
def get_song_details(self, song_id: str) -> API.Song:
return self._get_object_details(
@@ -239,7 +229,7 @@ class FilesystemAdapter(CachingAdapter):
# TODO: deal with cache invalidation
sql_query = models.Album.select()
Type = AlbumSearchQuery.Type)
Type = AlbumSearchQuery.Type
if query.type == Type.GENRE:
assert query.genre
genre_name = genre.name if (genre := query.genre) else None
@@ -264,6 +254,7 @@ class FilesystemAdapter(CachingAdapter):
if not models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == CachingAdapter.CachedDataKey.ALBUMS,
models.CacheInfo.params_hash == util.params_hash(query),
models.CacheInfo.valid == True, # noqa: 712
):
raise CacheMissError(partial_data=sql_query)
@@ -289,6 +280,20 @@ class FilesystemAdapter(CachingAdapter):
)
)
def get_directory(self, directory_id: str) -> API.Directory:
# ohea
result = list(model.select())
if self.is_cache and not ignore_cache_miss:
# Determine if the adapter has ingested data for this key before, and if
# not, cache miss.
if not models.CacheInfo.get_or_none(
models.CacheInfo.valid == True, # noqa: 712
models.CacheInfo.cache_key == cache_key,
):
raise CacheMissError(partial_data=result)
return result
pass
def get_genres(self) -> Sequence[API.Genre]:
return self._get_list(models.Genre, CachingAdapter.CachedDataKey.GENRES)
@@ -349,7 +354,7 @@ class FilesystemAdapter(CachingAdapter):
data_key: CachingAdapter.CachedDataKey,
params: Tuple[Any, ...],
data: Any,
):
) -> Any:
# TODO: this entire function is not exactly efficient due to the nested
# dependencies and everything. I'm not sure how to improve it, and I'm not sure
# if it needs improving at this point.
@@ -358,11 +363,25 @@ class FilesystemAdapter(CachingAdapter):
# TODO may need to remove reliance on asdict in order to support more backends.
params_hash = util.params_hash(*params)
models.CacheInfo.insert(
logging.debug(
f"_do_ingest_new_data params={params} params_hash={params_hash} data_key={data_key} data={data}" # noqa: 502
)
now = datetime.now()
cache_info, cache_info_created = models.CacheInfo.get_or_create(
cache_key=data_key,
params_hash=params_hash,
last_ingestion_time=datetime.now(),
).on_conflict_replace().execute()
defaults={
"cache_key": data_key,
"params_hash": params_hash,
"last_ingestion_time": now,
},
)
cache_info.last_ingestion_time = now
if not cache_info_created:
cache_info.valid = True
cache_info.save()
cover_art_key = CachingAdapter.CachedDataKey.COVER_ART_FILE
def setattrs(obj: Any, data: Dict[str, Any]):
for k, v in data.items():
@@ -403,7 +422,13 @@ class FilesystemAdapter(CachingAdapter):
"songs": [
ingest_song_data(s, fill_album=False) for s in api_album.songs or []
],
"_cover_art": self._do_ingest_new_data(
cover_art_key, params=(api_album.cover_art, "album"), data=None
)
if api_album.cover_art
else None,
}
del album_data["cover_art"]
if exclude_artist:
del album_data["artist"]
@@ -439,7 +464,15 @@ class FilesystemAdapter(CachingAdapter):
ingest_album_data(a, exclude_artist=True)
for a in api_artist.albums or []
],
"_artist_image_url": self._do_ingest_new_data(
cover_art_key,
params=(api_artist.artist_image_url, "artist"),
data=None,
)
if api_artist.artist_image_url
else None,
}
del artist_data["artist_image_url"]
del artist_data["similar_artists"]
artist, created = models.Artist.get_or_create(
@@ -460,7 +493,15 @@ class FilesystemAdapter(CachingAdapter):
"parent": ingest_directory_data(d) if (d := api_song.parent) else None,
"genre": ingest_genre_data(g) if (g := api_song.genre) else None,
"artist": ingest_artist_data(ar) if (ar := api_song.artist) else None,
"_cover_art": self._do_ingest_new_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE,
params=(api_song.cover_art,),
data=None,
)
if api_song.cover_art
else None,
}
del song_data["cover_art"]
if fill_album:
# Don't incurr the overhead of creating an album if we are going to turn
@@ -492,7 +533,14 @@ class FilesystemAdapter(CachingAdapter):
else ()
)
],
"_cover_art": self._do_ingest_new_data(
cover_art_key, (api_playlist.cover_art,), None
)
if api_playlist.cover_art
else None,
}
del playlist_data["cover_art"]
playlist, playlist_created = models.Playlist.get_or_create(
id=playlist_data["id"], defaults=playlist_data
)
@@ -504,6 +552,14 @@ class FilesystemAdapter(CachingAdapter):
return playlist
def compute_file_hash(filename: str) -> str:
    """Return the SHA-1 hex digest of the file's contents.

    Reads in 8 KiB chunks so large media files are never loaded into
    memory all at once.
    """
    digest = hashlib.sha1()
    with open(filename, "rb") as handle:
        for chunk in iter(lambda: handle.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()
if data_key == CachingAdapter.CachedDataKey.ALBUM:
ingest_album_data(data)
@@ -523,8 +579,19 @@ class FilesystemAdapter(CachingAdapter):
).execute()
elif data_key == CachingAdapter.CachedDataKey.COVER_ART_FILE:
# ``data`` is the filename of the tempfile in this case
shutil.copy(str(data), str(self.cover_art_dir.joinpath(params_hash)))
cache_info.file_id = params[0]
if data is None:
cache_info.save()
return cache_info
file_hash = compute_file_hash(data)
cache_info.file_hash = file_hash
cache_info.save()
# Copy the actual cover art file
shutil.copy(str(data), str(self.cover_art_dir.joinpath(file_hash)))
return cache_info
elif data_key == CachingAdapter.CachedDataKey.GENRES:
for g in data:
@@ -566,10 +633,23 @@ class FilesystemAdapter(CachingAdapter):
ingest_song_data(data)
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE:
relative_path = models.Song.get_by_id(params[0]).path
absolute_path = self.music_dir.joinpath(relative_path)
absolute_path.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(str(data), str(absolute_path))
cache_info.file_id = params[0]
if data is None:
cache_info.save()
return cache_info
file_hash = compute_file_hash(data)
cache_info.file_hash = file_hash
cache_info.save()
# Copy the actual song file
shutil.copy(str(data), str(self.music_dir.joinpath(file_hash)))
song = models.Song.get_by_id(params[0])
song.file = cache_info
song.save()
return cache_info
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE_PERMANENT:
raise NotImplementedError()
@@ -577,9 +657,13 @@ class FilesystemAdapter(CachingAdapter):
def _do_invalidate_data(
self, data_key: CachingAdapter.CachedDataKey, params: Tuple[Any, ...],
):
models.CacheInfo.delete().where(
params_hash = util.params_hash(*params)
logging.debug(
f"_do_invalidate_data params={params} params_hash={params_hash} data_key={data_key}" # noqa: 502
)
models.CacheInfo.update({"valid": False}).where(
models.CacheInfo.cache_key == data_key,
models.CacheInfo.params_hash == util.params_hash(*params),
models.CacheInfo.params_hash == params_hash,
).execute()
cover_art_cache_key = CachingAdapter.CachedDataKey.COVER_ART_FILE
@@ -608,33 +692,49 @@ class FilesystemAdapter(CachingAdapter):
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE:
# Invalidate the corresponding cover art.
if song := models.Song.get_or_none(models.Song.id == params[0]):
self._do_invalidate_data(cover_art_cache_key, (song.cover_art,))
self._do_invalidate_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE, (song.cover_art,)
)
def _do_delete_data(
self, data_key: CachingAdapter.CachedDataKey, params: Tuple[Any, ...],
):
# Invalidate it.
self._do_invalidate_data(data_key, params)
cover_art_cache_key = CachingAdapter.CachedDataKey.COVER_ART_FILE
params_hash = util.params_hash(*params)
logging.debug(
f"_do_delete_data params={params} params_hash={params_hash} data_key={data_key}" # noqa: 502
)
if data_key == CachingAdapter.CachedDataKey.COVER_ART_FILE:
cover_art_file = self.cover_art_dir.joinpath(util.params_hash(*params))
cover_art_file.unlink(missing_ok=True)
cache_info = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == data_key,
models.CacheInfo.params_hash == params_hash,
)
if cache_info:
cover_art_file = self.cover_art_dir.joinpath(str(cache_info.file_hash))
cover_art_file.unlink(missing_ok=True)
cache_info.delete()
elif data_key == CachingAdapter.CachedDataKey.PLAYLIST_DETAILS:
# Delete the playlist and corresponding cover art.
if playlist := models.Playlist.get_or_none(models.Playlist.id == params[0]):
if cover_art := playlist.cover_art:
self._do_delete_data(cover_art_cache_key, (cover_art,))
self._do_delete_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE, (cover_art,),
)
playlist.delete_instance()
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE:
if song := models.Song.get_or_none(models.Song.id == params[0]):
# Delete the song
music_filename = self.music_dir.joinpath(song.path)
music_filename.unlink(missing_ok=True)
cache_info = models.CacheInfo.get_or_none(
models.CacheInfo.cache_key == data_key,
models.CacheInfo.params_hash == params_hash,
)
if cache_info:
cover_art_file = self.music_dir.joinpath(str(cache_info.file_hash))
cover_art_file.unlink(missing_ok=True)
cache_info.delete()
# Delete the corresponding cover art.
if cover_art := song.cover_art:
self._do_delete_data(cover_art_cache_key, (cover_art,))
models.CacheInfo.delete().where(
models.CacheInfo.cache_key == data_key,
models.CacheInfo.params_hash == params_hash,
).execute()

View File

@@ -1,6 +1,8 @@
from typing import Optional
from peewee import (
AutoField,
BooleanField,
CompositeKey,
ForeignKeyField,
IntegerField,
Model,
@@ -26,9 +28,19 @@ class BaseModel(Model):
database = database
# class CachedFile(BaseModel):
# id = TextField(unique=True, primary_key=True)
# filename = TextField(null=True)
class CacheInfo(BaseModel):
    # Bookkeeping for every piece of ingested data: what kind of data it is,
    # which request parameters produced it, and (for files) where it lives.
    # Surrogate auto-incrementing primary key.
    id = AutoField()
    # Set to False on invalidation; rows are kept (rather than deleted) so the
    # stale data/file can still be served as partial data on a cache miss.
    valid = BooleanField(default=True)
    # Which CachingAdapter.CachedDataKey this row tracks.
    cache_key = CacheConstantsField()
    # Hash of the request parameters that produced the cached data.
    params_hash = TextField()
    last_ingestion_time = TzDateTimeField(null=False)
    # ID of the remote object the cached file belongs to (e.g. cover art ID).
    file_id = TextField(null=True)
    # Content hash of the downloaded file; also used as its on-disk filename.
    file_hash = TextField(null=True)
    # TODO some sort of expiry?
    class Meta:
        # (cache_key, params_hash) pairs must be unique.
        indexes = ((("cache_key", "params_hash"), True),)
class Genre(BaseModel):
@@ -41,12 +53,20 @@ class Artist(BaseModel):
id = TextField(unique=True, primary_key=True)
name = TextField(null=True)
album_count = IntegerField(null=True)
artist_image_url = TextField(null=True)
starred = TzDateTimeField(null=True)
biography = TextField(null=True)
music_brainz_id = TextField(null=True)
last_fm_url = TextField(null=True)
_artist_image_url = ForeignKeyField(CacheInfo, null=True)
@property
def artist_image_url(self) -> Optional[str]:
    # Expose the cached file's remote ID under the old attribute name.
    # The broad except is deliberate best-effort: it covers a null FK
    # (AttributeError) and presumably a dangling FK row as well — in either
    # case there is simply no image URL to report.
    try:
        return self._artist_image_url.file_id
    except Exception:
        return None
@property
def similar_artists(self) -> Query:
return SimilarArtist.select().where(SimilarArtist.artist == self.id)
@@ -64,7 +84,6 @@ class SimilarArtist(BaseModel):
class Album(BaseModel):
id = TextField(unique=True, primary_key=True)
cover_art = TextField(null=True)
created = TzDateTimeField(null=True)
duration = DurationField(null=True)
name = TextField(null=True)
@@ -76,6 +95,15 @@ class Album(BaseModel):
artist = ForeignKeyField(Artist, null=True, backref="albums")
genre = ForeignKeyField(Genre, null=True, backref="albums")
_cover_art = ForeignKeyField(CacheInfo, null=True)
@property
def cover_art(self) -> Optional[str]:
try:
return self._cover_art.file_id
except Exception:
return None
class IgnoredArticle(BaseModel):
name = TextField(unique=True, primary_key=True)
@@ -91,19 +119,32 @@ class Song(BaseModel):
id = TextField(unique=True, primary_key=True)
title = TextField()
duration = DurationField()
path = TextField()
album = ForeignKeyField(Album, null=True, backref="songs")
artist = ForeignKeyField(Artist, null=True, backref="songs")
parent = ForeignKeyField(Directory, null=True, backref="songs")
genre = ForeignKeyField(Genre, null=True, backref="songs")
# figure out how to deal with different transcodings, etc.
file = ForeignKeyField(CacheInfo, null=True)
_cover_art = ForeignKeyField(CacheInfo, null=True)
@property
def cover_art(self) -> Optional[str]:
try:
return self._cover_art.file_id
except Exception:
return None
track = IntegerField(null=True)
year = IntegerField(null=True)
cover_art = TextField(null=True) # TODO: fk?
path = TextField()
play_count = TextField(null=True)
created = TzDateTimeField(null=True)
starred = TzDateTimeField(null=True)
# TODO do I need any of these?
# size: Optional[int] = None
# content_type: Optional[str] = None
# suffix: Optional[str] = None
@@ -120,15 +161,9 @@ class Song(BaseModel):
# original_height: Optional[int] = None
class CacheInfo(BaseModel):
cache_key = CacheConstantsField()
params_hash = TextField()
last_ingestion_time = TzDateTimeField(null=False)
# TODO some sort of expiry?
class Meta:
primary_key = CompositeKey("cache_key", "params_hash")
class DirectoryXChildren(BaseModel):
directory_id = TextField()
order = IntegerField()
class Playlist(BaseModel):
@@ -141,12 +176,18 @@ class Playlist(BaseModel):
created = TzDateTimeField(null=True)
changed = TzDateTimeField(null=True)
public = BooleanField(null=True)
cover_art = TextField(null=True) # TODO: fk
# cover_art_file = ForeignKeyField(CachedFile, null=True)
songs = SortedManyToManyField(Song, backref="playlists")
_cover_art = ForeignKeyField(CacheInfo, null=True)
@property
def cover_art(self) -> Optional[str]:
try:
return self._cover_art.file_id
except Exception:
return None
class Version(BaseModel):
id = IntegerField(unique=True, primary_key=True)
@@ -177,6 +218,7 @@ ALL_TABLES = (
Artist,
CacheInfo,
Directory,
DirectoryXChildren,
Genre,
IgnoredArticle,
Playlist,

View File

@@ -52,26 +52,28 @@ class SortedManyToManyQuery(ManyToManyQuery):
accessor = self._accessor
src_id = getattr(self._instance, self._src_attr)
if isinstance(value, SelectQuery):
raise NotImplementedError("Can't use a select query here")
# query = value.columns(Value(src_id), accessor.dest_fk.rel_field)
# accessor.through_model.insert_from(
# fields=[accessor.src_fk, accessor.dest_fk],
# query=query).execute()
else:
value = ensure_tuple(value)
if not value:
return
assert not isinstance(value, SelectQuery)
# TODO DEAD CODE
# if isinstance(value, SelectQuery):
# raise NotImplementedError("Can't use a select query here")
# # query = value.columns(Value(src_id), accessor.dest_fk.rel_field)
# # accessor.through_model.insert_from(
# # fields=[accessor.src_fk, accessor.dest_fk],
# # query=query).execute()
# else:
value = ensure_tuple(value)
if not value:
return
inserts = [
{
accessor.src_fk.name: src_id,
accessor.dest_fk.name: rel_id,
"position": i,
}
for i, rel_id in enumerate(self._id_list(value))
]
accessor.through_model.insert_many(inserts).execute()
inserts = [
{
accessor.src_fk.name: src_id,
accessor.dest_fk.name: rel_id,
"position": i,
}
for i, rel_id in enumerate(self._id_list(value))
]
accessor.through_model.insert_many(inserts).execute()
# TODO probably don't need
# def remove(self, value: Any) -> Any:

View File

@@ -4,6 +4,7 @@ import threading
from concurrent.futures import Future, ThreadPoolExecutor
from dataclasses import dataclass
from datetime import timedelta
from functools import partial
from pathlib import Path
from time import sleep
from typing import (
@@ -35,6 +36,7 @@ from .adapter_base import (
from .api_objects import (
Album,
Artist,
Directory,
Genre,
Playlist,
PlaylistDetails,
@@ -114,8 +116,9 @@ class Result(Generic[T]):
assert 0, "AdapterManager.Result had neither _data nor _future member!"
except Exception as e:
if self._default_value:
self._data = self._default_value
raise e
return self._default_value
else:
raise e
def add_done_callback(self, fn: Callable, *args):
"""Attaches the callable ``fn`` to the future."""
@@ -160,6 +163,7 @@ class AdapterManager:
def __post_init__(self):
self._download_dir = tempfile.TemporaryDirectory()
self.download_path = Path(self._download_dir.name)
# TODO can we use the threadpool executor max workers for this
self.download_limiter_semaphore = threading.Semaphore(
self.concurrent_download_limit
)
@@ -522,6 +526,10 @@ class AdapterManager:
def can_get_artist() -> bool:
return AdapterManager._any_adapter_can_do("get_artist")
@staticmethod
def can_get_directory() -> bool:
    """Whether any configured adapter can currently service ``get_directory``."""
    return AdapterManager._any_adapter_can_do("get_directory")
@staticmethod
def can_get_play_queue() -> bool:
return AdapterManager._ground_truth_can_do("get_play_queue")
@@ -850,8 +858,9 @@ class AdapterManager:
return
for song_id in song_ids:
song = AdapterManager.get_song_details(song_id).result()
AdapterManager._instance.caching_adapter.delete_data(
CachingAdapter.CachedDataKey.SONG_FILE, (song_id,)
CachingAdapter.CachedDataKey.SONG_FILE, (song.id,)
)
on_song_delete(song_id)
@@ -895,28 +904,33 @@ class AdapterManager:
before_download=before_download,
cache_key=CachingAdapter.CachedDataKey.ARTISTS,
).result()
ignored_articles: Set[str] = set()
if AdapterManager._any_adapter_can_do("get_ignored_articles"):
try:
ignored_articles = AdapterManager._get_from_cache_or_ground_truth(
"get_ignored_articles",
use_ground_truth_adapter=force,
cache_key=CachingAdapter.CachedDataKey.IGNORED_ARTICLES,
).result()
except Exception:
logging.exception("Failed to retrieve ignored_articles")
def strip_ignored_articles(artist: Artist) -> str:
name_parts = artist.name.split()
if name_parts[0] in ignored_articles:
name_parts = name_parts[1:]
return " ".join(name_parts)
return sorted(artists, key=strip_ignored_articles)
return sorted(
artists, key=partial(AdapterManager._strip_ignored_articles, force)
)
return Result(do_get_artists)
@staticmethod
def _get_ignored_articles(force: bool) -> Set[str]:
    """
    Return the set of articles (e.g. "The") to strip when sorting artist names.

    Best-effort: returns an empty set when no adapter supports
    ``get_ignored_articles`` or when retrieval fails (the failure is logged).

    :param force: if ``True``, bypass the cache and ask the ground truth adapter.
    """
    if not AdapterManager._any_adapter_can_do("get_ignored_articles"):
        return set()
    try:
        return AdapterManager._get_from_cache_or_ground_truth(
            "get_ignored_articles",
            use_ground_truth_adapter=force,
            cache_key=CachingAdapter.CachedDataKey.IGNORED_ARTICLES,
        ).result()
    except Exception:
        logging.exception("Failed to retrieve ignored_articles")
        return set()
@staticmethod
def _strip_ignored_articles(force: bool, artist: Artist) -> str:
    """
    Return the artist's name with a leading ignored article (e.g. "The")
    removed, for use as a sort key.

    :param force: passed through to :class:`_get_ignored_articles` to force
        retrieval from the ground truth adapter.
    :param artist: the artist whose name should be normalized.
    """
    name = artist.name
    # maxsplit=1 keeps the rest of the name intact. A single-word name
    # yields only one part; the previous two-target unpacking raised
    # ValueError in that case.
    parts = name.split(maxsplit=1)
    if len(parts) == 2 and parts[0] in AdapterManager._get_ignored_articles(force):
        return parts[1]
    return name
@staticmethod
def get_artist(
artist_id: str,
@@ -973,6 +987,21 @@ class AdapterManager:
cache_key=CachingAdapter.CachedDataKey.ALBUM,
)
# Browse
@staticmethod
def get_directory(
    directory_id: str,
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
) -> Result[Directory]:
    """
    Retrieve the given directory, serving from the caching adapter when
    possible and falling back to the ground truth adapter.

    :param directory_id: the directory to retrieve (``"root"`` for the top
        of the tree).
    :param before_download: called before a network download is started.
    :param force: if ``True``, go straight to the ground truth adapter.
    """
    return AdapterManager._get_from_cache_or_ground_truth(
        "get_directory",
        directory_id,
        before_download=before_download,
        use_ground_truth_adapter=force,
        cache_key=CachingAdapter.CachedDataKey.DIRECTORY,
    )
@staticmethod
def get_play_queue() -> Result[Optional[PlayQueue]]:
assert AdapterManager._instance

View File

@@ -5,6 +5,7 @@ import multiprocessing
import os
import pickle
import random
from dataclasses import asdict
from datetime import datetime, timedelta
from pathlib import Path
from time import sleep
@@ -24,7 +25,7 @@ from urllib.parse import urlencode, urlparse
import requests
from .api_objects import Response
from .api_objects import Directory, Response, Song
from .. import Adapter, AlbumSearchQuery, api_objects as API, ConfigParamDescriptor
@@ -61,6 +62,7 @@ class SubsonicAdapter(Adapter):
self.hostname = config["server_address"]
self.username = config["username"]
self.password = config["password"]
# TODO: SSID stuff
self.disable_cert_verify = config.get("disable_cert_verify")
self.is_shutting_down = False
@@ -121,6 +123,7 @@ class SubsonicAdapter(Adapter):
can_get_ignored_articles = True
can_get_albums = True
can_get_album = True
can_get_directory = True
can_get_genres = True
can_get_play_queue = True
can_save_play_queue = True
@@ -184,7 +187,7 @@ class SubsonicAdapter(Adapter):
)
logging.info(
"SUBSONIC_ADAPTER_DEBUG_DELAY enabled. Pausing for {delay} seconds"
f"SUBSONIC_ADAPTER_DEBUG_DELAY enabled. Pausing for {delay} seconds"
)
sleep(delay)
@@ -235,7 +238,7 @@ class SubsonicAdapter(Adapter):
)
raise Exception(f"Subsonic API Error #{code}: {message}")
logging.debug(f"Response from {url}", subsonic_response)
logging.debug(f"Response from {url}: {subsonic_response}")
return Response.from_dict(subsonic_response)
# Helper Methods for Testing
@@ -422,6 +425,28 @@ class SubsonicAdapter(Adapter):
assert album, f"Error getting album {album_id}"
return album
def _get_indexes(self) -> API.Directory:
    """
    Fetch the server's artist indexes (``getIndexes``) and present them as a
    synthetic root directory whose children are the indexed directories.

    Side effect: pickles the server's ignored articles string to
    ``self.ignored_articles_cache_file``.
    """
    indexes = self._get_json(self._make_url("getIndexes")).indexes
    assert indexes, "Error getting indexes"
    with open(self.ignored_articles_cache_file, "wb+") as f:
        pickle.dump(indexes.ignored_articles, f)
    root_dir_items: List[Union[Dict[str, Any], Directory, Song]] = []
    for index in indexes.index:
        # TODO figure out a more efficient way of doing this.
        root_dir_items += index.artist
    return Directory(id="root", _children=root_dir_items, _is_root=True)
def get_directory(self, directory_id: str) -> API.Directory:
if directory_id == "root":
return self._get_indexes()
directory = self._get_json(
self._make_url("getMusicDirectory"), id=directory_id
).directory
assert directory, f"Error getting directory {directory_id}"
return directory
def get_genres(self) -> Sequence[API.Genre]:
if genres := self._get_json(self._make_url("getGenres")).genres:
return genres.genre

View File

@@ -4,7 +4,7 @@ These are the API objects that are returned by Subsonic.
from dataclasses import asdict, dataclass, field
from datetime import datetime, timedelta
from typing import List, Optional
from typing import Any, Dict, List, Optional, Union
import dataclasses_json
from dataclasses_json import (
@@ -122,12 +122,33 @@ class ArtistInfo:
self.artist_image_url = ""
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Directory(SublimeAPI.Directory):
class Directory(DataClassJsonMixin, SublimeAPI.Directory):
id: str
title: Optional[str] = None
parent: Optional["Directory"] = None
title: Optional[str] = field(default=None, metadata=config(field_name="name"))
parent: Optional["Directory"] = field(init=False)
_parent: Optional[str] = field(default=None, metadata=config(field_name="parent"))
_is_root: bool = False
children: List[Union["Directory", "Song"]] = field(default_factory=list, init=False)
_children: List[Union[Dict[str, Any], "Directory", "Song"]] = field(
default_factory=list, metadata=config(field_name="child")
)
def __post_init__(self):
self.parent = (
Directory(self._parent or "root", _is_root=(self._parent is None))
if not self._is_root
else None
)
self.children = (
self._children
if self._is_root
else [
Directory.from_dict(c) if c.get("isDir") else Song.from_dict(c)
for c in self._children
]
)
@dataclass_json(letter_case=LetterCase.CAMEL)
@@ -245,6 +266,13 @@ class PlayQueue(SublimeAPI.PlayQueue):
self.current_index = [int(s.id) for s in self.songs].index(cur)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Index:
name: str
artist: List[Directory] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class IndexID3:
@@ -271,6 +299,13 @@ class Genres:
genre: List[Genre] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Indexes:
    # Articles to ignore when sorting artist names (the adapter pickles this
    # string for later use).
    ignored_articles: str
    index: List[Index] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Playlists:
@@ -300,12 +335,19 @@ class Response(DataClassJsonMixin):
)
album: Optional[Album] = None
directory: Optional[Directory] = None
genres: Optional[Genres] = None
indexes: Optional[Indexes] = None
playlist: Optional[PlaylistWithSongs] = None
playlists: Optional[Playlists] = None
play_queue: Optional[PlayQueue] = field(
default=None, metadata=config(field_name="playQueue")
)
song: Optional[Song] = None
search_result: Optional[SearchResult3] = field(

View File

@@ -1075,24 +1075,29 @@ class SublimeMusicApp(Gtk.Application):
# Download current song and prefetch songs. Only do this if
# download_on_stream is True and always_stream is off.
def on_song_download_complete(song_id: str):
if (
order_token != self.song_playing_order_token
or not self.app_config.state.playing
or not self.app_config.state.current_song
or self.app_config.state.current_song.id != song_id
):
if order_token != self.song_playing_order_token:
return
# Switch to the local media if the player can hotswap without lag.
# For example, MPV can is barely noticable whereas there's quite a delay
# with Chromecast.
assert self.player
if self.player.can_hotswap_source:
self.player.play_media(
AdapterManager.get_song_filename_or_stream(song)[0],
self.app_config.state.song_progress,
song,
)
# Hotswap to the downloaded song.
if (
# TODO allow hotswap if not playing. This requires being able to
# replace the currently playing URI with something different.
self.app_config.state.playing
and self.app_config.state.current_song
and self.app_config.state.current_song.id == song_id
):
# Switch to the local media if the player can hotswap without lag.
# For example, MPV can is barely noticable whereas there's quite a
# delay with Chromecast.
assert self.player
if self.player.can_hotswap_source:
self.player.play_media(
AdapterManager.get_song_filename_or_stream(song)[0],
self.app_config.state.song_progress,
song,
)
# Always update the window
self.update_window()
if (

View File

@@ -1,6 +1,3 @@
import glob
import hashlib
import itertools
import json
import logging
import os
@@ -10,17 +7,14 @@ import threading
from collections import defaultdict
from concurrent.futures import Future, ThreadPoolExecutor
from datetime import datetime
from enum import Enum, EnumMeta
from functools import lru_cache
from enum import EnumMeta
from pathlib import Path
from time import sleep
from typing import (
Any,
Callable,
DefaultDict,
Dict,
Generic,
Iterable,
List,
Optional,
Set,
@@ -28,9 +22,6 @@ from typing import (
Union,
)
import requests
from fuzzywuzzy import fuzz
try:
import gi
@@ -46,12 +37,10 @@ except Exception:
)
networkmanager_imported = False
from .adapters import AdapterManager, api_objects as API, Result as AdapterResult
from .config import AppConfiguration
from .server import Server
from .server.api_object import APIObject
from .server.api_objects import (
AlbumID3,
AlbumWithSongsID3,
Artist,
ArtistID3,
@@ -82,96 +71,6 @@ class Singleton(type):
return None
class SongCacheStatus(Enum):
    """Enumeration of the possible cache states of a song's file."""

    NOT_CACHED = 0
    CACHED = 1
    PERMANENTLY_CACHED = 2
    DOWNLOADING = 3
@lru_cache(maxsize=8192)
def similarity_ratio(query: str, string: str) -> int:
    """Case-insensitive fuzzy-match score between ``query`` and ``string``.

    Delegates to :func:`fuzzywuzzy.fuzz.partial_ratio` after lower-casing
    both inputs. This is called very frequently during searches, so results
    are memoized via :func:`functools.lru_cache`.

    :param query: the query string
    :param string: the candidate string to score against the query
    """
    return fuzz.partial_ratio(query.lower(), string.lower())
class SearchResult:
    """
    An object representing the aggregate results of a search which can include
    both server and local results.
    """

    # Per-category result sets. These are created per-instance in
    # ``__init__``; the previous class-level ``set()`` defaults were mutable
    # state shared by every instance. Annotations are quoted so the project
    # ``API`` module is not required at class-creation time.
    _artist: "Set[API.Artist]"
    _album: "Set[API.Album]"
    _song: "Set[API.Song]"
    _playlist: "Set[API.Playlist]"

    def __init__(self, query: str):
        self.query = query
        self._artist = set()
        self._album = set()
        self._song = set()
        self._playlist = set()

    def add_results(self, result_type: str, results: Iterable):
        """Adds the ``results`` to the ``_<result_type>`` set.

        ``None`` results are ignored; duplicates are collapsed by set union.
        """
        if results is None:
            return

        member = f"_{result_type}"
        if getattr(self, member) is None:
            setattr(self, member, set())

        setattr(self, member, getattr(self, member, set()).union(set(results)))

    S = TypeVar("S")

    def _to_result(self, it: Iterable[S], transform: Callable[[S], str],) -> List[S]:
        """Rank ``it`` by fuzzy similarity of ``transform(x)`` to the query.

        Only entries scoring above 60 are kept, and at most 20 are returned.
        """
        all_results = sorted(
            ((similarity_ratio(self.query, transform(x)), x) for x in it),
            key=lambda rx: rx[0],
            reverse=True,
        )
        result: List[SearchResult.S] = []
        for ratio, x in all_results:
            if ratio > 60 and len(result) < 20:
                result.append(x)
            else:
                # No use going on, all the rest are less.
                break
        return result

    @property
    def artist(self) -> "Optional[List[API.Artist]]":
        if self._artist is None:
            return None
        return self._to_result(self._artist, lambda a: a.name)

    @property
    def album(self) -> "Optional[List[API.Album]]":
        if self._album is None:
            return None
        return self._to_result(self._album, lambda a: f"{a.name} - {a.artist}")

    @property
    def song(self) -> "Optional[List[API.Song]]":
        if self._song is None:
            return None
        return self._to_result(self._song, lambda s: f"{s.title} - {s.artist}")

    @property
    def playlist(self) -> "Optional[List[API.Playlist]]":
        if self._playlist is None:
            return None
        return self._to_result(self._playlist, lambda p: p.name)
T = TypeVar("T")
@@ -435,114 +334,11 @@ class CacheManager(metaclass=Singleton):
self.app_config.server.strhash(), *relative_paths
)
def calculate_download_path(self, *relative_paths) -> Path:
    """Return the temporary path where a file lives while it is downloading.

    The path is rooted at ``$XDG_CACHE_HOME`` (falling back to
    ``~/.cache``), under a per-server subdirectory keyed by the server's
    hash.
    """
    assert self.app_config.server is not None
    cache_root = os.environ.get("XDG_CACHE_HOME") or os.path.expanduser(
        "~/.cache"
    )
    server_hash = self.app_config.server.strhash()
    return Path(cache_root, "sublime-music", server_hash, *relative_paths)
def return_cached_or_download(
    self,
    relative_path: Union[Path, str],
    download_fn: Callable[[], bytes],
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
    allow_download: bool = True,
) -> "CacheManager.Result[str]":
    """
    Return the absolute path of a cached resource, downloading it first if
    necessary.

    :param relative_path: path of the resource relative to the cache root.
    :param download_fn: zero-argument callable returning the raw bytes of
        the resource.
    :param before_download: invoked just before a download begins.
    :param force: when ``True``, re-download even if a cached copy exists.
    :param allow_download: when ``False``, never download; an empty string
        is returned if the resource is not already cached.
    :returns: a ``CacheManager.Result`` resolving to the absolute path (as
        a string) of the cached file, or ``""`` when downloads are
        disallowed and the file is absent.
    """
    abs_path = self.calculate_abs_path(relative_path)
    abs_path_str = str(abs_path)
    download_path = self.calculate_download_path(relative_path)

    # Fast path: already cached and not forced to refresh.
    if abs_path.exists() and not force:
        return CacheManager.Result.from_data(abs_path_str)

    if not allow_download:
        return CacheManager.Result.from_data("")

    def do_download() -> str:
        # Register this path as in-flight; if another thread is already
        # downloading it, we only wait for that download to finish.
        resource_downloading = False
        with self.download_set_lock:
            if abs_path_str in self.current_downloads:
                resource_downloading = True
            self.current_downloads.add(abs_path_str)

        if resource_downloading:
            logging.info(f"{abs_path} already being downloaded.")
            # The resource is already being downloaded. Busy loop until
            # it has completed. Then, just return the path to the
            # resource.
            while abs_path_str in self.current_downloads:
                sleep(0.2)
        else:
            logging.info(f"{abs_path} not found. Downloading...")
            os.makedirs(download_path.parent, exist_ok=True)
            try:
                self.save_file(download_path, download_fn())
            except requests.exceptions.ConnectionError:
                # NOTE(review): on connection failure the in-flight marker
                # is dropped but the move below still runs if a partial
                # file exists — confirm that is intentional.
                with self.download_set_lock:
                    self.current_downloads.discard(abs_path_str)

            # Move the file to its cache download location.
            os.makedirs(abs_path.parent, exist_ok=True)
            if download_path.exists():
                shutil.move(str(download_path), abs_path)

        logging.info(f"{abs_path} downloaded. Returning.")
        return abs_path_str

    def after_download(path: str):
        # Unregister the completed download so waiting threads can proceed.
        with self.download_set_lock:
            self.current_downloads.discard(path)

    return CacheManager.Result.from_server(
        do_download,
        before_download=before_download,
        after_download=after_download,
    )
@staticmethod
def create_future(fn: Callable, *args) -> Future:
    """Submit ``fn(*args)`` to the CacheManager's shared executor and return
    the resulting :class:`concurrent.futures.Future`."""
    return CacheManager.executor.submit(fn, *args)
def delete_cached_cover_art(self, id: int):
    """Delete every cached cover-art file whose name contains ``id``."""
    pattern = self.calculate_abs_path(f"cover_art/*{id}*")
    for match in glob.glob(str(pattern)):
        Path(match).unlink()
def get_artist(
    self,
    artist_id: int,
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
) -> "CacheManager.Result[ArtistWithAlbumsID3]":
    """
    Retrieve the full details for an artist, preferring the in-memory cache.

    :param artist_id: ID of the artist to fetch.
    :param before_download: invoked just before a server request is made.
    :param force: when ``True``, bypass the cache and hit the server.
    """
    cache_name = "artist_details"

    # Serve from the in-memory cache unless a refresh was forced.
    if artist_id in self.cache.get(cache_name, {}) and not force:
        return CacheManager.Result.from_data(self.cache[cache_name][artist_id])

    def after_download(artist: ArtistWithAlbumsID3):
        # Store the fresh result and persist the cache metadata.
        with self.cache_lock:
            self.cache[cache_name][artist_id] = artist
        self.save_cache_info()

    return CacheManager.Result.from_server(
        lambda: self.server.get_artist(artist_id),
        before_download=before_download,
        after_download=after_download,
    )
def get_indexes(
self,
before_download: Callable[[], None] = lambda: None,
@@ -592,239 +388,6 @@ class CacheManager(metaclass=Singleton):
after_download=after_download,
)
def get_artist_info(
    self,
    artist_id: int,
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
) -> "CacheManager.Result[ArtistInfo2]":
    """
    Retrieve supplementary artist info (used e.g. for the LastFM image URL
    in ``get_artist_artwork``), preferring the in-memory cache.

    :param artist_id: ID of the artist.
    :param before_download: invoked just before a server request is made.
    :param force: when ``True``, bypass the cache and hit the server.
    """
    cache_name = "artist_infos"
    if artist_id in self.cache.get(cache_name, {}) and not force:
        return CacheManager.Result.from_data(self.cache[cache_name][artist_id])

    def after_download(artist_info: ArtistInfo2):
        # A falsy response is not cached, so a later call can retry.
        if not artist_info:
            return

        with self.cache_lock:
            self.cache[cache_name][artist_id] = artist_info
        self.save_cache_info()

    return CacheManager.Result.from_server(
        # Fall back to an empty ArtistInfo2 if the server returns nothing.
        lambda: (self.server.get_artist_info2(id=artist_id) or ArtistInfo2()),
        before_download=before_download,
        after_download=after_download,
    )
def get_artist_artwork(
    self,
    artist: Union[Artist, ArtistID3],
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
) -> AdapterResult[str]:
    """
    Resolve a filename for the artist's artwork, downloading it if needed.

    Prefers the LastFM image URL from the artist info; if that URL is a
    known placeholder image, falls back to server-provided cover art.

    :param artist: the artist whose artwork to fetch.
    :param before_download: invoked just before any download starts.
    :param force: when ``True``, re-download even if cached.
    """
    def do_get_artist_artwork(artist_info: ArtistInfo2) -> AdapterResult[str]:
        lastfm_url = "".join(artist_info.largeImageUrl or [])

        # Detect LastFM's "no image available" placeholder URLs.
        is_placeholder = lastfm_url == ""
        is_placeholder |= lastfm_url.endswith(
            "2a96cbd8b46e442fc41c2b86b821562f.png"
        )
        is_placeholder |= lastfm_url.endswith(
            "1024px-No_image_available.svg.png"
        )

        # If it is the placeholder LastFM image, try and use the cover
        # art filename given by the server.
        if is_placeholder:
            if isinstance(artist, (ArtistWithAlbumsID3, ArtistID3)):
                if artist.coverArt:
                    return AdapterManager.get_cover_art_filename(
                        artist.coverArt
                    )
                elif (
                    isinstance(artist, ArtistWithAlbumsID3)
                    and artist.album
                    and len(artist.album) > 0
                ):
                    return AdapterManager.get_cover_art_filename(
                        artist.album[0].coverArt
                    )
            elif isinstance(artist, Directory) and len(artist.child) > 0:
                # Retrieve the first album's cover art
                return AdapterManager.get_cover_art_filename(
                    artist.child[0].coverArt
                )

        if lastfm_url == "":
            return CacheManager.Result.from_data("")

        # Cache the image under a hash of its URL.
        url_hash = hashlib.md5(lastfm_url.encode("utf-8")).hexdigest()
        return self.return_cached_or_download(
            f"cover_art/artist.{url_hash}",
            lambda: requests.get(lastfm_url).content,
            before_download=before_download,
            force=force,
        )

    def download_fn(artist_info: CacheManager.Result[ArtistInfo2]) -> str:
        # In this case, artist_info is a future, so we have to wait for
        # its result before calculating. Then, immediately unwrap the
        # result() because we are already within a future.
        return do_get_artist_artwork(artist_info.result()).result()

    artist_info = CacheManager.get_artist_info(artist.id)
    if artist_info.is_future:
        return CacheManager.Result.from_server(
            lambda: download_fn(artist_info), before_download=before_download,
        )
    else:
        return do_get_artist_artwork(artist_info.result())
def get_album_list(
    self,
    type_: str,
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
    # Look at documentation for get_album_list in server.py:
    **params,
) -> "CacheManager.Result[List[AlbumID3]]":
    """
    Retrieve the album list of the given ``type_``, preferring the
    in-memory cache.

    :param type_: album-list type passed to the server's
        ``get_album_list2`` endpoint (e.g. ``"random"``).
    :param before_download: invoked just before a server request is made.
    :param force: when ``True``, bypass the cache and hit the server.
    :param params: forwarded verbatim to ``server.get_album_list2``.
    """
    cache_name = "albums"
    if len(self.cache.get(cache_name, {}).get(type_, [])) > 0 and not force:
        return CacheManager.Result.from_data(self.cache[cache_name][type_])

    def do_get_album_list() -> List[AlbumID3]:
        def get_page(offset: int, page_size: int = 500,) -> List[AlbumID3]:
            return (
                self.server.get_album_list2(
                    type_, size=page_size, offset=offset, **params,
                ).album
                or []
            )

        # "random" results are not meaningfully pageable, so only fetch
        # one small page for them.
        page_size = 40 if type_ == "random" else 500
        offset = 0

        next_page = get_page(offset, page_size=page_size)
        albums = next_page

        # If it returns 500 things, then there's more leftover.
        while len(next_page) == 500:
            # BUG FIX: advance the offset *before* fetching the next page.
            # Previously ``get_page(offset)`` was called with the stale
            # offset, so the first page was fetched twice (duplicating the
            # first 500 albums) and every later request lagged one page.
            offset += 500
            next_page = get_page(offset)
            albums.extend(next_page)

        return albums

    def after_download(albums: List[AlbumID3]):
        with self.cache_lock:
            if not self.cache[cache_name].get(type_):
                self.cache[cache_name][type_] = []
            self.cache[cache_name][type_] = albums
        self.save_cache_info()

    return CacheManager.Result.from_server(
        do_get_album_list,
        before_download=before_download,
        after_download=after_download,
    )
def get_album(
    self,
    album_id: int,
    before_download: Callable[[], None] = lambda: None,
    force: bool = False,
) -> "CacheManager.Result[AlbumWithSongsID3]":
    """
    Retrieve an album (including its songs), preferring the in-memory
    cache.

    :param album_id: ID of the album to fetch.
    :param before_download: invoked just before a server request is made.
    :param force: when ``True``, bypass the cache and hit the server.
    """
    cache_name = "album_details"
    if album_id in self.cache.get(cache_name, {}) and not force:
        return CacheManager.Result.from_data(self.cache[cache_name][album_id])

    def after_download(album: AlbumWithSongsID3):
        with self.cache_lock:
            self.cache[cache_name][album_id] = album

            # Albums have the song details as well, so save those too.
            for song in album.get("song", []):
                self.cache["song_details"][song.id] = song

        self.save_cache_info()

    return CacheManager.Result.from_server(
        lambda: self.server.get_album(album_id),
        before_download=before_download,
        after_download=after_download,
    )
def search(
    self,
    query: str,
    search_callback: Callable[[SearchResult, bool], None],
    before_download: Callable[[], None] = lambda: None,
) -> "CacheManager.Result":
    """
    Run a combined local + server search for ``query``.

    ``search_callback`` is invoked up to twice: first with the local
    (cached) results and ``False``, then with the combined results and
    ``True`` to signal completion. Cancelling the returned result stops
    any further callbacks.

    :param query: the search string; an empty string immediately yields
        an empty, final ``SearchResult``.
    :param search_callback: called with ``(results, is_final)``.
    :param before_download: invoked before the search begins.
    """
    if query == "":
        search_callback(SearchResult(""), True)
        return CacheManager.Result.from_data(None)

    before_download()

    # Keep track of if the result is cancelled and if it is, then don't
    # do anything with any results.
    cancelled = False

    # This future actually does the search and calls the
    # search_callback when each of the futures completes.
    def do_search():
        # Sleep for a little while before returning the local results.
        # They are less expensive to retrieve (but they still incur
        # some overhead due to the GTK UI main loop queue).
        sleep(0.2)
        if cancelled:
            return

        # Local Results
        search_result = SearchResult(query)
        search_result.add_results(
            "album", itertools.chain(*self.cache["albums"].values())
        )
        search_result.add_results("artist", self.cache["artists"])
        search_result.add_results("song", self.cache["song_details"].values())
        search_result.add_results("playlist", self.cache["playlists"])
        search_callback(search_result, False)

        # Wait longer to see if the user types anything else so we
        # don't peg the server with tons of requests.
        sleep(0.2)
        if cancelled:
            return

        # Server Results
        search_fn = self.server.search3
        try:
            # Attempt to add the server search results to the
            # SearchResult. If it fails, that's fine, we will use the
            # finally to always return a final SearchResult to the UI.
            server_result = search_fn(query)
            search_result.add_results("album", server_result.album)
            search_result.add_results("artist", server_result.artist)
            search_result.add_results("song", server_result.song)
        except Exception:
            # We really don't care about what the exception was (could
            # be connection error, could be invalid JSON, etc.) because
            # we will always have returned local results.
            return
        finally:
            search_callback(search_result, True)

    # When the future is cancelled (this will happen if a new search is
    # created).
    def on_cancel():
        nonlocal cancelled
        cancelled = True

    return CacheManager.Result.from_server(do_search, on_cancel=on_cancel)
_instance: Optional[__CacheManagerInternal] = None
def __init__(self):
@@ -833,4 +396,3 @@ class CacheManager(metaclass=Singleton):
@staticmethod
def reset(app_config: AppConfiguration):
CacheManager._instance = CacheManager.__CacheManagerInternal(app_config)
similarity_ratio.cache_clear()

View File

@@ -46,7 +46,8 @@ class ServerConfiguration:
self.version = 0
def strhash(self) -> str:
# TODO: needs to change to something better
# TODO: make this configurable by the adapters; the combination of the
# adapters' hashes will be the name of the hash directory
"""
Returns the MD5 hash of the server's name, server address, and
username. This should be used whenever it's necessary to uniquely

View File

@@ -1,11 +1,10 @@
from typing import Any, List, Optional, Tuple, Type, Union
from functools import partial
from typing import Any, List, Optional, Tuple, Union
from gi.repository import Gdk, Gio, GLib, GObject, Gtk, Pango
from sublime.adapters import AdapterManager
from sublime.cache_manager import CacheManager
from sublime.adapters import AdapterManager, api_objects as API, Result
from sublime.config import AppConfiguration
from sublime.server.api_objects import Artist, Child, Directory
from sublime.ui import util
from sublime.ui.common import IconButton, SongListColumn
@@ -33,7 +32,7 @@ class BrowsePanel(Gtk.Overlay):
super().__init__()
scrolled_window = Gtk.ScrolledWindow()
self.root_directory_listing = ListAndDrilldown(IndexList)
self.root_directory_listing = ListAndDrilldown()
self.root_directory_listing.connect(
"song-clicked", lambda _, *args: self.emit("song-clicked", *args),
)
@@ -53,38 +52,44 @@ class BrowsePanel(Gtk.Overlay):
self.add_overlay(self.spinner)
def update(self, app_config: AppConfiguration, force: bool = False):
if not CacheManager.ready:
if not AdapterManager.can_get_directory():
return
self.update_order_token += 1
def do_update(id_stack: List[int], update_order_token: int):
def do_update(update_order_token: int, id_stack: Result[List[int]]):
if self.update_order_token != update_order_token:
return
# TODO pass order token here?
self.root_directory_listing.update(
id_stack, app_config=app_config, force=force,
id_stack.result(), app_config, force=force,
)
self.spinner.hide()
def calculate_path(update_order_token: int) -> Tuple[List[str], int]:
def calculate_path() -> List[str]:
if app_config.state.selected_browse_element_id is None:
return [], update_order_token
return []
id_stack = []
directory = None
current_dir_id = app_config.state.selected_browse_element_id
while directory is None or directory.parent is not None:
directory = CacheManager.get_music_directory(
current_dir_id: Optional[str] = app_config.state.selected_browse_element_id
while current_dir_id and (
directory := AdapterManager.get_directory(
current_dir_id, before_download=self.spinner.show,
).result()
):
id_stack.append(directory.id)
current_dir_id = directory.parent # Detect loops?
if directory.id == "root":
break
# Detect loops?
current_dir_id = directory.parent.id if directory.parent else None
return id_stack, update_order_token
return id_stack
path_fut = CacheManager.create_future(calculate_path, self.update_order_token)
path_fut.add_done_callback(lambda f: GLib.idle_add(do_update, *f.result()))
path_result: Result[List[str]] = Result(calculate_path)
path_result.add_done_callback(
partial(GLib.idle_add, partial(do_update, self.update_order_token))
)
class ListAndDrilldown(Gtk.Paned):
@@ -103,10 +108,10 @@ class ListAndDrilldown(Gtk.Paned):
id_stack = None
def __init__(self, list_type: Type):
def __init__(self):
Gtk.Paned.__init__(self, orientation=Gtk.Orientation.HORIZONTAL)
self.list = list_type()
self.list = MusicDirectoryList()
self.list.connect(
"song-clicked", lambda _, *args: self.emit("song-clicked", *args),
)
@@ -119,46 +124,45 @@ class ListAndDrilldown(Gtk.Paned):
self.pack2(self.drilldown, True, False)
def update(
self,
id_stack: List[int],
app_config: AppConfiguration,
force: bool = False,
directory_id: int = None,
self, id_stack: List[str], app_config: AppConfiguration, force: bool = False
):
dir_id = id_stack[-1]
selected_id = (
id_stack[-2]
if len(id_stack) > 2
else app_config.state.selected_browse_element_id
)
self.list.update(
None if len(id_stack) == 0 else id_stack[-1],
app_config,
directory_id=dir_id,
selected_id=selected_id,
app_config=app_config,
force=force,
directory_id=directory_id,
)
if self.id_stack == id_stack:
# We always want to update, but in this case, we don't want to blow
# away the drilldown.
if isinstance(self.drilldown, ListAndDrilldown):
self.drilldown.update(
id_stack[:-1], app_config, force=force, directory_id=id_stack[-1],
)
self.drilldown.update(id_stack[:-1], app_config, force=force)
return
self.id_stack = id_stack
if len(id_stack) > 0:
if len(id_stack) > 1:
self.remove(self.drilldown)
self.drilldown = ListAndDrilldown(MusicDirectoryList)
self.drilldown = ListAndDrilldown()
self.drilldown.connect(
"song-clicked", lambda _, *args: self.emit("song-clicked", *args),
)
self.drilldown.connect(
"refresh-window", lambda _, *args: self.emit("refresh-window", *args),
)
self.drilldown.update(
id_stack[:-1], app_config, force=force, directory_id=id_stack[-1],
)
self.drilldown.update(id_stack[:-1], app_config, force=force)
self.drilldown.show_all()
self.pack2(self.drilldown, True, False)
class DrilldownList(Gtk.Box):
class MusicDirectoryList(Gtk.Box):
__gsignals__ = {
"song-clicked": (
GObject.SignalFlags.RUN_FIRST,
@@ -172,16 +176,20 @@ class DrilldownList(Gtk.Box):
),
}
update_order_token = 0
directory_id: Optional[str] = None
selected_id: Optional[str] = None
class DrilldownElement(GObject.GObject):
id = GObject.Property(type=str)
name = GObject.Property(type=str)
is_dir = GObject.Property(type=bool, default=True)
def __init__(self, element: Union[Child, Artist]):
def __init__(self, element: Union[API.Directory, API.Song]):
GObject.GObject.__init__(self)
self.id = element.id
self.name = element.name if isinstance(element, Artist) else element.title
self.is_dir = element.get("isDir", True)
self.is_dir = isinstance(element, API.Directory)
self.name = element.title
def __init__(self):
Gtk.Box.__init__(self, orientation=Gtk.Orientation.VERTICAL)
@@ -215,7 +223,7 @@ class DrilldownList(Gtk.Box):
self.directory_song_list = Gtk.TreeView(
model=self.directory_song_store,
name="album-songs-list",
name="directory-songs-list",
headers_visible=False,
)
self.directory_song_list.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
@@ -241,6 +249,110 @@ class DrilldownList(Gtk.Box):
self.scroll_window.add(scrollbox)
self.pack_start(self.scroll_window, True, True, 0)
def update(
    self,
    app_config: AppConfiguration = None,
    force: bool = False,
    directory_id: str = None,
    selected_id: str = None,
):
    """Refresh this directory listing.

    ``directory_id`` and ``selected_id`` default to the previously shown
    values, so the list can be re-rendered (e.g. after a download-state
    change) without re-specifying them.
    """
    self.directory_id = directory_id or self.directory_id
    self.selected_id = selected_id or self.selected_id
    self.update_store(
        self.directory_id, force=force, order_token=self.update_order_token,
    )
@util.async_callback(
    AdapterManager.get_directory,
    before_download=lambda self: self.loading_indicator.show(),
    on_failure=lambda self, e: self.loading_indicator.hide(),
)
def update_store(
    self,
    directory: API.Directory,
    app_config: AppConfiguration = None,
    force: bool = False,
    order_token: int = None,
):
    """Populate the directory and song stores from ``directory``.

    Runs as the async callback of ``AdapterManager.get_directory``;
    stale invocations (identified by ``order_token``) are dropped.
    """
    if order_token != self.update_order_token:
        return

    new_directories_store = []
    new_songs_store = []
    selected_dir_idx = None

    # Partition the directory's children into sub-directories and songs.
    for idx, el in enumerate(directory.children):
        if isinstance(el, API.Directory):
            new_directories_store.append(MusicDirectoryList.DrilldownElement(el))
            if el.id == self.selected_id:
                # NOTE(review): ``idx`` indexes *all* children, but
                # ``get_row_at_index`` below expects an index into the
                # directories-only list — confirm directories always
                # precede songs in ``children``.
                selected_dir_idx = idx
        else:
            new_songs_store.append(
                [
                    util.get_cached_status_icon(
                        AdapterManager.get_cached_status(el)
                    ),
                    util.esc(el.title),
                    util.format_song_duration(el.duration),
                    el.id,
                ]
            )

    # Diff the new contents into the existing stores to avoid flicker.
    util.diff_model_store(self.drilldown_directories_store, new_directories_store)
    util.diff_song_store(self.directory_song_store, new_songs_store)

    # Hide whichever half of the panel is empty and adjust its width.
    if len(new_directories_store) == 0:
        self.list.hide()
    else:
        self.list.show()

    if len(new_songs_store) == 0:
        self.directory_song_list.hide()
        self.scroll_window.set_min_content_width(275)
    else:
        self.directory_song_list.show()
        self.scroll_window.set_min_content_width(350)

    # Preserve selection
    if selected_dir_idx is not None:
        row = self.list.get_row_at_index(selected_dir_idx)
        self.list.select_row(row)

    self.loading_indicator.hide()
def on_download_state_change(self, _):
self.update()
# Create Element Helper Functions
# ==================================================================================
def create_row(self, model: DrilldownElement) -> Gtk.ListBoxRow:
    """Build a ListBox row for one directory entry.

    Activating the row emits the ``app.browse-to`` action with the
    element's ID as the target.
    """
    row = Gtk.ListBoxRow(
        action_name="app.browse-to", action_target=GLib.Variant("s", model.id),
    )
    rowbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
    rowbox.add(
        Gtk.Label(
            label=f"<b>{util.esc(model.name)}</b>",
            use_markup=True,
            margin=8,
            halign=Gtk.Align.START,
            ellipsize=Pango.EllipsizeMode.END,
        )
    )

    # Chevron icon hinting that the row drills down into a sub-listing.
    icon = Gio.ThemedIcon(name="go-next-symbolic")
    image = Gtk.Image.new_from_gicon(icon, Gtk.IconSize.BUTTON)
    rowbox.pack_end(image, False, False, 5)

    row.add(rowbox)
    row.show_all()
    return row
# Event Handlers
# ==================================================================================
def on_refresh_clicked(self, _: Any):
self.update(force=True)
def on_song_activated(self, treeview: Any, idx: Gtk.TreePath, column: Any):
# The song ID is in the last column of the model.
self.emit(
@@ -284,153 +396,3 @@ class DrilldownList(Gtk.Box):
return True
return False
def do_update_store(self, elements: Optional[List[Any]]):
new_directories_store = []
new_songs_store = []
selected_dir_idx = None
for idx, el in enumerate(elements or []):
if el.get("isDir", True):
new_directories_store.append(DrilldownList.DrilldownElement(el))
if el.id == self.selected_id:
selected_dir_idx = idx
else:
new_songs_store.append(
[
util.get_cached_status_icon(
AdapterManager.get_cached_status(el)
),
util.esc(el.title),
util.format_song_duration(el.duration),
el.id,
]
)
util.diff_model_store(self.drilldown_directories_store, new_directories_store)
util.diff_song_store(self.directory_song_store, new_songs_store)
if len(new_directories_store) == 0:
self.list.hide()
else:
self.list.show()
if len(new_songs_store) == 0:
self.directory_song_list.hide()
self.scroll_window.set_min_content_width(275)
else:
self.directory_song_list.show()
self.scroll_window.set_min_content_width(350)
# Preserve selection
if selected_dir_idx is not None:
row = self.list.get_row_at_index(selected_dir_idx)
self.list.select_row(row)
self.loading_indicator.hide()
def create_row(self, model: "DrilldownList.DrilldownElement") -> Gtk.ListBoxRow:
row = Gtk.ListBoxRow(
action_name="app.browse-to", action_target=GLib.Variant("s", model.id),
)
rowbox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
rowbox.add(
Gtk.Label(
label=f"<b>{util.esc(model.name)}</b>",
use_markup=True,
margin=8,
halign=Gtk.Align.START,
ellipsize=Pango.EllipsizeMode.END,
)
)
icon = Gio.ThemedIcon(name="go-next-symbolic")
image = Gtk.Image.new_from_gicon(icon, Gtk.IconSize.BUTTON)
rowbox.pack_end(image, False, False, 5)
row.add(rowbox)
row.show_all()
return row
class IndexList(DrilldownList):
update_order_token = 0
def update(
self,
selected_id: int,
app_config: AppConfiguration = None,
force: bool = False,
**kwargs,
):
self.update_order_token += 1
self.selected_id = selected_id
self.update_store(
force=force, app_config=app_config, order_token=self.update_order_token,
)
def on_refresh_clicked(self, _: Any):
self.update(self.selected_id, force=True)
@util.async_callback(
lambda *a, **k: CacheManager.get_indexes(*a, **k),
before_download=lambda self: self.loading_indicator.show(),
on_failure=lambda self, e: self.loading_indicator.hide(),
)
def update_store(
self,
artists: List[Artist],
app_config: AppConfiguration = None,
force: bool = False,
order_token: int = None,
):
if order_token != self.update_order_token:
return
self.do_update_store(artists)
def on_download_state_change(self):
self.update(self.selected_id)
class MusicDirectoryList(DrilldownList):
update_order_token = 0
def update(
self,
selected_id: int,
app_config: AppConfiguration = None,
force: bool = False,
directory_id: int = None,
):
self.directory_id = directory_id
self.selected_id = selected_id
self.update_store(
directory_id,
force=force,
app_config=app_config,
order_token=self.update_order_token,
)
def on_refresh_clicked(self, _: Any):
self.update(self.selected_id, force=True, directory_id=self.directory_id)
@util.async_callback(
lambda *a, **k: CacheManager.get_music_directory(*a, **k),
before_download=lambda self: self.loading_indicator.show(),
on_failure=lambda self, e: self.loading_indicator.hide(),
)
def update_store(
self,
directory: Directory,
app_config: AppConfiguration = None,
force: bool = False,
order_token: int = None,
):
if order_token != self.update_order_token:
return
self.do_update_store(directory.child)
def on_download_state_change(self):
self.update(self.selected_id, directory_id=self.directory_id)

View File

@@ -3,7 +3,7 @@ from typing import Any, Optional, Set
from gi.repository import Gdk, Gio, GLib, GObject, Gtk, Pango
from sublime.adapters import AdapterManager, Result, api_objects as API
from sublime.adapters import AdapterManager, api_objects as API, Result
from sublime.config import AppConfiguration
from sublime.ui import albums, artists, browse, player_controls, playlists, util
from sublime.ui.common import SpinnerImage

View File

@@ -3,8 +3,8 @@ from datetime import timedelta
from enum import Enum
from typing import Dict, Optional, Tuple
from sublime.adapters.api_objects import Song
from sublime.adapters import AlbumSearchQuery
from sublime.adapters.api_objects import Genre, Song
class RepeatType(Enum):
@@ -52,9 +52,13 @@ class UIState:
selected_browse_element_id: Optional[str] = None
selected_playlist_id: Optional[str] = None
class _DefaultGenre(Genre):
def __init__(self):
self.name = "Rock"
# State for Album sort.
current_album_search_query: AlbumSearchQuery = AlbumSearchQuery(
AlbumSearchQuery.Type.RANDOM, genre=None, year_range=(2010, 2020),
AlbumSearchQuery.Type.RANDOM, genre=_DefaultGenre(), year_range=(2010, 2020),
)
active_playlist_id: Optional[str] = None

View File

@@ -1,6 +1,26 @@
from pathlib import Path
from time import sleep
from sublime.adapters import Result
import pytest
from sublime.adapters import AdapterManager, Result
from sublime.config import AppConfiguration, ServerConfiguration
@pytest.fixture
def adapter_manager(tmp_path: Path):
    """Pytest fixture: a configured ``AdapterManager`` backed by a
    throw-away cache directory, shut down again after the test."""
    config = AppConfiguration(
        servers=[
            ServerConfiguration(
                name="foo", server_address="bar", username="baz", password="ohea",
            )
        ],
        current_server_index=0,
        cache_location=tmp_path.as_posix(),
    )
    AdapterManager.reset(config)
    yield
    AdapterManager.shutdown()
def test_result_immediate():
@@ -24,7 +44,7 @@ def test_result_immediate_callback():
def test_result_future():
def resolve_later() -> int:
sleep(1)
sleep(0.1)
return 42
result = Result(resolve_later)
@@ -35,7 +55,7 @@ def test_result_future():
def test_result_future_callback():
def resolve_later() -> int:
sleep(1)
sleep(0.1)
return 42
check_done = False
@@ -49,21 +69,48 @@ def test_result_future_callback():
result = Result(resolve_later)
result.add_done_callback(check_done_callback)
# Should take much less than 2 seconds to complete. If the assertion fails, then the
# Should take much less than 1 second to complete. If the assertion fails, then the
# check_done_callback failed.
t = 0
while not check_done:
assert t < 2
assert t < 1
t += 0.1
sleep(0.1)
def test_default_value():
def resolve_fail() -> int:
sleep(1)
sleep(0.1)
raise Exception()
result = Result(resolve_fail, default_value=42)
assert not result.data_is_available
assert result.result() == 42
assert result.data_is_available
def test_cancel():
    """Cancelling a pending Result fires on_cancel and discards the value."""
    def resolve_later() -> int:
        sleep(0.1)
        return 42

    was_cancelled = False

    def on_cancel():
        nonlocal was_cancelled
        was_cancelled = True

    result = Result(resolve_later, on_cancel=on_cancel)
    result.cancel()

    assert was_cancelled
    assert not result.data_is_available
    with pytest.raises(Exception):
        result.result()
def test_get_song_details(adapter_manager: AdapterManager):
    """Placeholder for exercising ``AdapterManager.get_song_details``."""
    # TODO(review): implement, e.g. AdapterManager.get_song_details("1")
    # against the adapter_manager fixture.
    pass

View File

@@ -8,14 +8,21 @@ import pytest
from peewee import SelectQuery
from sublime import util
from sublime.adapters import api_objects as SublimeAPI, CacheMissError
from sublime.adapters.filesystem import FilesystemAdapter
from sublime.adapters.subsonic import api_objects as SubsonicAPI
MOCK_DATA_FILES = Path(__file__).parent.joinpath("mock_data")
MOCK_ALBUM_ART = MOCK_DATA_FILES.joinpath("album-art.png")
MOCK_ALBUM_ART2 = MOCK_DATA_FILES.joinpath("album-art2.png")
MOCK_ALBUM_ART3 = MOCK_DATA_FILES.joinpath("album-art3.png")
MOCK_SONG_FILE = MOCK_DATA_FILES.joinpath("test-song.mp3")
MOCK_SONG_FILE2 = MOCK_DATA_FILES.joinpath("test-song2.mp3")
MOCK_ALBUM_ART_HASH = "5d7bee4f3fe25b18cd2a66f1c9767e381bc64328"
MOCK_ALBUM_ART2_HASH = "031a8a1ca01f64f851a22d5478e693825a00fb23"
MOCK_ALBUM_ART3_HASH = "46a8af0f8fe370e59202a545803e8bbb3a4a41ee"
MOCK_SONG_FILE_HASH = "fe12d0712dbfd6ff7f75ef3783856a7122a78b0a"
MOCK_SONG_FILE2_HASH = "c32597c724e2e484dbf5856930b2e5bb80de13b7"
MOCK_SUBSONIC_SONGS = [
SubsonicAPI.Song(
@@ -28,7 +35,7 @@ MOCK_SUBSONIC_SONGS = [
artist_id="art1",
duration=timedelta(seconds=20.8),
path="foo/song2.mp3",
cover_art="2",
cover_art="s2",
_genre="Bar",
),
SubsonicAPI.Song(
@@ -41,7 +48,7 @@ MOCK_SUBSONIC_SONGS = [
artist_id="art2",
duration=timedelta(seconds=10.2),
path="foo/song1.mp3",
cover_art="1",
cover_art="s1",
_genre="Foo",
),
SubsonicAPI.Song(
@@ -54,7 +61,7 @@ MOCK_SUBSONIC_SONGS = [
artist_id="art2",
duration=timedelta(seconds=10.2),
path="foo/song1.mp3",
cover_art="1",
cover_art="s1",
_genre="Foo",
),
]
@@ -89,21 +96,16 @@ def mock_data_files(
def verify_songs(
actual_songs: Iterable[SublimeAPI.Song], expected_songs: Iterable[SubsonicAPI.Song]
):
actual_songs, expected_songs = (list(actual_songs), list(expected_songs))
assert len(actual_songs) == len(expected_songs)
for actual, song in zip(actual_songs, expected_songs):
for k, v in asdict(song).items():
ignore = (
"_genre",
"_album",
"_artist",
"_parent",
"album_id",
"artist_id",
)
if k in ignore:
if k in ("_genre", "_album", "_artist", "_parent", "album_id", "artist_id"):
continue
print(k) # noqa: T001
print(k, "->", v) # noqa: T001
actual_value = getattr(actual, k, None)
if k == "album":
assert ("a1", "foo") == (actual_value.id, actual_value.name)
elif k == "genre":
@@ -292,7 +294,7 @@ def test_invalidate_playlist(cache_adapter: FilesystemAdapter):
SubsonicAPI.PlaylistWithSongs("2", "test2", cover_art="pl_2", songs=[]),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_2",), MOCK_ALBUM_ART,
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_2",), MOCK_ALBUM_ART2,
)
stale_uri_1 = cache_adapter.get_cover_art_uri("pl_test1", "file")
@@ -338,43 +340,31 @@ def test_invalidate_playlist(cache_adapter: FilesystemAdapter):
assert e.partial_data == stale_uri_2
def test_invalidate_song_data(cache_adapter: FilesystemAdapter):
def test_invalidate_song_file(cache_adapter: FilesystemAdapter):
CACHE_KEYS = FilesystemAdapter.CachedDataKey
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("2",), MOCK_SUBSONIC_SONGS[0]
CACHE_KEYS.SONG_DETAILS, ("2",), MOCK_SUBSONIC_SONGS[0]
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
CACHE_KEYS.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("1",), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",), MOCK_SONG_FILE
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_FILE, ("2",), MOCK_SONG_FILE
CACHE_KEYS.COVER_ART_FILE, ("s1", "song"), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(CACHE_KEYS.SONG_FILE, ("1",), MOCK_SONG_FILE)
cache_adapter.ingest_new_data(CACHE_KEYS.SONG_FILE, ("2",), MOCK_SONG_FILE2)
stale_song_file = cache_adapter.get_song_uri("1", "file")
stale_cover_art_file = cache_adapter.get_cover_art_uri("1", "file")
cache_adapter.invalidate_data(FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",))
cache_adapter.invalidate_data(CACHE_KEYS.SONG_FILE, ("1",))
cache_adapter.invalidate_data(CACHE_KEYS.COVER_ART_FILE, ("s1", "song"))
try:
with pytest.raises(CacheMissError):
cache_adapter.get_song_uri("1", "file")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data
assert e.partial_data == stale_song_file
try:
cache_adapter.get_cover_art_uri("1", "file")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data
assert e.partial_data == stale_cover_art_file
with pytest.raises(CacheMissError):
cache_adapter.get_cover_art_uri("s1", "file")
# Make sure it didn't delete the other ones.
assert cache_adapter.get_song_uri("2", "file").endswith("song2.mp3")
# Make sure it didn't delete the other song.
assert cache_adapter.get_song_uri("2", "file").endswith(MOCK_SONG_FILE2_HASH)
def test_delete_playlists(cache_adapter: FilesystemAdapter):
@@ -410,11 +400,13 @@ def test_delete_playlists(cache_adapter: FilesystemAdapter):
# Even if the cover art failed to be deleted, it should cache miss.
shutil.copy(
MOCK_ALBUM_ART,
str(cache_adapter.cover_art_dir.joinpath(util.params_hash("pl_1"))),
MOCK_ALBUM_ART, str(cache_adapter.cover_art_dir.joinpath(MOCK_ALBUM_ART_HASH)),
)
with pytest.raises(CacheMissError):
try:
cache_adapter.get_cover_art_uri("pl_1", "file")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data is None
def test_delete_song_data(cache_adapter: FilesystemAdapter):
@@ -422,16 +414,17 @@ def test_delete_song_data(cache_adapter: FilesystemAdapter):
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("1",), MOCK_ALBUM_ART,
FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",), MOCK_SONG_FILE
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",), MOCK_SONG_FILE
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("s1",), MOCK_ALBUM_ART,
)
music_file_path = cache_adapter.get_song_uri("1", "file")
cover_art_path = cache_adapter.get_cover_art_uri("1", "file")
cover_art_path = cache_adapter.get_cover_art_uri("s1", "file")
cache_adapter.delete_data(FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",))
cache_adapter.delete_data(FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("s1",))
assert not Path(music_file_path).exists()
assert not Path(cover_art_path).exists()
@@ -443,7 +436,7 @@ def test_delete_song_data(cache_adapter: FilesystemAdapter):
assert e.partial_data is None
try:
cache_adapter.get_cover_art_uri("1", "file")
cache_adapter.get_cover_art_uri("s1", "file")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data is None
@@ -648,7 +641,7 @@ def test_caching_get_artist(cache_adapter: FilesystemAdapter):
)
artist = cache_adapter.get_artist("1")
assert (
assert artist.artist_image_url and (
artist.id,
artist.name,
artist.album_count,
@@ -686,7 +679,7 @@ def test_caching_get_artist(cache_adapter: FilesystemAdapter):
)
artist = cache_adapter.get_artist("1")
assert (
assert artist.artist_image_url and (
artist.id,
artist.name,
artist.album_count,
@@ -728,7 +721,7 @@ def test_caching_get_album(cache_adapter: FilesystemAdapter):
)
album = cache_adapter.get_album("a1")
assert album
assert album and album.cover_art
assert (
album.id,
album.name,
@@ -747,9 +740,9 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
# Simulate the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ARTIST,
("1",),
("artist1",),
SubsonicAPI.ArtistAndArtistInfo(
"1",
"artist1",
"Bar",
album_count=1,
artist_image_url="image",
@@ -768,47 +761,40 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ALBUM,
("1",),
SubsonicAPI.Album("1", "Foo", artist_id="1", cover_art="1"),
SubsonicAPI.Album("1", "Foo", artist_id="artist1", cover_art="1"),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ALBUM,
("2",),
SubsonicAPI.Album("2", "Bar", artist_id="1", cover_art="2"),
SubsonicAPI.Album("2", "Bar", artist_id="artist1", cover_art="2"),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("image",), MOCK_ALBUM_ART,
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("image",), MOCK_ALBUM_ART3,
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("1",), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("2",), MOCK_ALBUM_ART,
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("2",), MOCK_ALBUM_ART2,
)
stale_artist = cache_adapter.get_artist("1")
stale_artist = cache_adapter.get_artist("artist1")
stale_album_1 = cache_adapter.get_album("1")
stale_album_2 = cache_adapter.get_album("2")
stale_artist_artwork = cache_adapter.get_cover_art_uri("image", "file")
stale_cover_art_1 = cache_adapter.get_cover_art_uri("1", "file")
stale_cover_art_2 = cache_adapter.get_cover_art_uri("2", "file")
cache_adapter.invalidate_data(FilesystemAdapter.CachedDataKey.ARTIST, ("1",))
cache_adapter.invalidate_data(FilesystemAdapter.CachedDataKey.ARTIST, ("artist1",))
# Test the cascade of cache invalidations.
try:
cache_adapter.get_artist("1")
cache_adapter.get_artist("artist1")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data
assert e.partial_data == stale_artist
try:
cache_adapter.get_cover_art_uri("image", "file")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data
assert e.partial_data == stale_artist_artwork
try:
cache_adapter.get_album("1")
assert 0, "DID NOT raise CacheMissError"
@@ -823,6 +809,13 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
assert e.partial_data
assert e.partial_data == stale_album_2
try:
cache_adapter.get_cover_art_uri("image", "file")
assert 0, "DID NOT raise CacheMissError"
except CacheMissError as e:
assert e.partial_data
assert e.partial_data == stale_artist_artwork
try:
cache_adapter.get_cover_art_uri("1", "file")
assert 0, "DID NOT raise CacheMissError"

View File

@@ -1,2 +1,4 @@
test-song.mp3 was originally named Happy_Music-2018-09-18_-_Beautiful_Memories_-_David_Fesliyan.mp3
which is royalty free music from https://www.fesliyanstudios.com
The test songs are royalty free music from https://www.fesliyanstudios.com
* test-song.mp3 (originally named Happy_Music-2018-09-18_-_Beautiful_Memories_-_David_Fesliyan.mp3)
* test-song2.mp3 (originally named 2017-03-24_-_Lone_Rider_-_David_Fesliyan.mp3)

View File

@@ -0,0 +1 @@
really not a PNG

View File

@@ -0,0 +1 @@
definitely not a PNG. Stop looking lol

Binary file not shown.