Got browse caching working

This commit is contained in:
Sumner Evans
2020-05-14 22:49:30 -06:00
parent 8f07d1ec48
commit 8017aac704
24 changed files with 900 additions and 388 deletions

View File

@@ -25,8 +25,7 @@ def main():
parser.add_argument(
"-c",
"--config",
help="specify a configuration file. Defaults to "
"~/.config/sublime-music/config.json",
help="specify a configuration file. Defaults to ~/.config/sublime-music/config.json", # noqa: 512
)
args, unknown_args = parser.parse_known_args()

View File

@@ -32,16 +32,22 @@ class SongCacheStatus(Enum):
"""
Represents the cache state of a given song.
* :class:`SongCacheStatus.NOT_CACHED` -- indicates
* :class:`SongCacheStatus.CACHED` -- indicates
* :class:`SongCacheStatus.PERMANENTLY_CACHED` -- indicates
* :class:`SongCacheStatus.DOWNLOADING` -- indicates
* :class:`SongCacheStatus.NOT_CACHED` -- indicates that the song is not cached on
disk.
* :class:`SongCacheStatus.CACHED` -- indicates that the song is cached on disk.
* :class:`SongCacheStatus.PERMANENTLY_CACHED` -- indicates that the song is cached
on disk and will not be deleted when the cache gets too big.
* :class:`SongCacheStatus.DOWNLOADING` -- indicates that the song is being
downloaded.
* :class:`SongCacheStatus.CACHED_STALE` -- indicates that the song is cached on
disk, but has been invalidated.
"""
NOT_CACHED = 0
CACHED = 1
PERMANENTLY_CACHED = 2
DOWNLOADING = 3
CACHED_STALE = 4
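
As a hedged illustration (not part of this commit), a caller deciding whether a song needs to be fetched again could branch on these statuses; the ``needs_download`` helper below is hypothetical:

from sublime.adapters import SongCacheStatus

def needs_download(status: SongCacheStatus) -> bool:
    # NOT_CACHED and the new CACHED_STALE both require hitting the server again;
    # CACHED, PERMANENTLY_CACHED, and DOWNLOADING do not.
    return status in (SongCacheStatus.NOT_CACHED, SongCacheStatus.CACHED_STALE)
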
@dataclass(frozen=True)
@@ -713,7 +719,7 @@ class CachingAdapter(Adapter):
PLAYLIST_DETAILS = "get_playlist_details"
PLAYLISTS = "get_playlists"
SEARCH_RESULTS = "search_results"
SONG_DETAILS = "song_details"
SONG = "song"
SONG_FILE = "song_file"
SONG_FILE_PERMANENT = "song_file_permanent"

View File

@@ -5,7 +5,7 @@ import abc
import logging
from datetime import datetime, timedelta
from enum import Enum
from functools import lru_cache
from functools import lru_cache, partial
from typing import (
Any,
Callable,
@@ -15,6 +15,7 @@ from typing import (
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
@@ -65,16 +66,24 @@ class Artist(abc.ABC):
class Directory(abc.ABC):
"""
The special directory with ``name`` and ``id`` set to ``"root"`` should be used to
indicate the top-level directory.
"""
id: str
title: Optional[str]
parent: Optional["Directory"]
name: Optional[str]
parent_id: Optional[str]
children: Sequence[Union["Directory", "Song"]]
class Song(abc.ABC):
id: str
title: str
parent: Directory
path: Optional[str]
parent_id: Optional[str]
duration: Optional[timedelta]
album: Optional[Album]
artist: Optional[Artist]
genre: Optional[Genre]
@@ -82,14 +91,13 @@ class Song(abc.ABC):
track: Optional[int]
year: Optional[int]
cover_art: Optional[str]
size: Optional[int]
content_type: Optional[str]
suffix: Optional[str]
transcoded_content_type: Optional[str]
transcoded_suffix: Optional[str]
duration: Optional[timedelta]
bit_rate: Optional[int]
path: str
is_video: Optional[bool]
user_rating: Optional[int]
average_rating: Optional[float]
@@ -142,7 +150,7 @@ class PlayQueue(abc.ABC):
@lru_cache(maxsize=8192)
def similarity_ratio(query: str, string: str) -> int:
def similarity_ratio(query: str, string: Optional[str]) -> int:
"""
Return the :class:`fuzzywuzzy.fuzz.partial_ratio` between the ``query`` and
the given ``string``.
@@ -153,6 +161,8 @@ def similarity_ratio(query: str, string: str) -> int:
:param query: the query string
:param string: the string to compare to the query string
"""
if not string:
return 0
return fuzz.partial_ratio(query.lower(), string.lower())
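
For context, a hedged sketch of what ``fuzz.partial_ratio`` does with the lower-cased arguments (exact scores depend on the fuzzywuzzy version):

from fuzzywuzzy import fuzz

print(fuzz.partial_ratio("beat", "the beatles"))    # 100, since "beat" is a substring
print(fuzz.partial_ratio("betles", "the beatles"))  # high, but below 100
# With the guard above, similarity_ratio(query, None) now short-circuits to 0 instead of
# raising an AttributeError on None.lower().
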
@@ -186,10 +196,13 @@ class SearchResult:
_S = TypeVar("_S")
def _to_result(
self, it: Dict[str, _S], transform: Callable[[_S], str],
self, it: Dict[str, _S], transform: Callable[[_S], Tuple[Optional[str], ...]],
) -> List[_S]:
all_results = sorted(
((similarity_ratio(self.query, transform(x)), x) for id, x in it.items()),
(
(max(map(partial(similarity_ratio, self.query), transform(x))), x)
for x in it.values()
),
key=lambda rx: rx[0],
reverse=True,
)
@@ -206,16 +219,20 @@ class SearchResult:
@property
def artists(self) -> List[Artist]:
return self._to_result(self._artists, lambda a: a.name)
return self._to_result(self._artists, lambda a: (a.name,))
@property
def albums(self) -> List[Album]:
return self._to_result(self._albums, lambda a: f"{a.name}{a.artist}")
return self._to_result(
self._albums, lambda a: (a.name, a.artist.name if a.artist else None)
)
@property
def songs(self) -> List[Song]:
return self._to_result(self._songs, lambda s: f"{s.title}{s.artist}")
return self._to_result(
self._songs, lambda s: (s.title, s.artist.name if s.artist else None)
)
@property
def playlists(self) -> List[Playlist]:
return self._to_result(self._playlists, lambda p: p.name)
return self._to_result(self._playlists, lambda p: (p.name,))
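
A hedged sketch of why the transform now returns a tuple of candidate strings: an item is ranked by its best-matching field, so an album can match on either its own name or its artist's name (the data below is made up, and ``similarity_ratio`` is assumed importable from ``sublime.adapters.api_objects``):

from sublime.adapters.api_objects import similarity_ratio

query = "daft"
album_fields = ("Discovery", "Daft Punk")  # (album name, artist name)
score = max(similarity_ratio(query, f) for f in album_fields)  # matches on the artist
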

View File

@@ -2,7 +2,6 @@ import hashlib
import logging
import shutil
import threading
from dataclasses import asdict
from datetime import datetime
from pathlib import Path
from typing import Any, cast, Dict, Optional, Sequence, Set, Tuple, Union
@@ -109,8 +108,12 @@ class FilesystemAdapter(CachingAdapter):
model: Any,
cache_key: CachingAdapter.CachedDataKey,
ignore_cache_miss: bool = False,
where_clause: Optional[Tuple[Any, ...]] = None,
) -> Sequence:
result = list(model.select())
query = model.select()
if where_clause:
query = query.where(*where_clause)
result = list(query)
if self.is_cache and not ignore_cache_miss:
# Determine if the adapter has ingested data for this key before, and if
# not, cache miss.
@@ -122,9 +125,14 @@ class FilesystemAdapter(CachingAdapter):
return result
def _get_object_details(
self, model: Any, id: str, cache_key: CachingAdapter.CachedDataKey
self,
model: Any,
id: str,
cache_key: CachingAdapter.CachedDataKey,
where_clause: Tuple[Any, ...] = (),
cache_where_clause: Tuple[Any, ...] = (),
) -> Any:
obj = model.get_or_none(model.id == id)
obj = model.get_or_none(model.id == id, *where_clause)
# Handle the case that this is the ground truth adapter.
if not self.is_cache:
@@ -138,6 +146,7 @@ class FilesystemAdapter(CachingAdapter):
models.CacheInfo.cache_key == cache_key,
models.CacheInfo.params_hash == util.params_hash(id),
models.CacheInfo.valid == True, # noqa: 712
*cache_where_clause,
)
if not cache_info:
raise CacheMissError(partial_data=obj)
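
A minimal sketch of how the new ``where_clause`` tuples are meant to be used: each element is a peewee expression that gets splatted into ``.where()`` or ``get_or_none()``. The filter below is hypothetical and would live inside ``FilesystemAdapter``:

# Hypothetically restrict the listing to songs that already have a genre:
songs = self._get_list(
    models.Song,
    CachingAdapter.CachedDataKey.SONG,
    where_clause=(models.Song.genre.is_null(False),),
)
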
@@ -147,11 +156,8 @@ class FilesystemAdapter(CachingAdapter):
# Data Retrieval Methods
# ==================================================================================
def get_cached_status(self, song: API.Song) -> SongCacheStatus:
song_model = models.Song.get_or_none(models.Song.id == song.id)
if not song_model:
return SongCacheStatus.NOT_CACHED
try:
song_model = self.get_song_details(song.id)
file = song_model.file
if file.valid and self.music_dir.joinpath(file.file_hash).exists():
# TODO check if path is permanently cached
@@ -206,12 +212,13 @@ class FilesystemAdapter(CachingAdapter):
raise CacheMissError()
def get_song_details(self, song_id: str) -> API.Song:
def get_song_details(self, song_id: str) -> models.Song:
return self._get_object_details(
models.Song, song_id, CachingAdapter.CachedDataKey.SONG_DETAILS
models.Song, song_id, CachingAdapter.CachedDataKey.SONG,
)
def get_artists(self, ignore_cache_miss: bool = False) -> Sequence[API.Artist]:
# TODO order_by
return self._get_list(
models.Artist,
CachingAdapter.CachedDataKey.ARTISTS,
@@ -228,6 +235,7 @@ class FilesystemAdapter(CachingAdapter):
# TODO: deal with paging
# TODO: deal with cache invalidation
sql_query = models.Album.select()
# TODO use the new ``where_clause`` from get_list
Type = AlbumSearchQuery.Type
if query.type == Type.GENRE:
@@ -280,19 +288,10 @@ class FilesystemAdapter(CachingAdapter):
)
)
def get_directory(self, directory_id: str) -> API.Directory:
# ohea
result = list(model.select())
if self.is_cache and not ignore_cache_miss:
# Determine if the adapter has ingested data for this key before, and if
# not, cache miss.
if not models.CacheInfo.get_or_none(
models.CacheInfo.valid == True, # noqa: 712
models.CacheInfo.cache_key == cache_key,
):
raise CacheMissError(partial_data=result)
return result
pass
def get_directory(self, directory_id: str) -> models.Directory:
return self._get_object_details(
models.Directory, directory_id, CachingAdapter.CachedDataKey.DIRECTORY
)
def get_genres(self) -> Sequence[API.Genre]:
return self._get_list(models.Genre, CachingAdapter.CachedDataKey.GENRES)
@@ -304,9 +303,7 @@ class FilesystemAdapter(CachingAdapter):
search_result.add_results(
"songs",
self._get_list(
models.Song,
CachingAdapter.CachedDataKey.SONG_DETAILS,
ignore_cache_miss=True,
models.Song, CachingAdapter.CachedDataKey.SONG, ignore_cache_miss=True
),
)
search_result.add_results(
@@ -354,6 +351,7 @@ class FilesystemAdapter(CachingAdapter):
data_key: CachingAdapter.CachedDataKey,
params: Tuple[Any, ...],
data: Any,
partial: bool = False,
) -> Any:
# TODO: this entire function is not exactly efficient due to the nested
# dependencies and everything. I'm not sure how to improve it, and I'm not sure
@@ -361,27 +359,14 @@ class FilesystemAdapter(CachingAdapter):
# TODO refactor to be a recursive function like invalidate_data?
# TODO may need to remove reliance on asdict in order to support more backends.
params_hash = util.params_hash(*params)
cache_info_extra: Dict[str, Any] = {}
logging.debug(
f"_do_ingest_new_data params={params} params_hash={params_hash} data_key={data_key} data={data}" # noqa: 502
)
now = datetime.now()
cache_info, cache_info_created = models.CacheInfo.get_or_create(
cache_key=data_key,
params_hash=params_hash,
defaults={
"cache_key": data_key,
"params_hash": params_hash,
"last_ingestion_time": now,
},
)
cache_info.last_ingestion_time = now
if not cache_info_created:
cache_info.valid = True
cache_info.save()
cover_art_key = CachingAdapter.CachedDataKey.COVER_ART_FILE
KEYS = CachingAdapter.CachedDataKey
def setattrs(obj: Any, data: Dict[str, Any]):
for k, v in data.items():
@@ -389,7 +374,27 @@ class FilesystemAdapter(CachingAdapter):
setattr(obj, k, v)
def ingest_directory_data(api_directory: API.Directory) -> models.Directory:
directory_data = asdict(api_directory)
directory_data: Dict[str, Any] = {
"id": api_directory.id,
"name": api_directory.name,
"parent_id": api_directory.parent_id,
}
if not partial:
directory_data["directory_children"] = []
directory_data["song_children"] = []
for c in api_directory.children:
if hasattr(c, "children"): # directory
directory_data["directory_children"].append(
self._do_ingest_new_data(
KEYS.DIRECTORY, (c.id,), c, partial=True
)
)
else:
directory_data["song_children"].append(
self._do_ingest_new_data(KEYS.SONG, (c.id,), c)
)
directory, created = models.Directory.get_or_create(
id=api_directory.id, defaults=directory_data
)
@@ -401,7 +406,11 @@ class FilesystemAdapter(CachingAdapter):
return directory
def ingest_genre_data(api_genre: API.Genre) -> models.Genre:
genre_data = asdict(api_genre)
genre_data = {
"name": api_genre.name,
"song_count": getattr(api_genre, "song_count", None),
"album_count": getattr(api_genre, "album_count", None),
}
genre, created = models.Genre.get_or_create(
name=api_genre.name, defaults=genre_data
)
@@ -416,22 +425,25 @@ class FilesystemAdapter(CachingAdapter):
api_album: API.Album, exclude_artist: bool = False
) -> models.Album:
album_data = {
**asdict(api_album),
"id": api_album.id,
"name": api_album.name,
"created": getattr(api_album, "created", None),
"duration": getattr(api_album, "duration", None),
"play_count": getattr(api_album, "play_count", None),
"song_count": getattr(api_album, "song_count", None),
"starred": getattr(api_album, "starred", None),
"year": getattr(api_album, "year", None),
"genre": ingest_genre_data(g) if (g := api_album.genre) else None,
"artist": ingest_artist_data(ar) if (ar := api_album.artist) else None,
"songs": [
ingest_song_data(s, fill_album=False) for s in api_album.songs or []
],
"_cover_art": self._do_ingest_new_data(
cover_art_key, params=(api_album.cover_art, "album"), data=None
KEYS.COVER_ART_FILE, params=(api_album.cover_art,), data=None,
)
if api_album.cover_art
else None,
}
del album_data["cover_art"]
if exclude_artist:
del album_data["artist"]
album, created = models.Album.get_or_create(
id=api_album.id, defaults=album_data
@@ -445,35 +457,43 @@ class FilesystemAdapter(CachingAdapter):
def ingest_artist_data(api_artist: API.Artist) -> models.Artist:
# Ingest similar artists.
models.SimilarArtist.insert_many(
[
{"artist": api_artist.id, "similar_artist": a.id, "order": i}
for i, a in enumerate(api_artist.similar_artists or [])
]
).on_conflict_replace().execute()
models.SimilarArtist.delete().where(
models.SimilarArtist.similar_artist.not_in(
[sa.id for sa in api_artist.similar_artists or []]
),
models.Artist == api_artist.id,
).execute()
# TODO figure out which order to do this in to be most efficient.
if api_artist.similar_artists:
models.SimilarArtist.delete().where(
models.SimilarArtist.similar_artist.not_in(
[sa.id for sa in api_artist.similar_artists or []]
),
models.Artist == api_artist.id,
).execute()
models.SimilarArtist.insert_many(
[
{"artist": api_artist.id, "similar_artist": a.id, "order": i}
for i, a in enumerate(api_artist.similar_artists or [])
]
).on_conflict_replace().execute()
artist_data = {
**asdict(api_artist),
"id": api_artist.id,
"name": api_artist.name,
"album_count": getattr(api_artist, "album_count", None),
"starred": getattr(api_artist, "starred", None),
"biography": getattr(api_artist, "biography", None),
"music_brainz_id": getattr(api_artist, "music_brainz_id", None),
"last_fm_url": getattr(api_artist, "last_fm_url", None),
"albums": [
ingest_album_data(a, exclude_artist=True)
for a in api_artist.albums or []
],
"_artist_image_url": self._do_ingest_new_data(
cover_art_key,
params=(api_artist.artist_image_url, "artist"),
KEYS.COVER_ART_FILE,
params=(api_artist.artist_image_url,),
data=None,
)
if api_artist.artist_image_url
else None,
}
del artist_data["artist_image_url"]
del artist_data["similar_artists"]
# del artist_data["artist_image_url"]
# del artist_data["similar_artists"]
artist, created = models.Artist.get_or_create(
id=api_artist.id, defaults=artist_data
@@ -489,26 +509,23 @@ class FilesystemAdapter(CachingAdapter):
api_song: API.Song, fill_album: bool = True
) -> models.Song:
song_data = {
**asdict(api_song),
"parent": ingest_directory_data(d) if (d := api_song.parent) else None,
"id": api_song.id,
"title": api_song.title,
"path": getattr(api_song, "path", None),
"track": getattr(api_song, "track", None),
"year": getattr(api_song, "year", None),
"duration": getattr(api_song, "duration", None),
# Ingest the FKs.
"genre": ingest_genre_data(g) if (g := api_song.genre) else None,
"artist": ingest_artist_data(ar) if (ar := api_song.artist) else None,
"album": ingest_album_data(al) if (al := api_song.album) else None,
"_cover_art": self._do_ingest_new_data(
CachingAdapter.CachedDataKey.COVER_ART_FILE,
params=(api_song.cover_art,),
data=None,
KEYS.COVER_ART_FILE, params=(api_song.cover_art,), data=None,
)
if api_song.cover_art
else None,
"parent_id": api_song.parent_id,
}
del song_data["cover_art"]
if fill_album:
# Don't incur the overhead of creating an album if we are going to turn
# around and do it in the ingest_album_data function.
song_data["album"] = (
ingest_album_data(al) if (al := api_song.album) else None
)
song, created = models.Song.get_or_create(
id=song_data["id"], defaults=song_data
@@ -524,22 +541,29 @@ class FilesystemAdapter(CachingAdapter):
api_playlist: Union[API.Playlist, API.PlaylistDetails]
) -> models.Playlist:
playlist_data = {
**asdict(api_playlist),
"id": api_playlist.id,
"name": api_playlist.name,
"song_count": api_playlist.song_count,
"duration": api_playlist.duration,
"created": getattr(api_playlist, "created", None),
"changed": getattr(api_playlist, "changed", None),
"comment": getattr(api_playlist, "comment", None),
"owner": getattr(api_playlist, "owner", None),
"public": getattr(api_playlist, "public", None),
"songs": [
ingest_song_data(s)
self._do_ingest_new_data(KEYS.SONG, (s.id,), s)
for s in (
api_playlist.songs
if isinstance(api_playlist, API.PlaylistDetails)
cast(API.PlaylistDetails, api_playlist).songs
if hasattr(api_playlist, "songs")
else ()
)
],
"_cover_art": self._do_ingest_new_data(
cover_art_key, (api_playlist.cover_art,), None
KEYS.COVER_ART_FILE, (api_playlist.cover_art,), None
)
if api_playlist.cover_art
else None,
}
del playlist_data["cover_art"]
playlist, playlist_created = models.Playlist.get_or_create(
id=playlist_data["id"], defaults=playlist_data
@@ -560,44 +584,45 @@ class FilesystemAdapter(CachingAdapter):
return file_hash.hexdigest()
if data_key == CachingAdapter.CachedDataKey.ALBUM:
ingest_album_data(data)
return_val = None
elif data_key == CachingAdapter.CachedDataKey.ALBUMS:
if data_key == KEYS.ALBUM:
return_val = ingest_album_data(data)
elif data_key == KEYS.ALBUMS:
for a in data:
ingest_album_data(a)
# TODO deal with sorting here
# TODO need some other way of deleting stale albums
elif data_key == CachingAdapter.CachedDataKey.ARTIST:
ingest_artist_data(data)
elif data_key == KEYS.ARTIST:
return_val = ingest_artist_data(data)
elif data_key == CachingAdapter.CachedDataKey.ARTISTS:
elif data_key == KEYS.ARTISTS:
for a in data:
ingest_artist_data(a)
models.Artist.delete().where(
models.Artist.id.not_in([a.id for a in data])
).execute()
elif data_key == CachingAdapter.CachedDataKey.COVER_ART_FILE:
cache_info.file_id = params[0]
elif data_key == KEYS.COVER_ART_FILE:
cache_info_extra["file_id"] = params[0]
if data is None:
cache_info.save()
return cache_info
if data is not None:
file_hash = compute_file_hash(data)
cache_info_extra["file_hash"] = file_hash
file_hash = compute_file_hash(data)
cache_info.file_hash = file_hash
cache_info.save()
# Copy the actual cover art file
shutil.copy(str(data), str(self.cover_art_dir.joinpath(file_hash)))
# Copy the actual cover art file
shutil.copy(str(data), str(self.cover_art_dir.joinpath(file_hash)))
return cache_info
elif data_key == KEYS.DIRECTORY:
return_val = ingest_directory_data(data)
elif data_key == CachingAdapter.CachedDataKey.GENRES:
elif data_key == KEYS.GENRES:
for g in data:
ingest_genre_data(g)
elif data_key == CachingAdapter.CachedDataKey.IGNORED_ARTICLES:
elif data_key == KEYS.IGNORED_ARTICLES:
models.IgnoredArticle.insert_many(
map(lambda s: {"name": s}, data)
).on_conflict_replace().execute()
@@ -605,17 +630,17 @@ class FilesystemAdapter(CachingAdapter):
models.IgnoredArticle.name.not_in(data)
).execute()
elif data_key == CachingAdapter.CachedDataKey.PLAYLIST_DETAILS:
ingest_playlist(data)
elif data_key == KEYS.PLAYLIST_DETAILS:
return_val = ingest_playlist(data)
elif data_key == CachingAdapter.CachedDataKey.PLAYLISTS:
elif data_key == KEYS.PLAYLISTS:
for p in data:
ingest_playlist(p)
models.Playlist.delete().where(
models.Playlist.id.not_in([p.id for p in data])
).execute()
elif data_key == CachingAdapter.CachedDataKey.SEARCH_RESULTS:
elif data_key == KEYS.SEARCH_RESULTS:
data = cast(API.SearchResult, data)
for a in data._artists.values():
ingest_artist_data(a)
@@ -627,32 +652,54 @@ class FilesystemAdapter(CachingAdapter):
ingest_song_data(s)
for p in data._playlists.values():
ingest_song_data(p)
ingest_playlist(p)
elif data_key == CachingAdapter.CachedDataKey.SONG_DETAILS:
ingest_song_data(data)
elif data_key == KEYS.SONG:
return_val = ingest_song_data(data)
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE:
cache_info.file_id = params[0]
elif data_key == KEYS.SONG_FILE:
cache_info_extra["file_id"] = params[0]
if data is None:
cache_info.save()
return cache_info
if data is not None:
file_hash = compute_file_hash(data)
cache_info_extra["file_hash"] = file_hash
file_hash = compute_file_hash(data)
cache_info.file_hash = file_hash
# Copy the actual song file
shutil.copy(str(data), str(self.music_dir.joinpath(file_hash)))
elif data_key == KEYS.SONG_FILE_PERMANENT:
cache_info_extra["cache_permanently"] = True
# Set the cache info.
now = datetime.now()
cache_info, cache_info_created = models.CacheInfo.get_or_create(
cache_key=data_key,
params_hash=params_hash,
defaults={
"cache_key": data_key,
"params_hash": params_hash,
"last_ingestion_time": now,
# If it's partial data, then set it to be invalid so it will only be
# used in the event that the ground truth adapter can't service the
# request.
"valid": not partial,
**cache_info_extra,
},
)
if not cache_info_created:
cache_info.last_ingestion_time = now
cache_info.valid = not partial
for k, v in cache_info_extra.items():
setattr(cache_info, k, v)
cache_info.save()
# Copy the actual cover art file
shutil.copy(str(data), str(self.music_dir.joinpath(file_hash)))
# Special handling for Song
if data_key == KEYS.SONG_FILE:
song = models.Song.get_by_id(params[0])
song.file = cache_info
song.save()
return cache_info
elif data_key == CachingAdapter.CachedDataKey.SONG_FILE_PERMANENT:
raise NotImplementedError()
return return_val if return_val is not None else cache_info
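
A hedged sketch of the intended partial-ingestion flow (the adapter instance, ids, and directory object below are hypothetical): child directories are ingested with ``partial=True``, so their CacheInfo rows start out invalid and a direct lookup still cache-misses while exposing the partial row:

adapter.ingest_new_data(CachingAdapter.CachedDataKey.DIRECTORY, ("d1",), api_directory)

try:
    adapter.get_directory("child-dir-id")   # ingested above only as partial data
except CacheMissError as e:
    partial_directory = e.partial_data      # usable as a fallback until fetched directly
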
def _do_invalidate_data(
self, data_key: CachingAdapter.CachedDataKey, params: Tuple[Any, ...],

View File

@@ -1,4 +1,4 @@
from typing import Optional
from typing import List, Optional, Union
from peewee import (
AutoField,
@@ -30,12 +30,14 @@ class BaseModel(Model):
class CacheInfo(BaseModel):
id = AutoField()
valid = BooleanField(default=True)
valid = BooleanField(default=False)
cache_key = CacheConstantsField()
params_hash = TextField()
last_ingestion_time = TzDateTimeField(null=False)
file_id = TextField(null=True)
file_hash = TextField(null=True)
# TODO store path
cache_permanently = BooleanField(null=True)
# TODO some sort of expiry?
@@ -112,18 +114,30 @@ class IgnoredArticle(BaseModel):
class Directory(BaseModel):
id = TextField(unique=True, primary_key=True)
name = TextField(null=True)
parent = ForeignKeyField("self", null=True, backref="children")
parent_id = TextField(null=True)
_children: Optional[List[Union["Directory", "Song"]]] = None
@property
def children(self) -> List[Union["Directory", "Song"]]:
if not self._children:
self._children = list(
Directory.select().where(Directory.parent_id == self.id)
) + list(Song.select().where(Song.parent_id == self.id))
return self._children
class Song(BaseModel):
id = TextField(unique=True, primary_key=True)
title = TextField()
duration = DurationField()
path = TextField()
duration = DurationField(null=True)
# TODO move path to file foreign key
path = TextField(null=True)
parent_id = TextField(null=True)
album = ForeignKeyField(Album, null=True, backref="songs")
artist = ForeignKeyField(Artist, null=True, backref="songs")
parent = ForeignKeyField(Directory, null=True, backref="songs")
artist = ForeignKeyField(Artist, null=True)
genre = ForeignKeyField(Genre, null=True, backref="songs")
# figure out how to deal with different transcodings, etc.
@@ -161,9 +175,13 @@ class Song(BaseModel):
# original_height: Optional[int] = None
class DirectoryXChildren(BaseModel):
directory_id = TextField()
order = IntegerField()
# class DirectoryXChildren(BaseModel):
# directory_id = ForeignKeyField(Entity)
# order = IntegerField()
# child_id = ForeignKeyField(Entity, null=True)
# class Meta:
# indexes = ((("directory_id", "order", "child_id"), True),)
class Playlist(BaseModel):
@@ -218,7 +236,6 @@ ALL_TABLES = (
Artist,
CacheInfo,
Directory,
DirectoryXChildren,
Genre,
IgnoredArticle,
Playlist,

View File

@@ -4,7 +4,6 @@ import threading
from concurrent.futures import Future, ThreadPoolExecutor
from dataclasses import dataclass
from datetime import timedelta
from functools import partial
from pathlib import Path
from time import sleep
from typing import (
@@ -12,6 +11,8 @@ from typing import (
Callable,
cast,
Generic,
Iterable,
List,
Optional,
Sequence,
Set,
@@ -877,7 +878,7 @@ class AdapterManager:
allow_download=allow_download,
before_download=before_download,
use_ground_truth_adapter=force,
cache_key=CachingAdapter.CachedDataKey.SONG_DETAILS,
cache_key=CachingAdapter.CachedDataKey.SONG,
)
@staticmethod
@@ -898,26 +899,27 @@ class AdapterManager:
force: bool = False, before_download: Callable[[], None] = lambda: None
) -> Result[Sequence[Artist]]:
def do_get_artists() -> Sequence[Artist]:
artists: Sequence[Artist] = AdapterManager._get_from_cache_or_ground_truth(
"get_artists",
return AdapterManager.sort_by_ignored_articles(
AdapterManager._get_from_cache_or_ground_truth(
"get_artists",
use_ground_truth_adapter=force,
before_download=before_download,
cache_key=CachingAdapter.CachedDataKey.ARTISTS,
).result(),
key=lambda a: a.name,
use_ground_truth_adapter=force,
before_download=before_download,
cache_key=CachingAdapter.CachedDataKey.ARTISTS,
).result()
return sorted(
artists, key=partial(AdapterManager._strip_ignored_articles, force)
)
return Result(do_get_artists)
@staticmethod
def _get_ignored_articles(force: bool) -> Set[str]:
def _get_ignored_articles(use_ground_truth_adapter: bool) -> Set[str]:
if not AdapterManager._any_adapter_can_do("get_ignored_articles"):
return set()
try:
return AdapterManager._get_from_cache_or_ground_truth(
"get_ignored_articles",
use_ground_truth_adapter=force,
use_ground_truth_adapter=use_ground_truth_adapter,
cache_key=CachingAdapter.CachedDataKey.IGNORED_ARTICLES,
).result()
except Exception:
@@ -925,11 +927,26 @@ class AdapterManager:
return set()
@staticmethod
def _strip_ignored_articles(force: bool, artist: Artist) -> str:
first_word, rest = (name := artist.name).split(maxsplit=1)
if first_word in AdapterManager._get_ignored_articles(force):
return rest
return name
def _strip_ignored_articles(use_ground_truth_adapter: bool, string: str) -> str:
first_word, *rest = string.split(maxsplit=1)
if first_word in AdapterManager._get_ignored_articles(use_ground_truth_adapter):
return rest[0]
return string
_S = TypeVar("_S")
@staticmethod
def sort_by_ignored_articles(
it: Iterable[_S],
key: Callable[[_S], str],
use_ground_truth_adapter: bool = False,
) -> List[_S]:
return sorted(
it,
key=lambda x: AdapterManager._strip_ignored_articles(
use_ground_truth_adapter, key(x)
),
)
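
A hedged usage sketch (the artist objects are invented): with "The" among the ignored articles, sorting by the stripped key files "The Beatles" under B:

sorted_artists = AdapterManager.sort_by_ignored_articles(
    [the_beatles, abba],              # hypothetical Artist objects
    key=lambda artist: artist.name,
)
# -> [abba, the_beatles], because "The Beatles" is compared as just "Beatles".
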
@staticmethod
def get_artist(
@@ -1002,6 +1019,7 @@ class AdapterManager:
cache_key=CachingAdapter.CachedDataKey.DIRECTORY,
)
# Play Queue
@staticmethod
def get_play_queue() -> Result[Optional[PlayQueue]]:
assert AdapterManager._instance
@@ -1017,7 +1035,7 @@ class AdapterManager:
if play_queue := f.result():
for song in play_queue.songs:
AdapterManager._instance.caching_adapter.ingest_new_data(
CachingAdapter.CachedDataKey.SONG_DETAILS, (song.id,), song
CachingAdapter.CachedDataKey.SONG, (song.id,), song
)
future.add_done_callback(future_finished)

View File

@@ -5,7 +5,6 @@ import multiprocessing
import os
import pickle
import random
from dataclasses import asdict
from datetime import datetime, timedelta
from pathlib import Path
from time import sleep
@@ -25,9 +24,17 @@ from urllib.parse import urlencode, urlparse
import requests
from .api_objects import Directory, Response, Song
from .api_objects import Directory, Response
from .. import Adapter, AlbumSearchQuery, api_objects as API, ConfigParamDescriptor
SUBSONIC_ADAPTER_DEBUG_DELAY = None
if delay_str := os.environ.get("SUBSONIC_ADAPTER_DEBUG_DELAY"):
SUBSONIC_ADAPTER_DEBUG_DELAY = (
random.uniform(*map(float, delay_str.split(",")))
if "," in delay_str
else float(delay_str)
)
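
For reference, a hedged sketch of the two accepted forms of the environment variable; with this change it is read once at import time, so it must be set before the module is imported (values are arbitrary):

import os

os.environ["SUBSONIC_ADAPTER_DEBUG_DELAY"] = "1.5"      # fixed 1.5 second pause per request
os.environ["SUBSONIC_ADAPTER_DEBUG_DELAY"] = "0.5,3.0"  # random pause in [0.5, 3.0] s, sampled once
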
class SubsonicAdapter(Adapter):
"""
@@ -179,17 +186,11 @@ class SubsonicAdapter(Adapter):
params = {**self._get_params(), **params}
logging.info(f"[START] get: {url}")
if delay_str := os.environ.get("SUBSONIC_ADAPTER_DEBUG_DELAY"):
delay = (
random.uniform(*map(float, delay_str.split(",")))
if "," in delay_str
else float(delay_str)
)
if SUBSONIC_ADAPTER_DEBUG_DELAY:
logging.info(
f"SUBSONIC_ADAPTER_DEBUG_DELAY enabled. Pausing for {delay} seconds"
f"SUBSONIC_ADAPTER_DEBUG_DELAY enabled. Pausing for {SUBSONIC_ADAPTER_DEBUG_DELAY} seconds" # noqa: 512
)
sleep(delay)
sleep(SUBSONIC_ADAPTER_DEBUG_DELAY)
# Deal with datetime parameters (convert to milliseconds since 1970)
for k, v in params.items():
@@ -431,11 +432,10 @@ class SubsonicAdapter(Adapter):
with open(self.ignored_articles_cache_file, "wb+") as f:
pickle.dump(indexes.ignored_articles, f)
root_dir_items: List[Union[Dict[str, Any], Directory, Song]] = []
root_dir_items: List[Dict[str, Any]] = []
for index in indexes.index:
# TODO figure out a more efficient way of doing this.
root_dir_items += index.artist
return Directory(id="root", _children=root_dir_items, _is_root=True)
root_dir_items.extend(map(lambda x: {**x, "isDir": True}, index.artist))
return Directory(id="root", _children=root_dir_items)
def get_directory(self, directory_id: str) -> API.Directory:
if directory_id == "root":

View File

@@ -17,15 +17,23 @@ from dataclasses_json import (
from .. import api_objects as SublimeAPI
# Translation map
extra_translation_map = {
decoder_functions = {
datetime: (lambda s: datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f%z") if s else None),
timedelta: (lambda s: timedelta(seconds=s) if s else None),
}
encoder_functions = {
datetime: (lambda d: datetime.strftime(d, "%Y-%m-%dT%H:%M:%S.%f%z") if d else None),
timedelta: (lambda t: t.total_seconds() if t else None),
}
for type_, translation_function in extra_translation_map.items():
for type_, translation_function in decoder_functions.items():
dataclasses_json.cfg.global_config.decoders[type_] = translation_function
dataclasses_json.cfg.global_config.decoders[Optional[type_]] = translation_function
for type_, translation_function in encoder_functions.items():
dataclasses_json.cfg.global_config.encoders[type_] = translation_function
dataclasses_json.cfg.global_config.encoders[Optional[type_]] = translation_function
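
A hedged, standalone sketch of what registering both encoders and decoders buys for these types (the ``Example`` dataclass is illustrative, not from the codebase):

import dataclasses_json
from dataclasses import dataclass
from datetime import timedelta
from dataclasses_json import dataclass_json

# Same registration idea as above, repeated here so the snippet stands alone.
dataclasses_json.cfg.global_config.encoders[timedelta] = lambda t: t.total_seconds() if t else None
dataclasses_json.cfg.global_config.decoders[timedelta] = lambda s: timedelta(seconds=s) if s else None

@dataclass_json
@dataclass
class Example:
    duration: timedelta

print(Example.from_json(Example(timedelta(seconds=61.5)).to_json()))  # round-trips cleanly
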
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
@@ -122,43 +130,36 @@ class ArtistInfo:
self.artist_image_url = ""
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Directory(DataClassJsonMixin, SublimeAPI.Directory):
class Directory(SublimeAPI.Directory):
id: str
title: Optional[str] = field(default=None, metadata=config(field_name="name"))
parent: Optional["Directory"] = field(init=False)
_parent: Optional[str] = field(default=None, metadata=config(field_name="parent"))
_is_root: bool = False
name: Optional[str] = None
title: Optional[str] = None
parent_id: Optional[str] = field(default=None, metadata=config(field_name="parent"))
children: List[Union["Directory", "Song"]] = field(default_factory=list, init=False)
_children: List[Union[Dict[str, Any], "Directory", "Song"]] = field(
children: List[Union["Directory", "Song"]] = field(init=False)
_children: List[Dict[str, Any]] = field(
default_factory=list, metadata=config(field_name="child")
)
def __post_init__(self):
self.parent = (
Directory(self._parent or "root", _is_root=(self._parent is None))
if not self._is_root
else None
)
self.children = (
self._children
if self._is_root
else [
Directory.from_dict(c) if c.get("isDir") else Song.from_dict(c)
for c in self._children
]
)
self.parent_id = (self.parent_id or "root") if self.id != "root" else None
self.name = self.name or self.title
self.children = [
Directory.from_dict(c) if c.get("isDir") else Song.from_dict(c)
for c in self._children
]
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class Song(SublimeAPI.Song):
class Song(SublimeAPI.Song, DataClassJsonMixin):
id: str
title: str
path: str
parent: Directory = field(init=False)
_parent: Optional[str] = field(default=None, metadata=config(field_name="parent"))
title: str = field(metadata=config(field_name="name"))
path: Optional[str] = None
parent_id: Optional[str] = field(default=None, metadata=config(field_name="parent"))
# Artist
artist: Optional[ArtistAndArtistInfo] = field(init=False)
@@ -195,8 +196,7 @@ class Song(SublimeAPI.Song):
type: Optional[SublimeAPI.MediaType] = None
def __post_init__(self):
# Initialize the cross-references
self.parent = None if not self._parent else Directory(self._parent)
self.parent_id = (self.parent_id or "root") if self.id != "root" else None
self.artist = (
None
if not self.artist_id
@@ -270,7 +270,7 @@ class PlayQueue(SublimeAPI.PlayQueue):
@dataclass
class Index:
name: str
artist: List[Directory] = field(default_factory=list)
artist: List[Dict[str, Any]] = field(default_factory=list)
@dataclass_json(letter_case=LetterCase.CAMEL)

View File

@@ -43,7 +43,7 @@ from .adapters import AdapterManager, AlbumSearchQuery, Result
from .adapters.api_objects import Playlist, PlayQueue, Song
from .config import AppConfiguration, ReplayGainType
from .dbus import dbus_propagate, DBusManager
from .players import ChromecastPlayer, MPVPlayer, PlayerEvent
from .players import ChromecastPlayer, MPVPlayer, Player, PlayerEvent
from .ui.configure_servers import ConfigureServersDialog
from .ui.main import MainWindow
from .ui.settings import SettingsDialog
@@ -58,11 +58,12 @@ class SublimeMusicApp(Gtk.Application):
self.window: Optional[Gtk.Window] = None
self.app_config = AppConfiguration.load_from_file(config_file)
self.player = None
self.dbus_manager: Optional[DBusManager] = None
self.connect("shutdown", self.on_app_shutdown)
player: Player
def do_startup(self):
Gtk.Application.do_startup(self)
@@ -1004,7 +1005,9 @@ class SublimeMusicApp(Gtk.Application):
return
self.player.play_media(
uri, 0 if reset else self.app_config.state.song_progress, song,
uri,
timedelta(0) if reset else self.app_config.state.song_progress,
song,
)
self.app_config.state.playing = True
self.update_window()

View File

@@ -10,7 +10,7 @@ from deepdiff import DeepDiff
from gi.repository import Gio, GLib
from sublime.adapters import AdapterManager, CacheMissError
from sublime.adapters.api_objects import PlaylistDetails, Song
from sublime.adapters.api_objects import PlaylistDetails
from sublime.config import AppConfiguration
from sublime.players import Player
from sublime.ui.state import RepeatType
@@ -271,10 +271,10 @@ class DBusManager:
self, idx: int, play_queue: Tuple[str, ...]
) -> Dict[str, Any]:
try:
song: Song = AdapterManager.get_song_details(
song = AdapterManager.get_song_details(
play_queue[idx], allow_download=False
).result()
except CacheMissError:
except Exception:
return {}
trackid = self.get_dbus_playlist(play_queue)[idx]

View File

@@ -1,3 +1,4 @@
import abc
import base64
import io
import logging
@@ -22,6 +23,7 @@ from sublime.config import AppConfiguration
class PlayerEvent:
# TODO standardize this
name: str
value: Any
@@ -30,9 +32,9 @@ class PlayerEvent:
self.value = value
class Player:
# TODO: convert to ABC and pull players out into different modules and actually
# document this API because it's kinda a bit strange tbh.
class Player(abc.ABC):
# TODO: pull players out into different modules and actually document this API
# because it's kinda a bit strange tbh.
_can_hotswap_source: bool
def __init__(
@@ -97,7 +99,7 @@ class Player:
"toggle_play must be implemented by implementor of Player"
)
def seek(self, value: float):
def seek(self, value: timedelta):
raise NotImplementedError("seek must be implemented by implementor of Player")
def _get_timepos(self):
@@ -190,8 +192,8 @@ class MPVPlayer(Player):
def toggle_play(self):
self.mpv.cycle("pause")
def seek(self, value: float):
self.mpv.seek(str(value), "absolute")
def seek(self, value: timedelta):
self.mpv.seek(str(value.total_seconds()), "absolute")
def _get_volume(self) -> float:
return self._volume
@@ -480,9 +482,9 @@ class ChromecastPlayer(Player):
self.chromecast.media_controller.play()
self.wait_for_playing(self.start_time_incrementor)
def seek(self, value: float):
def seek(self, value: timedelta):
do_pause = not self.playing
self.chromecast.media_controller.seek(value)
self.chromecast.media_controller.seek(value.total_seconds())
if do_pause:
self.pause()

View File

@@ -563,11 +563,7 @@ class AlbumsGrid(Gtk.Overlay):
)
# Header for the widget
header_text = (
item.album.title if isinstance(item.album, API.Song) else item.album.name
)
header_label = make_label(header_text, "grid-header-label")
header_label = make_label(item.album.name, "grid-header-label")
widget_box.pack_start(header_label, False, False, 0)
# Extra info for the widget

View File

@@ -1,5 +1,5 @@
from functools import partial
from typing import Any, List, Optional, Tuple, Union
from typing import Any, cast, Optional, Tuple
from gi.repository import Gdk, Gio, GLib, GObject, Gtk, Pango
@@ -25,7 +25,6 @@ class BrowsePanel(Gtk.Overlay):
),
}
id_stack = None
update_order_token = 0
def __init__(self):
@@ -57,7 +56,7 @@ class BrowsePanel(Gtk.Overlay):
self.update_order_token += 1
def do_update(update_order_token: int, id_stack: Result[List[int]]):
def do_update(update_order_token: int, id_stack: Result[Tuple[str, ...]]):
if self.update_order_token != update_order_token:
return
@@ -67,26 +66,23 @@ class BrowsePanel(Gtk.Overlay):
)
self.spinner.hide()
def calculate_path() -> List[str]:
if app_config.state.selected_browse_element_id is None:
return []
def calculate_path() -> Tuple[str, ...]:
if (current_dir_id := app_config.state.selected_browse_element_id) is None:
return ("root",)
id_stack = []
current_dir_id: Optional[str] = app_config.state.selected_browse_element_id
while current_dir_id and (
directory := AdapterManager.get_directory(
current_dir_id, before_download=self.spinner.show,
).result()
):
id_stack.append(directory.id)
if directory.id == "root":
break
# Detect loops?
current_dir_id = directory.parent.id if directory.parent else None
current_dir_id = directory.parent_id
return id_stack
return tuple(id_stack)
path_result: Result[List[str]] = Result(calculate_path)
# TODO figure out why this updates so many times on startup
path_result: Result[Tuple[str, ...]] = Result(calculate_path)
path_result.add_done_callback(
partial(GLib.idle_add, partial(do_update, self.update_order_token))
)
@@ -124,14 +120,14 @@ class ListAndDrilldown(Gtk.Paned):
self.pack2(self.drilldown, True, False)
def update(
self, id_stack: List[str], app_config: AppConfiguration, force: bool = False
self,
id_stack: Tuple[str, ...],
app_config: AppConfiguration,
force: bool = False,
):
dir_id = id_stack[-1]
selected_id = (
id_stack[-2]
if len(id_stack) > 2
else app_config.state.selected_browse_element_id
)
*rest, dir_id = id_stack
child_id_stack = tuple(rest)
selected_id = child_id_stack[-1] if len(child_id_stack) > 0 else None
self.list.update(
directory_id=dir_id,
@@ -144,11 +140,11 @@ class ListAndDrilldown(Gtk.Paned):
# We always want to update, but in this case, we don't want to blow
# away the drilldown.
if isinstance(self.drilldown, ListAndDrilldown):
self.drilldown.update(id_stack[:-1], app_config, force=force)
self.drilldown.update(child_id_stack, app_config, force=force)
return
self.id_stack = id_stack
if len(id_stack) > 1:
if len(child_id_stack) > 0:
self.remove(self.drilldown)
self.drilldown = ListAndDrilldown()
self.drilldown.connect(
@@ -157,7 +153,7 @@ class ListAndDrilldown(Gtk.Paned):
self.drilldown.connect(
"refresh-window", lambda _, *args: self.emit("refresh-window", *args),
)
self.drilldown.update(id_stack[:-1], app_config, force=force)
self.drilldown.update(child_id_stack, app_config, force=force)
self.drilldown.show_all()
self.pack2(self.drilldown, True, False)
@@ -183,13 +179,11 @@ class MusicDirectoryList(Gtk.Box):
class DrilldownElement(GObject.GObject):
id = GObject.Property(type=str)
name = GObject.Property(type=str)
is_dir = GObject.Property(type=bool, default=True)
def __init__(self, element: Union[API.Directory, API.Song]):
def __init__(self, element: API.Directory):
GObject.GObject.__init__(self)
self.id = element.id
self.is_dir = isinstance(element, API.Directory)
self.name = element.title
self.name = element.name
def __init__(self):
Gtk.Box.__init__(self, orientation=Gtk.Orientation.VERTICAL)
@@ -197,7 +191,7 @@ class MusicDirectoryList(Gtk.Box):
list_actions = Gtk.ActionBar()
refresh = IconButton("view-refresh-symbolic", "Refresh folder")
refresh.connect("clicked", self.on_refresh_clicked)
refresh.connect("clicked", lambda *a: self.update(force=True))
list_actions.pack_end(refresh)
self.add(list_actions)
@@ -218,7 +212,7 @@ class MusicDirectoryList(Gtk.Box):
scrollbox.add(self.list)
self.directory_song_store = Gtk.ListStore(
str, str, str, str, # cache status # title # duration # song ID
str, str, str, str, # cache status, title, duration, song ID
)
self.directory_song_list = Gtk.TreeView(
@@ -262,6 +256,8 @@ class MusicDirectoryList(Gtk.Box):
self.directory_id, force=force, order_token=self.update_order_token,
)
# TODO this causes problems because the callback may try to call an object that
# doesn't exist anymore since we delete these panels a lot.
@util.async_callback(
AdapterManager.get_directory,
before_download=lambda self: self.loading_indicator.show(),
@@ -281,25 +277,37 @@ class MusicDirectoryList(Gtk.Box):
new_songs_store = []
selected_dir_idx = None
for idx, el in enumerate(directory.children):
if isinstance(el, API.Directory):
new_directories_store.append(MusicDirectoryList.DrilldownElement(el))
if el.id == self.selected_id:
selected_dir_idx = idx
for el in directory.children:
if hasattr(el, "children"):
new_directories_store.append(
MusicDirectoryList.DrilldownElement(cast(API.Directory, el))
)
else:
song = cast(API.Song, el)
new_songs_store.append(
[
util.get_cached_status_icon(
AdapterManager.get_cached_status(el)
),
util.esc(el.title),
util.format_song_duration(el.duration),
el.id,
util.get_cached_status_icon(song),
util.esc(song.title),
util.format_song_duration(song.duration),
song.id,
]
)
util.diff_model_store(self.drilldown_directories_store, new_directories_store)
# TODO figure out a way to push the sorting into the AdapterManager.
# start = time()
new_directories_store = AdapterManager.sort_by_ignored_articles(
new_directories_store, key=lambda d: d.name, use_ground_truth_adapter=force
)
new_songs_store = AdapterManager.sort_by_ignored_articles(
new_songs_store, key=lambda s: s[1], use_ground_truth_adapter=force
)
# print("CONSTRUCTING STORE TOOK", time() - start, force)
for idx, el in enumerate(new_directories_store):
if el.id == self.selected_id:
selected_dir_idx = idx
util.diff_model_store(self.drilldown_directories_store, new_directories_store)
util.diff_song_store(self.directory_song_store, new_songs_store)
if len(new_directories_store) == 0:
@@ -350,9 +358,6 @@ class MusicDirectoryList(Gtk.Box):
# Event Handlers
# ==================================================================================
def on_refresh_clicked(self, _: Any):
self.update(force=True)
def on_song_activated(self, treeview: Any, idx: Gtk.TreePath, column: Any):
# The song ID is in the last column of the model.
self.emit(

View File

@@ -264,7 +264,7 @@ class AlbumWithSongs(Gtk.Box):
):
new_store = [
[
util.get_cached_status_icon(AdapterManager.get_cached_status(song)),
util.get_cached_status_icon(song),
util.esc(song.title),
util.format_song_duration(song.duration),
song.id,

View File

@@ -141,12 +141,16 @@ class PlayerControls(Gtk.ActionBar):
artist = app_config.state.current_song.artist
if album:
self.album_name.set_markup(util.esc(album.name))
self.artist_name.show()
else:
self.album_name.set_markup("")
self.album_name.hide()
if artist:
self.artist_name.set_markup(util.esc(artist.name))
self.artist_name.show()
else:
self.artist_name.set_markup("")
self.artist_name.hide()
else:
# Clear out the cover art and song title if no song
self.album_art.set_from_file(None)

View File

@@ -479,7 +479,7 @@ class PlaylistDetailPanel(Gtk.Overlay):
new_store = [
[
util.get_cached_status_icon(AdapterManager.get_cached_status(song)),
util.get_cached_status_icon(song),
song.title,
album.name if (album := song.album) else None,
artist.name if (artist := song.artist) else None,

View File

@@ -17,7 +17,7 @@ from deepdiff import DeepDiff
from gi.repository import Gdk, GLib, Gtk
from sublime.adapters import AdapterManager, Result, SongCacheStatus
from sublime.adapters.api_objects import Playlist
from sublime.adapters.api_objects import Playlist, Song
from sublime.config import AppConfiguration
@@ -116,14 +116,14 @@ def dot_join(*items: Any) -> str:
return "".join(map(str, filter(lambda x: x is not None, items)))
def get_cached_status_icon(cache_status: SongCacheStatus) -> str:
def get_cached_status_icon(song: Song) -> str:
cache_icon = {
SongCacheStatus.NOT_CACHED: "",
SongCacheStatus.CACHED: "folder-download-symbolic",
SongCacheStatus.PERMANENTLY_CACHED: "view-pin-symbolic",
SongCacheStatus.DOWNLOADING: "emblem-synchronizing-symbolic",
}
return cache_icon[cache_status]
return cache_icon[AdapterManager.get_cached_status(song)]
def _parse_diff_location(location: str) -> Tuple:
@@ -214,7 +214,7 @@ def show_song_popover(
status = AdapterManager.get_cached_status(details)
albums.add(album.id if (album := details.album) else None)
artists.add(artist.id if (artist := details.artist) else None)
parents.add(parent.id if (parent := details.parent) else None)
parents.add(parent_id if (parent_id := details.parent_id) else None)
if download_sensitive or status == SongCacheStatus.NOT_CACHED:
download_sensitive = True

View File

@@ -7,6 +7,7 @@ from sublime.adapters import AlbumSearchQuery
def params_hash(*params: Any) -> str:
# Special handling for AlbumSearchQuery objects.
# TODO figure out if I can optimize this
if len(params) > 0 and isinstance(params[0], AlbumSearchQuery):
params = (hash(params[0]), *params[1:])
return hashlib.sha1(bytes(json.dumps(params), "utf8")).hexdigest()
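
Hedged illustration, in the same module as ``params_hash``: any JSON-serializable parameter tuple maps to a stable SHA-1 hex digest that can key CacheInfo rows (values are arbitrary):

print(params_hash("1"))         # stable 40-character hex digest for ("1",)
print(params_hash("1", "foo"))  # different digest for a different parameter tuple
# AlbumSearchQuery parameters are replaced with hash(query) first so json.dumps accepts them.
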

View File

@@ -3,7 +3,7 @@ from time import sleep
import pytest
from sublime.adapters import AdapterManager, Result
from sublime.adapters import AdapterManager, Result, SearchResult
from sublime.config import AppConfiguration, ServerConfiguration
@@ -114,3 +114,22 @@ def test_get_song_details(adapter_manager: AdapterManager):
# assert 0
# TODO
pass
def test_search(adapter_manager: AdapterManager):
# TODO
return
results = []
# TODO ingest data
def search_callback(result: SearchResult):
results.append((result.artists, result.albums, result.songs, result.playlists))
AdapterManager.search("ohea", search_callback=search_callback).result()
# TODO test getting results from the server and updating using that
while len(results) < 1:
sleep(0.1)
assert len(results) == 1

View File

@@ -1,3 +1,4 @@
import json
import shutil
from dataclasses import asdict
from datetime import timedelta
@@ -27,8 +28,8 @@ MOCK_SONG_FILE2_HASH = "c32597c724e2e484dbf5856930b2e5bb80de13b7"
MOCK_SUBSONIC_SONGS = [
SubsonicAPI.Song(
"2",
"Song 2",
_parent="foo",
title="Song 2",
parent_id="d1",
_album="foo",
album_id="a1",
_artist="cool",
@@ -40,8 +41,8 @@ MOCK_SUBSONIC_SONGS = [
),
SubsonicAPI.Song(
"1",
"Song 1",
_parent="foo",
title="Song 1",
parent_id="d1",
_album="foo",
album_id="a1",
_artist="foo",
@@ -53,8 +54,8 @@ MOCK_SUBSONIC_SONGS = [
),
SubsonicAPI.Song(
"1",
"Song 1",
_parent="foo",
title="Song 1",
parent_id="d1",
_album="foo",
album_id="a1",
_artist="foo",
@@ -66,6 +67,8 @@ MOCK_SUBSONIC_SONGS = [
),
]
KEYS = FilesystemAdapter.CachedDataKey
@pytest.fixture
def adapter(tmp_path: Path):
@@ -100,7 +103,7 @@ def verify_songs(
assert len(actual_songs) == len(expected_songs)
for actual, song in zip(actual_songs, expected_songs):
for k, v in asdict(song).items():
if k in ("_genre", "_album", "_artist", "_parent", "album_id", "artist_id"):
if k in ("_genre", "_album", "_artist", "album_id", "artist_id"):
continue
print(k, "->", v) # noqa: T001
@@ -110,8 +113,6 @@ def verify_songs(
assert ("a1", "foo") == (actual_value.id, actual_value.name)
elif k == "genre":
assert v["name"] == actual_value.name
elif k == "parent":
assert "foo" == actual_value.id
elif k == "artist":
assert (v["id"], v["name"]) == (actual_value.id, actual_value.name)
else:
@@ -123,7 +124,7 @@ def test_caching_get_playlists(cache_adapter: FilesystemAdapter):
cache_adapter.get_playlists()
# Ingest an empty list (for example, no playlists added yet to server).
cache_adapter.ingest_new_data(FilesystemAdapter.CachedDataKey.PLAYLISTS, (), [])
cache_adapter.ingest_new_data(KEYS.PLAYLISTS, (), [])
# After the first cache miss of get_playlists, even if an empty list is
# returned, the next one should not be a cache miss.
@@ -131,7 +132,7 @@ def test_caching_get_playlists(cache_adapter: FilesystemAdapter):
# Ingest two playlists.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLISTS,
KEYS.PLAYLISTS,
(),
[
SubsonicAPI.Playlist("1", "test1", comment="comment"),
@@ -150,7 +151,7 @@ def test_caching_get_playlists(cache_adapter: FilesystemAdapter):
# Ingest a new playlist list with one of them deleted.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLISTS,
KEYS.PLAYLISTS,
(),
[
SubsonicAPI.Playlist("1", "test1", comment="comment"),
@@ -186,7 +187,7 @@ def test_caching_get_playlist_details(cache_adapter: FilesystemAdapter):
# Simulate the playlist being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
KEYS.PLAYLIST_DETAILS,
("1",),
SubsonicAPI.PlaylistWithSongs("1", "test1", songs=MOCK_SUBSONIC_SONGS[:2]),
)
@@ -200,7 +201,7 @@ def test_caching_get_playlist_details(cache_adapter: FilesystemAdapter):
# "Force refresh" the playlist and add a new song (duplicate).
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
KEYS.PLAYLIST_DETAILS,
("1",),
SubsonicAPI.PlaylistWithSongs("1", "foo", songs=MOCK_SUBSONIC_SONGS),
)
@@ -231,7 +232,7 @@ def test_no_caching_get_playlist_details(adapter: FilesystemAdapter):
def test_caching_get_playlist_then_details(cache_adapter: FilesystemAdapter):
# Ingest a list of playlists (like the sidebar, without songs)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLISTS,
KEYS.PLAYLISTS,
(),
[SubsonicAPI.Playlist("1", "test1"), SubsonicAPI.Playlist("2", "test2")],
)
@@ -248,13 +249,11 @@ def test_caching_get_playlist_then_details(cache_adapter: FilesystemAdapter):
# Simulate getting playlist details for id=1, then id=2
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
("1",),
SubsonicAPI.PlaylistWithSongs("1", "test1"),
KEYS.PLAYLIST_DETAILS, ("1",), SubsonicAPI.PlaylistWithSongs("1", "test1"),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
KEYS.PLAYLIST_DETAILS,
("2",),
SubsonicAPI.PlaylistWithSongs("2", "test2", songs=MOCK_SUBSONIC_SONGS),
)
@@ -272,7 +271,7 @@ def test_cache_cover_art(cache_adapter: FilesystemAdapter):
# After ingesting the data, reading from the cache should give the exact same file.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_test1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, ("pl_test1",), MOCK_ALBUM_ART,
)
with open(cache_adapter.get_cover_art_uri("pl_test1", "file"), "wb+") as cached:
with open(MOCK_ALBUM_ART, "wb+") as expected:
@@ -281,32 +280,28 @@ def test_cache_cover_art(cache_adapter: FilesystemAdapter):
def test_invalidate_playlist(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLISTS,
KEYS.PLAYLISTS,
(),
[SubsonicAPI.Playlist("1", "test1"), SubsonicAPI.Playlist("2", "test2")],
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_test1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, ("pl_test1",), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
KEYS.PLAYLIST_DETAILS,
("2",),
SubsonicAPI.PlaylistWithSongs("2", "test2", cover_art="pl_2", songs=[]),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_2",), MOCK_ALBUM_ART2,
KEYS.COVER_ART_FILE, ("pl_2",), MOCK_ALBUM_ART2,
)
stale_uri_1 = cache_adapter.get_cover_art_uri("pl_test1", "file")
stale_uri_2 = cache_adapter.get_cover_art_uri("pl_2", "file")
cache_adapter.invalidate_data(FilesystemAdapter.CachedDataKey.PLAYLISTS, ())
cache_adapter.invalidate_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS, ("2",)
)
cache_adapter.invalidate_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_test1",)
)
cache_adapter.invalidate_data(KEYS.PLAYLISTS, ())
cache_adapter.invalidate_data(KEYS.PLAYLIST_DETAILS, ("2",))
cache_adapter.invalidate_data(KEYS.COVER_ART_FILE, ("pl_test1",))
# After invalidating the data, it should cache miss, but still have the old, stale
data.
@@ -341,21 +336,16 @@ def test_invalidate_playlist(cache_adapter: FilesystemAdapter):
def test_invalidate_song_file(cache_adapter: FilesystemAdapter):
CACHE_KEYS = FilesystemAdapter.CachedDataKey
cache_adapter.ingest_new_data(KEYS.SONG, ("2",), MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(
CACHE_KEYS.SONG_DETAILS, ("2",), MOCK_SUBSONIC_SONGS[0]
KEYS.COVER_ART_FILE, ("s1", "song"), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
CACHE_KEYS.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
)
cache_adapter.ingest_new_data(
CACHE_KEYS.COVER_ART_FILE, ("s1", "song"), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(CACHE_KEYS.SONG_FILE, ("1",), MOCK_SONG_FILE)
cache_adapter.ingest_new_data(CACHE_KEYS.SONG_FILE, ("2",), MOCK_SONG_FILE2)
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("1",), MOCK_SONG_FILE)
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("2",), MOCK_SONG_FILE2)
cache_adapter.invalidate_data(CACHE_KEYS.SONG_FILE, ("1",))
cache_adapter.invalidate_data(CACHE_KEYS.COVER_ART_FILE, ("s1", "song"))
cache_adapter.invalidate_data(KEYS.SONG_FILE, ("1",))
cache_adapter.invalidate_data(KEYS.COVER_ART_FILE, ("s1", "song"))
with pytest.raises(CacheMissError):
cache_adapter.get_song_uri("1", "file")
@@ -369,21 +359,21 @@ def test_invalidate_song_file(cache_adapter: FilesystemAdapter):
def test_delete_playlists(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
KEYS.PLAYLIST_DETAILS,
("1",),
SubsonicAPI.PlaylistWithSongs("1", "test1", cover_art="pl_1", songs=[]),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS,
KEYS.PLAYLIST_DETAILS,
("2",),
SubsonicAPI.PlaylistWithSongs("2", "test1", cover_art="pl_2", songs=[]),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("pl_1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, ("pl_1",), MOCK_ALBUM_ART,
)
# Deleting a playlist should get rid of it entirely.
cache_adapter.delete_data(FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS, ("2",))
cache_adapter.delete_data(KEYS.PLAYLIST_DETAILS, ("2",))
try:
cache_adapter.get_playlist_details("2")
assert 0, "DID NOT raise CacheMissError"
@@ -391,7 +381,7 @@ def test_delete_playlists(cache_adapter: FilesystemAdapter):
assert e.partial_data is None
# Deleting a playlist with associated cover art should get rid of the cover art too.
cache_adapter.delete_data(FilesystemAdapter.CachedDataKey.PLAYLIST_DETAILS, ("1",))
cache_adapter.delete_data(KEYS.PLAYLIST_DETAILS, ("1",))
try:
cache_adapter.get_cover_art_uri("pl_1", "file")
assert 0, "DID NOT raise CacheMissError"
@@ -410,21 +400,17 @@ def test_delete_playlists(cache_adapter: FilesystemAdapter):
def test_delete_song_data(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
cache_adapter.ingest_new_data(KEYS.SONG_FILE, ("1",), MOCK_SONG_FILE)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",), MOCK_SONG_FILE
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("s1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, ("s1",), MOCK_ALBUM_ART,
)
music_file_path = cache_adapter.get_song_uri("1", "file")
cover_art_path = cache_adapter.get_cover_art_uri("s1", "file")
cache_adapter.delete_data(FilesystemAdapter.CachedDataKey.SONG_FILE, ("1",))
cache_adapter.delete_data(FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("s1",))
cache_adapter.delete_data(KEYS.SONG_FILE, ("1",))
cache_adapter.delete_data(KEYS.COVER_ART_FILE, ("s1",))
assert not Path(music_file_path).exists()
assert not Path(cover_art_path).exists()
@@ -446,12 +432,8 @@ def test_caching_get_genres(cache_adapter: FilesystemAdapter):
with pytest.raises(CacheMissError):
cache_adapter.get_genres()
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("2",), MOCK_SUBSONIC_SONGS[0]
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
)
cache_adapter.ingest_new_data(KEYS.SONG, ("2",), MOCK_SUBSONIC_SONGS[0])
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
# Getting genres now should look at what's on the songs. This should cache miss, but
# still give some data.
@@ -463,7 +445,7 @@ def test_caching_get_genres(cache_adapter: FilesystemAdapter):
# After we actually ingest the actual list, it should be returned instead.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.GENRES,
KEYS.GENRES,
(),
[
SubsonicAPI.Genre("Bar", 10, 20),
@@ -479,9 +461,7 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
cache_adapter.get_song_details("1")
# Simulate the song details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS, ("1",), MOCK_SUBSONIC_SONGS[1]
)
cache_adapter.ingest_new_data(KEYS.SONG, ("1",), MOCK_SUBSONIC_SONGS[1])
song = cache_adapter.get_song_details("1")
assert song.id == "1"
@@ -489,19 +469,19 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
assert song.album
assert (song.album.id, song.album.name) == ("a1", "foo")
assert song.artist and song.artist.name == "foo"
assert song.parent.id == "foo"
assert song.parent_id == "d1"
assert song.duration == timedelta(seconds=10.2)
assert song.path == "foo/song1.mp3"
assert song.genre and song.genre.name == "Foo"
# "Force refresh" the song details
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS,
KEYS.SONG,
("1",),
SubsonicAPI.Song(
"1",
"Song 1",
_parent="bar",
title="Song 1",
parent_id="bar",
_album="bar",
album_id="a2",
_artist="bar",
@@ -518,7 +498,7 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
assert song.album
assert (song.album.id, song.album.name) == ("a2", "bar")
assert song.artist and song.artist.name == "bar"
assert song.parent.id == "bar"
assert song.parent_id == "bar"
assert song.duration == timedelta(seconds=10.2)
assert song.path == "bar/song1.mp3"
assert song.genre and song.genre.name == "Bar"
@@ -529,12 +509,12 @@ def test_caching_get_song_details(cache_adapter: FilesystemAdapter):
def test_caching_less_info(cache_adapter: FilesystemAdapter):
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS,
KEYS.SONG,
("1",),
SubsonicAPI.Song(
"1",
"Song 1",
_parent="bar",
title="Song 1",
parent_id="bar",
_album="bar",
album_id="a2",
_artist="bar",
@@ -545,12 +525,12 @@ def test_caching_less_info(cache_adapter: FilesystemAdapter):
),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.SONG_DETAILS,
KEYS.SONG,
("1",),
SubsonicAPI.Song(
"1",
"Song 1",
_parent="bar",
title="Song 1",
parent_id="bar",
duration=timedelta(seconds=10.2),
path="bar/song1.mp3",
),
@@ -568,7 +548,7 @@ def test_caching_get_artists(cache_adapter: FilesystemAdapter):
# Ingest artists.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ARTISTS,
KEYS.ARTISTS,
(),
[
SubsonicAPI.ArtistAndArtistInfo("1", "test1", album_count=3, albums=[]),
@@ -583,7 +563,7 @@ def test_caching_get_artists(cache_adapter: FilesystemAdapter):
# Ingest a new artists list with one of them deleted.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ARTISTS,
KEYS.ARTISTS,
(),
[
SubsonicAPI.ArtistAndArtistInfo("1", "test1", album_count=3),
@@ -603,16 +583,12 @@ def test_caching_get_ignored_articles(cache_adapter: FilesystemAdapter):
cache_adapter.get_ignored_articles()
# Ingest ignored_articles.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.IGNORED_ARTICLES, (), {"Foo", "Bar"}
)
cache_adapter.ingest_new_data(KEYS.IGNORED_ARTICLES, (), {"Foo", "Bar"})
artists = cache_adapter.get_ignored_articles()
assert {"Foo", "Bar"} == artists
# Ingest a new ignored articles list with one of them changed.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.IGNORED_ARTICLES, (), {"Foo", "Baz"}
)
cache_adapter.ingest_new_data(KEYS.IGNORED_ARTICLES, (), {"Foo", "Baz"})
artists = cache_adapter.get_ignored_articles()
assert {"Foo", "Baz"} == artists
@@ -623,7 +599,7 @@ def test_caching_get_artist(cache_adapter: FilesystemAdapter):
# Simulate the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ARTIST,
KEYS.ARTIST,
("1",),
SubsonicAPI.ArtistAndArtistInfo(
"1",
@@ -658,7 +634,7 @@ def test_caching_get_artist(cache_adapter: FilesystemAdapter):
# Simulate "force refreshing" the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ARTIST,
KEYS.ARTIST,
("1",),
SubsonicAPI.ArtistAndArtistInfo(
"1",
@@ -704,7 +680,7 @@ def test_caching_get_album(cache_adapter: FilesystemAdapter):
# Simulate the album details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ALBUM,
KEYS.ALBUM,
("a1",),
SubsonicAPI.Album(
"a1",
@@ -739,7 +715,7 @@ def test_caching_get_album(cache_adapter: FilesystemAdapter):
def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
# Simulate the artist details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ARTIST,
KEYS.ARTIST,
("artist1",),
SubsonicAPI.ArtistAndArtistInfo(
"artist1",
@@ -759,23 +735,23 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ALBUM,
KEYS.ALBUM,
("1",),
SubsonicAPI.Album("1", "Foo", artist_id="artist1", cover_art="1"),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.ALBUM,
KEYS.ALBUM,
("2",),
SubsonicAPI.Album("2", "Bar", artist_id="artist1", cover_art="2"),
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("image",), MOCK_ALBUM_ART3,
KEYS.COVER_ART_FILE, ("image",), MOCK_ALBUM_ART3,
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("1",), MOCK_ALBUM_ART,
KEYS.COVER_ART_FILE, ("1",), MOCK_ALBUM_ART,
)
cache_adapter.ingest_new_data(
FilesystemAdapter.CachedDataKey.COVER_ART_FILE, ("2",), MOCK_ALBUM_ART2,
KEYS.COVER_ART_FILE, ("2",), MOCK_ALBUM_ART2,
)
stale_artist = cache_adapter.get_artist("artist1")
@@ -785,7 +761,7 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
stale_cover_art_1 = cache_adapter.get_cover_art_uri("1", "file")
stale_cover_art_2 = cache_adapter.get_cover_art_uri("2", "file")
cache_adapter.invalidate_data(FilesystemAdapter.CachedDataKey.ARTIST, ("artist1",))
cache_adapter.invalidate_data(KEYS.ARTIST, ("artist1",))
# Test the cascade of cache invalidations.
try:
@@ -829,3 +805,45 @@ def test_caching_invalidate_artist(cache_adapter: FilesystemAdapter):
except CacheMissError as e:
assert e.partial_data
assert e.partial_data == stale_cover_art_2
def test_get_music_directory(cache_adapter: FilesystemAdapter):
dir_id = "d1"
with pytest.raises(CacheMissError):
cache_adapter.get_directory(dir_id)
# Simulate the directory details being retrieved from Subsonic.
cache_adapter.ingest_new_data(
KEYS.DIRECTORY,
(dir_id,),
SubsonicAPI.Directory(
dir_id,
title="foo",
parent_id=None,
_children=[json.loads(s.to_json()) for s in MOCK_SUBSONIC_SONGS[:2]]
+ [
{
"id": "542",
"parent": dir_id,
"isDir": True,
"title": "Crash My Party",
}
],
),
)
directory = cache_adapter.get_directory(dir_id)
assert directory and directory.id == dir_id
assert directory.name == "foo"
assert directory.parent_id == "root"
dir_child, *song_children = directory.children
verify_songs(song_children, MOCK_SUBSONIC_SONGS[:2])
assert dir_child.id == "542"
assert dir_child.parent_id
assert dir_child.name == "Crash My Party"
def test_search(adapter: FilesystemAdapter):
# TODO
pass
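For context on the CacheMissError / partial_data pattern these tests exercise, a caller-side sketch might look like the following. This is only an illustration: `get_song_with_fallback` and `fetch_from_server` are hypothetical names, not part of the adapter API; `get_song_details`, `CacheMissError`, and `partial_data` are the pieces shown in the tests above.

def get_song_with_fallback(cache_adapter, song_id):
    # Try the on-disk cache first.
    try:
        return cache_adapter.get_song_details(song_id)
    except CacheMissError as e:
        # Stale or partial data (if any) can be shown immediately while the
        # ground-truth adapter refreshes it.
        if e.partial_data is not None:
            return e.partial_data
        # Hypothetical fallback to the server-backed adapter.
        return fetch_from_server(song_id)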
View File
@@ -0,0 +1,55 @@
{
"subsonic-response": {
"status": "ok",
"version": "1.15.0",
"indexes": {
"lastModified": 1588577415000,
"ignoredArticles": "The El La Los Las Le Les",
"index": [
{
"name": "A",
"artist": [
{
"id": "73",
"name": "The Afters"
},
{
"id": "100",
"name": "Adele"
},
{
"id": "120",
"name": "Austin French"
}
]
},
{
"name": "B",
"artist": [
{
"id": "93",
"name": "The Band Perry"
},
{
"id": "41",
"name": "Basshunter"
}
]
},
{
"name": "X-Z",
"artist": [
{
"id": "155",
"name": "Zac Brown Band"
},
{
"id": "25",
"name": "Zach Williams"
}
]
}
]
}
}
}
View File
@@ -0,0 +1,24 @@
{
"subsonic-response" : {
"status" : "ok",
"version" : "1.15.0",
"directory" : {
"id" : "60",
"name" : "Luke Bryan",
"playCount" : 0,
"child" : [ {
"id" : "542",
"parent" : "60",
"isDir" : true,
"title" : "Crash My Party",
"album" : "Crash My Party",
"artist" : "Luke Bryan",
"year" : 2013,
"genre" : "Country",
"coverArt" : "542",
"playCount" : 48,
"created" : "2020-03-27T05:27:57.000Z"
} ]
}
}
}
View File
@@ -0,0 +1,235 @@
{
"subsonic-response" : {
"status" : "ok",
"version" : "1.15.0",
"searchResult3" : {
"artist" : [ {
"id" : "25",
"name" : "Zach Williams",
"coverArt" : "ar-25",
"albumCount" : 1
}, {
"id" : "154",
"name" : "Zac Brown Band",
"coverArt" : "ar-154",
"albumCount" : 3
} ],
"album" : [ {
"id" : "31",
"name" : "Chain Breaker",
"artist" : "Zach Williams",
"artistId" : "25",
"coverArt" : "al-31",
"songCount" : 1,
"duration" : 196,
"created" : "2020-03-27T05:32:31.000Z",
"year" : 2016,
"genre" : "Christian & Gospel"
}, {
"id" : "235",
"name" : "The Foundation",
"artist" : "Zac Brown Band",
"artistId" : "154",
"coverArt" : "al-235",
"songCount" : 3,
"duration" : 675,
"created" : "2020-03-27T05:32:15.000Z",
"year" : 2008,
"genre" : "Country"
}, {
"id" : "236",
"name" : "Uncaged",
"artist" : "Zac Brown Band",
"artistId" : "154",
"coverArt" : "al-236",
"songCount" : 2,
"duration" : 602,
"created" : "2020-03-27T05:32:24.000Z",
"year" : 2012,
"genre" : "Country"
}, {
"id" : "237",
"name" : "You Get What You Give",
"artist" : "Zac Brown Band",
"artistId" : "154",
"coverArt" : "al-237",
"songCount" : 1,
"duration" : 273,
"created" : "2020-03-27T05:32:28.000Z",
"year" : 2010,
"genre" : "Country"
} ],
"song" : [ {
"id" : "246",
"parent" : "360",
"isDir" : false,
"title" : "Chain Breaker",
"album" : "Chain Breaker",
"artist" : "Zach Williams",
"track" : 1,
"year" : 2016,
"genre" : "Christian & Gospel",
"coverArt" : "360",
"size" : 7038712,
"contentType" : "audio/mp4",
"suffix" : "m4a",
"transcodedContentType" : "audio/mpeg",
"transcodedSuffix" : "mp3",
"duration" : 196,
"bitRate" : 256,
"path" : "Zach Williams/Chain Breaker/01 Chain Breaker.m4a",
"isVideo" : false,
"playCount" : 8,
"discNumber" : 1,
"created" : "2020-03-27T05:32:31.000Z",
"albumId" : "31",
"artistId" : "25",
"type" : "music"
}, {
"id" : "737",
"parent" : "738",
"isDir" : false,
"title" : "Highway 20 Ride",
"album" : "The Foundation",
"artist" : "Zac Brown Band",
"track" : 9,
"year" : 2008,
"genre" : "Country",
"coverArt" : "738",
"size" : 7843278,
"contentType" : "audio/mpeg",
"suffix" : "mp3",
"duration" : 229,
"bitRate" : 263,
"path" : "Zac Brown Band/The Foundation/09 - Highway 20 Ride.mp3",
"isVideo" : false,
"playCount" : 8,
"discNumber" : 1,
"created" : "2020-03-27T05:32:15.000Z",
"albumId" : "235",
"artistId" : "154",
"type" : "music"
}, {
"id" : "743",
"parent" : "738",
"isDir" : false,
"title" : "Chicken Fried",
"album" : "The Foundation",
"artist" : "Zac Brown Band",
"track" : 6,
"year" : 2008,
"genre" : "Country",
"coverArt" : "738",
"size" : 8420335,
"contentType" : "audio/mpeg",
"suffix" : "mp3",
"duration" : 238,
"bitRate" : 272,
"path" : "Zac Brown Band/The Foundation/06 - Chicken Fried.mp3",
"isVideo" : false,
"playCount" : 9,
"discNumber" : 1,
"created" : "2020-03-27T05:32:11.000Z",
"albumId" : "235",
"artistId" : "154",
"type" : "music"
}, {
"id" : "744",
"parent" : "738",
"isDir" : false,
"title" : "Whatever It Is",
"album" : "The Foundation",
"artist" : "Zac Brown Band",
"track" : 2,
"year" : 2008,
"genre" : "Country",
"coverArt" : "738",
"size" : 7313167,
"contentType" : "audio/mpeg",
"suffix" : "mp3",
"duration" : 208,
"bitRate" : 269,
"path" : "Zac Brown Band/The Foundation/02 - Whatever It Is.mp3",
"isVideo" : false,
"playCount" : 17,
"discNumber" : 1,
"created" : "2020-03-27T05:32:08.000Z",
"albumId" : "235",
"artistId" : "154",
"type" : "music"
}, {
"id" : "739",
"parent" : "740",
"isDir" : false,
"title" : "Sweet Annie",
"album" : "Uncaged",
"artist" : "Zac Brown Band",
"track" : 6,
"year" : 2012,
"genre" : "Country",
"coverArt" : "740",
"size" : 9523591,
"contentType" : "audio/mpeg",
"suffix" : "mp3",
"duration" : 278,
"bitRate" : 265,
"path" : "Zac Brown Band/Uncaged/06 - Sweet Annie.mp3",
"isVideo" : false,
"playCount" : 10,
"discNumber" : 1,
"created" : "2020-03-27T05:32:24.000Z",
"albumId" : "236",
"artistId" : "154",
"type" : "music"
}, {
"id" : "745",
"parent" : "740",
"isDir" : false,
"title" : "Goodbye In Her Eyes",
"album" : "Uncaged",
"artist" : "Zac Brown Band",
"track" : 3,
"year" : 2012,
"genre" : "Country",
"coverArt" : "740",
"size" : 11111186,
"contentType" : "audio/mpeg",
"suffix" : "mp3",
"duration" : 324,
"bitRate" : 267,
"path" : "Zac Brown Band/Uncaged/03 - Goodbye In Her Eyes.mp3",
"isVideo" : false,
"playCount" : 7,
"discNumber" : 1,
"created" : "2020-03-27T05:32:20.000Z",
"albumId" : "236",
"artistId" : "154",
"type" : "music"
}, {
"id" : "741",
"parent" : "742",
"isDir" : false,
"title" : "Colder Weather",
"album" : "You Get What You Give",
"artist" : "Zac Brown Band",
"track" : 8,
"year" : 2010,
"genre" : "Country",
"coverArt" : "742",
"size" : 9088683,
"contentType" : "audio/mpeg",
"suffix" : "mp3",
"duration" : 273,
"bitRate" : 255,
"path" : "Zac Brown Band/You Get What You Give/08 - Colder Weather.mp3",
"isVideo" : false,
"playCount" : 8,
"discNumber" : 1,
"created" : "2020-03-27T05:32:28.000Z",
"albumId" : "237",
"artistId" : "154",
"type" : "music"
} ]
}
}
}
View File
@@ -58,6 +58,7 @@ def mock_data_files(
num_files += 1
yield file, iter(parts)
# Make sure that there is at least one test file
assert num_files > 0
@@ -158,7 +159,7 @@ def test_get_playlist_details(adapter: SubsonicAdapter):
# Make sure that at least the first song got decoded properly.
assert playlist_details.songs[0] == SubsonicAPI.Song(
id="202",
_parent="318",
parent_id="318",
title="What a Beautiful Name",
_album="What a Beautiful Name - Single",
album_id="48",
@@ -201,7 +202,7 @@ def test_create_playlist(adapter: SubsonicAdapter):
songs=[
SubsonicAPI.Song(
id="202",
_parent="318",
parent_id="318",
title="What a Beautiful Name",
_album="What a Beautiful Name - Single",
album_id="48",
@@ -263,8 +264,8 @@ def test_get_song_details(adapter: SubsonicAdapter):
"544",
timedelta(seconds=203),
)
assert song.path.endswith("Sweet Caroline.mp3")
assert song.parent and song.parent.id == "544"
assert song.path and song.path.endswith("Sweet Caroline.mp3")
assert song.parent_id == "544"
assert song.artist
assert (song.artist.id, song.artist.name) == ("60", "Neil Diamond")
assert song.album
@@ -425,3 +426,48 @@ def test_get_album(adapter: SubsonicAdapter):
"Nothing Like You",
"Better Together",
]
def test_get_music_directory(adapter: SubsonicAdapter):
for filename, data in mock_data_files("get_music_directory"):
logging.info(filename)
logging.debug(data)
adapter._set_mock_data(data)
directory = adapter.get_directory("3")
assert directory.id == "60"
assert directory.name == "Luke Bryan"
assert directory.parent_id == "root"
assert directory.children and len(directory.children) == 1
child = directory.children[0]
assert isinstance(child, SubsonicAPI.Directory)
assert child.id == "542"
assert child.name == "Crash My Party"
assert child.parent_id == "60"
for filename, data in mock_data_files("get_indexes"):
logging.info(filename)
logging.debug(data)
adapter._set_mock_data(data)
directory = adapter.get_directory("root")
assert directory.id == "root"
assert directory.parent_id is None
assert len(directory.children) == 7
child = directory.children[0]
assert isinstance(child, SubsonicAPI.Directory)
assert child.id == "73"
assert child.name == "The Afters"
assert child.parent_id == "root"
def test_search(adapter: SubsonicAdapter):
for filename, data in mock_data_files("search3"):
logging.info(filename)
logging.debug(data)
adapter._set_mock_data(data)
search_results = adapter.search("3")
assert len(search_results._songs) == 7
assert len(search_results._artists) == 2
assert len(search_results._albums) == 4