From 00853243259b8f189a62faacccb94d6a2583f90a Mon Sep 17 00:00:00 2001 From: Eliot Berriot <contact@eliotberriot.com> Date: Sat, 13 Jul 2019 17:32:35 +0200 Subject: [PATCH] Splitted big CLI module in smaller chunks --- funkwhale_cli/api.py | 1 - funkwhale_cli/cli.py | 1205 -------------------------------- funkwhale_cli/cli/__init__.py | 24 + funkwhale_cli/cli/albums.py | 20 + funkwhale_cli/cli/artists.py | 20 + funkwhale_cli/cli/auth.py | 98 +++ funkwhale_cli/cli/base.py | 498 +++++++++++++ funkwhale_cli/cli/favorites.py | 59 ++ funkwhale_cli/cli/libraries.py | 51 ++ funkwhale_cli/cli/playlists.py | 116 +++ funkwhale_cli/cli/server.py | 49 ++ funkwhale_cli/cli/tracks.py | 142 ++++ funkwhale_cli/cli/uploads.py | 93 +++ funkwhale_cli/cli/users.py | 32 + funkwhale_cli/utils.py | 84 +++ setup.cfg | 5 + tests/test_api.py | 2 - tests/test_cli.py | 41 +- 18 files changed, 1311 insertions(+), 1229 deletions(-) delete mode 100644 funkwhale_cli/cli.py create mode 100644 funkwhale_cli/cli/__init__.py create mode 100644 funkwhale_cli/cli/albums.py create mode 100644 funkwhale_cli/cli/artists.py create mode 100644 funkwhale_cli/cli/auth.py create mode 100644 funkwhale_cli/cli/base.py create mode 100644 funkwhale_cli/cli/favorites.py create mode 100644 funkwhale_cli/cli/libraries.py create mode 100644 funkwhale_cli/cli/playlists.py create mode 100644 funkwhale_cli/cli/server.py create mode 100644 funkwhale_cli/cli/tracks.py create mode 100644 funkwhale_cli/cli/uploads.py create mode 100644 funkwhale_cli/cli/users.py diff --git a/funkwhale_cli/api.py b/funkwhale_cli/api.py index 8f98e33..082b22f 100644 --- a/funkwhale_cli/api.py +++ b/funkwhale_cli/api.py @@ -1,7 +1,6 @@ import aiohttp from . import exceptions -from . import logs from . import schemas from . import settings diff --git a/funkwhale_cli/cli.py b/funkwhale_cli/cli.py deleted file mode 100644 index cd299b4..0000000 --- a/funkwhale_cli/cli.py +++ /dev/null @@ -1,1205 +0,0 @@ -import asyncio -import aiohttp -import click -import click_log -import collections -import datetime -import dotenv -import functools -import keyring -import ssl -import sys - -# importing the backends explicitely is required for PyInstaller to work -import keyring.backends.kwallet -import keyring.backends.Windows -import keyring.backends.OS_X -import keyring.backends.SecretService -import keyring.backends.chainer - -import logging -import math -import urllib.parse -import json -import os -import pathvalidate -import pathlib -import urllib.parse -import tqdm - -from funkwhale_cli import api -from funkwhale_cli import config -from funkwhale_cli import exceptions -from funkwhale_cli import logs -from funkwhale_cli import output -from funkwhale_cli import utils - -click_log.basic_config(logs.logger) - -NOOP = object() - -SSL_PROTOCOLS = (asyncio.sslproto.SSLProtocol,) -try: - import uvloop.loop -except ImportError: - pass -else: - SSL_PROTOCOLS = (*SSL_PROTOCOLS, uvloop.loop.SSLProtocol) - - -def ignore_aiohttp_ssl_eror(loop): - """Ignore aiohttp #3535 / cpython #13548 issue with SSL data after close - - There is an issue in Python 3.7 up to 3.7.3 that over-reports a - ssl.SSLError fatal error (ssl.SSLError: [SSL: KRB5_S_INIT] application data - after close notify (_ssl.c:2609)) after we are already done with the - connection. See GitHub issues aio-libs/aiohttp#3535 and - python/cpython#13548. - - Given a loop, this sets up an exception handler that ignores this specific - exception, but passes everything else on to the previous exception handler - this one replaces. 
- - Checks for fixed Python versions, disabling itself when running on 3.7.4+ - or 3.8. - - """ - if sys.version_info >= (3, 7, 4): - return - - orig_handler = loop.get_exception_handler() - - def ignore_ssl_error(loop, context): - if context.get("message") in { - "SSL error in data received", - "Fatal error on transport", - }: - # validate we have the right exception, transport and protocol - exception = context.get("exception") - protocol = context.get("protocol") - if ( - isinstance(exception, ssl.SSLError) - and exception.reason == "KRB5_S_INIT" - and isinstance(protocol, SSL_PROTOCOLS) - ): - if loop.get_debug(): - asyncio.log.logger.debug("Ignoring asyncio SSL KRB5_S_INIT error") - return - if orig_handler is not None: - orig_handler(loop, context) - else: - loop.default_exception_handler(context) - - loop.set_exception_handler(ignore_ssl_error) - - -def noop_decorator(f): - return f - - -def URL(v): - if v is NOOP: - raise click.ClickException( - "You need to specify a server, either via the -H flag or using the FUNKWHALE_SERVER_URL environment variable" - ) - v = str(v) if v else None - parsed = urllib.parse.urlparse(v) - if parsed.scheme not in ["http", "https"] or not parsed.netloc: - raise ValueError("{} is not a valid url".format(v)) - if not v.endswith("/"): - v = v + "/" - return v - - -def env_file(v): - if v is NOOP: - v = None - if v is not None: - v = click.Path(exists=True)(v) - env_files = [v or ".env", config.get_env_file()] - for p in env_files: - logs.logger.debug("Loading env file at {}".format(p)) - dotenv.load_dotenv(p, override=False) - - return v - - -def async_command(f): - def wrapper(*args, **kwargs): - loop = asyncio.get_event_loop() - ignore_aiohttp_ssl_eror(loop) - _async_reraise = kwargs.pop("_async_reraise", False) - try: - return loop.run_until_complete(f(*args, **kwargs)) - except (aiohttp.client_exceptions.ClientError) as e: - if _async_reraise: - raise - message = str(e) - if hasattr(e, "status") and e.status == 401: - message = "Remote answered with {}, ensure your are logged in first".format( - e.status - ) - raise click.ClickException(message) - except (exceptions.FunkwhaleError) as e: - if _async_reraise: - raise - message = str(e) - raise click.ClickException(message) - else: - raise - - return functools.update_wrapper(wrapper, f) - - -SERVER_DECORATOR = click.option( - "-H", - "--url", - envvar="FUNKWHALE_SERVER_URL", - type=URL, - default=NOOP, - help="The URL of the Funkwhale server to query", -) -TOKEN_DECORATOR = click.option( - "-t", - "--token", - envvar="FUNKWHALE_TOKEN", - help="A JWT token to use for authentication", -) -RAW_DECORATOR = click.option( - "--raw", is_flag=True, help="Directly output JSON returned by the happy" -) - - -class lazy_credential: - """ - A proxy object to request access to the proxy object at the later possible point, - cf #4 - """ - - def __init__(self, *args): - self.args = args - self._cached_value = None - - @property - def value(self): - if self._cached_value: - return self._cached_value - try: - v = keyring.get_password(*self.args) - except ValueError as e: - raise click.ClickException( - "Error while retrieving password from keyring: {}. 
Your password may be incorrect.".format( - e.args[0] - ) - ) - except Exception as e: - raise click.ClickException( - "Error while retrieving password from keyring: {}".format(e.args[0]) - ) - self._cached_value = v - return v - - def __str__(self): - return str(self.value) - - def __eq__(self, other): - return self.value == other - - def __repr__(self): - return str(self.value) - - def __bool__(self): - return bool(self.value) - - -def set_server(ctx, url, token, use_auth=True): - ctx.ensure_object(dict) - ctx.obj["SERVER_URL"] = url - parsed = urllib.parse.urlparse(url) - ctx.obj["SERVER_NETLOC"] = parsed.netloc - ctx.obj["SERVER_PROTOCOL"] = parsed.scheme - token = (token or lazy_credential(url, "_")) if use_auth else None - ctx.obj["remote"] = api.get_api( - domain=ctx.obj["SERVER_NETLOC"], - protocol=ctx.obj["SERVER_PROTOCOL"], - token=token, - ) - - -@click.group() -@click.option( - "-e", - "--env-file", - envvar="ENV_FILE", - type=env_file, - default=NOOP, - help="Path to an env file to use. A .env file will be used automatically if any", -) -@click.option( - "-q", - "--quiet", - envvar="FUNKWHALE_QUIET", - is_flag=True, - default=False, - help="Disable logging", -) -@click.option( - "--no-login", - envvar="FUNKWHALE_NO_LOGIN", - is_flag=True, - default=False, - help="Disable authentication/keyring", -) -@SERVER_DECORATOR -@TOKEN_DECORATOR -@click_log.simple_verbosity_option(logs.logger, expose_value=True) -@click.pass_context -def cli(ctx, env_file, url, verbosity, token, quiet, no_login): - # small hack to fix some weird issues with pyinstaller and keyring - # there seems to be a cache issue somewhere - del keyring.backend.get_all_keyring.__wrapped__.always_returns - keyring.core.init_backend() - # /end of hack - ctx.ensure_object(dict) - logs.logger.disabled = quiet - set_server(ctx, url, token, use_auth=not no_login) - - -@cli.command() -@click.option("-u", "--username", envvar="FUNKWHALE_USERNAME", prompt=True) -@click.option( - "-p", "--password", envvar="FUNKWHALE_PASSWORD", prompt=True, hide_input=True -) -@click.pass_context -@async_command -async def login(ctx, username, password): - async with api.get_session() as session: - token = await api.get_jwt_token( - session, ctx.obj["SERVER_URL"], username=username, password=password - ) - - try: - keyring.set_password(ctx.obj["SERVER_URL"], "_", token) - except ValueError as e: - raise click.ClickException( - "Error while retrieving password from keyring: {}. 
Your password may be incorrect.".format( - e.args[0] - ) - ) - except Exception as e: - raise click.ClickException( - "Error while retrieving password from keyring: {}".format(e.args[0]) - ) - click.echo("Login successfull!") - - -@cli.command() -@click.pass_context -@async_command -async def logout(ctx): - keyring.delete_password(ctx.obj["SERVER_URL"], "_") - click.echo("Logout successfull!") - - -@cli.group() -@click.pass_context -def server(ctx, url): - pass - - -@cli.group() -@click.pass_context -def server(ctx): - pass - - -@server.command() -@RAW_DECORATOR -@click.pass_context -@async_command -async def info(ctx, raw): - async with api.get_session() as session: - nodeinfo = await api.fetch_nodeinfo( - session, - domain=ctx.obj["SERVER_NETLOC"], - protocol=ctx.obj["SERVER_PROTOCOL"], - ) - if raw: - click.echo(json.dumps(nodeinfo, sort_keys=True, indent=4)) - return - click.echo("\n") - click.echo("General") - click.echo("-------") - click.echo("Url: {}".format(ctx.obj["SERVER_URL"])) - click.echo("Name: {}".format(nodeinfo["metadata"]["nodeName"])) - click.echo( - "Short description: {}".format(nodeinfo["metadata"]["shortDescription"]) - ) - click.echo("\n") - click.echo("Software") - click.echo("----------") - click.echo("Software name: {}".format(nodeinfo["software"]["name"])) - click.echo("Version: {}".format(nodeinfo["software"]["version"])) - click.echo("\n") - click.echo("Configuration") - click.echo("---------------") - click.echo( - "Registrations: {}".format( - "open" if nodeinfo["openRegistrations"] else "closed" - ) - ) - - -@cli.group() -@click.pass_context -def libraries(ctx): - """ - Manage libraries - """ - - -def get_url_param(url, name): - parsed = urllib.parse.urlparse(url) - v = urllib.parse.parse_qs(parsed.query).get(name) - if v: - return v[0] - return None - - -def get_pagination_data(payload): - data = {"next_page": None, "page_size": None} - if payload.get("next"): - next_page = get_url_param(payload["next"], "page") - data["next_page"] = int(next_page) - data["total_pages"] = math.ceil(payload["count"] / len(payload["results"])) - data["current_page"] = int(next_page) - 1 - data["page_size"] = len(payload["results"]) - - if payload.get("previous"): - previous_page = get_url_param(payload["previous"], "page") or 0 - data.setdefault("current_page", int(previous_page) + 1) - data.setdefault("total_pages", data["current_page"]) - if ( - not data["page_size"] - and payload["count"] - len(payload["results"]) > 0 - and data["total_pages"] > 1 - ): - data["page_size"] = int(payload["count"] - len(payload["results"])) / ( - data["total_pages"] - 1 - ) - data.setdefault("current_page", 1) - data.setdefault("total_pages", 1) - return data - - -def get_ls_command( - group, - endpoint, - output_conf, - pagination=True, - filter=True, - ordering=True, - with_id=False, - owned_conf=None, - name="ls", - doc="", - id_metavar="ID", -): - - available_fields = sorted( - set(output_conf["labels"]) | set(output.FIELDS["*"].keys()) - ) - id_decorator = ( - click.argument("id", metavar=id_metavar) if with_id else noop_decorator - ) - page_decorator = ( - click.option("--page", "-p", type=click.INT, default=1) - if pagination - else noop_decorator - ) - page_size_decorator = ( - click.option("--page-size", "-s", type=click.INT, default=None) - if pagination - else noop_decorator - ) - limit_decorator = ( - click.option("--limit", "-l", type=click.INT, default=1) - if pagination - else noop_decorator - ) - ordering_decorator = ( - click.option("--ordering", "-o", default=None) if 
ordering else noop_decorator - ) - filter_decorator = ( - click.option("--filter", "-f", multiple=True) if filter else noop_decorator - ) - owned_decorator = ( - click.option("--owned", is_flag=True, default=False) if owned_conf else noop_decorator - ) - - @id_decorator - @click.argument("query", nargs=-1) - @RAW_DECORATOR - @click.option( - "--format", "-t", type=click.Choice(output.TABLE_FORMATS), default="simple" - ) - @click.option("--no-headers", "-h", is_flag=True, default=False) - @click.option("--ids", "-i", is_flag=True) - @page_decorator - @page_size_decorator - @ordering_decorator - @filter_decorator - @limit_decorator - @owned_decorator - @click.option( - "--column", - "-c", - multiple=True, - help="Which column to display. Available: {}. \nDefault: {}".format( - ", ".join(available_fields), ", ".join(output_conf["labels"]) - ), - ) - @click.pass_context - @async_command - async def ls(ctx, raw, column, format, no_headers, ids, **kwargs): - id = kwargs.get("id") - limit = kwargs.get("limit") - page = kwargs.get("page") - page_size = kwargs.get("page_size") - ordering = kwargs.get("ordering") - filter = kwargs.get("filter") - query = kwargs.get("query") - owned = kwargs.get("owned") - if ids: - no_headers = True - column = [output_conf.get("id_field", "UUID")] - format = "plain" - base_url = endpoint - if with_id: - base_url = base_url.format(id) - next_page_url = None - page_count = 0 - while True: - if limit and page_count >= limit: - break - async with ctx.obj["remote"]: - if not pagination or page_count == 0: - url = base_url - params = {} - if page: - params["page"] = page - if page_size: - params["page_size"] = page_size - if ordering: - params["ordering"] = ordering - if query: - params["q"] = " ".join(query) - if filter: - for f in filter: - query = urllib.parse.parse_qs(f) - for k, v in query.items(): - params[k] = v[0] - if owned_conf and owned: - user_info = await get_user_info(ctx) - params[owned_conf['param']] = utils.recursive_getattr(user_info, owned_conf['field']) - - else: - params = {} - url = next_page_url - if not url: - break - result = await ctx.obj["remote"].request("get", url, params=params) - result.raise_for_status() - payload = await result.json() - next_page_url = payload.get("next") - page_count += 1 - if raw: - click.echo(json.dumps(payload, sort_keys=True, indent=4)) - else: - click.echo( - output.table( - payload["results"], - column or output_conf["labels"], - type=output_conf["type"], - format=format, - headers=not no_headers, - ) - ) - if not pagination: - break - pagination_data = get_pagination_data(payload) - if pagination_data["page_size"]: - start = ( - int( - (pagination_data["current_page"] - 1) - * pagination_data["page_size"] - ) - + 1 - ) - else: - start = 1 - end = min(start + len(payload["results"]) - 1, payload["count"]) - logs.logger.info( - "\nObjects {start}-{end} on {total} (page {current_page}/{total_pages})".format( - start=start, - end=end, - total=payload["count"], - current_page=pagination_data["current_page"], - total_pages=pagination_data["total_pages"] or 1, - ) - ) - - ls.__doc__ = doc - return group.command(name)(ls) - - -def get_show_command( - group, url_template, output_conf, name="show", force_id=None, doc="" -): - - available_fields = sorted( - set(output_conf["labels"]) | set(output.FIELDS["*"].keys()) - ) - if force_id: - - def id_decorator(f): - @functools.wraps(f) - def inner(raw, column, format): - return f(raw, force_id, column, format) - - return inner - - else: - id_decorator = click.argument("id") - - 
@id_decorator - @RAW_DECORATOR - @click.option( - "--format", "-t", type=click.Choice(output.TABLE_FORMATS), default="simple" - ) - @click.option( - "--column", - "-c", - multiple=True, - help="Which column to display. Available: {}. \nDefault: {}".format( - ", ".join(available_fields), ", ".join(output_conf["labels"]) - ), - ) - @click.pass_context - @async_command - async def show(ctx, raw, id, column, format): - - async with ctx.obj["remote"]: - async with ctx.obj["remote"].request( - "get", url_template.format(id) - ) as result: - result.raise_for_status() - payload = await result.json() - if raw: - click.echo(json.dumps(payload, sort_keys=True, indent=4)) - else: - click.echo( - output.obj_table( - payload, - column or output_conf["labels"], - type=output_conf["type"], - format=format, - ) - ) - - show.__doc__ = "" - return group.command(name)(show) - - -def get_delete_command( - group, - url_template, - confirm="Do you want to delete {} objects? This action is irreversible.", - doc='Delect the given objects', - name="rm", - id_metavar='ID' -): - @click.argument("id", nargs=-1, metavar=id_metavar) - @RAW_DECORATOR - @click.option("--no-input", is_flag=True) - @click.pass_context - @async_command - async def delete(ctx, raw, id, no_input): - async with ctx.obj["remote"]: - if not no_input and not click.confirm(confirm.format(len(id)), abort=True): - return - for i in id: - result = await ctx.obj["remote"].request( - "delete", url_template.format(i) - ) - if result.status == 404: - logs.logger.warn("Couldn't delete {}: object not found".format(i)) - else: - result.raise_for_status() - - click.echo("{} Objects deleted!".format(len(id))) - - delete.__doc__ = doc - return group.command(name)(delete) - - -libraries_ls = get_ls_command( - libraries, - "api/v1/libraries/", - output_conf={ - "labels": ["UUID", "Name", "Visibility", "Uploads"], - "type": "LIBRARY", - }, -) -libraries_rm = get_delete_command(libraries, "api/v1/libraries/{}/") - - -@libraries.command("create") -@click.option("--name", prompt=True) -@click.option( - "--visibility", - type=click.Choice(["me", "instance", "everyone"]), - default="me", - prompt=True, -) -@click.option("--raw", is_flag=True) -@click.pass_context -@async_command -async def libraries_create(ctx, raw, name, visibility): - async with ctx.obj["remote"]: - result = await ctx.obj["remote"].request( - "post", "api/v1/libraries/", data={"name": name, "visibility": visibility} - ) - result.raise_for_status() - payload = await result.json() - if raw: - click.echo(json.dumps(payload, sort_keys=True, indent=4)) - else: - click.echo("Library created:") - click.echo( - output.table([payload], ["UUID", "Name", "Visibility"], type="LIBRARY") - ) - - -@cli.group() -@click.pass_context -def artists(ctx): - pass - - -artists_ls = get_ls_command( - artists, - "api/v1/artists/", - output_conf={ - "labels": ["ID", "Name", "Albums", "Tracks", "Created"], - "type": "ARTIST", - "id_field": "ID", - }, -) - - -@cli.group() -@click.pass_context -def albums(ctx): - pass - - -albums_ls = get_ls_command( - albums, - "api/v1/albums/", - output_conf={ - "labels": ["ID", "Title", "Artist", "Tracks", "Created"], - "type": "ALBUM", - "id_field": "ID", - }, -) - - -@cli.group() -@click.pass_context -def tracks(ctx): - pass - - -tracks_ls = get_ls_command( - tracks, - "api/v1/tracks/", - output_conf={ - "labels": ["ID", "Title", "Artist", "Album", "Disc", "Position"], - "type": "TRACK", - "id_field": "ID", - }, -) - - -async def get_track_download_url(id, remote, format=None): - result = 
await remote.request("get", "api/v1/tracks/{}/".format(id)) - result.raise_for_status() - payload = await result.json() - - try: - download_url = payload["uploads"][0]["listen_url"] - except IndexError: - if remote.token: - raise click.ClickException("This file is not available for download") - else: - raise click.ClickException( - "This file is not available for download, try to login first" - ) - - if download_url.startswith("/"): - download_url = remote.base_url[:-1] + download_url - if format: - download_url = add_url_params(download_url, {"to": format}) - else: - format = payload["uploads"][0]["extension"] - - return download_url, format, payload - - -def add_url_params(url, params): - """ Add GET params to provided URL being aware of existing. - - :param url: string of target URL - :param params: dict containing requested params to be added - :return: string with updated URL - - >> url = 'http://stackoverflow.com/test?answers=true' - >> new_params = {'answers': False, 'data': ['some','values']} - >> add_url_params(url, new_params) - 'http://stackoverflow.com/test?data=some&data=values&answers=false' - """ - # Unquoting URL first so we don't loose existing args - url = urllib.parse.unquote(url) - # Extracting url info - parsed_url = urllib.parse.urlparse(url) - # Extracting URL arguments from parsed URL - get_args = parsed_url.query - # Converting URL arguments to dict - parsed_get_args = dict(urllib.parse.parse_qsl(get_args)) - # Merging URL arguments dict with new params - parsed_get_args.update(params) - - # Bool and Dict values should be converted to json-friendly values - # you may throw this part away if you don't like it :) - parsed_get_args.update( - { - k: json.dumps(v) - for k, v in parsed_get_args.items() - if isinstance(v, (bool, dict)) - } - ) - - # Converting URL argument to proper query string - encoded_get_args = urllib.parse.urlencode(parsed_get_args, doseq=True) - # Creating new parsed result object based on provided with new - # URL arguments. Same thing happens inside of urlparse. 
- new_url = urllib.parse.ParseResult( - parsed_url.scheme, - parsed_url.netloc, - parsed_url.path, - parsed_url.params, - encoded_get_args, - parsed_url.fragment, - ).geturl() - - return new_url - - -def sanitize_recursive(value): - if isinstance(value, dict): - return {k: sanitize_recursive(v) for k, v in value.items()} - elif isinstance(value, list): - return [sanitize_recursive(v) for v in value] - else: - return pathvalidate.sanitize_filepath(str(value)) - - -def flatten(d, parent_key="", sep="_"): - items = [] - for k, v in d.items(): - new_key = parent_key + sep + k if parent_key else k - if isinstance(v, collections.MutableMapping): - items.extend(flatten(v, new_key, sep=sep).items()) - else: - items.append((new_key, v)) - return dict(items) - - -@tracks.command("download") -@click.argument("id", nargs=-1, required=True) -@click.option("--format", "-f") -@click.option("-d", "--directory", type=click.Path(exists=True)) -@click.option("-o", "--overwrite", is_flag=True, default=False) -@click.option("-s", "--skip-existing", is_flag=True, default=False) -@click.option("-i", "--ignore-errors", multiple=True, type=int) -@click.option( - "-t", - "--template", - default="{artist} - {album} - {title}.{extension}", - envvar="FUNKWHALE_DOWNLOAD_PATH_TEMPLATE", -) -@click.pass_context -@async_command -async def track_download( - ctx, id, format, directory, template, overwrite, ignore_errors, skip_existing -): - async with ctx.obj["remote"]: - progressbar = tqdm.tqdm(id, unit="Files") - for i in progressbar: - download_url, format, track_data = await get_track_download_url( - i, ctx.obj["remote"], format=format - ) - logs.logger.info("Downloading from {}".format(download_url)) - - filename_params = flatten(track_data) - filename_params["album"] = filename_params["album_title"] - filename_params["artist"] = filename_params["artist_name"] - filename_params["extension"] = format - filename_params["year"] = ( - filename_params["album_release_date"][:4] - if filename_params["album_release_date"] - else None - ) - filename_params = { - k: sanitize_recursive(v) for k, v in filename_params.items() - } - if directory: - filename = template.format(**filename_params) - full_path = os.path.join(directory, filename) - existing = os.path.exists(full_path) - if skip_existing and existing: - logs.logger.info( - "'{}' already exists on disk, skipping download".format( - full_path - ) - ) - continue - elif not overwrite and existing: - raise click.ClickException( - "'{}' already exists on disk. 
Relaunch this command with --overwrite if you want to replace it".format( - full_path - ) - ) - - async with ctx.obj["remote"].request( - "get", download_url, timeout=0 - ) as response: - try: - response.raise_for_status() - except aiohttp.ClientResponseError as e: - if response.status in ignore_errors: - logs.logger.warning( - "Remote answered with {} for url {}, skipping".format( - response.status, download_url - ) - ) - continue - else: - raise click.ClickException( - "Remote answered with {} for url {}, exiting".format( - response.status, download_url - ) - ) - if directory: - final_directory = os.path.dirname(full_path) - pathlib.Path(final_directory).mkdir(parents=True, exist_ok=True) - logs.logger.info("Downloading to {}".format(full_path)) - out = open(full_path, "wb") - else: - out = click.get_binary_stream("stdout") - while True: - chunk = await response.content.read(1024) - if not chunk: - break - out.write(chunk) - logs.logger.info("Download complete") - - -@cli.group() -@click.pass_context -def uploads(ctx): - pass - - -uploads_ls = get_ls_command( # noqa - uploads, - "api/v1/uploads/", - output_conf={ - "labels": ["UUID", "Track", "Artist", "Import status", "Size", "Mimetype"], - "type": "UPLOAD", - }, -) - - -def track_read(file_obj, name, progress): - read = file_obj.read - - def patched_read(size): - content = read(size) - progress.update(len(content)) - progress.set_postfix(file=name[-30:], refresh=False) - return content - - setattr(file_obj, "read", patched_read) - - -async def upload(path, size, remote, ref, library_id, semaphore, global_progress): - async with semaphore: - filename = os.path.basename(path) - data = { - "library": library_id, - "import_reference": ref, - "source": "upload://{}".format(filename), - "audio_file": open(path, "rb"), - } - track_read(data["audio_file"], filename, global_progress) - response = await remote.request("post", "api/v1/uploads/", data=data, timeout=0) - response.raise_for_status() - return response - - -@uploads.command("create") -@click.argument("library_id") -@click.argument("paths", nargs=-1) -@click.option("-r", "--ref", default=None) -@click.option("-p", "--parallel", type=click.INT, default=1) -@click.pass_context -@async_command -async def uploads_create(ctx, library_id, paths, ref, parallel): - logs.logger.info("Uploading {} files…".format(len(paths))) - paths = sorted(set(paths)) - if not paths: - return - ref = ref or "funkwhale-cli-import-{}".format(datetime.datetime.now().isoformat()) - sizes = {path: os.path.getsize(path) for path in paths} - - async with ctx.obj["remote"]: - logs.logger.info("Checking library {} existence…".format(library_id)) - library_data = await ctx.obj["remote"].request( - "get", "api/v1/libraries/{}/".format(library_id) - ) - library_data.raise_for_status() - sem = asyncio.Semaphore(parallel) - progressbar = tqdm.tqdm( - total=sum(sizes.values()), unit="B", unit_scale=True, unit_divisor=1024 - ) - tasks = [ - upload( - path=path, - ref=ref, - size=sizes[path], - global_progress=progressbar, - remote=ctx.obj["remote"], - library_id=library_id, - semaphore=sem, - ) - for path in paths - ] - await asyncio.gather(*tasks) - logs.logger.info("Upload complete") - - -@cli.group() -@click.pass_context -def favorites(ctx): - pass - - -@favorites.group("tracks") -@click.pass_context -def favorites_tracks(ctx): - pass - - -@favorites_tracks.command("create") -@click.argument("id", nargs=-1, required=True) -@click.pass_context -@async_command -async def favorites_tracks_create(ctx, id): - click.echo("Adding 
{} tracks to favorites…".format(len(id))) - - async with ctx.obj["remote"]: - for i in id: - data = {"track": i} - async with ctx.obj["remote"].request( - "post", "api/v1/favorites/tracks/", data=data - ) as response: - response.raise_for_status() - click.echo("Track {} added to favorites".format(i)) - - -@favorites_tracks.command("rm") -@click.argument("id", nargs=-1, required=True) -@click.pass_context -@async_command -async def favorites_tracks_rm(ctx, id): - click.echo("Removing {} tracks to favorites…".format(len(id))) - - async with ctx.obj["remote"]: - for i in id: - data = {"track": i} - async with ctx.obj["remote"].request( - "delete", "api/v1/favorites/tracks/remove/", data=data - ) as response: - response.raise_for_status() - click.echo("Track {} removed from favorites".format(i)) - - -favorites_tracks_ls = get_ls_command( # noqa - favorites_tracks, - "api/v1/favorites/tracks/", - output_conf={ - "labels": ["Track ID", "Track", "Artist", "Favorite Date"], - "type": "TRACK_FAVORITE", - }, -) - - -@cli.group() -@click.pass_context -def playlists(ctx): - """ - Manage playlists - """ - - -playlists_ls = get_ls_command( - playlists, - "api/v1/playlists/", - doc="List available playlists", - owned_conf={'param': 'user', 'field': 'id'}, - output_conf={ - "labels": [ - "ID", - "Name", - "Visibility", - "Tracks Count", - "User", - "Created", - "Modified", - ], - "type": "PLAYLIST", - }, -) -playlists_rm = get_delete_command( - playlists, "api/v1/playlists/{}/", id_metavar="PLAYLIST_ID", doc='Delete the given playlists' -) - - -@playlists.command("create") -@click.option("--name", prompt=True) -@click.option( - "--visibility", - type=click.Choice(["me", "instance", "everyone"]), - default="me", - prompt=True, -) -@click.option("--raw", is_flag=True) -@click.pass_context -@async_command -async def playlists_create(ctx, raw, name, visibility): - """ - Create a new playlist - """ - async with ctx.obj["remote"]: - result = await ctx.obj["remote"].request( - "post", "api/v1/playlists/", data={"name": name, "visibility": visibility} - ) - result.raise_for_status() - payload = await result.json() - if raw: - click.echo(json.dumps(payload, sort_keys=True, indent=4)) - else: - click.echo("Playlist created:") - click.echo( - output.table( - [payload], ["ID", "Name", "Visibility", "Tracks Count"], type="PLAYLIST" - ) - ) - - -@playlists.command("tracks-add") -@click.argument("id", metavar="PLAYLIST_ID") -@click.argument("track", nargs=-1, metavar="[TRACK_ID]…") -@click.option( - "--no-duplicates", - is_flag=True, - default=False, - help="Prevent insertion of tracks that already exist in the playlist. 
An error will be raised in this case.", -) -@click.pass_context -@async_command -async def playlists_tracks_add(ctx, id, track, no_duplicates): - """ - Insert one or more tracks in a playlist - """ - if not track: - return click.echo("No track id provided") - - async with ctx.obj["remote"]: - async with ctx.obj["remote"].request( - "post", - "api/v1/playlists/{}/".format(id), - data={"tracks": track, "allow_duplicates": not no_duplicates}, - ) as response: - response.raise_for_status() - - -playlists_tracks = get_ls_command( - playlists, - "api/v1/playlists/{}/tracks/", - name="tracks", - with_id=True, - pagination=False, - ordering=False, - filter=False, - output_conf={ - "labels": ["Position", "ID", "Title", "Artist", "Album", "Created"], - "id_field": "ID", - "type": "PLAYLIST_TRACK", - }, - id_metavar="PLAYLIST_ID", - doc="List the tracks included in a playlist" -) - - -@cli.group() -@click.pass_context -def users(ctx): - pass - - -users_me = get_show_command( - users, - "api/v1/users/users/{}/", - output_conf={ - "labels": [ - "ID", - "Username", - "Name", - "Email", - "Federation ID", - "Joined", - "Visibility", - "Staff", - "Admin", - "Permissions", - ], - "type": "USER", - }, - force_id="me", - name="me", -) - -async def get_user_info(ctx): - async with ctx.obj["remote"].request( - "get", "api/v1/users/users/me/", - ) as result: - result.raise_for_status() - return await result.json() - - - -if __name__ == "__main__": - cli() diff --git a/funkwhale_cli/cli/__init__.py b/funkwhale_cli/cli/__init__.py new file mode 100644 index 0000000..2ae0ba7 --- /dev/null +++ b/funkwhale_cli/cli/__init__.py @@ -0,0 +1,24 @@ +from . import auth +from . import albums +from . import artists +from . import favorites +from . import libraries +from . import playlists +from . import tracks +from . import uploads +from . import users + +from .base import cli + +__all__ = [ + "auth", + "albums", + "artists", + "favorites", + "libraries", + "playlists", + "tracks", + "uploads", + "users", + "cli", +] diff --git a/funkwhale_cli/cli/albums.py b/funkwhale_cli/cli/albums.py new file mode 100644 index 0000000..6bd1c1f --- /dev/null +++ b/funkwhale_cli/cli/albums.py @@ -0,0 +1,20 @@ +import click + +from . import base + + +@base.cli.group() +@click.pass_context +def albums(ctx): + pass + + +albums_ls = base.get_ls_command( + albums, + "api/v1/albums/", + output_conf={ + "labels": ["ID", "Title", "Artist", "Tracks", "Created"], + "type": "ALBUM", + "id_field": "ID", + }, +) diff --git a/funkwhale_cli/cli/artists.py b/funkwhale_cli/cli/artists.py new file mode 100644 index 0000000..3e062fb --- /dev/null +++ b/funkwhale_cli/cli/artists.py @@ -0,0 +1,20 @@ +import click + +from . import base + + +@base.cli.group() +@click.pass_context +def artists(ctx): + pass + + +artists_ls = base.get_ls_command( + artists, + "api/v1/artists/", + output_conf={ + "labels": ["ID", "Name", "Albums", "Tracks", "Created"], + "type": "ARTIST", + "id_field": "ID", + }, +) diff --git a/funkwhale_cli/cli/auth.py b/funkwhale_cli/cli/auth.py new file mode 100644 index 0000000..7d96aa0 --- /dev/null +++ b/funkwhale_cli/cli/auth.py @@ -0,0 +1,98 @@ +import click +import keyring + +# importing the backends explicitely is required for PyInstaller to work +import keyring.backends.kwallet +import keyring.backends.Windows +import keyring.backends.OS_X +import keyring.backends.SecretService +import keyring.backends.chainer + +from . import base +from .. 
import api + + +class lazy_credential: + """ + A proxy object to request access to the proxy object at the later possible point, + cf #4 + """ + + def __init__(self, *args): + self.args = args + self._cached_value = None + + @property + def value(self): + if self._cached_value: + return self._cached_value + try: + v = keyring.get_password(*self.args) + except ValueError as e: + raise click.ClickException( + "Error while retrieving password from keyring: {}. Your password may be incorrect.".format( + e.args[0] + ) + ) + except Exception as e: + raise click.ClickException( + "Error while retrieving password from keyring: {}".format(e.args[0]) + ) + self._cached_value = v + return v + + def __str__(self): + return str(self.value) + + def __eq__(self, other): + return self.value == other + + def __repr__(self): + return str(self.value) + + def __bool__(self): + return bool(self.value) + + +def init_keyring(): + # small hack to fix some weird issues with pyinstaller and keyring + # there seems to be a cache issue somewhere + del keyring.backend.get_all_keyring.__wrapped__.always_returns + keyring.core.init_backend() + # /end of hack + + +@base.cli.command() +@click.option("-u", "--username", envvar="FUNKWHALE_USERNAME", prompt=True) +@click.option( + "-p", "--password", envvar="FUNKWHALE_PASSWORD", prompt=True, hide_input=True +) +@click.pass_context +@base.async_command +async def login(ctx, username, password): + async with api.get_session() as session: + token = await api.get_jwt_token( + session, ctx.obj["SERVER_URL"], username=username, password=password + ) + + try: + keyring.set_password(ctx.obj["SERVER_URL"], "_", token) + except ValueError as e: + raise click.ClickException( + "Error while retrieving password from keyring: {}. Your password may be incorrect.".format( + e.args[0] + ) + ) + except Exception as e: + raise click.ClickException( + "Error while retrieving password from keyring: {}".format(e.args[0]) + ) + click.echo("Login successfull!") + + +@base.cli.command() +@click.pass_context +@base.async_command +async def logout(ctx): + keyring.delete_password(ctx.obj["SERVER_URL"], "_") + click.echo("Logout successfull!") diff --git a/funkwhale_cli/cli/base.py b/funkwhale_cli/cli/base.py new file mode 100644 index 0000000..298171e --- /dev/null +++ b/funkwhale_cli/cli/base.py @@ -0,0 +1,498 @@ +import asyncio +import aiohttp +import click +import click_log +import dotenv +import functools +import ssl +import sys +import urllib.parse + +import math +import json + +from funkwhale_cli import api +from funkwhale_cli import config +from funkwhale_cli import exceptions +from funkwhale_cli import logs +from funkwhale_cli import output +from funkwhale_cli import utils + +click_log.basic_config(logs.logger) + +NOOP = object() + +SSL_PROTOCOLS = (asyncio.sslproto.SSLProtocol,) +try: + import uvloop.loop +except ImportError: + pass +else: + SSL_PROTOCOLS = (*SSL_PROTOCOLS, uvloop.loop.SSLProtocol) + + +def ignore_aiohttp_ssl_eror(loop): + """Ignore aiohttp #3535 / cpython #13548 issue with SSL data after close + + There is an issue in Python 3.7 up to 3.7.3 that over-reports a + ssl.SSLError fatal error (ssl.SSLError: [SSL: KRB5_S_INIT] application data + after close notify (_ssl.c:2609)) after we are already done with the + connection. See GitHub issues aio-libs/aiohttp#3535 and + python/cpython#13548. + + Given a loop, this sets up an exception handler that ignores this specific + exception, but passes everything else on to the previous exception handler + this one replaces. 
+ + Checks for fixed Python versions, disabling itself when running on 3.7.4+ + or 3.8. + + """ + if sys.version_info >= (3, 7, 4): + return + + orig_handler = loop.get_exception_handler() + + def ignore_ssl_error(loop, context): + if context.get("message") in { + "SSL error in data received", + "Fatal error on transport", + }: + # validate we have the right exception, transport and protocol + exception = context.get("exception") + protocol = context.get("protocol") + if ( + isinstance(exception, ssl.SSLError) + and exception.reason == "KRB5_S_INIT" + and isinstance(protocol, SSL_PROTOCOLS) + ): + if loop.get_debug(): + asyncio.log.logger.debug("Ignoring asyncio SSL KRB5_S_INIT error") + return + if orig_handler is not None: + orig_handler(loop, context) + else: + loop.default_exception_handler(context) + + loop.set_exception_handler(ignore_ssl_error) + + +def noop_decorator(f): + return f + + +def URL(v): + if v is NOOP: + raise click.ClickException( + "You need to specify a server, either via the -H flag or using the FUNKWHALE_SERVER_URL environment variable" + ) + v = str(v) if v else None + parsed = urllib.parse.urlparse(v) + if parsed.scheme not in ["http", "https"] or not parsed.netloc: + raise ValueError("{} is not a valid url".format(v)) + if not v.endswith("/"): + v = v + "/" + return v + + +def env_file(v): + if v is NOOP: + v = None + if v is not None: + v = click.Path(exists=True)(v) + env_files = [v or ".env", config.get_env_file()] + for p in env_files: + logs.logger.debug("Loading env file at {}".format(p)) + dotenv.load_dotenv(p, override=False) + + return v + + +def async_command(f): + def wrapper(*args, **kwargs): + loop = asyncio.get_event_loop() + ignore_aiohttp_ssl_eror(loop) + _async_reraise = kwargs.pop("_async_reraise", False) + try: + return loop.run_until_complete(f(*args, **kwargs)) + except (aiohttp.client_exceptions.ClientError) as e: + if _async_reraise: + raise + message = str(e) + if hasattr(e, "status") and e.status == 401: + message = "Remote answered with {}, ensure your are logged in first".format( + e.status + ) + raise click.ClickException(message) + except (exceptions.FunkwhaleError) as e: + if _async_reraise: + raise + message = str(e) + raise click.ClickException(message) + else: + raise + + return functools.update_wrapper(wrapper, f) + + +SERVER_DECORATOR = click.option( + "-H", + "--url", + envvar="FUNKWHALE_SERVER_URL", + type=URL, + default=NOOP, + help="The URL of the Funkwhale server to query", +) +TOKEN_DECORATOR = click.option( + "-t", + "--token", + envvar="FUNKWHALE_TOKEN", + help="A JWT token to use for authentication", +) +RAW_DECORATOR = click.option( + "--raw", is_flag=True, help="Directly output JSON returned by the happy" +) + + +def set_server(ctx, url, token, use_auth=True): + from . import auth + + ctx.ensure_object(dict) + ctx.obj["SERVER_URL"] = url + parsed = urllib.parse.urlparse(url) + ctx.obj["SERVER_NETLOC"] = parsed.netloc + ctx.obj["SERVER_PROTOCOL"] = parsed.scheme + token = (token or auth.lazy_credential(url, "_")) if use_auth else None + ctx.obj["remote"] = api.get_api( + domain=ctx.obj["SERVER_NETLOC"], + protocol=ctx.obj["SERVER_PROTOCOL"], + token=token, + ) + + +@click.group() +@click.option( + "-e", + "--env-file", + envvar="ENV_FILE", + type=env_file, + default=NOOP, + help="Path to an env file to use. 
A .env file will be used automatically if any", +) +@click.option( + "-q", + "--quiet", + envvar="FUNKWHALE_QUIET", + is_flag=True, + default=False, + help="Disable logging", +) +@click.option( + "--no-login", + envvar="FUNKWHALE_NO_LOGIN", + is_flag=True, + default=False, + help="Disable authentication/keyring", +) +@SERVER_DECORATOR +@TOKEN_DECORATOR +@click_log.simple_verbosity_option(logs.logger, expose_value=True) +@click.pass_context +def cli(ctx, env_file, url, verbosity, token, quiet, no_login): + from . import auth + + auth.init_keyring() + ctx.ensure_object(dict) + logs.logger.disabled = quiet + set_server(ctx, url, token, use_auth=not no_login) + + +def get_pagination_data(payload): + data = {"next_page": None, "page_size": None} + if payload.get("next"): + next_page = utils.get_url_param(payload["next"], "page") + data["next_page"] = int(next_page) + data["total_pages"] = math.ceil(payload["count"] / len(payload["results"])) + data["current_page"] = int(next_page) - 1 + data["page_size"] = len(payload["results"]) + + if payload.get("previous"): + previous_page = utils.get_url_param(payload["previous"], "page") or 0 + data.setdefault("current_page", int(previous_page) + 1) + data.setdefault("total_pages", data["current_page"]) + if ( + not data["page_size"] + and payload["count"] - len(payload["results"]) > 0 + and data["total_pages"] > 1 + ): + data["page_size"] = int(payload["count"] - len(payload["results"])) / ( + data["total_pages"] - 1 + ) + data.setdefault("current_page", 1) + data.setdefault("total_pages", 1) + return data + + +def get_ls_command( + group, + endpoint, + output_conf, + pagination=True, + filter=True, + ordering=True, + with_id=False, + owned_conf=None, + name="ls", + doc="", + id_metavar="ID", +): + + available_fields = sorted( + set(output_conf["labels"]) | set(output.FIELDS["*"].keys()) + ) + id_decorator = ( + click.argument("id", metavar=id_metavar) if with_id else noop_decorator + ) + page_decorator = ( + click.option("--page", "-p", type=click.INT, default=1) + if pagination + else noop_decorator + ) + page_size_decorator = ( + click.option("--page-size", "-s", type=click.INT, default=None) + if pagination + else noop_decorator + ) + limit_decorator = ( + click.option("--limit", "-l", type=click.INT, default=1) + if pagination + else noop_decorator + ) + ordering_decorator = ( + click.option("--ordering", "-o", default=None) if ordering else noop_decorator + ) + filter_decorator = ( + click.option("--filter", "-f", multiple=True) if filter else noop_decorator + ) + owned_decorator = ( + click.option("--owned", is_flag=True, default=False) + if owned_conf + else noop_decorator + ) + + @id_decorator + @click.argument("query", nargs=-1) + @RAW_DECORATOR + @click.option( + "--format", "-t", type=click.Choice(output.TABLE_FORMATS), default="simple" + ) + @click.option("--no-headers", "-h", is_flag=True, default=False) + @click.option("--ids", "-i", is_flag=True) + @page_decorator + @page_size_decorator + @ordering_decorator + @filter_decorator + @limit_decorator + @owned_decorator + @click.option( + "--column", + "-c", + multiple=True, + help="Which column to display. Available: {}. 
\nDefault: {}".format( + ", ".join(available_fields), ", ".join(output_conf["labels"]) + ), + ) + @click.pass_context + @async_command + async def ls(ctx, raw, column, format, no_headers, ids, **kwargs): + id = kwargs.get("id") + limit = kwargs.get("limit") + page = kwargs.get("page") + page_size = kwargs.get("page_size") + ordering = kwargs.get("ordering") + filter = kwargs.get("filter") + query = kwargs.get("query") + owned = kwargs.get("owned") + if ids: + no_headers = True + column = [output_conf.get("id_field", "UUID")] + format = "plain" + base_url = endpoint + if with_id: + base_url = base_url.format(id) + next_page_url = None + page_count = 0 + while True: + if limit and page_count >= limit: + break + async with ctx.obj["remote"]: + if not pagination or page_count == 0: + url = base_url + params = {} + if page: + params["page"] = page + if page_size: + params["page_size"] = page_size + if ordering: + params["ordering"] = ordering + if query: + params["q"] = " ".join(query) + if filter: + for f in filter: + query = urllib.parse.parse_qs(f) + for k, v in query.items(): + params[k] = v[0] + if owned_conf and owned: + user_info = await get_user_info(ctx) + params[owned_conf["param"]] = utils.recursive_getattr( + user_info, owned_conf["field"] + ) + + else: + params = {} + url = next_page_url + if not url: + break + result = await ctx.obj["remote"].request("get", url, params=params) + result.raise_for_status() + payload = await result.json() + next_page_url = payload.get("next") + page_count += 1 + if raw: + click.echo(json.dumps(payload, sort_keys=True, indent=4)) + else: + click.echo( + output.table( + payload["results"], + column or output_conf["labels"], + type=output_conf["type"], + format=format, + headers=not no_headers, + ) + ) + if not pagination: + break + pagination_data = get_pagination_data(payload) + if pagination_data["page_size"]: + start = ( + int( + (pagination_data["current_page"] - 1) + * pagination_data["page_size"] + ) + + 1 + ) + else: + start = 1 + end = min(start + len(payload["results"]) - 1, payload["count"]) + logs.logger.info( + "\nObjects {start}-{end} on {total} (page {current_page}/{total_pages})".format( + start=start, + end=end, + total=payload["count"], + current_page=pagination_data["current_page"], + total_pages=pagination_data["total_pages"] or 1, + ) + ) + + ls.__doc__ = doc + return group.command(name)(ls) + + +def get_show_command( + group, url_template, output_conf, name="show", force_id=None, doc="" +): + + available_fields = sorted( + set(output_conf["labels"]) | set(output.FIELDS["*"].keys()) + ) + if force_id: + + def id_decorator(f): + @functools.wraps(f) + def inner(raw, column, format): + return f(raw, force_id, column, format) + + return inner + + else: + id_decorator = click.argument("id") + + @id_decorator + @RAW_DECORATOR + @click.option( + "--format", "-t", type=click.Choice(output.TABLE_FORMATS), default="simple" + ) + @click.option( + "--column", + "-c", + multiple=True, + help="Which column to display. Available: {}. 
\nDefault: {}".format( + ", ".join(available_fields), ", ".join(output_conf["labels"]) + ), + ) + @click.pass_context + @async_command + async def show(ctx, raw, id, column, format): + + async with ctx.obj["remote"]: + async with ctx.obj["remote"].request( + "get", url_template.format(id) + ) as result: + result.raise_for_status() + payload = await result.json() + if raw: + click.echo(json.dumps(payload, sort_keys=True, indent=4)) + else: + click.echo( + output.obj_table( + payload, + column or output_conf["labels"], + type=output_conf["type"], + format=format, + ) + ) + + show.__doc__ = "" + return group.command(name)(show) + + +def get_delete_command( + group, + url_template, + confirm="Do you want to delete {} objects? This action is irreversible.", + doc="Delect the given objects", + name="rm", + id_metavar="ID", +): + @click.argument("id", nargs=-1, metavar=id_metavar) + @RAW_DECORATOR + @click.option("--no-input", is_flag=True) + @click.pass_context + @async_command + async def delete(ctx, raw, id, no_input): + async with ctx.obj["remote"]: + if not no_input and not click.confirm(confirm.format(len(id)), abort=True): + return + for i in id: + result = await ctx.obj["remote"].request( + "delete", url_template.format(i) + ) + if result.status == 404: + logs.logger.warn("Couldn't delete {}: object not found".format(i)) + else: + result.raise_for_status() + + click.echo("{} Objects deleted!".format(len(id))) + + delete.__doc__ = doc + return group.command(name)(delete) + + +async def get_user_info(ctx): + async with ctx.obj["remote"].request("get", "api/v1/users/users/me/") as result: + result.raise_for_status() + return await result.json() + + +if __name__ == "__main__": + cli() diff --git a/funkwhale_cli/cli/favorites.py b/funkwhale_cli/cli/favorites.py new file mode 100644 index 0000000..97d3b81 --- /dev/null +++ b/funkwhale_cli/cli/favorites.py @@ -0,0 +1,59 @@ +import click + +from . import base + + +@base.cli.group() +@click.pass_context +def favorites(ctx): + pass + + +@favorites.group("tracks") +@click.pass_context +def favorites_tracks(ctx): + pass + + +@favorites_tracks.command("create") +@click.argument("id", nargs=-1, required=True) +@click.pass_context +@base.async_command +async def favorites_tracks_create(ctx, id): + click.echo("Adding {} tracks to favorites…".format(len(id))) + + async with ctx.obj["remote"]: + for i in id: + data = {"track": i} + async with ctx.obj["remote"].request( + "post", "api/v1/favorites/tracks/", data=data + ) as response: + response.raise_for_status() + click.echo("Track {} added to favorites".format(i)) + + +@favorites_tracks.command("rm") +@click.argument("id", nargs=-1, required=True) +@click.pass_context +@base.async_command +async def favorites_tracks_rm(ctx, id): + click.echo("Removing {} tracks to favorites…".format(len(id))) + + async with ctx.obj["remote"]: + for i in id: + data = {"track": i} + async with ctx.obj["remote"].request( + "delete", "api/v1/favorites/tracks/remove/", data=data + ) as response: + response.raise_for_status() + click.echo("Track {} removed from favorites".format(i)) + + +favorites_tracks_ls = base.get_ls_command( # noqa + favorites_tracks, + "api/v1/favorites/tracks/", + output_conf={ + "labels": ["Track ID", "Track", "Artist", "Favorite Date"], + "type": "TRACK_FAVORITE", + }, +) diff --git a/funkwhale_cli/cli/libraries.py b/funkwhale_cli/cli/libraries.py new file mode 100644 index 0000000..2e4bed6 --- /dev/null +++ b/funkwhale_cli/cli/libraries.py @@ -0,0 +1,51 @@ +import click +import json + +from . 
import base +from .. import output + + +@base.cli.group() +@click.pass_context +def libraries(ctx): + """ + Manage libraries + """ + + +libraries_ls = base.get_ls_command( + libraries, + "api/v1/libraries/", + output_conf={ + "labels": ["UUID", "Name", "Visibility", "Uploads"], + "type": "LIBRARY", + }, +) +libraries_rm = base.get_delete_command(libraries, "api/v1/libraries/{}/") + + +@libraries.command("create") +@click.option("--name", prompt=True) +@click.option( + "--visibility", + type=click.Choice(["me", "instance", "everyone"]), + default="me", + prompt=True, +) +@click.option("--raw", is_flag=True) +@click.pass_context +@base.async_command +async def libraries_create(ctx, raw, name, visibility): + async with ctx.obj["remote"]: + result = await ctx.obj["remote"].request( + "post", "api/v1/libraries/", data={"name": name, "visibility": visibility} + ) + result.raise_for_status() + payload = await result.json() + if raw: + click.echo(json.dumps(payload, sort_keys=True, indent=4)) + else: + click.echo("Library created:") + click.echo( + output.table([payload], ["UUID", "Name", "Visibility"], type="LIBRARY") + ) diff --git a/funkwhale_cli/cli/playlists.py b/funkwhale_cli/cli/playlists.py new file mode 100644 index 0000000..893d39d --- /dev/null +++ b/funkwhale_cli/cli/playlists.py @@ -0,0 +1,116 @@ +import click +import json + +from . import base +from .. import output + + +@base.cli.group() +@click.pass_context +def playlists(ctx): + """ + Manage playlists + """ + + +playlists_ls = base.get_ls_command( + playlists, + "api/v1/playlists/", + doc="List available playlists", + owned_conf={"param": "user", "field": "id"}, + output_conf={ + "labels": [ + "ID", + "Name", + "Visibility", + "Tracks Count", + "User", + "Created", + "Modified", + ], + "type": "PLAYLIST", + }, +) +playlists_rm = base.get_delete_command( + playlists, + "api/v1/playlists/{}/", + id_metavar="PLAYLIST_ID", + doc="Delete the given playlists", +) + + +@playlists.command("create") +@click.option("--name", prompt=True) +@click.option( + "--visibility", + type=click.Choice(["me", "instance", "everyone"]), + default="me", + prompt=True, +) +@click.option("--raw", is_flag=True) +@click.pass_context +@base.async_command +async def playlists_create(ctx, raw, name, visibility): + """ + Create a new playlist + """ + async with ctx.obj["remote"]: + result = await ctx.obj["remote"].request( + "post", "api/v1/playlists/", data={"name": name, "visibility": visibility} + ) + result.raise_for_status() + payload = await result.json() + if raw: + click.echo(json.dumps(payload, sort_keys=True, indent=4)) + else: + click.echo("Playlist created:") + click.echo( + output.table( + [payload], ["ID", "Name", "Visibility", "Tracks Count"], type="PLAYLIST" + ) + ) + + +@playlists.command("tracks-add") +@click.argument("id", metavar="PLAYLIST_ID") +@click.argument("track", nargs=-1, metavar="[TRACK_ID]…") +@click.option( + "--no-duplicates", + is_flag=True, + default=False, + help="Prevent insertion of tracks that already exist in the playlist. 
An error will be raised in this case.", +) +@click.pass_context +@base.async_command +async def playlists_tracks_add(ctx, id, track, no_duplicates): + """ + Insert one or more tracks in a playlist + """ + if not track: + return click.echo("No track id provided") + + async with ctx.obj["remote"]: + async with ctx.obj["remote"].request( + "post", + "api/v1/playlists/{}/".format(id), + data={"tracks": track, "allow_duplicates": not no_duplicates}, + ) as response: + response.raise_for_status() + + +playlists_tracks = base.get_ls_command( + playlists, + "api/v1/playlists/{}/tracks/", + name="tracks", + with_id=True, + pagination=False, + ordering=False, + filter=False, + output_conf={ + "labels": ["Position", "ID", "Title", "Artist", "Album", "Created"], + "id_field": "ID", + "type": "PLAYLIST_TRACK", + }, + id_metavar="PLAYLIST_ID", + doc="List the tracks included in a playlist", +) diff --git a/funkwhale_cli/cli/server.py b/funkwhale_cli/cli/server.py new file mode 100644 index 0000000..e7ca464 --- /dev/null +++ b/funkwhale_cli/cli/server.py @@ -0,0 +1,49 @@ +import json + +import click + +from . import base +from .. import api + + +@base.cli.group() +@click.pass_context +def server(ctx): + pass + + +@server.command() +@base.RAW_DECORATOR +@click.pass_context +@base.async_command +async def info(ctx, raw): + async with api.get_session() as session: + nodeinfo = await api.fetch_nodeinfo( + session, + domain=ctx.obj["SERVER_NETLOC"], + protocol=ctx.obj["SERVER_PROTOCOL"], + ) + if raw: + click.echo(json.dumps(nodeinfo, sort_keys=True, indent=4)) + return + click.echo("\n") + click.echo("General") + click.echo("-------") + click.echo("Url: {}".format(ctx.obj["SERVER_URL"])) + click.echo("Name: {}".format(nodeinfo["metadata"]["nodeName"])) + click.echo( + "Short description: {}".format(nodeinfo["metadata"]["shortDescription"]) + ) + click.echo("\n") + click.echo("Software") + click.echo("----------") + click.echo("Software name: {}".format(nodeinfo["software"]["name"])) + click.echo("Version: {}".format(nodeinfo["software"]["version"])) + click.echo("\n") + click.echo("Configuration") + click.echo("---------------") + click.echo( + "Registrations: {}".format( + "open" if nodeinfo["openRegistrations"] else "closed" + ) + ) diff --git a/funkwhale_cli/cli/tracks.py b/funkwhale_cli/cli/tracks.py new file mode 100644 index 0000000..7433338 --- /dev/null +++ b/funkwhale_cli/cli/tracks.py @@ -0,0 +1,142 @@ +import os +import pathlib + +import aiohttp +import click +import tqdm + +from . import base +from .. import logs +from .. 
import utils + + +@base.cli.group() +@click.pass_context +def tracks(ctx): + pass + + +tracks_ls = base.get_ls_command( + tracks, + "api/v1/tracks/", + output_conf={ + "labels": ["ID", "Title", "Artist", "Album", "Disc", "Position"], + "type": "TRACK", + "id_field": "ID", + }, +) + + +async def get_track_download_url(id, remote, format=None): + result = await remote.request("get", "api/v1/tracks/{}/".format(id)) + result.raise_for_status() + payload = await result.json() + + try: + download_url = payload["uploads"][0]["listen_url"] + except IndexError: + if remote.token: + raise click.ClickException("This file is not available for download") + else: + raise click.ClickException( + "This file is not available for download, try to login first" + ) + + if download_url.startswith("/"): + download_url = remote.base_url[:-1] + download_url + if format: + download_url = utils.add_url_params(download_url, {"to": format}) + else: + format = payload["uploads"][0]["extension"] + + return download_url, format, payload + + +@tracks.command("download") +@click.argument("id", nargs=-1, required=True) +@click.option("--format", "-f") +@click.option("-d", "--directory", type=click.Path(exists=True)) +@click.option("-o", "--overwrite", is_flag=True, default=False) +@click.option("-s", "--skip-existing", is_flag=True, default=False) +@click.option("-i", "--ignore-errors", multiple=True, type=int) +@click.option( + "-t", + "--template", + default="{artist} - {album} - {title}.{extension}", + envvar="FUNKWHALE_DOWNLOAD_PATH_TEMPLATE", +) +@click.pass_context +@base.async_command +async def track_download( + ctx, id, format, directory, template, overwrite, ignore_errors, skip_existing +): + async with ctx.obj["remote"]: + progressbar = tqdm.tqdm(id, unit="Files") + for i in progressbar: + download_url, format, track_data = await get_track_download_url( + i, ctx.obj["remote"], format=format + ) + logs.logger.info("Downloading from {}".format(download_url)) + + filename_params = utils.flatten(track_data) + filename_params["album"] = filename_params["album_title"] + filename_params["artist"] = filename_params["artist_name"] + filename_params["extension"] = format + filename_params["year"] = ( + filename_params["album_release_date"][:4] + if filename_params["album_release_date"] + else None + ) + filename_params = { + k: utils.sanitize_recursive(v) for k, v in filename_params.items() + } + if directory: + filename = template.format(**filename_params) + full_path = os.path.join(directory, filename) + existing = os.path.exists(full_path) + if skip_existing and existing: + logs.logger.info( + "'{}' already exists on disk, skipping download".format( + full_path + ) + ) + continue + elif not overwrite and existing: + raise click.ClickException( + "'{}' already exists on disk. 
Relaunch this command with --overwrite if you want to replace it".format( + full_path + ) + ) + + async with ctx.obj["remote"].request( + "get", download_url, timeout=0 + ) as response: + try: + response.raise_for_status() + except aiohttp.ClientResponseError as e: + if response.status in ignore_errors: + logs.logger.warning( + "Remote answered with {} for url {}, skipping".format( + response.status, download_url + ) + ) + continue + else: + raise click.ClickException( + "Remote answered with {} for url {}, exiting".format( + response.status, download_url + ) + ) + if directory: + final_directory = os.path.dirname(full_path) + pathlib.Path(final_directory).mkdir(parents=True, exist_ok=True) + logs.logger.info("Downloading to {}".format(full_path)) + out = open(full_path, "wb") + else: + out = click.get_binary_stream("stdout") + while True: + chunk = await response.content.read(1024) + if not chunk: + break + out.write(chunk) + logs.logger.info("Download complete") diff --git a/funkwhale_cli/cli/uploads.py b/funkwhale_cli/cli/uploads.py new file mode 100644 index 0000000..4c1bf39 --- /dev/null +++ b/funkwhale_cli/cli/uploads.py @@ -0,0 +1,93 @@ +import datetime +import os + +import asyncio +import click +import tqdm + +from . import base +from .. import logs + + +@base.cli.group() +@click.pass_context +def uploads(ctx): + pass + + +uploads_ls = base.get_ls_command( # noqa + uploads, + "api/v1/uploads/", + output_conf={ + "labels": ["UUID", "Track", "Artist", "Import status", "Size", "Mimetype"], + "type": "UPLOAD", + }, +) + + +def track_read(file_obj, name, progress): + read = file_obj.read + + def patched_read(size): + content = read(size) + progress.update(len(content)) + progress.set_postfix(file=name[-30:], refresh=False) + return content + + setattr(file_obj, "read", patched_read) + + +async def upload(path, size, remote, ref, library_id, semaphore, global_progress): + async with semaphore: + filename = os.path.basename(path) + data = { + "library": library_id, + "import_reference": ref, + "source": "upload://{}".format(filename), + "audio_file": open(path, "rb"), + } + track_read(data["audio_file"], filename, global_progress) + response = await remote.request("post", "api/v1/uploads/", data=data, timeout=0) + response.raise_for_status() + return response + + +@uploads.command("create") +@click.argument("library_id") +@click.argument("paths", nargs=-1) +@click.option("-r", "--ref", default=None) +@click.option("-p", "--parallel", type=click.INT, default=1) +@click.pass_context +@base.async_command +async def uploads_create(ctx, library_id, paths, ref, parallel): + logs.logger.info("Uploading {} files…".format(len(paths))) + paths = sorted(set(paths)) + if not paths: + return + ref = ref or "funkwhale-cli-import-{}".format(datetime.datetime.now().isoformat()) + sizes = {path: os.path.getsize(path) for path in paths} + + async with ctx.obj["remote"]: + logs.logger.info("Checking library {} existence…".format(library_id)) + library_data = await ctx.obj["remote"].request( + "get", "api/v1/libraries/{}/".format(library_id) + ) + library_data.raise_for_status() + sem = asyncio.Semaphore(parallel) + progressbar = tqdm.tqdm( + total=sum(sizes.values()), unit="B", unit_scale=True, unit_divisor=1024 + ) + tasks = [ + upload( + path=path, + ref=ref, + size=sizes[path], + global_progress=progressbar, + remote=ctx.obj["remote"], + library_id=library_id, + semaphore=sem, + ) + for path in paths + ] + await asyncio.gather(*tasks) + logs.logger.info("Upload complete") diff --git 
a/funkwhale_cli/cli/users.py b/funkwhale_cli/cli/users.py new file mode 100644 index 0000000..9e764a0 --- /dev/null +++ b/funkwhale_cli/cli/users.py @@ -0,0 +1,32 @@ +import click + +from . import base + + +@base.cli.group() +@click.pass_context +def users(ctx): + pass + + +users_me = base.get_show_command( + users, + "api/v1/users/users/{}/", + output_conf={ + "labels": [ + "ID", + "Username", + "Name", + "Email", + "Federation ID", + "Joined", + "Visibility", + "Staff", + "Admin", + "Permissions", + ], + "type": "USER", + }, + force_id="me", + name="me", +) diff --git a/funkwhale_cli/utils.py b/funkwhale_cli/utils.py index 44ec2a4..27f3f8e 100644 --- a/funkwhale_cli/utils.py +++ b/funkwhale_cli/utils.py @@ -1,3 +1,10 @@ +import collections +import json +import urllib.parse + +import pathvalidate + + def recursive_getattr(obj, key, permissive=False): """ Given a dictionary such as {'user': {'name': 'Bob'}} and @@ -17,3 +24,80 @@ def recursive_getattr(obj, key, permissive=False): return return v + + +def add_url_params(url, params): + """ Add GET params to provided URL being aware of existing. + + :param url: string of target URL + :param params: dict containing requested params to be added + :return: string with updated URL + + >> url = 'http://stackoverflow.com/test?answers=true' + >> new_params = {'answers': False, 'data': ['some','values']} + >> add_url_params(url, new_params) + 'http://stackoverflow.com/test?data=some&data=values&answers=false' + """ + # Unquoting URL first so we don't loose existing args + url = urllib.parse.unquote(url) + # Extracting url info + parsed_url = urllib.parse.urlparse(url) + # Extracting URL arguments from parsed URL + get_args = parsed_url.query + # Converting URL arguments to dict + parsed_get_args = dict(urllib.parse.parse_qsl(get_args)) + # Merging URL arguments dict with new params + parsed_get_args.update(params) + + # Bool and Dict values should be converted to json-friendly values + # you may throw this part away if you don't like it :) + parsed_get_args.update( + { + k: json.dumps(v) + for k, v in parsed_get_args.items() + if isinstance(v, (bool, dict)) + } + ) + + # Converting URL argument to proper query string + encoded_get_args = urllib.parse.urlencode(parsed_get_args, doseq=True) + # Creating new parsed result object based on provided with new + # URL arguments. Same thing happens inside of urlparse. 
+ new_url = urllib.parse.ParseResult( + parsed_url.scheme, + parsed_url.netloc, + parsed_url.path, + parsed_url.params, + encoded_get_args, + parsed_url.fragment, + ).geturl() + + return new_url + + +def sanitize_recursive(value): + if isinstance(value, dict): + return {k: sanitize_recursive(v) for k, v in value.items()} + elif isinstance(value, list): + return [sanitize_recursive(v) for v in value] + else: + return pathvalidate.sanitize_filepath(str(value)) + + +def flatten(d, parent_key="", sep="_"): + items = [] + for k, v in d.items(): + new_key = parent_key + sep + k if parent_key else k + if isinstance(v, collections.MutableMapping): + items.extend(flatten(v, new_key, sep=sep).items()) + else: + items.append((new_key, v)) + return dict(items) + + +def get_url_param(url, name): + parsed = urllib.parse.urlparse(url) + v = urllib.parse.parse_qs(parsed.query).get(name) + if v: + return v[0] + return None diff --git a/setup.cfg b/setup.cfg index 37b5220..a4b8fd3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -56,3 +56,8 @@ universal = 1 [tool:pytest] testpaths = tests + +[flake8] +max-line-length = 120 +exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,tests/data,tests/music/conftest.py +ignore = F405,W503,E203 diff --git a/tests/test_api.py b/tests/test_api.py index 49225a5..372fed6 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,4 +1,3 @@ -import aiohttp import marshmallow import pytest @@ -80,6 +79,5 @@ def test_clean_nodeinfo(): def test_clean_nodeinfo_raises_on_validation_failure(): - payload = {} with pytest.raises(marshmallow.ValidationError): api.clean_nodeinfo({}) diff --git a/tests/test_cli.py b/tests/test_cli.py index 1c7cc06..2784f77 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,6 +1,5 @@ import pytest import click -import keyring from funkwhale_cli import api from funkwhale_cli import cli @@ -32,7 +31,7 @@ def cli_ctx(mocker): def test_delete_command(group, cli_ctx, session, responses): - command = cli.get_delete_command(group, "api/v1/noop/{}/") + command = cli.base.get_delete_command(group, "api/v1/noop/{}/") id = "fake_id" responses.delete("https://test.funkwhale/api/v1/noop/fake_id/") command.callback(id=[id], raw=False, no_input=True, _async_reraise=True) @@ -61,12 +60,12 @@ def test_delete_command(group, cli_ctx, session, responses): ], ) def test_get_pagination_data(input, output): - assert cli.get_pagination_data(input) == output + assert cli.base.get_pagination_data(input) == output def test_lazy_credential(mocker): get_password = mocker.patch("keyring.get_password", return_value="password") - credential = cli.lazy_credential("http://testurl", "_") + credential = cli.auth.lazy_credential("http://testurl", "_") get_password.assert_not_called() @@ -82,7 +81,7 @@ def test_lazy_credential(mocker): def test_users_me(cli_ctx, session, responses, get_requests): - command = cli.users_me + command = cli.users.users_me url = "https://test.funkwhale/api/v1/users/users/me/" responses.get( url, @@ -116,7 +115,7 @@ def test_users_me(cli_ctx, session, responses, get_requests): def test_libraries_create(cli_ctx, session, responses, get_requests): - command = cli.libraries_create + command = cli.libraries.libraries_create url = "https://test.funkwhale/api/v1/libraries/" responses.post(url) @@ -128,7 +127,7 @@ def test_libraries_create(cli_ctx, session, responses, get_requests): def test_libraries_ls(cli_ctx, session, responses, get_requests): - command = cli.libraries_ls + command = cli.libraries.libraries_ls url = 
"https://test.funkwhale/api/v1/libraries/?ordering=-creation_date&page=1&page_size=5&q=hello" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -153,7 +152,7 @@ def test_libraries_ls(cli_ctx, session, responses, get_requests): def test_libraries_rm(cli_ctx, session, responses, get_requests): - command = cli.libraries_rm + command = cli.libraries.libraries_rm url = "https://test.funkwhale/api/v1/libraries/" responses.delete(url + "1/") responses.delete(url + "42/") @@ -165,7 +164,7 @@ def test_libraries_rm(cli_ctx, session, responses, get_requests): def test_favorites_tracks_create(cli_ctx, session, responses, get_requests): - command = cli.favorites_tracks_create + command = cli.favorites.favorites_tracks_create url = "https://test.funkwhale/api/v1/favorites/tracks/" responses.post(url, repeat=True) @@ -178,7 +177,7 @@ def test_favorites_tracks_create(cli_ctx, session, responses, get_requests): def test_favorites_tracks_ls(cli_ctx, session, responses, get_requests): - command = cli.favorites_tracks_ls + command = cli.favorites.favorites_tracks_ls url = "https://test.funkwhale/api/v1/favorites/tracks/?ordering=-creation_date&page=1&page_size=5&q=hello" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -203,7 +202,7 @@ def test_favorites_tracks_ls(cli_ctx, session, responses, get_requests): def test_favorites_tracks_rm(cli_ctx, session, responses, get_requests): - command = cli.favorites_tracks_rm + command = cli.favorites.favorites_tracks_rm url = "https://test.funkwhale/api/v1/favorites/tracks/remove/" responses.delete(url, repeat=True) @@ -216,7 +215,7 @@ def test_favorites_tracks_rm(cli_ctx, session, responses, get_requests): def test_tracks_ls(cli_ctx, session, responses, get_requests): - command = cli.tracks_ls + command = cli.tracks.tracks_ls url = "https://test.funkwhale/api/v1/tracks/?ordering=-creation_date&page=1&page_size=5&q=hello" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -241,7 +240,7 @@ def test_tracks_ls(cli_ctx, session, responses, get_requests): def test_artists_ls(cli_ctx, session, responses, get_requests): - command = cli.artists_ls + command = cli.artists.artists_ls url = "https://test.funkwhale/api/v1/artists/?ordering=-creation_date&page=1&page_size=5&q=hello" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -266,7 +265,7 @@ def test_artists_ls(cli_ctx, session, responses, get_requests): def test_albums_ls(cli_ctx, session, responses, get_requests): - command = cli.albums_ls + command = cli.albums.albums_ls url = "https://test.funkwhale/api/v1/albums/?ordering=-creation_date&page=1&page_size=5&q=hello" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -291,7 +290,7 @@ def test_albums_ls(cli_ctx, session, responses, get_requests): def test_playlists_create(cli_ctx, session, responses, get_requests): - command = cli.playlists_create + command = cli.playlists.playlists_create url = "https://test.funkwhale/api/v1/playlists/" responses.post(url) @@ -303,7 +302,7 @@ def test_playlists_create(cli_ctx, session, responses, get_requests): def test_playlists_ls(cli_ctx, session, responses, get_requests): - command = cli.playlists_ls + command = cli.playlists.playlists_ls url = "https://test.funkwhale/api/v1/playlists/?ordering=-creation_date&page=1&page_size=5&q=hello" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -331,9 
+330,9 @@ def test_playlists_ls_mine( cli_ctx, session, responses, get_requests, mocker, coroutine_mock ): get_user_info = mocker.patch.object( - cli, "get_user_info", coroutine_mock(return_value={"id": 42}) + cli.base, "get_user_info", coroutine_mock(return_value={"id": 42}) ) - command = cli.playlists_ls + command = cli.playlists.playlists_ls url = "https://test.funkwhale/api/v1/playlists/?ordering=-creation_date&page=1&page_size=5&q=hello&user=42" responses.get( url, payload={"results": [], "next": None, "previous": None, "count": 0} @@ -360,7 +359,7 @@ def test_playlists_ls_mine( def test_playlists_rm(cli_ctx, session, responses, get_requests): - command = cli.playlists_rm + command = cli.playlists.playlists_rm url = "https://test.funkwhale/api/v1/playlists/" responses.delete(url + "1/") responses.delete(url + "42/") @@ -372,7 +371,7 @@ def test_playlists_rm(cli_ctx, session, responses, get_requests): def test_playlists_tracks_add(cli_ctx, session, responses, get_requests): - command = cli.playlists_tracks_add + command = cli.playlists.playlists_tracks_add url = "https://test.funkwhale/api/v1/playlists/66/" responses.post(url) @@ -384,7 +383,7 @@ def test_playlists_tracks_add(cli_ctx, session, responses, get_requests): def test_playlists_tracks(cli_ctx, session, responses, get_requests): - command = cli.playlists_tracks + command = cli.playlists.playlists_tracks url = "https://test.funkwhale/api/v1/playlists/66/tracks/" responses.get(url, payload={"results": [], "count": 0}) -- GitLab
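
The download command added in funkwhale_cli/cli/tracks.py names files by flattening the track payload with utils.flatten and formatting the default template "{artist} - {album} - {title}.{extension}". The standalone sketch below mirrors that flattening step for illustration only: it is not the patch's code, it uses collections.abc.MutableMapping (the bare collections.MutableMapping alias used in funkwhale_cli/utils.py was removed in Python 3.10), and the sample payload is invented.

    import collections.abc


    def flatten(d, parent_key="", sep="_"):
        """Flatten nested mappings: {"artist": {"name": "Nirvana"}} -> {"artist_name": "Nirvana"}."""
        items = []
        for k, v in d.items():
            new_key = parent_key + sep + k if parent_key else k
            if isinstance(v, collections.abc.MutableMapping):
                items.extend(flatten(v, new_key, sep=sep).items())
            else:
                items.append((new_key, v))
        return dict(items)


    # Hypothetical track payload, reduced to the fields the template needs.
    track = {
        "title": "Come as You Are",
        "artist": {"name": "Nirvana"},
        "album": {"title": "Nevermind", "release_date": "1991-09-24"},
    }

    params = flatten(track)
    params["artist"] = params["artist_name"]
    params["album"] = params["album_title"]
    params["extension"] = "ogg"

    template = "{artist} - {album} - {title}.{extension}"
    print(template.format(**params))  # Nirvana - Nevermind - Come as You Are.ogg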
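
get_track_download_url appends the transcoding parameter with utils.add_url_params(download_url, {"to": format}) when --format is given. A minimal usage sketch, assuming the refactored funkwhale_cli package (and its pathvalidate dependency) is importable; the URL below is a placeholder, not a real listen endpoint.

    from funkwhale_cli import utils

    url = "https://demo.funkwhale.audio/api/v1/listen/some-uuid/?some=arg"  # placeholder
    transcoded = utils.add_url_params(url, {"to": "ogg"})

    # Existing query arguments are preserved and the new one is appended.
    assert utils.get_url_param(transcoded, "some") == "arg"
    assert utils.get_url_param(transcoded, "to") == "ogg"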
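
uploads_create caps concurrency with an asyncio.Semaphore sized by --parallel and waits on all uploads with asyncio.gather. The sketch below isolates that pattern with dummy coroutines instead of real HTTP requests; the file names and delay are purely illustrative.

    import asyncio


    async def upload(path, semaphore):
        # At most `parallel` uploads run concurrently; the rest wait here.
        async with semaphore:
            await asyncio.sleep(0.1)  # stand-in for the real POST to api/v1/uploads/
            return path


    async def main(paths, parallel=2):
        semaphore = asyncio.Semaphore(parallel)
        tasks = [upload(path, semaphore) for path in paths]
        return await asyncio.gather(*tasks)


    if __name__ == "__main__":
        print(asyncio.run(main(["a.ogg", "b.ogg", "c.ogg"], parallel=2)))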