Newer
Older
import asyncio
import aiohttp
import click
import click_log
import datetime
# importing the backends explicitely is required for PyInstaller to work
import keyring.backends.kwallet
import keyring.backends.Windows
import keyring.backends.OS_X
import keyring.backends.SecretService
import keyring.backends.chainer
import os
import tqdm
from funkwhale_cli import api
from funkwhale_cli import config
from funkwhale_cli import exceptions
from funkwhale_cli import logs
from funkwhale_cli import output
if v is NOOP:
raise click.ClickException(
"You need to specify a server, either via the -H flag or using the FUNKWHALE_SERVER_URL environment variable"
)
v = str(v) if v else None
parsed = urllib.parse.urlparse(v)
if parsed.scheme not in ["http", "https"] or not parsed.netloc:
raise ValueError("{} is not a valid url".format(v))
if not v.endswith("/"):
v = v + "/"
return v
def env_file(v):
    # click callback for the -e/--env-file option: validate the user-supplied
    # path (when given), then load it -- or a default ".env" -- plus the
    # per-user config env file. Returns the validated path (or None).
    value = None if v is NOOP else v
    if value is not None:
        # reuse click's Path type so a missing file raises the usual UsageError
        value = click.Path(exists=True)(value)
    for candidate in (value or ".env", config.get_env_file()):
        logs.logger.debug("Loading env file at {}".format(candidate))
        # override=False: already-set environment variables win
        dotenv.load_dotenv(candidate, override=False)
    return value
def async_command(f):
    # Decorator turning an async function into a blocking click command.
    # Network errors (aiohttp ClientError) and application errors
    # (FunkwhaleError) are converted into click.ClickException so the CLI
    # prints a clean message instead of a traceback. Callers invoking one
    # command from another can pass _async_reraise=True to receive the
    # original FunkwhaleError instead of a ClickException.
    def wrapper(*args, **kwargs):
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            # No event loop set in this thread (raised on recent Python
            # versions): create and install one so run_until_complete works.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        _async_reraise = kwargs.pop("_async_reraise", False)
        try:
            return loop.run_until_complete(f(*args, **kwargs))
        except aiohttp.client_exceptions.ClientError as e:
            message = str(e)
            # A 401 usually means the stored token is missing or expired.
            # (typo fix: "your are" -> "you are")
            if hasattr(e, "status") and e.status == 401:
                message = "Remote answered with {}, ensure you are logged in first".format(e.status)
            raise click.ClickException(message)
        except exceptions.FunkwhaleError as e:
            if _async_reraise:
                raise
            raise click.ClickException(str(e))
        # NOTE: the original ended with a dead `else: raise`; the try block
        # always returns, so that clause could never run and was removed.
    return functools.update_wrapper(wrapper, f)
"-H",
"--url",
envvar="FUNKWHALE_SERVER_URL",
type=URL,
default=NOOP,
help="The URL of the Funkwhale server to query",
)
# Reusable --token option for commands that authenticate against the API.
TOKEN_DECORATOR = click.option(
    "-t",
    "--token",
    envvar="FUNKWHALE_TOKEN",
    help="A JWT token to use for authentication",
)
RAW_DECORATOR = click.option(
"--raw", is_flag=True, help="Directly output JSON returned by the happy"
class lazy_credential():
    """
    A proxy object that defers reading the password from the system keyring
    until the value is actually needed, then caches the result, cf #4
    """

    # Sentinel distinguishing "not fetched yet" from a cached None/empty
    # value. BUG FIX: the original used `if self._cached_value:` which
    # re-queried the keyring on every access whenever the stored value was
    # falsy (e.g. no password stored), potentially triggering repeated
    # keyring backend prompts.
    _MISSING = object()

    def __init__(self, *args):
        # args are forwarded verbatim to keyring.get_password(service, username)
        self.args = args
        self._cached_value = self._MISSING

    @property
    def value(self):
        if self._cached_value is not self._MISSING:
            return self._cached_value
        try:
            v = keyring.get_password(*self.args)
        except ValueError as e:
            raise click.ClickException("Error while retrieving password from keyring: {}. Your password may be incorrect.".format(e.args[0]))
        except Exception as e:
            raise click.ClickException("Error while retrieving password from keyring: {}".format(e.args[0]))
        self._cached_value = v
        return v

    def __str__(self):
        return str(self.value)

    def __eq__(self, other):
        return self.value == other

    def __repr__(self):
        # NOTE: repr triggers a keyring lookup on first access
        return str(self.value)

    def __bool__(self):
        return bool(self.value)
def set_server(ctx, url, token, use_auth=True):
ctx.ensure_object(dict)
ctx.obj["SERVER_URL"] = url
parsed = urllib.parse.urlparse(url)
ctx.obj["SERVER_NETLOC"] = parsed.netloc
ctx.obj["SERVER_PROTOCOL"] = parsed.scheme
token = (token or lazy_credential(url, "_")) if use_auth else None
domain=ctx.obj["SERVER_NETLOC"],
protocol=ctx.obj["SERVER_PROTOCOL"],
token=token,
# Root CLI entry point: global options parsed here are stored on the click
# context (via set_server) for use by all subcommands.
@click.option(
    "-e",
    "--env-file",
    envvar="ENV_FILE",
    type=env_file,
    default=NOOP,
    help="Path to an env file to use. A .env file will be used automatically if any",
)
@click.option(
    "-q",
    "--quiet",
    envvar="FUNKWHALE_QUIET",
    is_flag=True,
    default=False,
    help="Disable logging",
)
@click.option(
    "--no-login",
    envvar="FUNKWHALE_NO_LOGIN",
    is_flag=True,
    default=False,
    help="Disable authentication/keyring",
)
@click_log.simple_verbosity_option(logs.logger, expose_value=True)
@click.pass_context
def cli(ctx, env_file, url, verbosity, token, quiet, no_login):
    # small hack to fix some weird issues with pyinstaller and keyring
    # there seems to be a cache issue somewhere
    del keyring.backend.get_all_keyring.__wrapped__.always_returns
    keyring.core.init_backend()
    # /end of hack
    # Store server URL/token on ctx.obj; --no-login disables keyring lookup.
    set_server(ctx, url, token, use_auth=not no_login)
@cli.command()
@click.option("-u", "--username", envvar="FUNKWHALE_USERNAME", prompt=True)
@click.option(
"-p", "--password", envvar="FUNKWHALE_PASSWORD", prompt=True, hide_input=True
)
@click.pass_context
@async_command
async with api.get_session() as session:
token = await api.get_jwt_token(
session, ctx.obj["SERVER_URL"], username=username, password=password
)
try:
keyring.set_password(ctx.obj["SERVER_URL"], "_", token)
except ValueError as e:
raise click.ClickException("Error while retrieving password from keyring: {}. Your password may be incorrect.".format(e.args[0]))
except Exception as e:
raise click.ClickException("Error while retrieving password from keyring: {}".format(e.args[0]))
click.echo("Login successfull!")
@cli.command()
@click.pass_context
@async_command
async def logout(ctx):
    # Forget the token stored for the current server in the system keyring.
    # NOTE(review): keyring raises PasswordDeleteError when no token is
    # stored -- presumably acceptable UX; confirm if it should be caught.
    keyring.delete_password(ctx.obj["SERVER_URL"], "_")
    # (removed a dead trailing `pass` statement)
@cli.group()
@click.pass_context
def server(ctx):
    # Command group for server-level operations; the work happens in the
    # subcommands, so the group body itself is a no-op.
    pass
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
async with api.get_session() as session:
nodeinfo = await api.fetch_nodeinfo(
session,
domain=ctx.obj["SERVER_NETLOC"],
protocol=ctx.obj["SERVER_PROTOCOL"],
)
if raw:
click.echo(json.dumps(nodeinfo, sort_keys=True, indent=4))
return
click.echo("\n")
click.echo("General")
click.echo("-------")
click.echo("Url: {}".format(ctx.obj["SERVER_URL"]))
click.echo("Name: {}".format(nodeinfo["metadata"]["nodeName"]))
click.echo(
"Short description: {}".format(nodeinfo["metadata"]["shortDescription"])
)
click.echo("\n")
click.echo("Software")
click.echo("----------")
click.echo("Software name: {}".format(nodeinfo["software"]["name"]))
click.echo("Version: {}".format(nodeinfo["software"]["version"]))
click.echo("\n")
click.echo("Configuration")
click.echo("---------------")
click.echo(
"Registrations: {}".format(
"open" if nodeinfo["openRegistrations"] else "closed"
)
)
@cli.group()
@click.pass_context
def libraries(ctx):
    # Command group for library management; subcommands (ls/rm/create) are
    # attached to this group elsewhere in the module.
    pass
def get_url_param(url, name):
    # Return the first value of query parameter *name* in *url*, or None
    # when the parameter is absent.
    query = urllib.parse.urlparse(url).query
    values = urllib.parse.parse_qs(query).get(name)
    return values[0] if values else None
def get_pagination_data(payload):
    """Derive pagination info from a DRF-style paginated payload.

    *payload* has keys ``count``, ``results`` and optional ``next`` /
    ``previous`` page URLs. Returns a dict with ``next_page``, ``page_size``,
    ``current_page`` and ``total_pages``.
    """
    data = {"next_page": None, "page_size": None}
    if payload.get("next"):
        next_page = get_url_param(payload["next"], "page")
        data["next_page"] = int(next_page)
        data["total_pages"] = math.ceil(payload["count"] / len(payload["results"]))
        data["current_page"] = int(next_page) - 1
        data["page_size"] = len(payload["results"])
    if payload.get("previous"):
        previous_page = get_url_param(payload["previous"], "page") or 0
        # setdefault: values computed from "next" above take precedence
        data.setdefault("current_page", int(previous_page) + 1)
        data.setdefault("total_pages", data["current_page"])
    if (
        not data["page_size"]
        and payload["count"] - len(payload["results"]) > 0
        and data["total_pages"] > 1
    ):
        # On the last page, infer the page size from the remaining objects
        # spread over the earlier pages.
        data["page_size"] = int(payload["count"] - len(payload["results"])) / (
            data["total_pages"] - 1
        )
    data.setdefault("current_page", 1)
    data.setdefault("total_pages", 1)
    # BUG FIX: the original function fell off the end and returned None,
    # crashing callers that index into the result.
    return data
def get_ls_command(group, endpoint, output_conf):
available_fields = sorted(
set(output_conf["labels"]) | set(output.FIELDS["*"].keys())
)
@RAW_DECORATOR
@click.option(
"--format", "-t", type=click.Choice(output.TABLE_FORMATS), default="simple"
)
@click.option("--no-headers", "-h", is_flag=True, default=False)
@click.option("--page", "-p", type=click.INT, default=1)
@click.option("--page-size", "-s", type=click.INT, default=None)
@click.option("--ordering", "-o", default=None)
@click.option("--filter", "-f", multiple=True)
@click.option("--limit", "-l", type=click.INT, default=1)
@click.option(
"--column",
"-c",
multiple=True,
help="Which column to display. Available: {}. \nDefault: {}".format(
", ".join(available_fields), ", ".join(output_conf["labels"])
ctx,
raw,
page,
page_size,
ordering,
filter,
query,
column,
format,
no_headers,
ids,
if ids:
no_headers = True
column = [output_conf.get("id_field", "UUID")]
format = "plain"
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
next_page_url = None
page_count = 0
while True:
if limit and page_count >= limit:
break
async with ctx.obj["remote"]:
if page_count == 0:
url = endpoint
params = {"page": page}
if page_size:
params["page_size"] = page_size
if ordering:
params["ordering"] = ordering
if query:
params["q"] = " ".join(query)
if filter:
for f in filter:
query = urllib.parse.parse_qs(f)
for k, v in query.items():
params[k] = v[0]
else:
params = {}
url = next_page_url
if not url:
break
result = await ctx.obj["remote"].request("get", url, params=params)
result.raise_for_status()
payload = await result.json()
next_page_url = payload['next']
page_count += 1
if raw:
click.echo(json.dumps(payload, sort_keys=True, indent=4))
else:
click.echo(
output.table(
payload["results"],
column or output_conf["labels"],
type=output_conf["type"],
format=format,
headers=not no_headers,
pagination_data = get_pagination_data(payload)
if pagination_data["page_size"]:
start = (
int(
(pagination_data["current_page"] - 1)
* pagination_data["page_size"]
)
+ 1
)
else:
start = 1
end = min(start + len(payload["results"]) - 1, payload["count"])
logs.logger.info(
"\nObjects {start}-{end} on {total} (page {current_page}/{total_pages})".format(
start=start,
end=end,
total=payload["count"],
current_page=pagination_data["current_page"],
total_pages=pagination_data["total_pages"] or 1,
)
def get_delete_command(
group,
url_template,
confirm="Do you want to delete {} objects? This action is irreversible.",
@group.command("rm")
@click.argument("id", nargs=-1)
@RAW_DECORATOR
@click.option("--no-input", is_flag=True)
@click.pass_context
@async_command
if not no_input and not click.confirm(confirm.format(len(id)), abort=True):
for i in id:
result = await ctx.obj["remote"].request(
"delete", url_template.format(i)
)
if result.status == 404:
logs.logger.warn("Couldn't delete {}: object not found".format(i))
else:
result.raise_for_status()
click.echo("{} Objects deleted!".format(len(id)))
libraries,
"api/v1/libraries/",
output_conf={
"labels": ["UUID", "Name", "Visibility", "Uploads"],
"type": "LIBRARY",
},
)
# "libraries rm" subcommand, generated by the shared delete-command factory.
libraries_delete = get_delete_command(libraries, "api/v1/libraries/{}/")
@libraries.command("create")
@click.option("--name", prompt=True)
@click.option(
"--visibility",
type=click.Choice(["me", "instance", "everyone"]),
default="me",
prompt=True,
)
@click.option("--raw", is_flag=True)
@click.pass_context
@async_command
async def libraries_create(ctx, raw, name, visibility):
async with ctx.obj["remote"]:
result = await ctx.obj["remote"].request(
"post", "api/v1/libraries/", data={"name": name, "visibility": visibility}
)
result.raise_for_status()
payload = await result.json()
if raw:
click.echo(json.dumps(payload, sort_keys=True, indent=4))
else:
click.echo("Library created:")
click.echo(
output.table([payload], ["UUID", "Name", "Visibility"], type="LIBRARY")
)
@cli.group()
@click.pass_context
def artists(ctx):
    # Command group for artist browsing; the ls subcommand is attached via
    # the shared ls-command factory.
    pass
artists_ls = get_ls_command(
artists,
"api/v1/artists/",
output_conf={
"labels": ["ID", "Name", "Albums", "Tracks", "Created"],
"type": "ARTIST",
@cli.group()
@click.pass_context
def albums(ctx):
    # Command group for album browsing; the ls subcommand is attached via
    # the shared ls-command factory.
    pass
albums_ls = get_ls_command(
albums,
"api/v1/albums/",
output_conf={
"labels": ["ID", "Title", "Artist", "Tracks", "Created"],
"type": "ALBUM",
output_conf={
"labels": ["ID", "Title", "Artist", "Album", "Disc", "Position"],
"type": "TRACK",
async def get_track_download_url(id, remote, format=None):
result = await remote.request("get", "api/v1/tracks/{}/".format(id))
result.raise_for_status()
payload = await result.json()
try:
download_url = payload["uploads"][0]["listen_url"]
except IndexError:
if remote.token:
raise click.ClickException("This file is not available for download")
else:
raise click.ClickException(
"This file is not available for download, try to login first"
)
if download_url.startswith("/"):
download_url = remote.base_url[:-1] + download_url
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
if format:
download_url = add_url_params(download_url, {"to": format})
else:
format = payload["uploads"][0]["extension"]
return download_url, format, payload
def add_url_params(url, params):
    """Add GET params to provided URL being aware of existing.

    :param url: string of target URL
    :param params: dict containing requested params to be added
    :return: string with updated URL

    >>> add_url_params('http://stackoverflow.com/test?answers=true',
    ...                {'answers': False, 'data': ['some', 'values']})
    'http://stackoverflow.com/test?answers=false&data=some&data=values'
    """
    # Unquoting URL first so we don't lose existing args
    # (doc fixes: "loose" -> "lose"; the doctest output now reflects dict
    # insertion order -- existing keys keep their position)
    url = urllib.parse.unquote(url)
    parsed_url = urllib.parse.urlparse(url)
    # Convert the existing query string to a dict; *params* override
    # duplicate keys.
    parsed_get_args = dict(urllib.parse.parse_qsl(parsed_url.query))
    parsed_get_args.update(params)
    # Bool and Dict values should be converted to json-friendly values
    # (True -> 'true', etc.) so the server receives canonical text.
    parsed_get_args.update(
        {
            k: json.dumps(v)
            for k, v in parsed_get_args.items()
            if isinstance(v, (bool, dict))
        }
    )
    # doseq=True expands list values into repeated key=value pairs.
    encoded_get_args = urllib.parse.urlencode(parsed_get_args, doseq=True)
    # Rebuild the URL with the merged query string, keeping every other
    # component (scheme, netloc, path, params, fragment) untouched.
    new_url = urllib.parse.ParseResult(
        parsed_url.scheme,
        parsed_url.netloc,
        parsed_url.path,
        parsed_url.params,
        encoded_get_args,
        parsed_url.fragment,
    ).geturl()
    return new_url
def sanitize_recursive(value):
    # Recursively sanitize every leaf of a nested dict/list structure so the
    # values are safe to embed in filesystem paths. Non-container leaves are
    # stringified before sanitizing.
    if isinstance(value, dict):
        return {key: sanitize_recursive(item) for key, item in value.items()}
    if isinstance(value, list):
        return [sanitize_recursive(item) for item in value]
    return pathvalidate.sanitize_filepath(str(value))
def flatten(d, parent_key="", sep="_"):
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, collections.MutableMapping):
items.extend(flatten(v, new_key, sep=sep).items())
else:
items.append((new_key, v))
return dict(items)
@click.argument("id", nargs=-1, required=True)
@click.option("--format", "-f")
@click.option("-d", "--directory", type=click.Path(exists=True))
@click.option("-o", "--overwrite", is_flag=True, default=False)
@click.option("-s", "--skip-existing", is_flag=True, default=False)
@click.option("-i", "--ignore-errors", multiple=True, type=int)
@click.option(
"-t",
"--template",
envvar="FUNKWHALE_DOWNLOAD_PATH_TEMPLATE",
)
async def track_download(ctx, id, format, directory, template, overwrite, ignore_errors, skip_existing):
progressbar = tqdm.tqdm(id, unit="Files")
for i in progressbar:
download_url, format, track_data = await get_track_download_url(
i, ctx.obj["remote"], format=format
)
logs.logger.info("Downloading from {}".format(download_url))
response = await ctx.obj["remote"].request("get", download_url, timeout=0)
except aiohttp.ClientResponseError as e:
if response.status in ignore_errors:
logs.logger.warning("Remote answered with {} for url {}, skipping".format(response.status, download_url))
continue
else:
raise click.ClickException("Remote answered with {} for url {}, exiting".format(response.status, download_url))
filename_params = flatten(track_data)
filename_params["album"] = filename_params['album_title']
filename_params["artist"] = filename_params['artist_name']
filename_params["year"] = (
filename_params["album_release_date"][:4]
if filename_params["album_release_date"]
else None
)
filename_params = {
k: sanitize_recursive(v) for k, v in filename_params.items()
}
if directory:
filename = template.format(**filename_params)
full_path = os.path.join(directory, filename)
final_directory = os.path.dirname(full_path)
pathlib.Path(final_directory).mkdir(parents=True, exist_ok=True)
logs.logger.info("Downloading to {}".format(full_path))
existing = os.path.exists(full_path)
if skip_existing and existing:
logs.logger.info("'{}' already exists on disk, skipping download".format(full_path))
continue
elif not overwrite and existing:
raise click.ClickException(
"'{}' already exists on disk. Relaunch this command with --overwrite if you want to replace it".format(
full_path
)
)
out = open(full_path, "wb")
else:
out = click.get_binary_stream("stdout")
while True:
chunk = await response.content.read(1024)
if not chunk:
break
out.write(chunk)
@cli.group()
@click.pass_context
def uploads(ctx):
    # Command group for upload management; ls and create subcommands are
    # attached to this group below.
    pass
# "uploads ls" subcommand, generated by the shared ls-command factory.
uploads_ls = get_ls_command(  # noqa
    uploads,
    "api/v1/uploads/",
    output_conf={
        "labels": ["UUID", "Track", "Artist", "Import status", "Size", "Mimetype"],
        "type": "UPLOAD",
    },
)
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
def track_read(file_obj, name, progress):
    # Instrument file_obj.read so each chunk read advances the shared
    # progress bar, with the (truncated) file name shown as a postfix.
    original_read = file_obj.read

    def instrumented_read(size):
        chunk = original_read(size)
        progress.update(len(chunk))
        progress.set_postfix(file=name[-30:], refresh=False)
        return chunk

    file_obj.read = instrumented_read
async def upload(path, size, remote, ref, library_id, semaphore, global_progress):
    # Upload one local file to the given library. *semaphore* bounds the
    # number of concurrent uploads; *global_progress* is a shared tqdm bar
    # advanced as the file is read (via track_read's patched .read()).
    async with semaphore:
        filename = os.path.basename(path)
        # BUG FIX: the file handle was previously opened inline and never
        # closed; ensure it is released once the request completes.
        audio_file = open(path, "rb")
        try:
            data = {
                "library": library_id,
                "import_reference": ref,
                "source": "upload://{}".format(filename),
                "audio_file": audio_file,
            }
            track_read(audio_file, filename, global_progress)
            response = await remote.request("post", "api/v1/uploads/", data=data, timeout=0)
            response.raise_for_status()
            return response
        finally:
            audio_file.close()
@uploads.command("create")
@click.argument("library_id")
@click.argument("paths", nargs=-1)
@click.option("-r", "--ref", default=None)
@click.option("-p", "--parallel", type=click.INT, default=1)
@click.pass_context
@async_command
async def uploads_create(ctx, library_id, paths, ref, parallel):
logs.logger.info("Uploading {} files…".format(len(paths)))
paths = sorted(set(paths))
if not paths:
return
ref = ref or "funkwhale-cli-import-{}".format(datetime.datetime.now().isoformat())
sizes = {path: os.path.getsize(path) for path in paths}
async with ctx.obj["remote"]:
logs.logger.info("Checking library {} existence…".format(library_id))
library_data = await ctx.obj["remote"].request(
"get", "api/v1/libraries/{}/".format(library_id)
)
library_data.raise_for_status()
sem = asyncio.Semaphore(parallel)
progressbar = tqdm.tqdm(
total=sum(sizes.values()), unit="B", unit_scale=True, unit_divisor=1024
)
tasks = [
upload(
path=path,
ref=ref,
size=sizes[path],
global_progress=progressbar,
remote=ctx.obj["remote"],
library_id=library_id,
semaphore=sem,
)
for path in paths
]
await asyncio.gather(*tasks)
logs.logger.info("Upload complete")