# NOTE: "Newer" / "Older" are stray pagination labels from the hosting web
# view, not part of this source file.
import asyncio
import collections.abc
import datetime
import os

import aiohttp
import click
import click_log
import tqdm
if v is NOOP:
raise click.ClickException(
"You need to specify a server, either via the -H flag or using the FUNKWHALE_SERVER_URL environment variable"
)
v = str(v) if v else None
parsed = urllib.parse.urlparse(v)
if parsed.scheme not in ["http", "https"] or not parsed.netloc:
raise ValueError("{} is not a valid url".format(v))
if not v.endswith("/"):
v = v + "/"
return v
def env_file(v):
    """Option callback for --env-file: normalize the value, then load
    environment variables from the chosen file (or ``.env``) and from the
    per-user config location, without overriding variables already set."""
    if v is NOOP:
        v = None
    elif v is not None:
        # Validate that the explicitly-given path exists.
        v = click.Path(exists=True)(v)
    candidates = [v or ".env", config.get_env_file()]
    for path in candidates:
        logs.logger.debug("Loading env file at {}".format(path))
        dotenv.load_dotenv(path, override=False)
    return v
def async_command(f):
    """Decorator adapting coroutine function *f* into a synchronous callable.

    Runs *f* to completion on the event loop. Known operational errors
    (Funkwhale errors, HTTP client errors) are converted into
    ``click.ClickException`` so the CLI exits cleanly, unless the caller
    passes ``_async_reraise=True`` to get the original exception back.
    """
    def wrapper(*args, **kwargs):
        loop = asyncio.get_event_loop()
        _async_reraise = kwargs.pop("_async_reraise", False)
        try:
            return loop.run_until_complete(f(*args, **kwargs))
        except (exceptions.FunkwhaleError, aiohttp.client_exceptions.ClientError) as e:
            # BUG FIX: _async_reraise was popped but never consulted, and a
            # dangling `else: raise` followed the except clause. Honor the
            # flag: propagate the original exception when requested,
            # otherwise wrap it for clean CLI output.
            if _async_reraise:
                raise
            raise click.ClickException(str(e))
    return functools.update_wrapper(wrapper, f)
"-H",
"--url",
envvar="FUNKWHALE_SERVER_URL",
type=URL,
default=NOOP,
help="The URL of the Funkwhale server to query",
)
# Reusable --token option for commands that authenticate against the API.
TOKEN_DECORATOR = click.option(
    "-t",
    "--token",
    envvar="FUNKWHALE_TOKEN",
    help="A JWT token to use for authentication",
)
RAW_DECORATOR = click.option(
"--raw", is_flag=True, help="Directly output JSON returned by the happy"
ctx.ensure_object(dict)
ctx.obj["SERVER_URL"] = url
parsed = urllib.parse.urlparse(url)
ctx.obj["SERVER_NETLOC"] = parsed.netloc
ctx.obj["SERVER_PROTOCOL"] = parsed.scheme
try:
token = token or keyring.get_password(url, "_")
except ValueError as e:
raise click.ClickException("Error while retrieving password from keyring: {}. Your password may be incorrect.".format(e.args[0]))
except Exception as e:
raise click.ClickException("Error while retrieving password from keyring: {}".format(e.args[0]))
domain=ctx.obj["SERVER_NETLOC"],
protocol=ctx.obj["SERVER_PROTOCOL"],
token=token,
@click.option(
"-e",
"--env-file",
envvar="ENV_FILE",
type=env_file,
default=NOOP,
help="Path to an env file to use. A .env file will be used automatically if any",
)
@click.option(
"-q",
"--quiet",
envvar="FUNKWHALE_QUIET",
is_flag=True,
default=False,
help="Disable logging",
)
@click_log.simple_verbosity_option(logs.logger, expose_value=True)
@click.pass_context
def cli(ctx, env_file, url, verbosity, token, quiet):
@cli.command()
@click.option("-u", "--username", envvar="FUNKWHALE_USERNAME", prompt=True)
@click.option(
"-p", "--password", envvar="FUNKWHALE_PASSWORD", prompt=True, hide_input=True
)
@click.pass_context
@async_command
async with api.get_session() as session:
token = await api.get_jwt_token(
session, ctx.obj["SERVER_URL"], username=username, password=password
)
keyring.set_password(ctx.obj["SERVER_URL"], "_", token)
click.echo("Login successfull!")
@cli.command()
@click.pass_context
@async_command
async def logout(ctx):
    """Forget the token stored in the keyring for the current server."""
    # The token is stored under the server URL with a constant "_" username.
    keyring.delete_password(ctx.obj["SERVER_URL"], "_")
    # (removed a redundant trailing `pass` statement)
@cli.group()
@click.pass_context
def server(ctx):
    """Command group for inspecting the configured Funkwhale server."""
    pass
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
async with api.get_session() as session:
nodeinfo = await api.fetch_nodeinfo(
session,
domain=ctx.obj["SERVER_NETLOC"],
protocol=ctx.obj["SERVER_PROTOCOL"],
)
if raw:
click.echo(json.dumps(nodeinfo, sort_keys=True, indent=4))
return
click.echo("\n")
click.echo("General")
click.echo("-------")
click.echo("Url: {}".format(ctx.obj["SERVER_URL"]))
click.echo("Name: {}".format(nodeinfo["metadata"]["nodeName"]))
click.echo(
"Short description: {}".format(nodeinfo["metadata"]["shortDescription"])
)
click.echo("\n")
click.echo("Software")
click.echo("----------")
click.echo("Software name: {}".format(nodeinfo["software"]["name"]))
click.echo("Version: {}".format(nodeinfo["software"]["version"]))
click.echo("\n")
click.echo("Configuration")
click.echo("---------------")
click.echo(
"Registrations: {}".format(
"open" if nodeinfo["openRegistrations"] else "closed"
)
)
@cli.group()
@click.pass_context
def libraries(ctx):
    """Command group for managing libraries on the server."""
    pass
def get_url_param(url, name):
    """Return the first value of query parameter *name* in *url*, or None
    when the parameter is absent."""
    query = urllib.parse.urlparse(url).query
    values = urllib.parse.parse_qs(query).get(name)
    return values[0] if values else None
def get_pagination_data(payload):
    """Derive pagination info from a paginated API payload.

    *payload* exposes ``next``/``previous`` page URLs plus ``count`` and
    ``results`` keys. Returns a dict with ``next_page``, ``page_size``,
    ``current_page`` and ``total_pages``.
    """
    data = {"next_page": None, "page_size": None}
    if payload.get("next"):
        next_page = get_url_param(payload["next"], "page")
        data["next_page"] = int(next_page)
        data["total_pages"] = math.ceil(payload["count"] / len(payload["results"]))
        data["current_page"] = int(next_page) - 1
        data["page_size"] = len(payload["results"])
    if payload.get("previous"):
        # The "page" param may be absent from the previous link when it
        # points at page 1, hence the `or 0` fallback.
        previous_page = get_url_param(payload["previous"], "page") or 0
        data.setdefault("current_page", int(previous_page) + 1)
        data.setdefault("total_pages", data["current_page"])
    if (
        not data["page_size"]
        and payload["count"] - len(payload["results"]) > 0
        and data["total_pages"] > 1
    ):
        # On the last page, infer the size of the full preceding pages.
        data["page_size"] = int(payload["count"] - len(payload["results"])) / (
            data["total_pages"] - 1
        )
    data.setdefault("current_page", 1)
    data.setdefault("total_pages", 1)
    # BUG FIX: `data` was computed but never returned, while callers
    # (the ls commands) use the return value.
    return data
# Factory building a paginated "ls" subcommand on *group* that queries
# *endpoint* and renders results as a table per *output_conf*
# (expected keys: "labels", "type", optional "id_field").
# NOTE(review): this block appears truncated — the command-defining lines
# (closing of the --column option, @group.command, @click.pass_context,
# the `async def ... (` line, and several closing parentheses) seem to be
# missing between the option declarations and the parameter list below.
# Do not edit without the complete file.
def get_ls_command(group, endpoint, output_conf):
available_fields = sorted(
set(output_conf["labels"]) | set(output.FIELDS["*"].keys())
)
# Shared listing options: output format, paging, ordering and filters.
@RAW_DECORATOR
@click.option(
"--format", "-t", type=click.Choice(output.TABLE_FORMATS), default="simple"
)
@click.option("--no-headers", "-h", is_flag=True, default=False)
@click.option("--page", "-p", type=click.INT, default=1)
@click.option("--page-size", "-s", type=click.INT, default=None)
@click.option("--ordering", "-o", default=None)
@click.option("--filter", "-f", multiple=True)
@click.option(
"--column",
"-c",
multiple=True,
help="Which column to display. Available: {}. \nDefault: {}".format(
", ".join(available_fields), ", ".join(output_conf["labels"])
ctx,
raw,
page,
page_size,
ordering,
filter,
query,
column,
format,
no_headers,
ids,
# --ids mode: print only the identifier column, plain and headerless,
# so output can be piped into other commands.
if ids:
no_headers = True
column = [output_conf.get("id_field", "UUID")]
format = "plain"
# Build query params from the paging/ordering/filter options and fetch.
async with ctx.obj["remote"]:
params = {"page": page}
if page_size:
params["page_size"] = page_size
if ordering:
params["ordering"] = ordering
if filter:
for f in filter:
query = urllib.parse.parse_qs(f)
for k, v in query.items():
params[k] = v[0]
result = await ctx.obj["remote"].request("get", endpoint, params=params)
result.raise_for_status()
payload = await result.json()
if raw:
click.echo(json.dumps(payload, sort_keys=True, indent=4))
else:
click.echo(
output.table(
payload["results"],
column or output_conf["labels"],
type=output_conf["type"],
format=format,
headers=not no_headers,
# Footer summarizing which slice of the collection is displayed.
# NOTE(review): lines closing the table call and wrapping the footer
# in click.echo appear to be missing here as well.
pagination_data = get_pagination_data(payload)
if pagination_data["page_size"]:
start = (
int(
(pagination_data["current_page"] - 1)
* pagination_data["page_size"]
end = min(start + len(payload["results"]) - 1, payload["count"])
"\nObjects {start}-{end} on {total} (page {current_page}/{total_pages})".format(
start=start,
end=end,
total=payload["count"],
current_page=pagination_data["current_page"],
total_pages=pagination_data["total_pages"] or 1,
# Factory building an "rm" subcommand on *group* that deletes the objects
# whose ids are given on the command line, one DELETE request per id,
# using *url_template* (e.g. "api/v1/libraries/{}/") to build each URL.
# NOTE(review): this block appears truncated — the closing `):` of the
# signature and the inner `async def ...` command definition seem to be
# missing between the decorators and the body below. Do not edit without
# the complete file.
def get_delete_command(
group,
url_template,
confirm="Do you want to delete {} objects? This action is irreversible.",
@group.command("rm")
@click.argument("id", nargs=-1)
@RAW_DECORATOR
@click.option("--no-input", is_flag=True)
@click.pass_context
@async_command
# abort=True makes click.confirm raise Abort on refusal, so execution only
# continues past this line when deletion was confirmed (or --no-input given).
if not no_input and not click.confirm(confirm.format(len(id)), abort=True):
for i in id:
result = await ctx.obj["remote"].request(
"delete", url_template.format(i)
)
if result.status == 404:
# Missing objects are reported but don't abort the batch.
# NOTE(review): logger.warn is a deprecated alias of logger.warning.
logs.logger.warn("Couldn't delete {}: object not found".format(i))
else:
result.raise_for_status()
click.echo("{} Objects deleted!".format(len(id)))
libraries,
"api/v1/libraries/",
output_conf={
"labels": ["UUID", "Name", "Visibility", "Uploads"],
"type": "LIBRARY",
},
)
# "rm" subcommand deleting libraries by UUID via the API.
libraries_delete = get_delete_command(libraries, "api/v1/libraries/{}/")
@libraries.command("create")
@click.option("--name", prompt=True)
@click.option(
    "--visibility",
    type=click.Choice(["me", "instance", "everyone"]),
    default="me",
    prompt=True,
)
@click.option("--raw", is_flag=True)
@click.pass_context
@async_command
async def libraries_create(ctx, raw, name, visibility):
    """Create a new library on the server and display the result."""
    async with ctx.obj["remote"]:
        response = await ctx.obj["remote"].request(
            "post", "api/v1/libraries/", data={"name": name, "visibility": visibility}
        )
        response.raise_for_status()
        library = await response.json()
        if raw:
            # --raw: dump the server response verbatim as JSON.
            click.echo(json.dumps(library, sort_keys=True, indent=4))
            return
        click.echo("Library created:")
        click.echo(
            output.table([library], ["UUID", "Name", "Visibility"], type="LIBRARY")
        )
@cli.group()
@click.pass_context
def artists(ctx):
    """Command group for browsing artists on the server."""
    pass
artists_ls = get_ls_command(
artists,
"api/v1/artists/",
output_conf={
"labels": ["ID", "Name", "Albums", "Tracks", "Created"],
"type": "ARTIST",
@cli.group()
@click.pass_context
def albums(ctx):
    """Command group for browsing albums on the server."""
    pass
albums_ls = get_ls_command(
albums,
"api/v1/albums/",
output_conf={
"labels": ["ID", "Title", "Artist", "Tracks", "Created"],
"type": "ALBUM",
output_conf={
"labels": ["ID", "Title", "Artist", "Album", "Disc", "Position"],
"type": "TRACK",
async def get_track_download_url(id, remote, format=None):
result = await remote.request("get", "api/v1/tracks/{}/".format(id))
result.raise_for_status()
payload = await result.json()
try:
download_url = payload["uploads"][0]["listen_url"]
except IndexError:
if remote.token:
raise click.ClickException("This file is not available for download")
else:
raise click.ClickException(
"This file is not available for download, try to login first"
)
if download_url.startswith("/"):
download_url = remote.base_url[:-1] + download_url
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
if format:
download_url = add_url_params(download_url, {"to": format})
else:
format = payload["uploads"][0]["extension"]
return download_url, format, payload
def add_url_params(url, params):
    """Return *url* with *params* merged into its existing query string.

    Parameters already present in the URL are overridden by *params*;
    bool and dict values are serialized to their JSON representation
    (e.g. ``False`` becomes ``"false"``).

    >>> add_url_params(
    ...     'http://stackoverflow.com/test?answers=true',
    ...     {'answers': False, 'data': ['some', 'values']},
    ... )
    'http://stackoverflow.com/test?answers=false&data=some&data=values'
    """
    # Unquote first so existing percent-encoded arguments are not lost
    # by double-encoding later on.
    parsed = urllib.parse.urlparse(urllib.parse.unquote(url))
    # Merge the URL's current arguments with the requested ones.
    merged = dict(urllib.parse.parse_qsl(parsed.query))
    merged.update(params)
    # JSON-encode bools and dicts so they come out query-string friendly.
    for key, value in merged.items():
        if isinstance(value, (bool, dict)):
            merged[key] = json.dumps(value)
    # doseq=True expands list values into repeated parameters.
    encoded = urllib.parse.urlencode(merged, doseq=True)
    return parsed._replace(query=encoded).geturl()
def sanitize_recursive(value):
    """Recursively stringify *value* and strip characters that are unsafe
    in file paths; dict and list containers are walked element by element."""
    if isinstance(value, dict):
        return {key: sanitize_recursive(item) for key, item in value.items()}
    if isinstance(value, list):
        return [sanitize_recursive(item) for item in value]
    return pathvalidate.sanitize_filepath(str(value))
def flatten(d, parent_key="", sep="_"):
    """Flatten a nested mapping into a single-level dict.

    Nested keys are joined with *sep*, e.g. ``{"a": {"b": 1}}`` becomes
    ``{"a_b": 1}``.
    """
    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        # BUG FIX: collections.MutableMapping was removed in Python 3.10;
        # the ABC lives in collections.abc.
        if isinstance(v, collections.abc.MutableMapping):
            items.extend(flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)
# NOTE(review): the command-registration decorators (e.g. @tracks.command,
# @click.pass_context, @async_command) appear to be missing above this
# block — confirm against the complete file.
@click.argument("id", nargs=-1, required=True)
@click.option("--format", "-f")
@click.option("-d", "--directory", type=click.Path(exists=True))
@click.option("-o", "--overwrite", is_flag=True, default=False)
@click.option(
    "-t",
    "--template",
    envvar="FUNKWHALE_DOWNLOAD_PATH_TEMPLATE",
)
async def track_download(ctx, id, format, directory, template, overwrite):
    """Download one or more tracks, either into *directory* (file paths built
    from *template*) or streamed to stdout when no directory is given."""
    progressbar = tqdm.tqdm(id, unit="Files")
    for i in progressbar:
        download_url, format, track_data = await get_track_download_url(
            i, ctx.obj["remote"], format=format
        )
        logs.logger.info("Downloading from {}".format(download_url))
        response = await ctx.obj["remote"].request("get", download_url, timeout=0)
        response.raise_for_status()
        # Flatten nested track metadata so it can feed the path template,
        # and expose a few convenience aliases (album, artist, year).
        filename_params = flatten(track_data)
        filename_params["album"] = filename_params['album_title']
        filename_params["artist"] = filename_params['artist_name']
        filename_params["year"] = (
            filename_params["album_release_date"][:4]
            if filename_params["album_release_date"]
            else None
        )
        # Strip characters that are invalid in file paths.
        filename_params = {
            k: sanitize_recursive(v) for k, v in filename_params.items()
        }
        if directory:
            filename = template.format(**filename_params)
            full_path = os.path.join(directory, filename)
            final_directory = os.path.dirname(full_path)
            pathlib.Path(final_directory).mkdir(parents=True, exist_ok=True)
            logs.logger.info("Downloading to {}".format(full_path))
            if not overwrite and os.path.exists(full_path):
                raise click.ClickException(
                    "'{}' already exists on disk. Relaunch this command with --overwrite if you want to replace it".format(
                        full_path
                    )
                )
            out = open(full_path, "wb")
        else:
            out = click.get_binary_stream("stdout")
        try:
            # Stream the body in 1 KiB chunks to avoid buffering whole
            # files in memory.
            while True:
                chunk = await response.content.read(1024)
                if not chunk:
                    break
                out.write(chunk)
        finally:
            # BUG FIX: the destination file was previously never closed,
            # leaking a descriptor per downloaded track. Only close files
            # we opened ourselves, never the shared stdout stream.
            if directory:
                out.close()
@cli.group()
@click.pass_context
def uploads(ctx):
    """Command group for managing uploads on the server."""
    pass
# "ls" subcommand listing uploads with the given table columns.
uploads_ls = get_ls_command(  # noqa
    uploads,
    "api/v1/uploads/",
    output_conf={
        "labels": ["UUID", "Track", "Artist", "Import status", "Size", "Mimetype"],
        "type": "UPLOAD",
    },
)
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
def track_read(file_obj, name, progress):
    """Instrument *file_obj*.read so every read advances *progress* by the
    number of bytes consumed, tagging the bar with the (truncated) name."""
    original_read = file_obj.read

    def instrumented_read(size):
        data = original_read(size)
        progress.update(len(data))
        # Show at most the last 30 characters of the file name.
        progress.set_postfix(file=name[-30:], refresh=False)
        return data

    file_obj.read = instrumented_read
async def upload(path, size, remote, ref, library_id, semaphore, global_progress):
    """Upload the audio file at *path* into *library_id*.

    *semaphore* bounds how many uploads run concurrently; byte progress is
    reported through *global_progress* via the instrumented ``read``.
    (*size* is currently unused here; callers pass it for symmetry with the
    progress-bar totals.)
    """
    async with semaphore:
        filename = os.path.basename(path)
        # BUG FIX: close the file once the request completes instead of
        # leaking the descriptor on every upload.
        with open(path, "rb") as audio_file:
            data = {
                "library": library_id,
                "import_reference": ref,
                "source": "upload://{}".format(filename),
                "audio_file": audio_file,
            }
            # Instrument .read() so the shared progress bar advances as the
            # request body is streamed.
            track_read(data["audio_file"], filename, global_progress)
            response = await remote.request("post", "api/v1/uploads/", data=data, timeout=0)
        response.raise_for_status()
        return response
@uploads.command("create")
@click.argument("library_id")
@click.argument("paths", nargs=-1)
@click.option("-r", "--ref", default=None)
@click.option("-p", "--parallel", type=click.INT, default=1)
@click.pass_context
@async_command
async def uploads_create(ctx, library_id, paths, ref, parallel):
    """Upload the given audio files into *library_id*, at most *parallel*
    uploads running concurrently."""
    logs.logger.info("Uploading {} files…".format(len(paths)))
    # Deduplicate and order the file list for a stable upload sequence.
    paths = sorted(set(paths))
    if not paths:
        return
    if not ref:
        # Default to a timestamped import reference.
        ref = "funkwhale-cli-import-{}".format(datetime.datetime.now().isoformat())
    file_sizes = {p: os.path.getsize(p) for p in paths}
    async with ctx.obj["remote"]:
        # Fail fast if the target library doesn't exist.
        logs.logger.info("Checking library {} existence…".format(library_id))
        library_response = await ctx.obj["remote"].request(
            "get", "api/v1/libraries/{}/".format(library_id)
        )
        library_response.raise_for_status()
        semaphore = asyncio.Semaphore(parallel)
        # One shared byte-level progress bar across all uploads.
        progress = tqdm.tqdm(
            total=sum(file_sizes.values()), unit="B", unit_scale=True, unit_divisor=1024
        )
        await asyncio.gather(
            *[
                upload(
                    path=p,
                    ref=ref,
                    size=file_sizes[p],
                    global_progress=progress,
                    remote=ctx.obj["remote"],
                    library_id=library_id,
                    semaphore=semaphore,
                )
                for p in paths
            ]
        )
        logs.logger.info("Upload complete")