# NOTE(review): "Newer" / "Older" below were leftover pagination links from the
# page this file was extracted from, not source code.
import asyncio
import aiohttp
import click
import click_log
import datetime
import os
import tqdm
def URL(v):
    """click type callback validating and normalizing the server URL option.

    Raises click.ClickException when the NOOP sentinel reaches us (no -H flag
    and no FUNKWHALE_SERVER_URL environment variable), ValueError when the
    value is not an http(s) URL with a host, and returns the URL with a
    trailing slash appended.

    NOTE(review): the ``def URL(v):`` line was lost in extraction and is
    reconstructed here — the name is grounded by ``type=URL`` in
    SERVER_DECORATOR below.
    """
    if v is NOOP:
        raise click.ClickException(
            "You need to specify a server, either via the -H flag or using the FUNKWHALE_SERVER_URL environment variable"
        )
    v = str(v) if v else None
    parsed = urllib.parse.urlparse(v)
    if parsed.scheme not in ["http", "https"] or not parsed.netloc:
        raise ValueError("{} is not a valid url".format(v))
    if not v.endswith("/"):
        v = v + "/"
    return v
def env_file(v):
    """click type callback for the -e/--env-file option.

    Validates the given path exists (when provided), then loads environment
    variables from it — falling back to ".env" — and from the user-level
    config env file. Existing environment variables are never overridden.
    """
    if v is NOOP:
        v = None
    if v is not None:
        v = click.Path(exists=True)(v)
    candidates = [v or ".env", config.get_env_file()]
    for candidate in candidates:
        logs.logger.debug("Loading env file at {}".format(candidate))
        dotenv.load_dotenv(candidate, override=False)
    return v
def async_command(f):
    """Decorator running an async click callback to completion on an event loop.

    API-level errors (FunkwhaleError, aiohttp client errors) are converted to
    click.ClickException so the CLI prints a clean message. Callers invoking
    the command programmatically may pass ``_async_reraise=True`` to get the
    original exception instead.

    Fixes over the previous version: the dead ``else: raise`` after the
    ``except`` (unreachable, since the ``try`` returns) is removed, and the
    ``_async_reraise`` flag — previously popped but never consulted — now
    actually re-raises.
    """
    def wrapper(*args, **kwargs):
        _async_reraise = kwargs.pop("_async_reraise", False)
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            # Newer Pythons may have no current event loop here: create one.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        try:
            return loop.run_until_complete(f(*args, **kwargs))
        except (exceptions.FunkwhaleError, aiohttp.client_exceptions.ClientError) as e:
            if _async_reraise:
                raise
            raise click.ClickException(str(e))

    return functools.update_wrapper(wrapper, f)
# Shared click options so every command can take the server URL and the auth
# token. The NOOP default lets the URL converter distinguish "option absent"
# from an explicit value (URL raises a ClickException on NOOP).
SERVER_DECORATOR = click.option(
    "-H", "--url", envvar="FUNKWHALE_SERVER_URL", type=URL, default=NOOP
)
TOKEN_DECORATOR = click.option("-t", "--token", envvar="FUNKWHALE_TOKEN")
ctx.ensure_object(dict)
ctx.obj["SERVER_URL"] = url
parsed = urllib.parse.urlparse(url)
ctx.obj["SERVER_NETLOC"] = parsed.netloc
ctx.obj["SERVER_PROTOCOL"] = parsed.scheme
token = token or keyring.get_password(url, "_")
domain=ctx.obj["SERVER_NETLOC"],
protocol=ctx.obj["SERVER_PROTOCOL"],
token=token,
# NOTE(review): the group decorator was missing here; without it, `cli` is a
# plain function and every later `@cli.command()` / `@cli.group()` fails.
@click.group()
@click.option("-e", "--env-file", envvar="ENV_FILE", type=env_file, default=NOOP)
@SERVER_DECORATOR
@TOKEN_DECORATOR
@click_log.simple_verbosity_option(logs.logger, expose_value=True)
@click.pass_context
def cli(ctx, env_file, url, verbosity, token):
    # Root entry point: stash server/token info on the context for subcommands.
    ctx.ensure_object(dict)
    set_server(ctx, url, token)
@cli.command()
@click.option("-u", "--username", envvar="FUNKWHALE_USERNAME", prompt=True)
@click.option(
    "-p", "--password", envvar="FUNKWHALE_PASSWORD", prompt=True, hide_input=True
)
@click.pass_context
@async_command
async def login(ctx, username, password):
    # Fetch a JWT from the server and persist it in the system keyring, keyed
    # by the server URL so several servers can stay logged in at once.
    # NOTE(review): the `async def login(...)` line was lost in extraction and
    # is reconstructed from the decorators and body; the "successfull" typo in
    # the success message is also fixed.
    async with api.get_session() as session:
        token = await api.get_jwt_token(
            session, ctx.obj["SERVER_URL"], username=username, password=password
        )
        keyring.set_password(ctx.obj["SERVER_URL"], "_", token)
        click.echo("Login successful!")
@cli.command()
@click.pass_context
@async_command
async def logout(ctx):
    # Drop the stored token for this server from the system keyring.
    # (The dead trailing `pass` after this statement was removed.)
    keyring.delete_password(ctx.obj["SERVER_URL"], "_")
@cli.group()
@click.pass_context
def server(ctx):
    # Container group for server-related subcommands; does no work itself.
    pass
@server.command()
@click.option("--raw", is_flag=True)
@click.pass_context
@async_command
async def nodeinfo(ctx, raw):
    # Fetch the server's nodeinfo document and print it: raw JSON with
    # --raw, otherwise a human-readable summary.
    # NOTE(review): the `async def nodeinfo(...)` line and surrounding
    # indentation were lost in extraction (along with a run of stray line
    # numbers) and are reconstructed here from the decorators and body.
    async with api.get_session() as session:
        nodeinfo = await api.fetch_nodeinfo(
            session,
            domain=ctx.obj["SERVER_NETLOC"],
            protocol=ctx.obj["SERVER_PROTOCOL"],
        )
        if raw:
            click.echo(json.dumps(nodeinfo, sort_keys=True, indent=4))
            return
        click.echo("\n")
        click.echo("General")
        click.echo("-------")
        click.echo("Url: {}".format(ctx.obj["SERVER_URL"]))
        click.echo("Name: {}".format(nodeinfo["metadata"]["nodeName"]))
        click.echo(
            "Short description: {}".format(nodeinfo["metadata"]["shortDescription"])
        )
        click.echo("\n")
        click.echo("Software")
        click.echo("----------")
        click.echo("Software name: {}".format(nodeinfo["software"]["name"]))
        click.echo("Version: {}".format(nodeinfo["software"]["version"]))
        click.echo("\n")
        click.echo("Configuration")
        click.echo("---------------")
        click.echo(
            "Registrations: {}".format(
                "open" if nodeinfo["openRegistrations"] else "closed"
            )
        )
@cli.group()
@click.pass_context
def libraries(ctx):
    # Container group for library subcommands (ls / delete / create below).
    pass
def get_url_param(url, name):
    """Return the first value of query parameter *name* in *url*, or None."""
    query = urllib.parse.urlparse(url).query
    values = urllib.parse.parse_qs(query).get(name)
    return values[0] if values else None
def get_pagination_data(payload):
    """Derive pagination info from a DRF-style paginated API payload.

    Returns a dict with "next_page", "page_size", "current_page" and
    "total_pages" keys; current_page/total_pages default to 1 for
    single-page results.

    Fixes: the function previously fell off the end without ``return data``
    (callers subscript the result, so they got a TypeError), and the
    page-size estimation block is nested under the "previous" branch so that
    ``data["total_pages"]`` cannot raise KeyError for single-page payloads.
    """
    data = {"next_page": None, "page_size": None}
    if payload.get("next"):
        next_page = get_url_param(payload["next"], "page")
        data["next_page"] = int(next_page)
        # With a "next" link the current page is full, so the result length
        # is the page size and total pages follows from the total count.
        data["total_pages"] = math.ceil(payload["count"] / len(payload["results"]))
        data["current_page"] = int(next_page) - 1
        data["page_size"] = len(payload["results"])
    if payload.get("previous"):
        # On the last page "next" is absent: infer the page number from the
        # "previous" link instead (its page param is omitted for page 1).
        previous_page = get_url_param(payload["previous"], "page") or 0
        data.setdefault("current_page", int(previous_page) + 1)
        data.setdefault("total_pages", data["current_page"])
        if (
            not data["page_size"]
            and payload["count"] - len(payload["results"]) > 0
            and data["total_pages"] > 1
        ):
            # The last page may be short; estimate size from earlier pages.
            data["page_size"] = int(payload["count"] - len(payload["results"])) / (
                data["total_pages"] - 1
            )
    data.setdefault("current_page", 1)
    data.setdefault("total_pages", 1)
    return data
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
def get_ls_command(group, endpoint, output_conf):
    """Attach a generic "ls" subcommand to *group* listing *endpoint*.

    output_conf maps "labels" (columns) and "type" for the table renderer.
    Returns the command object so callers can keep a reference (the
    ``artists_ls = get_ls_command(...)`` assignments below rely on this —
    the previous version implicitly returned None).

    NOTE(review): several closing parentheses were lost in extraction and
    are restored here.
    """
    @group.command("ls")
    @click.option("--raw", is_flag=True)
    @click.option("--page", "-p", type=click.INT, default=1)
    @click.option("--page-size", "-s", type=click.INT, default=None)
    @click.option("--ordering", "-o", default=None)
    @click.option("--filter", "-f", multiple=True)
    @click.pass_context
    @async_command
    async def ls(ctx, raw, page, page_size, ordering, filter):
        async with ctx.obj["remote"]:
            params = {"page": page}
            if page_size:
                params["page_size"] = page_size
            if ordering:
                params["ordering"] = ordering
            if filter:
                # Each -f value is a querystring fragment, e.g. "scope=me".
                for f in filter:
                    query = urllib.parse.parse_qs(f)
                    for k, v in query.items():
                        params[k] = v[0]
            result = await ctx.obj["remote"].request("get", endpoint, params=params)
            result.raise_for_status()
            payload = await result.json()
            if raw:
                click.echo(json.dumps(payload, sort_keys=True, indent=4))
            else:
                click.echo(
                    output.table(
                        payload["results"], output_conf["labels"], type=output_conf["type"]
                    )
                )
                pagination_data = get_pagination_data(payload)
                if pagination_data["page_size"]:
                    start = (
                        int(
                            (pagination_data["current_page"] - 1) * pagination_data["page_size"]
                        )
                        + 1
                    )
                    end = min(start + len(payload["results"]) - 1, payload["count"])
                    click.echo(
                        "\nObjects {start}-{end} on {total} (page {current_page}/{total_pages})".format(
                            start=start,
                            end=end,
                            total=payload["count"],
                            current_page=pagination_data["current_page"],
                            total_pages=pagination_data["total_pages"] or 1,
                        )
                    )

    return ls
def get_delete_command(
    group,
    url_template,
    confirm="Do you want to delete this object? This action is irreversible.",
):
    """Attach a generic "delete" subcommand to *group*.

    url_template is formatted with the object id. Deletion is confirmed
    interactively unless --no-input is passed. Returns the command object so
    callers can keep a reference (``libraries_delete = ...`` below relies on
    this).

    NOTE(review): the ``async def delete(...)`` line was lost in extraction
    and is reconstructed from the decorators and body.
    """
    @group.command("delete")
    @click.argument("id")
    @click.option("--raw", is_flag=True)
    @click.option("--no-input", is_flag=True)
    @click.pass_context
    @async_command
    async def delete(ctx, id, raw, no_input):
        async with ctx.obj["remote"]:
            # click.confirm(abort=True) already aborts on "no"; the explicit
            # return also covers a falsy confirm() result.
            if not no_input and not click.confirm(confirm, abort=True):
                return
            result = await ctx.obj["remote"].request("delete", url_template.format(id))
            result.raise_for_status()
            click.echo("Object deleted!")

    return delete
# "libraries ls" / "libraries delete" subcommands, built from the generic
# command factories above.
# NOTE(review): the opening ``libraries_ls = get_ls_command(`` line was lost
# in extraction, leaving a dangling call tail; it is reconstructed here by
# analogy with the artists_ls / uploads_ls assignments below.
libraries_ls = get_ls_command(
    libraries,
    "api/v1/libraries/",
    output_conf={
        "labels": ["UUID", "Name", "Visibility", "Uploads"],
        "type": "LIBRARY",
    },
)
libraries_delete = get_delete_command(libraries, "api/v1/libraries/{}/")
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
@libraries.command("create")
@click.argument("name")
@click.option(
    "--visibility", type=click.Choice(["me", "instance", "everyone"]), default="me"
)
@click.option("--raw", is_flag=True)
@click.pass_context
@async_command
async def libraries_create(ctx, raw, name, visibility):
    # POST a new library and display the server's response, either as raw
    # JSON (--raw) or as a one-row table.
    async with ctx.obj["remote"]:
        response = await ctx.obj["remote"].request(
            "post", "api/v1/libraries/", data={"name": name, "visibility": visibility}
        )
        response.raise_for_status()
        created = await response.json()
        if raw:
            click.echo(json.dumps(created, sort_keys=True, indent=4))
        else:
            click.echo("Library created:")
            click.echo(
                output.table([created], ["UUID", "Name", "Visibility"], type="LIBRARY")
            )
@cli.group()
@click.pass_context
def artists(ctx):
    # Container group for artist subcommands ("artists ls" below).
    pass
# Register the "artists ls" subcommand via the generic factory.
artists_ls = get_ls_command(
    artists,
    "api/v1/artists/",
    output_conf={"labels": ["ID", "Name", "Created"], "type": "ARTIST"},
)
# NOTE(review): the ``tracks`` group definition and the opening
# ``tracks_ls = get_ls_command(`` line were lost in extraction, leaving a
# dangling call tail; both are reconstructed here — ``@tracks.command``
# below requires a ``tracks`` group to exist.
@cli.group()
@click.pass_context
def tracks(ctx):
    # Container group for track subcommands ("tracks ls", "tracks download").
    pass


tracks_ls = get_ls_command(
    tracks,
    "api/v1/tracks/",
    output_conf={"labels": ["ID", "Title", "Artist", "Album"], "type": "TRACK"},
)
async def get_track_download_url(id, remote):
    """Return the downloadable URL for track *id*.

    Fetches the track detail payload and picks the first upload's listen
    URL, made absolute against the remote's base URL when it is a relative
    path. Raises click.ClickException when no upload is available.
    """
    response = await remote.request("get", "api/v1/tracks/{}/".format(id))
    response.raise_for_status()
    payload = await response.json()
    try:
        download_url = payload["uploads"][0]["listen_url"]
    except IndexError:
        if remote.token:
            message = "This file is not available for download"
        else:
            message = "This file is not available for download, try to login first"
        raise click.ClickException(message)
    if download_url.startswith("/"):
        # Drop base_url's trailing slash before joining the absolute path.
        download_url = remote.base_url[:-1] + download_url
    return download_url
@tracks.command("download")
@click.argument("id")
@click.pass_context
@async_command
async def track_download(ctx, id):
    # Stream the track's audio to stdout in 1 KiB chunks so arbitrarily
    # large files never need to fit in memory.
    async with ctx.obj["remote"]:
        download_url = await get_track_download_url(id, ctx.obj["remote"])
        logs.logger.info("Downloading from {}".format(download_url))
        response = await ctx.obj["remote"].request("get", download_url, timeout=0)
        response.raise_for_status()
        out = click.get_binary_stream("stdout")
        chunk = await response.content.read(1024)
        while chunk:
            out.write(chunk)
            chunk = await response.content.read(1024)
        logs.logger.info("Download complete")
@cli.group()
@click.pass_context
def uploads(ctx):
    # Container group for upload subcommands ("uploads ls", "uploads create").
    pass
# Register the "uploads ls" subcommand via the generic factory.
uploads_ls = get_ls_command(  # noqa
    uploads,
    "api/v1/uploads/",
    output_conf={
        "labels": ["UUID", "Track", "Artist", "Import status", "Size", "Mimetype"],
        "type": "UPLOAD",
    },
)
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
def track_read(file_obj, name, progress):
    """Patch file_obj.read so every chunk read advances *progress*.

    Used during uploads: the HTTP client pulls from the file object, and the
    wrapper reports each chunk's size to the shared progress bar, tagging it
    with (the tail of) the file name.
    """
    original_read = file_obj.read

    def reporting_read(size):
        chunk = original_read(size)
        progress.update(len(chunk))
        # Show only the last 30 characters of long file names in the postfix.
        progress.set_postfix(file=name[-30:], refresh=False)
        return chunk

    file_obj.read = reporting_read
async def upload(path, size, remote, ref, library_id, semaphore, global_progress):
    """Upload one audio file to *library_id*, bounded by *semaphore*.

    The file's read() is patched (track_read) so the shared progress bar
    advances as the HTTP client consumes the file.

    Fix: the file handle opened here was previously never closed (resource
    leak, one leaked fd per uploaded file); it is now closed via a context
    manager once the request has completed.
    """
    async with semaphore:
        filename = os.path.basename(path)
        with open(path, "rb") as audio_file:
            track_read(audio_file, filename, global_progress)
            data = {
                "library": library_id,
                "import_reference": ref,
                "source": "upload://{}".format(filename),
                "audio_file": audio_file,
            }
            # timeout=0 disables the client timeout: uploads can be long.
            response = await remote.request("post", "api/v1/uploads/", data=data, timeout=0)
        response.raise_for_status()
        return response
@uploads.command("create")
@click.argument("library_id")
@click.argument("paths", nargs=-1)
@click.option("-r", "--ref", default=None)
@click.option("-p", "--parallel", type=click.INT, default=1)
@click.pass_context
@async_command
async def uploads_create(ctx, library_id, paths, ref, parallel):
    # Upload audio files to a library, at most `parallel` at a time, with a
    # shared byte-level progress bar.
    # Fixes: paths are deduplicated BEFORE logging the count (the previous
    # version could log a count larger than what was actually sent), and the
    # tqdm bar is always closed so the terminal is left in a clean state.
    paths = sorted(set(paths))
    logs.logger.info("Uploading {} files…".format(len(paths)))
    if not paths:
        return
    ref = ref or "funkwhale-cli-import-{}".format(datetime.datetime.now().isoformat())
    sizes = {path: os.path.getsize(path) for path in paths}
    async with ctx.obj["remote"]:
        # Fail fast with a clear error if the target library does not exist.
        logs.logger.info("Checking library {} existence…".format(library_id))
        library_data = await ctx.obj["remote"].request(
            "get", "api/v1/libraries/{}/".format(library_id)
        )
        library_data.raise_for_status()
        sem = asyncio.Semaphore(parallel)
        progressbar = tqdm.tqdm(
            total=sum(sizes.values()), unit="B", unit_scale=True, unit_divisor=1024
        )
        try:
            tasks = [
                upload(
                    path=path,
                    ref=ref,
                    size=sizes[path],
                    global_progress=progressbar,
                    remote=ctx.obj["remote"],
                    library_id=library_id,
                    semaphore=sem,
                )
                for path in paths
            ]
            await asyncio.gather(*tasks)
        finally:
            progressbar.close()
        logs.logger.info("Upload complete")