Verified Commit 3e8ee3ba authored by Eliot Berriot

Can now publish uploads in channels

parent 9f8ce428
Pipeline #10054 failed
import datetime
import json
import os
import asyncio
@@ -37,29 +38,106 @@ def track_read(file_obj, name, progress):
setattr(file_obj, "read", patched_read)
async def upload(path, size, remote, ref, library_id, semaphore, global_progress):
def get_valid_fields(metadata):
data = {}
for field in ["title", "position", "tags", "description"]:
if field not in metadata:
continue
if field == "description":
data[field] = metadata[field]["text"]
else:
data[field] = metadata[field]
return data
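# For illustration, with made-up metadata: get_valid_fields keeps only the
# whitelisted keys from the server-extracted metadata and flattens the nested
# description object, e.g.:
#
#   get_valid_fields({"title": "Intro", "position": 1,
#                     "description": {"text": "hello"}, "bitrate": 320})
#   # -> {"title": "Intro", "position": 1, "description": "hello"}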
async def upload(
path,
size,
remote,
ref,
container_type,
draft,
album,
license,
library_or_channel_id,
semaphore,
global_progress,
):
async with semaphore:
filename = os.path.basename(path)
data = {
"library": library_id,
container_type: library_or_channel_id,
"import_reference": ref,
"source": "upload://{}".format(filename),
"audio_file": open(path, "rb"),
}
if container_type == "channel":
# needed to set proper metadata in the file later on before publication
data["import_status"] = "draft"
data["import_metadata"] = json.dumps(
{"title": filename, "album": album, "license": license}
)
track_read(data["audio_file"], filename, global_progress)
response = await remote.request("post", "api/v1/uploads/", data=data, timeout=0)
response.raise_for_status()
upload = await base.check_status(response)
upload = await response.json()
if container_type == "channel":
metadata_response = await remote.request(
"get", "api/v1/uploads/{}/audio-file-metadata/".format(upload["uuid"])
)
metadata_response.raise_for_status()
metadata = await metadata_response.json()
new_data = {"import_metadata": upload["import_metadata"]}
new_data["import_metadata"].update(get_valid_fields(metadata))
if not draft:
new_data["import_status"] = "pending"
patch_response = await remote.request(
"patch", "api/v1/uploads/{}/".format(upload["uuid"]), json=new_data
)
print(await patch_response.json())
patch_response.raise_for_status()
return response
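# Channel uploads use a two-step flow: the file is first POSTed with
# import_status "draft", the server-extracted audio metadata is then fetched
# and merged into import_metadata, and a final PATCH publishes the upload by
# setting import_status to "pending" (unless --draft is passed).
#
# Hypothetical invocation (the actual entry-point name may differ):
#   funkwhale-cli uploads create <channel-uuid> track01.mp3 track02.mp3 \
#       --channel --album 12 --license cc-by-sa-4.0 --ref my-import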
@uploads.command("create")
@click.argument("library_id")
@click.argument("library_or_channel_id")
@click.argument("paths", nargs=-1)
@click.option("-r", "--ref", default=None)
@click.option(
"-a",
"--album",
default=None,
help="Album to associate with the uploads. Only used when --channel is provided",
)
@click.option(
"-l",
"--license",
default=None,
help="License to associate with the uploads. Only used when --channel is provided",
)
@click.option(
"-c",
"--channel",
is_flag=True,
default=False,
help="Provide this flag if you're uploading to a channel",
)
@click.option(
"-d",
"--draft",
is_flag=True,
default=False,
help="Provide this flag if you want to upload in draft and publish later",
)
@click.option("-p", "--parallel", type=click.INT, default=1)
@click.pass_context
@base.async_command
async def uploads_create(ctx, library_id, paths, ref, parallel):
async def uploads_create(
ctx, library_or_channel_id, paths, ref, parallel, draft, channel, album, license
):
logs.logger.info("Uploading {} files…".format(len(paths)))
paths = sorted(set(paths))
if not paths:
@@ -68,23 +146,39 @@ async def uploads_create(ctx, library_id, paths, ref, parallel):
sizes = {path: os.path.getsize(path) for path in paths}
async with ctx.obj["remote"]:
logs.logger.info("Checking library {} existence…".format(library_id))
if channel:
logs.logger.info(
"Checking channel {} existence…".format(library_or_channel_id)
)
channel_data = await ctx.obj["remote"].request(
"get", "api/v1/channels/{}/".format(library_or_channel_id)
)
channel_data.raise_for_status()
else:
logs.logger.info(
"Checking library {} existence…".format(library_or_channel_id)
)
library_data = await ctx.obj["remote"].request(
"get", "api/v1/libraries/{}/".format(library_id)
"get", "api/v1/libraries/{}/".format(library_or_channel_id)
)
library_data.raise_for_status()
sem = asyncio.Semaphore(parallel)
progressbar = tqdm.tqdm(
total=sum(sizes.values()), unit="B", unit_scale=True, unit_divisor=1024
)
tasks = [
upload(
path=path,
ref=ref,
size=sizes[path],
draft=draft,
album=album,
license=license,
global_progress=progressbar,
remote=ctx.obj["remote"],
library_id=library_id,
library_or_channel_id=library_or_channel_id,
container_type="channel" if channel else "library",
semaphore=sem,
)
for path in paths
......
import json
import uuid
import pytest
@@ -175,10 +176,14 @@ def test_uploads_create(cli_ctx, session, responses, get_requests, tmpdir):
responses.get("https://test.funkwhale/api/v1/libraries/{}/".format(library_id))
command.callback(
library_id=library_id,
library_or_channel_id=library_id,
paths=[str(tmp_file)],
parallel=1,
channel=False,
ref="test-import",
draft=False,
album=None,
license=None,
_async_reraise=True,
)
expected_data = {
@@ -198,6 +203,59 @@ def test_uploads_create(cli_ctx, session, responses, get_requests, tmpdir):
assert len(libraries_requests) == 1
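# The channel test below asserts both sides of the new flow: the initial POST
# carrying the draft import_metadata, and the follow-up PATCH that publishes
# the upload with the server metadata merged in.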
def test_uploads_create_channel(cli_ctx, session, responses, get_requests, tmpdir):
tmp_file = tmpdir.join("test.mp3")
tmp_file.write_text("content", "ascii")
channel_id = str(uuid.uuid4())
upload_id = str(uuid.uuid4())
command = cli.uploads.uploads_create
upload_data = {
"uuid": upload_id,
"import_metadata": {"title": "test.mp3", "album": 12, "license": "cc-by-sa-4.0"}
}
responses.post("https://test.funkwhale/api/v1/uploads/", payload=upload_data)
responses.get("https://test.funkwhale/api/v1/channels/{}/".format(channel_id))
responses.get("https://test.funkwhale/api/v1/uploads/{}/audio-file-metadata/".format(upload_id), payload={"title": 'test title'})
responses.patch("https://test.funkwhale/api/v1/uploads/{}/".format(upload_id))
command.callback(
library_or_channel_id=channel_id,
channel=True,
paths=[str(tmp_file)],
parallel=1,
ref="test-import",
draft=False,
album=12,
license="cc-by-sa-4.0",
_async_reraise=True,
)
expected_data = {
"channel": channel_id,
"import_reference": "test-import",
"source": "upload://test.mp3",
"import_status": "draft",
"import_metadata": json.dumps({"title": "test.mp3", "album": 12, "license": "cc-by-sa-4.0"}),
}
upload_requests = get_requests("post", "https://test.funkwhale/api/v1/uploads/")
assert len(upload_requests) == 1
data = upload_requests[0].kwargs["data"]
audio_file = data.pop("audio_file")
assert data == expected_data
assert audio_file.name == str(tmp_file)
channels_requests = get_requests(
"get", "https://test.funkwhale/api/v1/channels/{}/".format(channel_id)
)
assert len(channels_requests) == 1
publish_requests = get_requests("patch", "https://test.funkwhale/api/v1/uploads/{}/".format(upload_id))
assert len(publish_requests) == 1
data = publish_requests[0].kwargs["json"]
expected_data = {
"import_status": "pending",
"import_metadata": {"title": "test title", "album": 12, "license": "cc-by-sa-4.0"},
}
assert data == expected_data
def test_uploads_ls(cli_ctx, session, responses, get_requests):
command = cli.uploads.uploads_ls
url = "https://test.funkwhale/api/v1/uploads/?ordering=-creation_date&page=1&page_size=5&q=hello"
......