Compare revisions

Showing with 26705 additions and 114 deletions
@@ -293,11 +293,22 @@ class AttachmentSerializer(serializers.Serializer):
     file = StripExifImageField(write_only=True)
     urls = serializers.SerializerMethodField()

-    @extend_schema_field(OpenApiTypes.OBJECT)
+    @extend_schema_field(
+        {
+            "type": "object",
+            "properties": {
+                "original": {"type": "string"},
+                "small_square_crop": {"type": "string"},
+                "medium_square_crop": {"type": "string"},
+                "large_square_crop": {"type": "string"},
+            },
+        }
+    )
     def get_urls(self, o):
         urls = {}
         urls["source"] = o.url
         urls["original"] = o.download_url_original
+        urls["small_square_crop"] = o.download_url_small_square_crop
         urls["medium_square_crop"] = o.download_url_medium_square_crop
         urls["large_square_crop"] = o.download_url_large_square_crop
         return urls
......
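As an aside, a rough sketch of the "urls" mapping the updated get_urls() now returns (placeholder values only; the concrete URLs depend on the attachment and pod configuration):

# Sketch only: shape of AttachmentSerializer's "urls" field after this change
{
    "source": "<remote source url, if any>",
    "original": "<proxy url for the original file>",
    "small_square_crop": "<proxy url for the small crop>",   # newly exposed
    "medium_square_crop": "<proxy url for the medium crop>",
    "large_square_crop": "<proxy url for the large crop>",
}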
@@ -176,7 +176,12 @@ class AttachmentViewSet(
             return r
         size = request.GET.get("next", "original").lower()
-        if size not in ["original", "medium_square_crop", "large_square_crop"]:
+        if size not in [
+            "original",
+            "small_square_crop",
+            "medium_square_crop",
+            "large_square_crop",
+        ]:
             size = "original"
         try:
......
@@ -3,7 +3,9 @@ import hashlib
 import logging
 import os
 import tempfile
+import time
 import urllib.parse
+from datetime import timedelta

 import requests
 from django.core.files import File
@@ -16,6 +18,41 @@ from funkwhale_api.taskapp import celery

 logger = logging.getLogger(__name__)


+class TooManyQueriesError(Exception):
+    pass
+
+
+def check_existing_download_task(track):
+    if models.Upload.objects.filter(
+        track=track,
+        import_status__in=["pending", "finished"],
+        third_party_provider="archive-dl",
+    ).exists():
+        raise TooManyQueriesError(
+            "Upload for this track already exist or is pending. Stopping task."
+        )
+
+
+def check_last_third_party_queries(track, count):
+    # 15 per minutes according to their doc = one each 4 seconds
+    time_threshold = timezone.now() - timedelta(seconds=5)
+    if models.Upload.objects.filter(
+        third_party_provider="archive-dl",
+        import_status__in=["pending", "finished"],
+        creation_date__gte=time_threshold,
+    ).exists():
+        logger.info(
+            "Last archive.org query was too recent. Trying to wait 2 seconds..."
+        )
+        time.sleep(2)
+        count += 1
+        if count > 3:
+            raise TooManyQueriesError(
+                "Probably too many archivedl tasks are queue, stopping this task"
+            )
+        check_last_third_party_queries(track, count)
+
+
 def create_upload(url, track, files_data):
     mimetype = f"audio/{files_data.get('format', 'unknown')}"
     duration = files_data.get("mtime", 0)
@@ -38,13 +75,19 @@ def create_upload(url, track, files_data):
         bitrate=bitrate,
         library=service_library,
         from_activity=None,
-        import_status="finished",
+        import_status="pending",
     )


 @celery.app.task(name="archivedl.archive_download")
 @celery.require_instance(models.Track.objects.select_related(), "track")
 def archive_download(track, conf):
+    try:
+        check_existing_download_task(track)
+        check_last_third_party_queries(track, 0)
+    except TooManyQueriesError as e:
+        logger.error(e)
+        return
+
     artist_name = utils.get_artist_credit_string(track)
     query = f"mediatype:audio AND title:{track.title} AND creator:{artist_name}"
     with requests.Session() as session:
@@ -145,4 +188,5 @@ def filter_files(files, allowed_extensions):

 def get_search_url(query, page_size, page):
     q = urllib.parse.urlencode({"q": query})
-    return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}&page={page}&output=json"
+    return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}\
+&page={page}&output=json"
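For orientation only (not part of the change set): check_last_third_party_queries() throttles archive.org lookups by sleeping and retrying a few times before giving up, and get_search_url() builds the advanced-search URL. A small sketch of the URL it produces, using an illustrative query:

import urllib.parse

# Illustrative query string; real tasks build it from the track title and artist credit
q = urllib.parse.urlencode({"q": "mediatype:audio AND title:Example AND creator:Someone"})
url = f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows=50&page=1&output=json"
# -> https://archive.org/advancedsearch.php?q=mediatype%3Aaudio+AND+title%3AExample+AND+creator%3ASomeone&sort[]=addeddate+desc&rows=50&page=1&output=json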
@@ -55,12 +55,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name="trackfavorite",
             name="fid",
-            field=models.URLField(
-                db_index=True,
-                default="https://default.fid",
-                max_length=500,
-                unique=True,
-            ),
+            field=models.URLField(default="https://default.fid"),
             preserve_default=False,
         ),
         migrations.AddField(
@@ -79,6 +74,15 @@ class Migration(migrations.Migration):
             name="uuid",
             field=models.UUIDField(default=uuid.uuid4, unique=True, null=False),
         ),
+        migrations.AlterField(
+            model_name="trackfavorite",
+            name="fid",
+            field=models.URLField(
+                db_index=True,
+                max_length=500,
+                unique=True,
+            ),
+        ),
         migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
         migrations.AlterField(
             model_name="trackfavorite",
......
@@ -10,9 +10,10 @@ from drf_spectacular.utils import extend_schema_field
 from rest_framework import serializers

 from funkwhale_api.audio import models as audio_models
-from funkwhale_api.common import fields as common_fields
+from funkwhale_api.audio import serializers as audio_serializers
 from funkwhale_api.common import serializers as common_serializers
 from funkwhale_api.music import models as music_models
+from funkwhale_api.playlists import models as playlists_models
 from funkwhale_api.users import serializers as users_serializers

 from . import filters, models
@@ -192,19 +193,17 @@ class InboxItemActionSerializer(common_serializers.ActionSerializer):
         return objects.update(is_read=True)


-FETCH_OBJECT_CONFIG = {
-    "artist": {"queryset": music_models.Artist.objects.all()},
-    "album": {"queryset": music_models.Album.objects.all()},
-    "track": {"queryset": music_models.Track.objects.all()},
-    "library": {"queryset": music_models.Library.objects.all(), "id_attr": "uuid"},
-    "upload": {"queryset": music_models.Upload.objects.all(), "id_attr": "uuid"},
-    "account": {"queryset": models.Actor.objects.all(), "id_attr": "full_username"},
-    "channel": {"queryset": audio_models.Channel.objects.all(), "id_attr": "uuid"},
+OBJECT_SERIALIZER_MAPPING = {
+    music_models.Artist: federation_serializers.ArtistSerializer,
+    music_models.Album: federation_serializers.AlbumSerializer,
+    music_models.Track: federation_serializers.TrackSerializer,
+    models.Actor: federation_serializers.APIActorSerializer,
+    audio_models.Channel: audio_serializers.ChannelSerializer,
+    playlists_models.Playlist: federation_serializers.PlaylistSerializer,
 }
-FETCH_OBJECT_FIELD = common_fields.GenericRelation(FETCH_OBJECT_CONFIG)


-def convert_url_to_webginfer(url):
+def convert_url_to_webfinger(url):
     parsed_url = urlparse(url)
     domain = parsed_url.netloc  # e.g., "node1.funkwhale.test"
     path_parts = parsed_url.path.strip("/").split("/")
@@ -217,7 +216,9 @@ def convert_url_to_webfinger(url):

 class FetchSerializer(serializers.ModelSerializer):
     actor = federation_serializers.APIActorSerializer(read_only=True)
-    object = serializers.CharField(write_only=True)
+    object_uri = serializers.CharField(required=True, write_only=True)
+    object = serializers.SerializerMethodField(read_only=True)
+    type = serializers.SerializerMethodField(read_only=True)
     force = serializers.BooleanField(default=False, required=False, write_only=True)

     class Meta:
@@ -230,8 +231,10 @@ class FetchSerializer(serializers.ModelSerializer):
             "detail",
             "creation_date",
             "fetch_date",
-            "object",
+            "object_uri",
             "force",
+            "type",
+            "object",
         ]
         read_only_fields = [
             "id",
@@ -241,14 +244,36 @@ class FetchSerializer(serializers.ModelSerializer):
             "detail",
             "creation_date",
             "fetch_date",
+            "type",
+            "object",
         ]

-    def validate_object(self, value):
+    def get_type(self, fetch):
+        obj = fetch.object
+        if obj is None:
+            return None
+        # Return the type as a string
+        if isinstance(obj, music_models.Artist):
+            return "artist"
+        elif isinstance(obj, music_models.Album):
+            return "album"
+        elif isinstance(obj, music_models.Track):
+            return "track"
+        elif isinstance(obj, models.Actor):
+            return "account"
+        elif isinstance(obj, audio_models.Channel):
+            return "channel"
+        elif isinstance(obj, playlists_models.Playlist):
+            return "playlist"
+        else:
+            return None
+
+    def validate_object_uri(self, value):
         if value.startswith("https://"):
-            converted = convert_url_to_webginfer(value)
+            converted = convert_url_to_webfinger(value)
             if converted:
                 value = converted
-        # if value is a webginfer lookup, we craft a special url
         if value.startswith("@"):
             value = value.lstrip("@")
             validator = validators.EmailValidator()
@@ -256,9 +281,30 @@ class FetchSerializer(serializers.ModelSerializer):
                 validator(value)
             except validators.ValidationError:
                 return value
             return f"webfinger://{value}"

+    @extend_schema_field(
+        {
+            "oneOf": [
+                {"$ref": "#/components/schemas/Artist"},
+                {"$ref": "#/components/schemas/Album"},
+                {"$ref": "#/components/schemas/Track"},
+                {"$ref": "#/components/schemas/APIActor"},
+                {"$ref": "#/components/schemas/Channel"},
+                {"$ref": "#/components/schemas/Playlist"},
+            ]
+        }
+    )
+    def get_object(self, fetch):
+        obj = fetch.object
+        if obj is None:
+            return None
+        serializer_class = OBJECT_SERIALIZER_MAPPING.get(type(obj))
+        if serializer_class:
+            return serializer_class(obj).data
+        return None
+
     def create(self, validated_data):
         check_duplicates = not validated_data.get("force", False)
         if check_duplicates:
@@ -267,7 +313,7 @@ class FetchSerializer(serializers.ModelSerializer):
                 validated_data["actor"]
                 .fetches.filter(
                     status="finished",
-                    url=validated_data["object"],
+                    url=validated_data["object_uri"],
                     creation_date__gte=timezone.now()
                     - datetime.timedelta(
                         seconds=settings.FEDERATION_DUPLICATE_FETCH_DELAY
@@ -280,18 +326,10 @@ class FetchSerializer(serializers.ModelSerializer):
             return duplicate

         fetch = models.Fetch.objects.create(
-            actor=validated_data["actor"], url=validated_data["object"]
+            actor=validated_data["actor"], url=validated_data["object_uri"]
         )
         return fetch

-    def to_representation(self, obj):
-        repr = super().to_representation(obj)
-        object_data = None
-        if obj.object:
-            object_data = FETCH_OBJECT_FIELD.to_representation(obj.object)
-        repr["object"] = object_data
-        return repr


 class FullActorSerializer(serializers.Serializer):
     fid = serializers.URLField()
......
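To make the API change concrete, a hedged sketch of the new lookup flow (identifiers below are illustrative; convert_url_to_webfinger() is only partially shown in this diff, so its exact output is assumed):

# Client side: the write-only field is now "object_uri" instead of "object".
payload = {"object_uri": "@alice@node1.funkwhale.test"}

# validate_object_uri() strips the leading "@", the value passes the
# EmailValidator check, and the fetch URL is stored as:
#   "webfinger://alice@node1.funkwhale.test"
#
# Once the fetch finishes, the read-only fields added above would yield
# something like:
#   {"type": "account", "object": {...APIActorSerializer data...}, ...}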
@@ -4,6 +4,7 @@ from django.db import transaction
 from django.db.models import Count, Q
 from drf_spectacular.utils import extend_schema, extend_schema_view
 from rest_framework import decorators, mixins, permissions, response, viewsets
+from rest_framework.exceptions import NotFound as RestNotFound

 from funkwhale_api.common import preferences
 from funkwhale_api.common import utils as common_utils
@@ -289,7 +290,12 @@ class ActorViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
     def get_object(self):
         queryset = self.get_queryset()
         username, domain = self.kwargs["full_username"].split("@", 1)
+        try:
             return queryset.get(preferred_username=username, domain_id=domain)
+        except models.Actor.DoesNotExist:
+            raise RestNotFound(
+                detail=f"Actor {username}@{domain} not found",
+            )

     def get_queryset(self):
         qs = super().get_queryset()
......
@@ -81,6 +81,7 @@ class SignatureAuthentication(authentication.BaseAuthentication):
         fetch_delay = 24 * 3600
         now = timezone.now()
         last_fetch = actor.domain.nodeinfo_fetch_date
+        if not actor.domain.is_local:
             if not last_fetch or (
                 last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
             ):
......
@@ -405,6 +405,7 @@ class Fetch(models.Model):
                 serializers.ChannelUploadSerializer,
             ],
             contexts.FW.Library: [serializers.LibrarySerializer],
+            contexts.FW.Playlist: [serializers.PlaylistSerializer],
             contexts.AS.Group: [serializers.ActorSerializer],
             contexts.AS.Person: [serializers.ActorSerializer],
             contexts.AS.Organization: [serializers.ActorSerializer],
......
@@ -679,9 +679,6 @@ def inbox_delete_favorite(payload, context):
     favorite.delete()


-# to do : test listening routes and broadcast
-
-
 @outbox.register({"type": "Listen", "object.type": "Track"})
 def outbox_create_listening(context):
     track = context["track"]
@@ -807,18 +804,19 @@ def inbox_delete_playlist(payload, context):

 @inbox.register({"type": "Update", "object.type": "Playlist"})
 def inbox_update_playlist(payload, context):
-    actor = context["actor"]
-    playlist_id = payload["object"].get("id")
-    if not actor.playlists.filter(fid=playlist_id).exists():
-        logger.debug("Discarding update of unkwnown playlist_id %s", playlist_id)
-        return
+    """If we receive an update on an unkwnown playlist, we create the playlist"""
+    playlist_id = payload["object"].get("id")
     serializer = serializers.PlaylistSerializer(data=payload["object"])
     if serializer.is_valid(raise_exception=True):
         playlist = serializer.save()
+        # we update the playlist.library to get the plt.track.uploads locally
+        if follows := playlist.library.received_follows.filter(approved=True):
+            playlist.library.schedule_scan(follows[0].actor, force=True)
         # we trigger a scan since we use this activity to avoid sending many PlaylistTracks activities
-        playlist.schedule_scan(actors.get_service_actor())
+        playlist.schedule_scan(actors.get_service_actor(), force=True)
         return
     else:
         logger.debug(
......
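For orientation, a hedged sketch of the activity this handler now accepts even for a previously unknown playlist (URLs are illustrative assumptions; the exact field set is defined by PlaylistSerializer later in this diff):

# Illustrative inbox payload for inbox_update_playlist()
payload = {
    "type": "Update",
    "object": {
        "type": "Playlist",
        "id": "https://pod.example/federation/playlists/1234",          # assumed path
        "name": "Morning mix",
        "attributedTo": "https://pod.example/federation/actors/alice",  # assumed path
        "published": "2024-01-01T00:00:00Z",
        "audience": "https://www.w3.org/ns/activitystreams#Public",
        "library": "https://pod.example/federation/music/libraries/5678",  # assumed path
    },
}
# The handler deserializes payload["object"], saves the playlist, scans the
# attached library so the tracks' uploads become available locally, then
# schedules a playlist scan with force=True.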
 import logging
 import os
+import re
 import urllib.parse
 import uuid
@@ -939,10 +940,13 @@ OBJECT_SERIALIZERS = {t: ObjectSerializer for t in activity.OBJECT_TYPES}

 def get_additional_fields(data):
     UNSET = object()
     additional_fields = {}
-    for field in ["name", "summary"]:
+    for field in ["name", "summary", "library", "audience", "published"]:
         v = data.get(field, UNSET)
         if v == UNSET:
             continue
+        # in some cases we use the serializer context to pass objects instances, we don't want to add them
+        if not isinstance(v, str) or isinstance(v, dict):
+            continue
         additional_fields[field] = v

     return additional_fields
@@ -1036,7 +1040,11 @@ class LibrarySerializer(PaginatedCollectionSerializer):
             "page_size": 100,
             "attributedTo": library.actor,
             "actor": library.actor,
-            "items": library.uploads.for_federation(),
+            "items": (
+                library.uploads.for_federation()
+                if not library.playlist_uploads.all()
+                else library.playlist_uploads.for_federation()
+            ),
             "type": "Library",
         }
         r = super().to_representation(conf)
@@ -1128,7 +1136,12 @@ class CollectionPageSerializer(jsonld.JsonLdSerializer):
             "last": last,
             "items": [
                 conf["item_serializer"](
-                    i, context={"actor": conf["actor"], "include_ap_context": False}
+                    i,
+                    context={
+                        "actor": conf["actor"],
+                        "library": conf.get("library", None),
+                        "include_ap_context": False,
+                    },
                 ).data
                 for i in page.object_list
             ],
@@ -1580,6 +1593,50 @@ class TrackSerializer(MusicEntitySerializer):
         return super().update(obj, validated_data)


+def duration_int_to_xml(duration):
+    if not duration:
+        return None
+    multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
+    ret = "P"
+    days, seconds = divmod(int(duration), multipliers["D"])
+    ret += f"{days:d}DT" if days > 0 else "T"
+    hours, seconds = divmod(seconds, multipliers["H"])
+    ret += f"{hours:d}H" if hours > 0 else ""
+    minutes, seconds = divmod(seconds, multipliers["M"])
+    ret += f"{minutes:d}M" if minutes > 0 else ""
+    ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
+    return ret
+
+
+class DayTimeDurationSerializer(serializers.DurationField):
+    multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
+
+    def to_internal_value(self, value):
+        if isinstance(value, float):
+            return value
+        parsed = re.match(
+            r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
+        )
+        if parsed is not None:
+            return int(
+                sum(
+                    [
+                        self.multipliers[s[-1]] * float("0" + s[:-1])
+                        for s in parsed.groups()
+                        if s is not None
+                    ]
+                )
+            )
+        self.fail(
+            "invalid", format="https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration"
+        )
+
+    def to_representation(self, value):
+        return duration_int_to_xml(value)
+
+
 class UploadSerializer(jsonld.JsonLdSerializer):
     type = serializers.ChoiceField(choices=[contexts.AS.Audio])
     id = serializers.URLField(max_length=500)
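A quick worked illustration of the duration helpers added above (commentary only; outputs follow directly from the code as written):

duration_int_to_xml(42)      # "PT42S"
duration_int_to_xml(3661)    # "PT1H1M1S"
duration_int_to_xml(90061)   # "P1DT1H1M1S"
duration_int_to_xml(0)       # None (falsy durations are skipped)

# And the matching parser on the serializer field:
DayTimeDurationSerializer().to_internal_value("P1DT1H1M1S")  # 90061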
@@ -1589,7 +1646,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
     updated = serializers.DateTimeField(required=False, allow_null=True)
     bitrate = serializers.IntegerField(min_value=0)
     size = serializers.IntegerField(min_value=0)
-    duration = serializers.IntegerField(min_value=0)
+    duration = DayTimeDurationSerializer(min_value=0)

     track = TrackSerializer(required=True)
@@ -1625,8 +1682,9 @@ class UploadSerializer(jsonld.JsonLdSerializer):
     def validate_library(self, v):
         lb = self.context.get("library")
         if lb:
-            if lb.fid != v:
-                raise serializers.ValidationError("Invalid library")
+            # the upload can come from a playlist lib
+            if lb.fid != v and not lb.playlist.library and lb.playlist.library.fid != v:
+                raise serializers.ValidationError("Invalid library fid")
             return lb

         actor = self.context.get("actor")
@@ -1638,10 +1696,10 @@ class UploadSerializer(jsonld.JsonLdSerializer):
                 queryset=music_models.Library,
                 serializer_class=LibrarySerializer,
             )
-        except Exception:
-            raise serializers.ValidationError("Invalid library")
+        except Exception as e:
+            raise serializers.ValidationError(f"Invalid library : {e}")
         if actor and library.actor != actor:
-            raise serializers.ValidationError("Invalid library")
+            raise serializers.ValidationError("Invalid library, actor check fails")
         return library

     def update(self, instance, validated_data):
@@ -1692,16 +1750,17 @@ class UploadSerializer(jsonld.JsonLdSerializer):
         return music_models.Upload.objects.create(**data)

     def to_representation(self, instance):
+        lib = instance.library if instance.library else self.context.get("library")
         track = instance.track
         d = {
             "type": "Audio",
             "id": instance.get_federation_id(),
-            "library": instance.library.fid,
+            "library": lib.fid,
             "name": track.full_name,
             "published": instance.creation_date.isoformat(),
             "bitrate": instance.bitrate,
             "size": instance.size,
-            "duration": instance.duration,
+            "duration": duration_int_to_xml(instance.duration),
             "url": [
                 {
                     "href": utils.full_url(instance.listen_url_no_download),
@@ -1715,12 +1774,8 @@ class UploadSerializer(jsonld.JsonLdSerializer):
                 },
             ],
             "track": TrackSerializer(track, context={"include_ap_context": False}).data,
-            "to": (
-                contexts.AS.Public
-                if instance.library.privacy_level == "everyone"
-                else ""
-            ),
-            "attributedTo": instance.library.actor.fid,
+            "to": (contexts.AS.Public if lib.privacy_level == "everyone" else ""),
+            "attributedTo": lib.actor.fid,
         }
         if instance.modification_date:
             d["updated"] = instance.modification_date.isoformat()
@@ -1851,7 +1906,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
     url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1)
     name = serializers.CharField()
     published = serializers.DateTimeField(required=False)
-    duration = serializers.IntegerField(min_value=0, required=False)
+    duration = DayTimeDurationSerializer(required=False)
     position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
     disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
     album = serializers.URLField(max_length=500, required=False)
@@ -1960,7 +2015,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
         if upload.track.local_license:
             data["license"] = upload.track.local_license["identifiers"][0]

-        include_if_not_none(data, upload.duration, "duration")
+        include_if_not_none(data, duration_int_to_xml(upload.duration), "duration")
         include_if_not_none(data, upload.track.position, "position")
         include_if_not_none(data, upload.track.disc_number, "disc")
         include_if_not_none(data, upload.track.copyright, "copyright")
@@ -2280,7 +2335,7 @@ class PlaylistTrackSerializer(jsonld.JsonLdSerializer):
             validated_data["playlist"],
             actor=self.context.get("fetch_actor"),
             queryset=playlists_models.Playlist,
-            serializer_class=PlaylistTrackSerializer,
+            serializer_class=PlaylistSerializer,
         )

         defaults = {
@@ -2289,6 +2344,10 @@ class PlaylistTrackSerializer(jsonld.JsonLdSerializer):
             "creation_date": validated_data["creation_date"],
             "playlist": playlist,
         }
+        if existing_plt := playlists_models.PlaylistTrack.objects.filter(
+            playlist=playlist, index=validated_data["index"]
+        ):
+            existing_plt.delete()
         plt, created = playlists_models.PlaylistTrack.objects.update_or_create(
             defaults,
@@ -2297,7 +2356,6 @@ class PlaylistTrackSerializer(jsonld.JsonLdSerializer):
                 "fid": validated_data["id"],
             },
         )
-
         return plt
@@ -2319,6 +2377,7 @@ class PlaylistSerializer(jsonld.JsonLdSerializer):
         allow_null=True,
         allow_blank=True,
     )
+    library = serializers.URLField(max_length=500, required=True)

     updateable_fields = [
         ("name", "title"),
         ("attributedTo", "attributed_to"),
@@ -2332,6 +2391,7 @@ class PlaylistSerializer(jsonld.JsonLdSerializer):
             "updated": jsonld.first_val(contexts.AS.published),
             "audience": jsonld.first_id(contexts.AS.audience),
             "attributedTo": jsonld.first_id(contexts.AS.attributedTo),
+            "library": jsonld.first_id(contexts.FW.library),
         },
     )
@@ -2343,6 +2403,7 @@ class PlaylistSerializer(jsonld.JsonLdSerializer):
             "attributedTo": playlist.actor.fid,
             "published": playlist.creation_date.isoformat(),
             "audience": playlist.privacy_level,
+            "library": playlist.library.fid,
         }
         payload["audience"] = (
             contexts.AS.Public if playlist.privacy_level == "everyone" else ""
@@ -2360,12 +2421,22 @@ class PlaylistSerializer(jsonld.JsonLdSerializer):
             queryset=models.Actor,
             serializer_class=ActorSerializer,
         )
+        library = utils.retrieve_ap_object(
+            validated_data["library"],
+            actor=self.context.get("fetch_actor"),
+            queryset=music_models.Library,
+            serializer_class=LibrarySerializer,
+        )
+
         ap_to_fw_data = {
             "actor": actor,
             "name": validated_data["name"],
             "creation_date": validated_data["published"],
             "privacy_level": validated_data["audience"],
+            "library": library,
         }
+
         playlist, created = playlists_models.Playlist.objects.update_or_create(
             defaults=ap_to_fw_data,
             **{
@@ -2375,19 +2446,23 @@ class PlaylistSerializer(jsonld.JsonLdSerializer):
                 ),
             },
         )

         return playlist

     def validate(self, data):
         validated_data = super().validate(data)
-        if validated_data["audience"] not in [
+        if validated_data["audience"] in [
             "https://www.w3.org/ns/activitystreams#Public",
             "everyone",
         ]:
-            raise serializers.ValidationError("Privacy_level must be everyone")
             validated_data["audience"] = "everyone"
+        else:
+            validated_data.pop("audience")

         return validated_data

+    def update(self, instance, validated_data):
+        return self.create(validated_data)
+

 class PlaylistCollectionSerializer(PaginatedCollectionSerializer):
     """
@@ -2406,6 +2481,8 @@ class PlaylistCollectionSerializer(PaginatedCollectionSerializer):
                 "tracks",
             ),
             "type": "Playlist",
+            "library": playlist.library.fid,
+            "published": playlist.creation_date.isoformat(),
         }
         r = super().to_representation(conf)
         return r
@@ -236,8 +236,10 @@ def refresh_nodeinfo_known_nodes():
         settings.NODEINFO_REFRESH_DELAY
     """
     limit = timezone.now() - datetime.timedelta(seconds=settings.NODEINFO_REFRESH_DELAY)
-    candidates = models.Domain.objects.external().exclude(
-        nodeinfo_fetch_date__gte=limit
+    candidates = (
+        models.Domain.objects.external()
+        .exclude(nodeinfo_fetch_date__gte=limit)
+        .filter(nodeinfo__software__name="Funkwhale")
     )
     names = candidates.values_list("name", flat=True)
     logger.info("Launching periodic nodeinfo refresh on %s domains", len(names))
......
@@ -23,6 +23,8 @@ music_router.register(r"tracks", views.MusicTrackViewSet, "tracks")
 music_router.register(r"likes", views.TrackFavoriteViewSet, "likes")
 music_router.register(r"listenings", views.ListeningsViewSet, "listenings")
 music_router.register(r"playlists", views.PlaylistViewSet, "playlists")
+music_router.register(r"playlists", views.PlaylistTrackViewSet, "playlist-tracks")
+
 index_router.register(r"index", views.IndexViewSet, "index")
......
@@ -365,6 +365,20 @@ def has_library_access(request, library):
     return library.received_follows.filter(actor=actor, approved=True).exists()


+def has_playlist_access(request, playlist):
+    if playlist.privacy_level == "everyone":
+        return True
+    if request.user.is_authenticated and request.user.is_superuser:
+        return True
+
+    try:
+        actor = request.actor
+    except AttributeError:
+        return False
+
+    return playlist.library.received_follows.filter(actor=actor, approved=True).exists()
+
+
 class MusicLibraryViewSet(
     FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
 ):
@@ -383,13 +397,16 @@ class MusicLibraryViewSet(
         lb = self.get_object()
         if utils.should_redirect_ap_to_html(request.headers.get("accept")):
             return redirect_to_html(lb.get_absolute_url())
+        items_qs = (
+            lb.uploads.for_federation()
+            if not lb.playlist_uploads.all()
+            else lb.playlist_uploads.for_federation()
+        )
         conf = {
             "id": lb.get_federation_id(),
             "actor": lb.actor,
             "name": lb.name,
-            "items": lb.uploads.for_federation()
-            .order_by("-creation_date")
-            .prefetch_related(
+            "items": items_qs.order_by("-creation_date").prefetch_related(
                 Prefetch(
                     "track",
                     queryset=music_models.Track.objects.select_related(
@@ -413,8 +430,8 @@ class MusicLibraryViewSet(
                 )
             ),
             "item_serializer": serializers.UploadSerializer,
+            "library": lb,
         }
         return get_collection_response(
             conf=conf,
             querystring=request.GET,
@@ -709,7 +726,6 @@ class PlaylistViewSet(
     FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
 ):
     authentication_classes = [authentication.SignatureAuthentication]
-    permission_classes = [common_permissions.PrivacyLevelPermission]
     renderer_classes = renderers.get_ap_renderers()
     queryset = playlists_models.Playlist.objects.local().select_related("actor")
     serializer_class = serializers.PlaylistCollectionSerializer
@@ -728,9 +744,31 @@ class PlaylistViewSet(
                 "track",
             ),
             "item_serializer": serializers.PlaylistTrackSerializer,
+            "library": playlist.library.fid,
         }
         return get_collection_response(
             conf=conf,
             querystring=request.GET,
             collection_serializer=serializers.PlaylistCollectionSerializer(playlist),
+            page_access_check=lambda: has_playlist_access(request, playlist),
         )
+
+
+class PlaylistTrackViewSet(
+    FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
+):
+    authentication_classes = [authentication.SignatureAuthentication]
+    renderer_classes = renderers.get_ap_renderers()
+    queryset = playlists_models.PlaylistTrack.objects.local().select_related("actor")
+    serializer_class = serializers.PlaylistTrackSerializer
+    lookup_field = "uuid"
+
+    def retrieve(self, request, *args, **kwargs):
+        plt = self.get_object()
+
+        if not has_playlist_access(request, plt.playlist):
+            return response.Response(status=403)
+
+        if utils.should_redirect_ap_to_html(request.headers.get("accept")):
+            return redirect_to_html(plt.get_absolute_url())
+
+        serializer = self.get_serializer(plt)
+        return response.Response(serializer.data)
@@ -126,7 +126,7 @@ class NodeInfo21(NodeInfo20):
     serializer_class = serializers.NodeInfo21Serializer

     @extend_schema(
-        responses=serializers.NodeInfo20Serializer, operation_id="getNodeInfo20"
+        responses=serializers.NodeInfo21Serializer, operation_id="getNodeInfo21"
     )
     def get(self, request):
         pref = preferences.all()
......
@@ -626,6 +626,7 @@ class ManageUploadSerializer(serializers.ModelSerializer):
     track = ManageNestedTrackSerializer()
     library = ManageNestedLibrarySerializer()
     domain = serializers.CharField(source="domain_name")
+    import_metadata = music_serializers.ImportMetadataField()

     class Meta:
         model = music_models.Upload
......
@@ -26,6 +26,15 @@ class AlbumAdmin(admin.ModelAdmin):
     search_fields = ["title", "mbid"]
     list_select_related = True

+    def formfield_for_manytomany(self, db_field, request, **kwargs):
+        if db_field.name == "artist_credit":
+            object_id = request.resolver_match.kwargs.get("object_id")
+            kwargs["queryset"] = models.ArtistCredit.objects.filter(
+                albums__id=object_id
+            )
+        return super().formfield_for_foreignkey(db_field, request, **kwargs)
+

 @admin.register(models.Track)
 class TrackAdmin(admin.ModelAdmin):
@@ -35,6 +44,14 @@ class TrackAdmin(admin.ModelAdmin):
     def artist(self, obj):
         return obj.get_artist_credit_string

+    def formfield_for_manytomany(self, db_field, request, **kwargs):
+        if db_field.name == "artist_credit":
+            object_id = request.resolver_match.kwargs.get("object_id")
+            kwargs["queryset"] = models.ArtistCredit.objects.filter(
+                tracks__id=object_id
+            )
+        return super().formfield_for_foreignkey(db_field, request, **kwargs)
+

 @admin.register(models.TrackActor)
 class TrackActorAdmin(admin.ModelAdmin):
@@ -81,6 +98,14 @@ class UploadAdmin(admin.ModelAdmin):
     ]
     list_filter = ["mimetype", "import_status", "library__privacy_level"]

+    def formfield_for_manytomany(self, db_field, request, **kwargs):
+        if db_field.name == "playlist_libraries":
+            object_id = request.resolver_match.kwargs.get("object_id")
+            kwargs["queryset"] = models.Library.objects.filter(
+                playlist_uploads=object_id
+            ).distinct()
+        return super().formfield_for_foreignkey(db_field, request, **kwargs)
+

 @admin.register(models.UploadVersion)
 class UploadVersionAdmin(admin.ModelAdmin):
@@ -116,7 +141,7 @@ launch_scan.short_description = "Launch scan"
 class LibraryAdmin(admin.ModelAdmin):
     list_display = ["id", "name", "actor", "uuid", "privacy_level", "creation_date"]
     list_select_related = True
-    search_fields = ["actor__username", "name", "description"]
+    search_fields = ["uuid", "name", "actor__preferred_username"]
     list_filter = ["privacy_level"]
     actions = [launch_scan]
......
@@ -129,7 +129,7 @@ class Format(types.MultipleChoicePreference):
         ("aac", "aac"),
         ("mp3", "mp3"),
     ]
-    help_text = "Witch audio format to allow"
+    help_text = "Which audio format to allow"


 @global_preferences_registry.register
......
@@ -47,7 +47,7 @@ def create_data(super_user_name=None):
     library = federation_factories.MusicLibraryFactory(
         actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
-        local=True,
+        local=True if super_user else False,
     )
     uploads = music_factories.UploadFactory.create_batch(
         size=random.randint(3, 18),
@@ -68,6 +68,7 @@ def create_data(super_user_name=None):
     playlist = playlist_factories.PlaylistFactory(
         name="playlist test public",
         privacy_level="everyone",
+        local=True if super_user else False,
         actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
     )
     playlist_factories.PlaylistTrackFactory(playlist=playlist, track=upload.track)
@@ -112,7 +113,7 @@ def create_data(super_user_name=None):
     # my artist channel
     my_artist_library = federation_factories.MusicLibraryFactory(
         actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
-        local=True,
+        local=True if super_user else False,
     )
     my_artist_channel = audio_factories.ChannelFactory(
         library=my_artist_library,
......
@@ -4,6 +4,7 @@ import logging
 from collections.abc import Mapping

 import arrow
+import magic
 import mutagen._util
 import mutagen.flac
 import mutagen.oggtheora
@@ -131,6 +132,28 @@ def clean_flac_pictures(apic):
     return pictures


+def clean_ogg_coverart(metadata_block_picture):
+    pictures = []
+    for b64_data in [metadata_block_picture]:
+        try:
+            data = base64.b64decode(b64_data)
+        except (TypeError, ValueError):
+            continue
+
+        mime = magic.Magic(mime=True)
+        mime.from_buffer(data)
+
+        pictures.append(
+            {
+                "mimetype": mime.from_buffer(data),
+                "content": data,
+                "description": "",
+                "type": mutagen.id3.PictureType.COVER_FRONT,
+            }
+        )
+    return pictures
+
+
 def clean_ogg_pictures(metadata_block_picture):
     pictures = []
     for b64_data in [metadata_block_picture]:
@@ -196,10 +219,16 @@ CONF = {
             "license": {},
             "copyright": {},
             "genre": {},
-            "pictures": {
+            "pictures": [
+                {
                     "field": "metadata_block_picture",
                     "to_application": clean_ogg_pictures,
                 },
+                {
+                    "field": "coverart",
+                    "to_application": clean_ogg_coverart,
+                },
+            ],
             "comment": {"field": "comment"},
         },
     },
@@ -221,10 +250,16 @@ CONF = {
             "license": {},
             "copyright": {},
             "genre": {},
-            "pictures": {
+            "pictures": [
+                {
                     "field": "metadata_block_picture",
                     "to_application": clean_ogg_pictures,
                 },
+                {
+                    "field": "coverart",
+                    "to_application": clean_ogg_coverart,
+                },
+            ],
             "comment": {"field": "comment"},
         },
     },
@@ -415,17 +450,19 @@ class Metadata(Mapping):
     def _get_from_self(self, key, default=NODEFAULT):
         try:
-            field_conf = self._conf["fields"][key]
+            field_confs = self._conf["fields"][key]
         except KeyError:
             raise UnsupportedTag(f"{key} is not supported for this file format")
+        if not isinstance(field_confs, list):
+            field_confs = [field_confs]

+        for field_conf in field_confs:
             real_key = field_conf.get("field", key)
             try:
                 getter = field_conf.get("getter", self._conf["getter"])
                 v = getter(self._file, real_key)
             except KeyError:
-                if default == NODEFAULT:
-                    raise TagNotFound(real_key)
-                return default
+                continue

             converter = field_conf.get("to_application")
             if converter:
@@ -434,6 +471,9 @@ class Metadata(Mapping):
             if field:
                 v = field.to_python(v)
             return v
+        if default == NODEFAULT:
+            raise TagNotFound(real_key)
+        return default

     def get_picture(self, *picture_types):
         if not picture_types:
......
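To illustrate the fallback enabled by the list-based field configuration above, a sketch assuming an Ogg file that carries only the legacy coverart tag:

# Hypothetical flow of Metadata._get_from_self("pictures") after this change:
# 1. field_confs is now a two-entry list, iterated in order.
# 2. "metadata_block_picture" is absent -> the getter raises KeyError -> continue.
# 3. "coverart" is present -> clean_ogg_coverart() base64-decodes it, sniffs the
#    mimetype with python-magic, and returns a COVER_FRONT picture dict.
# 4. Only when every configured field is missing does the method raise
#    TagNotFound or return the provided default.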