Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 1861 additions and 159 deletions
# Generated by Django 2.2.10 on 2020-02-06 15:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('audio', '0002_channel_metadata'),
]
operations = [
migrations.AddField(
model_name='channel',
name='rss_url',
field=models.URLField(blank=True, max_length=500, null=True),
),
]
# Generated by Django 3.2.13 on 2022-06-27 19:15
import django.core.serializers.json
from django.db import migrations, models
import funkwhale_api.audio.models
class Migration(migrations.Migration):
dependencies = [
('audio', '0003_channel_rss_url'),
]
operations = [
migrations.AlterField(
model_name='channel',
name='metadata',
field=models.JSONField(blank=True, default=funkwhale_api.audio.models.empty_dict, encoder=django.core.serializers.json.DjangoJSONEncoder, max_length=50000),
),
]
import uuid
from django.contrib.contenttypes.fields import GenericRelation
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.db.models import JSONField
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django.urls import reverse
from django.utils import timezone
from funkwhale_api.federation import keys
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.users import models as user_models
def empty_dict():
return {}
class ChannelQuerySet(models.QuerySet):
def external_rss(self, include=True):
from funkwhale_api.federation import actors
query = models.Q(
attributed_to=actors.get_service_actor(),
actor__preferred_username__startswith="rssfeed-",
)
if include:
return self.filter(query)
return self.exclude(query)
def subscribed(self, actor):
if not actor:
return self.none()
subscriptions = actor.emitted_follows.filter(
approved=True, target__channel__isnull=False
)
return self.filter(actor__in=subscriptions.values_list("target", flat=True))
class Channel(models.Model):
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
artist = models.OneToOneField(
@@ -28,6 +60,51 @@ class Channel(models.Model):
"music.Library", on_delete=models.CASCADE, related_name="channel"
)
creation_date = models.DateTimeField(default=timezone.now)
rss_url = models.URLField(max_length=500, null=True, blank=True)
# metadata to enhance rss feed
metadata = JSONField(
default=empty_dict, max_length=50000, encoder=DjangoJSONEncoder, blank=True
)
fetches = GenericRelation(
"federation.Fetch",
content_type_field="object_content_type",
object_id_field="object_id",
)
objects = ChannelQuerySet.as_manager()
@property
def fid(self):
if not self.is_external_rss:
return self.actor.fid
@property
def is_local(self) -> bool:
return self.actor.is_local
@property
def is_external_rss(self):
return self.actor.preferred_username.startswith("rssfeed-")
def get_absolute_url(self):
suffix = self.uuid
if self.actor.is_local:
suffix = self.actor.preferred_username
else:
suffix = self.actor.full_username
return federation_utils.full_url(f"/channels/{suffix}")
def get_rss_url(self):
if not self.artist.is_local or self.is_external_rss:
return self.rss_url
return federation_utils.full_url(
reverse(
"api:v1:channels-rss",
kwargs={"composite": self.actor.preferred_username},
)
)
def generate_actor(username, **kwargs):
@@ -37,3 +114,9 @@ def generate_actor(username, **kwargs):
actor_data["public_key"] = public.decode("utf-8")
return federation_models.Actor.objects.create(**actor_data)
@receiver(post_delete, sender=Channel)
def delete_channel_related_objs(instance, **kwargs):
instance.library.delete()
instance.artist.delete()
import xml.etree.ElementTree as ET
from rest_framework import negotiation, renderers
from funkwhale_api.subsonic.renderers import dict_to_xml_tree
class PodcastRSSRenderer(renderers.JSONRenderer):
media_type = "application/rss+xml"
def render(self, data, accepted_media_type=None, renderer_context=None):
if not data:
# when stream view is called, we don't have any data
return super().render(data, accepted_media_type, renderer_context)
final = {
"version": "2.0",
"xmlns:atom": "http://www.w3.org/2005/Atom",
"xmlns:itunes": "http://www.itunes.com/dtds/podcast-1.0.dtd",
"xmlns:content": "http://purl.org/rss/1.0/modules/content/",
"xmlns:media": "http://search.yahoo.com/mrss/",
}
final.update(data)
tree = dict_to_xml_tree("rss", final)
return render_xml(tree)
class PodcastRSSContentNegociation(negotiation.DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
return (PodcastRSSRenderer(), PodcastRSSRenderer.media_type)
def render_xml(tree):
return b'<?xml version="1.0" encoding="UTF-8"?>\n' + ET.tostring(
tree, encoding="utf-8"
)
from django.db import transaction
import datetime
import logging
import sys
import time
import uuid
import feedparser
import requests
from django.conf import settings
from django.db import transaction
from django.db.models import Q
from django.templatetags.static import static
from django.urls import reverse
from django.utils import timezone
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from funkwhale_api.common import locales, preferences
from funkwhale_api.common import serializers as common_serializers
from funkwhale_api.common import session
from funkwhale_api.common import utils as common_utils
from funkwhale_api.federation import actors
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.moderation import mrf
from funkwhale_api.music import models as music_models
from funkwhale_api.music import serializers as music_serializers
from funkwhale_api.music import tasks
from funkwhale_api.music.serializers import COVER_WRITE_FIELD, CoverField
from funkwhale_api.tags import models as tags_models
from funkwhale_api.tags import serializers as tags_serializers
from funkwhale_api.users import serializers as users_serializers
from . import categories, models
if sys.version_info < (3, 9):
from backports.zoneinfo import ZoneInfo
else:
from zoneinfo import ZoneInfo
logger = logging.getLogger(__name__)
class ChannelMetadataSerializer(serializers.Serializer):
itunes_category = serializers.ChoiceField(
choices=categories.ITUNES_CATEGORIES, required=True
)
itunes_subcategory = serializers.CharField(required=False, allow_null=True)
language = serializers.ChoiceField(required=True, choices=locales.ISO_639_CHOICES)
copyright = serializers.CharField(required=False, allow_null=True, max_length=255)
owner_name = serializers.CharField(required=False, allow_null=True, max_length=255)
owner_email = serializers.EmailField(required=False, allow_null=True)
explicit = serializers.BooleanField(required=False)
def validate(self, validated_data):
validated_data = super().validate(validated_data)
subcategory = self._validate_itunes_subcategory(
validated_data["itunes_category"], validated_data.get("itunes_subcategory")
)
if subcategory:
validated_data["itunes_subcategory"] = subcategory
return validated_data
def _validate_itunes_subcategory(self, parent, child):
if not child:
return
if child not in categories.ITUNES_CATEGORIES[parent]:
raise serializers.ValidationError(
f'"{child}" is not a valid subcategory for "{parent}"'
)
from . import models
return child
class ChannelCreateSerializer(serializers.Serializer):
name = serializers.CharField(max_length=music_models.MAX_LENGTHS["ARTIST_NAME"])
username = serializers.CharField(max_length=music_models.MAX_LENGTHS["ARTIST_NAME"])
summary = serializers.CharField(max_length=500, allow_blank=True, allow_null=True)
name = serializers.CharField(max_length=federation_models.MAX_LENGTHS["ACTOR_NAME"])
username = serializers.CharField(
max_length=federation_models.MAX_LENGTHS["ACTOR_NAME"],
validators=[users_serializers.ASCIIUsernameValidator()],
)
description = common_serializers.ContentSerializer(allow_null=True)
tags = tags_serializers.TagsListField()
content_category = serializers.ChoiceField(
choices=music_models.ARTIST_CONTENT_CATEGORY_CHOICES
)
metadata = serializers.DictField(required=False)
cover = COVER_WRITE_FIELD
def validate(self, validated_data):
existing_channels = self.context["actor"].owned_channels.count()
if existing_channels >= preferences.get("audio__max_channels"):
raise serializers.ValidationError(
"You have reached the maximum amount of allowed channels"
)
validated_data = super().validate(validated_data)
metadata = validated_data.pop("metadata", {})
if validated_data["content_category"] == "podcast":
metadata_serializer = ChannelMetadataSerializer(data=metadata)
metadata_serializer.is_valid(raise_exception=True)
metadata = metadata_serializer.validated_data
validated_data["metadata"] = metadata
return validated_data
def validate_username(self, value):
if value.lower() in [n.lower() for n in settings.ACCOUNT_USERNAME_BLACKLIST]:
raise serializers.ValidationError("This username is already taken")
matching = federation_models.Actor.objects.local().filter(
preferred_username__iexact=value
)
if matching.exists():
raise serializers.ValidationError("This username is already taken")
return value
@transaction.atomic
def create(self, validated_data):
from . import views
cover = validated_data.pop("cover", None)
description = validated_data.get("description")
artist = music_models.Artist.objects.create(
attributed_to=validated_data["attributed_to"], name=validated_data["name"]
attributed_to=validated_data["attributed_to"],
name=validated_data["name"],
content_category=validated_data["content_category"],
attachment_cover=cover,
)
common_utils.attach_content(artist, "description", description)
if validated_data.get("tags", []):
tags_models.set_tags(artist, *validated_data["tags"])
channel = models.Channel(
artist=artist, attributed_to=validated_data["attributed_to"]
artist=artist,
attributed_to=validated_data["attributed_to"],
metadata=validated_data["metadata"],
)
channel.actor = models.generate_actor(
validated_data["username"],
summary=validated_data["summary"],
name=validated_data["name"],
)
channel.library = music_models.Library.objects.create(
name=channel.actor.preferred_username,
privacy_level="public",
privacy_level="everyone",
actor=validated_data["attributed_to"],
)
channel.save()
channel = views.ChannelViewSet.queryset.get(pk=channel.pk)
return channel
def to_representation(self, obj):
return ChannelSerializer(obj).data
return ChannelSerializer(obj, context=self.context).data
NOOP = object()
class ChannelUpdateSerializer(serializers.Serializer):
name = serializers.CharField(max_length=music_models.MAX_LENGTHS["ARTIST_NAME"])
summary = serializers.CharField(max_length=500, allow_blank=True, allow_null=True)
name = serializers.CharField(max_length=federation_models.MAX_LENGTHS["ACTOR_NAME"])
description = common_serializers.ContentSerializer(allow_null=True)
tags = tags_serializers.TagsListField()
content_category = serializers.ChoiceField(
choices=music_models.ARTIST_CONTENT_CATEGORY_CHOICES
)
metadata = serializers.DictField(required=False)
cover = COVER_WRITE_FIELD
def validate(self, validated_data):
validated_data = super().validate(validated_data)
require_metadata_validation = False
new_content_category = validated_data.get("content_category")
metadata = validated_data.pop("metadata", NOOP)
if (
new_content_category == "podcast"
and self.instance.artist.content_category != "podcast"
):
# updating channel, setting as podcast
require_metadata_validation = True
elif self.instance.artist.content_category == "podcast" and metadata != NOOP:
# channel is podcast, and metadata was updated
require_metadata_validation = True
else:
metadata = self.instance.metadata
if require_metadata_validation:
metadata_serializer = ChannelMetadataSerializer(data=metadata)
metadata_serializer.is_valid(raise_exception=True)
metadata = metadata_serializer.validated_data
validated_data["metadata"] = metadata
return validated_data
@transaction.atomic
def update(self, obj, validated_data):
if validated_data.get("tags") is not None:
tags_models.set_tags(obj.artist, *validated_data["tags"])
actor_update_fields = []
artist_update_fields = []
obj.metadata = validated_data["metadata"]
obj.save(update_fields=["metadata"])
if "description" in validated_data:
common_utils.attach_content(
obj.artist, "description", validated_data["description"]
)
if "summary" in validated_data:
actor_update_fields.append(("summary", validated_data["summary"]))
if "name" in validated_data:
obj.artist.name = validated_data["name"]
obj.artist.save(update_fields=["name"])
actor_update_fields.append(("name", validated_data["name"]))
artist_update_fields.append(("name", validated_data["name"]))
if "content_category" in validated_data:
artist_update_fields.append(
("content_category", validated_data["content_category"])
)
if "cover" in validated_data:
artist_update_fields.append(("attachment_cover", validated_data["cover"]))
if actor_update_fields:
for field, value in actor_update_fields:
setattr(obj.actor, field, value)
obj.actor.save(update_fields=[f for f, _ in actor_update_fields])
if artist_update_fields:
for field, value in artist_update_fields:
setattr(obj.artist, field, value)
obj.artist.save(update_fields=[f for f, _ in artist_update_fields])
return obj
def to_representation(self, obj):
return ChannelSerializer(obj).data
return ChannelSerializer(obj, context=self.context).data
class SimpleChannelArtistSerializer(serializers.Serializer):
id = serializers.IntegerField()
fid = serializers.URLField()
mbid = serializers.CharField()
name = serializers.CharField()
creation_date = serializers.DateTimeField()
modification_date = serializers.DateTimeField()
is_local = serializers.BooleanField()
content_category = serializers.CharField()
description = common_serializers.ContentSerializer(allow_null=True, required=False)
cover = CoverField(allow_null=True, required=False)
channel = serializers.UUIDField(allow_null=True, required=False)
tracks_count = serializers.SerializerMethodField(required=False)
tags = serializers.ListField(
child=serializers.CharField(), source="_prefetched_tagged_items", required=False
)
def get_tracks_count(self, o) -> int:
return getattr(o, "_tracks_count", 0)
class ChannelSerializer(serializers.ModelSerializer):
artist = serializers.SerializerMethodField()
actor = federation_serializers.APIActorSerializer()
artist = SimpleChannelArtistSerializer()
actor = serializers.SerializerMethodField()
downloads_count = serializers.SerializerMethodField()
attributed_to = federation_serializers.APIActorSerializer()
rss_url = serializers.CharField(source="get_rss_url")
url = serializers.SerializerMethodField()
subscriptions_count = serializers.SerializerMethodField()
class Meta:
model = models.Channel
fields = ["uuid", "artist", "attributed_to", "actor", "creation_date"]
fields = [
"uuid",
"artist",
"attributed_to",
"actor",
"creation_date",
"metadata",
"rss_url",
"url",
"downloads_count",
"subscriptions_count",
]
def to_representation(self, obj):
data = super().to_representation(obj)
if self.context.get("subscriptions_count"):
data["subscriptions_count"] = self.get_subscriptions_count(obj)
return data
@extend_schema_field(OpenApiTypes.INT)
def get_subscriptions_count(self, obj) -> int:
return obj.actor.received_follows.exclude(approved=False).count()
def get_downloads_count(self, obj) -> int:
return getattr(obj, "_downloads_count", None) or 0
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, obj):
if obj.attributed_to == actors.get_service_actor():
return None
return federation_serializers.APIActorSerializer(obj.actor).data
@extend_schema_field(OpenApiTypes.URI)
def get_url(self, obj):
return obj.actor.url
class InlineSubscriptionSerializer(serializers.Serializer):
uuid = serializers.UUIDField()
channel = serializers.UUIDField(source="target__channel__uuid")
class AllSubscriptionsSerializer(serializers.Serializer):
results = InlineSubscriptionSerializer(source="*", many=True)
count = serializers.SerializerMethodField()
def get_count(self, o) -> int:
return len(o)
class SubscriptionSerializer(serializers.Serializer):
approved = serializers.BooleanField(read_only=True)
fid = serializers.URLField(read_only=True)
uuid = serializers.UUIDField(read_only=True)
creation_date = serializers.DateTimeField(read_only=True)
def to_representation(self, obj):
data = super().to_representation(obj)
data["channel"] = ChannelSerializer(obj.target.channel).data
return data
class RssSubscribeSerializer(serializers.Serializer):
url = serializers.URLField()
class FeedFetchException(Exception):
pass
class BlockedFeedException(FeedFetchException):
pass
def retrieve_feed(url):
try:
logger.info("Fetching RSS feed at %s", url)
response = session.get_session().get(url)
response.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response:
raise FeedFetchException(
f"Error while fetching feed: HTTP {e.response.status_code}"
)
raise FeedFetchException("Error while fetching feed: unknown error")
except requests.exceptions.Timeout:
raise FeedFetchException("Error while fetching feed: timeout")
except requests.exceptions.ConnectionError:
raise FeedFetchException("Error while fetching feed: connection error")
except requests.RequestException as e:
raise FeedFetchException(f"Error while fetching feed: {e}")
except Exception as e:
raise FeedFetchException(f"Error while fetching feed: {e}")
return response
@transaction.atomic
def get_channel_from_rss_url(url, raise_exception=False):
# first, check if the url is blocked
is_valid, _ = mrf.inbox.apply({"id": url})
if not is_valid:
logger.warn("Feed fetch for url %s dropped by MRF", url)
raise BlockedFeedException("This feed or domain is blocked")
# retrieve the XML payload at the given URL
response = retrieve_feed(url)
parsed_feed = feedparser.parse(response.text)
serializer = RssFeedSerializer(data=parsed_feed["feed"])
if not serializer.is_valid(raise_exception=raise_exception):
raise FeedFetchException(f"Invalid xml content: {serializer.errors}")
# second mrf check with validated data
urls_to_check = set()
atom_link = serializer.validated_data.get("atom_link")
if atom_link and atom_link != url:
urls_to_check.add(atom_link)
if serializer.validated_data["link"] != url:
urls_to_check.add(serializer.validated_data["link"])
for u in urls_to_check:
is_valid, _ = mrf.inbox.apply({"id": u})
if not is_valid:
logger.warn("Feed fetch for url %s dropped by MRF", u)
raise BlockedFeedException("This feed or domain is blocked")
# now, we're clear, we can save the data
channel = serializer.save(rss_url=url)
entries = parsed_feed.entries or []
uploads = []
track_defaults = {}
existing_uploads = list(
channel.library.uploads.all().select_related(
"track__description", "track__attachment_cover"
)
)
if parsed_feed.feed.get("rights"):
track_defaults["copyright"] = parsed_feed.feed.rights
for entry in entries[: settings.PODCASTS_RSS_FEED_MAX_ITEMS]:
logger.debug("Importing feed item %s", entry.id)
s = RssFeedItemSerializer(data=entry)
if not s.is_valid(raise_exception=raise_exception):
logger.debug("Skipping invalid RSS feed item %s, ", entry, str(s.errors))
continue
uploads.append(
s.save(channel, existing_uploads=existing_uploads, **track_defaults)
)
common_utils.on_commit(
music_models.TrackActor.create_entries,
library=channel.library,
delete_existing=True,
)
if uploads:
latest_track_date = max([upload.track.creation_date for upload in uploads])
common_utils.update_modification_date(channel.artist, date=latest_track_date)
return channel, uploads
# RSS related stuff
# https://github.com/simplepie/simplepie-ng/wiki/Spec:-iTunes-Podcast-RSS
# is extremely useful
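# For orientation, a minimal sketch of the channel-level dict that feedparser
# produces and that RssFeedSerializer below consumes (keys follow feedparser
# conventions, values are purely hypothetical):
#   {
#       "title": "Example podcast",
#       "link": "https://podcast.example/",
#       "language": "en",
#       "rights": "© Example",
#       "itunes_explicit": False,
#       "tags": [{"term": "Technology", "scheme": "http://www.itunes.com/"}],
#       "image": {"href": "https://podcast.example/cover.jpg"},
#       "author_detail": {"name": "Alice", "email": "alice@example.org"},
#       "summary_detail": {"type": "text/html", "value": "<p>About the show</p>"},
#   }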
class RssFeedSerializer(serializers.Serializer):
title = serializers.CharField()
link = serializers.URLField(required=False, allow_blank=True)
language = serializers.CharField(required=False, allow_blank=True)
rights = serializers.CharField(required=False, allow_blank=True)
itunes_explicit = serializers.BooleanField(required=False, allow_null=True)
tags = serializers.ListField(required=False)
atom_link = serializers.DictField(required=False)
links = serializers.ListField(required=False)
summary_detail = serializers.DictField(required=False)
author_detail = serializers.DictField(required=False)
image = serializers.DictField(required=False)
def validate_atom_link(self, v):
if (
v.get("rel", "self") == "self"
and v.get("type", "application/rss+xml") == "application/rss+xml"
):
return v["href"]
def validate_links(self, v):
for link in v:
if link.get("rel") == "self":
return link.get("href")
def validate_summary_detail(self, v):
content = v.get("value")
if not content:
return
return {
"content_type": v.get("type", "text/plain"),
"text": content,
}
def validate_image(self, v):
url = v.get("href")
if url:
return {
"url": url,
"mimetype": common_utils.get_mimetype_from_ext(url) or "image/jpeg",
}
def validate_tags(self, v):
data = {}
for row in v:
if row.get("scheme") != "http://www.itunes.com/":
continue
term = row["term"]
if "parent" not in data and term in categories.ITUNES_CATEGORIES:
data["parent"] = term
elif "child" not in data and term in categories.ITUNES_SUBCATEGORIES:
data["child"] = term
elif (
term not in categories.ITUNES_SUBCATEGORIES
and term not in categories.ITUNES_CATEGORIES
):
raw_tags = term.split(" ")
data["tags"] = []
tag_serializer = tags_serializers.TagNameField()
for tag in raw_tags:
try:
data["tags"].append(tag_serializer.to_internal_value(tag))
except Exception:
pass
return data
def validate(self, data):
validated_data = super().validate(data)
if not validated_data.get("link"):
validated_data["link"] = validated_data.get("links")
if not validated_data.get("link"):
raise serializers.ValidationError("Missing link")
return validated_data
@transaction.atomic
def save(self, rss_url):
validated_data = self.validated_data
# because there may be redirections from the original feed URL
real_rss_url = validated_data.get("atom_link", rss_url) or rss_url
service_actor = actors.get_service_actor()
author = validated_data.get("author_detail", {})
categories = validated_data.get("tags", {})
metadata = {
"explicit": validated_data.get("itunes_explicit", False),
"copyright": validated_data.get("rights"),
"owner_name": author.get("name"),
"owner_email": author.get("email"),
"itunes_category": categories.get("parent"),
"itunes_subcategory": categories.get("child"),
"language": validated_data.get("language"),
}
public_url = validated_data["link"]
existing = (
models.Channel.objects.external_rss()
.filter(
Q(rss_url=real_rss_url) | Q(rss_url=rss_url) | Q(actor__url=public_url)
)
.first()
)
channel_defaults = {
"rss_url": real_rss_url,
"metadata": metadata,
}
if existing:
artist_kwargs = {"channel": existing}
actor_kwargs = {"channel": existing}
actor_defaults = {"url": public_url}
else:
artist_kwargs = {"pk": None}
actor_kwargs = {"pk": None}
preferred_username = f"rssfeed-{uuid.uuid4()}"
actor_defaults = {
"preferred_username": preferred_username,
"type": "Application",
"domain": service_actor.domain,
"url": public_url,
"fid": federation_utils.full_url(
reverse(
"federation:actors-detail",
kwargs={"preferred_username": preferred_username},
)
),
}
channel_defaults["attributed_to"] = service_actor
actor_defaults["last_fetch_date"] = timezone.now()
# create/update the artist profile
artist, created = music_models.Artist.objects.update_or_create(
**artist_kwargs,
defaults={
"attributed_to": service_actor,
"name": validated_data["title"],
"content_category": "podcast",
},
)
cover = validated_data.get("image")
if cover:
common_utils.attach_file(artist, "attachment_cover", cover)
tags = categories.get("tags", [])
if tags:
tags_models.set_tags(artist, *tags)
summary = validated_data.get("summary_detail")
if summary:
common_utils.attach_content(artist, "description", summary)
if created:
channel_defaults["artist"] = artist
# create/update the actor
actor, created = federation_models.Actor.objects.update_or_create(
**actor_kwargs, defaults=actor_defaults
)
if created:
channel_defaults["actor"] = actor
# create the library
if not existing:
channel_defaults["library"] = music_models.Library.objects.create(
actor=service_actor,
privacy_level=settings.PODCASTS_THIRD_PARTY_VISIBILITY,
name=actor_defaults["preferred_username"],
)
# create/update the channel
channel, created = models.Channel.objects.update_or_create(
pk=existing.pk if existing else None,
defaults=channel_defaults,
)
return channel
class ItunesDurationField(serializers.CharField):
def to_internal_value(self, v):
try:
return int(v)
except (ValueError, TypeError):
pass
parts = v.split(":")
int_parts = []
for part in parts:
try:
int_parts.append(int(part))
except (ValueError, TypeError):
raise serializers.ValidationError(f"Invalid duration {v}")
if len(int_parts) == 2:
hours = 0
minutes, seconds = int_parts
elif len(int_parts) == 3:
hours, minutes, seconds = int_parts
else:
raise serializers.ValidationError(f"Invalid duration {v}")
return (hours * 3600) + (minutes * 60) + seconds
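# Illustrative conversions for the field above (hypothetical inputs):
#   "90" -> 90, "02:05" -> 125, "01:02:05" -> 3725; "1:xx" raises ValidationError.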
class DummyField(serializers.Field):
def to_internal_value(self, v):
return v
def get_cached_upload(uploads, expected_track_uuid):
for upload in uploads:
if upload.track.uuid == expected_track_uuid:
return upload
class PermissiveIntegerField(serializers.IntegerField):
def to_internal_value(self, v):
try:
return super().to_internal_value(v)
except serializers.ValidationError:
return self.default
class RssFeedItemSerializer(serializers.Serializer):
id = serializers.CharField()
title = serializers.CharField()
rights = serializers.CharField(required=False, allow_blank=True)
itunes_season = serializers.IntegerField(
required=False, allow_null=True, default=None
)
itunes_episode = PermissiveIntegerField(
required=False, allow_null=True, default=None
)
itunes_duration = ItunesDurationField(
required=False, allow_null=True, default=None, allow_blank=True
)
links = serializers.ListField()
tags = serializers.ListField(required=False)
summary_detail = serializers.DictField(required=False)
content = serializers.ListField(required=False)
published_parsed = DummyField(required=False)
image = serializers.DictField(required=False)
def validate_summary_detail(self, v):
content = v.get("value")
if not content:
return
return {
"content_type": v.get("type", "text/plain"),
"text": content,
}
def validate_content(self, v):
# TODO: Are there RSS feeds that use more than one content item?
content = v[0].get("value")
if not content:
return
return {
"content_type": v[0].get("type", "text/plain"),
"text": content,
}
def validate_image(self, v):
url = v.get("href")
if url:
return {
"url": url,
"mimetype": common_utils.get_mimetype_from_ext(url) or "image/jpeg",
}
def validate_links(self, v):
data = {}
for row in v:
if not row.get("type", "").startswith("audio/"):
continue
if row.get("rel") != "enclosure":
continue
try:
size = int(row.get("length", 0) or 0) or None
except (TypeError, ValueError):
raise serializers.ValidationError("Invalid size")
data["audio"] = {
"mimetype": common_utils.get_audio_mimetype(row["type"]),
"size": size,
"source": row["href"],
}
if not data:
raise serializers.ValidationError("No valid audio enclosure found")
return data
def validate_tags(self, v):
data = {}
for row in v:
if row.get("scheme") != "http://www.itunes.com/":
continue
term = row["term"]
raw_tags = term.split(" ")
data["tags"] = []
tag_serializer = tags_serializers.TagNameField()
for tag in raw_tags:
try:
data["tags"].append(tag_serializer.to_internal_value(tag))
except Exception:
pass
return data
@transaction.atomic
def save(self, channel, existing_uploads=[], **track_defaults):
validated_data = self.validated_data
categories = validated_data.get("tags", {})
expected_uuid = uuid.uuid3(
uuid.NAMESPACE_URL, "rss://{}-{}".format(channel.pk, validated_data["id"])
)
existing_upload = get_cached_upload(existing_uploads, expected_uuid)
if existing_upload:
existing_track = existing_upload.track
else:
existing_track = (
music_models.Track.objects.filter(
uuid=expected_uuid, artist_credit__artist__channel=channel
)
.select_related("description", "attachment_cover")
.first()
)
if existing_track:
existing_upload = existing_track.uploads.filter(
library=channel.library
).first()
track_defaults = track_defaults
track_defaults.update(
{
"disc_number": validated_data.get("itunes_season", 1) or 1,
"position": validated_data.get("itunes_episode", 1) or 1,
"title": validated_data["title"],
}
)
if "rights" in validated_data:
track_defaults["copyright"] = validated_data["rights"]
if "published_parsed" in validated_data:
track_defaults["creation_date"] = datetime.datetime.fromtimestamp(
time.mktime(validated_data["published_parsed"])
).replace(tzinfo=ZoneInfo("UTC"))
upload_defaults = {
"source": validated_data["links"]["audio"]["source"],
"size": validated_data["links"]["audio"]["size"],
"mimetype": validated_data["links"]["audio"]["mimetype"],
"duration": validated_data.get("itunes_duration") or None,
"import_status": "finished",
"library": channel.library,
}
if existing_track:
track_kwargs = {"pk": existing_track.pk}
upload_kwargs = {"track": existing_track}
else:
track_kwargs = {"pk": None}
track_defaults["uuid"] = expected_uuid
upload_kwargs = {"pk": None}
if existing_upload and existing_upload.source != upload_defaults["source"]:
# delete existing upload, the url to the audio file has changed
existing_upload.delete()
# create/update the track
track, created = music_models.Track.objects.update_or_create(
**track_kwargs,
defaults=track_defaults,
)
# a channel only has one artist, so we can safely update artist_credit
defaults = {
"artist": channel.artist,
"credit": channel.artist.name,
"joinphrase": "",
}
query = (
Q(artist=channel.artist) & Q(credit=channel.artist.name) & Q(joinphrase="")
)
artist_credit = tasks.get_best_candidate_or_create(
music_models.ArtistCredit, query, defaults, ["artist", "joinphrase"]
)
track.artist_credit.set([artist_credit[0]])
# optimisation to reduce SQL queries: we cannot use select_related with
# update_or_create, so we restore the cache by hand
if existing_track:
for field in ["attachment_cover", "description"]:
cached_id_value = getattr(existing_track, f"{field}_id")
new_id_value = getattr(track, f"{field}_id")
if new_id_value and cached_id_value == new_id_value:
setattr(track, field, getattr(existing_track, field))
cover = validated_data.get("image")
if cover:
common_utils.attach_file(track, "attachment_cover", cover)
tags = categories.get("tags", [])
if tags:
tags_models.set_tags(track, *tags)
# "content" refers to the <content:encoded> node in the RSS feed,
# whereas "summary_detail" refers to the <description> node.
# <description> is intended to be used as a short summary and is often
# encoded merely as plain text, whereas <content:encoded> contains
# the full episode description and is generally encoded as HTML.
#
# For details, see https://www.rssboard.org/rss-profile#element-channel-item-description
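# A hypothetical item showing both nodes side by side:
#   <item>
#     <description>Short plain-text summary</description>
#     <content:encoded><![CDATA[<p>Full HTML show notes</p>]]></content:encoded>
#   </item>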
summary = validated_data.get("content")
if not summary:
summary = validated_data.get("summary_detail")
if summary:
common_utils.attach_content(track, "description", summary)
if created:
upload_defaults["track"] = track
# create/update the upload
upload, created = music_models.Upload.objects.update_or_create(
**upload_kwargs, defaults=upload_defaults
)
return upload
def rfc822_date(dt):
return dt.strftime("%a, %d %b %Y %H:%M:%S %z")
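# Illustrative example, assuming an aware UTC datetime:
#   rfc822_date(datetime.datetime(2020, 2, 6, 15, 1, tzinfo=datetime.timezone.utc))
#   -> "Thu, 06 Feb 2020 15:01:00 +0000"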
def rss_duration(seconds):
if not seconds:
return "00:00:00"
full_hours = seconds // 3600
full_minutes = (seconds - (full_hours * 3600)) // 60
remaining_seconds = seconds - (full_hours * 3600) - (full_minutes * 60)
return "{}:{}:{}".format(
str(full_hours).zfill(2),
str(full_minutes).zfill(2),
str(remaining_seconds).zfill(2),
)
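# Illustrative examples: rss_duration(3725) -> "01:02:05", rss_duration(None) -> "00:00:00"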
def rss_serialize_item(upload):
data = {
"title": [{"value": upload.track.title}],
"itunes:title": [{"value": upload.track.title}],
"guid": [{"cdata_value": str(upload.uuid), "isPermaLink": "false"}],
"pubDate": [{"value": rfc822_date(upload.creation_date)}],
"itunes:duration": [{"value": rss_duration(upload.duration)}],
"itunes:explicit": [{"value": "no"}],
"itunes:episodeType": [{"value": "full"}],
"itunes:season": [{"value": upload.track.disc_number or 1}],
"itunes:episode": [{"value": upload.track.position or 1}],
"link": [{"value": federation_utils.full_url(upload.track.get_absolute_url())}],
"enclosure": [
{
# we enforce MP3, since it's the only format supported everywhere
"url": federation_utils.full_url(
reverse(
"api:v1:stream-detail", kwargs={"uuid": str(upload.track.uuid)}
)
+ ".mp3"
),
"length": upload.size or 0,
"type": "audio/mpeg",
}
],
}
if upload.track.description:
data["itunes:subtitle"] = [{"value": upload.track.description.truncate(254)}]
data["itunes:summary"] = [{"cdata_value": upload.track.description.rendered}]
data["description"] = [{"value": upload.track.description.as_plain_text}]
if upload.track.attachment_cover:
data["itunes:image"] = [
{"href": upload.track.attachment_cover.download_url_original}
]
tagged_items = getattr(upload.track, "_prefetched_tagged_items", [])
if tagged_items:
data["itunes:keywords"] = [
{"value": ",".join([ti.tag.name for ti in tagged_items])}
]
return data
def rss_serialize_channel(channel):
metadata = channel.metadata or {}
explicit = metadata.get("explicit", False)
copyright = metadata.get("copyright", "All rights reserved")
owner_name = metadata.get("owner_name", channel.attributed_to.display_name)
owner_email = metadata.get("owner_email")
itunes_category = metadata.get("itunes_category")
itunes_subcategory = metadata.get("itunes_subcategory")
language = metadata.get("language")
data = {
"title": [{"value": channel.artist.name}],
"copyright": [{"value": copyright}],
"itunes:explicit": [{"value": "no" if not explicit else "yes"}],
"itunes:author": [{"value": owner_name}],
"itunes:owner": [{"itunes:name": [{"value": owner_name}]}],
"itunes:type": [{"value": "episodic"}],
"link": [{"value": channel.get_absolute_url()}],
"atom:link": [
{
"href": channel.get_rss_url(),
"rel": "self",
"type": "application/rss+xml",
},
{
"href": channel.actor.fid,
"rel": "alternate",
"type": "application/activity+json",
},
],
}
if language:
data["language"] = [{"value": language}]
if owner_email:
data["itunes:owner"][0]["itunes:email"] = [{"value": owner_email}]
if itunes_category:
node = {"text": itunes_category}
if itunes_subcategory:
node["itunes:category"] = [{"text": itunes_subcategory}]
data["itunes:category"] = [node]
if channel.artist.description:
data["itunes:subtitle"] = [{"value": channel.artist.description.truncate(254)}]
data["itunes:summary"] = [{"cdata_value": channel.artist.description.rendered}]
data["description"] = [{"value": channel.artist.description.as_plain_text}]
if channel.artist.attachment_cover:
data["itunes:image"] = [
{"href": channel.artist.attachment_cover.download_url_original}
]
else:
placeholder_url = federation_utils.full_url(
static("images/podcasts-cover-placeholder.png")
)
data["itunes:image"] = [{"href": placeholder_url}]
tagged_items = getattr(channel.artist, "_prefetched_tagged_items", [])
if tagged_items:
data["itunes:keywords"] = [
{"value": " ".join([ti.tag.name for ti in tagged_items])}
]
return data
def rss_serialize_channel_full(channel, uploads):
channel_data = rss_serialize_channel(channel)
channel_data["item"] = [rss_serialize_item(upload) for upload in uploads]
return {"channel": channel_data}
# OPML stuff
def get_opml_outline(channel):
return {
"title": channel.artist.name,
"text": channel.artist.name,
"type": "rss",
"xmlUrl": channel.get_rss_url(),
"htmlUrl": channel.actor.url,
}
def get_artist(self, obj):
return music_serializers.serialize_artist_simple(obj.artist)
def get_opml(channels, date, title):
return {
"version": "2.0",
"head": [{"date": [{"value": rfc822_date(date)}], "title": [{"value": title}]}],
"body": [{"outline": [get_opml_outline(channel) for channel in channels]}],
}
import urllib.parse
from django.conf import settings
from django.db.models import Q
from django.urls import reverse
from rest_framework import serializers
from funkwhale_api.common import middleware, preferences, utils
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music import spa_views
from . import models
def channel_detail(query, redirect_to_ap):
queryset = models.Channel.objects.filter(query).select_related(
"artist__attachment_cover", "actor", "library"
)
try:
obj = queryset.get()
except models.Channel.DoesNotExist:
return []
if redirect_to_ap:
raise middleware.ApiRedirect(obj.actor.fid)
obj_url = utils.join_url(
settings.FUNKWHALE_URL,
utils.spa_reverse(
"channel_detail", kwargs={"username": obj.actor.full_username}
),
)
metas = [
{"tag": "meta", "property": "og:url", "content": obj_url},
{"tag": "meta", "property": "og:title", "content": obj.artist.name},
{"tag": "meta", "property": "og:type", "content": "profile"},
]
if obj.artist.attachment_cover:
metas.append(
{
"tag": "meta",
"property": "og:image",
"content": obj.artist.attachment_cover.download_url_medium_square_crop,
}
)
if preferences.get("federation__enabled"):
metas.append(
{
"tag": "link",
"rel": "alternate",
"type": "application/activity+json",
"href": obj.actor.fid,
}
)
metas.append(
{
"tag": "link",
"rel": "alternate",
"type": "application/rss+xml",
"href": obj.get_rss_url(),
"title": f"{obj.artist.name} - RSS Podcast Feed",
},
)
if obj.library.uploads.all().playable_by(None).exists():
metas.append(
{
"tag": "link",
"rel": "alternate",
"type": "application/json+oembed",
"href": (
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
+ f"?format=json&url={urllib.parse.quote_plus(obj_url)}"
),
}
)
# twitter player is also supported in various software
metas += spa_views.get_twitter_card_metas(type="channel", id=obj.uuid)
return metas
def channel_detail_uuid(request, uuid, redirect_to_ap):
validator = serializers.UUIDField().to_internal_value
try:
uuid = validator(uuid)
except serializers.ValidationError:
return []
return channel_detail(Q(uuid=uuid), redirect_to_ap)
def channel_detail_username(request, username, redirect_to_ap):
validator = federation_utils.get_actor_data_from_username
try:
username_data = validator(username)
except serializers.ValidationError:
return []
query = Q(
actor__domain=username_data["domain"],
actor__preferred_username__iexact=username_data["username"],
)
return channel_detail(query, redirect_to_ap)
import datetime
import logging
from django.conf import settings
from django.db import transaction
from django.utils import timezone
from funkwhale_api.taskapp import celery
from . import models, serializers
logger = logging.getLogger(__name__)
@celery.app.task(name="audio.fetch_rss_feeds")
def fetch_rss_feeds():
limit = timezone.now() - datetime.timedelta(
seconds=settings.PODCASTS_RSS_FEED_REFRESH_DELAY
)
candidates = (
models.Channel.objects.external_rss()
.filter(actor__last_fetch_date__lte=limit)
.values_list("rss_url", flat=True)
)
total = len(candidates)
logger.info("Refreshing %s rss feeds…", total)
for url in candidates:
fetch_rss_feed.delay(rss_url=url)
@celery.app.task(name="audio.fetch_rss_feed")
@transaction.atomic
def fetch_rss_feed(rss_url):
channel = (
models.Channel.objects.external_rss()
.filter(rss_url=rss_url)
.order_by("id")
.first()
)
if not channel:
logger.warn("Cannot refresh non external feed")
return
try:
serializers.get_channel_from_rss_url(rss_url)
except serializers.BlockedFeedException:
# channel was blocked since last fetch, let's delete it
logger.info("Deleting blocked channel linked to %s", rss_url)
channel.delete()
from rest_framework import exceptions, mixins, viewsets
from django import http
from django.db import transaction
from django.db.models import Count, Prefetch, Q, Sum
from django.utils import timezone
from drf_spectacular.utils import extend_schema, extend_schema_view, inline_serializer
from rest_framework import decorators, exceptions, mixins
from rest_framework import permissions as rest_permissions
from rest_framework import response
from rest_framework import serializers as rest_serializers
from rest_framework import viewsets
from funkwhale_api.common import permissions
from funkwhale_api.common import preferences
from funkwhale_api.common import locales, permissions, preferences
from funkwhale_api.common import utils as common_utils
from funkwhale_api.common.mixins import MultipleLookupDetailMixin
from funkwhale_api.federation import actors
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import routes
from funkwhale_api.federation import tasks as federation_tasks
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music import models as music_models
from funkwhale_api.music import views as music_views
from funkwhale_api.users.oauth import permissions as oauth_permissions
from . import filters, models, serializers
from . import categories, filters, models, renderers, serializers
ARTIST_PREFETCH_QS = (
music_models.Artist.objects.select_related(
"description",
"attachment_cover",
)
.prefetch_related(music_views.TAG_PREFETCH)
.annotate(_tracks_count=Count("artist_credit__tracks"))
)
class ChannelsMixin(object):
class ChannelsMixin:
def dispatch(self, request, *args, **kwargs):
if not preferences.get("audio__channels_enabled"):
return http.HttpResponse(status=405)
return super().dispatch(request, *args, **kwargs)
@extend_schema_view(
metedata_choices=extend_schema(operation_id="get_channel_metadata_choices"),
subscribe=extend_schema(operation_id="subscribe_channel"),
unsubscribe=extend_schema(operation_id="unsubscribe_channel"),
rss_subscribe=extend_schema(operation_id="subscribe_channel_rss"),
)
class ChannelViewSet(
ChannelsMixin,
MultipleLookupDetailMixin,
mixins.CreateModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
@@ -25,12 +56,30 @@ class ChannelViewSet(
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
lookup_field = "uuid"
url_lookups = [
{
"lookup_field": "uuid",
"validator": serializers.serializers.UUIDField().to_internal_value,
},
{
"lookup_field": "username",
"validator": federation_utils.get_actor_data_from_username,
"get_query": lambda v: Q(
actor__domain=v["domain"],
actor__preferred_username__iexact=v["username"],
),
},
]
filterset_class = filters.ChannelFilter
serializer_class = serializers.ChannelSerializer
queryset = (
models.Channel.objects.all()
.prefetch_related("library", "attributed_to", "artist", "actor")
.prefetch_related(
"library",
"attributed_to",
"actor",
Prefetch("artist", queryset=ARTIST_PREFETCH_QS),
)
.order_by("-creation_date")
)
permission_classes = [
@@ -48,7 +97,275 @@ class ChannelViewSet(
return serializers.ChannelSerializer
elif self.action in ["update", "partial_update"]:
return serializers.ChannelUpdateSerializer
elif self.action == "create":
return serializers.ChannelCreateSerializer
return serializers.ChannelSerializer
def get_queryset(self):
queryset = super().get_queryset()
if self.action == "retrieve":
queryset = queryset.annotate(
_downloads_count=Sum("artist__artist_credit__tracks__downloads_count")
)
return queryset
def perform_create(self, serializer):
return serializer.save(attributed_to=self.request.user.actor)
def list(self, request, *args, **kwargs):
if self.request.GET.get("output") == "opml":
queryset = self.filter_queryset(self.get_queryset())[:500]
opml = serializers.get_opml(
channels=queryset,
date=timezone.now(),
title="Funkwhale channels OPML export",
)
xml_body = renderers.render_xml(renderers.dict_to_xml_tree("opml", opml))
return http.HttpResponse(xml_body, content_type="application/xml")
else:
return super().list(request, *args, **kwargs)
def get_object(self):
obj = super().get_object()
if (
self.action == "retrieve"
and self.request.GET.get("refresh", "").lower() == "true"
):
obj = music_views.refetch_obj(obj, self.get_queryset())
return obj
@decorators.action(
detail=True,
methods=["post"],
permission_classes=[rest_permissions.IsAuthenticated],
serializer_class=serializers.SubscriptionSerializer,
)
def subscribe(self, request, *args, **kwargs):
object = self.get_object()
subscription = federation_models.Follow(actor=request.user.actor)
subscription.fid = subscription.get_federation_id()
subscription, created = SubscriptionsViewSet.queryset.get_or_create(
target=object.actor,
actor=request.user.actor,
defaults={
"approved": True,
"fid": subscription.fid,
"uuid": subscription.uuid,
},
)
# prefetch stuff
subscription = SubscriptionsViewSet.queryset.get(pk=subscription.pk)
if not object.actor.is_local:
routes.outbox.dispatch({"type": "Follow"}, context={"follow": subscription})
data = serializers.SubscriptionSerializer(subscription).data
return response.Response(data, status=201)
@extend_schema(responses={204: None})
@decorators.action(
detail=True,
methods=["post", "delete"],
permission_classes=[rest_permissions.IsAuthenticated],
)
def unsubscribe(self, request, *args, **kwargs):
object = self.get_object()
follow_qs = request.user.actor.emitted_follows.filter(target=object.actor)
follow = follow_qs.first()
if follow:
if not object.actor.is_local:
routes.outbox.dispatch(
{"type": "Undo", "object": {"type": "Follow"}},
context={"follow": follow},
)
follow_qs.delete()
return response.Response(status=204)
@decorators.action(
detail=True,
methods=["get"],
content_negotiation_class=renderers.PodcastRSSContentNegociation,
)
def rss(self, request, *args, **kwargs):
object = self.get_object()
if not object.attributed_to.is_local:
return response.Response({"detail": "Not found"}, status=404)
if object.attributed_to == actors.get_service_actor():
# external feed, we redirect to the canonical one
return http.HttpResponseRedirect(object.rss_url)
uploads = (
object.library.uploads.playable_by(None)
.prefetch_related(
Prefetch(
"track",
queryset=music_models.Track.objects.select_related(
"attachment_cover", "description"
).prefetch_related(
music_views.TAG_PREFETCH,
),
),
)
.select_related("track__attachment_cover", "track__description")
.order_by("-creation_date")
)[:50]
data = serializers.rss_serialize_channel_full(channel=object, uploads=uploads)
return response.Response(data, status=200)
@extend_schema(
responses=inline_serializer(
name="MetedataChoicesSerializer",
fields={
"language": rest_serializers.ListField(
child=inline_serializer(
name="LanguageItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
},
)
),
"itunes_category": rest_serializers.ListField(
child=inline_serializer(
name="iTunesCategoryItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
"children": rest_serializers.CharField(),
},
)
),
},
)
)
@decorators.action(
methods=["get"],
detail=False,
url_path="metadata-choices",
url_name="metadata_choices",
permission_classes=[],
)
def metedata_choices(self, request, *args, **kwargs):
data = {
"language": [
{"value": code, "label": name} for code, name in locales.ISO_639_CHOICES
],
"itunes_category": [
{"value": code, "label": code, "children": children}
for code, children in categories.ITUNES_CATEGORIES.items()
],
}
return response.Response(data)
@decorators.action(
methods=["post"],
detail=False,
url_path="rss-subscribe",
url_name="rss_subscribe",
)
@transaction.atomic
def rss_subscribe(self, request, *args, **kwargs):
serializer = serializers.RssSubscribeSerializer(data=request.data)
if not serializer.is_valid():
return response.Response(serializer.errors, status=400)
channel = (
models.Channel.objects.filter(
rss_url=serializer.validated_data["url"],
)
.order_by("id")
.first()
)
if not channel:
# try to retrieve the channel via its URL and create it
try:
channel, uploads = serializers.get_channel_from_rss_url(
serializer.validated_data["url"]
)
except serializers.FeedFetchException as e:
return response.Response(
{"detail": str(e)},
status=400,
)
subscription = federation_models.Follow(actor=request.user.actor)
subscription.fid = subscription.get_federation_id()
subscription, created = SubscriptionsViewSet.queryset.get_or_create(
target=channel.actor,
actor=request.user.actor,
defaults={
"approved": True,
"fid": subscription.fid,
"uuid": subscription.uuid,
},
)
# prefetch stuff
subscription = SubscriptionsViewSet.queryset.get(pk=subscription.pk)
return response.Response(
serializers.SubscriptionSerializer(subscription).data, status=201
)
def get_serializer_context(self):
context = super().get_serializer_context()
context["subscriptions_count"] = self.action in [
"retrieve",
"create",
"update",
"partial_update",
]
if self.request.user.is_authenticated:
context["actor"] = self.request.user.actor
return context
@transaction.atomic
def perform_destroy(self, instance):
instance.__class__.objects.filter(pk=instance.pk).delete()
common_utils.on_commit(
federation_tasks.remove_actor.delay, actor_id=instance.actor.pk
)
class SubscriptionsViewSet(
ChannelsMixin,
mixins.RetrieveModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet,
):
lookup_field = "uuid"
serializer_class = serializers.SubscriptionSerializer
queryset = (
federation_models.Follow.objects.exclude(target__channel__isnull=True)
.prefetch_related(
"target__channel__library",
"target__channel__attributed_to",
"actor",
Prefetch("target__channel__artist", queryset=ARTIST_PREFETCH_QS),
)
.order_by("-creation_date")
)
permission_classes = [
oauth_permissions.ScopePermission,
rest_permissions.IsAuthenticated,
]
required_scope = "libraries"
anonymous_policy = False
def get_queryset(self):
qs = super().get_queryset()
return qs.filter(actor=self.request.user.actor)
@extend_schema(
responses=serializers.AllSubscriptionsSerializer(),
operation_id="get_all_subscriptions",
)
@decorators.action(methods=["get"], detail=False)
def all(self, request, *args, **kwargs):
"""
Return all subscriptions of the current user, with only limited data,
so the endpoint stays fast and the UI can display subscription status
without issuing lots of queries.
"""
subscriptions = self.get_queryset().values("uuid", "target__channel__uuid")
payload = serializers.AllSubscriptionsSerializer(subscriptions).data
return response.Response(payload, status=200)
import click
import functools
import click
@click.group()
def cli():
......
import click
from funkwhale_api.music import tasks
from . import base
def handler_add_tags_from_tracks(
artists=False,
albums=False,
):
result = None
if artists:
result = tasks.artists_set_tags_from_tracks()
elif albums:
result = tasks.albums_set_tags_from_tracks()
else:
raise click.BadOptionUsage("You must specify artists or albums")
if result is None:
click.echo(" No relevant tags found")
else:
click.echo(f" Relevant tags added to {len(result)} objects")
@base.cli.group()
def albums():
"""Manage albums"""
pass
@base.cli.group()
def artists():
"""Manage artists"""
pass
@albums.command(name="add-tags-from-tracks")
def albums_add_tags_from_tracks():
"""
Associate tags with albums that have no genre tags, assuming identical tags are found on the album's tracks
"""
handler_add_tags_from_tracks(albums=True)
@artists.command(name="add-tags-from-tracks")
def artists_add_tags_from_tracks():
"""
Associate tags with artists that have no genre tags, assuming identical tags are found on the artist's tracks
"""
handler_add_tags_from_tracks(artists=True)
import click
import sys
from . import base
from . import users # noqa
import click
from rest_framework.exceptions import ValidationError
from . import library # noqa
from . import media # noqa
from . import plugins # noqa
from . import users # noqa
from . import base
def invoke():
try:
@@ -13,7 +16,7 @@ def invoke():
except ValidationError as e:
click.secho("Invalid data:", fg="red")
for field, errors in e.detail.items():
click.secho(" {}:".format(field), fg="red")
click.secho(f" {field}:", fg="red")
for error in errors:
click.secho(" - {}".format(error), fg="red")
click.secho(f" - {error}", fg="red")
sys.exit(1)
import click
from django.conf import settings
from django.core.cache import cache
from django.core.files.storage import default_storage
from versatileimagefield import settings as vif_settings
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
from funkwhale_api.common import utils as common_utils
from funkwhale_api.common.models import Attachment
from . import base
@base.cli.group()
def media():
"""Manage media files (avatars, covers, attachments…)"""
pass
@media.command("generate-thumbnails")
@click.option("-d", "--delete", is_flag=True)
def generate_thumbnails(delete):
"""
Generate thumbnails for all images (avatars, covers, etc.).
This can take a long time and generate a lot of I/O depending on the size
of your library.
"""
click.echo("Deleting existing thumbnails…")
if delete:
try:
# FileSystemStorage doesn't support deleting a non-empty directory
# so we reimplemented a method to do so
default_storage.force_delete("__sized__")
except AttributeError:
# the backend doesn't support directory deletion
pass
MODELS = [
(Attachment, "file", "attachment_square"),
]
for model, attribute, key_set in MODELS:
click.echo(f"Generating thumbnails for {model._meta.label}.{attribute}")
qs = model.objects.exclude(**{f"{attribute}__isnull": True})
qs = qs.exclude(**{attribute: ""})
cache_key = "*{}{}*".format(
settings.MEDIA_URL, vif_settings.VERSATILEIMAGEFIELD_SIZED_DIRNAME
)
entries = cache.keys(cache_key)
if entries:
click.echo(f" Clearing {len(entries)} cache entries: {cache_key}")
for keys in common_utils.batch(iter(entries)):
cache.delete_many(keys)
warmer = VersatileImageFieldWarmer(
instance_or_queryset=qs,
rendition_key_set=key_set,
image_attr=attribute,
verbose=True,
)
click.echo(" Creating images")
num_created, failed_to_create = warmer.warm()
click.echo(f" {num_created} created, {len(failed_to_create)} in error")
import os
import subprocess
import sys
import click
from django.conf import settings
from . import base
@base.cli.group()
def plugins():
"""Manage plugins"""
pass
@plugins.command("install")
@click.argument("plugin", nargs=-1)
def install(plugin):
"""
Install a plugin from a given URL (zip, pip or git are supported)
"""
if not plugin:
return click.echo("No plugin provided")
click.echo("Installing plugins…")
pip_install(list(plugin), settings.FUNKWHALE_PLUGINS_PATH)
def pip_install(deps, target):
if not deps:
return
pip_path = os.path.join(os.path.dirname(sys.executable), "pip")
subprocess.check_call([pip_path, "install", "-t", target] + deps)
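# Usage sketch (illustrative values only): pip's -t/--target flag installs the given
# distributions into the plugins directory so they can later be imported as plugins.
if __name__ == "__main__":
    pip_install(
        ["https://example.com/funkwhale-plugin-demo.zip"],
        "/srv/funkwhale/plugins",
    )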
import click
from django.db import transaction
from funkwhale_api.federation import models as federation_models
from funkwhale_api.users import models, serializers, tasks
from . import base, utils
class FakeRequest:
def __init__(self, session={}):
self.session = session
@@ -37,22 +33,23 @@ def handler_create_user(
utils.logger.debug("Validating user data…")
serializer.is_valid(raise_exception=True)
    # Override e-mail validation, we assume accounts created from CLI have a valid e-mail
request = FakeRequest(session={"account_verified_email": email})
utils.logger.debug("Creating user…")
user = serializer.save(request=request)
utils.logger.debug("Setting permissions and other attributes…")
    user.is_staff = is_staff or is_superuser  # Always set staff if superuser is set
user.upload_quota = upload_quota
user.is_superuser = is_superuser
for permission in permissions:
if permission in models.PERMISSIONS:
utils.logger.debug("Setting %s permission to True", permission)
setattr(user, "permission_{}".format(permission), True)
setattr(user, f"permission_{permission}", True)
else:
utils.logger.warn("Unknown permission %s", permission)
utils.logger.debug("Creating actor…")
user.actor = models.create_actor(user)
models.create_user_libraries(user)
user.save()
return user
@@ -60,7 +57,7 @@ def handler_create_user(
@transaction.atomic
def handler_delete_user(usernames, soft=True):
for username in usernames:
click.echo("Deleting {}…".format(username))
click.echo(f"Deleting {username}")
actor = None
user = None
try:
@@ -157,13 +154,16 @@ def users():
type=click.INT,
)
@click.option(
"--superuser/--no-superuser", default=False,
"--superuser/--no-superuser",
default=False,
)
@click.option(
"--staff/--no-staff", default=False,
"--staff/--no-staff",
default=False,
)
@click.option(
"--permission", multiple=True,
"--permission",
multiple=True,
)
def create(username, password, email, superuser, staff, permission, upload_quota):
"""Create a new user"""
@@ -179,9 +179,9 @@ def create(username, password, email, superuser, staff, permission, upload_quota
permissions=permission,
upload_quota=upload_quota,
)
click.echo("User {} created!".format(user.username))
click.echo(f"User {user.username} created!")
if generated_password:
click.echo(" Generated password: {}".format(generated_password))
click.echo(f" Generated password: {generated_password}")
@base.delete_command(group=users, id_var="username")
@@ -210,7 +209,9 @@ def delete(username, hard):
@click.option("--permission-settings/--no-permission-settings", default=None)
@click.option("--password", default=None, envvar="FUNKWHALE_CLI_USER_UPDATE_PASSWORD")
@click.option(
"-q", "--upload-quota", type=click.INT,
"-q",
"--upload-quota",
type=click.INT,
)
def update(username, **kwargs):
"""Update attributes for given users"""
......
from django.contrib.admin import site  # noqa: F401
from django.contrib.admin import ModelAdmin
from django.contrib.admin import register as initial_register
from django.db.models.fields.related import RelatedField
from . import models, tasks
def register(model):
......
from django.apps import AppConfig, apps
from django.conf import settings
from config import plugins

from . import mutations, utils
class CommonConfig(AppConfig):
@@ -11,3 +14,7 @@ class CommonConfig(AppConfig):
app_names = [app.name for app in apps.app_configs.values()]
mutations.registry.autodiscover(app_names)
utils.monkey_patch_request_build_absolute_uri()
plugins.startup.autodiscover([p + ".funkwhale_ready" for p in settings.PLUGINS])
for p in plugins._plugins.values():
p["settings"] = plugins.load_settings(p["name"], p["settings"])
from urllib.parse import parse_qs
from django.contrib.auth.models import AnonymousUser
from rest_framework import exceptions
from rest_framework_jwt.authentication import BaseJSONWebTokenAuthentication
from funkwhale_api.users.models import User
class TokenHeaderAuth(BaseJSONWebTokenAuthentication):
def get_jwt_value(self, request):
try:
qs = request.get("query_string", b"").decode("utf-8")
parsed = parse_qs(qs)
token = parsed["token"][0]
except KeyError:
raise exceptions.AuthenticationFailed("No token")
if not token:
raise exceptions.AuthenticationFailed("Empty token")
return token
class TokenAuthMiddleware:
def __init__(self, inner):
# Store the ASGI application we were passed
self.inner = inner
def __call__(self, scope):
auth = TokenHeaderAuth()
try:
user, token = auth.authenticate(scope)
except (User.DoesNotExist, exceptions.AuthenticationFailed):
user = AnonymousUser()
scope["user"] = user
return self.inner(scope)
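# TokenAuthMiddleware is meant to wrap the inner ASGI application so consumers see
# scope["user"]. A minimal routing sketch in the channels 2 style used by this code
# (__call__ receives the scope); the URL patterns and consumer are placeholders, not
# taken from this diff:
from channels.routing import ProtocolTypeRouter, URLRouter

application = ProtocolTypeRouter(
    {
        "websocket": TokenAuthMiddleware(
            URLRouter(
                [
                    # path("api/v1/activity", SomeConsumer),  # placeholder consumer
                ]
            )
        ),
    }
)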
# Removed in this revision: JWT-based authentication (rest_framework_jwt)
from django.utils.encoding import smart_text
from django.utils.translation import ugettext as _
from rest_framework import exceptions
from rest_framework_jwt import authentication
from rest_framework_jwt.settings import api_settings


class JSONWebTokenAuthenticationQS(authentication.BaseJSONWebTokenAuthentication):

    www_authenticate_realm = "api"

    def get_jwt_value(self, request):
        token = request.query_params.get("jwt")
        if "jwt" in request.query_params and not token:
            msg = _("Invalid Authorization header. No credentials provided.")
            raise exceptions.AuthenticationFailed(msg)
        return token

    def authenticate_header(self, request):
        return '{0} realm="{1}"'.format(
            api_settings.JWT_AUTH_HEADER_PREFIX, self.www_authenticate_realm
        )


class BearerTokenHeaderAuth(authentication.BaseJSONWebTokenAuthentication):
    """
    For backward compatibility purposes, we used Authorization: JWT <token>
    but Authorization: Bearer <token> is probably better.
    """

    www_authenticate_realm = "api"

    def get_jwt_value(self, request):
        auth = authentication.get_authorization_header(request).split()
        auth_header_prefix = "bearer"

        if not auth:
            if api_settings.JWT_AUTH_COOKIE:
                return request.COOKIES.get(api_settings.JWT_AUTH_COOKIE)
            return None

        if smart_text(auth[0].lower()) != auth_header_prefix:
            return None

        if len(auth) == 1:
            msg = _("Invalid Authorization header. No credentials provided.")
            raise exceptions.AuthenticationFailed(msg)
        elif len(auth) > 2:
            msg = _(
                "Invalid Authorization header. Credentials string "
                "should not contain spaces."
            )
            raise exceptions.AuthenticationFailed(msg)

        return auth[1]

    def authenticate_header(self, request):
        return '{0} realm="{1}"'.format("Bearer", self.www_authenticate_realm)

    def authenticate(self, request):
        auth = super().authenticate(request)
        if auth:
            if not auth[0].actor:
                auth[0].create_actor()
        return auth


class JSONWebTokenAuthentication(authentication.JSONWebTokenAuthentication):
    def authenticate(self, request):
        auth = super().authenticate(request)
        if auth:
            if not auth[0].actor:
                auth[0].create_actor()
        return auth


# Added in this revision: OAuth2 and application-token authentication
from allauth.account.models import EmailAddress
from django.core.cache import cache
from django.utils.translation import gettext as _
from oauth2_provider.contrib.rest_framework.authentication import (
    OAuth2Authentication as BaseOAuth2Authentication,
)
from rest_framework import exceptions

from funkwhale_api.users import models as users_models


class UnverifiedEmail(Exception):
    def __init__(self, user):
        self.user = user


def resend_confirmation_email(request, user):
    THROTTLE_DELAY = 500
    cache_key = f"auth:resent-email-confirmation:{user.pk}"
    if cache.get(cache_key):
        return False
    # We send the confirmation by hand because we don't want to pass the request
    # down to the email rendering, which would raise another UnverifiedEmail
    # exception and restart the sending again and again
    email = EmailAddress.objects.get_for_user(user, user.email)
    email.send_confirmation()
    cache.set(cache_key, True, THROTTLE_DELAY)
    return True


class OAuth2Authentication(BaseOAuth2Authentication):
    def authenticate(self, request):
        try:
            return super().authenticate(request)
        except UnverifiedEmail as e:
            request.oauth2_error = {"error": "unverified_email"}
            resend_confirmation_email(request, e.user)


class ApplicationTokenAuthentication:
    def authenticate(self, request):
        try:
            header = request.headers["Authorization"]
        except KeyError:
            return

        if "Bearer" not in header:
            return

        token = header.split()[-1].strip()
        try:
            application = users_models.Application.objects.exclude(user=None).get(
                token=token
            )
        except users_models.Application.DoesNotExist:
            return

        user = users_models.User.objects.all().for_auth().get(id=application.user_id)
        if not user.is_active:
            msg = _("User account is disabled.")
            raise exceptions.AuthenticationFailed(msg)

        if user.should_verify_email():
            raise UnverifiedEmail(user)

        request.scopes = application.scope.split()
        return user, None
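# The authentication classes added above are typically enabled through DRF's settings;
# a sketch, assuming they live in funkwhale_api.common.authentication (module path not
# confirmed by this diff):
REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": [
        "funkwhale_api.common.authentication.OAuth2Authentication",
        "funkwhale_api.common.authentication.ApplicationTokenAuthentication",
        "rest_framework.authentication.SessionAuthentication",
    ],
}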
import logging
from django_redis.client import default
logger = logging.getLogger(__name__)
class RedisClient(default.DefaultClient):
def get(self, key, default=None, version=None, client=None):
try:
return super().get(key, default=default, version=version, client=client)
except ValueError as e:
if "unsupported pickle protocol" in str(e):
# pickle deserialization error
logger.warn("Error while deserializing pickle value from cache")
return default
else:
raise
def get_many(self, *args, **kwargs):
try:
return super().get_many(*args, **kwargs)
except ValueError as e:
if "unsupported pickle protocol" in str(e):
# pickle deserialization error
logger.warn("Error while deserializing pickle value from cache")
return {}
else:
raise
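# The client above is opted into via django-redis's CLIENT_CLASS option; a
# configuration sketch, assuming the class lives in funkwhale_api.common.cache
# (module path not confirmed by this diff):
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": "redis://127.0.0.1:6379/0",
        "OPTIONS": {
            "CLIENT_CLASS": "funkwhale_api.common.cache.RedisClient",
        },
    }
}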
@@ -8,6 +8,7 @@ from django.core.serializers.json import DjangoJSONEncoder
logger = logging.getLogger(__name__)
channel_layer = get_channel_layer()
group_add = async_to_sync(channel_layer.group_add)
group_discard = async_to_sync(channel_layer.group_discard)
def group_send(group, event):
......