Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision

Target

Select target project
  • funkwhale/funkwhale
  • Luclu7/funkwhale
  • mbothorel/funkwhale
  • EorlBruder/funkwhale
  • tcit/funkwhale
  • JocelynDelalande/funkwhale
  • eneiluj/funkwhale
  • reg/funkwhale
  • ButterflyOfFire/funkwhale
  • m4sk1n/funkwhale
  • wxcafe/funkwhale
  • andybalaam/funkwhale
  • jcgruenhage/funkwhale
  • pblayo/funkwhale
  • joshuaboniface/funkwhale
  • n3ddy/funkwhale
  • gegeweb/funkwhale
  • tohojo/funkwhale
  • emillumine/funkwhale
  • Te-k/funkwhale
  • asaintgenis/funkwhale
  • anoadragon453/funkwhale
  • Sakada/funkwhale
  • ilianaw/funkwhale
  • l4p1n/funkwhale
  • pnizet/funkwhale
  • dante383/funkwhale
  • interfect/funkwhale
  • akhardya/funkwhale
  • svfusion/funkwhale
  • noplanman/funkwhale
  • nykopol/funkwhale
  • roipoussiere/funkwhale
  • Von/funkwhale
  • aurieh/funkwhale
  • icaria36/funkwhale
  • floreal/funkwhale
  • paulwalko/funkwhale
  • comradekingu/funkwhale
  • FurryJulie/funkwhale
  • Legolars99/funkwhale
  • Vierkantor/funkwhale
  • zachhats/funkwhale
  • heyjake/funkwhale
  • sn0w/funkwhale
  • jvoisin/funkwhale
  • gordon/funkwhale
  • Alexander/funkwhale
  • bignose/funkwhale
  • qasim.ali/funkwhale
  • fakegit/funkwhale
  • Kxze/funkwhale
  • stenstad/funkwhale
  • creak/funkwhale
  • Kaze/funkwhale
  • Tixie/funkwhale
  • IISergII/funkwhale
  • lfuelling/funkwhale
  • nhaddag/funkwhale
  • yoasif/funkwhale
  • ifischer/funkwhale
  • keslerm/funkwhale
  • flupe/funkwhale
  • petitminion/funkwhale
  • ariasuni/funkwhale
  • ollie/funkwhale
  • ngaumont/funkwhale
  • techknowlogick/funkwhale
  • Shleeble/funkwhale
  • theflyingfrog/funkwhale
  • jonatron/funkwhale
  • neobrain/funkwhale
  • eorn/funkwhale
  • KokaKiwi/funkwhale
  • u1-liquid/funkwhale
  • marzzzello/funkwhale
  • sirenwatcher/funkwhale
  • newer027/funkwhale
  • codl/funkwhale
  • Zwordi/funkwhale
  • gisforgabriel/funkwhale
  • iuriatan/funkwhale
  • simon/funkwhale
  • bheesham/funkwhale
  • zeoses/funkwhale
  • accraze/funkwhale
  • meliurwen/funkwhale
  • divadsn/funkwhale
  • Etua/funkwhale
  • sdrik/funkwhale
  • Soran/funkwhale
  • kuba-orlik/funkwhale
  • cristianvogel/funkwhale
  • Forceu/funkwhale
  • jeff/funkwhale
  • der_scheibenhacker/funkwhale
  • owlnical/funkwhale
  • jovuit/funkwhale
  • SilverFox15/funkwhale
  • phw/funkwhale
  • mayhem/funkwhale
  • sridhar/funkwhale
  • stromlin/funkwhale
  • rrrnld/funkwhale
  • nitaibezerra/funkwhale
  • jaller94/funkwhale
  • pcouy/funkwhale
  • eduxstad/funkwhale
  • codingHahn/funkwhale
  • captain/funkwhale
  • polyedre/funkwhale
  • leishenailong/funkwhale
  • ccritter/funkwhale
  • lnceballosz/funkwhale
  • fpiesche/funkwhale
  • Fanyx/funkwhale
  • markusblogde/funkwhale
  • Firobe/funkwhale
  • devilcius/funkwhale
  • freaktechnik/funkwhale
  • blopware/funkwhale
  • cone/funkwhale
  • thanksd/funkwhale
  • vachan-maker/funkwhale
  • bbenti/funkwhale
  • tarator/funkwhale
  • prplecake/funkwhale
  • DMarzal/funkwhale
  • lullis/funkwhale
  • hanacgr/funkwhale
  • albjeremias/funkwhale
  • xeruf/funkwhale
  • llelite/funkwhale
  • RoiArthurB/funkwhale
  • cloo/funkwhale
  • nztvar/funkwhale
  • Keunes/funkwhale
  • petitminion/funkwhale-petitminion
  • m-idler/funkwhale
  • SkyLeite/funkwhale
140 results
Select Git revision
Show changes
Showing
with 27837 additions and 40 deletions
from django.db.models import Q
from django.http import Http404
from django.shortcuts import get_object_or_404
from rest_framework import serializers
class MultipleLookupDetailMixin:
    """View mixin resolving a detail object from a single "composite" URL kwarg.

    Subclasses define ``url_lookups``: an ordered list of dicts, each with a
    ``validator`` callable (raising ``serializers.ValidationError`` when the raw
    value doesn't match), a ``lookup_field`` name and, optionally, a
    ``get_query`` callable building the filter from the validated value.
    """

    lookup_value_regex = "[^/]+"
    lookup_field = "composite"

    def get_object(self):
        queryset = self.filter_queryset(self.get_queryset())
        relevant_lookup = None
        value = None
        # pick the first lookup whose validator accepts the raw URL value
        for lookup in self.url_lookups:
            field_validator = lookup["validator"]
            try:
                value = field_validator(self.kwargs["composite"])
            except serializers.ValidationError:
                continue
            else:
                relevant_lookup = lookup
                break
        if relevant_lookup is None:
            # bug fix: when no validator matched, the code below would crash
            # with AttributeError (HTTP 500); an unresolvable URL is a 404
            raise Http404
        get_query = relevant_lookup.get(
            "get_query", lambda value: Q(**{relevant_lookup["lookup_field"]: value})
        )
        query = get_query(value)
        obj = get_object_or_404(queryset, query)
        # May raise a permission denied
        self.check_object_permissions(self.request, obj)
        return obj
import mimetypes
import uuid
import magic
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.json import DjangoJSONEncoder
from django.db import connections, models, transaction
from django.db.models import JSONField, Lookup
from django.db.models.fields import Field
from django.db.models.sql.compiler import SQLCompiler
from django.dispatch import receiver
from django.urls import reverse
from django.utils import timezone
from versatileimagefield.fields import VersatileImageField
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
from funkwhale_api.federation import utils as federation_utils
from . import utils, validators
# Hard limit on the length of user-provided text contents (descriptions, etc.)
CONTENT_TEXT_MAX_LENGTH = 5000
# Mimetypes accepted for Content.content_type
CONTENT_TEXT_SUPPORTED_TYPES = [
    "text/html",
    "text/markdown",
    "text/plain",
]
@Field.register_lookup
class NotEqual(Lookup):
    """Custom ``__ne`` field lookup compiling to a SQL ``<>`` comparison."""

    lookup_name = "ne"

    def as_sql(self, compiler, connection):
        left, left_params = self.process_lhs(compiler, connection)
        right, right_params = self.process_rhs(compiler, connection)
        return f"{left} <> {right}", left_params + right_params
class NullsLastSQLCompiler(SQLCompiler):
    def get_order_by(self):
        """Append ``NULLS LAST`` to every ORDER BY clause on PostgreSQL.

        Other backends receive the ordering unmodified.
        """
        result = super().get_order_by()
        if result and self.connection.vendor == "postgresql":
            return [
                (
                    expr,
                    (
                        # avoid doubling the suffix if it is already present
                        sql + " NULLS LAST" if not sql.endswith(" NULLS LAST") else sql,
                        params,
                        is_ref,
                    ),
                )
                for (expr, (sql, params, is_ref)) in result
            ]
        return result
class NullsLastQuery(models.sql.query.Query):
    """Use a custom compiler to inject 'NULLS LAST' (for PostgreSQL)."""

    def get_compiler(self, using=None, connection=None, elide_empty=True):
        # mirrors Django's Query.get_compiler, swapping in NullsLastSQLCompiler
        if using is None and connection is None:
            raise ValueError("Need either using or connection")
        if using:
            connection = connections[using]
        return NullsLastSQLCompiler(self, connection, using, elide_empty)
class NullsLastQuerySet(models.QuerySet):
    """QuerySet wired to NullsLastQuery so ordered results sort NULLs last."""

    def __init__(self, model=None, query=None, using=None, hints=None):
        super().__init__(model, query, using, hints)
        # only install the custom Query when none was provided: clones pass
        # their own query object and must keep it
        self.query = query or NullsLastQuery(self.model)
class LocalFromFidQuerySet:
    """QuerySet mixin filtering objects whose ``fid`` points at this pod."""

    def local(self, include=True):
        domain = settings.FEDERATION_HOSTNAME
        is_local = models.Q(fid__startswith=f"http://{domain}/") | models.Q(
            fid__startswith=f"https://{domain}/"
        )
        # include=True keeps local objects, include=False keeps remote ones
        return self.filter(is_local if include else ~is_local)
class GenericTargetQuerySet(models.QuerySet):
    def get_for_target(self, target):
        """Filter rows whose generic foreign key points at ``target``."""
        content_type = ContentType.objects.get_for_model(target)
        return self.filter(target_content_type=content_type, target_id=target.pk)
class Mutation(models.Model):
    """A suggested or applied change to another model instance, with moderation state."""

    fid = models.URLField(unique=True, max_length=500, db_index=True)
    uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
    created_by = models.ForeignKey(
        "federation.Actor",
        related_name="created_mutations",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    approved_by = models.ForeignKey(
        "federation.Actor",
        related_name="approved_mutations",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    # mutation type key, as registered in mutations.registry
    type = models.CharField(max_length=100, db_index=True)
    # None = no choice, True = approved, False = refused
    is_approved = models.BooleanField(default=None, null=True)
    # None = not applied, True = applied, False = failed
    is_applied = models.BooleanField(default=None, null=True)
    creation_date = models.DateTimeField(default=timezone.now, db_index=True)
    applied_date = models.DateTimeField(null=True, blank=True, db_index=True)
    summary = models.TextField(max_length=2000, null=True, blank=True)
    # validated payload applied by the registry; previous_state allows rollback
    payload = JSONField(encoder=DjangoJSONEncoder)
    previous_state = JSONField(null=True, default=None, encoder=DjangoJSONEncoder)

    # generic relation to the mutated object
    target_id = models.IntegerField(null=True)
    target_content_type = models.ForeignKey(
        ContentType,
        null=True,
        on_delete=models.CASCADE,
        related_name="targeting_mutations",
    )
    target = GenericForeignKey("target_content_type", "target_id")

    objects = GenericTargetQuerySet.as_manager()

    def get_federation_id(self):
        """Return the existing fid, or build one from the federation edits route."""
        if self.fid:
            return self.fid
        return federation_utils.full_url(
            reverse("federation:edits-detail", kwargs={"uuid": self.uuid})
        )

    def save(self, **kwargs):
        # assign a federation id on first save when none was provided
        if not self.pk and not self.fid:
            self.fid = self.get_federation_id()
        return super().save(**kwargs)

    @transaction.atomic
    def apply(self):
        """Apply this mutation to its target, recording the previous state for rollback."""
        from . import mutations

        if self.is_applied:
            raise ValueError("Mutation was already applied")
        previous_state = mutations.registry.apply(
            type=self.type, obj=self.target, payload=self.payload
        )
        self.previous_state = previous_state
        self.is_applied = True
        self.applied_date = timezone.now()
        self.save(update_fields=["is_applied", "applied_date", "previous_state"])
        return previous_state
def get_file_path(instance, filename):
    """Build the storage path for attachment files (chunked under "attachments")."""
    build_path = utils.ChunkedPath("attachments")
    return build_path(instance, filename)
class AttachmentQuerySet(models.QuerySet):
    def attached(self, include=True):
        """Filter attachments referenced (or not) by at least one related object."""
        related_fields = [
            "covered_album",
            "mutation_attachment",
            "covered_track",
            "covered_artist",
            "iconed_actor",
        ]
        # OR together one "this reverse relation is set" predicate per field
        query = None
        for name in related_fields:
            predicate = ~models.Q(**{name: None})
            if query is None:
                query = predicate
            else:
                query = query | predicate
        if not include:
            query = ~query
        return self.filter(query)

    def local(self, include=True):
        """Filter attachments owned by an actor on this pod's domain."""
        ownership = {"actor__domain_id": settings.FEDERATION_HOSTNAME}
        if include:
            return self.filter(**ownership)
        return self.exclude(**ownership)
class Attachment(models.Model):
    """An image (cover, avatar, …) stored locally or referenced on a remote pod."""

    # Remote URL where the attachment can be fetched
    url = models.URLField(max_length=500, null=True, blank=True)
    uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
    # Actor associated with the attachment
    actor = models.ForeignKey(
        "federation.Actor",
        related_name="attachments",
        on_delete=models.CASCADE,
        null=True,
    )
    creation_date = models.DateTimeField(default=timezone.now)
    last_fetch_date = models.DateTimeField(null=True, blank=True)
    # File size
    size = models.IntegerField(null=True, blank=True)
    mimetype = models.CharField(null=True, blank=True, max_length=200)
    file = VersatileImageField(
        upload_to=get_file_path,
        max_length=255,
        validators=[
            validators.ImageDimensionsValidator(min_width=50, min_height=50),
            validators.FileValidator(
                allowed_extensions=["png", "jpg", "jpeg"],
                max_size=1024 * 1024 * 5,
            ),
        ],
    )

    objects = AttachmentQuerySet.as_manager()

    def save(self, **kwargs):
        # derive size and mimetype from the file before the first save
        if self.file and not self.size:
            self.size = self.file.size
        if self.file and not self.mimetype:
            self.mimetype = self.guess_mimetype()
        # bug fix: forward **kwargs (e.g. update_fields) to the parent save();
        # the previous code called super().save() and silently dropped them
        return super().save(**kwargs)

    @property
    def is_local(self):
        # NOTE(review): no ``fid`` field is declared on this model in this
        # file — confirm ``fid`` exists on Attachment, otherwise this property
        # raises AttributeError
        return federation_utils.is_local(self.fid)

    def guess_mimetype(self):
        """Sniff the mimetype from file content, falling back to the extension."""
        f = self.file
        # read at most the first 1MB for content sniffing
        b = min(1000000, f.size)
        t = magic.from_buffer(f.read(b), mime=True)
        if not t.startswith("image/"):
            # failure, we try guessing by extension
            mt, _ = mimetypes.guess_type(f.name)
            if mt:
                t = mt
        return t

    @property
    def download_url_original(self):
        # serve the local file directly, or proxy remote attachments
        if self.file:
            return utils.media_url(self.file.url)
        proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
        return federation_utils.full_url(proxy_url + "?next=original")

    @property
    def download_url_small_square_crop(self):
        if self.file:
            return utils.media_url(self.file.crop["50x50"].url)
        proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
        return federation_utils.full_url(proxy_url + "?next=small_square_crop")

    @property
    def download_url_medium_square_crop(self):
        if self.file:
            return utils.media_url(self.file.crop["200x200"].url)
        proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
        return federation_utils.full_url(proxy_url + "?next=medium_square_crop")

    @property
    def download_url_large_square_crop(self):
        if self.file:
            return utils.media_url(self.file.crop["600x600"].url)
        proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
        return federation_utils.full_url(proxy_url + "?next=large_square_crop")
class MutationAttachment(models.Model):
    """
    When using attachments in mutations, we need to keep a reference to
    the attachment to ensure it is not pruned by common/tasks.py.

    This is what this model does.
    """

    attachment = models.OneToOneField(
        Attachment, related_name="mutation_attachment", on_delete=models.CASCADE
    )
    mutation = models.OneToOneField(
        Mutation, related_name="mutation_attachment", on_delete=models.CASCADE
    )

    class Meta:
        # each attachment/mutation pair is linked at most once
        unique_together = ("attachment", "mutation")
class Content(models.Model):
    """
    A text content that can be associated to other models, like a description, a summary, etc.
    """

    text = models.CharField(max_length=CONTENT_TEXT_MAX_LENGTH, blank=True, null=True)
    content_type = models.CharField(max_length=100)

    @property
    def rendered(self):
        """Text rendered to HTML according to ``content_type``."""
        from . import utils

        return utils.render_html(self.text, self.content_type)

    @property
    def as_plain_text(self):
        """Plain-text version of the rendered HTML."""
        from . import utils

        return utils.render_plain_text(self.rendered)

    def truncate(self, length):
        """Return at most ``length`` characters of plain text, marking truncation."""
        text = self.as_plain_text
        truncated = text[:length]
        if len(truncated) < len(text):
            # bug fix: append an actual ellipsis; the previous code appended
            # an empty string, which was a no-op
            truncated += "…"
        return truncated
@receiver(models.signals.post_save, sender=Attachment)
def warm_attachment_thumbnails(sender, instance, **kwargs):
    """Pre-generate thumbnail renditions whenever an attachment with a file is saved."""
    if not instance.file or not settings.CREATE_IMAGE_THUMBNAILS:
        return
    thumbnailer = VersatileImageFieldWarmer(
        instance_or_queryset=instance,
        rendition_key_set="attachment_square",
        image_attr="file",
    )
    # counts are not used; warming is best-effort
    thumbnailer.warm()
@receiver(models.signals.post_save, sender=Mutation)
def trigger_mutation_post_init(sender, instance, created, **kwargs):
    """On mutation creation, invoke the serializer's optional ``mutation_post_init`` hook."""
    if not created:
        return

    from . import mutations

    try:
        conf = mutations.registry.get_conf(instance.type, instance.target)
    except mutations.ConfNotFound:
        return
    serializer = conf["serializer_class"]()
    # the hook is optional on serializers
    handler = getattr(serializer, "mutation_post_init", None)
    if handler is None:
        return
    handler(instance)
# Maps model labels to the names of their Content foreign-key fields, so
# remove_attached_content can delete associated contents on instance deletion.
CONTENT_FKS = {
    "music.Track": ["description"],
    "music.Album": ["description"],
    "music.Artist": ["description"],
}
@receiver(models.signals.post_delete, sender=None)
def remove_attached_content(sender, instance, **kwargs):
    """Delete Content rows owned by a just-deleted instance (see CONTENT_FKS)."""
    for field_name in CONTENT_FKS.get(instance._meta.label, []):
        if not getattr(instance, f"{field_name}_id"):
            continue
        try:
            getattr(instance, field_name).delete()
        except Content.DoesNotExist:
            # content already gone: nothing to clean up
            pass
class PluginConfiguration(models.Model):
    """
    Store plugin configuration in DB
    """

    code = models.CharField(max_length=100)
    # null user means the configuration is instance-wide
    user = models.ForeignKey(
        "users.User",
        related_name="plugins",
        on_delete=models.CASCADE,
        null=True,
        blank=True,
    )
    conf = JSONField(null=True, blank=True)
    enabled = models.BooleanField(default=False)
    creation_date = models.DateTimeField(default=timezone.now)

    class Meta:
        # one configuration per plugin code per user
        unique_together = ("user", "code")
import persisting_theory
from django.db import models, transaction
from rest_framework import serializers
class ConfNotFound(KeyError):
    """Raised when no mutation configuration matches a (type, model) pair."""
class Registry(persisting_theory.Registry):
    """Registry mapping mutation types and model classes to serializer configurations."""

    look_into = "mutations"

    def connect(self, type, klass, perm_checkers=None):
        """Return a class decorator registering a serializer for (type, klass)."""

        def decorator(serializer_class):
            t = self.setdefault(type, {})
            t[klass] = {
                "serializer_class": serializer_class,
                "perm_checkers": perm_checkers or {},
            }
            return serializer_class

        return decorator

    @transaction.atomic
    def apply(self, type, obj, payload):
        """Validate ``payload`` and apply the mutation to ``obj``; return the previous state."""
        conf = self.get_conf(type, obj)
        serializer = conf["serializer_class"](obj, data=payload)
        serializer.is_valid(raise_exception=True)
        # snapshot before mutating so callers can persist it for rollback
        previous_state = serializer.get_previous_state(obj, serializer.validated_data)
        serializer.apply(obj, serializer.validated_data)
        return previous_state

    def is_valid(self, type, obj, payload):
        """Validate ``payload`` for ``obj``, raising on invalid data."""
        conf = self.get_conf(type, obj)
        serializer = conf["serializer_class"](obj, data=payload)
        return serializer.is_valid(raise_exception=True)

    def get_validated_payload(self, type, obj, payload):
        """Return ``payload`` validated and serialized for storage."""
        conf = self.get_conf(type, obj)
        serializer = conf["serializer_class"](obj, data=payload)
        serializer.is_valid(raise_exception=True)
        return serializer.payload_serialize(serializer.validated_data)

    def has_perm(self, perm, type, obj, actor):
        """Check whether ``actor`` may "approve" or "suggest" this mutation."""
        if perm not in ["approve", "suggest"]:
            raise ValueError(f"Invalid permission {perm}")
        conf = self.get_conf(type, obj)
        checker = conf["perm_checkers"].get(perm)
        if not checker:
            # no checker registered for this permission: deny by default
            return False
        return checker(obj=obj, actor=actor)

    def get_conf(self, type, obj):
        """Return the configuration for (type, obj's class), falling back to the None key."""
        try:
            type_conf = self[type]
        except KeyError:
            raise ConfNotFound(f"{type} is not a registered mutation")

        try:
            conf = type_conf[obj.__class__]
        except KeyError:
            try:
                # a None key acts as a catch-all configuration for the type
                conf = type_conf[None]
            except KeyError:
                raise ConfNotFound(
                    f"No mutation configuration found for {obj.__class__}"
                )
        return conf
class MutationSerializer(serializers.Serializer):
    """Base serializer for mutations; subclasses implement ``apply``."""

    def apply(self, obj, validated_data):
        raise NotImplementedError()

    def post_apply(self, obj, validated_data):
        """Hook called after a successful apply; no-op by default."""

    def get_previous_state(self, obj, validated_data):
        """Return data needed to roll the mutation back; nothing by default."""
        return None

    def payload_serialize(self, data):
        """Convert validated data into a JSON-storable payload; identity by default."""
        return data
class UpdateMutationSerializer(serializers.ModelSerializer, MutationSerializer):
    """ModelSerializer applying partial updates to the mutated object."""

    def __init__(self, *args, **kwargs):
        # we force partial mode, because update mutations are partial
        kwargs.setdefault("partial", True)
        super().__init__(*args, **kwargs)

    @transaction.atomic
    def apply(self, obj, validated_data):
        """Persist the update on ``obj`` and run the post-apply hook."""
        r = self.update(obj, validated_data)
        self.post_apply(r, validated_data)
        return r

    def validate(self, validated_data):
        # an empty payload would be a no-op mutation: reject it
        if not validated_data:
            raise serializers.ValidationError("You must update at least one field")

        return super().validate(validated_data)

    def db_serialize(self, validated_data):
        """Replace model instances in ``validated_data`` with their configured attribute."""
        serialized_relations = self.get_serialized_relations()
        data = {}
        # ensure model fields are serialized properly
        for key, value in list(validated_data.items()):
            if not isinstance(value, models.Model):
                data[key] = value
                continue
            field = serialized_relations[key]
            data[key] = getattr(value, field)
        return data

    def payload_serialize(self, data):
        data = super().payload_serialize(data)
        # we use our serialized_relations configuration
        # to ensure we store ids instead of model instances in our json
        # payload
        for field, attr in self.get_serialized_relations().items():
            try:
                obj = data[field]
            except KeyError:
                continue
            if obj is None:
                data[field] = None
            else:
                data[field] = getattr(obj, attr)
        return data

    def create(self, validated_data):
        validated_data = self.db_serialize(validated_data)
        return super().create(validated_data)

    def get_previous_state(self, obj, validated_data):
        """Snapshot the fields about to change, for rollback."""
        return get_update_previous_state(
            obj,
            *list(validated_data.keys()),
            serialized_relations=self.get_serialized_relations(),
            handlers=self.get_previous_state_handlers(),
        )

    def get_serialized_relations(self):
        # mapping of field name -> attribute used to serialize related objects
        return {}

    def get_previous_state_handlers(self):
        # mapping of field name -> callable(obj) returning the stored state
        return {}
def get_update_previous_state(obj, *fields, serialized_relations=None, handlers=None):
    """Snapshot the current value of ``fields`` on ``obj`` before an update.

    ``handlers`` maps a field name to a callable building its state entry;
    ``serialized_relations`` maps a related field to the attribute stored as
    its value. Raises ValueError when no field is given.
    """
    # bug fix: avoid mutable default arguments ({} instances shared across calls)
    serialized_relations = serialized_relations if serialized_relations is not None else {}
    handlers = handlers if handlers is not None else {}
    if not fields:
        raise ValueError("You need to provide at least one field")

    state = {}
    for field in fields:
        if field in handlers:
            state[field] = handlers[field](obj)
            continue
        value = getattr(obj, field)
        if isinstance(value, models.Model):
            # we store the related object id and repr for better UX
            id_field = serialized_relations[field]
            related_value = getattr(value, id_field)
            state[field] = {"value": related_value, "repr": str(value)}
        else:
            state[field] = {"value": value}

    return state
registry = Registry()
from rest_framework.pagination import PageNumberPagination, _positive_int


class FunkwhalePagination(PageNumberPagination):
    """Pagination honouring per-view ``default_page_size`` / ``max_page_size``.

    Reconstructed from a side-by-side diff rendering that had doubled the
    first lines of this block.
    """

    page_size_query_param = "page_size"
    default_max_page_size = 50
    default_page_size = None
    view = None

    def paginate_queryset(self, queryset, request, view=None):
        # remember the view so get_page_size can read its overrides
        self.view = view
        return super().paginate_queryset(queryset, request, view)

    def get_page_size(self, request):
        """Return the page size, capped by the view's (or default) maximum."""
        max_page_size = (
            getattr(self.view, "max_page_size", 0) or self.default_max_page_size
        )
        page_size = getattr(self.view, "default_page_size", 0) or max_page_size
        if self.page_size_query_param:
            try:
                return _positive_int(
                    request.query_params[self.page_size_query_param],
                    strict=True,
                    cutoff=max_page_size,
                )
            except (KeyError, ValueError):
                pass

        return page_size
import operator

from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from rest_framework.permissions import BasePermission

from funkwhale_api.common import preferences


class ConditionalAuthentication(BasePermission):
    """Allow anonymous access unless the pod requires authentication.

    Reconstructed from a side-by-side diff rendering that had doubled
    these lines (old and new revision on the same line).
    """

    def has_permission(self, request, view):
        if preferences.get("common__api_authentication_required"):
            # a logged-in user or an authenticated federation actor both count
            return (request.user and request.user.is_authenticated) or (
                hasattr(request, "actor") and request.actor
            )
        return True
...@@ -44,7 +47,68 @@ class OwnerPermission(BasePermission): ...@@ -44,7 +47,68 @@ class OwnerPermission(BasePermission):
return True return True
owner_field = getattr(view, "owner_field", "user") owner_field = getattr(view, "owner_field", "user")
owner_exception = getattr(view, "owner_exception", Http404)
try:
owner = operator.attrgetter(owner_field)(obj) owner = operator.attrgetter(owner_field)(obj)
if owner != request.user: except ObjectDoesNotExist:
raise Http404 raise owner_exception
if not owner or not request.user.is_authenticated or owner != request.user:
raise owner_exception
return True
class PrivacyLevelPermission(BasePermission):
    """
    Ensure the request actor have access to the object considering the privacylevel configuration
    of the user.

    request.user is None if actor, else its Anonymous if user is not auth.
    """

    def has_object_permission(self, request, view, obj):
        if (
            not hasattr(obj, "user")
            and hasattr(obj, "actor")
            and not obj.actor.is_local
        ):
            # it's a remote actor object. It should be public.
            # But we could trigger an update of the remote actor data
            # to avoid leaking data (#2326)
            return True
        # resolve the privacy level: on the object itself, on the actor's
        # user, or on the object's user
        if hasattr(obj, "privacy_level"):
            privacy_level = obj.privacy_level
        elif hasattr(obj, "actor") and obj.actor.user:
            privacy_level = obj.actor.user.privacy_level
        else:
            privacy_level = obj.user.privacy_level

        obj_actor = obj.actor if hasattr(obj, "actor") else obj.user.actor

        if privacy_level == "everyone":
            return True

        # user is anonymous
        if hasattr(request, "actor"):
            request_actor = request.actor
        elif request.user and request.user.is_authenticated:
            request_actor = request.user.actor
        else:
            return False

        if privacy_level == "instance":
            # user is local
            if request.user and hasattr(request.user, "actor"):
                return True
            elif hasattr(request, "actor") and request.actor and request.actor.is_local:
                return True
            else:
                return False
        elif privacy_level == "me" and obj_actor == request_actor:
            return True
        elif request_actor in obj_actor.get_approved_followers():
            # bug fix: the source had this line doubled ("return True return
            # True") from a diff rendering artifact; deduplicated
            return True
        else:
            return False
import json
from django import forms from django import forms
from django.conf import settings from django.conf import settings
from django.forms import JSONField
from dynamic_preferences import serializers, types from dynamic_preferences import serializers, types
from dynamic_preferences.registries import global_preferences_registry from dynamic_preferences.registries import global_preferences_registry
class DefaultFromSettingMixin(object): class DefaultFromSettingMixin:
def get_default(self): def get_default(self):
return getattr(settings, self.setting) return getattr(settings, self.setting)
...@@ -14,6 +17,16 @@ def get(pref): ...@@ -14,6 +17,16 @@ def get(pref):
return manager[pref] return manager[pref]
def all():
    """Return every configured preference from the global registry manager."""
    # NOTE: shadows the ``all`` builtin — kept for backward compatibility
    manager = global_preferences_registry.manager()
    return manager.all()
def set(pref, value):
    """Set the global preference ``pref`` (e.g. "section__name") to ``value``."""
    # NOTE: shadows the ``set`` builtin — kept for backward compatibility
    manager = global_preferences_registry.manager()
    manager[pref] = value
class StringListSerializer(serializers.BaseSerializer): class StringListSerializer(serializers.BaseSerializer):
separator = "," separator = ","
sort = True sort = True
...@@ -25,7 +38,7 @@ class StringListSerializer(serializers.BaseSerializer): ...@@ -25,7 +38,7 @@ class StringListSerializer(serializers.BaseSerializer):
if type(value) not in [list, tuple]: if type(value) not in [list, tuple]:
raise cls.exception( raise cls.exception(
"Cannot serialize, value {} is not a list or a tuple".format(value) f"Cannot serialize, value {value} is not a list or a tuple"
) )
if cls.sort: if cls.sort:
...@@ -44,6 +57,50 @@ class StringListPreference(types.BasePreferenceType): ...@@ -44,6 +57,50 @@ class StringListPreference(types.BasePreferenceType):
field_class = forms.MultipleChoiceField field_class = forms.MultipleChoiceField
def get_api_additional_data(self): def get_api_additional_data(self):
d = super(StringListPreference, self).get_api_additional_data() d = super().get_api_additional_data()
d["choices"] = self.get("choices") d["choices"] = self.get("choices")
return d return d
class JSONSerializer(serializers.BaseSerializer):
    """Preference serializer validating arbitrary JSON with a DRF serializer."""

    # subclasses set data_serializer_class to the DRF serializer used for validation
    required = True

    @classmethod
    def to_db(cls, value, **kwargs):
        """Validate ``value`` and dump it to a JSON string for storage."""
        if not cls.required and value is None:
            return json.dumps(value)
        data_serializer = cls.data_serializer_class(data=value)
        if not data_serializer.is_valid():
            raise cls.exception(
                f"{value} is not a valid value: {data_serializer.errors}"
            )
        value = data_serializer.validated_data
        try:
            # sort keys for a stable stored representation
            return json.dumps(value, sort_keys=True)
        except TypeError:
            raise cls.exception(
                f"Cannot serialize, value {value} is not JSON serializable"
            )

    @classmethod
    def to_python(cls, value, **kwargs):
        """Load the stored JSON string back into Python data."""
        return json.loads(value)
class SerializedPreference(types.BasePreferenceType):
    """
    A preference that store arbitrary JSON and validate it using a rest_framework
    serializer
    """

    # subclasses provide the DRF serializer used to validate stored values
    data_serializer_class = None
    field_class = JSONField
    widget = forms.Textarea

    @property
    def serializer(self):
        # build a JSONSerializer subclass bound to this preference's
        # serializer class and required flag
        class _internal(JSONSerializer):
            data_serializer_class = self.data_serializer_class
            required = self.get("required")

        return _internal
from rest_framework.renderers import JSONRenderer
class ActivityStreamRenderer(JSONRenderer):
    """JSON renderer emitting the ActivityPub media type."""

    media_type = "application/activity+json"
from rest_framework.routers import DefaultRouter
class OptionalSlashRouter(DefaultRouter):
    """DRF router whose generated URLs match with or without a trailing slash."""

    def __init__(self):
        super().__init__()
        # regex fragment making the trailing slash optional in URL patterns
        self.trailing_slash = "/?"
Source diff could not be displayed: it is too large. Options to address this: view the blob.
from . import (
create_actors,
delete_pre_017_federated_uploads,
django_permissions_to_user_permissions,
migrate_to_user_libraries,
test,
)
__all__ = [
"create_actors",
"django_permissions_to_user_permissions",
"migrate_to_user_libraries",
"delete_pre_017_federated_uploads",
"test",
]
"""
Compute different sizes of image used for Album covers and User avatars
"""
from django.db.utils import IntegrityError
from funkwhale_api.users.models import User, create_actor
def main(command, **kwargs):
qs = User.objects.filter(actor__isnull=True).order_by("username")
total = len(qs)
command.stdout.write(f"{total} users found without actors")
for i, user in enumerate(qs):
command.stdout.write(f"{i + 1}/{total} creating actor for {user.username}")
try:
user.actor = create_actor(user)
except IntegrityError as e:
# somehow, an actor with the the url exists in the database
command.stderr.write(f"Error while creating actor: {str(e)}")
continue
user.save(update_fields=["actor"])
"""
Compute different sizes of image used for Album covers and User avatars
"""
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
from funkwhale_api.common.models import Attachment
MODELS = [
(Attachment, "file", "attachment_square"),
]
def main(command, **kwargs):
for model, attribute, key_set in MODELS:
qs = model.objects.exclude(**{f"{attribute}__isnull": True})
qs = qs.exclude(**{attribute: ""})
warmer = VersatileImageFieldWarmer(
instance_or_queryset=qs,
rendition_key_set=key_set,
image_attr=attribute,
verbose=True,
)
command.stdout.write(f"Creating images for {model.__name__} / {attribute}")
num_created, failed_to_create = warmer.warm()
command.stdout.write(
f" {num_created} created, {len(failed_to_create)} in error"
)
"""
Compute different sizes of image used for Album covers and User avatars
"""
from funkwhale_api.music.models import Upload
def main(command, **kwargs):
queryset = Upload.objects.filter(
source__startswith="http", source__contains="/federation/music/file/"
).exclude(source__contains="youtube")
total = queryset.count()
command.stdout.write(f"{total} uploads found")
queryset.delete()
...@@ -10,7 +10,6 @@ from funkwhale_api.users import models ...@@ -10,7 +10,6 @@ from funkwhale_api.users import models
mapping = { mapping = {
"dynamic_preferences.change_globalpreferencemodel": "settings", "dynamic_preferences.change_globalpreferencemodel": "settings",
"music.add_importbatch": "library", "music.add_importbatch": "library",
"federation.change_library": "federation",
} }
...@@ -24,6 +23,6 @@ def main(command, **kwargs): ...@@ -24,6 +23,6 @@ def main(command, **kwargs):
total = users.count() total = users.count()
command.stdout.write( command.stdout.write(
"Updating {} users with {} permission...".format(total, user_permission) f"Updating {total} users with {user_permission} permission..."
) )
users.update(**{"permission_{}".format(user_permission): True}) users.update(**{f"permission_{user_permission}": True})
"""
Mirate instance files to a library #463. For each user that imported music on an
instance, we will create a "default" library with related files and an instance-level
visibility (unless instance has common__api_authentication_required set to False,
in which case the libraries will be public).
Files without any import job will be bounded to a "default" library on the first
superuser account found. This should now happen though.
This command will also generate federation ids for existing resources.
"""
from django.conf import settings
from django.db.models import CharField, F, Value, functions
from funkwhale_api.common import preferences
from funkwhale_api.federation import models as federation_models
from funkwhale_api.music import models
from funkwhale_api.users.models import User
def create_libraries(open_api, stdout):
    """Create a "default" library for each local actor; return {user_pk: library_pk}."""
    local_actors = federation_models.Actor.objects.exclude(user=None).only("pk", "user")
    # public libraries when the API is open, instance-only otherwise
    privacy_level = "everyone" if open_api else "instance"
    stdout.write(
        "* Creating {} libraries with {} visibility".format(
            len(local_actors), privacy_level
        )
    )

    libraries_by_user = {}
    for a in local_actors:
        library, created = models.Library.objects.get_or_create(
            name="default", actor=a, defaults={"privacy_level": privacy_level}
        )
        libraries_by_user[library.actor.user.pk] = library.pk
        if created:
            stdout.write(f" * Created library {library.pk} for user {a.user.pk}")
        else:
            stdout.write(
                " * Found existing library {} for user {}".format(
                    library.pk, a.user.pk
                )
            )
    return libraries_by_user
def update_uploads(libraries_by_user, stdout):
    """Attach library-less uploads (matched via their import jobs) to each user's library."""
    stdout.write("* Updating uploads with proper libraries...")
    for user_id, library_id in libraries_by_user.items():
        jobs = models.ImportJob.objects.filter(
            upload__library=None, batch__submitted_by=user_id
        )
        candidates = models.Upload.objects.filter(
            pk__in=jobs.values_list("upload", flat=True)
        )
        # mark them finished since they were already imported pre-migration
        total = candidates.update(library=library_id, import_status="finished")
        if total:
            stdout.write(f" * Assigned {total} uploads to user {user_id}'s library")
        else:
            stdout.write(f" * No uploads to assign to user {user_id}'s library")
def update_orphan_uploads(open_api, stdout):
    """Attach uploads without any import job to the first superuser's default library."""
    privacy_level = "everyone" if open_api else "instance"
    first_superuser = (
        User.objects.filter(is_superuser=True)
        .exclude(actor=None)
        .order_by("pk")
        .first()
    )
    if not first_superuser:
        stdout.write("* No superuser found, skipping update orphan uploads")
        return
    library, _ = models.Library.objects.get_or_create(
        name="default",
        actor=first_superuser.actor,
        defaults={"privacy_level": privacy_level},
    )
    # only uploads that actually have an audio file are worth keeping
    candidates = (
        models.Upload.objects.filter(library=None, jobs__isnull=True)
        .exclude(audio_file=None)
        .exclude(audio_file="")
    )
    total = candidates.update(library=library, import_status="finished")
    if total:
        stdout.write(
            "* Assigned {} orphaned uploads to superuser {}".format(
                total, first_superuser.pk
            )
        )
    else:
        stdout.write("* No orphaned uploads found")
def set_fid(queryset, path, stdout):
    """Populate missing federation ids (``fid``) on a queryset, in bulk.

    The fid is ``FUNKWHALE_URL + path + uuid``, computed database-side in
    a single UPDATE so no rows are fetched into Python.
    """
    model = queryset.model._meta.label
    base_url = f"{settings.FUNKWHALE_URL}{path}"
    stdout.write(f"* Assigning federation ids to {model} entries (path: {base_url})")
    missing = queryset.filter(fid=None)
    fid_expression = functions.Concat(
        Value(base_url), F("uuid"), output_field=CharField()
    )
    total = missing.update(fid=fid_expression)
    stdout.write(f" * {total} entries updated")
def update_shared_inbox_url(stdout):
    """Point every local actor at the instance-wide shared inbox URL."""
    stdout.write("* Update shared inbox url for local actors...")
    shared_inbox = federation_models.get_shared_inbox_url()
    federation_models.Actor.objects.local().update(shared_inbox_url=shared_inbox)
def generate_actor_urls(part, stdout):
    """Backfill the ``<part>_url`` field for local actors missing it.

    ``part`` is e.g. "followers" or "following"; the URL is built
    database-side from the actor's preferred username.
    """
    field = f"{part}_url"
    stdout.write(f"* Update {field} for local actors...")
    missing = federation_models.Actor.objects.local().filter(**{field: None})
    prefix = f"{settings.FUNKWHALE_URL}/federation/actors/"
    url_expression = functions.Concat(
        Value(prefix),
        F("preferred_username"),
        Value(f"/{part}"),
        output_field=CharField(),
    )
    missing.update(**{field: url_expression})
def main(command, **kwargs):
    """Entry point: migrate pre-library data to the library model.

    Runs every migration step in order: create one default library per
    local user, attach their uploads to it, rescue orphaned uploads, then
    backfill federation ids, shared inbox and actor URLs.
    """
    # An instance that does not require API authentication is "open":
    # the default libraries then get the widest privacy level.
    open_api = not preferences.get("common__api_authentication_required")
    libraries_by_user = create_libraries(open_api, command.stdout)
    update_uploads(libraries_by_user, command.stdout)
    update_orphan_uploads(open_api, command.stdout)
    # Uploads whose library has no local user are excluded: their fid is
    # not ours to assign.
    set_fid_params = [
        (
            models.Upload.objects.exclude(library__actor__user=None),
            "/federation/music/uploads/",
        ),
        (models.Artist.objects.all(), "/federation/music/artists/"),
        (models.Album.objects.all(), "/federation/music/albums/"),
        (models.Track.objects.all(), "/federation/music/tracks/"),
    ]
    for qs, path in set_fid_params:
        set_fid(qs, path, command.stdout)
    update_shared_inbox_url(command.stdout)
    for part in ["followers", "following"]:
        generate_actor_urls(part, command.stdout)
import re import re
from django.contrib.postgres.search import SearchQuery
from django.db.models import Q from django.db.models import Q
from . import utils
QUERY_REGEX = re.compile('(((?P<key>\w+):)?(?P<value>"[^"]+"|[\S]+))') QUERY_REGEX = re.compile(r'(((?P<key>\w+):)?(?P<value>"[^"]+"|[\S]+))')
def parse_query(query): def parse_query(query):
""" """
Given a search query such as "hello is:issue status:opened", Given a search query such as "hello is:issue status:opened",
returns a list of dictionnaries discribing each query token returns a list of dictionaries describing each query token
""" """
matches = [m.groupdict() for m in QUERY_REGEX.finditer(query.lower())] matches = [m.groupdict() for m in QUERY_REGEX.finditer(query.lower())]
for m in matches: for m in matches:
...@@ -23,7 +25,7 @@ def normalize_query( ...@@ -23,7 +25,7 @@ def normalize_query(
findterms=re.compile(r'"([^"]+)"|(\S+)').findall, findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r"\s{2,}").sub, normspace=re.compile(r"\s{2,}").sub,
): ):
""" Splits the query string in invidual keywords, getting rid of unecessary spaces """Splits the query string in individual keywords, getting rid of unnecessary spaces
and grouping quoted words together. and grouping quoted words together.
Example: Example:
...@@ -56,6 +58,59 @@ def get_query(query_string, search_fields): ...@@ -56,6 +58,59 @@ def get_query(query_string, search_fields):
return query return query
def remove_chars(string, chars):
    """Return ``string`` with every occurrence of each entry of ``chars`` removed."""
    result = string
    for unwanted in chars:
        result = result.replace(unwanted, "")
    return result
def get_fts_query(query_string, fts_fields=("body_text",), model=None):
    """Build a Q object performing a postgres full-text search.

    When ``query_string`` is fully quoted it is handed to the FTS engine
    as-is (raw search); otherwise it is sanitized and turned into a
    prefix-match query (``word:*`` terms joined with ``&``).

    ``fts_fields`` lists the lookups to search. A nested lookup such as
    ``description__text`` is resolved through a subquery on the related
    ``model`` for performance. Returns ``Q(pk=None)`` (matches nothing)
    when the effective query is empty.

    Note: the default is a tuple, not a list — mutable default arguments
    are a well-known Python pitfall, and callers only ever iterate it.
    """
    search_type = "raw"
    if query_string.startswith('"') and query_string.endswith('"'):
        # we pass the query directly to the FTS engine
        query_string = query_string[1:-1]
    else:
        # strip characters that carry meaning in the raw FTS syntax
        query_string = remove_chars(query_string, ['"', "&", "(", ")", "!", "'"])
        parts = query_string.replace(":", "").split(" ")
        parts = [f"{p}:*" for p in parts if p]
        if not parts:
            return Q(pk=None)
        query_string = "&".join(parts)
    if not fts_fields or not query_string.strip():
        return Q(pk=None)
    query = None
    for field in fts_fields:
        if "__" in field and model:
            # When we have a nested lookup, we switch to a subquery for enhanced performance
            fk_field_name, lookup = (
                field.split("__")[0],
                "__".join(field.split("__")[1:]),
            )
            fk_field = model._meta.get_field(fk_field_name)
            related_model = fk_field.related_model
            subquery = related_model.objects.filter(
                **{
                    lookup: SearchQuery(
                        query_string, search_type=search_type, config="english_nostop"
                    )
                }
            ).values_list("pk", flat=True)
            new_query = Q(**{f"{fk_field_name}__in": list(subquery)})
        else:
            new_query = Q(
                **{
                    field: SearchQuery(
                        query_string, search_type=search_type, config="english_nostop"
                    )
                }
            )
        # OR the per-field queries together
        query = utils.join_queries_or(query, new_query)
    return query
def filter_tokens(tokens, valid): def filter_tokens(tokens, valid):
return [t for t in tokens if t["key"] in valid] return [t for t in tokens if t["key"] in valid]
...@@ -65,6 +120,9 @@ def apply(qs, config_data): ...@@ -65,6 +120,9 @@ def apply(qs, config_data):
q = config_data.get(k) q = config_data.get(k)
if q: if q:
qs = qs.filter(q) qs = qs.filter(q)
distinct = config_data.get("distinct", False)
if distinct:
qs = qs.distinct()
return qs return qs
...@@ -77,13 +135,28 @@ class SearchConfig: ...@@ -77,13 +135,28 @@ class SearchConfig:
def clean(self, query): def clean(self, query):
tokens = parse_query(query) tokens = parse_query(query)
cleaned_data = {} cleaned_data = {}
cleaned_data["types"] = self.clean_types(filter_tokens(tokens, ["is"])) cleaned_data["types"] = self.clean_types(filter_tokens(tokens, ["is"]))
cleaned_data["search_query"] = self.clean_search_query( cleaned_data["search_query"] = self.clean_search_query(
filter_tokens(tokens, [None, "in"]) filter_tokens(tokens, [None, "in"] + list(self.search_fields.keys()))
)
unhandled_tokens = [
t
for t in tokens
if t["key"] not in [None, "is", "in"] + list(self.search_fields.keys())
]
cleaned_data["filter_query"], matching_filters = self.clean_filter_query(
unhandled_tokens
) )
unhandled_tokens = [t for t in tokens if t["key"] not in [None, "is", "in"]] if matching_filters:
cleaned_data["filter_query"] = self.clean_filter_query(unhandled_tokens) cleaned_data["distinct"] = any(
[
self.filter_fields[k].get("distinct", False)
for k in matching_filters
if k in self.filter_fields
]
)
else:
cleaned_data["distinct"] = False
return cleaned_data return cleaned_data
def clean_search_query(self, tokens): def clean_search_query(self, tokens):
...@@ -95,24 +168,67 @@ class SearchConfig: ...@@ -95,24 +168,67 @@ class SearchConfig:
} or set(self.search_fields.keys()) } or set(self.search_fields.keys())
fields_subset = set(self.search_fields.keys()) & fields_subset fields_subset = set(self.search_fields.keys()) & fields_subset
to_fields = [self.search_fields[k]["to"] for k in fields_subset] to_fields = [self.search_fields[k]["to"] for k in fields_subset]
specific_field_query = None
for token in tokens:
if token["key"] not in self.search_fields:
continue
to = self.search_fields[token["key"]]["to"]
try:
field = token["field"]
value = field.clean(token["value"])
except KeyError:
# no cleaning to apply
value = token["value"]
q = Q(**{f"{to}__icontains": value})
if not specific_field_query:
specific_field_query = q
else:
specific_field_query &= q
query_string = " ".join([t["value"] for t in filter_tokens(tokens, [None])]) query_string = " ".join([t["value"] for t in filter_tokens(tokens, [None])])
return get_query(query_string, sorted(to_fields)) unhandled_tokens_query = get_query(query_string, sorted(to_fields))
if specific_field_query and unhandled_tokens_query:
return unhandled_tokens_query & specific_field_query
elif specific_field_query:
return specific_field_query
elif unhandled_tokens_query:
return unhandled_tokens_query
return None
def clean_filter_query(self, tokens): def clean_filter_query(self, tokens):
if not self.filter_fields or not tokens: if not self.filter_fields or not tokens:
return return None, []
matching = [t for t in tokens if t["key"] in self.filter_fields] matching = [t for t in tokens if t["key"] in self.filter_fields]
queries = [ queries = [self.get_filter_query(token) for token in matching]
Q(**{self.filter_fields[t["key"]]["to"]: t["value"]}) for t in matching
]
query = None query = None
for q in queries: for q in queries:
if not query: if not query:
query = q query = q
else: else:
query = query & q query = query & q
return query return query, [m["key"] for m in matching]
def get_filter_query(self, token):
    """Translate one ``key:value`` search token into a Q object.

    The token's value is first cleaned through the optional ``field``
    cleaner declared in ``self.filter_fields``; then either a direct
    ``to`` field mapping or a dynamic ``handler`` callable produces the
    final query.
    """
    raw_value = token["value"]
    try:
        field = self.filter_fields[token["key"]]["field"]
        value = field.clean(raw_value)
    except KeyError:
        # no cleaning to apply
        value = raw_value
    try:
        query_field = self.filter_fields[token["key"]]["to"]
        return Q(**{query_field: value})
    except KeyError:
        pass
    # we don't have a basic filter -> field mapping, this likely means we
    # have a dynamic handler in the config
    handler = self.filter_fields[token["key"]]["handler"]
    value = handler(value)
    return value
def clean_types(self, tokens): def clean_types(self, tokens):
if not self.types: if not self.types:
......
import collections
import io
import os
import PIL
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils.encoding import smart_str
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers from rest_framework import serializers
from . import models, utils
class RelatedField(serializers.RelatedField):
    """DRF relational field that resolves objects by an arbitrary attribute.

    Incoming values are matched against ``related_field_name`` on the
    serializer's model; outgoing objects are rendered through the given
    ``serializer``.
    """

    default_error_messages = {
        "does_not_exist": _("Object with {related_field_name}={value} does not exist."),
        "invalid": _("Invalid value."),
    }

    def __init__(self, related_field_name, serializer, **kwargs):
        # filters: optional callable(context) -> extra lookup kwargs
        # queryset_filter: optional callable(queryset, context) -> queryset
        self.related_field_name = related_field_name
        self.serializer = serializer
        self.filters = kwargs.pop("filters", None)
        self.queryset_filter = kwargs.pop("queryset_filter", None)
        try:
            kwargs["queryset"] = kwargs.pop("queryset")
        except KeyError:
            # default to the full queryset of the serializer's model
            kwargs["queryset"] = self.serializer.Meta.model.objects.all()
        super().__init__(**kwargs)

    def get_filters(self, data):
        """Build the lookup kwargs used to resolve the incoming value."""
        filters = {self.related_field_name: data}
        if self.filters:
            filters.update(self.filters(self.context))
        return filters

    def filter_queryset(self, queryset):
        """Apply the optional context-aware queryset restriction."""
        if self.queryset_filter:
            queryset = self.queryset_filter(queryset, self.context)
        return queryset

    def to_internal_value(self, data):
        """Resolve ``data`` to a model instance, or fail with a DRF error."""
        try:
            queryset = self.get_queryset()
            filters = self.get_filters(data)
            queryset = self.filter_queryset(queryset)
            return queryset.get(**filters)
        except ObjectDoesNotExist:
            self.fail(
                "does_not_exist",
                related_field_name=self.related_field_name,
                value=smart_str(data),
            )
        except (TypeError, ValueError):
            self.fail("invalid")

    def to_representation(self, obj):
        return self.serializer.to_representation(obj)

    def get_choices(self, cutoff=None):
        # Mirrors DRF's RelatedField.get_choices, but keys each choice on
        # the serialized related_field_name instead of the pk.
        queryset = self.get_queryset()
        if queryset is None:
            # Ensure that field.choices returns something sensible
            # even when accessed with a read-only field.
            return {}
        if cutoff is not None:
            queryset = queryset[:cutoff]
        return collections.OrderedDict(
            [
                (
                    self.to_representation(item)[self.related_field_name],
                    self.display_value(item),
                )
                for item in queryset
                if self.serializer
            ]
        )
class Action(object): class Action:
def __init__(self, name, allow_all=False, qs_filter=None): def __init__(self, name, allow_all=False, qs_filter=None):
self.name = name self.name = name
self.allow_all = allow_all self.allow_all = allow_all
self.qs_filter = qs_filter self.qs_filter = qs_filter
def __repr__(self): def __repr__(self):
return "<Action {}>".format(self.name) return f"<Action {self.name}>"
class ActionSerializer(serializers.Serializer): class ActionSerializer(serializers.Serializer):
...@@ -21,6 +102,7 @@ class ActionSerializer(serializers.Serializer): ...@@ -21,6 +102,7 @@ class ActionSerializer(serializers.Serializer):
objects = serializers.JSONField(required=True) objects = serializers.JSONField(required=True)
filters = serializers.DictField(required=False) filters = serializers.DictField(required=False)
actions = None actions = None
pk_field = "pk"
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.actions_by_name = {a.name: a for a in self.actions} self.actions_by_name = {a.name: a for a in self.actions}
...@@ -31,7 +113,7 @@ class ActionSerializer(serializers.Serializer): ...@@ -31,7 +113,7 @@ class ActionSerializer(serializers.Serializer):
) )
for action in self.actions_by_name.keys(): for action in self.actions_by_name.keys():
handler_name = "handle_{}".format(action) handler_name = f"handle_{action}"
assert hasattr(self, handler_name), "{} miss a {} method".format( assert hasattr(self, handler_name), "{} miss a {} method".format(
self.__class__.__name__, handler_name self.__class__.__name__, handler_name
) )
...@@ -51,7 +133,9 @@ class ActionSerializer(serializers.Serializer): ...@@ -51,7 +133,9 @@ class ActionSerializer(serializers.Serializer):
if value == "all": if value == "all":
return self.queryset.all().order_by("id") return self.queryset.all().order_by("id")
if type(value) in [list, tuple]: if type(value) in [list, tuple]:
return self.queryset.filter(pk__in=value).order_by("id") return self.queryset.filter(**{f"{self.pk_field}__in": value}).order_by(
self.pk_field
)
raise serializers.ValidationError( raise serializers.ValidationError(
"{} is not a valid value for objects. You must provide either a " "{} is not a valid value for objects. You must provide either a "
...@@ -91,3 +175,206 @@ class ActionSerializer(serializers.Serializer): ...@@ -91,3 +175,206 @@ class ActionSerializer(serializers.Serializer):
"result": result, "result": result,
} }
return payload return payload
def track_fields_for_update(*fields):
    """
    Apply this decorator to a serializer to get a callback when specific
    values are updated on an object:

    .. code-block:: python

        @track_fields_for_update('privacy_level')
        class LibrarySerializer(serializers.ModelSerializer):
            def on_updated_fields(self, obj, before, after):
                print('Do something')

    The decorated serializer's ``update`` is wrapped so that, whenever at
    least one tracked field changed, ``on_updated_fields(obj, before,
    after)`` is called with the tracked values before and after the
    update. (The original docstring showed an ``on_updated_privacy_level``
    hook, but the implementation calls ``on_updated_fields``.)
    """

    def decorator(serializer_class):
        original_update = serializer_class.update

        def new_update(self, obj, validated_data):
            # Snapshot tracked values, run the real update, then compare.
            tracked_fields_before = {f: getattr(obj, f) for f in fields}
            obj = original_update(self, obj, validated_data)
            tracked_fields_after = {f: getattr(obj, f) for f in fields}
            if tracked_fields_before != tracked_fields_after:
                self.on_updated_fields(obj, tracked_fields_before, tracked_fields_after)
            return obj

        serializer_class.update = new_update
        return serializer_class

    return decorator
class StripExifImageField(serializers.ImageField):
    """ImageField that drops metadata (EXIF, etc.) by re-encoding pixel data."""

    def to_internal_value(self, data):
        file_obj = super().to_internal_value(data)
        image = PIL.Image.open(file_obj)
        # Copy only the raw pixel data into a fresh image: any EXIF or
        # other metadata attached to the original stays behind.
        data = list(image.getdata())
        image_without_exif = PIL.Image.new(image.mode, image.size)
        image_without_exif.putdata(data)
        with io.BytesIO() as output:
            # Output format is derived from the uploaded file's extension.
            # NOTE(review): PIL.Image.EXTENSION is only populated once the
            # format plugins are initialized — presumably Image.open above
            # triggers that; confirm for less common formats.
            image_without_exif.save(
                output,
                format=PIL.Image.EXTENSION[os.path.splitext(file_obj.name)[-1].lower()],
                quality=100,
            )
            content = output.getvalue()
        return SimpleUploadedFile(
            file_obj.name, content, content_type=file_obj.content_type
        )
from funkwhale_api.federation import serializers as federation_serializers # noqa
# Maps a Django model label to the (id attribute, public type name) pair
# used when serializing a mutation's target object.
TARGET_ID_TYPE_MAPPING = {
    "music.Track": ("id", "track"),
    "music.Artist": ("id", "artist"),
    "music.Album": ("id", "album"),
}
class APIMutationSerializer(serializers.ModelSerializer):
    """Read/write serializer for mutations exposed over the API."""

    created_by = federation_serializers.APIActorSerializer(read_only=True)
    target = serializers.SerializerMethodField()

    class Meta:
        model = models.Mutation
        fields = [
            "fid",
            "uuid",
            "type",
            "creation_date",
            "applied_date",
            "is_approved",
            "is_applied",
            "created_by",
            "approved_by",
            "summary",
            "payload",
            "previous_state",
            "target",
        ]
        read_only_fields = [
            "uuid",
            "creation_date",
            "fid",
            "is_applied",
            "created_by",
            "approved_by",
            "previous_state",
        ]

    @extend_schema_field(OpenApiTypes.OBJECT)
    def get_target(self, obj):
        """Return a compact description of the mutated object, if any."""
        target = obj.target
        if not target:
            return
        # ``obj_type`` was previously named ``type``, shadowing the builtin.
        # NOTE(review): raises KeyError if the target's model is not in
        # TARGET_ID_TYPE_MAPPING — presumably targets are restricted to
        # those models; confirm against the mutation registry.
        id_field, obj_type = TARGET_ID_TYPE_MAPPING[target._meta.label]
        return {"type": obj_type, "id": getattr(target, id_field), "repr": str(target)}

    def validate_type(self, value):
        """Reject mutation types that are not registered in the context."""
        if value not in self.context["registry"]:
            raise serializers.ValidationError(f"Invalid mutation type {value}")
        return value
class AttachmentSerializer(serializers.Serializer):
    """Serializes an image attachment and the URLs of its resized variants."""

    uuid = serializers.UUIDField(read_only=True)
    size = serializers.IntegerField(read_only=True)
    mimetype = serializers.CharField(read_only=True)
    creation_date = serializers.DateTimeField(read_only=True)
    # EXIF metadata is stripped on upload (privacy: location/device info)
    file = StripExifImageField(write_only=True)
    urls = serializers.SerializerMethodField()

    @extend_schema_field(
        {
            "type": "object",
            "properties": {
                "original": {"type": "string"},
                "small_square_crop": {"type": "string"},
                "medium_square_crop": {"type": "string"},
                "large_square_crop": {"type": "string"},
            },
        }
    )
    def get_urls(self, o):
        """Return download URLs for the source, original and square crops."""
        urls = {}
        urls["source"] = o.url
        urls["original"] = o.download_url_original
        urls["small_square_crop"] = o.download_url_small_square_crop
        urls["medium_square_crop"] = o.download_url_medium_square_crop
        urls["large_square_crop"] = o.download_url_large_square_crop
        return urls

    def create(self, validated_data):
        # "actor" is injected by the caller (view), not client-submitted
        return models.Attachment.objects.create(
            file=validated_data["file"], actor=validated_data["actor"]
        )
class ContentSerializer(serializers.Serializer):
    """Serializes a text content object along with its rendered HTML."""

    text = serializers.CharField(
        max_length=models.CONTENT_TEXT_MAX_LENGTH,
        allow_null=True,
        allow_blank=True,
    )
    content_type = serializers.ChoiceField(
        choices=models.CONTENT_TEXT_SUPPORTED_TYPES,
    )
    html = serializers.SerializerMethodField()

    def get_html(self, o) -> str:
        # Render the raw text to HTML according to its content type.
        return utils.render_html(o.text, o.content_type)
class NullToEmptDict:
    """Serializer-field mixin that renders a ``None`` attribute as ``{}``.

    Must be mixed in before a DRF field class: ``super()`` resolves to the
    sibling field's implementation via the MRO.
    """

    def get_attribute(self, o):
        attr = super().get_attribute(o)
        if attr is None:
            return {}
        return attr

    def to_representation(self, v):
        # Pass falsy values (including the {} from get_attribute) through
        # unchanged instead of delegating to the field's representation.
        if not v:
            return v
        return super().to_representation(v)
class ScopesSerializer(serializers.Serializer):
    """Describes the rate-limit state of a single throttling scope."""

    id = serializers.CharField()
    rate = serializers.CharField()
    description = serializers.CharField()
    limit = serializers.IntegerField()
    duration = serializers.IntegerField()
    remaining = serializers.IntegerField()
    available = serializers.IntegerField()
    available_seconds = serializers.IntegerField()
    reset = serializers.IntegerField()
    reset_seconds = serializers.IntegerField()
class IdentSerializer(serializers.Serializer):
    """Identifies the client a rate limit applies to (e.g. ip or user id)."""

    type = serializers.CharField()
    id = serializers.CharField()
class RateLimitSerializer(serializers.Serializer):
    """Top-level rate-limit payload: enabled flag, client ident and scopes."""

    enabled = serializers.BooleanField()
    ident = IdentSerializer()
    scopes = serializers.ListField(child=ScopesSerializer())
class ErrorDetailSerializer(serializers.Serializer):
    """Renders a plain error value under a ``detail`` key (``source="*"`` maps the whole object)."""

    detail = serializers.CharField(source="*")
class TextPreviewSerializer(serializers.Serializer):
    """Accepts raw ``text`` and returns its rendered form as ``rendered``."""

    rendered = serializers.CharField(read_only=True, source="*")
    text = serializers.CharField(write_only=True)
...@@ -4,6 +4,13 @@ from django.conf import settings ...@@ -4,6 +4,13 @@ from django.conf import settings
import funkwhale_api import funkwhale_api
class FunkwhaleSession(requests.Session):
    """A requests session applying instance-wide SSL and timeout defaults."""

    def request(self, *args, **kwargs):
        # Caller-provided values always win over these defaults.
        defaults = {
            "verify": settings.EXTERNAL_REQUESTS_VERIFY_SSL,
            "timeout": settings.EXTERNAL_REQUESTS_TIMEOUT,
        }
        for option, value in defaults.items():
            kwargs.setdefault(option, value)
        return super().request(*args, **kwargs)
def get_user_agent(): def get_user_agent():
return "python-requests (funkwhale/{}; +{})".format( return "python-requests (funkwhale/{}; +{})".format(
funkwhale_api.__version__, settings.FUNKWHALE_URL funkwhale_api.__version__, settings.FUNKWHALE_URL
...@@ -11,6 +18,6 @@ def get_user_agent(): ...@@ -11,6 +18,6 @@ def get_user_agent():
def get_session(): def get_session():
s = requests.Session() s = FunkwhaleSession()
s.headers["User-Agent"] = get_user_agent() s.headers["User-Agent"] = get_user_agent()
return s return s
import django.dispatch
""" Required args: mutation """
mutation_created = django.dispatch.Signal()
""" Required args: mutation, old_is_approved, new_is_approved """
mutation_updated = django.dispatch.Signal()
import unicodedata import os
import shutil
import slugify
from django.core.files.storage import FileSystemStorage from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
class ASCIIFileSystemStorage(FileSystemStorage): def asciionly(name):
""" """
Convert unicode characters in name to ASCII characters. Convert unicode characters in name to ASCII characters.
""" """
return slugify.slugify(name, ok=slugify.SLUG_OK + ".", only_ascii=True)
class ASCIIFileSystemStorage(FileSystemStorage):
    """Local storage backend that ASCII-fies file names on save."""

    def get_valid_name(self, name):
        # Transliterate to ASCII before the standard sanitization.
        return super().get_valid_name(asciionly(name))

    def force_delete(self, name):
        """Delete ``name`` whether it is a file or a directory; a missing path is ignored."""
        path = self.path(name)
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                return super().delete(name)
        except FileNotFoundError:
            # best-effort deletion: a path that already vanished is fine
            pass
class ASCIIS3Boto3Storage(S3Boto3Storage):
def get_valid_name(self, name): def get_valid_name(self, name):
name = unicodedata.normalize("NFKD", name).encode("ascii", "ignore") return super().get_valid_name(asciionly(name))
return super().get_valid_name(name)