
Compare revisions

Changes are shown as if the source revision were being merged into the target revision.

Commits on Source (94)
Showing 26,545 additions and 92 deletions
COMPOSE_BAKE=true
# api + celeryworker
DEBUG=True
DEFAULT_FROM_EMAIL=hello@funkwhale.test
@@ -20,6 +22,8 @@ C_FORCE_ROOT=true
PYTHONDONTWRITEBYTECODE=true
PYTHONTRACEMALLOC=0
CELERY_TASK_TIME_LIMIT=300
# api
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
......
@@ -34,6 +34,8 @@ pip-log.txt
.tox
nosetests.xml
htmlcov
coverage.xml
report.xml
# Translations
*.mo
@@ -75,11 +77,13 @@ api/staticfiles
api/static
api/.pytest_cache
api/celerybeat-*
# Front
oldfront/node_modules/
front/static/translations
front/node_modules/
front/dist/
front/dev-dist/
front/npm-debug.log*
front/yarn-debug.log*
front/yarn-error.log*
@@ -88,7 +92,16 @@ front/tests/e2e/reports
front/test_results.xml
front/coverage/
front/selenium-debug.log
# Vitepress
front/ui-docs/.vitepress/.vite
front/ui-docs/.vitepress/cache
front/ui-docs/.vitepress/dist
front/ui-docs/public
# Docs
docs/_build
# Tauri
front/tauri/gen
@@ -116,8 +129,14 @@ tsconfig.tsbuildinfo
flake.nix
flake.lock
# Vscode
# VS Code
.vscode/
# Zed
.zed/
# Node version (asdf)
.tool-versions
# Lychee link checker
.lycheecache
@@ -8,50 +8,56 @@ include:
file: /templates/ssh-agent.yml
variables:
PYTHONDONTWRITEBYTECODE: "true"
PYTHONDONTWRITEBYTECODE: 'true'
PIP_CACHE_DIR: $CI_PROJECT_DIR/.cache/pip
YARN_CACHE_FOLDER: $CI_PROJECT_DIR/.cache/yarn
POETRY_VIRTUALENVS_IN_PROJECT: "true"
POETRY_VIRTUALENVS_IN_PROJECT: 'true'
.shared_variables:
# Keep the git file permissions during job setup
keep_git_files_permissions: &keep_git_files_permissions
GIT_STRATEGY: clone
GIT_DEPTH: "5"
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: "true"
GIT_DEPTH: '5'
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 'true'
.shared_caches:
# Cache for front related jobs
front_cache: &front_cache
- key: front-yarn
paths: [$YARN_CACHE_FOLDER]
- key:
prefix: front-node_modules
files: [front/yarn.lock]
paths: [front/node_modules]
- key:
prefix: front-lint
files:
- front/.eslintcache
- front/tsconfig.tsbuildinfo
yarn_cache: &yarn_cache
key: front-yarn-$CI_COMMIT_REF_SLUG
paths: [$YARN_CACHE_FOLDER]
node_cache: &node_cache
key:
prefix: front-node_modules-$CI_COMMIT_REF_SLUG
files: [front/yarn.lock]
paths: [front/node_modules]
lint_cache: &lint_cache
key:
prefix: front-lint-$CI_COMMIT_REF_SLUG
files:
- front/.eslintcache
- front/tsconfig.tsbuildinfo
cypress_cache: &cypress_cache
key: cypress-cache-$CI_COMMIT_REF_SLUG
paths:
- /root/.cache/Cypress
# Cache for api related jobs
# Include the python version to prevent losing caches in the test matrix
api_cache: &api_cache
- key: api-pip-$PYTHON_VERSION
- key: api-pip-$CI_COMMIT_REF_SLUG
paths: [$PIP_CACHE_DIR]
- key:
prefix: api-venv-$PYTHON_VERSION
prefix: api-venv-$CI_COMMIT_REF_SLUG
files: [api/poetry.lock]
paths: [api/.venv]
# Cache for docs related jobs
docs_cache: &docs_cache
- key: docs-pip
- key: docs-pip-$CI_COMMIT_REF_SLUG
paths: [$PIP_CACHE_DIR]
- key:
prefix: docs-venv
prefix: docs-venv-$CI_COMMIT_REF_SLUG
files: [docs/poetry.lock]
paths: [docs/.venv]
@@ -71,6 +77,8 @@ workflow:
)
# Run for merge requests from any repo or branch
- if: $CI_MERGE_REQUEST_ID
# Run if NOCHANGELOG is added to the title
- if: $CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/
stages:
- review
@@ -97,7 +105,10 @@ review_front:
environment:
name: review/front/$CI_COMMIT_REF_NAME
url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/index.html
cache: *front_cache
cache:
- *yarn_cache
- *node_cache
- *lint_cache
before_script:
- mkdir front-review
- cd front
@@ -143,6 +154,7 @@ find_broken_links:
lychee
--cache
--no-progress
--include-fragments
--exclude-all-private
--exclude 'demo\.funkwhale\.audio'
--exclude 'nginx\.com'
@@ -150,13 +162,13 @@
-- . || exit $?
require_changelog:
allow_failure: true
allow_failure: false
stage: lint
rules:
# Don't run on merge requests that mention NOCHANGELOG or on renovate bot commits
- if: >
$CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/ ||
$CI_COMMIT_AUTHOR == "Renovate Bot <bot@dev.funkwhale.audio>"
$CI_COMMIT_AUTHOR == "RenovateBot <bot@dev.funkwhale.audio>"
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
@@ -175,7 +187,7 @@ lint_api:
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
- changes: [api/**/*]
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
cache: *api_cache
before_script:
- cd api
@@ -191,13 +203,15 @@ lint_front:
- changes: [front/**/*]
image: $CI_REGISTRY/funkwhale/ci/node-python:18
cache: *front_cache
cache:
- *yarn_cache
- *node_cache
- *lint_cache
before_script:
- cd front
- yarn install --frozen-lockfile
script:
- yarn lint --max-warnings 0
- yarn lint:tsc
- yarn lint
test_scripts:
stage: test
@@ -232,7 +246,7 @@ test_api:
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:$PYTHON_VERSION
parallel:
matrix:
- PYTHON_VERSION: ["3.10", "3.11", "3.12"]
- PYTHON_VERSION: ['3.11', '3.12', '3.13']
services:
- name: postgres:15-alpine
command:
@@ -242,11 +256,11 @@ test_api:
- name: redis:7-alpine
cache: *api_cache
variables:
DATABASE_URL: "postgresql://postgres@postgres/postgres"
FUNKWHALE_URL: "https://funkwhale.ci"
DATABASE_URL: 'postgresql://postgres@postgres/postgres'
FUNKWHALE_URL: 'https://funkwhale.ci'
DJANGO_SETTINGS_MODULE: config.settings.local
POSTGRES_HOST_AUTH_METHOD: trust
CACHE_URL: "redis://redis:6379/0"
CACHE_URL: 'redis://redis:6379/0'
before_script:
- cd api
- make install
@@ -277,7 +291,10 @@ test_front:
- changes: [front/**/*]
image: $CI_REGISTRY/funkwhale/ci/node-python:18
cache: *front_cache
cache:
- *yarn_cache
- *node_cache
- *lint_cache
before_script:
- cd front
- yarn install --frozen-lockfile
@@ -316,12 +333,13 @@ test_integration:
image:
name: cypress/included:13.6.4
entrypoint: [""]
entrypoint: ['']
cache:
- *front_cache
- key:
paths:
- /root/.cache/Cypress
- *yarn_cache
- *node_cache
- *lint_cache
- *cypress_cache
before_script:
- cd front
- yarn install
@@ -339,24 +357,29 @@ build_api_schema:
# Add build_docs rules because it depends on the build_api_schema artifact
- changes: [docs/**/*]
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
services:
- postgres:15-alpine
- redis:7-alpine
cache: *api_cache
variables:
DATABASE_URL: "postgresql://postgres@postgres/postgres"
FUNKWHALE_URL: "https://funkwhale.ci"
DATABASE_URL: 'postgresql://postgres@postgres/postgres'
FUNKWHALE_URL: 'https://funkwhale.ci'
DJANGO_SETTINGS_MODULE: config.settings.local
POSTGRES_HOST_AUTH_METHOD: trust
CACHE_URL: "redis://redis:6379/0"
API_TYPE: "v1"
CACHE_URL: 'redis://redis:6379/0'
API_TYPE: 'v1'
before_script:
- cd api
- make install
- poetry run funkwhale-manage migrate
script:
- poetry run funkwhale-manage spectacular --file ../docs/schema.yml
- diff ../docs/schema.yml ./funkwhale_api/common/schema.yml || (
echo "Schema files do not match! run sudo docker compose run --rm
api funkwhale-manage spectacular > ./api/funkwhale_api/common/schema.yml" &&
exit 1
)
artifacts:
expire_in: 2 weeks
paths:
@@ -398,7 +421,10 @@ build_front:
variables:
<<: *keep_git_files_permissions
NODE_OPTIONS: --max-old-space-size=4096
cache: *front_cache
cache:
- *yarn_cache
- *node_cache
- *lint_cache
before_script:
- cd front
- yarn install --frozen-lockfile
@@ -508,7 +534,7 @@ docker:
<<: *keep_git_files_permissions
DOCKER_HOST: tcp://docker:2375/
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
DOCKER_TLS_CERTDIR: ''
BUILDKIT_PROGRESS: plain
DOCKER_CACHE_IMAGE: $CI_REGISTRY/funkwhale/funkwhale/cache
......
@@ -9,7 +9,7 @@
"group:monorepos",
"group:recommended"
],
"baseBranches": ["stable", "develop"],
"baseBranches": ["develop"],
"branchConcurrentLimit": 2,
"prConcurrentLimit": 1,
"rangeStrategy": "pin",
@@ -55,19 +55,19 @@
},
{
"groupName": "vueuse",
"matchPackageNames": ["/^@vueuse/.*/"]
"matchDepNames": ["/^@vueuse/.*/"]
},
{
"matchPackageNames": ["channels", "channels-redis", "daphne"],
"matchDepNames": ["channels", "channels-redis", "daphne"],
"groupName": "channels"
},
{
"matchPackageNames": ["node"],
"matchDepNames": ["node"],
"allowedVersions": "/\\d+[02468]$/"
},
{
"matchFileNames": ["deploy/docker-compose.yml"],
"matchPackageNames": ["postgres"],
"matchDepNames": ["postgres"],
"postUpgradeTasks": {
"commands": [
"echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update"
@@ -76,7 +76,7 @@
}
},
{
"matchPackageNames": ["python"],
"matchDepNames": ["python"],
"rangeStrategy": "widen"
}
]
......
version: "3"
version: '3'
services:
postgres:
image: postgres:15-alpine
environment:
- "POSTGRES_HOST_AUTH_METHOD=trust"
- 'POSTGRES_HOST_AUTH_METHOD=trust'
volumes:
- "../data/postgres:/var/lib/postgresql/data"
- '../data/postgres:/var/lib/postgresql/data'
ports:
- 5432:5432
redis:
image: redis:7-alpine
volumes:
- "../data/redis:/data"
- '../data/redis:/data'
ports:
- 6379:6379
@@ -26,14 +26,14 @@ services:
extra_hosts:
- host.docker.internal:host-gateway
environment:
- "NGINX_MAX_BODY_SIZE=100M"
- "FUNKWHALE_API_IP=host.docker.internal"
- "FUNKWHALE_API_HOST=host.docker.internal"
- "FUNKWHALE_API_PORT=5000"
- "FUNKWHALE_FRONT_IP=host.docker.internal"
- "FUNKWHALE_FRONT_PORT=8080"
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}"
- "FUNKWHALE_PROTOCOL=https"
- 'NGINX_MAX_BODY_SIZE=100M'
- 'FUNKWHALE_API_IP=host.docker.internal'
- 'FUNKWHALE_API_HOST=host.docker.internal'
- 'FUNKWHALE_API_PORT=5000'
- 'FUNKWHALE_FRONT_IP=host.docker.internal'
- 'FUNKWHALE_FRONT_PORT=8080'
- 'FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}'
- 'FUNKWHALE_PROTOCOL=https'
volumes:
- ../data/media:/workspace/funkwhale/data/media:ro
- ../data/music:/music:ro
......
@@ -6,6 +6,13 @@ repos:
rev: v4.4.0
hooks:
- id: check-added-large-files
exclude: |
(?x)(
^api/funkwhale_api/common/schema.yml|
^api/tests/music/test_coverart.ogg|
^front/src/generated/types.ts
)
- id: check-case-conflict
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
@@ -62,6 +69,7 @@ repos:
hooks:
- id: prettier
files: \.(md|yml|yaml|json)$
exclude: 'api/funkwhale_api/common/schema.yml'
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
......
{
"bracketSameLine": false,
"bracketSpacing": true,
"embeddedLanguageFormatting": "off",
"htmlWhitespaceSensitivity": "strict",
"printWidth": 160,
"semi": false,
"singleAttributePerLine": true,
"singleQuote": true,
"trailingComma": "none",
"tabWidth": 2,
"useTabs": false,
"overrides": [
{
"files": "*.html",
"options": {
"singleAttributePerLine": false
}
},
{
"files": "*.json",
"options": {
"parser": "json",
"printWidth": 80
}
}
]
}
@@ -42,7 +42,12 @@ RUN set -eux; \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
python3-dev
python3-dev \
gfortran \
libgfortran \
openblas-dev \
py3-scipy \
py3-scikit-learn;
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
@@ -53,19 +58,28 @@ COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --upgrade pip; \
pip3 install setuptools wheel; \
pip3 install --upgrade pip;
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install setuptools wheel;
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
# Currently we are unable to reliably build rust-based packages on armv7. This
# is why we need to use the packages shipped by Alpine Linux.
# Since poetry does not allow in-place dependency pinning, we need
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles|scipy|scikit-learn' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles
watchfiles \
scipy \
scikit-learn;
ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \
@@ -102,6 +116,8 @@ RUN set -eux; \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
py3-scipy \
py3-scikit-learn \
python3 \
tzdata
@@ -111,6 +127,7 @@ ENV PATH="/venv/bin:$PATH"
COPY . /app
WORKDIR /app
RUN apk add --no-cache gfortran
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --no-deps --editable .
......
@@ -916,7 +916,7 @@ Example:
# Your common stuff: Below this line define 3rd party library settings
CELERY_TASK_DEFAULT_RATE_LIMIT = 1
CELERY_TASK_TIME_LIMIT = 300
CELERY_TASK_TIME_LIMIT = env.int("CELERY_TASK_TIME_LIMIT", default=300)
CELERY_BEAT_SCHEDULE = {
"audio.fetch_rss_feeds": {
"task": "audio.fetch_rss_feeds",
@@ -1398,6 +1398,7 @@ VERSATILEIMAGEFIELD_RENDITION_KEY_SETS = {
],
"attachment_square": [
("original", "url"),
("small_square_crop", "crop__50x50"),
("medium_square_crop", "crop__200x200"),
("large_square_crop", "crop__600x600"),
],
@@ -1534,3 +1535,10 @@ Typesense hostname. Defaults to `localhost` on non-Docker deployments and to `typesense` on
Docker deployments.
"""
TYPESENSE_NUM_TYPO = env("TYPESENSE_NUM_TYPO", default=5)
"""
Maximum number of tracks to download when the THIRD_PARTY_UPLOAD plugin hook is triggered.
Each API request for playlist tracks or radio tracks triggers the hook when track uploads are missing.
If your instance is large, your IP might get rate limited.
"""
THIRD_PARTY_UPLOAD_MAX_UPLOADS = env.int("THIRD_PARTY_UPLOAD_MAX_UPLOADS", default=10)
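Illustration (not part of the diff): because the new setting is read with env.int, an operator can lower the cap through the environment to reduce the rate-limiting risk described in the docstring. A minimal sketch, assuming a django-environ style Env object (the env helper used in the settings module may be a thin wrapper around it); the value 5 is only an example:
import os
import environ  # django-environ; assumed here, not shown in the diff

os.environ.setdefault("THIRD_PARTY_UPLOAD_MAX_UPLOADS", "5")  # e.g. set in the deployment .env
env = environ.Env()
limit = env.int("THIRD_PARTY_UPLOAD_MAX_UPLOADS", default=10)
assert limit == 5  # the environment override wins over the default of 10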
@@ -154,4 +154,4 @@ REST_FRAMEWORK.update(
)
# allows makemigrations and superuser creation
FORCE = env("FORCE", default=1)
FORCE = env("FORCE", default=True)
import os
os.environ.setdefault("FUNKWHALE_URL", "http://funkwhale.dev")
from .common import * # noqa
DEBUG = True
SECRET_KEY = "a_super_secret_key!"
TYPESENSE_API_KEY = "apikey"
@@ -37,7 +37,7 @@ class ChannelFilter(moderation_filters.HiddenContentFilterSet):
# tuple-mapping retains order
fields=(
("creation_date", "creation_date"),
("artist_credit__artist__modification_date", "modification_date"),
("artist__modification_date", "modification_date"),
("?", "random"),
)
)
......
@@ -263,6 +263,7 @@ class ChannelSerializer(serializers.ModelSerializer):
attributed_to = federation_serializers.APIActorSerializer()
rss_url = serializers.CharField(source="get_rss_url")
url = serializers.SerializerMethodField()
subscriptions_count = serializers.SerializerMethodField()
class Meta:
model = models.Channel
@@ -276,6 +277,7 @@
"rss_url",
"url",
"downloads_count",
"subscriptions_count",
]
def to_representation(self, obj):
@@ -284,6 +286,7 @@
data["subscriptions_count"] = self.get_subscriptions_count(obj)
return data
@extend_schema_field(OpenApiTypes.INT)
def get_subscriptions_count(self, obj) -> int:
return obj.actor.received_follows.exclude(approved=False).count()
......
@@ -2,10 +2,12 @@ from django import http
from django.db import transaction
from django.db.models import Count, Prefetch, Q, Sum
from django.utils import timezone
from drf_spectacular.utils import extend_schema, extend_schema_view
from drf_spectacular.utils import extend_schema, extend_schema_view, inline_serializer
from rest_framework import decorators, exceptions, mixins
from rest_framework import permissions as rest_permissions
from rest_framework import response, viewsets
from rest_framework import response
from rest_framework import serializers as rest_serializers
from rest_framework import viewsets
from funkwhale_api.common import locales, permissions, preferences
from funkwhale_api.common import utils as common_utils
@@ -210,6 +212,32 @@ class ChannelViewSet(
data = serializers.rss_serialize_channel_full(channel=object, uploads=uploads)
return response.Response(data, status=200)
@extend_schema(
responses=inline_serializer(
name="MetedataChoicesSerializer",
fields={
"language": rest_serializers.ListField(
child=inline_serializer(
name="LanguageItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
},
)
),
"itunes_category": rest_serializers.ListField(
child=inline_serializer(
name="iTunesCategoryItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
"children": rest_serializers.CharField(),
},
)
),
},
)
)
@decorators.action(
methods=["get"],
detail=False,
......
@@ -31,6 +31,7 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
}
)
# Federated TrackFavorites don't have a user associated with trackfavorite.actor
# TODO: if we implement the followers privacy_level, this will become a problem
no_user_query = models.Q(**{f"{user_field}__isnull": True})
return (
......
@@ -257,6 +257,13 @@ class Attachment(models.Model):
proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
return federation_utils.full_url(proxy_url + "?next=original")
@property
def download_url_small_square_crop(self):
if self.file:
return utils.media_url(self.file.crop["50x50"].url)
proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
return federation_utils.full_url(proxy_url + "?next=small_square_crop")
@property
def download_url_medium_square_crop(self):
if self.file:
......
@@ -76,11 +76,13 @@ class PrivacyLevelPermission(BasePermission):
# to avoid leaking data (#2326)
return True
privacy_level = (
obj.actor.user.privacy_level
if hasattr(obj, "actor")
else obj.user.privacy_level
)
if hasattr(obj, "privacy_level"):
privacy_level = obj.privacy_level
elif hasattr(obj, "actor") and obj.actor.user:
privacy_level = obj.actor.user.privacy_level
else:
privacy_level = obj.user.privacy_level
obj_actor = obj.actor if hasattr(obj, "actor") else obj.user.actor
if privacy_level == "everyone":
@@ -106,9 +108,7 @@
elif privacy_level == "me" and obj_actor == request_actor:
return True
elif privacy_level == "followers" and (
request_actor in obj.user.actor.get_approved_followers()
):
elif request_actor in obj_actor.get_approved_followers():
return True
else:
return False
This diff is collapsed.
@@ -293,11 +293,22 @@ class AttachmentSerializer(serializers.Serializer):
file = StripExifImageField(write_only=True)
urls = serializers.SerializerMethodField()
@extend_schema_field(OpenApiTypes.OBJECT)
@extend_schema_field(
{
"type": "object",
"properties": {
"original": {"type": "string"},
"small_square_crop": {"type": "string"},
"medium_square_crop": {"type": "string"},
"large_square_crop": {"type": "string"},
},
}
)
def get_urls(self, o):
urls = {}
urls["source"] = o.url
urls["original"] = o.download_url_original
urls["small_square_crop"] = o.download_url_small_square_crop
urls["medium_square_crop"] = o.download_url_medium_square_crop
urls["large_square_crop"] = o.download_url_large_square_crop
return urls
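Illustration (not part of the diff): with the new rendition wired into get_urls above, the serialized urls object gains a small_square_crop entry alongside the existing keys. A sketch of the resulting shape; every URL below is a placeholder, the real values come from the attachment's download_url_* properties:
urls = {
    "source": "https://music.example/remote/cover.jpg",                              # placeholder
    "original": "https://music.example/media/attachments/abc.jpg",                   # placeholder
    "small_square_crop": "https://music.example/media/attachments/abc-50x50.jpg",    # new in this change
    "medium_square_crop": "https://music.example/media/attachments/abc-200x200.jpg", # placeholder
    "large_square_crop": "https://music.example/media/attachments/abc-600x600.jpg",  # placeholder
}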
......
@@ -176,7 +176,12 @@ class AttachmentViewSet(
return r
size = request.GET.get("next", "original").lower()
if size not in ["original", "medium_square_crop", "large_square_crop"]:
if size not in [
"original",
"small_square_crop",
"medium_square_crop",
"large_square_crop",
]:
size = "original"
try:
......