Commit 7864496d authored by petitminion's avatar petitminion
Browse files

Merge branch 'develop' of https://dev.funkwhale.audio/funkwhale/funkwhale into develop

parents 09a2d06d 0873a6e9
Pipeline #17608 failed with stages
in 3 minutes and 5 seconds
......@@ -8,10 +8,11 @@ variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/pip-cache"
PYTHONDONTWRITEBYTECODE: "true"
REVIEW_DOMAIN: preview.funkwhale.audio
REVIEW_INSTANCE_URL: https://demo.funkwhale.audio
REVIEW_INSTANCE_URL: https://funkwhale.juniorjpdj.pl
DOCKER_HOST: tcp://docker:2375/
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ""
BUILD_PLATFORMS: linux/amd64,linux/arm64,linux/arm/v7
stages:
- review
......@@ -31,6 +32,7 @@ review_front:
BASE_URL: /-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/
VUE_APP_ROUTER_BASE_URL: /-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/
VUE_APP_INSTANCE_URL: $REVIEW_INSTANCE_URL
NODE_ENV: review
before_script:
- curl -L -o /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64
- chmod +x /usr/local/bin/jq
......@@ -39,7 +41,6 @@ review_front:
- cd front
script:
- yarn install
- yarn run i18n-compile
# this is to ensure we don't have any errors in the output,
# cf https://dev.funkwhale.audio/funkwhale/funkwhale/issues/169
- yarn run build | tee /dev/stderr | (! grep -i 'ERROR in')
......@@ -104,6 +105,9 @@ black:
- pip install black==19.10b0
script:
- black --check --diff api/
only:
changes:
- api/**/*
flake8:
interruptible: true
......@@ -119,6 +123,9 @@ flake8:
key: "$CI_PROJECT_ID__flake8_pip_cache"
paths:
- "$PIP_CACHE_DIR"
only:
changes:
- api/**/*
eslint:
interruptible: true
......@@ -129,14 +136,14 @@ eslint:
- cd front
- yarn install
script:
# We search for all files ending with .vue or .js in src which changed in relation to develop
# and lint them. This way we focus on some errors instead of checking the whole repository
- export changedFiles=$(git diff --relative --name-only --diff-filter=d origin/develop -- src/ | grep -E "\.(vue|js)$")
- yarn run eslint --quiet -f table $(echo $changedFiles | tr '\n' ' ')
- yarn lint --max-warnings 0
cache:
key: "$CI_PROJECT_ID__eslint_npm_cache"
paths:
- front/node_modules
only:
changes:
- front/**/*
test_api:
interruptible: true
......@@ -156,7 +163,10 @@ test_api:
POSTGRES_HOST_AUTH_METHOD: trust
CACHE_URL: "redis://redis:6379/0"
only:
- branches
refs:
- branches
changes:
- api/**/*
before_script:
- cd api
- pip3 install -r requirements/base.txt
......@@ -179,7 +189,10 @@ test_front:
before_script:
- cd front
only:
- branches
refs:
- branches
changes:
- front/**/*
script:
- yarn install --check-files
- yarn test:unit
......@@ -247,28 +260,55 @@ pages:
tags:
- docker
docker_release:
.docker_publish:
stage: deploy
image: egon0/docker-with-buildx-and-git:bash
tags:
- multiarch
services:
- docker:20-dind
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker login -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
- cp -r front/dist api/frontend
- (if [ "$CI_COMMIT_REF_NAME" == "develop" ] || [ "$CI_COMMIT_REF_NAME" == "stable" ]; then ./scripts/set-api-build-metadata.sh $(echo $CI_COMMIT_SHA | cut -c 1-8); fi);
script:
- if [[ ! -z "$CI_COMMIT_TAG" ]]; then (./docs/get-releases-json.py | scripts/is-docker-latest.py $CI_COMMIT_TAG -) && export DOCKER_LATEST_TAG="-t $IMAGE_LATEST" || export DOCKER_LATEST_TAG=; fi
- if [[ "$CI_COMMIT_REF_NAME" =~ ^[0-9]+(.[0-9]+){1,2}$ ]]; then export stable=1 && export major="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1)" && export minor="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1,2)"; fi
docker_publish_stable_release:
# Publish a docker image for releases
extends: .docker_publish
rules:
- if: $CI_COMMIT_TAG && $CI_COMMIT_REF_NAME =~ /^[0-9]+(.[0-9]+){1,2}$/
script:
# Check if this is the latest release
- ./docs/get-releases-json.py | scripts/is-docker-latest.py $CI_COMMIT_TAG - && export DOCKER_LATEST_TAG="-t $IMAGE_LATEST" || export DOCKER_LATEST_TAG=;
- export major="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1)"
- export minor="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1,2)"; fi
- cd api
- docker build -t $IMAGE $DOCKER_LATEST_TAG .
- docker push $IMAGE
- if [[ ! -z "$DOCKER_LATEST_TAG" ]]; then docker push $IMAGE_LATEST; fi
- if [[ $stable == 1 ]]; then docker tag $IMAGE $IMAGE_NAME:$major && docker push $IMAGE_NAME:$major; fi
- if [[ $stable == 1 ]]; then docker tag $IMAGE $IMAGE_NAME:$minor && docker push $IMAGE_NAME:$minor; fi
- docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
- docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE -t $DOCKER_LATEST_TAG -t $IMAGE_NAME:$major -t $IMAGE_NAME:$minor .
docker_publish_unstable_release:
# Publish a docker image for unstable releases (tags that are not plain version numbers)
extends: .docker_publish
rules:
- if: $CI_COMMIT_TAG && $CI_COMMIT_REF_NAME !~ /^[0-9]+(.[0-9]+){1,2}$/
script:
# Build and push the multi-arch image for this unstable tag
- cd api
- docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
- docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE .
docker_published_non-release:
# Publish a docker image for each commit on develop
extends: .docker_publish
only:
- develop@funkwhale/funkwhale
- stable@funkwhale/funkwhale
- tags@funkwhale/funkwhale
script:
- ./scripts/set-api-build-metadata.sh $CI_COMMIT_SHORT_SHA
- cd api
- docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
- docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE .
docker_all_in_one_release:
stage: deploy
......@@ -318,29 +358,3 @@ build_api:
- master@funkwhale/funkwhale
- stable@funkwhale/funkwhale
- develop@funkwhale/funkwhale
check_api_dependencies:
interruptible: true
stage: deps
image: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
cache:
key: "$CI_PROJECT_ID__pip_cache"
paths:
- "$PIP_CACHE_DIR"
variables:
DJANGO_SETTINGS_MODULE: config.settings.local
POSTGRES_HOST_AUTH_METHOD: trust
only:
- branches
before_script:
- apk add make git gcc python3-dev musl-dev
- apk add postgresql-dev py3-psycopg2 libldap libffi-dev make zlib-dev jpeg-dev openldap-dev
- cd api
- pip3 install -r requirements/base.txt
- pip3 install -r requirements/local.txt
- pip3 install -r requirements/test.txt
script:
- $CI_PROJECT_DIR/scripts/check-api-deps.sh
tags:
- docker
allow_failure: true
Please avoid merging the base branch into your feature branch. We are working with rebases, and those merges tend to cause trouble.
For further questions, join us at Matrix: https://matrix.to/#/#funkwhale-dev:matrix.org
If your contribution is fixing an issue by a small change, please consider a merge into `stable` by using it as target branch.
Related issue: #XXX <!-- it's okay to have no issue for small changes -->
This Merge Request includes:
......
......@@ -3,7 +3,7 @@ Changelog
You can subscribe to release announcements by:
- Following `funkwhale@mastodon.eliotberriot.com <https://mastodon.eliotberriot.com/@funkwhale>`_ on Mastodon
- Following `@funkwhale@fosstodon.org <https://fosstodon.org/@funkwhale>`_ on Mastodon
- Subscribing to the following Atom feed: https://dev.funkwhale.audio/funkwhale/funkwhale/commits/develop?format=atom&search=Merge+tag
This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.html.
......@@ -11,12 +11,16 @@ This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.
.. towncrier
1.1.4 (2021-08-02)
------------------
Upgrade instructions are available at
https://docs.funkwhale.audio/admin/upgrading.html
- Pinned version of asgiref to avoid trouble with latest release. For further information, see #1516
1.1.3 (2021-08-02)
------------------
Upgrade instructions are available at
https://docs.funkwhale.audio/admin/upgrading.html
......
......@@ -759,7 +759,8 @@ To make a new 3.4 release::
# polish changelog
# - update the date
# - look for typos
# - add list of contributors via `python3 scripts/get-contributions-stats.py develop $PREVIOUS_RELEASE`
# - add list of contributors via `python3 scripts/get-contributions-stats.py $NEXT_RELEASE`
git log $PREVIOUS_RELEASE.. --format="%aN" --reverse | sort | uniq # Get all commit authors since last release
nano CHANGELOG
# Set the `__version__` variable to $NEXT_RELEASE
......
......@@ -9,6 +9,7 @@ RUN \
postgresql-dev \
python3-dev \
py3-psycopg2 \
py3-cryptography \
libldap \
libffi-dev \
make \
......@@ -23,7 +24,7 @@ RUN \
ln -s /usr/bin/python3 /usr/bin/python
# create virtual env for next stage
RUN python -m venv /venv
RUN python -m venv --system-site-packages /venv
# emulate activation by prefixing PATH
ENV PATH="/venv/bin:$PATH" VIRTUAL_ENV=/venv
......@@ -35,7 +36,7 @@ RUN \
echo 'installing pip requirements' && \
pip3 install --upgrade pip && \
pip3 install setuptools wheel && \
pip3 install -r /requirements/base.txt && \
pip3 install -r /requirements/base.txt cryptography==3.3.2 && \
rm -rf $PIP_DOWNLOAD_CACHE
ARG install_dev_deps=0
......@@ -60,6 +61,7 @@ RUN apk add --no-cache \
libpq \
libxml2 \
libxslt \
py3-cryptography \
&& \
ln -s /usr/bin/python3 /usr/bin/python
......
......@@ -621,7 +621,9 @@ OAUTH2_PROVIDER = {
# we keep expired tokens for 15 days, for traceability
"REFRESH_TOKEN_EXPIRE_SECONDS": 3600 * 24 * 15,
"AUTHORIZATION_CODE_EXPIRE_SECONDS": 5 * 60,
"ACCESS_TOKEN_EXPIRE_SECONDS": 60 * 60 * 10,
"ACCESS_TOKEN_EXPIRE_SECONDS": env.int(
"ACCESS_TOKEN_EXPIRE_SECONDS", default=60 * 60 * 10
),
"OAUTH2_SERVER_CLASS": "funkwhale_api.users.oauth.server.OAuth2Server",
}
OAUTH2_PROVIDER_APPLICATION_MODEL = "users.Application"
......
# -*- coding: utf-8 -*-
__version__ = "1.1.4"
__version__ = "1.2.0-rc1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
......
from config import plugins
import funkwhale_api
from .funkwhale_startup import PLUGIN
from .client import ListenBrainzClient, Track
......@@ -21,7 +22,10 @@ def get_track(track):
title = track.title
album = None
additional_info = {
"listening_from": "Funkwhale",
"media_player": "Funkwhale",
"media_player_version": funkwhale_api.__version__,
"submission_client": "Funkwhale ListenBrainz plugin",
"submission_client_version": PLUGIN["version"],
"tracknumber": track.position,
"discnumber": track.disc_number,
}
......
......@@ -6,7 +6,7 @@ PLUGIN = plugins.get_plugin_config(
label="ListenBrainz",
description="A plugin that allows you to submit your listens to ListenBrainz.",
homepage="https://docs.funkwhale.audio/users/builtinplugins.html#listenbrainz-plugin", # noqa
version="0.1",
version="0.2",
user=True,
conf=[
{
......
......@@ -5,6 +5,9 @@ import tempfile
import urllib.parse
import uuid
from django.db.models.expressions import OuterRef, Subquery
from django.db.models.query_utils import Q
import arrow
import pydub
from django.conf import settings
......@@ -319,6 +322,19 @@ class AlbumQuerySet(common_models.LocalFromFidQuerySet, models.QuerySet):
else:
return self.exclude(pk__in=matches)
def with_duration(self):
    """Annotate each album with a ``duration`` (in seconds) equal to the
    summed durations of its tracks, counting a single upload per track.

    Without the subquery filter, a track that has several uploads would
    contribute its duration once per upload.
    """
    # Deterministically pick one upload per track (the lowest id).
    first_upload = Subquery(
        Upload.objects.filter(track_id=OuterRef("tracks"))
        .order_by("id")
        .values("id")[:1]
    )
    total = models.Sum(
        "tracks__uploads__duration",
        filter=Q(tracks__uploads=first_upload),
    )
    return self.annotate(duration=total)
class Album(APIModelMixin):
title = models.CharField(max_length=MAX_LENGTHS["ALBUM_TITLE"])
......
......@@ -201,6 +201,7 @@ class AlbumSerializer(OptionalDescriptionMixin, serializers.Serializer):
release_date = serializers.DateField()
creation_date = serializers.DateTimeField()
is_local = serializers.BooleanField()
duration = serializers.SerializerMethodField(read_only=True)
get_attributed_to = serialize_attributed_to
......@@ -222,6 +223,13 @@ class AlbumSerializer(OptionalDescriptionMixin, serializers.Serializer):
tagged_items = getattr(obj, "_prefetched_tagged_items", [])
return [ti.tag.name for ti in tagged_items]
def get_duration(self, obj):
    """Return the album duration (seconds) computed by ``with_duration``.

    Falls back to 0 when the serialized object does not carry the
    annotation (i.e. the queryset never called ``.with_duration()``).
    """
    # getattr with a default mirrors the original try/except AttributeError.
    return getattr(obj, "duration", 0)
class TrackAlbumSerializer(serializers.ModelSerializer):
artist = serializers.SerializerMethodField()
......
from django.db import models, transaction
from django.db.models.expressions import OuterRef, Subquery
from django.db.models import Q
from django.utils import timezone
from rest_framework import exceptions
......@@ -11,8 +13,18 @@ class PlaylistQuerySet(models.QuerySet):
return self.annotate(_tracks_count=models.Count("playlist_tracks"))
def with_duration(self):
    """Annotate each playlist with a ``duration`` (in seconds) equal to
    the summed durations of its tracks, counting one upload per track.

    The subquery filter prevents a track that has several uploads from
    being counted once per upload.
    """
    # Deterministically pick one upload per track (the lowest id).
    first_upload = Subquery(
        music_models.Upload.objects.filter(
            track_id=OuterRef("playlist_tracks__track__id")
        )
        .order_by("id")
        .values("id")[:1]
    )
    # The stale unfiltered Sum line left over from the old revision is
    # removed: it duplicated the `duration=` keyword argument.
    return self.annotate(
        duration=models.Sum(
            "playlist_tracks__track__uploads__duration",
            filter=Q(playlist_tracks__track__uploads=first_upload),
        )
    )
def with_covers(self):
......
......@@ -147,10 +147,17 @@ def get_album2_data(album):
"name": album.title,
"artist": album.artist.name,
"created": to_subsonic_date(album.creation_date),
"duration": album.duration,
"playCount": album.tracks.aggregate(l=Count("listenings"))["l"] or 0,
}
if album.attachment_cover_id:
payload["coverArt"] = "al-{}".format(album.id)
if album.tagged_items:
# exposes only first genre since the specification uses singular noun
first_genre = album.tagged_items.first()
payload["genre"] = first_genre.tag.name if first_genre else ""
if album.release_date:
payload["year"] = album.release_date.year
try:
payload["songCount"] = album._tracks_count
except AttributeError:
......
......@@ -265,7 +265,8 @@ class SubsonicViewSet(viewsets.GenericViewSet):
detail=False, methods=["get", "post"], url_name="get_album", url_path="getAlbum"
)
@find_object(
music_models.Album.objects.select_related("artist"), filter_playable=True
music_models.Album.objects.with_duration().select_related("artist"),
filter_playable=True,
)
def get_album(self, request, *args, **kwargs):
album = kwargs.pop("obj")
......@@ -443,6 +444,7 @@ class SubsonicViewSet(viewsets.GenericViewSet):
)
)
.with_tracks_count()
.with_duration()
.order_by("artist__name")
)
data = request.GET or request.POST
......@@ -533,9 +535,9 @@ class SubsonicViewSet(viewsets.GenericViewSet):
"subsonic": "album",
"search_fields": ["title"],
"queryset": (
music_models.Album.objects.with_tracks_count().select_related(
"artist"
)
music_models.Album.objects.with_duration()
.with_tracks_count()
.select_related("artist")
),
"serializer": serializers.get_album_list2_data,
},
......
......@@ -41,7 +41,7 @@ channels_redis~=3.3.0
uvicorn[standard]~=0.14.0
gunicorn~=20.1.0
cryptography~=3.4.7
cryptography>=3.3.2
# requests-http-signature==0.0.3
# clone until the branch is merged and released upstream
git+https://github.com/agateblue/requests-http-signature.git@signature-header-support
......@@ -71,4 +71,4 @@ feedparser~=6.0.0
watchdog~=2.1.2
## Pin third party dependency to avoid issue with latest version
asgiref==3.3.4
asgiref==3.3.4 #1516
......@@ -182,6 +182,7 @@ def test_album_serializer(factories, to_api_date):
"artist": serializers.serialize_artist_simple(album.artist),
"creation_date": to_api_date(album.creation_date),
"is_playable": False,
"duration": 0,
"cover": common_serializers.AttachmentSerializer(album.attachment_cover).data,
"release_date": to_api_date(album.release_date),
"tracks_count": 2,
......@@ -214,6 +215,7 @@ def test_track_album_serializer(factories, to_api_date):
"cover": common_serializers.AttachmentSerializer(album.attachment_cover).data,
"release_date": to_api_date(album.release_date),
"tracks_count": 2,
"duration": 0,
"is_local": album.is_local,
"tags": [],
"attributed_to": federation_serializers.APIActorSerializer(actor).data,
......@@ -605,3 +607,44 @@ def test_sort_uploads_for_listen(factories):
remote_upload_with_local_version,
]
assert serializers.sort_uploads_for_listen(unsorted) == expected
def test_album_serializer_includes_duration(tmpfile, factories):
    """AlbumSerializer exposes the ``with_duration()`` annotation,
    counting a single upload per track (21 + 21 == 42)."""
    album = factories["music.Album"]()
    library = factories["music.Library"]()
    track1 = factories["music.Track"](album=album)
    track2 = factories["music.Track"](album=album)
    # track1 receives two identical uploads: only one may be counted.
    for track in (track1, track1, track2):
        factories["music.Upload"](
            source="file://{}".format(tmpfile.name),
            track=track,
            checksum="old",
            library=library,
            import_status="finished",
            audio_file=None,
            duration=21,
        )
    qs = album.__class__.objects.with_duration()
    serializer = serializers.AlbumSerializer(qs.get())
    assert serializer.data["duration"] == 42
......@@ -31,15 +31,46 @@ def test_playlist_serializer_include_covers(factories, api_request):
assert serializer.data["album_covers"] == expected
def test_playlist_serializer_include_duration(tmpfile, factories):
    """PlaylistSerializer exposes the ``with_duration()`` annotation,
    counting a single upload per track (21 + 21 == 42).

    Stale pre-change lines (the old signature, the upload1/upload2
    setup and the obsolete ``== 45`` assertion) are removed: they
    conflicted with the new fixture setup and expected value.
    """
    playlist = factories["playlists.Playlist"]()
    library = factories["music.Library"]()
    track1 = factories["music.Track"]()
    track2 = factories["music.Track"]()
    # track1 receives two identical uploads: only one may be counted.
    for track in (track1, track1, track2):
        factories["music.Upload"](
            source="file://{}".format(tmpfile.name),
            track=track,
            checksum="old",
            library=library,
            import_status="finished",
            audio_file=None,
            duration=21,
        )
    playlist.insert_many([track1, track2])
    qs = playlist.__class__.objects.with_duration().with_tracks_count()
    serializer = serializers.PlaylistSerializer(qs.get())
    assert serializer.data["duration"] == 42
def test_playlist_serializer(factories, to_api_date):
......
import datetime
from django.db.models.aggregates import Count
import pytest
from funkwhale_api.music import models as music_models
......@@ -171,7 +173,7 @@ def test_get_album_serializer(factories):
album = factories["music.Album"](artist=artist, with_cover=True)
track = factories["music.Track"](album=album, disc_number=42)
upload = factories["music.Upload"](track=track, bitrate=42000, duration=43, size=44)
tagged_item = factories["tags.TaggedItem"](content_object=album, tag__name="foo")
expected = {
"id": album.pk,
"artistId": artist.pk,
......@@ -181,6 +183,9 @@ def test_get_album_serializer(factories):
"created": serializers.to_subsonic_date(album.creation_date),
"year": album.release_date.year,
"coverArt": "al-{}".format(album.id),
"genre": tagged_item.tag.name,
"duration": 43,
"playCount": album.tracks.aggregate(l=Count("listenings"))["l"] or 0,
"song": [
{
"id": track.pk,
......@@ -206,7 +211,9 @@ def test_get_album_serializer(factories):
],
}
assert serializers.GetAlbumSerializer(album).data == expected
qs = album.__class__.objects.with_duration()
assert serializers.GetAlbumSerializer(qs.first()).data == expected
def test_starred_tracks2_serializer(factories):
......@@ -222,10 +229,10 @@ def test_starred_tracks2_serializer(factories):
def test_get_album_list2_serializer(factories):
album1 = factories["music.Album"]()
album2 = factories["music.Album"]()
album1 = factories["music.Album"]().__class__.objects.with_duration().first()
album2 = factories["music.Album"]().__class__.objects.with_duration().last()