Verified Commit f3ce4f44 authored by Eliot Berriot

Merge branch 'release/0.16'

parents b206c3cf c70a50c8
Pipeline #1665 passed with stages in 6 minutes and 12 seconds
@@ -10,5 +10,4 @@ PYTHONDONTWRITEBYTECODE=true
WEBPACK_DEVSERVER_PORT=8080
MUSIC_DIRECTORY_PATH=/music
BROWSABLE_API_ENABLED=True
CACHEOPS_ENABLED=False
FORWARDED_PROTO=http
@@ -91,3 +91,5 @@ data/
po/*.po
docs/swagger
_build
front/src/translations.json
front/locales/en_US/LC_MESSAGES/app.po
@@ -4,7 +4,8 @@ variables:
IMAGE_LATEST: $IMAGE_NAME:latest
PIP_CACHE_DIR: "$CI_PROJECT_DIR/pip-cache"
PYTHONDONTWRITEBYTECODE: "true"
REVIEW_DOMAIN: preview.funkwhale.audio
REVIEW_INSTANCE_URL: https://demo.funkwhale.audio
stages:
- review
@@ -19,37 +20,42 @@ review_front:
when: manual
allow_failure: true
before_script:
- curl -L -o /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64
- chmod +x /usr/local/bin/jq
- cd front
script:
- yarn install
- yarn run i18n-compile
# this is to ensure we don't have any errors in the output,
# cf https://code.eliotberriot.com/funkwhale/funkwhale/issues/169
- INSTANCE_URL=$REVIEW_INSTANCE_URL yarn run build | tee /dev/stderr | (! grep -i 'ERROR in')
- mkdir -p /static/front/$CI_BUILD_REF_SLUG
- cp -r dist/* /static/front/$CI_BUILD_REF_SLUG
- mkdir -p /static/front/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
- cp -r dist/* /static/front/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
cache:
key: "$CI_PROJECT_ID__front_dependencies"
key: "funkwhale__front_dependencies"
paths:
- front/node_modules
- front/yarn.lock
environment:
name: review/front-$CI_BUILD_REF_NAME
url: http://front-$CI_BUILD_REF_SLUG.$REVIEW_DOMAIN
name: review/front/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
url: http://front-$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG.$REVIEW_DOMAIN
on_stop: stop_front_review
only:
- branches@funkwhale/funkwhale
- branches
tags:
- funkwhale-review
stop_front_review:
stage: review
script:
- rm -rf /static/front/$CI_BUILD_REF_SLUG/
- rm -rf /static/front/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG/
variables:
GIT_STRATEGY: none
when: manual
only:
- branches
environment:
name: review/front-$CI_BUILD_REF_NAME
name: review/front/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
action: stop
tags:
- funkwhale-review
@@ -63,33 +69,38 @@ review_docs:
BUILD_PATH: "../public"
before_script:
- cd docs
- apt-get update
- apt-get install -y graphviz
- pip install sphinx
cache:
key: "$CI_PROJECT_ID__sphinx"
paths:
- "$PIP_CACHE_DIR"
script:
- pip install sphinx
- ./build_docs.sh
- mkdir -p /static/docs/$CI_BUILD_REF_SLUG
- cp -r $CI_PROJECT_DIR/public/* /static/docs/$CI_BUILD_REF_SLUG
- mkdir -p /static/docs/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
- cp -r $CI_PROJECT_DIR/public/* /static/docs/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
environment:
name: review/docs-$CI_BUILD_REF_NAME
url: http://docs-$CI_BUILD_REF_SLUG.$REVIEW_DOMAIN
name: review/docs/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
url: http://docs-$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG.$REVIEW_DOMAIN
on_stop: stop_docs_review
only:
- branches@funkwhale/funkwhale
- branches
tags:
- funkwhale-review
stop_docs_review:
stage: review
script:
- rm -rf /static/docs/$CI_BUILD_REF_SLUG/
- rm -rf /static/docs/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG/
variables:
GIT_STRATEGY: none
when: manual
only:
- branches
environment:
name: review/docs-$CI_BUILD_REF_NAME
name: review/docs/$CI_PROJECT_PATH_SLUG-$CI_BUILD_REF_SLUG
action: stop
tags:
- funkwhale-review
@@ -132,9 +143,9 @@ test_api:
DJANGO_ALLOWED_HOSTS: "localhost"
DATABASE_URL: "postgresql://postgres@postgres/postgres"
FUNKWHALE_URL: "https://funkwhale.ci"
CACHEOPS_ENABLED: "false"
DJANGO_SETTINGS_MODULE: config.settings.local
only:
- branches
before_script:
- cd api
- pip install -r requirements/base.txt
@@ -151,12 +162,13 @@ test_front:
image: node:9
before_script:
- cd front
only:
- branches
script:
- yarn install
- yarn run unit
cache:
key: "$CI_PROJECT_ID__front_dependencies"
key: "funkwhale__front_dependencies"
paths:
- front/node_modules
- front/yarn.lock
@@ -172,17 +184,18 @@ build_front:
stage: build
image: node:9
before_script:
- curl -L -o /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64
- chmod +x /usr/local/bin/jq
- cd front
script:
- yarn install
- yarn run i18n-extract
- yarn run i18n-compile
# this is to ensure we don't have any errors in the output,
# cf https://code.eliotberriot.com/funkwhale/funkwhale/issues/169
- yarn run build | tee /dev/stderr | (! grep -i 'ERROR in')
- chmod -R 750 dist
cache:
key: "$CI_PROJECT_ID__front_dependencies"
key: "funkwhale__front_dependencies"
paths:
- front/node_modules
- front/yarn.lock
@@ -205,8 +218,10 @@ pages:
BUILD_PATH: "../public"
before_script:
- cd docs
script:
- apt-get update
- apt-get install -y graphviz
- pip install sphinx
script:
- ./build_docs.sh
cache:
key: "$CI_PROJECT_ID__sphinx"
@@ -243,7 +258,9 @@ build_api:
name: "api_${CI_COMMIT_REF_NAME}"
paths:
- api
script: echo Done!
script:
- chmod -R 750 api
- echo Done!
only:
- tags@funkwhale/funkwhale
- master@funkwhale/funkwhale
......
Contribute to Funkwhale development
==================================
===================================
First of all, thank you for your interest in the project! We really
appreciate the fact that you're about to take some time to read this
@@ -82,7 +82,7 @@ Visit https://code.eliotberriot.com/funkwhale/funkwhale and clone the repository
A note about branches
^^^^^^^^^^^^^^^^^^^^^
Next release development occurs on the "develop" branch, and releases are made on the "master" branch. Therefor, when submitting Merge Requests, ensure you are merging on the develop branch.
Next release development occurs on the "develop" branch, and releases are made on the "master" branch. Therefore, when submitting Merge Requests, ensure you are merging on the develop branch.
Working with docker
@@ -111,7 +111,7 @@ Create it like this::
Create docker network
^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^
Create the federation network::
@@ -280,7 +280,7 @@ Typical workflow for a contribution
3. Create a dedicated branch for your work ``42-awesome-fix``. It is good practice to prefix your branch name with the ID of the issue you are solving.
4. Work on your stuff
5. Commit small, atomic changes to make it easier to review your contribution
6. Add a changelog fragment to summarize your changes: ``echo "Implemented awesome stuff (#42)" > changes/changelog.d/42.feature"``
6. Add a changelog fragment to summarize your changes: ``echo "Implemented awesome stuff (#42)" > changes/changelog.d/42.feature``
7. Push your branch
8. Create your merge request
9. Take a step back and enjoy, we're really grateful you did all of this and took the time to contribute!
@@ -289,8 +289,9 @@ Typical workflow for a contribution
Internationalization
--------------------
We're using https://github.com/Polyconseil/vue-gettext to manage i18n in the project.
When working on the front-end, any end-user string should be translated
using either ``<i18next path="yourstring">`` or the ``$t('yourstring')``
using either ``<translate>yourstring</translate>`` or ``$gettext('yourstring')``
function.
Extraction is done by calling ``yarn run i18n-extract``, which
......
@@ -14,8 +14,8 @@ Getting help
We offer various Matrix.org rooms to discuss Funkwhale:
- `#funkwhale:matrix.org <https://riot.im/app/#/room/#funkwhale:matrix.org>`_ for general questions about funkwhale
- `#funkwhale-dev:matrix.org <https://riot.im/app/#/room/#funkwhale-dev:matrix.org>`_ for development-focused discussion
- `#funkwhale:matrix.org <https://matrix.to/#/#funkwhale:matrix.org>`_ for general questions about funkwhale
- `#funkwhale-dev:matrix.org <https://matrix.to/#/#funkwhale-dev:matrix.org>`_ for development-focused discussion
Please join those rooms if you have any questions!
@@ -26,4 +26,9 @@ Contribute
----------
Contribution guidelines as well as development installation instructions
are outlined in `CONTRIBUTING <CONTRIBUTING>`_
are outlined in `CONTRIBUTING <CONTRIBUTING>`_.
Translate
^^^^^^^^^
Translators willing to help can refer to `TRANSLATORS <TRANSLATORS>`_ for instructions.
Translating Funkwhale
=====================
Thank you for reading this! If you want to help translate Funkwhale,
you've come to the right place :)
Translation is done via our own Weblate instance at https://translate.funkwhale.audio/projects/funkwhale/front/.
You can sign up or log in using your GitLab account (from https://code.eliotberriot.com).
Translation workflow
--------------------
Once you're logged in on the Weblate instance, you can suggest translations. Your suggestions will then be reviewed
by the project maintainer or other translators to ensure consistency.
Guidelines
----------
Respecting those guidelines is mandatory if you want your translation to be included:
- Use gender-neutral language and wording
Requesting a new language
-------------------------
If you'd like to see a new language in Funkwhale, please open an issue here:
https://code.eliotberriot.com/funkwhale/funkwhale/issues
@@ -92,8 +92,8 @@ THIRD_PARTY_APPS = (
"rest_auth.registration",
"dynamic_preferences",
"django_filters",
"cacheops",
"django_cleanup",
"versatileimagefield",
)
@@ -302,6 +302,7 @@ SESSION_COOKIE_HTTPONLY = False
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_USERNAME_VALIDATORS = "funkwhale_api.users.serializers.username_validators"
# Custom user app defaults
# Select the correct user model
@@ -420,15 +421,6 @@ PROTECT_FILES_PATH = env("PROTECT_FILES_PATH", default="/_protected")
# use this setting to tweak for how long you want to cache
# musicbrainz results. (value is in seconds)
MUSICBRAINZ_CACHE_DURATION = env.int("MUSICBRAINZ_CACHE_DURATION", default=300)
CACHEOPS_REDIS = env("CACHE_URL", default=CACHE_DEFAULT)
CACHEOPS_ENABLED = env.bool("CACHEOPS_ENABLED", default=True)
CACHEOPS = {
"music.artist": {"ops": "all", "timeout": 60 * 60},
"music.album": {"ops": "all", "timeout": 60 * 60},
"music.track": {"ops": "all", "timeout": 60 * 60},
"music.trackfile": {"ops": "all", "timeout": 60 * 60},
"taggit.tag": {"ops": "all", "timeout": 60 * 60},
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env("DJANGO_ADMIN_URL", default="^api/admin/")
@@ -441,6 +433,7 @@ PLAYLISTS_MAX_TRACKS = env.int("PLAYLISTS_MAX_TRACKS", default=250)
ACCOUNT_USERNAME_BLACKLIST = [
"funkwhale",
"library",
"instance",
"test",
"status",
"root",
@@ -449,6 +442,11 @@ ACCOUNT_USERNAME_BLACKLIST = [
"superuser",
"staff",
"service",
"me",
"ghost",
"_",
"hello",
"contact",
] + env.list("ACCOUNT_USERNAME_BLACKLIST", default=[])
EXTERNAL_REQUESTS_VERIFY_SSL = env.bool("EXTERNAL_REQUESTS_VERIFY_SSL", default=True)
@@ -465,3 +463,13 @@ MUSIC_DIRECTORY_SERVE_PATH = env(
USERS_INVITATION_EXPIRATION_DAYS = env.int(
"USERS_INVITATION_EXPIRATION_DAYS", default=14
)
VERSATILEIMAGEFIELD_RENDITION_KEY_SETS = {
"square": [
("original", "url"),
("square_crop", "crop__400x400"),
("medium_square_crop", "crop__200x200"),
("small_square_crop", "crop__50x50"),
]
}
VERSATILEIMAGEFIELD_SETTINGS = {"create_images_on_demand": False}
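The rendition key set above names the cropped sizes exposed for album covers and user avatars. Since ``create_images_on_demand`` is disabled, renditions have to be generated ahead of time (that is what the new ``create_image_variations`` script further down in this diff does). A minimal sketch of reading a pre-warmed rendition, assuming the standard django-versatileimagefield API and that ``Album.cover`` is a ``VersatileImageField``::

    from funkwhale_api.music.models import Album

    album = Album.objects.exclude(cover="").first()
    if album is not None:
        # "square_crop" maps to crop__400x400 in the key set above
        print(album.cover.crop["400x400"].url)
        print(album.cover.crop["50x50"].url)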
@@ -39,6 +39,7 @@ DEBUG_TOOLBAR_CONFIG = {
"DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
"SHOW_TEMPLATE_CONTEXT": True,
"SHOW_TOOLBAR_CALLBACK": lambda request: True,
"JQUERY_URL": "",
}
# django-extensions
......
@@ -51,12 +51,6 @@ CSRF_TRUSTED_ORIGINS = ALLOWED_HOSTS
# END SITE CONFIGURATION
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
# Static Assets
# ------------------------
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
......
from funkwhale_api.users.models import User
u = User.objects.create(email="demo@demo.com", username="demo", is_staff=True)
u.set_password("demo")
u.subsonic_api_token = "demo"
u.save()
#! /bin/bash
echo "Loading demo data..."
python manage.py migrate --noinput
echo "Creating demo user..."
cat demo/demo-user.py | python manage.py shell -i python
echo "Importing demo tracks..."
python manage.py import_files "/music/**/*.ogg" --recursive --noinput --username demo
# -*- coding: utf-8 -*-
__version__ = "0.15"
__version__ = "0.16"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
......
import django_filters
from django.db import models
from funkwhale_api.music import utils
from . import search
PRIVACY_LEVEL_CHOICES = [
("me", "Only me"),
@@ -34,5 +34,17 @@ class SearchFilter(django_filters.CharFilter):
def filter(self, qs, value):
if not value:
return qs
query = utils.get_query(value, self.search_fields)
query = search.get_query(value, self.search_fields)
return qs.filter(query)
class SmartSearchFilter(django_filters.CharFilter):
def __init__(self, *args, **kwargs):
self.config = kwargs.pop("config")
super().__init__(*args, **kwargs)
def filter(self, qs, value):
if not value:
return qs
cleaned = self.config.clean(value)
return search.apply(qs, cleaned)
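``SmartSearchFilter`` delegates query parsing to a ``SearchConfig`` (defined in the new ``search`` module shown below) and applies the resulting search and filter queries to the queryset. A minimal sketch of how it could be wired into a ``django_filters.FilterSet`` — the import path and field names are illustrative, not taken from this commit::

    import django_filters

    from funkwhale_api.common import fields, search  # path is an assumption

    class TrackFilter(django_filters.FilterSet):
        # "hello in:title artist:someone" -> icontains search on title,
        # exact filter on artist__name
        q = fields.SmartSearchFilter(
            config=search.SearchConfig(
                search_fields={"title": {"to": "title"}},
                filter_fields={"artist": {"to": "artist__name"}},
            )
        )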
@@ -19,7 +19,7 @@ class Command(BaseCommand):
def handle(self, *args, **options):
name = options["script_name"]
if not name:
self.show_help()
return self.show_help()
available_scripts = self.get_scripts()
try:
@@ -50,7 +50,7 @@ class Command(BaseCommand):
self.stdout.write(self.style.SUCCESS(name))
self.stdout.write("")
for line in script["help"].splitlines():
self.stdout.write("     {}".format(line))
self.stdout.write(" {}".format(line))
self.stdout.write("")
def get_scripts(self):
......
@@ -14,6 +14,11 @@ def get(pref):
return manager[pref]
def set(pref, value):
manager = global_preferences_registry.manager()
manager[pref] = value
class StringListSerializer(serializers.BaseSerializer):
separator = ","
sort = True
......
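The new ``set`` helper above mirrors ``get`` and writes a dynamic preference through the global registry manager. A small usage sketch — the module path and preference key are illustrative only::

    from funkwhale_api.common import preferences  # path is an assumption

    current = preferences.get("instance__some_setting")   # hypothetical key
    preferences.set("instance__some_setting", "new value")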
from . import create_actors
from . import create_image_variations
from . import django_permissions_to_user_permissions
from . import test
__all__ = [
"create_actors",
"create_image_variations",
"django_permissions_to_user_permissions",
"test",
]
"""
Create federation actors for existing users that don't have one yet
"""
from django.db.utils import IntegrityError
from funkwhale_api.users.models import User, create_actor
def main(command, **kwargs):
qs = User.objects.filter(actor__isnull=True).order_by("username")
total = len(qs)
command.stdout.write("{} users found without actors".format(total))
for i, user in enumerate(qs):
command.stdout.write(
"{}/{} creating actor for {}".format(i + 1, total, user.username)
)
try:
user.actor = create_actor(user)
except IntegrityError as e:
# somehow, an actor with the same URL already exists in the database
command.stderr.write("Error while creating actor: {}".format(str(e)))
continue
user.save(update_fields=["actor"])
"""
Compute different sizes of image used for Album covers and User avatars
"""
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
from funkwhale_api.music.models import Album
from funkwhale_api.users.models import User
MODELS = [(Album, "cover", "square"), (User, "avatar", "square")]
def main(command, **kwargs):
for model, attribute, key_set in MODELS:
qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
qs = qs.exclude(**{attribute: ""})
warmer = VersatileImageFieldWarmer(
instance_or_queryset=qs,
rendition_key_set=key_set,
image_attr=attribute,
verbose=True,
)
command.stdout.write(
"Creating images for {} / {}".format(model.__name__, attribute)
)
num_created, failed_to_create = warmer.warm()
command.stdout.write(
" {} created, {} in error".format(num_created, len(failed_to_create))
)
import re
from django.db.models import Q
QUERY_REGEX = re.compile(r'(((?P<key>\w+):)?(?P<value>"[^"]+"|[\S]+))')
def parse_query(query):
"""
Given a search query such as "hello is:issue status:opened",
returns a list of dictionaries describing each query token
"""
matches = [m.groupdict() for m in QUERY_REGEX.finditer(query.lower())]
for m in matches:
if m["value"].startswith('"') and m["value"].endswith('"'):
m["value"] = m["value"][1:-1]
return matches
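# A quick sketch of the token structure this returns (illustrative only, not
# part of the commit):
#
#   parse_query('hello is:issue status:"in progress"')
#   -> [{"key": None, "value": "hello"},
#       {"key": "is", "value": "issue"},
#       {"key": "status", "value": "in progress"}]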
def normalize_query(
query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r"\s{2,}").sub,
):
""" Splits the query string in invidual keywords, getting rid of unecessary spaces
and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
"""
return [normspace(" ", (t[0] or t[1]).strip()) for t in findterms(query_string)]
def get_query(query_string, search_fields):
""" Returns a query, that is a combination of Q objects. That combination
aims to search keywords within a model by testing the given search fields.
"""
query = None # Query to search for every search term
terms = normalize_query(query_string)
for term in terms:
or_query = None # Query to search for a given term in each field
for field_name in search_fields:
q = Q(**{"%s__icontains" % field_name: term})
if or_query is None:
or_query = q
else:
or_query = or_query | q
if query is None:
query = or_query
else:
query = query & or_query
return query
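# A sketch of what this builds (illustrative field names only): for
# get_query("hello world", ["title", "artist__name"]) the resulting Q is
# roughly equivalent to
#   (Q(title__icontains="hello") | Q(artist__name__icontains="hello"))
#   & (Q(title__icontains="world") | Q(artist__name__icontains="world"))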
def filter_tokens(tokens, valid):
return [t for t in tokens if t["key"] in valid]
def apply(qs, config_data):
for k in ["filter_query", "search_query"]:
q = config_data.get(k)
if q:
qs = qs.filter(q)
return qs
class SearchConfig:
def __init__(self, search_fields={}, filter_fields={}, types=[]):
self.filter_fields = filter_fields
self.search_fields = search_fields
self.types = types
def clean(self, query):
tokens = parse_query(query)
cleaned_data = {}
cleaned_data["types"] = self.clean_types(filter_tokens(tokens, ["is"]))
cleaned_data["search_query"] = self.clean_search_query(
filter_tokens(tokens, [None, "in"])
)
unhandled_tokens = [t for t in tokens if t["key"] not in [None, "is", "in"]]
cleaned_data["filter_query"] = self.clean_filter_query(unhandled_tokens)
return cleaned_data
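# A minimal usage sketch (field and model names are illustrative, not from
# this commit): plain words go to search_query, "in:" tokens restrict the
# searched fields, and any other recognised key becomes a filter_query that
# apply() (above) chains onto the queryset.
#
#   config = SearchConfig(
#       search_fields={"title": {"to": "title"}, "album": {"to": "album__title"}},
#       filter_fields={"artist": {"to": "artist__name"}},
#   )
#   cleaned = config.clean("hello in:title artist:someone")
#   # cleaned["search_query"] -> Q(title__icontains="hello")
#   # cleaned["filter_query"] -> Q(artist__name="someone")
#   tracks = apply(Track.objects.all(), cleaned)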
def clean_search_query(self, tokens):
if not self.search_fields or not tokens:
return
fields_subset = {
f for t in filter_tokens(tokens, ["in"]) for f in t["value"].split(",")
} or set(self.search_fields.keys())
fields_subset = set(self.search_fields.keys()) & fields_subset
to_fields = [self.search_fields[k]["to"] for k in fields_subset]
query_string = " ".join([t["value"] for t in filter_tokens(tokens, [None])])
return get_query(query_string, sorted(to_fields))
def clean_filter_query(self, tokens):
if not self.filter_fields or not tokens:
return
matching = [t for t in tokens if t["key"] in self.filter_fields]
queries = [
Q(**{self.filter_fields[t["key"]]["to"]: t["value"]}) for t in matching