
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (26)
Showing changes with 2404 additions and 2240 deletions
@@ -248,6 +248,9 @@ test_api:
    CACHE_URL: "redis://redis:6379/0"
  before_script:
    - cd api
+    - poetry env info
+    - poetry run pip install "setuptools==60.10.0" wheel
+    - poetry run pip install --no-use-pep517 django-allauth==0.42.0
    - poetry install --all-extras
  script:
    - >
@@ -351,6 +354,8 @@ build_api_schema:
    API_TYPE: "v1"
  before_script:
    - cd api
+    - poetry run pip install "setuptools==60.10.0" wheel
+    - poetry run pip install --no-use-pep517 django-allauth==0.42.0
    - poetry install --all-extras
    - poetry run funkwhale-manage migrate
  script:
@@ -461,14 +466,14 @@ docker:
    - if: $CI_COMMIT_TAG
      variables:
        BUILD_ARGS: >
-          --set *.platform=linux/amd64,linux/arm64,linux/arm/v7
+          --set *.platform=linux/amd64,linux/arm64
          --no-cache
          --push
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
      variables:
        BUILD_ARGS: >
-          --set *.platform=linux/amd64,linux/arm64,linux/arm/v7
+          --set *.platform=linux/amd64,linux/arm64
          --set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,oci-mediatypes=false
          --set *.cache-to=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,mode=max,oci-mediatypes=false
          --push
...
@@ -9,6 +9,20 @@ This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.

<!-- towncrier -->

+## 1.4.1 (2025-04-14)
+
+Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
+
+Bugfixes:
+
+- Fix 1.4.0 builds
+- Fix build script for documentation to properly deploy swagger
+- Make trailing slashes optional for all endpoints
+
+Documentation:
+
+- Fixed the sample Apache configuration
+
## 1.4.0 (2023-12-12)

Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
...
@@ -34,10 +34,10 @@ if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
-        url(r"^400/$", default_views.bad_request),
-        url(r"^403/$", default_views.permission_denied),
-        url(r"^404/$", default_views.page_not_found),
-        url(r"^500/$", default_views.server_error),
+        url(r"^400/?$", default_views.bad_request),
+        url(r"^403/?$", default_views.permission_denied),
+        url(r"^404/?$", default_views.page_not_found),
+        url(r"^500/?$", default_views.server_error),
    ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if "debug_toolbar" in settings.INSTALLED_APPS:
...
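The "/?$" suffix in these patterns is what the "Make trailing slashes optional for all endpoints" changelog entry refers to: the same route now matches the path with and without a final slash. A minimal sketch of the behaviour using only the standard re module, with the pattern taken from the hunk above:

import re

# New pattern: the trailing slash is optional.
pattern = re.compile(r"^400/?$")
assert pattern.match("400")
assert pattern.match("400/")

# Old pattern: only the slash-terminated form matched.
assert re.match(r"^400/$", "400") is None
assert re.match(r"^400/$", "400/")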
@@ -28,7 +28,7 @@ router.register(r"attachments", common_views.AttachmentViewSet, "attachments")
v1_patterns = router.urls

v1_patterns += [
-    url(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
+    url(r"^oembed/?$", views.OembedView.as_view(), name="oembed"),
    url(
        r"^instance/",
        include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
...
@@ -8,22 +8,22 @@ router = routers.OptionalSlashRouter()
router.register(r"search", views.SearchViewSet, "search")
urlpatterns = [
    url(
-        "releases/(?P<uuid>[0-9a-z-]+)/$",
+        "releases/(?P<uuid>[0-9a-z-]+)/?$",
        views.ReleaseDetail.as_view(),
        name="release-detail",
    ),
    url(
-        "artists/(?P<uuid>[0-9a-z-]+)/$",
+        "artists/(?P<uuid>[0-9a-z-]+)/?$",
        views.ArtistDetail.as_view(),
        name="artist-detail",
    ),
    url(
-        "release-groups/browse/(?P<artist_uuid>[0-9a-z-]+)/$",
+        "release-groups/browse/(?P<artist_uuid>[0-9a-z-]+)/?$",
        views.ReleaseGroupBrowse.as_view(),
        name="release-group-browse",
    ),
    url(
-        "releases/browse/(?P<release_group_uuid>[0-9a-z-]+)/$",
+        "releases/browse/(?P<release_group_uuid>[0-9a-z-]+)/?$",
        views.ReleaseBrowse.as_view(),
        name="release-browse",
    ),
...

import logging
import time

import troi
import troi.core
from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.db.models import Q
from requests.exceptions import ConnectTimeout

from funkwhale_api.music import models as music_models
from funkwhale_api.typesense import utils

logger = logging.getLogger(__name__)

patches = troi.utils.discover_patches()

SUPPORTED_PATCHES = patches.keys()


def run(config, **kwargs):
    """Validate the received config and run the queryset generation"""
    candidates = kwargs.pop("candidates", music_models.Track.objects.all())
    validate(config)
    return TroiPatch().get_queryset(config, candidates)


def validate(config):
    patch = config.get("patch")
    if patch not in SUPPORTED_PATCHES:
        raise ValidationError(
            'Invalid patch "{}". Supported patches: {}'.format(
                config["patch"], SUPPORTED_PATCHES
            )
        )
    return True


def build_radio_queryset(patch, config, radio_qs):
    """Take a troi patch and its arg, match the missing mbid and then build a radio queryset"""
    logger.info("Config used for troi radio generation is " + str(config))

    start_time = time.time()
    try:
        recommendations = troi.core.generate_playlist(patch, config)
    except ConnectTimeout:
        raise ValueError(
            "Timed out while connecting to ListenBrainz. No candidates could be retrieved for the radio."
        )
    end_time_rec = time.time()
    logger.info("Troi fetch took :" + str(end_time_rec - start_time))

    if not recommendations:
        raise ValueError("No candidates found by troi")

    recommended_mbids = [
        recommended_recording.mbid
        for recommended_recording in recommendations.playlists[0].recordings
    ]

    logger.info("Searching for MusicBrainz ID in Funkwhale database")

    qs_recommended = (
        music_models.Track.objects.all()
        .filter(mbid__in=recommended_mbids)
        .order_by("mbid", "pk")
        .distinct("mbid")
    )
    qs_recommended_mbid = [str(i.mbid) for i in qs_recommended]

    recommended_mbids_not_qs = [
        mbid for mbid in recommended_mbids if mbid not in qs_recommended_mbid
    ]
    cached_match = cache.get_many(recommended_mbids_not_qs)
    cached_match_mbid = [str(i) for i in cached_match.keys()]

    if qs_recommended and cached_match_mbid:
        logger.info("MusicBrainz IDs found in Funkwhale database and redis")
        qs_recommended_mbid.extend(cached_match_mbid)
        mbids_found = qs_recommended_mbid
    elif qs_recommended and not cached_match_mbid:
        logger.info("MusicBrainz IDs found in Funkwhale database")
        mbids_found = qs_recommended_mbid
    elif not qs_recommended and cached_match_mbid:
        logger.info("MusicBrainz IDs found in redis cache")
        mbids_found = cached_match_mbid
    else:
        logger.info(
            "Couldn't find any matches in Funkwhale database. Trying to match all"
        )
        mbids_found = []

    recommended_recordings_not_found = [
        i for i in recommendations.playlists[0].recordings if i.mbid not in mbids_found
    ]

    logger.info("Matching missing MusicBrainz ID to Funkwhale track")

    start_time_resolv = time.time()
    utils.resolve_recordings_to_fw_track(recommended_recordings_not_found)
    end_time_resolv = time.time()

    logger.info(
        "Resolving "
        + str(len(recommended_recordings_not_found))
        + " tracks in "
        + str(end_time_resolv - start_time_resolv)
    )

    cached_match = cache.get_many(recommended_mbids)

    if not mbids_found and not cached_match:
        raise ValueError("No candidates found for troi radio")

    mbids_found_pks = list(
        music_models.Track.objects.all()
        .filter(mbid__in=mbids_found)
        .order_by("mbid", "pk")
        .distinct("mbid")
        .values_list("pk", flat=True)
    )

    mbids_found_pks_unique = [
        i for i in mbids_found_pks if i not in cached_match.keys()
    ]

    if mbids_found and cached_match:
        return radio_qs.filter(
            Q(pk__in=mbids_found_pks_unique) | Q(pk__in=cached_match.values())
        )
    if mbids_found and not cached_match:
        return radio_qs.filter(pk__in=mbids_found_pks_unique)
    if not mbids_found and cached_match:
        return radio_qs.filter(pk__in=cached_match.values())


class TroiPatch:
    code = "troi-patch"
    label = "Troi Patch"

    def get_queryset(self, config, qs):
        patch_string = config.pop("patch")
        patch = patches[patch_string]
        return build_radio_queryset(patch(), config, qs)
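A minimal usage sketch of the radio builder defined above, assuming the troi dependencies are installed; the patch slug and the extra config key shown here are illustrative assumptions, since the available slugs come from troi.utils.discover_patches() at runtime:

from funkwhale_api.music.models import Track
from funkwhale_api.radios import lb_recommendations

# Hypothetical config: "patch" must be one of the discovered troi patch slugs
# ("daily-jams" is assumed here); remaining keys are passed through to the patch.
config = {"patch": "daily-jams", "user_name": "some_listenbrainz_user"}

# run() validates the config, then narrows the candidate queryset down to
# tracks whose MusicBrainz IDs match the troi recommendations.
queryset = lb_recommendations.run(config, candidates=Track.objects.all())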

from troi import Artist, Element, Playlist, Recording
from troi.patch import Patch

recording_list = [
    Recording(
        name="I Want It That Way",
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
        artist=Artist(name="artist_name"),
    ),
    Recording(name="Untouchable", artist=Artist(name="Another lol")),
    Recording(
        name="The Perfect Kiss",
        mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
        artist=Artist(name="artist_name2"),
    ),
    Recording(
        name="Love Your Voice",
        mbid="93726547-f8c0-4efd-8e16-d2dee76500f6",
        artist=Artist(name="artist_name"),
    ),
    Recording(
        name="Hall of Fame",
        mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09",
        artist=Artist(name="artist_name_3"),
    ),
]


class DummyElement(Element):
    """Dummy element that returns a fixed playlist for testing"""

    @staticmethod
    def outputs():
        return [Playlist]

    def read(self, sources):
        recordings = recording_list

        return [
            Playlist(
                name="Test Export Playlist",
                description="A playlist to test exporting playlists to spotify",
                recordings=recordings,
            )
        ]


class DummyPatch(Patch):
    """Dummy patch that always returns a fixed set of recordings for testing"""

    @staticmethod
    def slug():
        return "test-patch"

    def create(self, inputs):
        return DummyElement()

    @staticmethod
    def outputs():
        return [Recording]


recommended_recording_mbids = [
    "87dfa566-21c3-45ed-bc42-1d345b8563fa",
    "ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
    "93726547-f8c0-4efd-8e16-d2dee76500f6",
    "395bd5a1-79cc-4e04-8869-ca9eabc78d09",
]

typesense_search_result = {
    "facet_counts": [],
    "found": 1,
    "out_of": 1,
    "page": 1,
    "request_params": {
        "collection_name": "canonical_fw_data",
        "per_page": 10,
        "q": "artist_nameiwantitthatway",
    },
    "search_time_ms": 1,
    "hits": [
        {
            "highlights": [
                {
                    "field": "combined",
                    "snippet": "string",
                    "matched_tokens": ["string"],
                }
            ],
            "document": {
                "pk": "1",
                "combined": "artist_nameiwantitthatway",
            },
            "text_match": 130916,
        },
        {
            "highlights": [
                {
                    "field": "combined",
                    "snippet": "string",
                    "matched_tokens": ["string"],
                }
            ],
            "document": {
                "pk": "2",
                "combined": "artist_nameiwantitthatway",
            },
            "text_match": 130916,
        },
    ],
}

import logging
import re

import unidecode
from django.conf import settings
from django.core.cache import cache
from lb_matching_tools.cleaner import MetadataCleaner

from funkwhale_api.music import models as music_models

logger = logging.getLogger(__name__)

api_key = settings.TYPESENSE_API_KEY
host = settings.TYPESENSE_HOST
port = settings.TYPESENSE_PORT
protocol = settings.TYPESENSE_PROTOCOL
TYPESENSE_NUM_TYPO = settings.TYPESENSE_NUM_TYPO


class TypesenseNotActivate(Exception):
    pass


if not settings.TYPESENSE_API_KEY:
    logger.info(
        "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
    )
else:
    import typesense


def delete_non_alnum_characters(text):
    return unidecode.unidecode(re.sub(r"[^\w]+", "", text).lower())


def resolve_recordings_to_fw_track(recordings):
    """
    Tries to match a troi recording entity to a fw track using the typesense index.
    It will save the results in the match_mbid attribute of the Track table.
    For test purposes : if multiple fw tracks are returned, we log the information
    but only keep the best result in db to avoid duplicates.
    """
    if not settings.TYPESENSE_API_KEY:
        raise TypesenseNotActivate(
            "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
        )

    client = typesense.Client(
        {
            "api_key": api_key,
            "nodes": [{"host": host, "port": port, "protocol": protocol}],
            "connection_timeout_seconds": 2,
        }
    )

    mc = MetadataCleaner()

    for recording in recordings:
        rec = mc.clean_recording(recording.name)
        artist = mc.clean_artist(recording.artist.name)
        canonical_name_for_track = delete_non_alnum_characters(artist + rec)

        logger.debug(f"Trying to resolve : {canonical_name_for_track}")

        search_parameters = {
            "q": canonical_name_for_track,
            "query_by": "combined",
            "num_typos": TYPESENSE_NUM_TYPO,
            "drop_tokens_threshold": 0,
        }
        matches = client.collections["canonical_fw_data"].documents.search(
            search_parameters
        )

        if matches["hits"]:
            hit = matches["hits"][0]
            pk = hit["document"]["pk"]
            logger.debug(f"Saving match for track with primary key {pk}")
            cache.set(recording.mbid, pk)

            if settings.DEBUG and matches["hits"][1]:
                for hit in matches["hits"][1:]:
                    pk = hit["document"]["pk"]
                    fw_track = music_models.Track.objects.get(pk=pk)
                    logger.info(
                        f"Duplicate match found for {fw_track.artist.name} {fw_track.title} \
                        and primary key {pk}. Skipping because of better match."
                    )
        else:
            logger.debug("No match found in fw db")

    return cache.get_many([rec.mbid for rec in recordings])
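The canonical key sent to Typesense is simply the cleaned artist name concatenated with the cleaned title, stripped of non-word characters, lower-cased and transliterated to ASCII, which is how the fixture value "artist_nameiwantitthatway" shown earlier is produced. A small standalone sketch of that helper, reusing the same expression as above:

import re

import unidecode


def delete_non_alnum_characters(text):
    # Strip everything that is not a word character, lower-case,
    # and transliterate to plain ASCII.
    return unidecode.unidecode(re.sub(r"[^\w]+", "", text).lower())


# "artist_name" + "I Want It That Way" -> "artist_nameiwantitthatway"
assert (
    delete_non_alnum_characters("artist_name" + "I Want It That Way")
    == "artist_nameiwantitthatway"
)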
@@ -10,7 +10,7 @@ router.register(r"apps", views.ApplicationViewSet, "apps")
router.register(r"grants", views.GrantViewSet, "grants")
urlpatterns = router.urls + [
-    url("^authorize/$", csrf_exempt(views.AuthorizeView.as_view()), name="authorize"),
-    url("^token/$", views.TokenView.as_view(), name="token"),
-    url("^revoke/$", views.RevokeTokenView.as_view(), name="revoke"),
+    url("^authorize/?$", csrf_exempt(views.AuthorizeView.as_view()), name="authorize"),
+    url("^token/?$", views.TokenView.as_view(), name="token"),
+    url("^revoke/?$", views.RevokeTokenView.as_view(), name="revoke"),
]
@@ -7,26 +7,26 @@ from . import views
urlpatterns = [
    # URLs that do not require a session or valid token
    url(
-        r"^password/reset/$",
+        r"^password/reset/?$",
        views.PasswordResetView.as_view(),
        name="rest_password_reset",
    ),
    url(
-        r"^password/reset/confirm/$",
+        r"^password/reset/confirm/?$",
        views.PasswordResetConfirmView.as_view(),
        name="rest_password_reset_confirm",
    ),
    # URLs that require a user to be logged in with a valid session / token.
    url(
-        r"^user/$", rest_auth_views.UserDetailsView.as_view(), name="rest_user_details"
+        r"^user/?$", rest_auth_views.UserDetailsView.as_view(), name="rest_user_details"
    ),
    url(
-        r"^password/change/$",
+        r"^password/change/?$",
        views.PasswordChangeView.as_view(),
        name="rest_password_change",
    ),
    # Registration URLs
-    url(r"^registration/$", views.RegisterView.as_view(), name="rest_register"),
+    url(r"^registration/?$", views.RegisterView.as_view(), name="rest_register"),
    url(
        r"^registration/verify-email/?$",
        views.VerifyEmailView.as_view(),
...
This diff is collapsed.
[tool.poetry]
name = "funkwhale-api"
-version = "1.4.0"
+version = "1.4.1"
description = "Funkwhale API"
authors = ["Funkwhale Collective"]

@@ -26,10 +26,11 @@ funkwhale-manage = 'funkwhale_api.main:main'
[tool.poetry.dependencies]
python = "^3.8,<3.12"
+setuptools = "==60.10.0"

# Django
dj-rest-auth = { extras = ["with_social"], version = "2.2.8" }
-django = "==3.2.23"
+django = "==3.2.24"
django-allauth = "==0.42.0"
django-cache-memoize = "0.1.10"
django-cacheops = "==6.1"

@@ -73,19 +74,17 @@ bleach = "==5.0.1"
boto3 = "==1.26.161"
click = "==8.1.7"
cryptography = "==38.0.4"
-feedparser = "==6.0.10"
+feedparser = "==6.0.11"
musicbrainzngs = "==0.7.1"
mutagen = "==1.46.0"
pillow = "==9.3.0"
pydub = "==0.25.1"
-pyld = "==2.0.3"
+pyld = "==2.0.4"
python-magic = "==0.4.27"
requests = "==2.28.2"
requests-http-message-signatures = "==0.3.1"
sentry-sdk = "==1.19.1"
watchdog = "==2.2.1"
-troi = { git = "https://github.com/metabrainz/troi-recommendation-playground.git", tag = "v-2023-10-30.0"}
-lb-matching-tools = { git = "https://github.com/metabrainz/listenbrainz-matching-tools.git", branch = "main"}
unidecode = "==1.3.7"
pycountry = "22.3.5"

@@ -97,7 +96,6 @@ ipython = "==7.34.0"
pluralizer = "==1.2.0"
service-identity = "==21.1.0"
unicode-slugify = "==0.1.5"

[tool.poetry.group.dev.dependencies]
aioresponses = "==0.7.6"
asynctest = "==0.13.0"

@@ -110,9 +108,9 @@ factory-boy = "==3.2.1"
faker = "==15.3.4"
flake8 = "==3.9.2"
ipdb = "==0.13.13"
-pytest = "==7.4.3"
+pytest = "==7.4.4"
pytest-asyncio = "==0.21.0"
-prompt-toolkit = "==3.0.41"
+prompt-toolkit = "==3.0.43"
pytest-cov = "==4.0.0"
pytest-django = "==4.5.2"
pytest-env = "==0.8.2"

@@ -128,7 +126,8 @@ django-extensions = "==3.2.3"
typesense = ["typesense"]

[build-system]
-requires = ["poetry-core>=1.0.0"]
+requires = ["poetry-core==1.8.1", "setuptools==60.10.0"
+]
build-backend = "poetry.core.masonry.api"

[tool.pylint.master]
...

import pytest
import troi.core
from django.core.cache import cache
from django.db.models import Q
from requests.exceptions import ConnectTimeout

from funkwhale_api.music.models import Track
from funkwhale_api.radios import lb_recommendations
from funkwhale_api.typesense import factories as custom_factories
from funkwhale_api.typesense import utils


def test_can_build_radio_queryset_with_fw_db(factories, mocker):
    factories["music.Track"](
        title="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
    )
    factories["music.Track"](
        title="The Perfect Kiss", mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"
    )
    factories["music.Track"]()
    qs = Track.objects.all()

    mocker.patch("funkwhale_api.typesense.utils.resolve_recordings_to_fw_track")

    radio_qs = lb_recommendations.build_radio_queryset(
        custom_factories.DummyPatch(), {"min_recordings": 1}, qs
    )
    recommended_recording_mbids = [
        "87dfa566-21c3-45ed-bc42-1d345b8563fa",
        "ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
    ]

    assert list(
        Track.objects.all().filter(Q(mbid__in=recommended_recording_mbids))
    ) == list(radio_qs)


def test_build_radio_queryset_without_fw_db(mocker):
    resolve_recordings_to_fw_track = mocker.patch.object(
        utils, "resolve_recordings_to_fw_track", return_value=None
    )
    # mocker.patch.object(cache, "get_many", return_value=None)
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    assert resolve_recordings_to_fw_track.called_once_with(
        custom_factories.recommended_recording_mbids
    )


def test_build_radio_queryset_with_redis_and_fw_db(factories, mocker):
    factories["music.Track"](
        pk="1", title="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
    )
    mocker.patch.object(utils, "resolve_recordings_to_fw_track", return_value=None)
    redis_cache = {}
    redis_cache["ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"] = 2
    mocker.patch.object(cache, "get_many", return_value=redis_cache)
    qs = Track.objects.all()

    assert list(
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    ) == list(Track.objects.all().filter(pk__in=[1, 2]))


def test_build_radio_queryset_with_redis_and_without_fw_db(factories, mocker):
    factories["music.Track"](
        pk="1", title="Super title", mbid="87dfaaaa-2aaa-45ed-bc42-1d34aaaaaaaa"
    )
    mocker.patch.object(utils, "resolve_recordings_to_fw_track", return_value=None)
    redis_cache = {}
    redis_cache["87dfa566-21c3-45ed-bc42-1d345b8563fa"] = 1
    mocker.patch.object(cache, "get_many", return_value=redis_cache)
    qs = Track.objects.all()

    assert list(
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    ) == list(Track.objects.all().filter(pk=1))


def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):
    mocker.patch.object(
        troi.core,
        "generate_playlist",
        side_effect=ConnectTimeout,
    )
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )


def test_build_radio_queryset_catch_troi_no_candidates(mocker):
    mocker.patch.object(
        troi.core,
        "generate_playlist",
    )
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )

import requests_mock
import typesense
from django.core.cache import cache

from funkwhale_api.typesense import factories as custom_factories
from funkwhale_api.typesense import utils


def test_resolve_recordings_to_fw_track(mocker, factories):
    artist = factories["music.Artist"](name="artist_name")
    factories["music.Track"](
        pk=1,
        title="I Want It That Way",
        artist=artist,
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
    )
    factories["music.Track"](
        pk=2,
        title="I Want It That Way",
        artist=artist,
    )

    client = typesense.Client(
        {
            "api_key": "api_key",
            "nodes": [{"host": "host", "port": "port", "protocol": "protocol"}],
            "connection_timeout_seconds": 2,
        }
    )

    with requests_mock.Mocker() as r_mocker:
        mocker.patch.object(typesense, "Client", return_value=client)
        mocker.patch.object(
            typesense.client.ApiCall,
            "post",
            return_value=custom_factories.typesense_search_result,
        )
        r_mocker.get(
            "protocol://host:port/collections/canonical_fw_data/documents/search",
            json=custom_factories.typesense_search_result,
        )

        utils.resolve_recordings_to_fw_track(custom_factories.recording_list)
        assert cache.get("87dfa566-21c3-45ed-bc42-1d345b8563fa") == "1"
Fixed the sample Apache configuration
Fix build script for documentation to properly deploy swagger
Make trailing slashes optional for all endpoints
@@ -94,6 +94,11 @@ Define MEDIA_DIRECTORY_PATH ${FUNKWHALE_ROOT_PATH}/data/media
</Location>

Alias /front ${FUNKWHALE_ROOT_PATH}/front/dist

+<Location "/assets">
+  ProxyPass "!"
+</Location>
+
+Alias /assets ${FUNKWHALE_ROOT_PATH}/front/dist/assets
+
<Location "/media">
  ProxyPass "!"
</Location>
...
@@ -14,7 +14,7 @@ $(VENV):
	$(MAKE) install

install:
-	poetry install
+	poetry install --no-root
	poetry run pip install --no-deps --editable ../api

clean:

@@ -47,10 +47,10 @@ locale-prune-untranslated: $(VENV)
# Swagger
SWAGGER_VERSION = 5.1.2
SWAGGER_RELEASE_URL = https://github.com/swagger-api/swagger-ui/archive/refs/tags/v$(SWAGGER_VERSION).tar.gz
-SWAGGER_BUILD_DIR = swagger
+SWAGGER_BUILD_DIR = "$(BUILD_DIR)/swagger"

swagger:
-	mkdir "$(SWAGGER_BUILD_DIR)"
+	mkdir -p "$(SWAGGER_BUILD_DIR)"
	curl -sSL "$(SWAGGER_RELEASE_URL)" | \
		tar --extract \
		--gzip \
...
@@ -72,7 +72,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
[[package]]
name = "django"
-version = "3.2.23"
+version = "3.2.24"
description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design."
category = "main"
optional = false

@@ -576,7 +576,7 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "1.1"
python-versions = "^3.8"
-content-hash = "ece2cc9c958fc0ba6daf1213b80e849dc9357d5fd8bdd09d436dfbf605dccd7c"
+content-hash = "c72701986feaf309054e359d54f1bb0508cb753c68e5ec4e4ad8d3c75b2af6f0"

[metadata.files]
alabaster = [

@@ -696,8 +696,8 @@ colorama = [
    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
django = [
-    {file = "Django-3.2.23-py3-none-any.whl", hash = "sha256:d48608d5f62f2c1e260986835db089fa3b79d6f58510881d316b8d88345ae6e1"},
-    {file = "Django-3.2.23.tar.gz", hash = "sha256:82968f3640e29ef4a773af2c28448f5f7a08d001c6ac05b32d02aeee6509508b"},
+    {file = "Django-3.2.24-py3-none-any.whl", hash = "sha256:5dd5b787c3ba39637610fe700f54bf158e33560ea0dba600c19921e7ff926ec5"},
+    {file = "Django-3.2.24.tar.gz", hash = "sha256:aaee9fb0fb4ebd4311520887ad2e33313d368846607f82a9a0ed461cd4c35b18"},
]
django-environ = [
    {file = "django-environ-0.10.0.tar.gz", hash = "sha256:b3559a91439c9d774a9e0c1ced872364772c612cdf6dc919506a2b13f7a77225"},

@@ -836,6 +836,7 @@ PyYAML = [
    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
...