diff --git a/api/funkwhale_api/music/serializers.py b/api/funkwhale_api/music/serializers.py
index b9ecfc50dcd982e109e369cc611656beba08c4b6..9dfc9147872871b0f0ba500c213f812b79bad8d7 100644
--- a/api/funkwhale_api/music/serializers.py
+++ b/api/funkwhale_api/music/serializers.py
@@ -1,4 +1,5 @@
 from django.db import transaction
+from django.db.models import Q
 
 from rest_framework import serializers
 from taggit.models import Tag
@@ -9,6 +10,7 @@ from funkwhale_api.federation.serializers import AP_CONTEXT
 from funkwhale_api.users.serializers import UserBasicSerializer
 
 from . import models
+from . import tasks
 
 
 class TagSerializer(serializers.ModelSerializer):
@@ -204,3 +206,33 @@ class SubmitFederationTracksSerializer(serializers.Serializer):
                 source=lt.url,
             )
         return batch
+
+
+class ImportJobRunSerializer(serializers.Serializer):
+    jobs = serializers.PrimaryKeyRelatedField(
+        many=True,
+        queryset=models.ImportJob.objects.filter(
+            status__in=['pending', 'errored']
+        )
+    )
+    batches = serializers.PrimaryKeyRelatedField(
+        many=True,
+        queryset=models.ImportBatch.objects.all()
+    )
+
+    def validate(self, validated_data):
+        jobs = validated_data['jobs']
+        batches_ids = [b.pk for b in validated_data['batches']]
+        query = Q(batch__pk__in=batches_ids)
+        query |= Q(pk__in=[j.id for j in jobs])
+        queryset = models.ImportJob.objects.filter(query).filter(
+            status__in=['pending', 'errored']).distinct()
+        validated_data['_jobs'] = queryset
+        return validated_data
+
+    def create(self, validated_data):
+        ids = validated_data['_jobs'].values_list('id', flat=True)
+        validated_data['_jobs'].update(status='pending')
+        for id in ids:
+            tasks.import_job_run.delay(import_job_id=id)
+        return {'jobs': list(ids)}
diff --git a/api/funkwhale_api/music/views.py b/api/funkwhale_api/music/views.py
index cab8eb73876e0d2827e7f5e1e30f3d4a4f185817..f53de1b0a73a2126091fc0998325d27ca958f8f7 100644
--- a/api/funkwhale_api/music/views.py
+++ b/api/funkwhale_api/music/views.py
@@ -145,6 +145,14 @@ class ImportJobViewSet(
         data['count'] = sum([v for v in data.values()])
         return Response(data)
 
+    @list_route(methods=['post'])
+    def run(self, request, *args, **kwargs):
+        serializer = serializers.ImportJobRunSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        payload = serializer.save()
+
+        return Response(payload)
+
     def perform_create(self, serializer):
         source = 'file://' + serializer.validated_data['audio_file'].name
         serializer.save(source=source)
diff --git a/api/tests/music/test_views.py b/api/tests/music/test_views.py
index 7d4117f80155db4c90a9773ff1c22c04e1a70404..2cdee4e8cd5199ce6772558a72ce38f50e4ba151 100644
--- a/api/tests/music/test_views.py
+++ b/api/tests/music/test_views.py
@@ -208,3 +208,64 @@ def test_import_job_stats_filter(factories, superuser_api_client):
     }
     assert response.status_code == 200
     assert response.data == expected
+
+
+def test_import_job_run_via_api(factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+    job1 = factories['music.ImportJob'](status='errored')
+    job2 = factories['music.ImportJob'](status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(url, {'jobs': [job2.pk, job1.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert response.data == {'jobs': [job1.pk, job2.pk]}
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
+
+
+def test_import_batch_run_via_api(factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+
+    batch = factories['music.ImportBatch']()
+    job1 = factories['music.ImportJob'](batch=batch, status='errored')
+    job2 = factories['music.ImportJob'](batch=batch, status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(url, {'batches': [batch.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
+
+
+def test_import_batch_and_job_run_via_api(
+        factories, superuser_api_client, mocker):
+    run = mocker.patch('funkwhale_api.music.tasks.import_job_run.delay')
+
+    batch = factories['music.ImportBatch']()
+    job1 = factories['music.ImportJob'](batch=batch, status='errored')
+    job2 = factories['music.ImportJob'](status='pending')
+
+    url = reverse('api:v1:import-jobs-run')
+    response = superuser_api_client.post(
+        url, {'batches': [batch.pk], 'jobs': [job2.pk]})
+
+    job1.refresh_from_db()
+    job2.refresh_from_db()
+    assert response.status_code == 200
+    assert job1.status == 'pending'
+    assert job2.status == 'pending'
+
+    run.assert_any_call(import_job_id=job1.pk)
+    run.assert_any_call(import_job_id=job2.pk)
diff --git a/changes/changelog.d/176.enhancement b/changes/changelog.d/176.enhancement
new file mode 100644
index 0000000000000000000000000000000000000000..0e431f28cf10585991ef0f4a0bf3f801e16257f2
--- /dev/null
+++ b/changes/changelog.d/176.enhancement
@@ -0,0 +1 @@
+Can now relaunch errored jobs and batches (#176)
diff --git a/front/src/components/library/import/BatchDetail.vue b/front/src/components/library/import/BatchDetail.vue
index b73c8cf8257599e87cc21aea05ce669f58aaac45..f0e6502f02dfbb4cde87a5ea87e6e5925d1400bf 100644
--- a/front/src/components/library/import/BatchDetail.vue
+++ b/front/src/components/library/import/BatchDetail.vue
@@ -40,7 +40,16 @@
         </tr>
         <tr v-if="stats">
           <td><strong>{{ $t('Errored') }}</strong></td>
-          <td>{{ stats.errored }}</td>
+          <td>
+            {{ stats.errored }}
+            <button
+              @click="rerun({batches: [batch.id], jobs: []})"
+              v-if="stats.errored > 0"
+              class="ui tiny basic icon button">
+              <i class="redo icon" />
+              {{ $t('Rerun errored jobs')}}
+            </button>
+          </td>
         </tr>
         <tr v-if="stats">
           <td><strong>{{ $t('Finished') }}</strong></td>
@@ -83,11 +92,21 @@
             <a :href="'https://www.musicbrainz.org/recording/' + job.mbid" target="_blank">{{ job.mbid }}</a>
           </td>
           <td>
-            <a :href="job.source" target="_blank">{{ job.source }}</a>
+            <a :title="job.source" :href="job.source" target="_blank">
+              {{ job.source|truncate(50) }}
+            </a>
           </td>
           <td>
             <span
-              :class="['ui', {'yellow': job.status === 'pending'}, {'red': job.status === 'errored'}, {'green': job.status === 'finished'}, 'label']">{{ job.status }}</span>
+              :class="['ui', {'yellow': job.status === 'pending'}, {'red': job.status === 'errored'}, {'green': job.status === 'finished'}, 'label']">
+              {{ job.status }}</span>
+            <button
+              @click="rerun({batches: [], jobs: [job.id]})"
+              v-if="job.status === 'errored'"
+              :title="$t('Rerun job')"
+              class="ui tiny basic icon button">
+              <i class="redo icon" />
+            </button>
           </td>
           <td>
             <router-link v-if="job.track_file" :to="{name: 'library.tracks.detail', params: {id: job.track_file.track }}">{{ job.track_file.track }}</router-link>
@@ -167,12 +186,6 @@ export default {
       return axios.get(url).then((response) => {
         self.batch = response.data
         self.isLoading = false
-        if (self.batch.status === 'pending') {
-          self.timeout = setTimeout(
-            self.fetchData,
-            5000
-          )
-        }
       })
     },
     fetchStats () {
@@ -186,7 +199,7 @@ export default {
           self.fetchJobs()
           self.fetchData()
         }
-        if (self.batch.status === 'pending') {
+        if (self.stats.pending > 0) {
           self.timeout = setTimeout(
             self.fetchStats,
             5000
@@ -194,6 +207,15 @@ export default {
         }
       })
     },
+    rerun ({jobs, batches}) {
+      let payload = {
+        jobs, batches
+      }
+      let self = this
+      axios.post('import-jobs/run/', payload).then((response) => {
+        self.fetchStats()
+      })
+    },
     fetchJobs () {
       let params = {
         batch: this.id,