From 80c6dc7ed994b61c3765672a94a67b22ac87756d Mon Sep 17 00:00:00 2001 From: Bryan Larson Date: Fri, 19 Aug 2022 09:30:16 -0600 Subject: [PATCH] DBMISVC-119 - Added async copy/move endpoints --- Dockerfile | 3 + docker-entrypoint-init.d/45-django-q.sh | 7 + fileservice/filemaster/admin.py | 14 +- fileservice/filemaster/aws.py | 18 +- fileservice/filemaster/files.py | 276 +++++++++++++++++- .../migrations/0023_fileoperation.py | 30 ++ fileservice/filemaster/models.py | 44 ++- fileservice/filemaster/serializers.py | 17 +- fileservice/filemaster/urls.py | 3 + fileservice/fileservice/settings.py | 17 ++ requirements.in | 2 + requirements.txt | 48 ++- 12 files changed, 444 insertions(+), 35 deletions(-) create mode 100644 docker-entrypoint-init.d/45-django-q.sh create mode 100644 fileservice/filemaster/migrations/0023_fileoperation.py diff --git a/Dockerfile b/Dockerfile index 5a81ff2..3012b65 100644 --- a/Dockerfile +++ b/Dockerfile @@ -42,6 +42,9 @@ RUN pip install --no-index \ # and Pip errors out on the mismatches. -r /requirements.in +# Setup entry scripts +ADD docker-entrypoint-init.d/* /docker-entrypoint-init.d/ + # Copy app source COPY /fileservice /app diff --git a/docker-entrypoint-init.d/45-django-q.sh b/docker-entrypoint-init.d/45-django-q.sh new file mode 100644 index 0000000..9165314 --- /dev/null +++ b/docker-entrypoint-init.d/45-django-q.sh @@ -0,0 +1,7 @@ +#!/bin/bash -e + +# Create the Django cache table as needed by Q +python ${DBMI_APP_ROOT}/manage.py createcachetable + +# Start the Q cluster +python ${DBMI_APP_ROOT}/manage.py qcluster & diff --git a/fileservice/filemaster/admin.py b/fileservice/filemaster/admin.py index e3c677f..1fdbf49 100644 --- a/fileservice/filemaster/admin.py +++ b/fileservice/filemaster/admin.py @@ -7,6 +7,7 @@ from filemaster.models import CustomUser from filemaster.models import DownloadLog from filemaster.models import FileLocation +from filemaster.models import FileOperation from guardian.admin import GuardedModelAdmin @@ -22,7 +23,7 @@ class CustomUserAdmin(admin.ModelAdmin): admin.site.register(CustomUser, CustomUserAdmin) -class BucketAdmin(admin.ModelAdmin): +class BucketAdmin(GuardedModelAdmin): list_display = ('name', ) @@ -34,7 +35,7 @@ class ArchiveFileAdmin(GuardedModelAdmin): list_display = ('filename', 'uuid', 'creationdate', 'owner') readonly_fields = ('uuid', 'creationdate') sortable_by = ('owner', 'creationdate', 'modifydate') - search_fields = ('owner__email', 'owner__username', 'filename', 'metadata', ) + search_fields = ('uuid', 'owner__email', 'owner__username', 'filename', 'metadata', ) admin.site.register(ArchiveFile, ArchiveFileAdmin) @@ -62,6 +63,15 @@ class DownloadLogAdmin(admin.ModelAdmin): admin.site.register(DownloadLog, DownloadLogAdmin) +class FileOperationAdmin(admin.ModelAdmin): + fields = ('uuid', 'archivefile', 'task_id', 'creationdate', 'modifydate', 'origin', 'destination', 'origin_location', 'destination_location') + readonly_fields = ('completed', 'task_id', 'uuid', 'creationdate', 'modifydate', 'archivefile', 'origin', 'destination', 'origin_location', 'destination_location') + list_display = ('uuid', 'operation', 'archivefile', 'origin', 'destination', 'completed', 'creationdate', 'modifydate', ) + sortable_by = ('creationdate', 'modifydate', 'archivefile', 'origin', 'destination', ) + +admin.site.register(FileOperation, FileOperationAdmin) + + def patch_admin(model, admin_site=None): """ Enables version control with full admin integration for a model that has diff --git a/fileservice/filemaster/aws.py 
b/fileservice/filemaster/aws.py index 32e06f6..6dce44a 100644 --- a/fileservice/filemaster/aws.py +++ b/fileservice/filemaster/aws.py @@ -8,7 +8,7 @@ from boto.s3.connection import S3Connection from django.conf import settings -from .models import FileLocation +from .models import ArchiveFile, FileLocation import logging log = logging.getLogger(__name__) @@ -132,7 +132,10 @@ def signedUrlDownload(archiveFile=None, hours=24): return False -def awsCopyFile(archive_file, destination, origin): +def awsCopyFile(archive_file_uuid, destination, origin): + + # Fetch it + archive_file = ArchiveFile.objects.get(uuid=archive_file_uuid) # Get the location location = archive_file.get_location(origin) @@ -167,7 +170,7 @@ def awsCopyFile(archive_file, destination, origin): # Add it archive_file.locations.add(new_location) - return new_location + return new_location.id def awsRemoveFile(location): @@ -188,7 +191,10 @@ def awsRemoveFile(location): return True -def awsMoveFile(archive_file, destination, origin): +def awsMoveFile(archive_file_uuid, destination, origin): + + # Fetch it + archive_file = ArchiveFile.objects.get(uuid=archive_file_uuid) # Get the current location location = archive_file.get_location(origin) @@ -197,7 +203,7 @@ def awsMoveFile(archive_file, destination, origin): return False # Call other methods - new_location = awsCopyFile(archive_file, destination, origin) + new_location = awsCopyFile(archive_file.uuid, destination, origin) if new_location: # File was copied, remove from origin @@ -210,7 +216,7 @@ def awsMoveFile(archive_file, destination, origin): else: log.error(f'Could not delete original file after move: {archive_file.uuid}') - return new_location + # awsCopyFile already returns the new location's id + return new_location return False diff --git a/fileservice/filemaster/files.py b/fileservice/filemaster/files.py index 53e2d0a..a7c16a1 100644 --- a/fileservice/filemaster/files.py +++ b/fileservice/filemaster/files.py @@ -28,6 +28,7 @@ from django.contrib.auth.models import User from django.http import HttpResponse from django.contrib.auth import get_user_model +from django_q.tasks import async_task, result from filemaster.aws import awsCopyFile from filemaster.aws import awsMoveFile @@ -39,10 +40,13 @@ from filemaster.models import Bucket from filemaster.models import DownloadLog from filemaster.models import FileLocation +from filemaster.models import FileOperation +from filemaster.models import FILE_OPERATION_MOVE, FILE_OPERATION_COPY from filemaster.serializers import ArchiveFileSerializer from filemaster.serializers import ArchiveFileSimpleSerializer from filemaster.serializers import DownloadLogSerializer from filemaster.serializers import FileLocationSerializer +from filemaster.serializers import FileOperationSerializer from filemaster.permissions import DjangoObjectPermissionsAll from filemaster.permissions import DjangoObjectPermissionsChange @@ -236,10 +240,34 @@ def copy(self, request, uuid): # Get the location if not origin: - origin, path = next(archivefile.locations).get_bucket() + origin, path = archivefile.locations.first().get_bucket() log.debug(f'Origin not provided, using "{origin}"') - except: - return HttpResponseNotFound() + + else: + + # Ensure file exists at origin + for location in archivefile.locations.all(): + + bucket, key = location.get_bucket() + if bucket.lower() == origin.lower() and location.uploadComplete: + + # Check size + if location.filesize > 1000000000: + return HttpResponseBadRequest(f"ArchiveFile size is greater than 1GB, use the 'asynccopy' endpoint") + + break + else: +
log.debug(f'Origin "{origin}" does not contain archivefile') + return HttpResponseBadRequest(f"Origin '{origin}' does not contain ArchiveFile '{uuid}'") + + except ArchiveFile.DoesNotExist: + return HttpResponseNotFound(f'ArchiveFile \'{uuid}\' could not be found') + + except Exception as e: + log.exception(f'Copy error: {e}', exc_info=True, extra={ + 'request': request, 'uuid': uuid, + }) + return HttpResponseNotFound(f'Location for ArchiveFile \'{uuid}\' could not be found') # Check request if not uuid or not destination or not origin: @@ -270,11 +298,14 @@ def copy(self, request, uuid): try: # Perform the copy - new_location = awsCopyFile(archivefile, destination, origin) - if not new_location: + new_location_id = awsCopyFile(archivefile.uuid, destination, origin) + if not new_location_id: log.error(f'Could not copy file {archivefile.uuid}') return HttpResponseServerError(f'Could not copy file {archivefile.uuid}') + # Get the location + new_location = FileLocation.objects.get(id=new_location_id) + return Response({'message': 'copied', 'url': new_location.url, 'uuid': uuid}) except Exception as e: @@ -285,6 +316,95 @@ return HttpResponseServerError('Error copying file') + @action(detail=True, methods=['post'], permission_classes=[DjangoObjectPermissionsAll]) + def asynccopy(self, request, uuid): + + # Get bucket + destination = request.query_params.get('to') + origin = request.query_params.get('from') + + # Ensure it exists + try: + archivefile = ArchiveFile.objects.get(uuid=uuid) + + # Get the location + if not origin: + origin, path = archivefile.locations.first().get_bucket() + log.debug(f'Origin not provided, using "{origin}"') + + else: + + # Ensure file exists at origin + for location in archivefile.locations.all(): + + bucket, key = location.get_bucket() + if bucket.lower() == origin.lower() and location.uploadComplete: + break + else: + log.debug(f'Origin "{origin}" does not contain archivefile') + return HttpResponseBadRequest(f"Origin '{origin}' does not contain ArchiveFile '{uuid}'") + + except ArchiveFile.DoesNotExist: + return HttpResponseNotFound(f'ArchiveFile \'{uuid}\' could not be found') + + except Exception as e: + log.exception(f'Copy error: {e}', exc_info=True, extra={ + 'request': request, 'uuid': uuid, + }) + return HttpResponseNotFound(f'Location for ArchiveFile \'{uuid}\' could not be found') + + # Check request + if not uuid or not destination or not origin: + return HttpResponseBadRequest('File UUID, origin and destination bucket are required') + + try: + # Check bucket perms + if not request.user.has_perm('filemaster.write_bucket', Bucket.objects.get(name=origin)): + return HttpResponseForbidden(f'User does not have permissions on Bucket "{origin}"') + except Bucket.DoesNotExist: + return HttpResponseNotFound(f'Bucket "{origin}" does not exist in Fileservice') + + try: + # Check bucket perms + if not request.user.has_perm('filemaster.write_bucket', Bucket.objects.get(name=destination)): + return HttpResponseForbidden(f'User does not have permissions on Bucket "{destination}"') + except Bucket.DoesNotExist: + return HttpResponseNotFound(f'Bucket "{destination}" does not exist in Fileservice') + + # Check permissions on file + if not request.user.has_perm('filemaster.change_archivefile', archivefile): + return HttpResponseForbidden(f'User does not have \'change\' permission on \'{uuid}\'') + + # Get location + location = archivefile.get_location(origin) + if not location: + return HttpResponseBadRequest(f'File {uuid} has
multiple locations, \'from\' must be specified') + + try: + # Perform the copy + task_id = async_task(awsCopyFile, archivefile.uuid, destination, origin, hook="filemaster.files.async_hook") + + # Create the operation + operation = FileOperation( + archivefile=archivefile, + operation=FILE_OPERATION_COPY, + task_id=task_id, + origin_location=location, + origin=origin, + destination=destination, + ) + operation.save() + + return Response(operation.uuid, status=201) + + except Exception as e: + log.exception('File copy error: {}'.format(e), exc_info=True, extra={ + 'archivefile': archivefile.id, 'uuid': uuid, 'location': location.id, + 'origin': origin, 'destination': destination, + }) + + return HttpResponseServerError('Error copying file') + @action(detail=True, methods=['post'], permission_classes=[DjangoObjectPermissionsAll]) def move(self, request, uuid): @@ -300,6 +420,24 @@ def move(self, request, uuid): if not origin and archivefile.locations.first(): origin, path = archivefile.locations.first().get_bucket() log.debug(f'Origin not provided, using "{origin}"') + + else: + + # Ensure file exists at origin + for location in archivefile.locations.all(): + + bucket, key = location.get_bucket() + if bucket.lower() == origin.lower() and location.uploadComplete: + + # Check size + if location.filesize > 1000000000: + return HttpResponseBadRequest(f"ArchiveFile size is greater than 1GB, use the 'asyncmove' endpoint") + + break + else: + log.debug(f'Origin "{origin}" does not contain archivefile') + return HttpResponseBadRequest(f"Origin '{origin}' does not contain ArchiveFile '{uuid}'") + except ArchiveFile.DoesNotExist: return HttpResponseNotFound(f'ArchiveFile \'{uuid}\' could not be found') @@ -338,11 +476,14 @@ def move(self, request, uuid): try: # Perform the copy - new_location = awsMoveFile(archivefile, destination, origin) - if not new_location: + new_location_id = awsMoveFile(archivefile.uuid, destination, origin) + if not new_location_id: log.error(f'Could not move file {archivefile.uuid}') return HttpResponseServerError(f'Could not move file {archivefile.uuid}') + # Get the location + new_location = FileLocation.objects.get(id=new_location_id) + return Response({'message': 'moved', 'url': new_location.url, 'uuid': uuid}) except Exception as e: @@ -353,6 +494,95 @@ return HttpResponseServerError('Error moving file') + @action(detail=True, methods=['post'], permission_classes=[DjangoObjectPermissionsAll]) + def asyncmove(self, request, uuid): + + # Get bucket + destination = request.query_params.get('to') + origin = request.query_params.get('from') + + # Ensure it exists + try: + archivefile = ArchiveFile.objects.get(uuid=uuid) + + # Get the location + if not origin: + origin, path = archivefile.locations.first().get_bucket() + log.debug(f'Origin not provided, using "{origin}"') + + else: + + # Ensure file exists at origin + for location in archivefile.locations.all(): + + bucket, key = location.get_bucket() + if bucket.lower() == origin.lower() and location.uploadComplete: + break + else: + log.debug(f'Origin "{origin}" does not contain archivefile') + return HttpResponseBadRequest(f"Origin '{origin}' does not contain ArchiveFile '{uuid}'") + + except ArchiveFile.DoesNotExist: + return HttpResponseNotFound(f'ArchiveFile \'{uuid}\' could not be found') + + except Exception as e: + log.exception(f'Move error: {e}', exc_info=True, extra={ + 'request': request, 'uuid': uuid, + }) + return HttpResponseNotFound(f'Location for ArchiveFile \'{uuid}\' could not be found') + + # Check
request + if not uuid or not destination or not origin: + return HttpResponseBadRequest('File UUID, origin and destination bucket are required') + + try: + # Check bucket perms + if not request.user.has_perm('filemaster.write_bucket', Bucket.objects.get(name=origin)): + return HttpResponseForbidden(f'User does not have permissions on Bucket "{origin}"') + except Bucket.DoesNotExist: + return HttpResponseNotFound(f'Bucket "{origin}" does not exist in Fileservice') + + try: + # Check bucket perms + if not request.user.has_perm('filemaster.write_bucket', Bucket.objects.get(name=destination)): + return HttpResponseForbidden(f'User does not have permissions on Bucket "{destination}"') + except Bucket.DoesNotExist: + return HttpResponseNotFound(f'Bucket "{destination}" does not exist in Fileservice') + + # Check permissions on file + if not request.user.has_perm('filemaster.change_archivefile', archivefile): + return HttpResponseForbidden(f'User does not have \'change\' permission on \'{uuid}\'') + + # Get location + location = archivefile.get_location(origin) + if not location: + return HttpResponseBadRequest(f'File {uuid} has multiple locations, \'from\' must be specified') + + try: + # Perform the move + task_id = async_task(awsMoveFile, archivefile.uuid, destination, origin, hook="filemaster.files.async_hook") + + # Create the operation + operation = FileOperation( + archivefile=archivefile, + operation=FILE_OPERATION_MOVE, + task_id=task_id, + origin_location=location, + origin=origin, + destination=destination, + ) + operation.save() + + return Response(operation.uuid, status=201) + + except Exception as e: + log.exception('File move error: {}'.format(e), exc_info=True, extra={ + 'archivefile': archivefile.id, 'uuid': uuid, 'location': location.id, + 'origin': origin, 'destination': destination, + }) + + return HttpResponseServerError('Error moving file') + @action(detail=True, methods=['get'], permission_classes=[DjangoObjectPermissionsAll]) def download(self, request, uuid=None): @@ -808,7 +1038,7 @@ class ArchiveFileDetail(generics.RetrieveUpdateDestroyAPIView): serializer_class = ArchiveFileSimpleSerializer permission_classes = [IsAdminUser] filterset_fields = ['uuid', 'filename'] - + class FileLocationList(generics.ListCreateAPIView): queryset = FileLocation.objects.all() serializer_class = FileLocationSerializer @@ -820,3 +1050,35 @@ class FileLocationDetail(generics.RetrieveUpdateDestroyAPIView): serializer_class = FileLocationSerializer permission_classes = [IsAdminUser] filterset_fields = ['storagetype', 'url'] + +class FileOperationList(generics.ListAPIView): + lookup_field = 'uuid' + queryset = FileOperation.objects.all() + serializer_class = FileOperationSerializer + permission_classes = [] + filterset_fields = ['uuid', 'archivefile', 'task_id', 'creationdate', 'modifydate'] + + +def async_hook(task): + """ + Handles post-async-task processing + + :param task: The async task + :type task: Task + """ + try: + # Get the operation + operation = FileOperation.objects.get(task_id=task.id) + + # Get the destination location + destination_location = next(l for l in operation.archivefile.locations.all() if f"://{operation.destination}" in l.url) + + # Set it + operation.destination_location = destination_location + operation.save() + + except FileOperation.DoesNotExist: + log.exception(f"File operation for task {task.id} does not exist") + + except Exception as e: + log.exception(f"Operation error: {e}", exc_info=True) diff --git a/fileservice/filemaster/migrations/0023_fileoperation.py
b/fileservice/filemaster/migrations/0023_fileoperation.py new file mode 100644 index 0000000..b4b14db --- /dev/null +++ b/fileservice/filemaster/migrations/0023_fileoperation.py @@ -0,0 +1,30 @@ +# Generated by Django 2.2.28 on 2022-08-18 21:47 + +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('filemaster', '0022_auto_20210525_1039'), + ] + + operations = [ + migrations.CreateModel( + name='FileOperation', + fields=[ + ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('operation', models.CharField(choices=[('copy', 'Copy'), ('move', 'Move')], max_length=200)), + ('task_id', models.TextField()), + ('creationdate', models.DateTimeField(auto_now_add=True)), + ('modifydate', models.DateTimeField(auto_now=True)), + ('origin', models.TextField()), + ('destination', models.TextField()), + ('archivefile', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='filemaster.ArchiveFile')), + ('destination_location', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='destination_location', to='filemaster.FileLocation')), + ('origin_location', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='origin_location', to='filemaster.FileLocation')), + ], + ), + ] diff --git a/fileservice/filemaster/models.py b/fileservice/filemaster/models.py index 0594887..fac82eb 100644 --- a/fileservice/filemaster/models.py +++ b/fileservice/filemaster/models.py @@ -31,8 +31,9 @@ from guardian.shortcuts import remove_perm from guardian.shortcuts import get_groups_with_perms from taggit.managers import TaggableManager +from django_q.tasks import fetch -log = logging.getLogger(__name__) +logger = logging.getLogger(__name__) EXPIRATIONDATE = 60 if settings.EXPIRATIONDATE: @@ -44,6 +45,35 @@ def id_generator(size=18, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) +FILE_OPERATION_COPY = "copy" +FILE_OPERATION_MOVE = "move" +FILE_OPERATIONS = ( + (FILE_OPERATION_COPY, "Copy"), + (FILE_OPERATION_MOVE, "Move"), +) + + +class FileOperation(models.Model): + uuid = UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + operation = models.CharField(max_length=200, blank=False, null=False, choices=FILE_OPERATIONS) + task_id = models.TextField(blank=False, null=False) + creationdate = models.DateTimeField(auto_now=False, auto_now_add=True) + modifydate = models.DateTimeField(auto_now=True, auto_now_add=False) + archivefile = models.ForeignKey("ArchiveFile", blank=False, null=False, on_delete=models.DO_NOTHING) + origin_location = models.ForeignKey("FileLocation", blank=False, null=False, on_delete=models.DO_NOTHING, related_name="origin_location") + destination_location = models.ForeignKey("FileLocation", blank=True, null=True, on_delete=models.DO_NOTHING, related_name="destination_location") + origin = models.TextField(blank=False, null=False) + destination = models.TextField(blank=False, null=False) + + @property + def completed(self): + try: + # Check the task result + return fetch(self.task_id).success + except Exception as e: + logger.exception(f"Error: {e}", exc_info=True) + + return False class FileLocation(models.Model): creationdate = models.DateTimeField(auto_now=False, auto_now_add=True) @@ -101,7 +131,7 @@ class ArchiveFile(models.Model): def save(self, *args, **kwargs): # Check if the row with this hash 
already exists. if not self.pk and not self.expirationdate: - self.expirationdate = date.today() + timedelta(days=EXPIRATIONDATE) + self.expirationdate = date.today() + timedelta(days=EXPIRATIONDATE) super(ArchiveFile, self).save(*args, **kwargs) def setDefaultPerms(self, group, types): @@ -124,8 +154,8 @@ def setDefaultPerms(self, group, types): elif types == "DOWNLOADERS": assign_perm('download_archivefile', g, self) except Exception as e: - log.error("ERROR setperms %s %s %s" % (e, group, types)) - return + logger.error("ERROR setperms %s %s %s" % (e, group, types)) + return def removeDefaultPerms(self, group, types): try: @@ -147,8 +177,8 @@ def removeDefaultPerms(self, group, types): elif types == "DOWNLOADERS": remove_perm('download_archivefile', g, self) except Exception as e: - log.error("ERROR %s" % e) - return + logger.error("ERROR %s" % e) + return def setPerms(self, permissions): for types in GROUPTYPES: @@ -171,7 +201,7 @@ def get_permissions_display(self): if groupname not in grouplist: grouplist.append(groupname) except: - log.error("Error with %s" % g.name) + logger.error("Error with %s" % g.name) return grouplist def get_location(self, bucket): diff --git a/fileservice/filemaster/serializers.py b/fileservice/filemaster/serializers.py index 8b4dca6..1a429d5 100644 --- a/fileservice/filemaster/serializers.py +++ b/fileservice/filemaster/serializers.py @@ -14,7 +14,7 @@ from .models import CustomUser from .models import DownloadLog from .models import FileLocation - +from .models import FileOperation class WritableField(serializers.Field): def to_representation(self, value): @@ -46,6 +46,15 @@ class Meta: fields = ('id', 'url', 'uploadComplete', 'storagetype', 'filesize') +class FileOperationSerializer(serializers.ModelSerializer): + class Meta: + model = FileOperation + fields = ( + 'uuid', 'archivefile', 'creationdate', + 'modifydate', 'operation', 'completed', + ) + + class TokenSerializer(serializers.ModelSerializer): token = serializers.ReadOnlyField(source='key') class Meta: @@ -93,10 +102,10 @@ class ArchiveFileSimpleSerializer(TaggitSerializer, serializers.ModelSerializer) metadata = JSONFieldSerializer(required=False) permissions = serializers.ListField(read_only=True, source='get_permissions_display') expirationdate = serializers.DateField(required=False) - + class Meta: model = ArchiveFile - fields = ('id', 'uuid', 'description', 'metadata', 'tags', 'owner', 'filename', + fields = ('id', 'uuid', 'description', 'metadata', 'tags', 'owner', 'filename', 'locations', 'permissions', 'creationdate', 'modifydate', 'expirationdate') @@ -107,7 +116,7 @@ class ArchiveFileSerializer(ArchiveFileSimpleSerializer): class Meta: model = ArchiveFile lookup_field = 'uuid' - fields = ('id', 'uuid', 'description', 'metadata', 'tags', 'owner', 'filename', + fields = ('id', 'uuid', 'description', 'metadata', 'tags', 'owner', 'filename', 'locations', 'permissions', 'creationdate', 'modifydate', 'expirationdate') def get_locations_list(self, instance): diff --git a/fileservice/filemaster/urls.py b/fileservice/filemaster/urls.py index da1060f..f515ce3 100644 --- a/fileservice/filemaster/urls.py +++ b/fileservice/filemaster/urls.py @@ -17,6 +17,7 @@ from .files import DownloadLogList from .files import FileLocationList from .files import FileLocationDetail +from .files import FileOperationList from filemaster.realignment import CreateRealignedFile from filemaster.uploader import UploaderComplete, UploaderCheck, UploaderMetadata @@ -41,6 +42,8 @@ url(r'^api/location/(?P[^/]+)/?$', 
FileLocationDetail.as_view()), url(r'^api/file-detail/(?P[^/]+)/?$', ArchiveFileDetail.as_view()), url(r'^api/file-search/?$', ArchiveFileSearch.as_view()), + url(r'^api/file-operation/?$', FileOperationList.as_view()), + url(r'^api/file-operation/(?P[^/]+)/?$', FileOperationList.as_view()), url(r'^logout/?$', logout, name="logout"), url(r'^$', index, name="index"), ] diff --git a/fileservice/fileservice/settings.py b/fileservice/fileservice/settings.py index 1c67708..6155eac 100644 --- a/fileservice/fileservice/settings.py +++ b/fileservice/fileservice/settings.py @@ -202,6 +202,7 @@ 'health_check.db', 'dbmi_client', 'dbmi_client.login', + 'django_q', ) # Fixes duplicate errors in MYSQL @@ -352,6 +353,22 @@ # END AWS S3 CONFIGURATION +# DJANGO Q CONFIGURATION + +Q_CLUSTER = { + 'name': 'dbmi-fileservice', + 'workers': 8, + 'recycle': 500, + 'timeout': 3600, + 'retry': 7200, + 'compress': True, + 'save_limit': 250, + 'queue_limit': 500, + 'cpu_affinity': 1, + 'label': 'Django Q', + 'orm': 'default', + 'attempt_count': 1, +} # LOGGING CONFIGURATION diff --git a/requirements.in b/requirements.in index b644223..905ae54 100644 --- a/requirements.in +++ b/requirements.in @@ -9,8 +9,10 @@ django-filter<22.0 django-health-check<4.0 django-guardian<3.0 django-nose<2.0 +django-picklefield<3.1 django-taggit<2.0 django-taggit-serializer<2.0 +django-q<2.0 djangorestframework<4.0 djangorestframework-guardian<2.0 jsonfield<4.0 diff --git a/requirements.txt b/requirements.txt index db9af83..2df9078 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,17 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes --output-file=requirements.txt requirements.in # +arrow==1.2.2 \ + --hash=sha256:05caf1fd3d9a11a1135b2b6f09887421153b94558e5ef4d090b567b47173ac2b \ + --hash=sha256:d622c46ca681b5b3e3574fcb60a04e5cc81b9625112d5fb2b44220c36c892177 + # via django-q +blessed==1.19.1 \ + --hash=sha256:63b8554ae2e0e7f43749b6715c734cc8f3883010a809bf16790102563e6cf25b \ + --hash=sha256:9a0d099695bf621d4680dd6c73f6ad547f6a3442fbdbe80c4b1daa1edbc492fc + # via django-q boto==2.49.0 \ --hash=sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8 \ --hash=sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a # via -r requirements.in -boto3==1.24.52 \ - --hash=sha256:3e7664515a2228e695489600412644f59df4eb56202c5b4acab24f4d65e6e7c0 \ - --hash=sha256:95a1f54b5cf5e09b81f5ee79f3704977951605683fa1aafa86438bedd1f22507 +boto3==1.24.55 \ + --hash=sha256:4b620f55f3015c516a8f8063b02060a7bb9a763e10de3c0f3ec90102cdfa28db \ + --hash=sha256:9fe6c7c5019671cbea82f02dbaae7e743ec86187443ab5f333ebb3d3bef63dce # via -r requirements.in -botocore==1.27.52 \ - --hash=sha256:30b1f14dec9a58995d7921893beaf3ce2f3289658ea2e7449a900b0c58d154b5 \ - --hash=sha256:31d1379ceebcbb572f3040901d76b91e9147c3be6523957a5f4da26ac0ba8ff2 +botocore==1.27.55 \ + --hash=sha256:0b4a17e81c17845245c0e7a3fbf83753c7f6a5544b93dcf6e0fcc0f3f2156ab2 \ + --hash=sha256:929d6be4bdb33a693e6c8e06383dba76fa628bb72fdb1f9353fd13f5d115dd19 # via # boto3 # s3transfer @@ -137,6 +145,8 @@ django==2.2.28 \ # django-filter # django-guardian # django-health-check + # django-picklefield + # django-q # django-taggit # djangorestframework # djangorestframework-guardian @@ -177,6 +187,16 @@ django-nose==1.4.7 \ --hash=sha256:304adc447ee35b889b733d7106004f98aa401d8387ddcada5d4f2239d86790a9 \ --hash=sha256:a4885cd002d65fd2de96e2bb2563ef477c3fbe207009360c015fca5c3b5561b7 # via -r requirements.in +django-picklefield==3.0.1 \ + 
--hash=sha256:15ccba592ca953b9edf9532e64640329cd47b136b7f8f10f2939caa5f9ce4287 \ + --hash=sha256:3c702a54fde2d322fe5b2f39b8f78d9f655b8f77944ab26f703be6c0ed335a35 + # via + # -r requirements.in + # django-q +django-q==1.3.9 \ + --hash=sha256:1b74ce3a8931990b136903e3a7bc9b07243282a2b5355117246f05ed5d076e68 \ + --hash=sha256:5c6b4d530aa3aabf9c6aa57376da1ca2abf89a1562b77038b7a04e52a4a0a91b + # via -r requirements.in django-taggit==1.5.1 \ --hash=sha256:dfe9e9c10b5929132041de0c00093ef0072c73c2a97d0f74a818ae50fa77149a \ --hash=sha256:e5bb62891f458d55332e36a32e19c08d20142c43f74bc5656c803f8af25c084a @@ -248,6 +268,7 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via + # arrow # botocore # croniter pytz==2022.2.1 \ @@ -297,6 +318,10 @@ raven==6.10.0 \ # via # -r requirements.in # django-dbmi-client +redis==3.5.3 \ + --hash=sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2 \ + --hash=sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24 + # via django-q requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 @@ -311,6 +336,7 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via + # blessed # django-taggit-serializer # furl # orderedmultidict @@ -325,6 +351,10 @@ urllib3==1.26.11 \ # via # botocore # requests +wcwidth==0.2.5 \ + --hash=sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784 \ + --hash=sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83 + # via blessed wrapt==1.14.1 \ --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3 \ --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b \ @@ -393,7 +423,7 @@ wrapt==1.14.1 \ # via deprecated # The following packages are considered to be unsafe in a requirements file: -setuptools==65.0.1 \ - --hash=sha256:7a2e7e95c3bf33f356b4c59aee7a6848585c4219dd3e941e43cc117888f210e4 \ - --hash=sha256:c04a012ae3a1b2cc2aeed4893377b70ea61c6c143d0acceea16ec4b60de6e40d +setuptools==65.1.0 \ + --hash=sha256:10602cd0a6f5feab6656e9587f9075292ab777c5200f3bf00293ecd23d9f2788 \ + --hash=sha256:d2e010624c781b26ad6629a8de9832327cf853dea93894487979e55f9ad06857 # via django-axes
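
A rough client-side sketch of the workflow the new endpoints are intended for: kick off an asynccopy, then poll the file-operation endpoint until the Django Q task behind it reports success via the 'completed' field. The host, the api/file/ router prefix for the ViewSet actions, and the token Authorization header are assumptions about the deployment and will differ per instance; the polling itself relies only on the FileOperationList view and FileOperationSerializer added in this patch.

import time

import requests

# Assumed deployment values: adjust host, URL prefix and auth scheme to your instance
FILESERVICE = "https://fileservice.example.com/filemaster"
HEADERS = {"Authorization": "Token <fileservice-api-token>"}

file_uuid = "11111111-2222-3333-4444-555555555555"  # an existing ArchiveFile uuid

# Kick off the asynchronous copy; the view schedules awsCopyFile via async_task and
# responds 201 with the uuid of the FileOperation record it created
response = requests.post(
    f"{FILESERVICE}/api/file/{file_uuid}/asynccopy/",
    params={"from": "origin-bucket", "to": "destination-bucket"},
    headers=HEADERS,
)
response.raise_for_status()
operation_uuid = response.json()

# Poll the file-operation endpoint (filterable by uuid) until the underlying
# Django Q task reports success through the 'completed' serializer field
while True:
    poll = requests.get(
        f"{FILESERVICE}/api/file-operation/",
        params={"uuid": operation_uuid},
        headers=HEADERS,
    )
    poll.raise_for_status()
    payload = poll.json()
    # Works whether or not DRF pagination is enabled for this view
    operations = payload["results"] if isinstance(payload, dict) else payload
    if operations and operations[0]["completed"]:
        break
    time.sleep(10)

print(f"Operation {operation_uuid} completed")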