diff --git a/django_project/cplus_api/api_views/layer.py b/django_project/cplus_api/api_views/layer.py index 4d29dee..aa787f9 100644 --- a/django_project/cplus_api/api_views/layer.py +++ b/django_project/cplus_api/api_views/layer.py @@ -1,1043 +1,1130 @@ -import math -import os -from rest_framework.views import APIView -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticated, AllowAny -from rest_framework.parsers import MultiPartParser -from rest_framework.exceptions import PermissionDenied, ValidationError -from django.contrib.gis.geos import Polygon -from django.core.paginator import Paginator -from django.shortcuts import get_object_or_404 -from django.utils import timezone -from django.conf import settings -from drf_yasg import openapi -from drf_yasg.utils import swagger_auto_schema -from cplus_api.models.layer import ( - BaseLayer, InputLayer, input_layer_dir_path, - select_input_layer_storage, MultipartUpload, - TemporaryLayer -) -from cplus_api.models.profile import UserProfile -from cplus_api.serializers.layer import ( - InputLayerSerializer, - PaginatedInputLayerSerializer, - UploadLayerSerializer, - UpdateLayerInputSerializer, - FinishUploadLayerSerializer, - LAYER_SCHEMA_FIELDS, - InputLayerListSerializer -) -from cplus_api.serializers.common import ( - APIErrorSerializer, - NoContentSerializer -) -from cplus_api.utils.api_helper import ( - get_page_size, - LAYER_API_TAG, - PARAM_LAYER_UUID_IN_PATH, - get_presigned_url, - convert_size, - PARAMS_PAGINATION, - PARAM_BBOX_IN_QUERY, - get_multipart_presigned_urls, - complete_multipart_upload, - abort_multipart_upload, - clip_raster -) - - -def is_internal_user(user): - """Check if user has internal user role. 
- - :param user: user object - :type user: User - :return: True if user has internal role - :rtype: bool - """ - user_profile = UserProfile.objects.filter( - user=user - ).first() - if not user_profile: - return False - if not user_profile.role: - return False - return user_profile.role.name == 'Internal' - - -def validate_layer_access(input_layer: InputLayer, user): - """Validate if user can access input layer. - - :param input_layer: input layer object - :type input_layer: InputLayer - :param user: user object - :type user: User - :return: True if user has permission to access the layer - :rtype: bool - """ - if user.is_superuser: - return True - if input_layer.privacy_type == InputLayer.PrivacyTypes.COMMON: - return True - elif input_layer.privacy_type == InputLayer.PrivacyTypes.INTERNAL: - return is_internal_user(user) - return input_layer.owner == user - - -def validate_layer_manage(input_layer: InputLayer, user): - """Validate if user can manage(edit/delete) layer. - - :param input_layer: input layer object - :type input_layer: InputLayer - :param user: user object - :type user: User - :return: True if user has permission to manage the layer - :rtype: bool - """ - # Super user / owner / internal user can manage layer - return user.is_superuser or input_layer.owner == user \ - or is_internal_user(user) - - -def validate_bbox(bbox): - """Validate the bounding box format.""" - if not bbox: - raise ValidationError('Bounding box is required.') - bbox = bbox.replace(' ', '').split(',') - if len(bbox) != 4: - raise ValidationError('Bounding box must have 4 values.') - try: - bbox = [float(b) for b in bbox] - except ValueError: - raise ValidationError('Bounding box values must be numbers.') - return bbox - - -class LayerList(APIView): - """API to return available layers.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='layer-list', - tags=[LAYER_API_TAG], - manual_parameters=PARAMS_PAGINATION, - responses={ - 200: 
PaginatedInputLayerSerializer, - 400: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def get(self, request, *args, **kwargs): - page = int(request.GET.get('page', '1')) - page_size = get_page_size(request) - layers = InputLayer.objects.filter( - privacy_type=InputLayer.PrivacyTypes.COMMON - ).order_by('name') - if is_internal_user(request.user): - internal_layers = InputLayer.objects.filter( - privacy_type=InputLayer.PrivacyTypes.INTERNAL - ).order_by('name') - layers = layers.union(internal_layers) - private_layers = InputLayer.objects.filter( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - owner=request.user - ).order_by('name') - layers = layers.union(private_layers) - layers = layers.order_by('name') - # set pagination - paginator = Paginator(layers, page_size) - total_page = math.ceil(paginator.count / page_size) - if page > total_page: - output = [] - else: - paginated_entities = paginator.get_page(page) - output = ( - InputLayerSerializer( - paginated_entities, - many=True - ).data - ) - return Response(status=200, data={ - 'page': page, - 'total_page': total_page, - 'page_size': page_size, - 'results': output - }) - - -class DefaultLayerList(APIView): - """API to return default layers.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='layer-default-list', - tags=[LAYER_API_TAG], - responses={ - 200: InputLayerListSerializer, - 400: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def get(self, request, *args, **kwargs): - layers = InputLayer.objects.filter( - privacy_type=InputLayer.PrivacyTypes.COMMON - ).order_by('name') - return Response(status=200, data=( - InputLayerSerializer( - layers, many=True - ).data - )) - - -class BaseLayerUpload(APIView): - """Base class for layer upload.""" - - def validate_upload_access(self, privacy_type, user, - is_update=False, existing_layer=None): - is_valid = False - if user.is_superuser: - is_valid = True - if privacy_type == InputLayer.PrivacyTypes.PRIVATE: - if 
is_update: - is_valid = existing_layer.owner == user - else: - is_valid = True - elif privacy_type == InputLayer.PrivacyTypes.INTERNAL: - is_valid = is_internal_user(user) - if not is_valid: - err_msg = ( - f"You are not allowed to upload {privacy_type}" - " layer!" - ) - if is_update: - err_msg = ( - "You are not allowed to update this layer!" - ) - raise PermissionDenied(err_msg) - return True - - def save_input_layer(self, upload_param: UploadLayerSerializer, user): - input_layer: InputLayer = None - is_new = True - if upload_param.validated_data.get('uuid', None): - is_new = False - input_layer = get_object_or_404( - InputLayer, uuid=upload_param.validated_data['uuid']) - self.validate_upload_access( - upload_param.validated_data['privacy_type'], user, - True, input_layer) - input_layer.name = upload_param.validated_data['name'] - input_layer.created_on = timezone.now() - input_layer.owner = user - input_layer.layer_type = upload_param.validated_data['layer_type'] - input_layer.size = upload_param.validated_data['size'] - input_layer.component_type = ( - upload_param.validated_data['component_type'] - ) - input_layer.privacy_type = ( - upload_param.validated_data['privacy_type'] - ) - input_layer.client_id = upload_param.validated_data.get( - 'client_id', None) - input_layer.version = upload_param.validated_data.get( - 'version', - None - ) - input_layer.license = upload_param.validated_data.get( - 'license', - None - ) - input_layer.save(update_fields=[ - 'name', 'created_on', 'owner', 'layer_type', - 'size', 'component_type', 'privacy_type', - 'client_id', 'version', 'license' - ]) - else: - input_layer = InputLayer.objects.create( - name=upload_param.validated_data['name'], - created_on=timezone.now(), - owner=user, - layer_type=upload_param.validated_data['layer_type'], - size=upload_param.validated_data['size'], - component_type=upload_param.validated_data['component_type'], - privacy_type=upload_param.validated_data['privacy_type'], - 
client_id=upload_param.validated_data.get('client_id', None), - version=upload_param.validated_data.get('version', None), - license=upload_param.validated_data.get('license', None) - ) - return input_layer, is_new - - -class LayerUpload(BaseLayerUpload): - """API to upload layer file.""" - parser_classes = (MultiPartParser,) - layer_type_param = openapi.Parameter( - 'layer_type', openapi.IN_FORM, - description=( - 'Layer Type: 0 (Raster), 1 (Vector), -1 (Undefined)' - ), - type=openapi.TYPE_INTEGER, - enum=[ - BaseLayer.LayerTypes.RASTER, - BaseLayer.LayerTypes.VECTOR, - BaseLayer.LayerTypes.UNDEFINED - ], - default=BaseLayer.LayerTypes.RASTER, - required=True - ) - component_type_param = openapi.Parameter( - 'component_type', openapi.IN_FORM, - description=( - 'Component Type' - ), - type=openapi.TYPE_STRING, - enum=[ - InputLayer.ComponentTypes.NCS_CARBON, - InputLayer.ComponentTypes.NCS_PATHWAY, - InputLayer.ComponentTypes.PRIORITY_LAYER, - ], - required=True - ) - privacy_type_param = openapi.Parameter( - 'privacy_type', openapi.IN_FORM, - description=( - 'Privacy Type' - ), - type=openapi.TYPE_STRING, - enum=[ - InputLayer.PrivacyTypes.PRIVATE, - InputLayer.PrivacyTypes.INTERNAL, - InputLayer.PrivacyTypes.COMMON, - ], - default=InputLayer.PrivacyTypes.PRIVATE, - required=True - ) - client_id_param = openapi.Parameter( - 'client_id', openapi.IN_FORM, - description=( - 'ID given by the client' - ), - type=openapi.TYPE_STRING, - required=False - ) - layer_uuid_param = openapi.Parameter( - 'uuid', openapi.IN_FORM, - description=( - 'Layer UUID for updating existing layer' - ), - type=openapi.TYPE_STRING, - required=False - ) - layer_file_param = openapi.Parameter( - 'file', openapi.IN_FORM, - description=( - 'Raster layer file' - ), - type=openapi.TYPE_FILE, - required=True - ) - - @swagger_auto_schema( - operation_id='layer-upload', - tags=[LAYER_API_TAG], - manual_parameters=[ - layer_type_param, - component_type_param, - privacy_type_param, - client_id_param, - 
layer_uuid_param, - layer_file_param - ], - responses={ - 201: openapi.Schema( - description=( - 'Success Layer Upload' - ), - type=openapi.TYPE_OBJECT, - properties={ - 'uuid': openapi.Schema( - title='Layer UUID', - type=openapi.TYPE_STRING - ), - 'size': openapi.Schema( - title='Layer size', - type=openapi.TYPE_NUMBER - ), - 'name': openapi.Schema( - title='Layer name', - type=openapi.TYPE_STRING - ), - } - ), - 400: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def post(self, request, format=None): - file_obj = request.FILES['file'] - if file_obj is None: - raise ValidationError('Missing file object!') - request.data.update({ - 'name': file_obj.name, - 'size': file_obj.size - }) - upload_param = UploadLayerSerializer(data=request.data) - upload_param.is_valid(raise_exception=True) - # TODO: validations - # - layer_type - # - component_type - # - upload access - # - file type, max size (?) - self.validate_upload_access( - upload_param.validated_data['privacy_type'], request.user) - input_layer, _ = self.save_input_layer(upload_param, request.user) - input_layer.file.save(input_layer.name, file_obj, save=True) - input_layer.refresh_from_db() - if input_layer.name != input_layer.file.name: - input_layer.name = input_layer.file.name - input_layer.save(update_fields=['name']) - return Response(status=201, data={ - 'uuid': str(input_layer.uuid), - 'name': input_layer.name, - 'size': input_layer.size - }) - - -class LayerUploadStart(BaseLayerUpload): - """API to upload layer file direct to Minio.""" - - def generate_upload_url(self, input_layer: InputLayer, - number_of_parts=0): - storage_backend = select_input_layer_storage() - filename = input_layer.name - file_path = input_layer_dir_path(input_layer, filename) - available_name = storage_backend.get_available_name(file_path) - _, final_filename = os.path.split(available_name) - if input_layer.name != final_filename: - input_layer.name = final_filename - input_layer.save(update_fields=['name']) - results = 
[] - upload_id = None - if number_of_parts <= 1: - single_url = get_presigned_url(available_name) - if single_url: - results.append({ - 'part_number': 1, - 'url': single_url - }) - else: - upload_id, urls = get_multipart_presigned_urls( - available_name, number_of_parts - ) - if urls: - results.extend(urls) - # create MultipartUpload to store the upload_id - MultipartUpload.objects.create( - upload_id=upload_id, - input_layer_uuid=input_layer.uuid, - created_on=timezone.now(), - uploader=input_layer.owner, - parts=number_of_parts - ) - return results, upload_id - - @swagger_auto_schema( - operation_id='layer-upload-start', - tags=[LAYER_API_TAG], - manual_parameters=[], - request_body=UploadLayerSerializer, - responses={ - 200: openapi.Schema( - description=( - 'Success Start Layer Upload' - ), - type=openapi.TYPE_OBJECT, - properties={ - 'uuid': openapi.Schema( - title='Layer UUID', - type=openapi.TYPE_STRING - ), - 'upload_urls': openapi.Schema( - title='List of Upload Presigned URL', - type=openapi.TYPE_ARRAY, - items=openapi.Items( - type=openapi.TYPE_OBJECT, - title='Presigned URL item', - properties={ - 'part_number': openapi.Schema( - title='Part number for multipart upload', - type=openapi.TYPE_INTEGER - ), - 'url': openapi.Schema( - title='Presigned URL', - type=openapi.TYPE_STRING - ) - } - ) - ), - 'name': openapi.Schema( - title='Layer name', - type=openapi.TYPE_STRING - ), - 'multipart_upload_id': openapi.Schema( - title='Multipart Upload Id', - type=openapi.TYPE_STRING - ), - }, - example={ - "upload_url": [ - { - 'part_number': 1, - 'url': ( - "https://example.com/cplus/4/ncs_pathway" - "/layer.geojson" - ) - } - ] - } - ), - 400: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def post(self, request): - upload_param = UploadLayerSerializer(data=request.data) - upload_param.is_valid(raise_exception=True) - self.validate_upload_access( - upload_param.validated_data['privacy_type'], request.user) - input_layer, is_new = 
self.save_input_layer(upload_param, request.user) - if not is_new and input_layer.is_available(): - # delete existing file - input_layer.file = None - input_layer.save() - upload_urls, upload_id = self.generate_upload_url( - input_layer, upload_param.validated_data['number_of_parts']) - if len(upload_urls) == 0: - raise RuntimeError('Cannot generate upload url!') - return Response(status=201, data={ - 'uuid': str(input_layer.uuid), - 'upload_urls': upload_urls, - 'name': input_layer.name, - 'multipart_upload_id': upload_id - }) - - -class LayerUploadFinish(APIView): - """API to upload layer file.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='layer-upload-finish', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_LAYER_UUID_IN_PATH], - request_body=FinishUploadLayerSerializer, - responses={ - 200: openapi.Schema( - description=( - 'Success Upload' - ), - type=openapi.TYPE_OBJECT, - properties={ - 'uuid': openapi.Schema( - title='Layer UUID', - type=openapi.TYPE_STRING - ), - 'size': openapi.Schema( - title='Layer size', - type=openapi.TYPE_NUMBER - ), - 'name': openapi.Schema( - title='Layer name', - type=openapi.TYPE_STRING - ), - } - ), - 400: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def post(self, request, layer_uuid): - input_layer = get_object_or_404(InputLayer, uuid=layer_uuid) - # get filepath - file_path = input_layer_dir_path(input_layer, input_layer.name) - upload_param = FinishUploadLayerSerializer(data=request.data) - upload_param.is_valid(raise_exception=True) - multipart_upload_id = ( - upload_param.validated_data.get('multipart_upload_id', None) - ) - if multipart_upload_id: - # mark multipart as done - complete_multipart_upload( - file_path, - multipart_upload_id, - upload_param.validated_data['items'] - ) - # remove MultipartUpload when upload is completed - MultipartUpload.objects.filter( - upload_id=multipart_upload_id - ).delete() - storage_backend = select_input_layer_storage() - # validate 
filepath exists - if not storage_backend.exists(file_path): - raise ValidationError( - f'Layer file {input_layer.name} does not exist!') - # validate size match - storage_file_size = storage_backend.size(file_path) - if storage_file_size != input_layer.size: - raise ValidationError( - 'Uploaded layer file size missmatch: ' - f'{convert_size(storage_file_size)} ' - f'should be {convert_size(input_layer.size)}!' - ) - input_layer.file.name = file_path - input_layer.save(update_fields=['file']) - return Response(status=200, data={ - 'uuid': str(input_layer.uuid), - 'name': input_layer.name, - 'size': input_layer.size - }) - - -class LayerUploadAbort(APIView): - """API to abort multipart upload.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='layer-upload-abort', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_LAYER_UUID_IN_PATH], - request_body=FinishUploadLayerSerializer, - responses={ - 204: NoContentSerializer, - 400: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def post(self, request, layer_uuid): - input_layer = get_object_or_404(InputLayer, uuid=layer_uuid) - # get filepath - file_path = input_layer_dir_path(input_layer, input_layer.name) - upload_param = FinishUploadLayerSerializer(data=request.data) - upload_param.is_valid(raise_exception=True) - multipart_upload_id = ( - upload_param.validated_data.get('multipart_upload_id', None) - ) - if not multipart_upload_id: - raise ValidationError('Missing multipart_upload_id!') - parts = abort_multipart_upload(file_path, multipart_upload_id) - if parts == 0: - # if parts is 0, then can safely remove MultipartUpload - MultipartUpload.objects.filter( - upload_id=multipart_upload_id - ).delete() - input_layer.delete() - else: - # else cron job will check and do abort - MultipartUpload.objects.filter( - upload_id=multipart_upload_id - ).update( - is_aborted=True, - aborted_on=timezone.now() - ) - return Response(status=204) - - -class LayerDetail(APIView): - """APIs to 
fetch and remove layer file.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='layer-detail', - operation_description='API to fetch layer detail.', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_LAYER_UUID_IN_PATH], - responses={ - 200: InputLayerSerializer, - 400: APIErrorSerializer, - 403: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def get(self, request, *args, **kwargs): - layer_uuid = kwargs.get('layer_uuid') - input_layer = get_object_or_404( - InputLayer, uuid=layer_uuid) - if not validate_layer_access(input_layer, request.user): - raise PermissionDenied( - f"You are not allowed to access layer {layer_uuid}!") - return Response( - status=200, data=InputLayerSerializer(input_layer).data) - - @swagger_auto_schema( - operation_id='layer-remove', - operation_description='API to remove layer.', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_LAYER_UUID_IN_PATH], - responses={ - 204: NoContentSerializer, - 400: APIErrorSerializer, - 403: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def delete(self, request, *args, **kwargs): - layer_uuid = kwargs.get('layer_uuid') - input_layer = get_object_or_404( - InputLayer, uuid=layer_uuid) - if not validate_layer_manage(input_layer, request.user): - raise PermissionDenied( - f"You are not allowed to delete layer {layer_uuid}!" 
- ) - input_layer.delete() - return Response(status=204) - - @swagger_auto_schema( - operation_id='layer-update-partial', - operation_description='Partially Update InputLayer.', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_LAYER_UUID_IN_PATH], - request_body=UpdateLayerInputSerializer, - responses={ - 200: UpdateLayerInputSerializer, - 400: APIErrorSerializer, - 403: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def patch(self, request, *args, **kwargs): - layer_uuid = kwargs.get('layer_uuid') - input_layer = get_object_or_404( - InputLayer, uuid=layer_uuid) - if not validate_layer_manage(input_layer, request.user): - raise PermissionDenied( - f"You are not allowed to update layer {layer_uuid}!" - ) - - layer_param = UpdateLayerInputSerializer( - data=request.data, partial=True - ) - layer_param.is_valid(raise_exception=True) - update_fields = [] - for field, value in layer_param.validated_data.items(): - setattr(input_layer, field, value) - update_fields.append(field) - - input_layer.save(update_fields=update_fields) - return Response( - status=200, - data=InputLayerSerializer(input_layer).data - ) - - -class CheckLayer(APIView): - """API to check whether layer is ready by its identifier.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='check-layer', - operation_description='API to check whether layer is ready.', - tags=[LAYER_API_TAG], - manual_parameters=[ - openapi.Parameter( - 'id_type', openapi.IN_QUERY, - description='Type of layer id: client_id or layer_uuid', - type=openapi.TYPE_STRING, - required=False, - default='client_id', - enum=['client_id', 'layer_uuid'] - ) - ], - request_body=openapi.Schema( - title='List of layer id', - type=openapi.TYPE_ARRAY, - items=openapi.Items( - type=openapi.TYPE_STRING - ) - ), - responses={ - 200: openapi.Schema( - description=( - 'Check Layer Response' - ), - type=openapi.TYPE_OBJECT, - properties={ - 'available': openapi.Schema( - title='List of available layer', - 
type=openapi.TYPE_ARRAY, - items=openapi.Items( - type=openapi.TYPE_STRING - ) - ), - 'unavailable': openapi.Schema( - title='List of unavailable layer (missing file)', - type=openapi.TYPE_ARRAY, - items=openapi.Items( - type=openapi.TYPE_STRING - ) - ), - 'Invalid': openapi.Schema( - title='List of layer with invalid ID or inaccessible', - type=openapi.TYPE_ARRAY, - items=openapi.Items( - type=openapi.TYPE_STRING - ) - ) - } - ), - 400: APIErrorSerializer, - 403: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def post(self, request, *args, **kwargs): - id_type = request.GET.get('id_type', 'client_id') - filters = {} - if id_type == 'layer_uuid': - filters = { - 'uuid__in': request.data - } - else: - filters = { - 'client_id__in': request.data - } - layers = InputLayer.objects.filter( - **filters - ).order_by('name') - input_ids = set(request.data) - ids_found = set() - ids_available = set() - ids_not_available = set() - for layer in layers: - layer_id = ( - str(layer.uuid) if id_type == 'layer_uuid' else - layer.client_id - ) - if not validate_layer_access(layer, request.user): - continue - ids_found.add(layer_id) - if layer.is_available(): - ids_available.add(layer_id) - else: - ids_not_available.add(layer_id) - return Response(status=200, data={ - 'available': list(ids_available), - 'unavailable': list(ids_not_available), - 'invalid': list(input_ids - ids_found) - }) - - -class FetchLayerByClientId(APIView): - """API to fetch input layer by client id.""" - permission_classes = [IsAuthenticated] - - @swagger_auto_schema( - operation_id='fetch-layer-by-client-id', - operation_description='API to fetch input layer by client id.', - tags=[LAYER_API_TAG], - request_body=openapi.Schema( - title='List of client id', - type=openapi.TYPE_ARRAY, - items=openapi.Items( - type=openapi.TYPE_STRING - ) - ), - responses={ - 200: openapi.Schema( - description=( - 'Layer List' - ), - type=openapi.TYPE_ARRAY, - items=openapi.Items(**LAYER_SCHEMA_FIELDS), - ), - 400: 
APIErrorSerializer, - 403: APIErrorSerializer, - 404: APIErrorSerializer - } - ) - def post(self, request, *args, **kwargs): - layers = InputLayer.objects.filter( - client_id__in=request.data - ).order_by('name') - results = {} - for layer in layers: - if not validate_layer_access(layer, request.user): - continue - if layer.client_id not in results: - results[layer.client_id] = layer - elif not results[layer.client_id].is_available(): - results[layer.client_id] = layer - return Response(status=200, data=InputLayerSerializer( - list(results.values()), - many=True - ).data) - - -class ReferenceLayerDownload(APIView): - """APIs to fetch and remove layer file.""" - permission_classes = [AllowAny] - authentication_classes = [] - - @swagger_auto_schema( - operation_id='reference-layer-download', - operation_description='API to download and crop reference layer.', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_BBOX_IN_QUERY], - responses={ - 200: openapi.Response(description='Binary response'), - 404: APIErrorSerializer - } - ) - def get(self, request, *args, **kwargs): - from django.core.exceptions import MultipleObjectsReturned - try: - reference_layer = get_object_or_404( - InputLayer, - component_type=InputLayer.ComponentTypes.REFERENCE_LAYER - ) - except MultipleObjectsReturned: - reference_layer = InputLayer.objects.filter( - component_type=InputLayer.ComponentTypes.REFERENCE_LAYER - ).first() - if reference_layer.is_available(): - basename = os.path.basename(reference_layer.file.name) - file_path = os.path.join( - settings.TEMPORARY_LAYER_DIR, - 'reference_layer', - basename - ) - if not os.path.exists(file_path): - file_path = reference_layer.download_to_working_directory( - settings.TEMPORARY_LAYER_DIR - ) - x_accel_redirect = os.path.join('reference_layer', basename) - file_name = basename - - if 'bbox' in request.query_params: - bbox = validate_bbox(request.query_params.get('bbox')) - - # Calculate the width and height of the bounding box - width = bbox[2] 
- bbox[0] - height = bbox[3] - bbox[1] - - # Calculate 20% expansion - expand_width = width * 0.2 - expand_height = height * 0.2 - - # Create the expanded bounding box - expanded_bbox = ( - bbox[0] - expand_width / 2, # min_x - bbox[1] - expand_height / 2, # min_y - bbox[2] + expand_width / 2, # max_x - bbox[3] + expand_height / 2 # max_y - ) - - # Convert the expanded bounding box to a Polygon - expanded_polygon = Polygon.from_bbox(expanded_bbox) - - # Clip the raster - file_path = clip_raster( - file_path, - expanded_polygon.extent, - settings.TEMPORARY_LAYER_DIR - ) - - # Create temporary layer object - TemporaryLayer.objects.create( - file_name=os.path.basename(file_path), - size=os.path.getsize(file_path) - ) - file_name = os.path.basename(file_path) - x_accel_redirect = file_name - - # fix issue nginx unable to read file - os.chmod(file_path, 0o644) - response = Response(status=200) - response['Content-type'] = "application/octet-stream" - response['X-Accel-Redirect'] = ( - f'/userfiles/{x_accel_redirect}' - ) - response['Content-Disposition'] = ( - f'attachment; filename="{file_name}"' - ) - - return response - - return Response( - data={'detail': 'Reference layer is not available.'}, - status=404 - ) - - -class DefaultLayerDownload(APIView): - """API to crop and download priority layer.""" - permission_classes = [AllowAny] - authentication_classes = [] - - @swagger_auto_schema( - operation_id='default-priority-layer-download', - operation_description='API to crop and download priority layer.', - tags=[LAYER_API_TAG], - manual_parameters=[PARAM_LAYER_UUID_IN_PATH, PARAM_BBOX_IN_QUERY], - responses={ - 200: openapi.Response(description='Binary response'), - 404: APIErrorSerializer - } - ) - def get(self, request, *args, **kwargs): - layer_uuid = kwargs.get('layer_uuid') - default_layer = get_object_or_404( - InputLayer, - uuid=layer_uuid, - component_type=InputLayer.ComponentTypes.PRIORITY_LAYER - ) - if default_layer.is_available(): - basename = 
os.path.basename(default_layer.file.name) - file_path = os.path.join( - settings.TEMPORARY_LAYER_DIR, - 'default_layer', - basename - ) - if not os.path.exists(file_path): - file_path = default_layer.download_to_working_directory( - settings.TEMPORARY_LAYER_DIR - ) - x_accel_redirect = os.path.join('default_layer', basename) - file_name = basename - - if 'bbox' in request.query_params: - bbox = validate_bbox(request.query_params.get('bbox')) - - # Convert the bounding box to a Polygon - polygon = Polygon.from_bbox(bbox) - - # Clip the raster - file_path = clip_raster( - file_path, - polygon.extent, - settings.TEMPORARY_LAYER_DIR - ) - - # Create temporary layer object - TemporaryLayer.objects.create( - file_name=os.path.basename(file_path), - size=os.path.getsize(file_path) - ) - file_name = os.path.basename(file_path) - x_accel_redirect = file_name - - # fix issue nginx unable to read file - os.chmod(file_path, 0o644) - response = Response(status=200) - response['Content-type'] = "application/octet-stream" - response['X-Accel-Redirect'] = ( - f'/userfiles/{x_accel_redirect}' - ) - response['Content-Disposition'] = ( - f'attachment; filename="{file_name}"' - ) - - return response - - return Response( - data={'detail': 'Default layer is not available.'}, - status=404 - ) +import math +import os +from rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.permissions import IsAuthenticated, AllowAny +from rest_framework.parsers import MultiPartParser +from rest_framework.exceptions import PermissionDenied, ValidationError +from django.contrib.gis.geos import Polygon +from django.core.paginator import Paginator +from django.shortcuts import get_object_or_404 +from django.utils import timezone +from django.conf import settings +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema +from cplus_api.models.layer import ( + BaseLayer, InputLayer, input_layer_dir_path, + select_input_layer_storage, 
MultipartUpload, + TemporaryLayer +) +from cplus_api.models.profile import UserProfile +from cplus_api.serializers.layer import ( + InputLayerSerializer, + PaginatedInputLayerSerializer, + UploadLayerSerializer, + UpdateLayerInputSerializer, + FinishUploadLayerSerializer, + LAYER_SCHEMA_FIELDS, + InputLayerListSerializer +) +from cplus_api.serializers.common import ( + APIErrorSerializer, + NoContentSerializer +) +from cplus_api.utils.api_helper import ( + get_page_size, + LAYER_API_TAG, + PARAM_LAYER_UUID_IN_PATH, + get_presigned_url, + convert_size, + PARAMS_PAGINATION, + PARAM_BBOX_IN_QUERY, + get_multipart_presigned_urls, + complete_multipart_upload, + abort_multipart_upload, + clip_raster +) + + +def is_internal_user(user): + """Check if user has internal user role. + + :param user: user object + :type user: User + :return: True if user has internal role + :rtype: bool + """ + user_profile = UserProfile.objects.filter( + user=user + ).first() + if not user_profile: + return False + if not user_profile.role: + return False + return user_profile.role.name == 'Internal' + + +def validate_layer_access(input_layer: InputLayer, user): + """Validate if user can access input layer. + + :param input_layer: input layer object + :type input_layer: InputLayer + :param user: user object + :type user: User + :return: True if user has permission to access the layer + :rtype: bool + """ + if user.is_superuser: + return True + if input_layer.privacy_type == InputLayer.PrivacyTypes.COMMON: + return True + elif input_layer.privacy_type == InputLayer.PrivacyTypes.INTERNAL: + return is_internal_user(user) + return input_layer.owner == user + + +def validate_layer_manage(input_layer: InputLayer, user): + """Validate if user can manage(edit/delete) layer. 
+ + :param input_layer: input layer object + :type input_layer: InputLayer + :param user: user object + :type user: User + :return: True if user has permission to manage the layer + :rtype: bool + """ + # Super user / owner / internal user can manage layer + return user.is_superuser or input_layer.owner == user \ + or is_internal_user(user) + + +def validate_bbox(bbox): + """Validate the bounding box format.""" + if not bbox: + raise ValidationError('Bounding box is required.') + bbox = bbox.replace(' ', '').split(',') + if len(bbox) != 4: + raise ValidationError('Bounding box must have 4 values.') + try: + bbox = [float(b) for b in bbox] + except ValueError: + raise ValidationError('Bounding box values must be numbers.') + return bbox + + +class LayerList(APIView): + """API to return available layers.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='layer-list', + tags=[LAYER_API_TAG], + manual_parameters=PARAMS_PAGINATION, + responses={ + 200: PaginatedInputLayerSerializer, + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def get(self, request, *args, **kwargs): + page = int(request.GET.get('page', '1')) + page_size = get_page_size(request) + layers = InputLayer.objects.filter( + privacy_type=InputLayer.PrivacyTypes.COMMON + ).order_by('name') + if is_internal_user(request.user): + internal_layers = InputLayer.objects.filter( + privacy_type=InputLayer.PrivacyTypes.INTERNAL + ).order_by('name') + layers = layers.union(internal_layers) + private_layers = InputLayer.objects.filter( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + owner=request.user + ).order_by('name') + layers = layers.union(private_layers) + layers = layers.order_by('name') + # set pagination + paginator = Paginator(layers, page_size) + total_page = math.ceil(paginator.count / page_size) + if page > total_page: + output = [] + else: + paginated_entities = paginator.get_page(page) + output = ( + InputLayerSerializer( + paginated_entities, + 
many=True + ).data + ) + return Response(status=200, data={ + 'page': page, + 'total_page': total_page, + 'page_size': page_size, + 'results': output + }) + + +class DefaultLayerList(APIView): + """API to return default layers.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='layer-default-list', + tags=[LAYER_API_TAG], + responses={ + 200: InputLayerListSerializer, + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def get(self, request, *args, **kwargs): + layers = InputLayer.objects.filter( + privacy_type=InputLayer.PrivacyTypes.COMMON + ).order_by('name') + return Response(status=200, data=( + InputLayerSerializer( + layers, many=True + ).data + )) + + +class BaseLayerUpload(APIView): + """Base class for layer upload.""" + + def validate_upload_access(self, privacy_type, user, + is_update=False, existing_layer=None): + is_valid = False + if user.is_superuser: + is_valid = True + if privacy_type == InputLayer.PrivacyTypes.PRIVATE: + if is_update: + is_valid = existing_layer.owner == user + else: + is_valid = True + elif privacy_type == InputLayer.PrivacyTypes.INTERNAL: + is_valid = is_internal_user(user) + if not is_valid: + err_msg = ( + f"You are not allowed to upload {privacy_type}" + " layer!" + ) + if is_update: + err_msg = ( + "You are not allowed to update this layer!" 
+ ) + raise PermissionDenied(err_msg) + return True + + def save_input_layer(self, upload_param: UploadLayerSerializer, user): + input_layer: InputLayer = None + is_new = True + if upload_param.validated_data.get('uuid', None): + is_new = False + input_layer = get_object_or_404( + InputLayer, uuid=upload_param.validated_data['uuid']) + self.validate_upload_access( + upload_param.validated_data['privacy_type'], user, + True, input_layer) + input_layer.name = upload_param.validated_data['name'] + input_layer.created_on = timezone.now() + input_layer.owner = user + input_layer.layer_type = upload_param.validated_data['layer_type'] + input_layer.size = upload_param.validated_data['size'] + input_layer.component_type = ( + upload_param.validated_data['component_type'] + ) + input_layer.privacy_type = ( + upload_param.validated_data['privacy_type'] + ) + input_layer.client_id = upload_param.validated_data.get( + 'client_id', None) + input_layer.version = upload_param.validated_data.get( + 'version', + None + ) + input_layer.license = upload_param.validated_data.get( + 'license', + None + ) + input_layer.save(update_fields=[ + 'name', 'created_on', 'owner', 'layer_type', + 'size', 'component_type', 'privacy_type', + 'client_id', 'version', 'license' + ]) + else: + input_layer = InputLayer.objects.create( + name=upload_param.validated_data['name'], + created_on=timezone.now(), + owner=user, + layer_type=upload_param.validated_data['layer_type'], + size=upload_param.validated_data['size'], + component_type=upload_param.validated_data['component_type'], + privacy_type=upload_param.validated_data['privacy_type'], + client_id=upload_param.validated_data.get('client_id', None), + version=upload_param.validated_data.get('version', None), + license=upload_param.validated_data.get('license', None) + ) + return input_layer, is_new + + +class LayerUpload(BaseLayerUpload): + """API to upload layer file.""" + parser_classes = (MultiPartParser,) + layer_type_param = openapi.Parameter( 
+ 'layer_type', openapi.IN_FORM, + description=( + 'Layer Type: 0 (Raster), 1 (Vector), -1 (Undefined)' + ), + type=openapi.TYPE_INTEGER, + enum=[ + BaseLayer.LayerTypes.RASTER, + BaseLayer.LayerTypes.VECTOR, + BaseLayer.LayerTypes.UNDEFINED + ], + default=BaseLayer.LayerTypes.RASTER, + required=True + ) + component_type_param = openapi.Parameter( + 'component_type', openapi.IN_FORM, + description=( + 'Component Type' + ), + type=openapi.TYPE_STRING, + enum=[ + InputLayer.ComponentTypes.NCS_CARBON, + InputLayer.ComponentTypes.NCS_PATHWAY, + InputLayer.ComponentTypes.PRIORITY_LAYER, + ], + required=True + ) + privacy_type_param = openapi.Parameter( + 'privacy_type', openapi.IN_FORM, + description=( + 'Privacy Type' + ), + type=openapi.TYPE_STRING, + enum=[ + InputLayer.PrivacyTypes.PRIVATE, + InputLayer.PrivacyTypes.INTERNAL, + InputLayer.PrivacyTypes.COMMON, + ], + default=InputLayer.PrivacyTypes.PRIVATE, + required=True + ) + client_id_param = openapi.Parameter( + 'client_id', openapi.IN_FORM, + description=( + 'ID given by the client' + ), + type=openapi.TYPE_STRING, + required=False + ) + layer_uuid_param = openapi.Parameter( + 'uuid', openapi.IN_FORM, + description=( + 'Layer UUID for updating existing layer' + ), + type=openapi.TYPE_STRING, + required=False + ) + layer_file_param = openapi.Parameter( + 'file', openapi.IN_FORM, + description=( + 'Raster layer file' + ), + type=openapi.TYPE_FILE, + required=True + ) + + @swagger_auto_schema( + operation_id='layer-upload', + tags=[LAYER_API_TAG], + manual_parameters=[ + layer_type_param, + component_type_param, + privacy_type_param, + client_id_param, + layer_uuid_param, + layer_file_param + ], + responses={ + 201: openapi.Schema( + description=( + 'Success Layer Upload' + ), + type=openapi.TYPE_OBJECT, + properties={ + 'uuid': openapi.Schema( + title='Layer UUID', + type=openapi.TYPE_STRING + ), + 'size': openapi.Schema( + title='Layer size', + type=openapi.TYPE_NUMBER + ), + 'name': openapi.Schema( + 
title='Layer name', + type=openapi.TYPE_STRING + ), + } + ), + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def post(self, request, format=None): + file_obj = request.FILES['file'] + if file_obj is None: + raise ValidationError('Missing file object!') + request.data.update({ + 'name': file_obj.name, + 'size': file_obj.size + }) + upload_param = UploadLayerSerializer(data=request.data) + upload_param.is_valid(raise_exception=True) + # TODO: validations + # - layer_type + # - component_type + # - upload access + # - file type, max size (?) + self.validate_upload_access( + upload_param.validated_data['privacy_type'], request.user) + input_layer, _ = self.save_input_layer(upload_param, request.user) + input_layer.file.save(input_layer.name, file_obj, save=True) + input_layer.refresh_from_db() + if input_layer.name != input_layer.file.name: + input_layer.name = input_layer.file.name + input_layer.save(update_fields=['name']) + return Response(status=201, data={ + 'uuid': str(input_layer.uuid), + 'name': input_layer.name, + 'size': input_layer.size + }) + + +class LayerUploadStart(BaseLayerUpload): + """API to upload layer file direct to Minio.""" + + def generate_upload_url(self, input_layer: InputLayer, + number_of_parts=0): + storage_backend = select_input_layer_storage() + filename = input_layer.name + file_path = input_layer_dir_path(input_layer, filename) + available_name = storage_backend.get_available_name(file_path) + _, final_filename = os.path.split(available_name) + if input_layer.name != final_filename: + input_layer.name = final_filename + input_layer.save(update_fields=['name']) + results = [] + upload_id = None + if number_of_parts <= 1: + single_url = get_presigned_url(available_name) + if single_url: + results.append({ + 'part_number': 1, + 'url': single_url + }) + else: + upload_id, urls = get_multipart_presigned_urls( + available_name, number_of_parts + ) + if urls: + results.extend(urls) + # create MultipartUpload to store the 
upload_id + MultipartUpload.objects.create( + upload_id=upload_id, + input_layer_uuid=input_layer.uuid, + created_on=timezone.now(), + uploader=input_layer.owner, + parts=number_of_parts + ) + return results, upload_id + + @swagger_auto_schema( + operation_id='layer-upload-start', + tags=[LAYER_API_TAG], + manual_parameters=[], + request_body=UploadLayerSerializer, + responses={ + 200: openapi.Schema( + description=( + 'Success Start Layer Upload' + ), + type=openapi.TYPE_OBJECT, + properties={ + 'uuid': openapi.Schema( + title='Layer UUID', + type=openapi.TYPE_STRING + ), + 'upload_urls': openapi.Schema( + title='List of Upload Presigned URL', + type=openapi.TYPE_ARRAY, + items=openapi.Items( + type=openapi.TYPE_OBJECT, + title='Presigned URL item', + properties={ + 'part_number': openapi.Schema( + title='Part number for multipart upload', + type=openapi.TYPE_INTEGER + ), + 'url': openapi.Schema( + title='Presigned URL', + type=openapi.TYPE_STRING + ) + } + ) + ), + 'name': openapi.Schema( + title='Layer name', + type=openapi.TYPE_STRING + ), + 'multipart_upload_id': openapi.Schema( + title='Multipart Upload Id', + type=openapi.TYPE_STRING + ), + }, + example={ + "upload_url": [ + { + 'part_number': 1, + 'url': ( + "https://example.com/cplus/4/ncs_pathway" + "/layer.geojson" + ) + } + ] + } + ), + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def post(self, request): + upload_param = UploadLayerSerializer(data=request.data) + upload_param.is_valid(raise_exception=True) + self.validate_upload_access( + upload_param.validated_data['privacy_type'], request.user) + input_layer, is_new = self.save_input_layer(upload_param, request.user) + if not is_new and input_layer.is_available(): + # delete existing file + input_layer.file = None + input_layer.save() + upload_urls, upload_id = self.generate_upload_url( + input_layer, upload_param.validated_data['number_of_parts']) + if len(upload_urls) == 0: + raise RuntimeError('Cannot generate upload url!') + 
return Response(status=201, data={ + 'uuid': str(input_layer.uuid), + 'upload_urls': upload_urls, + 'name': input_layer.name, + 'multipart_upload_id': upload_id + }) + + +class LayerUploadFinish(APIView): + """API to upload layer file.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='layer-upload-finish', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_LAYER_UUID_IN_PATH], + request_body=FinishUploadLayerSerializer, + responses={ + 200: openapi.Schema( + description=( + 'Success Upload' + ), + type=openapi.TYPE_OBJECT, + properties={ + 'uuid': openapi.Schema( + title='Layer UUID', + type=openapi.TYPE_STRING + ), + 'size': openapi.Schema( + title='Layer size', + type=openapi.TYPE_NUMBER + ), + 'name': openapi.Schema( + title='Layer name', + type=openapi.TYPE_STRING + ), + } + ), + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def post(self, request, layer_uuid): + input_layer = get_object_or_404(InputLayer, uuid=layer_uuid) + # get filepath + file_path = input_layer_dir_path(input_layer, input_layer.name) + upload_param = FinishUploadLayerSerializer(data=request.data) + upload_param.is_valid(raise_exception=True) + multipart_upload_id = ( + upload_param.validated_data.get('multipart_upload_id', None) + ) + if multipart_upload_id: + # mark multipart as done + complete_multipart_upload( + file_path, + multipart_upload_id, + upload_param.validated_data['items'] + ) + # remove MultipartUpload when upload is completed + MultipartUpload.objects.filter( + upload_id=multipart_upload_id + ).delete() + storage_backend = select_input_layer_storage() + # validate filepath exists + if not storage_backend.exists(file_path): + raise ValidationError( + f'Layer file {input_layer.name} does not exist!') + # validate size match + storage_file_size = storage_backend.size(file_path) + if storage_file_size != input_layer.size: + raise ValidationError( + 'Uploaded layer file size missmatch: ' + f'{convert_size(storage_file_size)} ' + 
f'should be {convert_size(input_layer.size)}!' + ) + input_layer.file.name = file_path + input_layer.save(update_fields=['file']) + return Response(status=200, data={ + 'uuid': str(input_layer.uuid), + 'name': input_layer.name, + 'size': input_layer.size + }) + + +class LayerUploadAbort(APIView): + """API to abort multipart upload.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='layer-upload-abort', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_LAYER_UUID_IN_PATH], + request_body=FinishUploadLayerSerializer, + responses={ + 204: NoContentSerializer, + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def post(self, request, layer_uuid): + input_layer = get_object_or_404(InputLayer, uuid=layer_uuid) + # get filepath + file_path = input_layer_dir_path(input_layer, input_layer.name) + upload_param = FinishUploadLayerSerializer(data=request.data) + upload_param.is_valid(raise_exception=True) + multipart_upload_id = ( + upload_param.validated_data.get('multipart_upload_id', None) + ) + if not multipart_upload_id: + raise ValidationError('Missing multipart_upload_id!') + parts = abort_multipart_upload(file_path, multipart_upload_id) + if parts == 0: + # if parts is 0, then can safely remove MultipartUpload + MultipartUpload.objects.filter( + upload_id=multipart_upload_id + ).delete() + input_layer.delete() + else: + # else cron job will check and do abort + MultipartUpload.objects.filter( + upload_id=multipart_upload_id + ).update( + is_aborted=True, + aborted_on=timezone.now() + ) + return Response(status=204) + + +class LayerDetail(APIView): + """APIs to fetch and remove layer file.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='layer-detail', + operation_description='API to fetch layer detail.', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_LAYER_UUID_IN_PATH], + responses={ + 200: InputLayerSerializer, + 400: APIErrorSerializer, + 403: APIErrorSerializer, + 404: 
APIErrorSerializer + } + ) + def get(self, request, *args, **kwargs): + layer_uuid = kwargs.get('layer_uuid') + input_layer = get_object_or_404( + InputLayer, uuid=layer_uuid) + if not validate_layer_access(input_layer, request.user): + raise PermissionDenied( + f"You are not allowed to access layer {layer_uuid}!") + return Response( + status=200, data=InputLayerSerializer(input_layer).data) + + @swagger_auto_schema( + operation_id='layer-remove', + operation_description='API to remove layer.', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_LAYER_UUID_IN_PATH], + responses={ + 204: NoContentSerializer, + 400: APIErrorSerializer, + 403: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def delete(self, request, *args, **kwargs): + layer_uuid = kwargs.get('layer_uuid') + input_layer = get_object_or_404( + InputLayer, uuid=layer_uuid) + if not validate_layer_manage(input_layer, request.user): + raise PermissionDenied( + f"You are not allowed to delete layer {layer_uuid}!" + ) + input_layer.delete() + return Response(status=204) + + @swagger_auto_schema( + operation_id='layer-update-partial', + operation_description='Partially Update InputLayer.', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_LAYER_UUID_IN_PATH], + request_body=UpdateLayerInputSerializer, + responses={ + 200: UpdateLayerInputSerializer, + 400: APIErrorSerializer, + 403: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def patch(self, request, *args, **kwargs): + layer_uuid = kwargs.get('layer_uuid') + input_layer = get_object_or_404( + InputLayer, uuid=layer_uuid) + if not validate_layer_manage(input_layer, request.user): + raise PermissionDenied( + f"You are not allowed to update layer {layer_uuid}!" 
+ ) + + layer_param = UpdateLayerInputSerializer( + data=request.data, partial=True + ) + layer_param.is_valid(raise_exception=True) + update_fields = [] + for field, value in layer_param.validated_data.items(): + setattr(input_layer, field, value) + update_fields.append(field) + + input_layer.save(update_fields=update_fields) + return Response( + status=200, + data=InputLayerSerializer(input_layer).data + ) + + +class CheckLayer(APIView): + """API to check whether layer is ready by its identifier.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='check-layer', + operation_description='API to check whether layer is ready.', + tags=[LAYER_API_TAG], + manual_parameters=[ + openapi.Parameter( + 'id_type', openapi.IN_QUERY, + description='Type of layer id: client_id or layer_uuid', + type=openapi.TYPE_STRING, + required=False, + default='client_id', + enum=['client_id', 'layer_uuid'] + ) + ], + request_body=openapi.Schema( + title='List of layer id', + type=openapi.TYPE_ARRAY, + items=openapi.Items( + type=openapi.TYPE_STRING + ) + ), + responses={ + 200: openapi.Schema( + description=( + 'Check Layer Response' + ), + type=openapi.TYPE_OBJECT, + properties={ + 'available': openapi.Schema( + title='List of available layer', + type=openapi.TYPE_ARRAY, + items=openapi.Items( + type=openapi.TYPE_STRING + ) + ), + 'unavailable': openapi.Schema( + title='List of unavailable layer (missing file)', + type=openapi.TYPE_ARRAY, + items=openapi.Items( + type=openapi.TYPE_STRING + ) + ), + 'Invalid': openapi.Schema( + title='List of layer with invalid ID or inaccessible', + type=openapi.TYPE_ARRAY, + items=openapi.Items( + type=openapi.TYPE_STRING + ) + ) + } + ), + 400: APIErrorSerializer, + 403: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def post(self, request, *args, **kwargs): + id_type = request.GET.get('id_type', 'client_id') + filters = {} + if id_type == 'layer_uuid': + filters = { + 'uuid__in': request.data + } + else: + 
filters = { + 'client_id__in': request.data + } + layers = InputLayer.objects.filter( + **filters + ).order_by('name') + input_ids = set(request.data) + ids_found = set() + ids_available = set() + ids_not_available = set() + for layer in layers: + layer_id = ( + str(layer.uuid) if id_type == 'layer_uuid' else + layer.client_id + ) + if not validate_layer_access(layer, request.user): + continue + ids_found.add(layer_id) + if layer.is_available(): + ids_available.add(layer_id) + else: + ids_not_available.add(layer_id) + return Response(status=200, data={ + 'available': list(ids_available), + 'unavailable': list(ids_not_available), + 'invalid': list(input_ids - ids_found) + }) + + +class FetchLayerByClientId(APIView): + """API to fetch input layer by client id.""" + permission_classes = [IsAuthenticated] + + @swagger_auto_schema( + operation_id='fetch-layer-by-client-id', + operation_description='API to fetch input layer by client id.', + tags=[LAYER_API_TAG], + request_body=openapi.Schema( + title='List of client id', + type=openapi.TYPE_ARRAY, + items=openapi.Items( + type=openapi.TYPE_STRING + ) + ), + responses={ + 200: openapi.Schema( + description=( + 'Layer List' + ), + type=openapi.TYPE_ARRAY, + items=openapi.Items(**LAYER_SCHEMA_FIELDS), + ), + 400: APIErrorSerializer, + 403: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def post(self, request, *args, **kwargs): + layers = InputLayer.objects.filter( + client_id__in=request.data + ).order_by('name') + results = {} + for layer in layers: + if not validate_layer_access(layer, request.user): + continue + if layer.client_id not in results: + results[layer.client_id] = layer + elif not results[layer.client_id].is_available(): + results[layer.client_id] = layer + return Response(status=200, data=InputLayerSerializer( + list(results.values()), + many=True + ).data) + + +class ReferenceLayerDownload(APIView): + """APIs to fetch and remove layer file.""" + permission_classes = [AllowAny] + 
authentication_classes = [] + + @swagger_auto_schema( + operation_id='reference-layer-download', + operation_description='API to download and crop reference layer.', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_BBOX_IN_QUERY], + responses={ + 200: openapi.Response(description='Binary response'), + 404: APIErrorSerializer + } + ) + def get(self, request, *args, **kwargs): + from django.core.exceptions import MultipleObjectsReturned + try: + reference_layer = get_object_or_404( + InputLayer, + component_type=InputLayer.ComponentTypes.REFERENCE_LAYER + ) + except MultipleObjectsReturned: + reference_layer = InputLayer.objects.filter( + component_type=InputLayer.ComponentTypes.REFERENCE_LAYER + ).first() + if reference_layer.is_available(): + basename = os.path.basename(reference_layer.file.name) + file_path = os.path.join( + settings.TEMPORARY_LAYER_DIR, + 'reference_layer', + basename + ) + if not os.path.exists(file_path): + file_path = reference_layer.download_to_working_directory( + settings.TEMPORARY_LAYER_DIR + ) + x_accel_redirect = os.path.join('reference_layer', basename) + file_name = basename + + if 'bbox' in request.query_params: + bbox = validate_bbox(request.query_params.get('bbox')) + + # Calculate the width and height of the bounding box + width = bbox[2] - bbox[0] + height = bbox[3] - bbox[1] + + # Calculate 20% expansion + expand_width = width * 0.2 + expand_height = height * 0.2 + + # Create the expanded bounding box + expanded_bbox = ( + bbox[0] - expand_width / 2, # min_x + bbox[1] - expand_height / 2, # min_y + bbox[2] + expand_width / 2, # max_x + bbox[3] + expand_height / 2 # max_y + ) + + # Convert the expanded bounding box to a Polygon + expanded_polygon = Polygon.from_bbox(expanded_bbox) + + # Clip the raster + file_path = clip_raster( + file_path, + expanded_polygon.extent, + settings.TEMPORARY_LAYER_DIR + ) + + # Create temporary layer object + TemporaryLayer.objects.create( + file_name=os.path.basename(file_path), + 
size=os.path.getsize(file_path) + ) + file_name = os.path.basename(file_path) + x_accel_redirect = file_name + + # fix issue nginx unable to read file + os.chmod(file_path, 0o644) + response = Response(status=200) + response['Content-type'] = "application/octet-stream" + response['X-Accel-Redirect'] = ( + f'/userfiles/{x_accel_redirect}' + ) + response['Content-Disposition'] = ( + f'attachment; filename="{file_name}"' + ) + + return response + + return Response( + data={'detail': 'Reference layer is not available.'}, + status=404 + ) + + +class DefaultLayerDownload(APIView): + """API to crop and download priority layer.""" + permission_classes = [AllowAny] + authentication_classes = [] + + @swagger_auto_schema( + operation_id='default-priority-layer-download', + operation_description='API to crop and download priority layer.', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_LAYER_UUID_IN_PATH, PARAM_BBOX_IN_QUERY], + responses={ + 200: openapi.Response(description='Binary response'), + 404: APIErrorSerializer + } + ) + def get(self, request, *args, **kwargs): + layer_uuid = kwargs.get('layer_uuid') + default_layer = get_object_or_404( + InputLayer, + uuid=layer_uuid, + component_type=InputLayer.ComponentTypes.PRIORITY_LAYER + ) + if default_layer.is_available(): + basename = os.path.basename(default_layer.file.name) + file_path = os.path.join( + settings.TEMPORARY_LAYER_DIR, + 'default_layer', + basename + ) + if not os.path.exists(file_path): + file_path = default_layer.download_to_working_directory( + settings.TEMPORARY_LAYER_DIR + ) + x_accel_redirect = os.path.join('default_layer', basename) + file_name = basename + + if 'bbox' in request.query_params: + bbox = validate_bbox(request.query_params.get('bbox')) + + # Convert the bounding box to a Polygon + polygon = Polygon.from_bbox(bbox) + + # Clip the raster + file_path = clip_raster( + file_path, + polygon.extent, + settings.TEMPORARY_LAYER_DIR + ) + + # Create temporary layer object + 
TemporaryLayer.objects.create( + file_name=os.path.basename(file_path), + size=os.path.getsize(file_path) + ) + file_name = os.path.basename(file_path) + x_accel_redirect = file_name + + # fix issue nginx unable to read file + os.chmod(file_path, 0o644) + response = Response(status=200) + response['Content-type'] = "application/octet-stream" + response['X-Accel-Redirect'] = ( + f'/userfiles/{x_accel_redirect}' + ) + response['Content-Disposition'] = ( + f'attachment; filename="{file_name}"' + ) + + return response + + return Response( + data={'detail': 'Default layer is not available.'}, + status=404 + ) + + +class StoredCarbonDownload(APIView): + """API to clip and download the stored carbon layer.""" + permission_classes = [AllowAny] + authentication_classes = [] + + @swagger_auto_schema( + operation_id='stored-carbon-download', + operation_description='Clip and download the stored carbon dataset.', + tags=[LAYER_API_TAG], + manual_parameters=[PARAM_BBOX_IN_QUERY], + responses={ + 200: openapi.Response(description='Binary response'), + 400: APIErrorSerializer, + 404: APIErrorSerializer + } + ) + def get(self, request, *args, **kwargs): + from django.core.exceptions import MultipleObjectsReturned + + if 'bbox' not in request.query_params: + return Response( + data={'detail': 'Query parameter "bbox" is required.'}, + status=400 + ) + + try: + stored_layer = get_object_or_404( + InputLayer, + component_type=InputLayer.ComponentTypes.STORED_CARBON + ) + except MultipleObjectsReturned: + stored_layer = InputLayer.objects.filter( + component_type=InputLayer.ComponentTypes.STORED_CARBON + ).first() + + if not stored_layer or not stored_layer.is_available(): + return Response( + data={'detail': 'Stored carbon layer is not available.'}, + status=404 + ) + + bbox = validate_bbox(request.query_params.get('bbox')) + + basename = os.path.basename(stored_layer.file.name) + base_dir = os.path.join( + settings.TEMPORARY_LAYER_DIR, + 'stored_carbon_layer' + ) + 
os.makedirs(base_dir, exist_ok=True) + + file_path = os.path.join(base_dir, basename) + if not os.path.exists(file_path): + file_path = stored_layer.download_to_working_directory( + settings.TEMPORARY_LAYER_DIR + ) + + file_path = clip_raster( + file_path=file_path, + bbox=bbox, + temp_dir=settings.TEMPORARY_LAYER_DIR + ) + + TemporaryLayer.objects.create( + file_name=os.path.basename(file_path), + size=os.path.getsize(file_path) + ) + try: + os.chmod(file_path, 0o644) + except Exception: + pass + + # Build X-Accel-Redirect path relative to alias root + try: + rel_path = os.path.relpath(file_path, settings.TEMPORARY_LAYER_DIR) + except ValueError: + rel_path = os.path.basename(file_path) + + file_name = os.path.basename(file_path) + + headers = { + 'Content-Type': 'application/octet-stream', + 'X-Accel-Redirect': f"/userfiles/{rel_path}", + 'Content-Disposition': f'attachment; filename="{file_name}"', + } + return Response(status=200, headers=headers) diff --git a/django_project/cplus_api/migrations/0021_alter_inputlayer_component_type.py b/django_project/cplus_api/migrations/0021_alter_inputlayer_component_type.py new file mode 100644 index 0000000..b754b77 --- /dev/null +++ b/django_project/cplus_api/migrations/0021_alter_inputlayer_component_type.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.7 on 2025-12-16 09:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('cplus_api', '0020_zonalstatisticstask'), + ] + + operations = [ + migrations.AlterField( + model_name='inputlayer', + name='component_type', + field=models.CharField(choices=[('ncs_pathway', 'ncs_pathway'), ('ncs_carbon', 'ncs_carbon'), ('priority_layer', 'priority_layer'), ('snap_layer', 'snap_layer'), ('sieve_mask_layer', 'sieve_mask_layer'), ('mask_layer', 'mask_layer'), ('reference_layer', 'reference_layer'), ('stored_carbon', 'stored_carbon')], max_length=255), + ), + ] diff --git a/django_project/cplus_api/models/layer.py 
b/django_project/cplus_api/models/layer.py index 67874f1..5a1515a 100644 --- a/django_project/cplus_api/models/layer.py +++ b/django_project/cplus_api/models/layer.py @@ -1,393 +1,394 @@ -import os -import uuid -import shutil -from zipfile import ZipFile -from django.db import models -from django.utils.translation import gettext_lazy as _ -from django.conf import settings -from django.utils import timezone -from django.core.files.storage import storages, FileSystemStorage -from django.db.models.signals import post_save, post_delete -from django.dispatch import receiver - - -COMMON_LAYERS_DIR = 'common_layers' -INTERNAL_LAYERS_DIR = 'internal_layers' - - -def input_layer_dir_path(instance, filename): - """Return upload directory path for Input Layer.""" - file_path = str(instance.owner.pk) - if instance.privacy_type == InputLayer.PrivacyTypes.COMMON: - file_path = COMMON_LAYERS_DIR - if instance.privacy_type == InputLayer.PrivacyTypes.INTERNAL: - file_path = INTERNAL_LAYERS_DIR - - if instance.privacy_type in [ - InputLayer.PrivacyTypes.COMMON, - InputLayer.PrivacyTypes.INTERNAL - ]: - file_path = os.path.join( - file_path, - instance.component_type, - instance.source, - filename - ) - else: - file_path = os.path.join( - file_path, - instance.component_type, - filename - ) - return file_path - - -def output_layer_dir_path(instance, filename): - """Return upload directory path for Output Layer.""" - file_path = f'{str(instance.owner.pk)}/{str(instance.scenario.uuid)}/' - if not instance.is_final_output: - file_path = file_path + f'{instance.group}/' - file_path = file_path + filename - return file_path - - -def select_input_layer_storage(): - """Return storage for input layer.""" - return storages['input_layer_storage'] - - -def default_output_meta(): - """ - Default value for OutputLayer's output_meta. 
- """ - return {} - - -class BaseLayer(models.Model): - class LayerTypes(models.IntegerChoices): - RASTER = 0, _('Raster') - VECTOR = 1, _('Vector') - UNDEFINED = -1, _('Undefined') - - uuid = models.UUIDField( - default=uuid.uuid4, - unique=True - ) - - name = models.CharField( - max_length=512 - ) - - created_on = models.DateTimeField() - - owner = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, - ) - - layer_type = models.IntegerField(choices=LayerTypes.choices) - - size = models.BigIntegerField( - null=True, - blank=True, - default=0 - ) - - class Meta: - abstract = True - - -class InputLayer(BaseLayer): - class ComponentTypes(models.TextChoices): - NCS_PATHWAY = 'ncs_pathway', _('ncs_pathway') - NCS_CARBON = 'ncs_carbon', _('ncs_carbon') - PRIORITY_LAYER = 'priority_layer', _('priority_layer') - SNAP_LAYER = 'snap_layer', _('snap_layer') - SIEVE_MASK_LAYER = 'sieve_mask_layer', _('sieve_mask_layer') - MASK_LAYER = 'mask_layer', _('mask_layer') - REFERENCE_LAYER = 'reference_layer', _('reference_layer') - - class PrivacyTypes(models.TextChoices): - PRIVATE = 'private', _('private') - INTERNAL = 'internal', _('internal') - COMMON = 'common', _('common') - - class LayerSources(models.TextChoices): - CPLUS = 'cplus', _('CPLUS') - NATURE_BASE = 'naturebase', _('Naturebase') - - file = models.FileField( - upload_to=input_layer_dir_path, - storage=select_input_layer_storage - ) - - component_type = models.CharField( - max_length=255, - choices=ComponentTypes.choices - ) - - privacy_type = models.CharField( - max_length=255, - choices=PrivacyTypes.choices, - default=PrivacyTypes.PRIVATE - ) - - last_used_on = models.DateTimeField( - null=True, - blank=True - ) - - client_id = models.TextField( - null=True, - blank=True - ) - - metadata = models.JSONField( - default=dict, - blank=True, - help_text='Layer Metadata.' 
- ) - - modified_on = models.DateTimeField(auto_now=True) - - description = models.TextField( - null=False, - blank=True, - default='' - ) - - source = models.CharField( - max_length=50, - choices=LayerSources.choices, - default=LayerSources.CPLUS - ) - - version = models.CharField( - max_length=512, - null=True, - blank=True - ) - - license = models.TextField( - null=True, - blank=True - ) - - def __str__(self): - return f"{self.name} - {self.component_type}" - - def save( - self, force_insert=False, force_update=False, - using=None, update_fields=None - ): - if self.pk: - old_instance = InputLayer.objects.get(uuid=self.uuid) - self.move_file = False - if old_instance.privacy_type != self.privacy_type: - self.move_file = True - if old_instance.component_type != self.component_type: - self.move_file = True - return super().save( - force_insert=False, - force_update=False, - using=using, - update_fields=update_fields - ) - - def download_to_working_directory(self, base_dir: str): - if not self.is_available(): - return None - dir_path: str = os.path.join( - base_dir, - self.component_type - ) - if not os.path.exists(dir_path): - os.makedirs(dir_path) - file_path: str = os.path.join( - dir_path, - os.path.basename(self.file.name) - ) - storage = select_input_layer_storage() - if isinstance(storage, FileSystemStorage): - with open(file_path, 'wb+') as destination: - for chunk in self.file.chunks(): - destination.write(chunk) - else: - boto3_client = storage.connection.meta.client - boto3_client.download_file( - storage.bucket_name, - self.file.name, - file_path, - Config=settings.AWS_TRANSFER_CONFIG - ) - self.last_used_on = timezone.now() - self.save(update_fields=['last_used_on']) - if file_path.endswith('.zip'): - extract_path = os.path.join( - dir_path, - os.path.basename(file_path).replace('.zip', '_zip') - ) - with ZipFile(file_path, 'r') as zip_ref: - zip_ref.extractall(extract_path) - shapefile = [ - file for file in os.listdir(extract_path) - if 
file.endswith('.shp') - ] - if shapefile: - return os.path.join(extract_path, shapefile[0]) - else: - return None - return file_path - - def is_available(self): - if not self.file.name: - return False - return self.file.storage.exists(self.file.name) - - def is_in_correct_directory(self): - layer_path = self.file.name - prefix_path = str(self.owner.pk) - if self.privacy_type == InputLayer.PrivacyTypes.COMMON: - prefix_path = os.path.join( - COMMON_LAYERS_DIR, - self.component_type, - self.source - ) - elif self.privacy_type == InputLayer.PrivacyTypes.INTERNAL: - prefix_path = os.path.join( - INTERNAL_LAYERS_DIR, - self.component_type, - self.source - ) - return layer_path.startswith(prefix_path) - - def move_file_location(self): - if not self.is_available(): - return - old_path = self.file.name - correct_path = input_layer_dir_path(self, self.name) - storage = select_input_layer_storage() - if isinstance(storage, FileSystemStorage): - full_correct_path = os.path.join(storage.location, correct_path) - dirname = os.path.split(full_correct_path)[0] - os.makedirs(dirname, exist_ok=True) - shutil.move( - os.path.join(storage.location, old_path), - full_correct_path, - ) - else: - boto3_client = storage.connection.meta.client - copy_source = { - 'Bucket': storage.bucket_name, - 'Key': old_path - } - boto3_client.copy(copy_source, storage.bucket_name, correct_path) - boto3_client.delete_object( - Bucket=storage.bucket_name, Key=old_path) - self.file.name = correct_path - self.save(update_fields=['file']) - - def fix_layer_metadata(self): - if not self.is_available(): - return - self.size = self.file.size - self.save(update_fields=['size']) - if self.is_in_correct_directory(): - return - self.move_file_location() - - -class OutputLayer(BaseLayer): - - is_final_output = models.BooleanField( - default=False - ) - - group = models.CharField( - max_length=256, - null=True, - blank=True - ) - - scenario = models.ForeignKey( - 'cplus_api.ScenarioTask', - 
related_name='output_layers', - on_delete=models.CASCADE - ) - - file = models.FileField( - upload_to=output_layer_dir_path - ) - - is_deleted = models.BooleanField( - default=False - ) - output_meta = models.JSONField( - default=default_output_meta, - blank=True, - help_text='Output Metadata.' - ) - - def __str__(self): - group = self.group if not self.is_final_output else 'Final' - return f"{self.name} - {group} - {self.uuid}" - - -class MultipartUpload(models.Model): - """Model to store id of multipart upload.""" - - upload_id = models.CharField( - max_length=512 - ) - - input_layer_uuid = models.UUIDField() - - created_on = models.DateTimeField() - - uploader = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.SET_NULL, - null=True, - blank=True - ) - - parts = models.IntegerField() - - is_aborted = models.BooleanField( - default=False - ) - - aborted_on = models.DateTimeField( - null=True, - blank=True - ) - - -class TemporaryLayer(models.Model): - """Model to store temporary layer files.""" - - file_name = models.CharField( - max_length=512, - help_text='File name that is stored in TEMPORARY_LAYER_DIR.' 
import os
import uuid
import shutil
from zipfile import ZipFile
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from django.utils import timezone
from django.core.files.storage import storages, FileSystemStorage
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver


COMMON_LAYERS_DIR = 'common_layers'
INTERNAL_LAYERS_DIR = 'internal_layers'


def input_layer_dir_path(instance, filename):
    """Return the upload directory path for an InputLayer file.

    Common/internal layers are stored under a shared directory that also
    encodes the layer source; private layers are stored under the owner's pk.

    :param instance: the InputLayer being stored
    :param filename: original file name
    :return: relative storage path for the file
    :rtype: str
    """
    if instance.privacy_type == InputLayer.PrivacyTypes.COMMON:
        return os.path.join(
            COMMON_LAYERS_DIR,
            instance.component_type,
            instance.source,
            filename
        )
    if instance.privacy_type == InputLayer.PrivacyTypes.INTERNAL:
        return os.path.join(
            INTERNAL_LAYERS_DIR,
            instance.component_type,
            instance.source,
            filename
        )
    # Private layers: scoped by owner pk, no source segment.
    return os.path.join(
        str(instance.owner.pk),
        instance.component_type,
        filename
    )


def output_layer_dir_path(instance, filename):
    """Return the upload directory path for an Output Layer.

    Final outputs go directly under ``<owner_pk>/<scenario_uuid>/``;
    intermediate outputs get an extra ``<group>/`` segment.
    """
    file_path = f'{str(instance.owner.pk)}/{str(instance.scenario.uuid)}/'
    if not instance.is_final_output:
        file_path = file_path + f'{instance.group}/'
    return file_path + filename


def select_input_layer_storage():
    """Return the storage backend configured for input layers."""
    return storages['input_layer_storage']


def default_output_meta():
    """Default value for OutputLayer's output_meta."""
    return {}


class BaseLayer(models.Model):
    """Abstract base holding fields shared by input and output layers."""

    class LayerTypes(models.IntegerChoices):
        RASTER = 0, _('Raster')
        VECTOR = 1, _('Vector')
        UNDEFINED = -1, _('Undefined')

    uuid = models.UUIDField(
        default=uuid.uuid4,
        unique=True
    )

    name = models.CharField(
        max_length=512
    )

    created_on = models.DateTimeField()

    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
    )

    layer_type = models.IntegerField(choices=LayerTypes.choices)

    # File size in bytes; 0/None until the file has been stored.
    size = models.BigIntegerField(
        null=True,
        blank=True,
        default=0
    )

    class Meta:
        abstract = True


class InputLayer(BaseLayer):
    """A layer uploaded by a user for use as scenario input."""

    class ComponentTypes(models.TextChoices):
        NCS_PATHWAY = 'ncs_pathway', _('ncs_pathway')
        NCS_CARBON = 'ncs_carbon', _('ncs_carbon')
        PRIORITY_LAYER = 'priority_layer', _('priority_layer')
        SNAP_LAYER = 'snap_layer', _('snap_layer')
        SIEVE_MASK_LAYER = 'sieve_mask_layer', _('sieve_mask_layer')
        MASK_LAYER = 'mask_layer', _('mask_layer')
        REFERENCE_LAYER = 'reference_layer', _('reference_layer')
        STORED_CARBON = 'stored_carbon', _('stored_carbon')

    class PrivacyTypes(models.TextChoices):
        PRIVATE = 'private', _('private')
        INTERNAL = 'internal', _('internal')
        COMMON = 'common', _('common')

    class LayerSources(models.TextChoices):
        CPLUS = 'cplus', _('CPLUS')
        NATURE_BASE = 'naturebase', _('Naturebase')

    file = models.FileField(
        upload_to=input_layer_dir_path,
        storage=select_input_layer_storage
    )

    component_type = models.CharField(
        max_length=255,
        choices=ComponentTypes.choices
    )

    privacy_type = models.CharField(
        max_length=255,
        choices=PrivacyTypes.choices,
        default=PrivacyTypes.PRIVATE
    )

    last_used_on = models.DateTimeField(
        null=True,
        blank=True
    )

    client_id = models.TextField(
        null=True,
        blank=True
    )

    metadata = models.JSONField(
        default=dict,
        blank=True,
        help_text='Layer Metadata.'
    )

    modified_on = models.DateTimeField(auto_now=True)

    description = models.TextField(
        null=False,
        blank=True,
        default=''
    )

    source = models.CharField(
        max_length=50,
        choices=LayerSources.choices,
        default=LayerSources.CPLUS
    )

    version = models.CharField(
        max_length=512,
        null=True,
        blank=True
    )

    license = models.TextField(
        null=True,
        blank=True
    )

    def __str__(self):
        return f"{self.name} - {self.component_type}"

    def save(
        self, force_insert=False, force_update=False,
        using=None, update_fields=None
    ):
        """Save the layer, flagging whether its stored file must move.

        Sets ``self.move_file`` when privacy_type or component_type changed
        so the post_save receiver can relocate the file asynchronously.
        """
        if self.pk:
            old_instance = InputLayer.objects.get(uuid=self.uuid)
            self.move_file = (
                old_instance.privacy_type != self.privacy_type or
                old_instance.component_type != self.component_type
            )
        # Fix: forward force_insert/force_update to the parent save()
        # instead of hard-coding them to False, so callers that request
        # a forced INSERT/UPDATE are honoured.
        return super().save(
            force_insert=force_insert,
            force_update=force_update,
            using=using,
            update_fields=update_fields
        )

    def download_to_working_directory(self, base_dir: str):
        """Download the layer file into ``base_dir/<component_type>/``.

        Zip archives are extracted and the first ``.shp`` inside is
        returned; otherwise the downloaded file path is returned.
        Returns None when the file is unavailable or a zip contains
        no shapefile.
        """
        if not self.is_available():
            return None
        dir_path: str = os.path.join(
            base_dir,
            self.component_type
        )
        os.makedirs(dir_path, exist_ok=True)
        file_path: str = os.path.join(
            dir_path,
            os.path.basename(self.file.name)
        )
        storage = select_input_layer_storage()
        if isinstance(storage, FileSystemStorage):
            with open(file_path, 'wb+') as destination:
                for chunk in self.file.chunks():
                    destination.write(chunk)
        else:
            # S3-backed storage: stream via boto3 transfer manager.
            boto3_client = storage.connection.meta.client
            boto3_client.download_file(
                storage.bucket_name,
                self.file.name,
                file_path,
                Config=settings.AWS_TRANSFER_CONFIG
            )
        # Track usage so rarely-used layers can be identified later.
        self.last_used_on = timezone.now()
        self.save(update_fields=['last_used_on'])
        if file_path.endswith('.zip'):
            extract_path = os.path.join(
                dir_path,
                os.path.basename(file_path).replace('.zip', '_zip')
            )
            with ZipFile(file_path, 'r') as zip_ref:
                zip_ref.extractall(extract_path)
            shapefile = [
                file for file in os.listdir(extract_path)
                if file.endswith('.shp')
            ]
            if shapefile:
                return os.path.join(extract_path, shapefile[0])
            return None
        return file_path

    def is_available(self):
        """Return True when the layer has a file that exists in storage."""
        if not self.file.name:
            return False
        return self.file.storage.exists(self.file.name)

    def is_in_correct_directory(self):
        """Return True when the stored file sits under its expected prefix."""
        layer_path = self.file.name
        prefix_path = str(self.owner.pk)
        if self.privacy_type == InputLayer.PrivacyTypes.COMMON:
            prefix_path = os.path.join(
                COMMON_LAYERS_DIR,
                self.component_type,
                self.source
            )
        elif self.privacy_type == InputLayer.PrivacyTypes.INTERNAL:
            prefix_path = os.path.join(
                INTERNAL_LAYERS_DIR,
                self.component_type,
                self.source
            )
        # Fix: compare against the prefix plus a trailing separator so
        # that e.g. owner pk "1" does not spuriously match "12/...".
        return layer_path.startswith(os.path.join(prefix_path, ''))

    def move_file_location(self):
        """Move the stored file to the directory implied by current fields."""
        if not self.is_available():
            return
        old_path = self.file.name
        correct_path = input_layer_dir_path(self, self.name)
        storage = select_input_layer_storage()
        if isinstance(storage, FileSystemStorage):
            full_correct_path = os.path.join(storage.location, correct_path)
            dirname = os.path.split(full_correct_path)[0]
            os.makedirs(dirname, exist_ok=True)
            shutil.move(
                os.path.join(storage.location, old_path),
                full_correct_path,
            )
        else:
            # S3 has no rename: copy to the new key, then delete the old.
            boto3_client = storage.connection.meta.client
            copy_source = {
                'Bucket': storage.bucket_name,
                'Key': old_path
            }
            boto3_client.copy(copy_source, storage.bucket_name, correct_path)
            boto3_client.delete_object(
                Bucket=storage.bucket_name, Key=old_path)
        self.file.name = correct_path
        self.save(update_fields=['file'])

    def fix_layer_metadata(self):
        """Refresh the stored size and relocate the file if misplaced."""
        if not self.is_available():
            return
        self.size = self.file.size
        self.save(update_fields=['size'])
        if self.is_in_correct_directory():
            return
        self.move_file_location()


class OutputLayer(BaseLayer):
    """A layer produced by a scenario analysis run."""

    is_final_output = models.BooleanField(
        default=False
    )

    group = models.CharField(
        max_length=256,
        null=True,
        blank=True
    )

    scenario = models.ForeignKey(
        'cplus_api.ScenarioTask',
        related_name='output_layers',
        on_delete=models.CASCADE
    )

    file = models.FileField(
        upload_to=output_layer_dir_path
    )

    is_deleted = models.BooleanField(
        default=False
    )
    output_meta = models.JSONField(
        default=default_output_meta,
        blank=True,
        help_text='Output Metadata.'
    )

    def __str__(self):
        group = self.group if not self.is_final_output else 'Final'
        return f"{self.name} - {group} - {self.uuid}"


class MultipartUpload(models.Model):
    """Model to store id of multipart upload."""

    upload_id = models.CharField(
        max_length=512
    )

    input_layer_uuid = models.UUIDField()

    created_on = models.DateTimeField()

    uploader = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True
    )

    # Total number of parts declared at upload start.
    parts = models.IntegerField()

    is_aborted = models.BooleanField(
        default=False
    )

    aborted_on = models.DateTimeField(
        null=True,
        blank=True
    )


class TemporaryLayer(models.Model):
    """Model to store temporary layer files."""

    file_name = models.CharField(
        max_length=512,
        help_text='File name that is stored in TEMPORARY_LAYER_DIR.'
    )
    size = models.BigIntegerField()
    created_on = models.DateTimeField(auto_now_add=True)


@receiver(post_save, sender=InputLayer)
def save_input_layer(sender, instance, created, **kwargs):
    """Move the stored file after component type or privacy type changes.

    ``InputLayer.save()`` sets ``move_file`` when either field changed;
    the actual relocation runs asynchronously in a Celery task.
    """
    from cplus_api.tasks.move_input_layer_file import move_input_layer_file
    if not created:
        if getattr(instance, 'move_file', False):
            move_input_layer_file.delay(instance.uuid)


@receiver(post_delete, sender=TemporaryLayer)
def post_delete_temp_layer(sender, instance, **kwargs):
    """Remove temporary layer file if TemporaryLayer is deleted."""
    file_path = os.path.join(settings.TEMPORARY_LAYER_DIR, instance.file_name)
    if os.path.exists(file_path):
        os.remove(file_path)
LayerUploadAbort, - FetchLayerByClientId, - DefaultLayerList, - ReferenceLayerDownload, - DefaultLayerDownload -) -from cplus_api.models.profile import UserProfile -from cplus_api.utils.api_helper import convert_size -from cplus_api.tests.common import ( - FakeResolverMatchV1, - BaseAPIViewTransactionTest, - MockS3Client -) -from cplus_api.tests.factories import InputLayerF, UserF - - -class TestLayerAPIView(BaseAPIViewTransactionTest): - - def test_is_internal_user(self): - user_1 = UserF.create() - # has external role - self.assertFalse(is_internal_user(user_1)) - # no role - user_profile = user_1.user_profile - user_profile.role = None - user_profile.save() - self.assertFalse(is_internal_user(user_1)) - # no user_profile - user_profile.delete() - self.assertFalse(UserProfile.objects.filter(user=user_1).exists()) - user_1.refresh_from_db() - self.assertFalse(is_internal_user(user_1)) - # has internal role - user_2 = self.create_internal_user() - self.assertTrue(is_internal_user(user_2)) - - def test_layer_list(self): - request = self.factory.get( - reverse('v1:layer-list') - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - view = LayerList.as_view() - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data['results'], []) - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON - ) - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['results']), 1) - find_layer = self.find_layer_from_response( - response.data['results'], input_layer.uuid) - self.assertTrue(find_layer) - self.assertFalse(find_layer['url']) - self.assertFalse(input_layer.file) - # non existing file in storage - input_layer.file.name = ( - 'common_layers/ncs_pathway/test_model_2_123.tif' - ) - input_layer.save() - self.assertTrue(input_layer.file) - self.assertFalse(input_layer.is_available()) - response = view(request) - 
self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['results']), 1) - find_layer = self.find_layer_from_response( - response.data['results'], input_layer.uuid) - self.assertTrue(find_layer) - self.assertFalse(find_layer['url']) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file(input_layer, file_path) - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['results']), 1) - find_layer = self.find_layer_from_response( - response.data['results'], input_layer.uuid) - self.assertTrue(find_layer) - self.assertTrue(find_layer['url']) - - def test_default_layer_list(self): - request = self.factory.get( - reverse('v1:layer-default-list') - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - view = DefaultLayerList.as_view() - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data, []) - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON - ) - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data), 1) - find_layer = self.find_layer_from_response( - response.data, input_layer.uuid) - self.assertTrue(find_layer) - self.assertFalse(find_layer['url']) - self.assertFalse(input_layer.file) - - def test_layer_access(self): - input_layer_1 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON - ) - input_layer_2 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.INTERNAL - ) - input_layer_3 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE - ) - self.assertTrue(validate_layer_access( - input_layer_1, self.superuser - )) - self.assertTrue(validate_layer_access( - input_layer_1, self.user_1 - )) - user_2 = self.create_internal_user() - self.assertTrue(validate_layer_access( - input_layer_2, user_2 - )) - 
self.assertFalse(validate_layer_access( - input_layer_3, self.user_1 - )) - self.assertTrue(validate_layer_access( - input_layer_3, input_layer_3.owner - )) - # upload access - layer_upload_view = LayerUpload() - self.assertTrue(layer_upload_view.validate_upload_access( - InputLayer.PrivacyTypes.COMMON, self.superuser - )) - self.assertTrue(layer_upload_view.validate_upload_access( - InputLayer.PrivacyTypes.INTERNAL, user_2 - )) - with self.assertRaises(PermissionDenied): - layer_upload_view.validate_upload_access( - InputLayer.PrivacyTypes.COMMON, self.user_1 - ) - self.assertTrue(layer_upload_view.validate_upload_access( - InputLayer.PrivacyTypes.PRIVATE, self.user_1 - )) - input_layer_4 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - owner=self.user_1 - ) - self.assertTrue(layer_upload_view.validate_upload_access( - InputLayer.PrivacyTypes.PRIVATE, self.user_1, - True, input_layer_4 - )) - - def test_layer_detail(self): - view = LayerDetail.as_view() - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file(input_layer, file_path) - kwargs = { - 'layer_uuid': str(input_layer.uuid) - } - request = self.factory.get( - reverse('v1:layer-detail', kwargs=kwargs) - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 200) - self.assertTrue(response.data['url']) - self.assertEqual(response.data['uuid'], str(input_layer.uuid)) - # forbidden - request = self.factory.get( - reverse('v1:layer-detail', kwargs=kwargs) - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request, **kwargs) - self.assertEqual(response.status_code, 403) - - @override_settings(DEBUG=True) - def test_layer_detail_from_dev(self): - view = LayerDetail.as_view() - input_layer = 
InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file(input_layer, file_path) - kwargs = { - 'layer_uuid': str(input_layer.uuid) - } - request = self.factory.get( - reverse('v1:layer-detail', kwargs=kwargs) - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 200) - self.assertTrue(response.data['url']) - self.assertEqual(response.data['uuid'], str(input_layer.uuid)) - - def test_layer_delete(self): - view = LayerDetail.as_view() - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file(input_layer, file_path) - layer_uuid = input_layer.uuid - kwargs = { - 'layer_uuid': str(layer_uuid) - } - # forbidden - request = self.factory.delete( - reverse('v1:layer-detail', kwargs=kwargs) - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request, **kwargs) - self.assertEqual(response.status_code, 403) - # successful - request = self.factory.delete( - reverse('v1:layer-detail', kwargs=kwargs) - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 204) - self.assertFalse( - InputLayer.objects.filter( - uuid=layer_uuid - ).exists() - ) - self.assertFalse(input_layer.is_available()) - - def test_layer_update_partial(self): - view = LayerDetail.as_view() - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file(input_layer, file_path) - layer_uuid = input_layer.uuid - kwargs = { - 'layer_uuid': 
str(layer_uuid) - } - # forbidden - request = self.factory.patch( - reverse('v1:layer-detail', kwargs=kwargs), - data={'name': 'test_name'} - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request, **kwargs) - self.assertEqual(response.status_code, 403) - # successful - request = self.factory.patch( - reverse('v1:layer-detail', kwargs=kwargs), - data={'name': 'test_name'} - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 200) - input_layer.refresh_from_db() - self.assertEqual(input_layer.name, 'test_name') - - def test_layer_upload(self): - view = LayerUpload.as_view() - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'common', - 'client_id': 'client-test-123', - 'file': self.read_uploaded_file(file_path) - } - # invalid access - request = self.factory.post( - reverse('v1:layer-upload'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request) - self.assertEqual(response.status_code, 403) - # upload successful - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'common', - 'client_id': 'client-test-123', - 'file': self.read_uploaded_file(file_path) - } - request = self.factory.post( - reverse('v1:layer-upload'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 201) - self.assertIn('uuid', response.data) - layer_uuid = response.data['uuid'] - input_layer = InputLayer.objects.filter( - uuid=layer_uuid - ).first() - self.assertTrue(input_layer) - self.assertEqual(input_layer.layer_type, data['layer_type']) - self.assertEqual(input_layer.component_type, data['component_type']) - 
self.assertEqual(input_layer.privacy_type, data['privacy_type']) - self.assertEqual(input_layer.client_id, data['client_id']) - self.assertTrue(input_layer.size > 0) - self.assertTrue(input_layer.is_available()) - # test update - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'pathways', 'test_pathway_1.tif' - ) - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'private', - 'client_id': 'client-test-123', - 'file': self.read_uploaded_file(file_path), - 'uuid': layer_uuid - } - # test 403 update - request = self.factory.post( - reverse('v1:layer-upload'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request) - self.assertEqual(response.status_code, 403) - # test successful update - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'private', - 'client_id': 'client-test-123', - 'file': self.read_uploaded_file(file_path), - 'uuid': layer_uuid - } - request = self.factory.post( - reverse('v1:layer-upload'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 201) - self.assertIn('uuid', response.data) - input_layer.refresh_from_db() - self.assertEqual(input_layer.privacy_type, data['privacy_type']) - - @override_settings(DEBUG=True) - @mock.patch('boto3.client') - def test_layer_upload_start(self, mocked_s3): - s3_client = MockS3Client() - mocked_s3.return_value = s3_client - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_start.tif' - view = LayerUploadStart.as_view() - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'common', - 'client_id': 'client-test-123', - 'name': base_filename, - 'size': os.stat(file_path).st_size - } - request = self.factory.post( - reverse('v1:layer-upload-start'), data - ) - request.resolver_match = 
FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 201) - self.assertIn('uuid', response.data) - self.assertIn('name', response.data) - self.assertIn('upload_urls', response.data) - self.assertEqual(response.data['name'], data['name']) - input_layer = InputLayer.objects.filter( - uuid=response.data['uuid'] - ).first() - self.assertTrue(input_layer) - self.assertFalse(input_layer.file) - self.assertEqual(input_layer.size, data['size']) - - # test with existing file - storage_backend = select_input_layer_storage() - dest_file_path = os.path.join( - storage_backend.location, - input_layer_dir_path(input_layer, base_filename) - ) - self.direct_upload_layer_file(file_path, dest_file_path) - request = self.factory.post( - reverse('v1:layer-upload-start'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 201) - self.assertNotEqual(response.data['name'], data['name']) - input_layer = InputLayer.objects.filter( - uuid=response.data['uuid'] - ).first() - self.assertTrue(input_layer) - self.assertFalse(input_layer.file) - self.assertEqual(input_layer.size, data['size']) - self.assertEqual(input_layer.name, response.data['name']) - self.assertTrue(os.path.exists(dest_file_path)) - # test update should remove old file - self.store_layer_file( - input_layer, file_path, file_name=input_layer.name) - input_layer.refresh_from_db() - old_filename = input_layer.name - old_file_path = os.path.join( - storage_backend.location, - input_layer_dir_path(input_layer, input_layer.name) - ) - self.assertTrue(os.path.exists(old_file_path)) - self.assertTrue(storage_backend.exists(input_layer.file.name)) - data['uuid'] = str(input_layer.uuid) - request = self.factory.post( - reverse('v1:layer-upload-start'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) 
- self.assertEqual(response.status_code, 201) - input_layer.refresh_from_db() - self.assertFalse(input_layer.file) - self.assertEqual(input_layer.name, response.data['name']) - self.assertFalse(storage_backend.exists(old_filename)) - self.assertFalse(os.path.exists(old_file_path)) - - @mock.patch('boto3.client') - def test_layer_upload_start_with_s3(self, mocked_s3): - s3_client = MockS3Client() - mocked_s3.return_value = s3_client - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_start2.tif' - view = LayerUploadStart.as_view() - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'common', - 'client_id': 'client-test-123', - 'name': base_filename, - 'size': os.stat(file_path).st_size - } - request = self.factory.post( - reverse('v1:layer-upload-start'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 201) - self.assertIn('uuid', response.data) - self.assertIn('name', response.data) - self.assertIn('upload_urls', response.data) - self.assertEqual(response.data['name'], data['name']) - self.assertEqual( - response.data['upload_urls'], - [{ - 'part_number': 1, - 'url': 'this_is_url' - }] - ) - # test failed generate url - s3_client.raise_exc = True - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'common', - 'client_id': 'client-test-123', - 'name': base_filename, - 'size': os.stat(file_path).st_size - } - request = self.factory.post( - reverse('v1:layer-upload-start'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 400) - - @mock.patch('boto3.client') - def test_layer_upload_start_with_s3_multipart(self, mocked_s3): - s3_client = MockS3Client() - mocked_s3.return_value = s3_client - file_path = absolute_path( - 
'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_start2.tif' - view = LayerUploadStart.as_view() - data = { - 'layer_type': 0, - 'component_type': 'ncs_carbon', - 'privacy_type': 'common', - 'client_id': 'client-test-123', - 'name': base_filename, - 'size': os.stat(file_path).st_size, - 'number_of_parts': 2 - } - request = self.factory.post( - reverse('v1:layer-upload-start'), data - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 201) - self.assertIn('uuid', response.data) - self.assertIn('name', response.data) - self.assertIn('upload_urls', response.data) - self.assertEqual(response.data['name'], data['name']) - self.assertEqual( - response.data['upload_urls'], - [{ - 'part_number': 1, - 'url': 'this_is_url' - }, { - 'part_number': 2, - 'url': 'this_is_url' - }] - ) - multipart_upload_id = response.data.get('multipart_upload_id', '') - self.assertTrue(multipart_upload_id) - self.assertTrue(MultipartUpload.objects.filter( - upload_id=multipart_upload_id, - input_layer_uuid=response.data['uuid'] - ).exists()) - - def test_layer_upload_finish(self): - view = LayerUploadFinish.as_view() - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_finish.tif' - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name=base_filename, - size=10 - ) - kwargs = { - 'layer_uuid': str(input_layer.uuid) - } - payload = {} - # file not exist - request = self.factory.post( - reverse('v1:layer-upload-finish', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 400) - self.check_validation_error_string(response.data, 'does not exist') - input_layer.refresh_from_db() - 
self.assertFalse(input_layer.file.name) - # size not match - storage_backend = select_input_layer_storage() - dest_file_path = os.path.join( - storage_backend.location, - input_layer_dir_path(input_layer, base_filename) - ) - self.direct_upload_layer_file(file_path, dest_file_path) - request = self.factory.post( - reverse('v1:layer-upload-finish', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 400) - self.check_validation_error_string( - response.data, 'file size missmatch') - input_layer.refresh_from_db() - self.assertFalse(input_layer.file.name) - # succcess - input_layer.size = os.stat(file_path).st_size - input_layer.save(update_fields=['size']) - request = self.factory.post( - reverse('v1:layer-upload-finish', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 200) - input_layer.refresh_from_db() - self.assertTrue(input_layer.file.name) - self.assertTrue(input_layer.is_available()) - - @mock.patch('boto3.client') - def test_layer_upload_finish_with_multipart(self, mocked_s3): - s3_client = MockS3Client() - mocked_s3.return_value = s3_client - view = LayerUploadFinish.as_view() - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_finish2.tif' - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name=base_filename, - size=10 - ) - kwargs = { - 'layer_uuid': str(input_layer.uuid) - } - payload = { - 'multipart_upload_id': 'this_is_upload_id', - 'items': [{ - 'part_number': 1, - 'etag': 'etag-1' - }, { - 'part_number': 2, - 'etag': 'etag-2' - }] - } - MultipartUpload.objects.create( - upload_id=payload['multipart_upload_id'], - 
input_layer_uuid=input_layer.uuid, - created_on=timezone.now(), - uploader=input_layer.owner, - parts=10 - ) - input_layer.size = os.stat(file_path).st_size - input_layer.save(update_fields=['size']) - self.store_layer_file(input_layer, file_path, base_filename) - request = self.factory.post( - reverse('v1:layer-upload-finish', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 200) - input_layer.refresh_from_db() - self.assertTrue(input_layer.file.name) - self.assertTrue(input_layer.is_available()) - self.assertFalse(MultipartUpload.objects.filter( - upload_id=payload['multipart_upload_id'], - input_layer_uuid=str(input_layer.uuid) - ).exists()) - - def test_check_layer(self): - view = CheckLayer.as_view() - # create layer by superuser - layer_1 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name='test_superuser_layer.tif', - size=10, - owner=self.superuser, - client_id='layer-1' - ) - # create layer by user with+without file - layer_2 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name='test_layer_2.tif', - size=10, - owner=self.user_1, - client_id='layer-2' - ) - layer_3 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name='test_layer_3.tif', - size=10, - owner=self.user_1, - client_id='layer-3' - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file(layer_3, file_path, layer_3.name) - # test with layer_uuid - data = [ - str(layer_1.uuid), - str(layer_2.uuid), - str(layer_3.uuid), - str(uuid.uuid4()) - ] - request = self.factory.post( - reverse('v1:layer-check') + '?id_type=layer_uuid', - data, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request) - self.assertEqual(response.status_code, 200) - 
self.assertIn('available', response.data) - self.assertIn('unavailable', response.data) - self.assertIn('invalid', response.data) - self.assertEqual(len(response.data['invalid']), 2) - self.assertIn(str(layer_1.uuid), response.data['invalid']) - self.assertIn(str(layer_2.uuid), response.data['unavailable']) - self.assertIn(str(layer_3.uuid), response.data['available']) - # test with client id - data = [ - layer_1.client_id, - layer_2.client_id, - layer_3.client_id, - 'test-layer-invalid' - ] - request = self.factory.post( - reverse('v1:layer-check') + '?id_type=client_id', - data, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data['invalid']), 2) - self.assertIn('test-layer-invalid', response.data['invalid']) - self.assertIn(layer_1.client_id, response.data['invalid']) - self.assertIn(layer_2.client_id, response.data['unavailable']) - self.assertIn(layer_3.client_id, response.data['available']) - - def test_convert_size(self): - self.assertEqual(convert_size(0), '0B') - self.assertEqual(convert_size(1024), '1.0 KB') - self.assertEqual(convert_size(1024 * 1024), '1.0 MB') - - @mock.patch('boto3.client') - def test_abort_multipart_upload(self, mocked_s3): - s3_client = MockS3Client() - mocked_s3.return_value = s3_client - view = LayerUploadAbort.as_view() - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_finish3.tif' - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name=base_filename, - size=10 - ) - kwargs = { - 'layer_uuid': str(input_layer.uuid) - } - # test invalid payload - payload = {} - request = self.factory.post( - reverse('v1:layer-upload-abort', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = 
view(request, **kwargs) - self.assertEqual(response.status_code, 400) - # test success abort with returned parts = 1 - payload = { - 'multipart_upload_id': 'this_is_upload_id' - } - upload_record = MultipartUpload.objects.create( - upload_id=payload['multipart_upload_id'], - input_layer_uuid=input_layer.uuid, - created_on=timezone.now(), - uploader=input_layer.owner, - parts=10 - ) - input_layer.size = os.stat(file_path).st_size - input_layer.save(update_fields=['size']) - request = self.factory.post( - reverse('v1:layer-upload-abort', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 204) - upload_record.refresh_from_db() - self.assertTrue(upload_record.is_aborted) - # test success abort with returned parts = 0 - s3_client.mock_parts = { - 'Parts': [] - } - request = self.factory.post( - reverse('v1:layer-upload-abort', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 204) - self.assertFalse( - MultipartUpload.objects.filter( - upload_id=payload['multipart_upload_id'], - input_layer_uuid=input_layer.uuid - ).exists() - ) - self.assertFalse( - InputLayer.objects.filter( - uuid=input_layer.uuid - ).exists() - ) - - @mock.patch('boto3.client') - def test_abort_multipart_upload_with_exc(self, mocked_s3): - s3_client = MockS3Client() - mocked_s3.return_value = s3_client - view = LayerUploadAbort.as_view() - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - base_filename = 'test_model_1_finish3.tif' - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.PRIVATE, - name=base_filename, - size=10 - ) - payload = { - 'multipart_upload_id': 'this_is_upload_id' - } - MultipartUpload.objects.create( - 
upload_id=payload['multipart_upload_id'], - input_layer_uuid=input_layer.uuid, - created_on=timezone.now(), - uploader=input_layer.owner, - parts=10 - ) - input_layer.size = os.stat(file_path).st_size - input_layer.save(update_fields=['size']) - kwargs = { - 'layer_uuid': str(input_layer.uuid) - } - s3_client.raise_exc = True - request = self.factory.post( - reverse('v1:layer-upload-abort', kwargs=kwargs), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 204) - self.assertFalse( - MultipartUpload.objects.filter( - upload_id=payload['multipart_upload_id'], - input_layer_uuid=input_layer.uuid - ).exists() - ) - self.assertFalse( - InputLayer.objects.filter( - uuid=input_layer.uuid - ).exists() - ) - - def test_layer_fetch_by_client_id(self): - view = FetchLayerByClientId.as_view() - payload = [ - 'ncs_pathways--Final_Alien_Invasive_Plant_priority_norm.tif' - '_4326_20_20_1072586664' - ] - request = self.factory.post( - reverse('v1:fetch-layer-by-client-id'), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data, []) - input_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON, - client_id=payload[0] - ) - request = self.factory.post( - reverse('v1:fetch-layer-by-client-id'), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data), 1) - find_layer = self.find_layer_from_response( - response.data, input_layer.uuid) - self.assertTrue(find_layer) - self.assertFalse(find_layer['url']) - self.assertFalse(input_layer.file) - input_layer_2 = InputLayerF.create( - 
privacy_type=InputLayer.PrivacyTypes.COMMON, - client_id=payload[0] - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'models', 'test_model_1.tif' - ) - self.store_layer_file( - input_layer_2, file_path, input_layer_2.name) - request = self.factory.post( - reverse('v1:fetch-layer-by-client-id'), - data=payload, format='json' - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.user_1 - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.data), 1) - find_layer = self.find_layer_from_response( - response.data, input_layer_2.uuid) - self.assertTrue(find_layer) - self.assertTrue(find_layer['url']) - self.assertEqual(find_layer['uuid'], str(input_layer_2.uuid)) - - def test_reference_layer_not_exist_yet(self): - view = ReferenceLayerDownload.as_view() - request = self.factory.get( - reverse('v1:reference-layer-download'), - format='json' - ) - request.resolver_match = FakeResolverMatchV1 - response = view(request) - self.assertEqual(response.status_code, 404) - - def test_reference_layer_not_available(self): - view = ReferenceLayerDownload.as_view() - InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON, - component_type=InputLayer.ComponentTypes.REFERENCE_LAYER - ) - request = self.factory.get( - reverse('v1:reference-layer-download'), - format='json' - ) - request.resolver_match = FakeResolverMatchV1 - response = view(request) - self.assertEqual(response.status_code, 404) - self.assertEqual( - response.data, - {'detail': 'Reference layer is not available.'} - ) - - def test_reference_layer_download(self): - bbox = '29.134295060,-31.158062261,29.279926683,-31.094568889' - view = ReferenceLayerDownload.as_view() - reference_layer_1 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON, - component_type=InputLayer.ComponentTypes.REFERENCE_LAYER - ) - reference_layer_2 = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON, - 
component_type=InputLayer.ComponentTypes.REFERENCE_LAYER - ) - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'reference_layer.tif' - ) - self.store_layer_file( - reference_layer_1, file_path, reference_layer_1.name) - self.store_layer_file( - reference_layer_2, file_path, reference_layer_2.name) - request = self.factory.get( - f"{reverse('v1:reference-layer-download')}?bbox={bbox}", - format='json' - ) - request.resolver_match = FakeResolverMatchV1 - response = view(request) - self.assertEqual(response.status_code, 200) - self.assertIn('X-Accel-Redirect', response.headers) - file_path = os.path.join( - settings.TEMPORARY_LAYER_DIR, - response.headers['X-Accel-Redirect'].replace('/userfiles/', '') - ) - self.assertTrue(os.path.exists(file_path)) - # Test the streamed content - import rasterio - - with rasterio.open(file_path) as dataset: - expected_area = 0.01331516565230782 - bbox_polygon = Polygon.from_bbox(dataset.bounds) - self.assertAlmostEqual( - bbox_polygon.area, - expected_area, - places=3 - ) - os.remove(file_path) - - def test_pwl_layer_download(self): - bbox = '29.134295060,-31.158062261,29.279926683,-31.094568889' - view = DefaultLayerDownload.as_view() - priority_layer = InputLayerF.create( - privacy_type=InputLayer.PrivacyTypes.COMMON, - component_type=InputLayer.ComponentTypes.PRIORITY_LAYER - ) - - file_path = absolute_path( - 'cplus_api', 'tests', 'data', - 'priority_layer.tif' - ) - self.store_layer_file( - priority_layer, file_path, priority_layer.name) - - kwargs = { - 'layer_uuid': str(priority_layer.uuid) - } - - endpoint = reverse('v1:default-priority-layer-download', kwargs=kwargs) - request = self.factory.get(f"""{endpoint}?bbox={bbox}""") - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 200) - self.assertIn('X-Accel-Redirect', response.headers) - file_path = os.path.join( - settings.TEMPORARY_LAYER_DIR, - 
response.headers['X-Accel-Redirect'].replace('/userfiles/', '') - ) - self.assertTrue(os.path.exists(file_path)) - # Test the streamed content - import rasterio - - with rasterio.open(file_path) as dataset: - expected_area = 0.00924664281410274 - bbox_polygon = Polygon.from_bbox(dataset.bounds) - self.assertAlmostEqual( - bbox_polygon.area, - expected_area, - places=3 - ) - os.remove(file_path) - - # Test with non-overlapping bbox - non_overlapping_bbox = ( - '28.1,-1.1,28.2,-1.0' - ) - endpoint = reverse('v1:default-priority-layer-download', kwargs=kwargs) - request = self.factory.get( - f"""{endpoint}?bbox={non_overlapping_bbox}""" - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 400) - - # Test with invalid bbox - invalid_bbox = ( - '29.279926683,-31.094568889' - ) - endpoint = reverse('v1:default-priority-layer-download', kwargs=kwargs) - request = self.factory.get( - f"""{endpoint}?bbox={invalid_bbox}""" - ) - request.resolver_match = FakeResolverMatchV1 - request.user = self.superuser - response = view(request, **kwargs) - self.assertEqual(response.status_code, 400) +import os +import uuid +import mock +from django.contrib.gis.geos import Polygon +from django.test import override_settings +from django.conf import settings +from django.urls import reverse +from django.utils import timezone +from rest_framework.exceptions import PermissionDenied +from core.settings.utils import absolute_path +from cplus_api.models.layer import ( + InputLayer, + input_layer_dir_path, + select_input_layer_storage, + MultipartUpload +) +from cplus_api.api_views.layer import ( + LayerList, + LayerDetail, + LayerUpload, + LayerUploadStart, + LayerUploadFinish, + CheckLayer, + is_internal_user, + validate_layer_access, + LayerUploadAbort, + FetchLayerByClientId, + DefaultLayerList, + ReferenceLayerDownload, + DefaultLayerDownload, + StoredCarbonDownload +) +from 
cplus_api.models.profile import UserProfile +from cplus_api.utils.api_helper import convert_size +from cplus_api.tests.common import ( + FakeResolverMatchV1, + BaseAPIViewTransactionTest, + MockS3Client +) +from cplus_api.tests.factories import InputLayerF, UserF + + +class TestLayerAPIView(BaseAPIViewTransactionTest): + + def test_is_internal_user(self): + user_1 = UserF.create() + # has external role + self.assertFalse(is_internal_user(user_1)) + # no role + user_profile = user_1.user_profile + user_profile.role = None + user_profile.save() + self.assertFalse(is_internal_user(user_1)) + # no user_profile + user_profile.delete() + self.assertFalse(UserProfile.objects.filter(user=user_1).exists()) + user_1.refresh_from_db() + self.assertFalse(is_internal_user(user_1)) + # has internal role + user_2 = self.create_internal_user() + self.assertTrue(is_internal_user(user_2)) + + def test_layer_list(self): + request = self.factory.get( + reverse('v1:layer-list') + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + view = LayerList.as_view() + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data['results'], []) + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON + ) + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['results']), 1) + find_layer = self.find_layer_from_response( + response.data['results'], input_layer.uuid) + self.assertTrue(find_layer) + self.assertFalse(find_layer['url']) + self.assertFalse(input_layer.file) + # non existing file in storage + input_layer.file.name = ( + 'common_layers/ncs_pathway/test_model_2_123.tif' + ) + input_layer.save() + self.assertTrue(input_layer.file) + self.assertFalse(input_layer.is_available()) + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['results']), 1) + find_layer = 
self.find_layer_from_response( + response.data['results'], input_layer.uuid) + self.assertTrue(find_layer) + self.assertFalse(find_layer['url']) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + self.store_layer_file(input_layer, file_path) + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['results']), 1) + find_layer = self.find_layer_from_response( + response.data['results'], input_layer.uuid) + self.assertTrue(find_layer) + self.assertTrue(find_layer['url']) + + def test_default_layer_list(self): + request = self.factory.get( + reverse('v1:layer-default-list') + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + view = DefaultLayerList.as_view() + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, []) + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON + ) + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + find_layer = self.find_layer_from_response( + response.data, input_layer.uuid) + self.assertTrue(find_layer) + self.assertFalse(find_layer['url']) + self.assertFalse(input_layer.file) + + def test_layer_access(self): + input_layer_1 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON + ) + input_layer_2 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.INTERNAL + ) + input_layer_3 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE + ) + self.assertTrue(validate_layer_access( + input_layer_1, self.superuser + )) + self.assertTrue(validate_layer_access( + input_layer_1, self.user_1 + )) + user_2 = self.create_internal_user() + self.assertTrue(validate_layer_access( + input_layer_2, user_2 + )) + self.assertFalse(validate_layer_access( + input_layer_3, self.user_1 + )) + self.assertTrue(validate_layer_access( + input_layer_3, 
input_layer_3.owner + )) + # upload access + layer_upload_view = LayerUpload() + self.assertTrue(layer_upload_view.validate_upload_access( + InputLayer.PrivacyTypes.COMMON, self.superuser + )) + self.assertTrue(layer_upload_view.validate_upload_access( + InputLayer.PrivacyTypes.INTERNAL, user_2 + )) + with self.assertRaises(PermissionDenied): + layer_upload_view.validate_upload_access( + InputLayer.PrivacyTypes.COMMON, self.user_1 + ) + self.assertTrue(layer_upload_view.validate_upload_access( + InputLayer.PrivacyTypes.PRIVATE, self.user_1 + )) + input_layer_4 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + owner=self.user_1 + ) + self.assertTrue(layer_upload_view.validate_upload_access( + InputLayer.PrivacyTypes.PRIVATE, self.user_1, + True, input_layer_4 + )) + + def test_layer_detail(self): + view = LayerDetail.as_view() + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + self.store_layer_file(input_layer, file_path) + kwargs = { + 'layer_uuid': str(input_layer.uuid) + } + request = self.factory.get( + reverse('v1:layer-detail', kwargs=kwargs) + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 200) + self.assertTrue(response.data['url']) + self.assertEqual(response.data['uuid'], str(input_layer.uuid)) + # forbidden + request = self.factory.get( + reverse('v1:layer-detail', kwargs=kwargs) + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request, **kwargs) + self.assertEqual(response.status_code, 403) + + @override_settings(DEBUG=True) + def test_layer_detail_from_dev(self): + view = LayerDetail.as_view() + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 
'models', 'test_model_1.tif' + ) + self.store_layer_file(input_layer, file_path) + kwargs = { + 'layer_uuid': str(input_layer.uuid) + } + request = self.factory.get( + reverse('v1:layer-detail', kwargs=kwargs) + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 200) + self.assertTrue(response.data['url']) + self.assertEqual(response.data['uuid'], str(input_layer.uuid)) + + def test_layer_delete(self): + view = LayerDetail.as_view() + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + self.store_layer_file(input_layer, file_path) + layer_uuid = input_layer.uuid + kwargs = { + 'layer_uuid': str(layer_uuid) + } + # forbidden + request = self.factory.delete( + reverse('v1:layer-detail', kwargs=kwargs) + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request, **kwargs) + self.assertEqual(response.status_code, 403) + # successful + request = self.factory.delete( + reverse('v1:layer-detail', kwargs=kwargs) + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 204) + self.assertFalse( + InputLayer.objects.filter( + uuid=layer_uuid + ).exists() + ) + self.assertFalse(input_layer.is_available()) + + def test_layer_update_partial(self): + view = LayerDetail.as_view() + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + self.store_layer_file(input_layer, file_path) + layer_uuid = input_layer.uuid + kwargs = { + 'layer_uuid': str(layer_uuid) + } + # forbidden + request = self.factory.patch( + reverse('v1:layer-detail', kwargs=kwargs), + data={'name': 
'test_name'} + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request, **kwargs) + self.assertEqual(response.status_code, 403) + # successful + request = self.factory.patch( + reverse('v1:layer-detail', kwargs=kwargs), + data={'name': 'test_name'} + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 200) + input_layer.refresh_from_db() + self.assertEqual(input_layer.name, 'test_name') + + def test_layer_upload(self): + view = LayerUpload.as_view() + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'common', + 'client_id': 'client-test-123', + 'file': self.read_uploaded_file(file_path) + } + # invalid access + request = self.factory.post( + reverse('v1:layer-upload'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request) + self.assertEqual(response.status_code, 403) + # upload successful + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'common', + 'client_id': 'client-test-123', + 'file': self.read_uploaded_file(file_path) + } + request = self.factory.post( + reverse('v1:layer-upload'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + self.assertIn('uuid', response.data) + layer_uuid = response.data['uuid'] + input_layer = InputLayer.objects.filter( + uuid=layer_uuid + ).first() + self.assertTrue(input_layer) + self.assertEqual(input_layer.layer_type, data['layer_type']) + self.assertEqual(input_layer.component_type, data['component_type']) + self.assertEqual(input_layer.privacy_type, data['privacy_type']) + self.assertEqual(input_layer.client_id, data['client_id']) + 
self.assertTrue(input_layer.size > 0) + self.assertTrue(input_layer.is_available()) + # test update + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'pathways', 'test_pathway_1.tif' + ) + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'private', + 'client_id': 'client-test-123', + 'file': self.read_uploaded_file(file_path), + 'uuid': layer_uuid + } + # test 403 update + request = self.factory.post( + reverse('v1:layer-upload'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request) + self.assertEqual(response.status_code, 403) + # test successful update + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'private', + 'client_id': 'client-test-123', + 'file': self.read_uploaded_file(file_path), + 'uuid': layer_uuid + } + request = self.factory.post( + reverse('v1:layer-upload'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + self.assertIn('uuid', response.data) + input_layer.refresh_from_db() + self.assertEqual(input_layer.privacy_type, data['privacy_type']) + + @override_settings(DEBUG=True) + @mock.patch('boto3.client') + def test_layer_upload_start(self, mocked_s3): + s3_client = MockS3Client() + mocked_s3.return_value = s3_client + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_start.tif' + view = LayerUploadStart.as_view() + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'common', + 'client_id': 'client-test-123', + 'name': base_filename, + 'size': os.stat(file_path).st_size + } + request = self.factory.post( + reverse('v1:layer-upload-start'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + 
self.assertIn('uuid', response.data) + self.assertIn('name', response.data) + self.assertIn('upload_urls', response.data) + self.assertEqual(response.data['name'], data['name']) + input_layer = InputLayer.objects.filter( + uuid=response.data['uuid'] + ).first() + self.assertTrue(input_layer) + self.assertFalse(input_layer.file) + self.assertEqual(input_layer.size, data['size']) + + # test with existing file + storage_backend = select_input_layer_storage() + dest_file_path = os.path.join( + storage_backend.location, + input_layer_dir_path(input_layer, base_filename) + ) + self.direct_upload_layer_file(file_path, dest_file_path) + request = self.factory.post( + reverse('v1:layer-upload-start'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + self.assertNotEqual(response.data['name'], data['name']) + input_layer = InputLayer.objects.filter( + uuid=response.data['uuid'] + ).first() + self.assertTrue(input_layer) + self.assertFalse(input_layer.file) + self.assertEqual(input_layer.size, data['size']) + self.assertEqual(input_layer.name, response.data['name']) + self.assertTrue(os.path.exists(dest_file_path)) + # test update should remove old file + self.store_layer_file( + input_layer, file_path, file_name=input_layer.name) + input_layer.refresh_from_db() + old_filename = input_layer.name + old_file_path = os.path.join( + storage_backend.location, + input_layer_dir_path(input_layer, input_layer.name) + ) + self.assertTrue(os.path.exists(old_file_path)) + self.assertTrue(storage_backend.exists(input_layer.file.name)) + data['uuid'] = str(input_layer.uuid) + request = self.factory.post( + reverse('v1:layer-upload-start'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + input_layer.refresh_from_db() + self.assertFalse(input_layer.file) + 
self.assertEqual(input_layer.name, response.data['name']) + self.assertFalse(storage_backend.exists(old_filename)) + self.assertFalse(os.path.exists(old_file_path)) + + @mock.patch('boto3.client') + def test_layer_upload_start_with_s3(self, mocked_s3): + s3_client = MockS3Client() + mocked_s3.return_value = s3_client + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_start2.tif' + view = LayerUploadStart.as_view() + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'common', + 'client_id': 'client-test-123', + 'name': base_filename, + 'size': os.stat(file_path).st_size + } + request = self.factory.post( + reverse('v1:layer-upload-start'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + self.assertIn('uuid', response.data) + self.assertIn('name', response.data) + self.assertIn('upload_urls', response.data) + self.assertEqual(response.data['name'], data['name']) + self.assertEqual( + response.data['upload_urls'], + [{ + 'part_number': 1, + 'url': 'this_is_url' + }] + ) + # test failed generate url + s3_client.raise_exc = True + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'common', + 'client_id': 'client-test-123', + 'name': base_filename, + 'size': os.stat(file_path).st_size + } + request = self.factory.post( + reverse('v1:layer-upload-start'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 400) + + @mock.patch('boto3.client') + def test_layer_upload_start_with_s3_multipart(self, mocked_s3): + s3_client = MockS3Client() + mocked_s3.return_value = s3_client + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_start2.tif' + view = 
LayerUploadStart.as_view() + data = { + 'layer_type': 0, + 'component_type': 'ncs_carbon', + 'privacy_type': 'common', + 'client_id': 'client-test-123', + 'name': base_filename, + 'size': os.stat(file_path).st_size, + 'number_of_parts': 2 + } + request = self.factory.post( + reverse('v1:layer-upload-start'), data + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 201) + self.assertIn('uuid', response.data) + self.assertIn('name', response.data) + self.assertIn('upload_urls', response.data) + self.assertEqual(response.data['name'], data['name']) + self.assertEqual( + response.data['upload_urls'], + [{ + 'part_number': 1, + 'url': 'this_is_url' + }, { + 'part_number': 2, + 'url': 'this_is_url' + }] + ) + multipart_upload_id = response.data.get('multipart_upload_id', '') + self.assertTrue(multipart_upload_id) + self.assertTrue(MultipartUpload.objects.filter( + upload_id=multipart_upload_id, + input_layer_uuid=response.data['uuid'] + ).exists()) + + def test_layer_upload_finish(self): + view = LayerUploadFinish.as_view() + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_finish.tif' + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name=base_filename, + size=10 + ) + kwargs = { + 'layer_uuid': str(input_layer.uuid) + } + payload = {} + # file not exist + request = self.factory.post( + reverse('v1:layer-upload-finish', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 400) + self.check_validation_error_string(response.data, 'does not exist') + input_layer.refresh_from_db() + self.assertFalse(input_layer.file.name) + # size not match + storage_backend = select_input_layer_storage() + dest_file_path = 
os.path.join( + storage_backend.location, + input_layer_dir_path(input_layer, base_filename) + ) + self.direct_upload_layer_file(file_path, dest_file_path) + request = self.factory.post( + reverse('v1:layer-upload-finish', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 400) + self.check_validation_error_string( + response.data, 'file size missmatch') + input_layer.refresh_from_db() + self.assertFalse(input_layer.file.name) + # succcess + input_layer.size = os.stat(file_path).st_size + input_layer.save(update_fields=['size']) + request = self.factory.post( + reverse('v1:layer-upload-finish', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 200) + input_layer.refresh_from_db() + self.assertTrue(input_layer.file.name) + self.assertTrue(input_layer.is_available()) + + @mock.patch('boto3.client') + def test_layer_upload_finish_with_multipart(self, mocked_s3): + s3_client = MockS3Client() + mocked_s3.return_value = s3_client + view = LayerUploadFinish.as_view() + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_finish2.tif' + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name=base_filename, + size=10 + ) + kwargs = { + 'layer_uuid': str(input_layer.uuid) + } + payload = { + 'multipart_upload_id': 'this_is_upload_id', + 'items': [{ + 'part_number': 1, + 'etag': 'etag-1' + }, { + 'part_number': 2, + 'etag': 'etag-2' + }] + } + MultipartUpload.objects.create( + upload_id=payload['multipart_upload_id'], + input_layer_uuid=input_layer.uuid, + created_on=timezone.now(), + uploader=input_layer.owner, + parts=10 + ) + input_layer.size = 
os.stat(file_path).st_size + input_layer.save(update_fields=['size']) + self.store_layer_file(input_layer, file_path, base_filename) + request = self.factory.post( + reverse('v1:layer-upload-finish', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 200) + input_layer.refresh_from_db() + self.assertTrue(input_layer.file.name) + self.assertTrue(input_layer.is_available()) + self.assertFalse(MultipartUpload.objects.filter( + upload_id=payload['multipart_upload_id'], + input_layer_uuid=str(input_layer.uuid) + ).exists()) + + def test_check_layer(self): + view = CheckLayer.as_view() + # create layer by superuser + layer_1 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name='test_superuser_layer.tif', + size=10, + owner=self.superuser, + client_id='layer-1' + ) + # create layer by user with+without file + layer_2 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name='test_layer_2.tif', + size=10, + owner=self.user_1, + client_id='layer-2' + ) + layer_3 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name='test_layer_3.tif', + size=10, + owner=self.user_1, + client_id='layer-3' + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + self.store_layer_file(layer_3, file_path, layer_3.name) + # test with layer_uuid + data = [ + str(layer_1.uuid), + str(layer_2.uuid), + str(layer_3.uuid), + str(uuid.uuid4()) + ] + request = self.factory.post( + reverse('v1:layer-check') + '?id_type=layer_uuid', + data, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertIn('available', response.data) + self.assertIn('unavailable', response.data) + self.assertIn('invalid', response.data) + 
self.assertEqual(len(response.data['invalid']), 2) + self.assertIn(str(layer_1.uuid), response.data['invalid']) + self.assertIn(str(layer_2.uuid), response.data['unavailable']) + self.assertIn(str(layer_3.uuid), response.data['available']) + # test with client id + data = [ + layer_1.client_id, + layer_2.client_id, + layer_3.client_id, + 'test-layer-invalid' + ] + request = self.factory.post( + reverse('v1:layer-check') + '?id_type=client_id', + data, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data['invalid']), 2) + self.assertIn('test-layer-invalid', response.data['invalid']) + self.assertIn(layer_1.client_id, response.data['invalid']) + self.assertIn(layer_2.client_id, response.data['unavailable']) + self.assertIn(layer_3.client_id, response.data['available']) + + def test_convert_size(self): + self.assertEqual(convert_size(0), '0B') + self.assertEqual(convert_size(1024), '1.0 KB') + self.assertEqual(convert_size(1024 * 1024), '1.0 MB') + + @mock.patch('boto3.client') + def test_abort_multipart_upload(self, mocked_s3): + s3_client = MockS3Client() + mocked_s3.return_value = s3_client + view = LayerUploadAbort.as_view() + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_finish3.tif' + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name=base_filename, + size=10 + ) + kwargs = { + 'layer_uuid': str(input_layer.uuid) + } + # test invalid payload + payload = {} + request = self.factory.post( + reverse('v1:layer-upload-abort', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 400) + # test success abort with returned parts = 1 + payload = { + 
'multipart_upload_id': 'this_is_upload_id' + } + upload_record = MultipartUpload.objects.create( + upload_id=payload['multipart_upload_id'], + input_layer_uuid=input_layer.uuid, + created_on=timezone.now(), + uploader=input_layer.owner, + parts=10 + ) + input_layer.size = os.stat(file_path).st_size + input_layer.save(update_fields=['size']) + request = self.factory.post( + reverse('v1:layer-upload-abort', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 204) + upload_record.refresh_from_db() + self.assertTrue(upload_record.is_aborted) + # test success abort with returned parts = 0 + s3_client.mock_parts = { + 'Parts': [] + } + request = self.factory.post( + reverse('v1:layer-upload-abort', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 204) + self.assertFalse( + MultipartUpload.objects.filter( + upload_id=payload['multipart_upload_id'], + input_layer_uuid=input_layer.uuid + ).exists() + ) + self.assertFalse( + InputLayer.objects.filter( + uuid=input_layer.uuid + ).exists() + ) + + @mock.patch('boto3.client') + def test_abort_multipart_upload_with_exc(self, mocked_s3): + s3_client = MockS3Client() + mocked_s3.return_value = s3_client + view = LayerUploadAbort.as_view() + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'models', 'test_model_1.tif' + ) + base_filename = 'test_model_1_finish3.tif' + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.PRIVATE, + name=base_filename, + size=10 + ) + payload = { + 'multipart_upload_id': 'this_is_upload_id' + } + MultipartUpload.objects.create( + upload_id=payload['multipart_upload_id'], + input_layer_uuid=input_layer.uuid, + created_on=timezone.now(), + 
uploader=input_layer.owner, + parts=10 + ) + input_layer.size = os.stat(file_path).st_size + input_layer.save(update_fields=['size']) + kwargs = { + 'layer_uuid': str(input_layer.uuid) + } + s3_client.raise_exc = True + request = self.factory.post( + reverse('v1:layer-upload-abort', kwargs=kwargs), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 204) + self.assertFalse( + MultipartUpload.objects.filter( + upload_id=payload['multipart_upload_id'], + input_layer_uuid=input_layer.uuid + ).exists() + ) + self.assertFalse( + InputLayer.objects.filter( + uuid=input_layer.uuid + ).exists() + ) + + def test_layer_fetch_by_client_id(self): + view = FetchLayerByClientId.as_view() + payload = [ + 'ncs_pathways--Final_Alien_Invasive_Plant_priority_norm.tif' + '_4326_20_20_1072586664' + ] + request = self.factory.post( + reverse('v1:fetch-layer-by-client-id'), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.data, []) + input_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + client_id=payload[0] + ) + request = self.factory.post( + reverse('v1:fetch-layer-by-client-id'), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + find_layer = self.find_layer_from_response( + response.data, input_layer.uuid) + self.assertTrue(find_layer) + self.assertFalse(find_layer['url']) + self.assertFalse(input_layer.file) + input_layer_2 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + client_id=payload[0] + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 
'models', 'test_model_1.tif' + ) + self.store_layer_file( + input_layer_2, file_path, input_layer_2.name) + request = self.factory.post( + reverse('v1:fetch-layer-by-client-id'), + data=payload, format='json' + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.user_1 + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + find_layer = self.find_layer_from_response( + response.data, input_layer_2.uuid) + self.assertTrue(find_layer) + self.assertTrue(find_layer['url']) + self.assertEqual(find_layer['uuid'], str(input_layer_2.uuid)) + + def test_reference_layer_not_exist_yet(self): + view = ReferenceLayerDownload.as_view() + request = self.factory.get( + reverse('v1:reference-layer-download'), + format='json' + ) + request.resolver_match = FakeResolverMatchV1 + response = view(request) + self.assertEqual(response.status_code, 404) + + def test_reference_layer_not_available(self): + view = ReferenceLayerDownload.as_view() + InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + component_type=InputLayer.ComponentTypes.REFERENCE_LAYER + ) + request = self.factory.get( + reverse('v1:reference-layer-download'), + format='json' + ) + request.resolver_match = FakeResolverMatchV1 + response = view(request) + self.assertEqual(response.status_code, 404) + self.assertEqual( + response.data, + {'detail': 'Reference layer is not available.'} + ) + + def test_reference_layer_download(self): + bbox = '29.134295060,-31.158062261,29.279926683,-31.094568889' + view = ReferenceLayerDownload.as_view() + reference_layer_1 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + component_type=InputLayer.ComponentTypes.REFERENCE_LAYER + ) + reference_layer_2 = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + component_type=InputLayer.ComponentTypes.REFERENCE_LAYER + ) + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'reference_layer.tif' + ) + 
self.store_layer_file( + reference_layer_1, file_path, reference_layer_1.name) + self.store_layer_file( + reference_layer_2, file_path, reference_layer_2.name) + request = self.factory.get( + f"{reverse('v1:reference-layer-download')}?bbox={bbox}", + format='json' + ) + request.resolver_match = FakeResolverMatchV1 + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertIn('X-Accel-Redirect', response.headers) + file_path = os.path.join( + settings.TEMPORARY_LAYER_DIR, + response.headers['X-Accel-Redirect'].replace('/userfiles/', '') + ) + self.assertTrue(os.path.exists(file_path)) + # Test the streamed content + import rasterio + + with rasterio.open(file_path) as dataset: + expected_area = 0.01331516565230782 + bbox_polygon = Polygon.from_bbox(dataset.bounds) + self.assertAlmostEqual( + bbox_polygon.area, + expected_area, + places=3 + ) + os.remove(file_path) + + def test_pwl_layer_download(self): + bbox = '29.134295060,-31.158062261,29.279926683,-31.094568889' + view = DefaultLayerDownload.as_view() + priority_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + component_type=InputLayer.ComponentTypes.PRIORITY_LAYER + ) + + file_path = absolute_path( + 'cplus_api', 'tests', 'data', + 'priority_layer.tif' + ) + self.store_layer_file( + priority_layer, file_path, priority_layer.name) + + kwargs = { + 'layer_uuid': str(priority_layer.uuid) + } + + endpoint = reverse('v1:default-priority-layer-download', kwargs=kwargs) + request = self.factory.get(f"""{endpoint}?bbox={bbox}""") + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 200) + self.assertIn('X-Accel-Redirect', response.headers) + file_path = os.path.join( + settings.TEMPORARY_LAYER_DIR, + response.headers['X-Accel-Redirect'].replace('/userfiles/', '') + ) + self.assertTrue(os.path.exists(file_path)) + # Test the streamed content + import rasterio + + 
with rasterio.open(file_path) as dataset: + expected_area = 0.00924664281410274 + bbox_polygon = Polygon.from_bbox(dataset.bounds) + self.assertAlmostEqual( + bbox_polygon.area, + expected_area, + places=3 + ) + os.remove(file_path) + + # Test with non-overlapping bbox + non_overlapping_bbox = ( + '28.1,-1.1,28.2,-1.0' + ) + endpoint = reverse('v1:default-priority-layer-download', kwargs=kwargs) + request = self.factory.get( + f"""{endpoint}?bbox={non_overlapping_bbox}""" + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 400) + + # Test with invalid bbox + invalid_bbox = ( + '29.279926683,-31.094568889' + ) + endpoint = reverse('v1:default-priority-layer-download', kwargs=kwargs) + request = self.factory.get( + f"""{endpoint}?bbox={invalid_bbox}""" + ) + request.resolver_match = FakeResolverMatchV1 + request.user = self.superuser + response = view(request, **kwargs) + self.assertEqual(response.status_code, 400) + + def test_stored_carbon_download(self): + bbox = '29.134295060,-31.158062261,29.279926683,-31.094568889' + view = StoredCarbonDownload.as_view() + stored_layer = InputLayerF.create( + privacy_type=InputLayer.PrivacyTypes.COMMON, + component_type=InputLayer.ComponentTypes.STORED_CARBON + ) + file_path = absolute_path( + 'cplus_api', + 'tests', + 'data', + 'stored_carbon_layer.tif' + ) + self.store_layer_file(stored_layer, file_path, stored_layer.name) + request = self.factory.get( + f"{reverse('v1:stored-carbon-download')}?bbox={bbox}", + format='json' + ) + request.resolver_match = FakeResolverMatchV1 + response = view(request) + self.assertEqual(response.status_code, 200) + self.assertIn('X-Accel-Redirect', response.headers) + + file_path = os.path.join( + settings.TEMPORARY_LAYER_DIR, + response.headers['X-Accel-Redirect'].replace('/userfiles/', '') + ) + self.assertTrue(os.path.exists(file_path)) + + import rasterio + with 
rasterio.open(file_path) as ds: + bbox_polygon = Polygon.from_bbox(ds.bounds) + self.assertTrue(bbox_polygon.area > 0) diff --git a/django_project/cplus_api/urls_v1.py b/django_project/cplus_api/urls_v1.py index 0facec2..3941f22 100644 --- a/django_project/cplus_api/urls_v1.py +++ b/django_project/cplus_api/urls_v1.py @@ -12,6 +12,7 @@ DefaultLayerList, ReferenceLayerDownload, DefaultLayerDownload, + StoredCarbonDownload, ) from cplus_api.api_views.scenario import ( ScenarioAnalysisSubmit, @@ -78,6 +79,11 @@ DefaultLayerDownload.as_view(), name="default-priority-layer-download", ), + path( + "stored_carbon/download/", + StoredCarbonDownload.as_view(), + name="stored-carbon-download", + ), ] # SCENARIO ANALYSIS API diff --git a/django_project/cplus_api/utils/qgis_helper.py b/django_project/cplus_api/utils/qgis_helper.py index e54b7a4..7051858 100644 --- a/django_project/cplus_api/utils/qgis_helper.py +++ b/django_project/cplus_api/utils/qgis_helper.py @@ -1,7 +1,9 @@ """QGIS initialization utils for Celery tasks.""" -import logging from contextlib import contextmanager +import logging +import os +import uuid logger = logging.getLogger(__name__) @@ -69,3 +71,85 @@ def create_bbox_vector_layer(extent): vector_layer.updateExtents() return vector_layer + + +_processing_ready = False + + +def _configure_processing(): + """Initialize QGIS Processing providers once.""" + global _processing_ready + if _processing_ready: + return + + from qgis.core import QgsApplication + try: + import processing # noqa: F401 + from qgis.analysis import QgsNativeAlgorithms + QgsApplication.processingRegistry().addProvider(QgsNativeAlgorithms()) + try: + from processing.algs.gdal.GdalAlgorithmProvider import ( + GdalAlgorithmProvider, + ) + QgsApplication.processingRegistry().addProvider( + GdalAlgorithmProvider() + ) + except Exception: + # If GDAL is already available, this is fine + pass + _processing_ready = True + logger.info("QGIS Processing initialized") + except Exception as exc: + 
logger.exception("Failed to initialize QGIS Processing: %s", exc) + raise + + +def clip_raster_by_bbox_qgis(input_path: str, bbox, temp_dir: str) -> str: + """ + Clip a raster using QGIS Processing - GDAL: cliprasterbyextent. + + Note: This function must be called within a qgis_application() + context. + + :param input_path: path to input raster which should be in + EPSG:4326. + :type input_path: str + + :param bbox: minx, miny, maxx, maxy in EPSG:4326 + :type bbox: iterable (tuple/list) + + :param temp_dir: directory for output. + :type temp_dir: str + + :returns: Path to the output clipped GeoTIFF. + :rtype: str + """ + from qgis.core import QgsRectangle + import processing + + _configure_processing() + + minx, miny, maxx, maxy = bbox + extent = QgsRectangle(minx, miny, maxx, maxy) + + # Use QgsRectangle to normalize the extents. + projwin = ( + f"{extent.xMinimum()}," + f"{extent.yMaximum()}," + f"{extent.xMaximum()}," + f"{extent.yMinimum()}" + ) + out_path = os.path.join(temp_dir, f"{uuid.uuid4().hex}.tif") + + params = { + "INPUT": input_path, + "PROJWIN": projwin, + "NODATA": None, + "OPTIONS": "", + "DATA_TYPE": 0, # use input data type + "EXTRA": "", + "OUTPUT": out_path, + } + processing.run("gdal:cliprasterbyextent", params) + + return out_path