`: Kept.
+
+ Results:
+
+
+ Value2
+
+ Value3
+
+ """
+ for child in node_.getchildren():
+ process_node(child, xpath_matches_)
+
+ # Get XPath of current node
+ node_xpath = remove_root_path(tree.getpath(node_))
+
+ # If `node_xpath` does not start with one of the occurrences previously
+ # found, it must be removed.
+ if (
+ not f'{node_xpath}/'.startswith(tuple(xpath_matches_))
+ and node_.get('do_not_delete') != 'true'
+ ):
+ if node_ != root_element:
+ node_.getparent().remove(node_)
+ elif node_xpath != '':
+ # node matches, keep its parent too.
+ node_.getparent().set('do_not_delete', 'true')
+
+ # All children have been processed and `node_` seems to be a parent we
+ # need to keep. Remove `do_not_delete` flag to avoid rendering it in
+ # final xml
+ if node_.attrib.get('do_not_delete'):
+ del node_.attrib['do_not_delete']
+
+ def remove_root_path(path_: str) -> str:
+ return path_.replace(root_path, '')
+
+ if len(nodes_to_keep):
+ xpath_matches = get_xpath_matches()
+ process_node(root_element, xpath_matches)
+
+ if rename_root_node_to:
+ tree.getroot().tag = rename_root_node_to
+
+ return etree.tostring(
+ tree,
+ pretty_print=True,
+ encoding='utf-8',
+ xml_declaration=xml_declaration,
+ ).decode()
+
+
+def add_xml_declaration(xml_content: Union[str, bytes]) -> Union[str, bytes]:
+ xml_declaration = ''
+ # Should support `lxml` and `dicttoxml`
+ start_of_declaration = '
> curl -X GET https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/reports/
+ ### Data sharing
+
+ Control sharing of submission data from this project to other projects
+
+
+ PATCH /api/v2/assets/{uid}/
+
+
+ > Example
+ >
+ > curl -X PATCH https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/
+ >
+ > **Payload**
+ >
+ > {
+ > "data_sharing": {
+ > "enabled": true,
+ > "fields": []"
+ > }
+ > }
+ >
+
+ * `fields`: Optional. List of questions whose responses will be shared. If
+ missing or empty, all responses will be shared. Questions must be
+ identified by full group path separated by slashes, e.g.
+ `group/subgroup/question_name`.
+
+ >
+ > Response
+ >
+ > HTTP 200 Ok
+ > {
+ > ...
+ > "data_sharing": {
+ > "enabled": true,
+ > "fields": []"
+ > }
+ > }
+ >
+
+
### CURRENT ENDPOINT
"""
@@ -527,8 +577,8 @@ def get_serializer_context(self):
context_ = super().get_serializer_context()
if self.action == 'list':
# To avoid making a triple join-query for each asset in the list
- # to retrieve related objects, we populated dicts key-ed by asset ids
- # with the data needed by serializer.
+ # to retrieve related objects, we populated dicts key-ed by asset
+ # ids with the data needed by serializer.
# We create one (big) query per dict instead of a separate query
# for each asset in the list.
# The serializer will be able to pick what it needs from that dict
@@ -630,14 +680,16 @@ def hash(self, request):
.order_by("uid")
)
- assets_version_ids = [asset.version_id
- for asset in accessible_assets
- if asset.version_id is not None]
+ assets_version_ids = [
+ asset.version_id
+ for asset in accessible_assets
+ if asset.version_id is not None
+ ]
# Sort alphabetically
assets_version_ids.sort()
if len(assets_version_ids) > 0:
- hash_ = get_hash(''.join(assets_version_ids))
+ hash_ = calculate_hash(''.join(assets_version_ids), algorithm='md5')
else:
hash_ = ''
@@ -658,19 +710,16 @@ def partial_update(self, request, *args, **kwargs):
if CLONE_ARG_NAME in request.data:
serializer = self._get_clone_serializer(instance)
else:
- serializer = self.get_serializer(instance, data=request.data, partial=True)
+ serializer = self.get_serializer(instance,
+ data=request.data,
+ partial=True)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
return Response(serializer.data)
def perform_create(self, serializer):
- # Check if the user is anonymous. The
- # django.contrib.auth.models.AnonymousUser object doesn't work for
- # queries.
- user = self.request.user
- if user.is_anonymous:
- user = get_anonymous_user()
+ user = get_database_user(self.request.user)
serializer.save(owner=user)
def perform_destroy(self, instance):
diff --git a/kpi/views/v2/asset_file.py b/kpi/views/v2/asset_file.py
index e7fedb9d81..1f487b49ed 100644
--- a/kpi/views/v2/asset_file.py
+++ b/kpi/views/v2/asset_file.py
@@ -1,5 +1,5 @@
# coding: utf-8
-from django.http import HttpResponseRedirect
+from django.http import HttpResponseRedirect, Http404
from private_storage.views import PrivateStorageDetailView
from rest_framework.decorators import action
from rest_framework_extensions.mixins import NestedViewSetMixin
@@ -134,7 +134,9 @@ def get_queryset(self):
file_type = self.request.GET.get('file_type')
if file_type is not None:
_queryset = _queryset.filter(file_type=file_type)
- _queryset = _queryset.filter(date_deleted__isnull=True)
+ _queryset = _queryset.filter(date_deleted__isnull=True).exclude(
+ file_type=AssetFile.PAIRED_DATA
+ )
return _queryset
def perform_create(self, serializer):
@@ -158,6 +160,7 @@ def can_access_file(self, private_file):
def content(self, *args, **kwargs):
asset_file = self.get_object()
+
if asset_file.metadata.get('redirect_url'):
return HttpResponseRedirect(asset_file.metadata.get('redirect_url'))
diff --git a/kpi/views/v2/asset_permission_assignment.py b/kpi/views/v2/asset_permission_assignment.py
index d4e92dc9e5..35310fad69 100644
--- a/kpi/views/v2/asset_permission_assignment.py
+++ b/kpi/views/v2/asset_permission_assignment.py
@@ -21,7 +21,10 @@
AssetBulkInsertPermissionSerializer,
AssetPermissionAssignmentSerializer,
)
-from kpi.utils.object_permission_helper import ObjectPermissionHelper
+from kpi.utils.object_permission import (
+ get_user_permission_assignments_queryset,
+)
+
from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin
@@ -159,7 +162,7 @@ class AssetPermissionAssignmentViewSet(AssetNestedObjectViewsetMixin,
permission_classes = (AssetPermissionAssignmentPermission,)
pagination_class = None
# filter_backends = Just kidding! Look at this instead:
- # kpi.utils.object_permission_helper.ObjectPermissionHelper.get_user_permission_assignments_queryset
+ # kpi.utils.object_permission.get_user_permission_assignments_queryset
@action(detail=False, methods=['POST'], renderer_classes=[renderers.JSONRenderer],
url_path='bulk')
@@ -266,9 +269,9 @@ def get_serializer_context(self):
return context_
def get_queryset(self):
- return ObjectPermissionHelper. \
- get_user_permission_assignments_queryset(self.asset,
- self.request.user)
+ return get_user_permission_assignments_queryset(
+ self.asset, self.request.user
+ )
def perform_create(self, serializer):
serializer.save(asset=self.asset)
diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py
index 3a7b52364b..646ea5db0b 100644
--- a/kpi/views/v2/asset_snapshot.py
+++ b/kpi/views/v2/asset_snapshot.py
@@ -11,7 +11,7 @@
from kpi.filters import RelatedAssetPermissionsFilter
from kpi.highlighters import highlight_xform
-from kpi.models import AssetSnapshot, AssetFile
+from kpi.models import AssetSnapshot, AssetFile, PairedData
from kpi.renderers import (
OpenRosaFormListRenderer,
OpenRosaManifestRenderer,
@@ -19,7 +19,6 @@
)
from kpi.serializers.v2.asset_snapshot import AssetSnapshotSerializer
from kpi.serializers.v2.open_rosa import FormListSerializer, ManifestSerializer
-from kpi.utils.log import logging
from kpi.views.no_update_model import NoUpdateModelViewSet
from kpi.views.v2.open_rosa import OpenRosaViewSetMixin
@@ -79,8 +78,19 @@ def manifest(self, request, *args, **kwargs):
"""
snapshot = self.get_object()
asset = snapshot.asset
- files = asset.asset_files.filter(file_type=AssetFile.FORM_MEDIA,
- date_deleted__isnull=True)
+ form_media_files = list(
+ asset.asset_files.filter(
+ file_type=AssetFile.FORM_MEDIA,
+ date_deleted__isnull=True,
+ )
+ )
+ files = form_media_files
+ # paired data files are treated differently from form media files
+ # void any cache when previewing the form
+ for paired_data in PairedData.objects(asset).values():
+ paired_data.void_external_xml_cache()
+ files.append(paired_data)
+
context = {'request': request}
serializer = ManifestSerializer(files, many=True, context=context)
diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py
index ec545a90df..fafeaac14a 100644
--- a/kpi/views/v2/data.py
+++ b/kpi/views/v2/data.py
@@ -1,9 +1,10 @@
# coding: utf-8
+import json
+
from django.conf import settings
from django.http import Http404
from django.utils.translation import ugettext_lazy as _
from rest_framework import (
- exceptions,
renderers,
serializers,
status,
@@ -16,8 +17,9 @@
from kpi.constants import (
INSTANCE_FORMAT_TYPE_JSON,
- PERM_ADD_SUBMISSIONS,
PERM_CHANGE_SUBMISSIONS,
+ PERM_DELETE_SUBMISSIONS,
+ PERM_VALIDATE_SUBMISSIONS,
)
from kpi.exceptions import ObjectDeploymentDoesNotExist
from kpi.models import Asset
@@ -31,6 +33,7 @@
)
from kpi.renderers import SubmissionGeoJsonRenderer, SubmissionXMLRenderer
from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin
+from kpi.serializers.v2.data import DataBulkActionsValidator
class DataViewSet(AssetNestedObjectViewsetMixin, NestedViewSetMixin,
@@ -290,8 +293,9 @@ def _get_deployment(self):
Returns the deployment for the asset specified by the request
"""
if not self.asset.has_deployment:
- raise ObjectDeploymentDoesNotExist(_('The specified asset has not been '
- 'deployed'))
+ raise ObjectDeploymentDoesNotExist(
+ _('The specified asset has not been deployed')
+ )
return self.asset.deployment
@@ -299,19 +303,30 @@ def _get_deployment(self):
renderer_classes=[renderers.JSONRenderer])
def bulk(self, request, *args, **kwargs):
deployment = self._get_deployment()
+ kwargs = {
+ 'data': request.data,
+ 'context': self.get_serializer_context(),
+ }
if request.method == 'DELETE':
- json_response = deployment.delete_submissions(request.data,
- request.user)
+ action_ = deployment.delete_submissions
+ kwargs['perm'] = PERM_DELETE_SUBMISSIONS
elif request.method == 'PATCH':
- json_response = deployment.bulk_update_submissions(
- dict(request.data), request.user
- )
+ action_ = deployment.bulk_update_submissions
+ kwargs['perm'] = PERM_CHANGE_SUBMISSIONS
+
+ bulk_actions_validator = DataBulkActionsValidator(**kwargs)
+ bulk_actions_validator.is_valid(raise_exception=True)
+ json_response = action_(bulk_actions_validator.data, request.user)
+
return Response(**json_response)
- def destroy(self, request, *args, **kwargs):
+ def destroy(self, request, pk, *args, **kwargs):
deployment = self._get_deployment()
- pk = kwargs.get("pk")
- json_response = deployment.delete_submission(pk, user=request.user)
+ # Coerce to int because back end only finds matches with same type
+ submission_id = positive_int(pk)
+ json_response = deployment.delete_submission(
+ submission_id, user=request.user
+ )
return Response(**json_response)
@action(
@@ -360,18 +375,18 @@ def list(self, request, *args, **kwargs):
# `SubmissionGeoJsonRenderer` handle the rest
return Response(
deployment.get_submissions(
- requesting_user_id=request.user,
+ user=request.user,
format_type=INSTANCE_FORMAT_TYPE_JSON,
**filters
)
)
- submissions = deployment.get_submissions(request.user.id,
+ submissions = deployment.get_submissions(request.user,
format_type=format_type,
**filters)
# Create a dummy list to let the Paginator do all the calculation
# for pagination because it does not need the list of real objects.
- # It avoids to retrieve all the objects from MongoDB
+ # It avoids retrieving all the objects from MongoDB
dummy_submissions_list = [None] * deployment.current_submissions_count
page = self.paginate_queryset(dummy_submissions_list)
if page is not None:
@@ -384,10 +399,12 @@ def retrieve(self, request, pk, *args, **kwargs):
deployment = self._get_deployment()
filters = self._filter_mongo_query(request)
try:
- submission = deployment.get_submission(positive_int(pk),
- request.user.id,
- format_type=format_type,
- **filters)
+ submission = deployment.get_submission(
+ positive_int(pk),
+ user=request.user,
+ format_type=format_type,
+ **filters,
+ )
except ValueError:
raise Http404
else:
@@ -403,8 +420,10 @@ def duplicate(self, request, pk, *args, **kwargs):
Creates a duplicate of the submission with a given `pk`
"""
deployment = self._get_deployment()
+ # Coerce to int because back end only finds matches with same type
+ submission_id = positive_int(pk)
duplicate_response = deployment.duplicate_submission(
- requesting_user=request.user, instance_id=positive_int(pk)
+ submission_id=submission_id, user=request.user
)
return Response(duplicate_response, status=status.HTTP_201_CREATED)
@@ -413,13 +432,21 @@ def duplicate(self, request, pk, *args, **kwargs):
permission_classes=[SubmissionValidationStatusPermission])
def validation_status(self, request, pk, *args, **kwargs):
deployment = self._get_deployment()
+ # Coerce to int because back end only finds matches with same type
+ submission_id = positive_int(pk)
if request.method == 'GET':
- json_response = deployment.get_validation_status(pk, request.GET, request.user)
+ json_response = deployment.get_validation_status(
+ submission_id=submission_id,
+ user=request.user,
+ params=request.GET.dict(),
+ )
else:
- json_response = deployment.set_validation_status(pk,
- request.data,
- request.user,
- request.method)
+ json_response = deployment.set_validation_status(
+ submission_id=submission_id,
+ user=request.user,
+ data=request.data,
+ method=request.method,
+ )
return Response(**json_response)
@@ -428,16 +455,22 @@ def validation_status(self, request, pk, *args, **kwargs):
permission_classes=[SubmissionValidationStatusPermission])
def validation_statuses(self, request, *args, **kwargs):
deployment = self._get_deployment()
- json_response = deployment.set_validation_statuses(request.data,
- request.user,
- request.method)
+ bulk_actions_validator = DataBulkActionsValidator(
+ data=request.data,
+ context=self.get_serializer_context(),
+ perm=PERM_VALIDATE_SUBMISSIONS
+ )
+ bulk_actions_validator.is_valid(raise_exception=True)
+ json_response = deployment.set_validation_statuses(
+ request.user, bulk_actions_validator.data)
return Response(**json_response)
def _enketo_request(self, request, pk, action, *args, **kwargs):
deployment = self._get_deployment()
+ submission_id = positive_int(pk)
json_response = deployment.get_enketo_submission_url(
- pk,
+ submission_id,
user=request.user,
params={**request.GET, 'action': action},
)
@@ -462,9 +495,9 @@ def _filter_mongo_query(self, request):
# submissions at one time
limit = filters.get('limit', settings.SUBMISSION_LIST_LIMIT)
try:
- filters['limit'] = positive_int(limit,
- strict=True,
- cutoff=settings.SUBMISSION_LIST_LIMIT)
+ filters['limit'] = positive_int(
+ limit, strict=True, cutoff=settings.SUBMISSION_LIST_LIMIT
+ )
except ValueError:
raise serializers.ValidationError(
{'limit': _('A positive integer is required')}
diff --git a/kpi/views/v2/export_task.py b/kpi/views/v2/export_task.py
index 5e77fc55f0..fd3fb26485 100644
--- a/kpi/views/v2/export_task.py
+++ b/kpi/views/v2/export_task.py
@@ -1,22 +1,15 @@
# coding: utf-8
from rest_framework import (
- exceptions,
filters,
renderers,
- serializers,
- status,
- viewsets,
)
-from rest_framework.response import Response
-from rest_framework.reverse import reverse
from rest_framework_extensions.mixins import NestedViewSetMixin
from kpi.filters import SearchFilter
from kpi.models import ExportTask
-from kpi.models.object_permission import get_anonymous_user
from kpi.permissions import ExportTaskPermission
from kpi.serializers.v2.export_task import ExportTaskSerializer
-from kpi.tasks import export_in_background
+from kpi.utils.object_permission import get_database_user
from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin
from kpi.views.no_update_model import NoUpdateModelViewSet
@@ -147,9 +140,7 @@ class ExportTaskViewSet(
]
def get_queryset(self):
- user = self.request.user
- if user.is_anonymous:
- user = get_anonymous_user()
+ user = get_database_user(self.request.user)
return self.model.objects.filter(
user=user,
data__source__icontains=self.kwargs['parent_lookup_asset'],
diff --git a/kpi/views/v2/paired_data.py b/kpi/views/v2/paired_data.py
new file mode 100644
index 0000000000..2209d79576
--- /dev/null
+++ b/kpi/views/v2/paired_data.py
@@ -0,0 +1,333 @@
+# coding: utf-8
+from django.conf import settings
+from django.core.files.base import ContentFile
+from django.http import Http404
+from django.utils import timezone
+from rest_framework import renderers, viewsets
+from rest_framework.decorators import action
+from rest_framework.response import Response
+from rest_framework_extensions.mixins import NestedViewSetMixin
+
+from kpi.constants import INSTANCE_FORMAT_TYPE_XML
+from kpi.models import Asset, AssetFile, PairedData
+from kpi.permissions import (
+ AssetEditorPermission,
+ XMLExternalDataPermission,
+)
+from kpi.serializers.v2.paired_data import PairedDataSerializer
+from kpi.renderers import SubmissionXMLRenderer
+from kpi.utils.hash import calculate_hash
+from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin
+from kpi.utils.xml import strip_nodes, add_xml_declaration
+
+
+class PairedDataViewset(AssetNestedObjectViewsetMixin,
+ NestedViewSetMixin,
+ viewsets.ModelViewSet):
+ """
+ ## List of paired project endpoints
+
+ ### Retrieve all paired projects
+
+
+ GET /api/v2/assets/{asset_uid}
/paired-data/
+
+
+ > Example
+ >
+ > curl -X GET https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/
+
+ > Response
+ >
+ > HTTP 200 OK
+ > {
+ > "count": 1,
+ > "next": null,
+ > "previous": null,
+ > "results": [
+ > {
+ > "source": "https://[kpi]/api/v2/assets/aFDZxidYs5X5oJjm2Tmdf5/",
+ > "fields": [],
+ > "filename": "external-data.xml",
+ > "url": "https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/"
+ > }
+ > ]
+ > }
+ >
+
+ This endpoint is paginated and accepts these parameters:
+
+ - `offset`: The initial index from which to return the results
+ - `limit`: Number of results to return per page
+
+ ### Create a connection between two projects
+
+
+ POST /api/v2/assets/{asset_uid}
/paired-data/
+
+
+ > Example
+ >
+ > curl -X POST https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/
+
+ > **Payload**
+ >
+ > {
+ > "source": "https://[kpi]/api/v2/assets/aFDZxidYs5X5oJjm2Tmdf5/",
+ > "filename": "external-data.xml",
+ > "fields": []",
+ > }
+ >
+ >
+ > Response
+ >
+ > HTTP 201 Created
+ > {
+ > "source": "https://[kpi]/api/v2/assets/aFDZxidYs5X5oJjm2Tmdf5/",
+ > "fields": [],
+ > "filename": "external-data.xml",
+ > "url": "https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/"
+ > }
+ >
+
+ * `fields`: Optional. List of questions whose responses will be retrieved
+ from the source data. If missing or empty, all responses will be
+ retrieved. Questions must be identified by full group path separated by
+ slashes, e.g. `group/subgroup/question_name`.
+ * `filename`: Must be unique among all asset files. Only accepts letters, numbers and '-'.
+
+ ### Retrieve a connection between two projects
+
+
+ GET /api/v2/assets/{asset_uid}
/paired-data/{paired_data_uid}/
+
+
+ > Example
+ >
+ > curl -X GET https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/
+ >
+ > Response
+ >
+ > HTTP 200 Ok
+ > {
+ > "source": "https://[kpi]/api/v2/assets/aFDZxidYs5X5oJjm2Tmdf5/",
+ > "fields": [],
+ > "filename": "external-data.xml",
+ > "url": "https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/"
+ > }
+ >
+
+ ### Update a connection between two projects
+
+
+ PATCH /api/v2/assets/{asset_uid}
/paired-data/{paired_data_uid}/
+
+
+ > Example
+ >
+ > curl -X PATCH https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/
+ >
+ > **Payload**
+ >
+ > {
+ > "filename": "data-external.xml",
+ >            "fields": ['group/question_1'],
+ > }
+ >
+
+ _Notes: `source` cannot be changed_
+
+ > Response
+ >
+ > HTTP 200 Ok
+ > {
+ > "source": "https://[kpi]/api/v2/assets/aFDZxidYs5X5oJjm2Tmdf5/",
+ > "fields": ['group/question_1'],
+ > "filename": "data-external.xml",
+ > "url": "https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/"
+ > }
+ >
+
+ ### Remove a connection between two projects
+
+
+ DELETE /api/v2/assets/{asset_uid}
/paired-data/{paired_data_uid}/
+
+
+ > Example
+ >
+ > curl -X DELETE https://[kpi]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/paired-data/pdFQheFF4cWbtcinRUqc64q/
+ >
+ > Response
+ >
+ > HTTP 204 No Content
+ >
+ >
+
+
+ ### CURRENT ENDPOINT
+ """
+
+ parent_model = Asset
+ renderer_classes = (
+ renderers.BrowsableAPIRenderer,
+ renderers.JSONRenderer,
+ SubmissionXMLRenderer,
+ )
+ lookup_field = 'paired_data_uid'
+ permission_classes = (AssetEditorPermission,)
+ serializer_class = PairedDataSerializer
+
+ @action(detail=True,
+ methods=['GET'],
+ permission_classes=[XMLExternalDataPermission],
+ renderer_classes=[SubmissionXMLRenderer],
+ filter_backends=[],
+ )
+ def external(self, request, paired_data_uid, **kwargs):
+ """
+ Returns an XML which contains data submitted to paired asset
+ Creates the endpoints
+ - /api/v2/assets//paired-data//external/
+ - /api/v2/assets//paired-data//external.xml/
+ """
+ paired_data = self.get_object()
+
+ # Retrieve the source if it exists
+ source_asset = paired_data.get_source()
+
+ if not source_asset:
+ # We can enter this condition when source data sharing has been
+ # deactivated after it has been paired with current form.
+ # We don't want to keep zombie files on storage.
+ try:
+ asset_file = self.asset.asset_files.get(uid=paired_data_uid)
+ except AssetFile.DoesNotExist:
+ pass
+ else:
+ asset_file.delete()
+
+ raise Http404
+
+ if not source_asset.has_deployment or not self.asset.has_deployment:
+ raise Http404
+
+ old_hash = None
+ # Retrieve data from related asset file.
+ # If data has already been fetched once, an `AssetFile` should exist.
+ # Otherwise, we create one to store the generated XML.
+ try:
+ asset_file = self.asset.asset_files.get(uid=paired_data_uid)
+ except AssetFile.DoesNotExist:
+ asset_file = AssetFile(
+ uid=paired_data_uid,
+ asset=self.asset,
+ file_type=AssetFile.PAIRED_DATA,
+ user=self.asset.owner,
+ )
+ # When asset file is new, we consider its content as expired to
+ # force its creation below
+ has_expired = True
+ else:
+ if not asset_file.content:
+ # if `asset_file` exists but does not have any content, it means
+ # `paired_data` has changed since last time this endpoint has been
+ # called. E.g.: Project owner has changed the questions they want
+ # to include in the `xml-external` file
+ has_expired = True
+ else:
+ old_hash = asset_file.md5_hash
+ timedelta = timezone.now() - asset_file.date_modified
+ has_expired = (
+ timedelta.total_seconds() > settings.PAIRED_DATA_EXPIRATION
+ )
+
+ # ToDo evaluate adding headers for caching and a HTTP 304 status code
+ if not has_expired:
+ return Response(asset_file.content.file.read().decode())
+
+ # If the content of `asset_file' has expired, let's regenerate the XML
+ submissions = source_asset.deployment.get_submissions(
+ self.asset.owner,
+ format_type=INSTANCE_FORMAT_TYPE_XML
+ )
+ parsed_submissions = []
+
+ for submission in submissions:
+ # Use `rename_root_node_to='data'` to rename the root node of each
+ # submission to `data` so that form authors do not have to rewrite
+ # their `xml-external` formulas any time the asset UID changes,
+ # e.g. when cloning a form or creating a project from a template.
+ # Set `use_xpath=True` because `paired_data.fields` uses full group
+ # hierarchies, not just question names.
+ parsed_submissions.append(
+ strip_nodes(
+ submission,
+ paired_data.allowed_fields,
+ use_xpath=True,
+ rename_root_node_to='data',
+ )
+ )
+
+ filename = paired_data.filename
+ parsed_submissions_to_str = ''.join(parsed_submissions)
+ root_tag_name = SubmissionXMLRenderer.root_tag_name
+ xml_ = add_xml_declaration(
+ f'<{root_tag_name}>'
+ f'{parsed_submissions_to_str}'
+ f'{root_tag_name}>'
+ )
+
+ if not parsed_submissions:
+ # We do not want to cache an empty file
+ return Response(xml_)
+
+ # We need to delete the current file (if it exists) when filename
+ # has changed. Otherwise, it would leave an orphan file on storage
+ if asset_file.pk and asset_file.content.name != filename:
+ asset_file.content.delete()
+
+ asset_file.content = ContentFile(xml_.encode(), name=filename)
+
+ # `xml_` is already there in memory, let's use its content to get its
+ # hash and store it within `asset_file` metadata
+ asset_file.set_md5_hash(calculate_hash(xml_, prefix=True))
+ asset_file.save()
+ if old_hash != asset_file.md5_hash:
+ # resync paired data to the deployment backend
+ self.asset.deployment.sync_media_files(AssetFile.PAIRED_DATA)
+
+ return Response(xml_)
+
+ def get_object(self):
+ obj = self.get_queryset(as_list=False).get(
+ self.kwargs[self.lookup_field]
+ )
+ if not obj:
+ raise Http404
+
+ # May raise a permission denied
+ self.check_object_permissions(self.request, obj)
+
+ return obj
+
+ def get_queryset(self, as_list=True):
+ queryset = PairedData.objects(self.asset)
+ if as_list:
+ return list(queryset.values())
+ return queryset
+
+ def get_serializer_context(self):
+ context_ = super().get_serializer_context()
+ context_['asset'] = self.asset
+
+ # To avoid multiple calls to DB within the serializer on the
+ # list endpoint, we retrieve all source names and cache them in a dict.
+ # The serializer can access it through the context.
+ source_uids = self.asset.paired_data.keys()
+ source__names = {}
+ records = Asset.objects.values('uid', 'name').filter(uid__in=source_uids)
+ for record in records:
+ source__names[record['uid']] = record['name']
+ context_['source__names'] = source__names
+ return context_
diff --git a/kpi/views/v2/user_asset_subscription.py b/kpi/views/v2/user_asset_subscription.py
index c3d59ad900..5a47ae4527 100644
--- a/kpi/views/v2/user_asset_subscription.py
+++ b/kpi/views/v2/user_asset_subscription.py
@@ -1,10 +1,11 @@
# coding: utf-8
from rest_framework import viewsets
-from kpi.models import UserAssetSubscription
-from kpi.models.object_permission import get_anonymous_user
-from kpi.serializers.v2.user_asset_subscription import \
- UserAssetSubscriptionSerializer
+from kpi.models import UserAssetSubscription
+from kpi.serializers.v2.user_asset_subscription import (
+ UserAssetSubscriptionSerializer,
+)
+from kpi.utils.object_permission import get_database_user
class UserAssetSubscriptionViewSet(viewsets.ModelViewSet):
@@ -13,12 +14,7 @@ class UserAssetSubscriptionViewSet(viewsets.ModelViewSet):
lookup_field = 'uid'
def get_queryset(self):
- user = self.request.user
- # Check if the user is anonymous. The
- # django.contrib.auth.models.AnonymousUser object doesn't work for
- # queries.
- if user.is_anonymous:
- user = get_anonymous_user()
+ user = get_database_user(self.request.user)
criteria = {'user': user}
if 'asset__uid' in self.request.query_params:
criteria['asset__uid'] = self.request.query_params[