33 changes: 30 additions & 3 deletions atomic_operations/parsers.py
@@ -188,6 +188,30 @@ def check_root(self, result):
pointer=f"/{ATOMIC_OPERATIONS}"
)

def parse_operation_metadata(self, resource_identifier_object: dict, metadata: dict):
"""Parse the meta object from operation data if it exists"""
if not metadata:
return {}
idx = None
if "id" in resource_identifier_object:
idx = resource_identifier_object.get("id")
elif "lid" in resource_identifier_object:
idx = resource_identifier_object.get("lid")
if not isinstance(metadata, dict):
raise JsonApiParseError(
id="invalid-operation-meta-object",
detail="Received operation meta data value is not valid",
pointer=f"{ATOMIC_OPERATIONS}/{idx}/meta",
)
for key, value in metadata.items():
if key == "include" and not isinstance(value, list):
raise JsonApiParseError(
id="invalid-operation-include-value",
detail="Received operation include value is not a list",
pointer=f"{ATOMIC_OPERATIONS}/{idx}/meta/include",
)
return {"meta": metadata}

def parse_operation(self, resource_identifier_object, result):
_parsed_data = self.parse_id_lid_and_type(resource_identifier_object)
_parsed_data.update(self.parse_attributes(resource_identifier_object))
@@ -207,6 +231,7 @@ def parse_data(self, result, parser_context):
for idx, operation in enumerate(result[ATOMIC_OPERATIONS]):

self.check_operation(idx, operation)
meta = operation.get("meta")

if operation["op"] == "update" and operation.get("ref"):
# special case relation update
@@ -224,12 +249,14 @@
operation_code = f'{operation["op"]}-relationship'

else:
data = operation.get(
"data", operation.get("ref")
)
_parsed_data = self.parse_operation(
resource_identifier_object=data,
result=result
)
                _parsed_data.update(self.parse_operation_metadata(data, meta))
operation_code = operation["op"]

parsed_data.append({
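A rough sketch of the per-operation payload that the new `parse_operation_metadata` accepts and the parsed entry it contributes to, mirroring the `update` operation added to `tests/test_parsers.py` further down (ids and attribute values are just examples):

```python
# An atomic "update" operation carrying a per-operation meta object.
# "include" must be a list, otherwise the parser raises
# JsonApiParseError (id "invalid-operation-include-value").
operation = {
    "op": "update",
    "data": {
        "id": "13",
        "type": "articles",
        "attributes": {"title": "New Title"},
    },
    "meta": {"include": ["author"]},
}

# Shape of the corresponding entry in the parser's output, as asserted in
# the updated expected_result of test_parse.
parsed_entry = {
    "update": {
        "id": "13",
        "type": "articles",
        "title": "New Title",
        "meta": {"include": ["author"]},
    }
}
```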
36 changes: 36 additions & 0 deletions atomic_operations/renderers.py
@@ -41,6 +41,9 @@ class AtomicResultRenderer(JSONRenderer):

media_type = ATOMIC_CONTENT_TYPE
format = ATOMIC_MEDIA_TYPE
# the current atomic operation request data being rendered
current_operation_request_data = None


def check_error(self, operation_result_data, accepted_media_type, renderer_context):
# primitive check if any operation has errors while parsing
@@ -52,6 +55,35 @@ def check_error(self, operation_result_data, accepted_media_type, renderer_context):
except Exception:
pass

def extract_included(
self, fields, resource, resource_instance, included_resources, included_cache
):
"""
        This method is called by the super class (JSONRenderer) render method. The
        value of the included_resources argument is set by a rest_framework_json_api
        utility function called `get_included_resources`, which checks the request's
        query_params for the `include` param. atomic_operations does not take the
        include list from the query string.

Because we cannot override the `get_included_resources` function without doing a
monkey patch, we override extract_included to use the include value from the
current atomic operation request data. Then we call the original
extract_included method with an updated included_resources value.

In order to have access to the current atomic operation's request data we make
this method an instance method and access self.current_operation_request_data.

Relevant django-rest-framework-json-api files:
https://github.com/django-json-api/django-rest-framework-json-api/blob/main/rest_framework_json_api/renderers.py#L559
https://github.com/django-json-api/django-rest-framework-json-api/blob/main/rest_framework_json_api/utils.py#L318
"""
op_data = self.current_operation_request_data
if op_data:
included_resources = op_data.get("meta", {}).get("include", [])
return JSONRenderer.extract_included(
fields, resource, resource_instance, included_resources, included_cache
)

def render(self, data: List[OrderedDict], accepted_media_type=None, renderer_context=None):
renderer_context = renderer_context or {"view": {}}

@@ -65,6 +97,10 @@ def render(self, data: List[OrderedDict], accepted_media_type=None, renderer_context=None):
# pass in the resource name
renderer_context["view"].resource_name = get_resource_type_from_serializer(
operation_result_data.serializer)
# make request data accessible to extract_include
self.current_operation_request_data = operation_result_data.serializer._kwargs.get(
"data"
)
rendered_primary_data = super().render(
operation_result_data, accepted_media_type, renderer_context)
atomic_results.append(rendered_primary_data.decode("UTF-8"))
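The renderer changes exist so that an operation which requested `meta.include` comes back as a compound document. A hypothetical result body (types, ids and attributes invented here) following the atomic operations extension's `atomic:results` layout:

```python
# Hypothetical rendered body for the update operation above, assuming the
# article's "author" relationship points at author 9; only the overall
# shape matters, not the concrete values.
atomic_result_document = {
    "atomic:results": [
        {
            "data": {
                "type": "articles",
                "id": "13",
                "attributes": {"title": "New Title"},
            },
            # present because the operation sent meta.include == ["author"]
            "included": [
                {"type": "authors", "id": "9", "attributes": {"name": "A. Writer"}},
            ],
        }
    ]
}
```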
50 changes: 50 additions & 0 deletions atomic_operations/serializers.py
@@ -0,0 +1,50 @@
import inflection

from rest_framework.exceptions import ParseError


class AtomicOperationIncludedResourcesValidationMixin:
"""
    Heavily based on django-rest-framework-json-api's IncludedResourcesValidationMixin.
A serializer mixin that adds validation of `include` data to
support compound documents.

    Specification: https://jsonapi.org/format/#document-compound-documents
"""

def __init__(self, *args, **kwargs):
request_data = kwargs.get("data")
context = kwargs.get("context")
request = context.get("request") if context else None
view = context.get("view") if context else None
operation_code = context.get("operation_code") if context else None
resource_type = context.get("resource_type") if context else None

def validate_path(serializer_class, field_path, path):
serializers = getattr(serializer_class, "included_serializers", None)
if serializers is None:
raise ParseError("This endpoint does not support the include parameter")
this_field_name = inflection.underscore(field_path[0])
this_included_serializer = serializers.get(this_field_name)
if this_included_serializer is None:
raise ParseError(
"This endpoint does not support the include parameter for path {}".format(path)
)
if len(field_path) > 1:
new_included_field_path = field_path[1:]
# We go down one level in the path
validate_path(this_included_serializer, new_included_field_path, path)

if request and view:
            meta_data = request_data.get("meta", {}) if request_data else {}
included_resources = meta_data.get("include", [])
for included_field_name in included_resources:
included_field_path = included_field_name.split(".")
if "related_field" in view.kwargs:
this_serializer_class = view.get_related_serializer_class()
else:
this_serializer_class = view.get_serializer_class(operation_code, resource_type)
                # let's validate the current path
validate_path(this_serializer_class, included_field_path, included_field_name)

super().__init__(*args, **kwargs)
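A simplified, self-contained sketch of the path walking the mixin performs. The real code also underscores field names via `inflection` and resolves serializers through DRF-JSON-API's `included_serializers` declarations (as in `tests/serializers.py`); the classes below are made up for illustration:

```python
from rest_framework.exceptions import ParseError


class AuthorSerializer:
    included_serializers = {}


class ArticleSerializer:
    included_serializers = {"author": AuthorSerializer}


def check_include(serializer_class, include_path):
    """Walk one dotted include path, mirroring validate_path in the mixin."""
    serializers = getattr(serializer_class, "included_serializers", None)
    if serializers is None:
        raise ParseError("This endpoint does not support the include parameter")
    head, _, rest = include_path.partition(".")
    nested = serializers.get(head)
    if nested is None:
        raise ParseError(
            "This endpoint does not support the include parameter "
            f"for path {include_path}"
        )
    if rest:
        # descend one level for paths like "author.bio"
        check_include(nested, rest)


check_include(ArticleSerializer, "author")        # accepted
# check_include(ArticleSerializer, "author.bio")  # would raise ParseError
```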
16 changes: 13 additions & 3 deletions atomic_operations/views.py
@@ -11,6 +11,7 @@
from atomic_operations.exceptions import UnprocessableEntity
from atomic_operations.parsers import AtomicOperationParser
from atomic_operations.renderers import AtomicResultRenderer
from atomic_operations.serializers import AtomicOperationIncludedResourcesValidationMixin


class AtomicOperationView(APIView):
@@ -46,7 +47,13 @@ def get_serializer_class(self, operation_code: str, resource_type: str):
serializer_class = self.get_serializer_classes().get(
f"{operation_code}:{resource_type}")
if serializer_class:
# wrap operation's serializer with AtomicOperationIncludedResourcesValidationMixin
wrapped_serializer_class = type(
"WrappedSerializer",
(AtomicOperationIncludedResourcesValidationMixin, serializer_class),
{},
)
return wrapped_serializer_class
else:
# TODO: is this error message correct? Check jsonapi spec for this
raise ImproperlyConfigured(
@@ -57,6 +64,8 @@ def get_serializer(self, idx, operation_code, resource_type, *args, **kwargs):
Return the serializer instance that should be used for validating and
deserializing input, and for serializing output.
"""
self._operation_code = operation_code
self._resource_type = resource_type
serializer_class = self.get_serializer_class(
operation_code, resource_type)
kwargs.setdefault('context', self.get_serializer_context())
@@ -88,8 +97,9 @@ def get_serializer_context(self):
return {
'request': self.request,
'format': self.format_kwarg,
'view': self,
'operation_code': self._operation_code,
'resource_type': self._resource_type,
}

def post(self, request, *args, **kwargs):
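`get_serializer_class` now builds the serializer class on the fly. This is the usual dynamic-mixin pattern with `type()`: projects keep registering their plain serializers and the include validation is layered on at request time. A toy illustration with made-up classes:

```python
class IncludeValidation:
    def __init__(self, *args, **kwargs):
        # include validation would run here before normal initialization
        super().__init__(*args, **kwargs)


class ArticleSerializer:
    def __init__(self, *args, **kwargs):
        self.initial_kwargs = kwargs


# Same call as in get_serializer_class: a one-off subclass combining both.
Wrapped = type("WrappedSerializer", (IncludeValidation, ArticleSerializer), {})

# The MRO places the mixin before the wrapped serializer, so its __init__
# runs first and then delegates via super().
assert Wrapped.__mro__[1] is IncludeValidation
assert Wrapped("x", data={"meta": {}}).initial_kwargs == {"data": {"meta": {}}}
```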
16 changes: 10 additions & 6 deletions tests/serializers.py
@@ -3,12 +3,6 @@
from tests.models import BasicModel, RelatedModel, RelatedModelTwo


class RelatedModelSerializer(ModelSerializer):
class Meta:
fields = "__all__"
@@ -19,3 +13,13 @@ class RelatedModelTwoSerializer(ModelSerializer):
class Meta:
fields = "__all__"
model = RelatedModelTwo


class BasicModelSerializer(ModelSerializer):
class Meta:
fields = "__all__"
model = BasicModel

included_serializers = {
"to_one": RelatedModelSerializer,
}
51 changes: 50 additions & 1 deletion tests/test_parsers.py
@@ -66,6 +66,19 @@ def test_parse(self):
{"type": "tags", "id": "2"},
{"type": "tags", "id": "3"}
]
}, {
"op": "update",
"data": {
"id": "13",
"type": "articles",
"attributes": {
"title": "New Title"
}
},
"meta": {
"include": ["author"]
}
}
]
}
@@ -104,7 +117,14 @@ def test_parse(self):
"type": "articles",
"tags": [{'type': 'tags', 'id': '2'}, {'type': 'tags', 'id': '3'}]
}
}, {
"update": {
"id": "13",
"type": "articles",
"title": "New Title",
"meta": {"include": ["author"]}
}
},
]
self.assertEqual(expected_result, result)

@@ -457,3 +477,32 @@ def test_primary_data_with_id_and_lid(self):
"parser_context": self.parser_context
}
)

def test_invalid_include_value_in_operation_meta(self):
data = {
ATOMIC_OPERATIONS: [
{
"op": "update",
"data": {
"id": "1",
"type": "articles",
"attributes": {
"title": "Title Change"
},
},
"meta": {
"include": 123
}
}
]
}
stream = BytesIO(json.dumps(data).encode("utf-8"))
self.assertRaisesRegex(
JsonApiParseError,
"Received operation include value is not a list",
self.parser.parse,
**{
"stream": stream,
"parser_context": self.parser_context
}
)