"""Flow importer"""
from contextlib import contextmanager
from copy import deepcopy
from json import loads
from typing import Any, Dict

from dacite import from_dict
from dacite.exceptions import DaciteError
from django.apps import apps
from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q
from django.db.utils import IntegrityError
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
from structlog import BoundLogger, get_logger

from passbook.flows.models import Flow, FlowStageBinding, Stage
from passbook.flows.transfer.common import (
    EntryInvalidError,
    FlowBundle,
    FlowBundleEntry,
)
from passbook.lib.models import SerializerModel
from passbook.policies.models import Policy, PolicyBinding
from passbook.stages.prompt.models import Prompt

ALLOWED_MODELS = (Flow, FlowStageBinding, Stage, Policy, PolicyBinding, Prompt)
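
# Illustrative shape of the bundle this module consumes, inferred from how the
# FlowBundle/FlowBundleEntry fields are read below (all values are hypothetical):
#
#     {
#         "version": 1,
#         "entries": [
#             {
#                 "identifiers": {"pk": "some-uuid"},
#                 "model": "passbook_flows.flow",
#                 "attrs": {"name": "Example", "slug": "example"}
#             }
#         ]
#     }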


@contextmanager
def transaction_rollback():
    """Enters an atomic transaction and always triggers a rollback at the end of the block."""
    atomic = transaction.atomic()
    atomic.__enter__()
    yield
    atomic.__exit__(IntegrityError, None, None)
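
# Usage sketch (hypothetical example): every write made inside the block is
# discarded on exit, which validate() below relies on to dry-run an import:
#
#     with transaction_rollback():
#         ...  # create or update models freely
#     # all changes made inside the block have been rolled back here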


class FlowImporter:
    """Import Flow from json"""

    __import: FlowBundle
    __pk_map: Dict[Any, Model]

    logger: BoundLogger

    def __init__(self, json_input: str):
        self.logger = get_logger()
        self.__pk_map = {}
        import_dict = loads(json_input)
        try:
            self.__import = from_dict(FlowBundle, import_dict)
        except DaciteError as exc:
            raise EntryInvalidError from exc
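
    # Construction sketch (hypothetical caller code): a bundle whose structure
    # does not match the FlowBundle dataclass is rejected in the constructor as
    # EntryInvalidError rather than a raw DaciteError:
    #
    #     try:
    #         importer = FlowImporter(bundle_json)
    #     except EntryInvalidError:
    #         ...  # reject the uploaded bundle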

    def __update_pks_for_attrs(self, attrs: Dict[str, Any]) -> Dict[str, Any]:
        """Replace any value if it is a known primary key of another object"""

        def updater(value) -> Any:
            if value in self.__pk_map:
                self.logger.debug("updating reference in entry", value=value)
                return self.__pk_map[value]
            return value

        for key, value in attrs.items():
            if isinstance(value, (list, dict)):
                for idx, _inner_value in enumerate(value):
                    attrs[key][idx] = updater(_inner_value)
            else:
                attrs[key] = updater(value)
        return attrs
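
    # Remapping sketch (hypothetical pks): if a previously imported object was
    # saved under a new local pk, references to its old pk are rewritten:
    #
    #     self.__pk_map == {"old-pk": "new-pk"}
    #     self.__update_pks_for_attrs({"stage": "old-pk", "order": 0})
    #     # -> {"stage": "new-pk", "order": 0}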

    def __query_from_identifier(self, attrs: Dict[str, Any]) -> Q:
        """Generate an or'd query from all identifiers in an entry"""
        # Since identifiers can also be pk-references to other objects (see FlowStageBinding)
        # we have to ensure those references are also replaced
        main_query = Q(pk=attrs["pk"])
        sub_query = Q()
        for identifier, value in attrs.items():
            if identifier == "pk":
                continue
            sub_query &= Q(**{identifier: value})
        return main_query | sub_query
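
    # Query sketch (hypothetical identifiers): for {"pk": "abc", "slug": "welcome"}
    # this returns Q(pk="abc") | Q(slug="welcome"), i.e. match the entry either
    # by its pk or by all of its other identifiers combined.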

    def _validate_single(self, entry: FlowBundleEntry) -> BaseSerializer:
        """Validate a single entry"""
        model_app_label, model_name = entry.model.split(".")
        model: SerializerModel = apps.get_model(model_app_label, model_name)
        if not isinstance(model(), ALLOWED_MODELS):
            raise EntryInvalidError(f"Model {model} not allowed")

        # If we try to validate without referencing a possible instance
        # we'll get a duplicate error, hence we load the model here and return
        # the full serializer for later usage
        # Because a model might have multiple unique columns, we chain all identifiers together
        # to create an OR query.
        updated_identifiers = self.__update_pks_for_attrs(entry.identifiers)
        existing_models = model.objects.filter(
            self.__query_from_identifier(updated_identifiers)
        )

        serializer_kwargs = {}
        if existing_models.exists():
            model_instance = existing_models.first()
            self.logger.debug(
                "initialise serializer with instance",
                model=model,
                instance=model_instance,
                pk=model_instance.pk,
            )
            serializer_kwargs["instance"] = model_instance
        else:
            self.logger.debug(
                "initialise new instance", model=model, **updated_identifiers
            )
        full_data = self.__update_pks_for_attrs(entry.attrs)
        full_data.update(updated_identifiers)
        serializer_kwargs["data"] = full_data

        serializer: Serializer = model().serializer(**serializer_kwargs)
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:
            raise EntryInvalidError(f"Serializer errors {serializer.errors}") from exc
        return serializer

    def apply(self) -> bool:
        """Apply (create/update) flow json, in database transaction"""
        try:
            with transaction.atomic():
                if not self._apply_models():
                    self.logger.debug("Reverting changes due to error")
                    raise IntegrityError
        except IntegrityError:
            return False
        else:
            self.logger.debug("Committing changes")
        return True

    def _apply_models(self) -> bool:
        """Apply (create/update) flow json"""
        self.__pk_map = {}
        entries = deepcopy(self.__import.entries)
        for entry in entries:
            model_app_label, model_name = entry.model.split(".")
            model: SerializerModel = apps.get_model(model_app_label, model_name)
            # Validate each single entry
            try:
                serializer = self._validate_single(entry)
            except EntryInvalidError as exc:
                self.logger.error("entry not valid", entry=entry, error=exc)
                return False

            model = serializer.save()
            self.__pk_map[entry.identifiers["pk"]] = model.pk
            self.logger.debug("updated model", model=model, pk=model.pk)
        return True

    def validate(self) -> bool:
        """Validate loaded flow export, ensuring all models are allowed
        and serializers have no errors"""
        self.logger.debug("Starting flow import validation")
        if self.__import.version != 1:
            self.logger.warning("Invalid bundle version")
            return False
        with transaction_rollback():
            successful = self._apply_models()
            if not successful:
                self.logger.debug("Flow validation failed")
        return successful
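
# End-to-end usage sketch (hypothetical caller, e.g. an API endpoint receiving
# an exported flow): validate() dry-runs the import inside a rolled-back
# transaction, apply() performs it inside a committed one.
#
#     importer = FlowImporter(request.body.decode())
#     if not importer.validate():
#         ...  # report a validation failure
#     elif not importer.apply():
#         ...  # report an import failure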