core: create app transactional api (#6446)
* initial api and schema
* separate blueprint importer from yaml parsing
* cleanup
* add new "must_created" state to blueprints to prevent overwriting objects
* rework validation and error response to make it actually usable
* fix lint errors
* add defaults
* fix tests
* rework transaction_rollback
* use static method for string imports of subclass
* slight cleanup

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
This commit is contained in: parent 583c5e3ba7, commit 7649a57495
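For orientation, a minimal sketch of how the endpoint added in this PR could be called once deployed. The payload field names come from the serializer and test further down; the base URL, the /api/v3 prefix, the token and the flow UUID are placeholders, not values taken from this diff.

import requests

API = "https://authentik.example.com/api/v3"  # assumed API base path, not part of this PR
TOKEN = "<admin-api-token>"  # placeholder; the view is restricted to IsAdminUser

payload = {
    "app": {"name": "demo-app", "slug": "demo-app"},
    "provider_model": "authentik_providers_oauth2.oauth2provider",
    "provider": {
        "name": "demo-app-provider",
        "authorization_flow": "<authorization-flow-uuid>",  # placeholder UUID
    },
}

# One PUT creates the provider and the application in a single transaction.
response = requests.put(
    f"{API}/core/transactional/applications/",
    json=payload,
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
response.raise_for_status()
print(response.json())  # expected shape per the response serializer: {"applied": true, "logs": []}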
@@ -49,7 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
if content == "":
return content
context = self.instance.context if self.instance else {}
-valid, logs = Importer(content, context).validate()
+valid, logs = Importer.from_string(content, context).validate()
if not valid:
text_logs = "\n".join([x["event"] for x in logs])
raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))
@@ -18,7 +18,7 @@ class Command(BaseCommand):
"""Apply all blueprints in order, abort when one fails to import"""
for blueprint_path in options.get("blueprints", []):
content = BlueprintInstance(path=blueprint_path).retrieve()
-importer = Importer(content)
+importer = Importer.from_string(content)
valid, _ = importer.validate()
if not valid:
self.stderr.write("blueprint invalid")
@@ -9,6 +9,7 @@ from rest_framework.fields import Field, JSONField, UUIDField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger

+from authentik.blueprints.v1.common import BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.lib.models import SerializerModel

@@ -110,7 +111,7 @@ class Command(BaseCommand):
"id": {"type": "string"},
"state": {
"type": "string",
-"enum": ["absent", "present", "created"],
+"enum": [s.value for s in BlueprintEntryDesiredState],
"default": "present",
},
"conditions": {"type": "array", "items": {"type": "boolean"}},
@@ -20,7 +20,7 @@ def apply_blueprint(*files: str):
def wrapper(*args, **kwargs):
for file in files:
content = BlueprintInstance(path=file).retrieve()
-Importer(content).apply()
+Importer.from_string(content).apply()
return func(*args, **kwargs)

return wrapper

@@ -25,7 +25,7 @@ def blueprint_tester(file_name: Path) -> Callable:
def tester(self: TestPackaged):
base = Path("blueprints/")
rel_path = Path(file_name).relative_to(base)
-importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
+importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
@@ -21,14 +21,14 @@ class TestBlueprintsV1(TransactionTestCase):

def test_blueprint_invalid_format(self):
"""Test blueprint with invalid format"""
-importer = Importer('{"version": 3}')
+importer = Importer.from_string('{"version": 3}')
self.assertFalse(importer.validate()[0])
-importer = Importer(
+importer = Importer.from_string(
'{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
'"model": "authentik_core.User"}]}'
)
self.assertFalse(importer.validate()[0])
-importer = Importer(
+importer = Importer.from_string(
'{"version": 1, "entries": [{"attrs": {"name": "test"}, '
'"identifiers": {}, '
'"model": "authentik_core.Group"}]}'

@@ -54,7 +54,7 @@ class TestBlueprintsV1(TransactionTestCase):
},
)

-importer = Importer(
+importer = Importer.from_string(
'{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
'{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
'["other_value"]}}, "model": "authentik_core.Group"}]}'

@@ -103,7 +103,7 @@ class TestBlueprintsV1(TransactionTestCase):
self.assertEqual(len(export.entries), 3)
export_yaml = exporter.export_to_string()

-importer = Importer(export_yaml)
+importer = Importer.from_string(export_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())

@@ -113,14 +113,14 @@ class TestBlueprintsV1(TransactionTestCase):
"""Test export and import it twice"""
count_initial = Prompt.objects.filter(field_key="username").count()

-importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
+importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())

count_before = Prompt.objects.filter(field_key="username").count()
self.assertEqual(count_initial + 1, count_before)

-importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
+importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
self.assertTrue(importer.apply())

self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
@@ -130,7 +130,7 @@ class TestBlueprintsV1(TransactionTestCase):
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
Group.objects.filter(name="test").delete()
environ["foo"] = generate_id()
-importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
+importer = Importer.from_string(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()

@@ -248,7 +248,7 @@ class TestBlueprintsV1(TransactionTestCase):
exporter = FlowExporter(flow)
export_yaml = exporter.export_to_string()

-importer = Importer(export_yaml)
+importer = Importer.from_string(export_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())

@@ -297,7 +297,7 @@ class TestBlueprintsV1(TransactionTestCase):
exporter = FlowExporter(flow)
export_yaml = exporter.export_to_string()

-importer = Importer(export_yaml)
+importer = Importer.from_string(export_yaml)

self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())

@@ -18,7 +18,7 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
self.uid = generate_id()
import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)

-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())

@@ -18,7 +18,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
"fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
)

-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
# Ensure objects exist

@@ -35,7 +35,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
"fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
)

-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
# Ensure objects do not exist
@@ -15,7 +15,7 @@ class TestBlueprintsV1State(TransactionTestCase):
flow_slug = generate_id()
import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)

-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
# Ensure object exists

@@ -30,7 +30,7 @@ class TestBlueprintsV1State(TransactionTestCase):
self.assertEqual(flow.title, "bar")

# Ensure importer updates it
-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@@ -41,7 +41,7 @@ class TestBlueprintsV1State(TransactionTestCase):
flow_slug = generate_id()
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)

-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
# Ensure object exists

@@ -56,7 +56,7 @@ class TestBlueprintsV1State(TransactionTestCase):
self.assertEqual(flow.title, "bar")

# Ensure importer doesn't update it
-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@@ -67,7 +67,7 @@ class TestBlueprintsV1State(TransactionTestCase):
flow_slug = generate_id()
import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)

-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
# Ensure object exists

@@ -75,7 +75,7 @@ class TestBlueprintsV1State(TransactionTestCase):
self.assertEqual(flow.slug, flow_slug)

import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
-importer = Importer(import_yaml)
+importer = Importer.from_string(import_yaml)
self.assertTrue(importer.validate()[0])
self.assertTrue(importer.apply())
flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@@ -12,6 +12,7 @@ from uuid import UUID
from deepmerge import always_merger
from django.apps import apps
from django.db.models import Model, Q
+from rest_framework.exceptions import ValidationError
from rest_framework.fields import Field
from rest_framework.serializers import Serializer
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode

@@ -52,6 +53,7 @@ class BlueprintEntryDesiredState(Enum):
ABSENT = "absent"
PRESENT = "present"
CREATED = "created"
+MUST_CREATED = "must_created"


@dataclass
@@ -206,8 +208,8 @@ class KeyOf(YAMLTag):
):
return _entry._state.instance.pbm_uuid
return _entry._state.instance.pk
-raise EntryInvalidError(
-f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
+raise EntryInvalidError.from_entry(
+f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance", entry
)


@@ -278,7 +280,7 @@ class Format(YAMLTag):
try:
return self.format_string % tuple(args)
except TypeError as exc:
-raise EntryInvalidError(exc)
+raise EntryInvalidError.from_entry(exc, entry)


class Find(YAMLTag):
@@ -355,13 +357,15 @@ class Condition(YAMLTag):
args.append(arg)

if not args:
-raise EntryInvalidError("At least one value is required after mode selection.")
+raise EntryInvalidError.from_entry(
+"At least one value is required after mode selection.", entry
+)

try:
comparator = self._COMPARATORS[self.mode.upper()]
return comparator(tuple(bool(x) for x in args))
except (TypeError, KeyError) as exc:
-raise EntryInvalidError(exc)
+raise EntryInvalidError.from_entry(exc, entry)


class If(YAMLTag):

@@ -393,7 +397,7 @@ class If(YAMLTag):
blueprint,
)
except TypeError as exc:
-raise EntryInvalidError(exc)
+raise EntryInvalidError.from_entry(exc, entry)


class Enumerate(YAMLTag, YAMLTagContext):
@@ -425,9 +429,10 @@ class Enumerate(YAMLTag, YAMLTagContext):

def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
-raise EntryInvalidError(
+raise EntryInvalidError.from_entry(
f"{self.__class__.__name__} tag's iterable references this tag's context. "
-"This is a noop. Check you are setting depth bigger than 0."
+"This is a noop. Check you are setting depth bigger than 0.",
+entry,
)

if isinstance(self.iterable, YAMLTag):

@@ -436,9 +441,10 @@ class Enumerate(YAMLTag, YAMLTagContext):
iterable = self.iterable

if not isinstance(iterable, Iterable):
-raise EntryInvalidError(
+raise EntryInvalidError.from_entry(
f"{self.__class__.__name__}'s iterable must be an iterable "
-"such as a sequence or a mapping"
+"such as a sequence or a mapping",
+entry,
)

if isinstance(iterable, Mapping):

@@ -449,7 +455,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
try:
output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
except KeyError as exc:
-raise EntryInvalidError(exc)
+raise EntryInvalidError.from_entry(exc, entry)

result = output_class()


@@ -461,8 +467,8 @@ class Enumerate(YAMLTag, YAMLTagContext):
resolved_body = entry.tag_resolver(self.item_body, blueprint)
result = add_fn(result, resolved_body)
if not isinstance(result, output_class):
-raise EntryInvalidError(
-f"Invalid {self.__class__.__name__} item found: {resolved_body}"
+raise EntryInvalidError.from_entry(
+f"Invalid {self.__class__.__name__} item found: {resolved_body}", entry
)
finally:
self.__current_context = tuple()
@@ -489,12 +495,13 @@ class EnumeratedItem(YAMLTag):
)
except ValueError as exc:
if self.depth == 0:
-raise EntryInvalidError(
+raise EntryInvalidError.from_entry(
f"{self.__class__.__name__} tags are only usable "
-f"inside an {Enumerate.__name__} tag"
+f"inside an {Enumerate.__name__} tag",
+entry,
)

-raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")
+raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)

return context_tag.get_context(entry, blueprint)


@@ -508,7 +515,7 @@ class Index(EnumeratedItem):
try:
return context[0]
except IndexError:  # pragma: no cover
-raise EntryInvalidError(f"Empty/invalid context: {context}")
+raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)


class Value(EnumeratedItem):

@@ -520,7 +527,7 @@ class Value(EnumeratedItem):
try:
return context[1]
except IndexError:  # pragma: no cover
-raise EntryInvalidError(f"Empty/invalid context: {context}")
+raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)


class BlueprintDumper(SafeDumper):
@@ -574,8 +581,26 @@ class BlueprintLoader(SafeLoader):
class EntryInvalidError(SentryIgnoredException):
"""Error raised when an entry is invalid"""

-serializer_errors: Optional[dict]
+entry_model: Optional[str]
+entry_id: Optional[str]
+validation_error: Optional[ValidationError]

-def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
+def __init__(self, *args: object, validation_error: Optional[ValidationError] = None) -> None:
super().__init__(*args)
-self.serializer_errors = serializer_errors
+self.entry_model = None
+self.entry_id = None
+self.validation_error = validation_error
+
+@staticmethod
+def from_entry(
+msg_or_exc: str | Exception, entry: BlueprintEntry, *args, **kwargs
+) -> "EntryInvalidError":
+"""Create EntryInvalidError with the context of an entry"""
+error = EntryInvalidError(msg_or_exc, *args, **kwargs)
+if isinstance(msg_or_exc, ValidationError):
+error.validation_error = msg_or_exc
+# Make sure the model and id are strings, depending where the error happens
+# they might still be YAMLTag instances
+error.entry_model = str(entry.model)
+error.entry_id = str(entry.id)
+return error
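A small illustration, not part of the diff, of what the reworked exception carries and how a caller might raise it; `serializer` and `entry` here are stand-ins for a DRF serializer and a BlueprintEntry already in scope:

from rest_framework.exceptions import ValidationError
from authentik.blueprints.v1.common import EntryInvalidError

try:
    serializer.is_valid(raise_exception=True)
except ValidationError as exc:
    # from_entry() attaches the failing entry's model and id, and keeps the
    # original DRF ValidationError, instead of flattening everything to text.
    error = EntryInvalidError.from_entry(
        f"Serializer errors {serializer.errors}",
        entry,
        validation_error=exc,
    )
    assert error.entry_model == str(entry.model)
    assert error.entry_id == str(entry.id)
    assert error.validation_error is exc
    raise error from exc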
@@ -8,9 +8,9 @@ from dacite.core import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.core.exceptions import FieldError
-from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q
+from django.db.transaction import atomic
from django.db.utils import IntegrityError
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer

@@ -38,6 +38,7 @@ from authentik.core.models import (
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
+from authentik.lib.sentry import SentryIgnoredException
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
@@ -72,41 +73,53 @@ def is_model_allowed(model: type[Model]) -> bool:
return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))


+class DoRollback(SentryIgnoredException):
+"""Exception to trigger a rollback"""
+
+
@contextmanager
def transaction_rollback():
"""Enters an atomic transaction and always triggers a rollback at the end of the block."""
-atomic = transaction.atomic()
-# pylint: disable=unnecessary-dunder-call
-atomic.__enter__()
+try:
+with atomic():
yield
-atomic.__exit__(IntegrityError, None, None)
+raise DoRollback()
+except DoRollback:
+pass


class Importer:
-"""Import Blueprint from YAML"""
+"""Import Blueprint from raw dict or YAML/JSON"""

logger: BoundLogger
+_import: Blueprint

-def __init__(self, yaml_input: str, context: Optional[dict] = None):
+def __init__(self, blueprint: Blueprint, context: Optional[dict] = None):
self.__pk_map: dict[Any, Model] = {}
+self._import = blueprint
self.logger = get_logger()
+ctx = {}
+always_merger.merge(ctx, self._import.context)
+if context:
+always_merger.merge(ctx, context)
+self._import.context = ctx
+
+@staticmethod
+def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
+"""Parse YAML string and create blueprint importer from it"""
import_dict = load(yaml_input, BlueprintLoader)
try:
-self.__import = from_dict(
+_import = from_dict(
Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
)
except DaciteError as exc:
raise EntryInvalidError from exc
-ctx = {}
-always_merger.merge(ctx, self.__import.context)
-if context:
-always_merger.merge(ctx, context)
-self.__import.context = ctx
+return Importer(_import, context)

@property
def blueprint(self) -> Blueprint:
"""Get imported blueprint"""
-return self.__import
+return self._import

def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:
"""Replace any value if it is a known primary key of an other object"""
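To make the split above concrete, a short sketch (not taken from the diff) of the two ways an Importer is now built: from YAML text via the new static method, or directly from an in-memory Blueprint as the transactional API below does. The model name, group name and context values are illustrative stand-ins.

from authentik.blueprints.v1.common import Blueprint, BlueprintEntry, BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import Importer

# 1) From a YAML/JSON string: parsing now happens in from_string(), not in __init__.
importer = Importer.from_string('{"version": 1, "entries": []}', {"foo": "bar"})
valid, logs = importer.validate()

# 2) From an already-built Blueprint, skipping YAML entirely (the path the new endpoint uses).
blueprint = Blueprint()
blueprint.entries.append(
    BlueprintEntry(
        model="authentik_core.group",
        state=BlueprintEntryDesiredState.MUST_CREATED,  # fail if the object already exists
        identifiers={"name": "example-group"},
        attrs={"name": "example-group"},
    )
)
valid, logs = Importer(blueprint, {}).validate()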
@@ -152,19 +165,19 @@ class Importer:
# pylint: disable-msg=too-many-locals
def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
"""Validate a single entry"""
-if not entry.check_all_conditions_match(self.__import):
+if not entry.check_all_conditions_match(self._import):
self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
return None

-model_app_label, model_name = entry.get_model(self.__import).split(".")
+model_app_label, model_name = entry.get_model(self._import).split(".")
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
# Don't use isinstance since we don't want to check for inheritance
if not is_model_allowed(model):
-raise EntryInvalidError(f"Model {model} not allowed")
+raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry)
if issubclass(model, BaseMetaModel):
serializer_class: type[Serializer] = model.serializer()
serializer = serializer_class(
-data=entry.get_attrs(self.__import),
+data=entry.get_attrs(self._import),
context={
SERIALIZER_CONTEXT_BLUEPRINT: entry,
},

@@ -172,8 +185,10 @@ class Importer:
try:
serializer.is_valid(raise_exception=True)
except ValidationError as exc:
-raise EntryInvalidError(
-f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
+raise EntryInvalidError.from_entry(
+f"Serializer errors {serializer.errors}",
+validation_error=exc,
+entry=entry,
) from exc
return serializer
@@ -182,7 +197,7 @@ class Importer:
# the full serializer for later usage
# Because a model might have multiple unique columns, we chain all identifiers together
# to create an OR query.
-updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self.__import))
+updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self._import))
for key, value in list(updated_identifiers.items()):
if isinstance(value, dict) and "pk" in value:
del updated_identifiers[key]

@@ -190,12 +205,12 @@ class Importer:

query = self.__query_from_identifier(updated_identifiers)
if not query:
-raise EntryInvalidError("No or invalid identifiers")
+raise EntryInvalidError.from_entry("No or invalid identifiers", entry)

try:
existing_models = model.objects.filter(query)
except FieldError as exc:
-raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc
+raise EntryInvalidError.from_entry(f"Invalid identifier field: {exc}", entry) from exc

serializer_kwargs = {}
model_instance = existing_models.first()

@@ -208,6 +223,14 @@ class Importer:
)
serializer_kwargs["instance"] = model_instance
serializer_kwargs["partial"] = True
+elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
+raise EntryInvalidError.from_entry(
+(
+f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
+"and object exists already",
+),
+entry,
+)
else:
self.logger.debug(
"initialised new serializer instance",
@@ -220,9 +243,9 @@ class Importer:
model_instance.pk = updated_identifiers["pk"]
serializer_kwargs["instance"] = model_instance
try:
-full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
+full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
except ValueError as exc:
-raise EntryInvalidError(exc) from exc
+raise EntryInvalidError.from_entry(exc, entry) from exc
always_merger.merge(full_data, updated_identifiers)
serializer_kwargs["data"] = full_data

@@ -235,15 +258,17 @@ class Importer:
try:
serializer.is_valid(raise_exception=True)
except ValidationError as exc:
-raise EntryInvalidError(
-f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
+raise EntryInvalidError.from_entry(
+f"Serializer errors {serializer.errors}",
+validation_error=exc,
+entry=entry,
) from exc
return serializer

def apply(self) -> bool:
"""Apply (create/update) models yaml, in database transaction"""
try:
-with transaction.atomic():
+with atomic():
if not self._apply_models():
self.logger.debug("Reverting changes due to error")
raise IntegrityError
@@ -252,11 +277,11 @@ class Importer:
self.logger.debug("Committing changes")
return True

-def _apply_models(self) -> bool:
+def _apply_models(self, raise_errors=False) -> bool:
"""Apply (create/update) models yaml"""
self.__pk_map = {}
-for entry in self.__import.entries:
-model_app_label, model_name = entry.get_model(self.__import).split(".")
+for entry in self._import.entries:
+model_app_label, model_name = entry.get_model(self._import).split(".")
try:
model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
except LookupError:

@@ -269,15 +294,21 @@ class Importer:
serializer = self._validate_single(entry)
except EntryInvalidError as exc:
# For deleting objects we don't need the serializer to be valid
-if entry.get_state(self.__import) == BlueprintEntryDesiredState.ABSENT:
+if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
continue
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
+if raise_errors:
+raise exc
return False
if not serializer:
continue

-state = entry.get_state(self.__import)
-if state in [BlueprintEntryDesiredState.PRESENT, BlueprintEntryDesiredState.CREATED]:
+state = entry.get_state(self._import)
+if state in [
+BlueprintEntryDesiredState.PRESENT,
+BlueprintEntryDesiredState.CREATED,
+BlueprintEntryDesiredState.MUST_CREATED,
+]:
instance = serializer.instance
if (
instance
@@ -305,23 +336,23 @@ class Importer:
self.logger.debug("entry to delete with no instance, skipping")
return True

-def validate(self) -> tuple[bool, list[EventDict]]:
+def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
"""Validate loaded blueprint export, ensure all models are allowed
and serializers have no errors"""
self.logger.debug("Starting blueprint import validation")
-orig_import = deepcopy(self.__import)
-if self.__import.version != 1:
+orig_import = deepcopy(self._import)
+if self._import.version != 1:
self.logger.warning("Invalid blueprint version")
return False, [{"event": "Invalid blueprint version"}]
with (
transaction_rollback(),
capture_logs() as logs,
):
-successful = self._apply_models()
+successful = self._apply_models(raise_errors=raise_validation_errors)
if not successful:
self.logger.debug("Blueprint validation failed")
for log in logs:
getattr(self.logger, log.get("log_level"))(**log)
self.logger.debug("Finished blueprint import validation")
-self.__import = orig_import
+self._import = orig_import
return successful, logs
@@ -190,7 +190,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
self.set_uid(slugify(instance.name))
blueprint_content = instance.retrieve()
file_hash = sha512(blueprint_content.encode()).hexdigest()
-importer = Importer(blueprint_content, instance.context)
+importer = Importer.from_string(blueprint_content, instance.context)
if importer.blueprint.metadata:
instance.metadata = asdict(importer.blueprint.metadata)
valid, logs = importer.validate()
authentik/core/api/transactional_applications.py (new file, 139 lines)
@@ -0,0 +1,139 @@
"""transactional application and provider creation"""
from django.apps import apps
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from yaml import ScalarNode

from authentik.blueprints.v1.common import (
Blueprint,
BlueprintEntry,
BlueprintEntryDesiredState,
EntryInvalidError,
KeyOf,
)
from authentik.blueprints.v1.importer import Importer
from authentik.core.api.applications import ApplicationSerializer
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import Provider
from authentik.lib.utils.reflection import all_subclasses


def get_provider_serializer_mapping():
"""Get a mapping of all providers' model names and their serializers"""
mapping = {}
for model in all_subclasses(Provider):
if model._meta.abstract:
continue
mapping[f"{model._meta.app_label}.{model._meta.model_name}"] = model().serializer
return mapping


@extend_schema_field(
PolymorphicProxySerializer(
component_name="model",
serializers=get_provider_serializer_mapping,
resource_type_field_name="provider_model",
)
)
class TransactionProviderField(DictField):
"""Dictionary field which can hold provider creation data"""


class TransactionApplicationSerializer(PassiveSerializer):
"""Serializer for creating a provider and an application in one transaction"""

app = ApplicationSerializer()
provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
provider = TransactionProviderField()

_provider_model: type[Provider] = None

def validate_provider_model(self, fq_model_name: str) -> str:
"""Validate that the model exists and is a provider"""
if "." not in fq_model_name:
raise ValidationError("Invalid provider model")
try:
app, _, model_name = fq_model_name.partition(".")
model = apps.get_model(app, model_name)
if not issubclass(model, Provider):
raise ValidationError("Invalid provider model")
self._provider_model = model
except LookupError:
raise ValidationError("Invalid provider model")
return fq_model_name

def validate(self, attrs: dict) -> dict:
blueprint = Blueprint()
blueprint.entries.append(
BlueprintEntry(
model=attrs["provider_model"],
state=BlueprintEntryDesiredState.MUST_CREATED,
identifiers={
"name": attrs["provider"]["name"],
},
# Must match the name of the field on `self`
id="provider",
attrs=attrs["provider"],
)
)
app_data = attrs["app"]
app_data["provider"] = KeyOf(None, ScalarNode(tag="", value="provider"))
blueprint.entries.append(
BlueprintEntry(
model="authentik_core.application",
state=BlueprintEntryDesiredState.MUST_CREATED,
identifiers={
"slug": attrs["app"]["slug"],
},
attrs=app_data,
# Must match the name of the field on `self`
id="app",
)
)
importer = Importer(blueprint, {})
try:
valid, _ = importer.validate(raise_validation_errors=True)
if not valid:
raise ValidationError("Invalid blueprint")
except EntryInvalidError as exc:
raise ValidationError(
{
exc.entry_id: exc.validation_error.detail,
}
)
return blueprint


class TransactionApplicationResponseSerializer(PassiveSerializer):
"""Transactional creation response"""

applied = BooleanField()
logs = ListField(child=CharField())


class TransactionalApplicationView(APIView):
"""Create provider and application and attach them in a single transaction"""

permission_classes = [IsAdminUser]

@extend_schema(
request=TransactionApplicationSerializer(),
responses={
200: TransactionApplicationResponseSerializer(),
},
)
def put(self, request: Request) -> Response:
"""Convert data into a blueprint, validate it and apply it"""
data = TransactionApplicationSerializer(data=request.data)
data.is_valid(raise_exception=True)

importer = Importer(data.validated_data, {})
applied = importer.apply()
response = {"applied": False, "logs": []}
response["applied"] = applied
return Response(response, status=200)
authentik/core/tests/test_transactional_applications_api.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""Test Transactional API"""
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import OAuth2Provider


class TestTransactionalApplicationsAPI(APITestCase):
"""Test Transactional API"""

def setUp(self) -> None:
self.user = create_test_admin_user()

def test_create_transactional(self):
"""Test transactional Application + provider creation"""
self.client.force_login(self.user)
uid = generate_id()
authorization_flow = create_test_flow()
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
data={
"app": {
"name": uid,
"slug": uid,
},
"provider_model": "authentik_providers_oauth2.oauth2provider",
"provider": {
"name": uid,
"authorization_flow": str(authorization_flow.pk),
},
},
)
self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
provider = OAuth2Provider.objects.filter(name=uid).first()
self.assertIsNotNone(provider)
app = Application.objects.filter(slug=uid).first()
self.assertIsNotNone(app)
self.assertEqual(app.provider.pk, provider.pk)

def test_create_transactional_invalid(self):
"""Test transactional Application + provider creation"""
self.client.force_login(self.user)
uid = generate_id()
response = self.client.put(
reverse("authentik_api:core-transactional-application"),
data={
"app": {
"name": uid,
"slug": uid,
},
"provider_model": "authentik_providers_oauth2.oauth2provider",
"provider": {
"name": uid,
"authorization_flow": "",
},
},
)
self.assertJSONEqual(
response.content.decode(),
{"provider": {"authorization_flow": ["This field may not be null."]}},
)
|
@ -15,6 +15,7 @@ from authentik.core.api.propertymappings import PropertyMappingViewSet
|
|||
from authentik.core.api.providers import ProviderViewSet
|
||||
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
|
||||
from authentik.core.api.tokens import TokenViewSet
|
||||
from authentik.core.api.transactional_applications import TransactionalApplicationView
|
||||
from authentik.core.api.users import UserViewSet
|
||||
from authentik.core.views import apps
|
||||
from authentik.core.views.debug import AccessDeniedView
|
||||
|
@ -70,6 +71,11 @@ urlpatterns = [
|
|||
api_urlpatterns = [
|
||||
("core/authenticated_sessions", AuthenticatedSessionViewSet),
|
||||
("core/applications", ApplicationViewSet),
|
||||
path(
|
||||
"core/transactional/applications/",
|
||||
TransactionalApplicationView.as_view(),
|
||||
name="core-transactional-application",
|
||||
),
|
||||
("core/groups", GroupViewSet),
|
||||
("core/users", UserViewSet),
|
||||
("core/tokens", TokenViewSet),
|
||||
|
|
|
@@ -181,7 +181,7 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
if not file:
return Response(data=import_response.initial_data, status=400)

-importer = Importer(file.read().decode())
+importer = Importer.from_string(file.read().decode())
valid, logs = importer.validate()
import_response.initial_data["logs"] = [sanitize_dict(log) for log in logs]
import_response.initial_data["success"] = valid
@@ -73,40 +73,23 @@ QS_QUERY = "query"


def challenge_types():
-"""This is a workaround for PolymorphicProxySerializer not accepting a callable for
-`serializers`. This function returns a class which is an iterator, which returns the
+"""This function returns a mapping which contains all subclasses of challenges
subclasses of Challenge, and Challenge itself."""

-class Inner(dict):
-"""dummy class with custom callback on .items()"""
-
-def items(self):
mapping = {}
-classes = all_subclasses(Challenge)
-classes.remove(WithUserInfoChallenge)
-for cls in classes:
+for cls in all_subclasses(Challenge):
+if cls == WithUserInfoChallenge:
+continue
mapping[cls().fields["component"].default] = cls
-return mapping.items()
-
-return Inner()
+return mapping


def challenge_response_types():
-"""This is a workaround for PolymorphicProxySerializer not accepting a callable for
-`serializers`. This function returns a class which is an iterator, which returns the
+"""This function returns a mapping which contains all subclasses of challenges
subclasses of Challenge, and Challenge itself."""

-class Inner(dict):
-"""dummy class with custom callback on .items()"""
-
-def items(self):
mapping = {}
-classes = all_subclasses(ChallengeResponse)
-for cls in classes:
+for cls in all_subclasses(ChallengeResponse):
mapping[cls(stage=None).fields["component"].default] = cls
-return mapping.items()
-
-return Inner()
+return mapping


class InvalidStageError(SentryIgnoredException):

@@ -264,7 +247,7 @@ class FlowExecutorView(APIView):
responses={
200: PolymorphicProxySerializer(
component_name="ChallengeTypes",
-serializers=challenge_types(),
+serializers=challenge_types,
resource_type_field_name="component",
),
},

@@ -304,13 +287,13 @@ class FlowExecutorView(APIView):
responses={
200: PolymorphicProxySerializer(
component_name="ChallengeTypes",
-serializers=challenge_types(),
+serializers=challenge_types,
resource_type_field_name="component",
),
},
request=PolymorphicProxySerializer(
component_name="FlowChallengeResponse",
-serializers=challenge_response_types(),
+serializers=challenge_response_types,
resource_type_field_name="component",
),
parameters=[
@@ -59,7 +59,8 @@
"enum": [
"absent",
"present",
-"created"
+"created",
+"must_created"
],
"default": "present"
},

(The same one-line addition of "must_created" to the entry "state" enum repeats for every model entry in the generated blueprint JSON schema, in the hunks from @@ -59,7 +59,8 @@ through @@ -2651,7 +2723,8 @@.)
schema.yml (93 lines changed)
@@ -4349,6 +4349,39 @@ paths:
schema:
$ref: '#/components/schemas/GenericError'
description: ''
+/core/transactional/applications/:
+put:
+operationId: core_transactional_applications_update
+description: Convert data into a blueprint, validate it and apply it
+tags:
+- core
+requestBody:
+content:
+application/json:
+schema:
+$ref: '#/components/schemas/TransactionApplicationRequest'
+required: true
+security:
+- authentik: []
+responses:
+'200':
+content:
+application/json:
+schema:
+$ref: '#/components/schemas/TransactionApplicationResponse'
+description: ''
+'400':
+content:
+application/json:
+schema:
+$ref: '#/components/schemas/ValidationError'
+description: ''
+'403':
+content:
+application/json:
+schema:
+$ref: '#/components/schemas/GenericError'
+description: ''
/core/user_consent/:
get:
operationId: core_user_consent_list

@@ -37543,6 +37576,22 @@ components:
description: |-
* `twilio` - Twilio
* `generic` - Generic
+ProviderModelEnum:
+enum:
+- authentik_providers_ldap.ldapprovider
+- authentik_providers_oauth2.oauth2provider
+- authentik_providers_proxy.proxyprovider
+- authentik_providers_radius.radiusprovider
+- authentik_providers_saml.samlprovider
+- authentik_providers_scim.scimprovider
+type: string
+description: |-
+* `authentik_providers_ldap.ldapprovider` - authentik_providers_ldap.ldapprovider
+* `authentik_providers_oauth2.oauth2provider` - authentik_providers_oauth2.oauth2provider
+* `authentik_providers_proxy.proxyprovider` - authentik_providers_proxy.proxyprovider
+* `authentik_providers_radius.radiusprovider` - authentik_providers_radius.radiusprovider
+* `authentik_providers_saml.samlprovider` - authentik_providers_saml.samlprovider
+* `authentik_providers_scim.scimprovider` - authentik_providers_scim.scimprovider
ProviderRequest:
type: object
description: Provider Serializer

@@ -39953,6 +40002,33 @@ components:
readOnly: true
required:
- key
+TransactionApplicationRequest:
+type: object
+description: Serializer for creating a provider and an application in one transaction
+properties:
+app:
+$ref: '#/components/schemas/ApplicationRequest'
+provider_model:
+$ref: '#/components/schemas/ProviderModelEnum'
+provider:
+$ref: '#/components/schemas/modelRequest'
+required:
+- app
+- provider
+- provider_model
+TransactionApplicationResponse:
+type: object
+description: Transactional creation response
+properties:
+applied:
+type: boolean
+logs:
+type: array
+items:
+type: string
+required:
+- applied
+- logs
TypeCreate:
type: object
description: Types of an object that can be created

@@ -40880,6 +40956,23 @@ components:
type: integer
required:
- count
+modelRequest:
+oneOf:
+- $ref: '#/components/schemas/LDAPProviderRequest'
+- $ref: '#/components/schemas/OAuth2ProviderRequest'
+- $ref: '#/components/schemas/ProxyProviderRequest'
+- $ref: '#/components/schemas/RadiusProviderRequest'
+- $ref: '#/components/schemas/SAMLProviderRequest'
+- $ref: '#/components/schemas/SCIMProviderRequest'
+discriminator:
+propertyName: provider_model
+mapping:
+authentik_providers_ldap.ldapprovider: '#/components/schemas/LDAPProviderRequest'
+authentik_providers_oauth2.oauth2provider: '#/components/schemas/OAuth2ProviderRequest'
+authentik_providers_proxy.proxyprovider: '#/components/schemas/ProxyProviderRequest'
+authentik_providers_radius.radiusprovider: '#/components/schemas/RadiusProviderRequest'
+authentik_providers_saml.samlprovider: '#/components/schemas/SAMLProviderRequest'
+authentik_providers_scim.scimprovider: '#/components/schemas/SCIMProviderRequest'
securitySchemes:
authentik:
type: apiKey