root: use single redis db (#4009)
* use single redis db
* cleanup prefixes
* ensure __str__ always returns string
* fix remaining old prefixes
* add release notes

Signed-off-by: Jens Langhammer <jens.langhammer@beryju.org>
This commit is contained in:
parent 9f269faf53
commit 55aa1897af

Makefile | 3
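The diff below replaces the per-purpose Redis databases (cache, message queue, websockets, outpost sessions) with a single database and moves the disambiguation into namespaced cache key prefixes. A minimal sketch of the prefixes introduced in the hunks that follow; the identifiers are placeholders, only the prefix strings come from the commit:

# Illustrative only: prefixes taken from the hunks below, identifiers are made up.
user_pk = 42
flow_pk = "0f2091a6"
app_access_key = f"goauthentik.io/core/app_access/{user_pk}"
flow_plan_key = f"goauthentik.io/flows/planner/{flow_pk}"
task_key = "goauthentik.io/events/tasks/some_task"
policy_prefix = "goauthentik.io/policies/"
outpost_state_key = f"goauthentik.io/outposts/{'a' * 32}_state"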
@@ -197,7 +197,4 @@ dev-reset:
 	dropdb -U postgres -h localhost authentik
 	createdb -U postgres -h localhost authentik
 	redis-cli -n 0 flushall
-	redis-cli -n 1 flushall
-	redis-cli -n 2 flushall
-	redis-cli -n 3 flushall
 	make migrate
@@ -37,7 +37,7 @@ LOGGER = get_logger()
 
 def user_app_cache_key(user_pk: str) -> str:
     """Cache key where application list for user is saved"""
-    return f"user_app_cache_{user_pk}"
+    return f"goauthentik.io/core/app_access/{user_pk}"
 
 
 class ApplicationSerializer(ModelSerializer):
@@ -297,7 +297,7 @@ class Provider(SerializerModel):
         raise NotImplementedError
 
     def __str__(self):
-        return self.name
+        return str(self.name)
 
 
 class Application(SerializerModel, PolicyBindingModel):
@@ -379,7 +379,7 @@ class Application(SerializerModel, PolicyBindingModel):
         return None
 
     def __str__(self):
-        return self.name
+        return str(self.name)
 
     class Meta:
@@ -481,7 +481,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
         return None
 
     def __str__(self):
-        return self.name
+        return str(self.name)
 
     class Meta:
@@ -293,7 +293,7 @@ class Event(SerializerModel, ExpiringModel):
         return f"{self.action}: {self.context}"
 
     def __str__(self) -> str:
-        return f"<Event action={self.action} user={self.user} context={self.context}>"
+        return f"Event action={self.action} user={self.user} context={self.context}"
 
     class Meta:
@@ -15,6 +15,7 @@ from authentik.events.models import Event, EventAction
 from authentik.lib.utils.errors import exception_to_string
 
 LOGGER = get_logger()
+CACHE_KEY_PREFIX = "goauthentik.io/events/tasks/"
 
 
 class TaskResultStatus(Enum):
@@ -70,16 +71,16 @@ class TaskInfo:
     @staticmethod
     def all() -> dict[str, "TaskInfo"]:
         """Get all TaskInfo objects"""
-        return cache.get_many(cache.keys("task_*"))
+        return cache.get_many(cache.keys(CACHE_KEY_PREFIX + "*"))
 
     @staticmethod
     def by_name(name: str) -> Optional["TaskInfo"]:
         """Get TaskInfo Object by name"""
-        return cache.get(f"task_{name}", None)
+        return cache.get(CACHE_KEY_PREFIX + name, None)
 
     def delete(self):
         """Delete task info from cache"""
-        return cache.delete(f"task_{self.task_name}")
+        return cache.delete(CACHE_KEY_PREFIX + self.task_name)
 
     def set_prom_metrics(self):
         """Update prometheus metrics"""
@@ -98,9 +99,9 @@ class TaskInfo:
 
     def save(self, timeout_hours=6):
         """Save task into cache"""
-        key = f"task_{self.task_name}"
+        key = CACHE_KEY_PREFIX + self.task_name
         if self.result.uid:
-            key += f"_{self.result.uid}"
+            key += f"/{self.result.uid}"
             self.task_name += f"_{self.result.uid}"
         self.set_prom_metrics()
         cache.set(key, self, timeout=timeout_hours * 60 * 60)
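For clarity, the key produced by TaskInfo.save() above now starts with the shared CACHE_KEY_PREFIX and appends the result uid, if any, behind a slash. A small sketch with hypothetical values:

# Sketch of the task-result key composition (hypothetical task name and uid).
CACHE_KEY_PREFIX = "goauthentik.io/events/tasks/"
task_name, uid = "example_task", "tenant-1"
key = CACHE_KEY_PREFIX + task_name
if uid:
    key += f"/{uid}"
assert key == "goauthentik.io/events/tasks/example_task/tenant-1"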
@@ -31,7 +31,7 @@ CACHE_TIMEOUT = int(CONFIG.y("redis.cache_timeout_flows"))
 
 def cache_key(flow: Flow, user: Optional[User] = None) -> str:
     """Generate Cache key for flow"""
-    prefix = f"flow_{flow.pk}"
+    prefix = f"goauthentik.io/flows/planner/{flow.pk}"
     if user:
         prefix += f"#{user.pk}"
     return prefix
@@ -19,10 +19,7 @@ redis:
   password: ''
   tls: false
   tls_reqs: "none"
-  cache_db: 0
-  message_queue_db: 1
-  ws_db: 2
-  outpost_session_db: 3
+  db: 0
   cache_timeout: 300
   cache_timeout_flows: 300
   cache_timeout_policies: 300
@@ -292,7 +292,7 @@ class Outpost(SerializerModel, ManagedModel):
     @property
     def state_cache_prefix(self) -> str:
         """Key by which the outposts status is saved"""
-        return f"outpost_{self.uuid.hex}_state"
+        return f"goauthentik.io/outposts/{self.uuid.hex}_state"
 
     @property
     def state(self) -> list["OutpostState"]:
@@ -41,6 +41,9 @@ class PolicyBindingModel(models.Model):
 
     objects = InheritanceManager()
 
+    def __str__(self) -> str:
+        return f"PolicyBindingModel {self.pbm_uuid}"
+
     class Meta:
         verbose_name = _("Policy Binding Model")
         verbose_name_plural = _("Policy Binding Models")
@@ -135,6 +138,7 @@ class PolicyBinding(SerializerModel):
             return f"Binding from {self.target} #{self.order} to {suffix}"
         except PolicyBinding.target.RelatedObjectDoesNotExist:  # pylint: disable=no-member
             return f"Binding - #{self.order} to {suffix}"
+        return ""
 
     class Meta:
@@ -175,7 +179,7 @@ class Policy(SerializerModel, CreatedUpdatedModel):
         raise NotImplementedError
 
     def __str__(self):
-        return self.name
+        return str(self.name)
 
     def passes(self, request: PolicyRequest) -> PolicyResult:  # pragma: no cover
         """Check if request passes this policy"""
@@ -14,7 +14,7 @@ from authentik.lib.utils.errors import exception_to_string
 from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
 from authentik.policies.exceptions import PolicyException
 from authentik.policies.models import PolicyBinding
-from authentik.policies.types import PolicyRequest, PolicyResult
+from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
 
 LOGGER = get_logger()
 
@@ -25,7 +25,7 @@ PROCESS_CLASS = FORK_CTX.Process
 
 def cache_key(binding: PolicyBinding, request: PolicyRequest) -> str:
     """Generate Cache key for policy"""
-    prefix = f"policy_{binding.policy_binding_uuid.hex}_"
+    prefix = f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}_"
     if request.http_request and hasattr(request.http_request, "session"):
         prefix += f"_{request.http_request.session.session_key}"
     if request.user:
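The policy cache key above is now namespaced under CACHE_PREFIX from authentik.policies.types. A rough sketch of the prefix it builds, with a hypothetical binding uuid and session key; the user-specific suffix is appended in lines not shown in this hunk:

# Rough sketch; binding uuid and session key are placeholders.
CACHE_PREFIX = "goauthentik.io/policies/"
binding_uuid_hex = "0123456789abcdef0123456789abcdef"
session_key = "hypothetical-session-key"
prefix = f"{CACHE_PREFIX}{binding_uuid_hex}_"
prefix += f"_{session_key}"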
@@ -23,12 +23,12 @@ def update_score(request: HttpRequest, identifier: str, amount: int):
     try:
         # We only update the cache here, as its faster than writing to the DB
         score = cache.get_or_set(
-            CACHE_KEY_PREFIX + remote_ip + identifier,
+            CACHE_KEY_PREFIX + remote_ip + "/" + identifier,
             {"ip": remote_ip, "identifier": identifier, "score": 0},
             CACHE_TIMEOUT,
         )
         score["score"] += amount
-        cache.set(CACHE_KEY_PREFIX + remote_ip + identifier, score)
+        cache.set(CACHE_KEY_PREFIX + remote_ip + "/" + identifier, score)
     except ValueError as exc:
         LOGGER.warning("failed to set reputation", exc=exc)
 
@@ -32,7 +32,7 @@ class TestReputationPolicy(TestCase):
         )
         # Test value in cache
         self.assertEqual(
-            cache.get(CACHE_KEY_PREFIX + self.test_ip + self.test_username),
+            cache.get(CACHE_KEY_PREFIX + self.test_ip + "/" + self.test_username),
             {"ip": "127.0.0.1", "identifier": "test", "score": -1},
         )
         # Save cache and check db values
@@ -47,7 +47,7 @@ class TestReputationPolicy(TestCase):
         )
         # Test value in cache
         self.assertEqual(
-            cache.get(CACHE_KEY_PREFIX + self.test_ip + self.test_username),
+            cache.get(CACHE_KEY_PREFIX + self.test_ip + "/" + self.test_username),
             {"ip": "127.0.0.1", "identifier": "test", "score": -1},
         )
         # Save cache and check db values
@@ -6,6 +6,7 @@ from structlog.stdlib import get_logger
 
 from authentik.core.api.applications import user_app_cache_key
 from authentik.policies.apps import GAUGE_POLICIES_CACHED
+from authentik.policies.types import CACHE_PREFIX
 from authentik.root.monitoring import monitoring_set
 
 LOGGER = get_logger()
@@ -15,7 +16,7 @@ LOGGER = get_logger()
 # pylint: disable=unused-argument
 def monitoring_set_policies(sender, **kwargs):
     """set policy gauges"""
-    GAUGE_POLICIES_CACHED.set(len(cache.keys("policy_*") or []))
+    GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}_*") or []))
 
 
 @receiver(post_save)
@@ -27,7 +28,7 @@ def invalidate_policy_cache(sender, instance, **_):
     if isinstance(instance, Policy):
         total = 0
         for binding in PolicyBinding.objects.filter(policy=instance):
-            prefix = f"policy_{binding.policy_binding_uuid.hex}_{binding.policy.pk.hex}*"
+            prefix = f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}_{binding.policy.pk.hex}*"
             keys = cache.keys(prefix)
             total += len(keys)
             cache.delete_many(keys)
@@ -8,6 +8,7 @@ from authentik.policies.engine import PolicyEngine
 from authentik.policies.expression.models import ExpressionPolicy
 from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel, PolicyEngineMode
 from authentik.policies.tests.test_process import clear_policy_cache
+from authentik.policies.types import CACHE_PREFIX
 
 
 class TestPolicyEngine(TestCase):
@@ -101,8 +102,8 @@ class TestPolicyEngine(TestCase):
         pbm = PolicyBindingModel.objects.create()
         binding = PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
         engine = PolicyEngine(pbm, self.user)
-        self.assertEqual(len(cache.keys(f"policy_{binding.policy_binding_uuid.hex}*")), 0)
+        self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 0)
         self.assertEqual(engine.build().passing, False)
-        self.assertEqual(len(cache.keys(f"policy_{binding.policy_binding_uuid.hex}*")), 1)
+        self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 1)
         self.assertEqual(engine.build().passing, False)
-        self.assertEqual(len(cache.keys(f"policy_{binding.policy_binding_uuid.hex}*")), 1)
+        self.assertEqual(len(cache.keys(f"{CACHE_PREFIX}{binding.policy_binding_uuid.hex}*")), 1)
@@ -16,6 +16,7 @@ if TYPE_CHECKING:
     from authentik.policies.models import PolicyBinding
 
 LOGGER = get_logger()
+CACHE_PREFIX = "goauthentik.io/policies/"
 
 
 @dataclass
@@ -2,6 +2,8 @@
 from channels.generic.websocket import JsonWebsocketConsumer
 from django.core.cache import cache
 
+from authentik.root.messages.storage import CACHE_PREFIX
+
 
 class MessageConsumer(JsonWebsocketConsumer):
     """Consumer which sends django.contrib.messages Messages over WS.
@@ -12,11 +14,13 @@ class MessageConsumer(JsonWebsocketConsumer):
     def connect(self):
         self.accept()
         self.session_key = self.scope["session"].session_key
-        cache.set(f"user_{self.session_key}_messages_{self.channel_name}", True, None)
+        if not self.session_key:
+            return
+        cache.set(f"{CACHE_PREFIX}{self.session_key}_messages_{self.channel_name}", True, None)
 
     # pylint: disable=unused-argument
     def disconnect(self, code):
-        cache.delete(f"user_{self.session_key}_messages_{self.channel_name}")
+        cache.delete(f"{CACHE_PREFIX}{self.session_key}_messages_{self.channel_name}")
 
     def event_update(self, event: dict):
         """Event handler which is called by Messages Storage backend"""
@@ -7,6 +7,7 @@ from django.core.cache import cache
 from django.http.request import HttpRequest
 
 SESSION_KEY = "_messages"
+CACHE_PREFIX = "goauthentik.io/root/messages_"
 
 
 class ChannelsStorage(SessionStorage):
|
||||||
self.channel = get_channel_layer()
|
self.channel = get_channel_layer()
|
||||||
|
|
||||||
def _store(self, messages: list[Message], response, *args, **kwargs):
|
def _store(self, messages: list[Message], response, *args, **kwargs):
|
||||||
prefix = f"user_{self.request.session.session_key}_messages_"
|
prefix = f"{CACHE_PREFIX}{self.request.session.session_key}_messages_"
|
||||||
keys = cache.keys(f"{prefix}*")
|
keys = cache.keys(f"{prefix}*")
|
||||||
# if no active connections are open, fallback to storing messages in the
|
# if no active connections are open, fallback to storing messages in the
|
||||||
# session, so they can always be retrieved
|
# session, so they can always be retrieved
|
||||||
|
|
|
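Both the consumer and the storage backend above now agree on one key shape per open websocket connection. A small sketch with placeholder session key and channel name, mirroring the f-strings in the two hunks:

# Placeholder values; only the prefix and the "_messages_" separator come from the diff.
CACHE_PREFIX = "goauthentik.io/root/messages_"
session_key = "abc123session"
channel_name = "specific.channel!hypothetical"
connection_key = f"{CACHE_PREFIX}{session_key}_messages_{channel_name}"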
@@ -195,9 +195,10 @@ _redis_url = (
 CACHES = {
     "default": {
         "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": f"{_redis_url}/{CONFIG.y('redis.cache_db')}",
+        "LOCATION": f"{_redis_url}/{CONFIG.y('redis.db')}",
         "TIMEOUT": int(CONFIG.y("redis.cache_timeout", 300)),
         "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
+        "KEY_PREFIX": "authentik_cache",
     }
 }
 DJANGO_REDIS_SCAN_ITERSIZE = 1000
@@ -255,7 +256,8 @@ CHANNEL_LAYERS = {
     "default": {
         "BACKEND": "channels_redis.core.RedisChannelLayer",
         "CONFIG": {
-            "hosts": [f"{_redis_url}/{CONFIG.y('redis.ws_db')}"],
+            "hosts": [f"{_redis_url}/{CONFIG.y('redis.db')}"],
+            "prefix": "authentik_channels",
         },
     },
 }
@@ -338,12 +340,8 @@ CELERY_BEAT_SCHEDULE = {
 }
 CELERY_TASK_CREATE_MISSING_QUEUES = True
 CELERY_TASK_DEFAULT_QUEUE = "authentik"
-CELERY_BROKER_URL = (
-    f"{_redis_url}/{CONFIG.y('redis.message_queue_db')}{REDIS_CELERY_TLS_REQUIREMENTS}"
-)
-CELERY_RESULT_BACKEND = (
-    f"{_redis_url}/{CONFIG.y('redis.message_queue_db')}{REDIS_CELERY_TLS_REQUIREMENTS}"
-)
+CELERY_BROKER_URL = f"{_redis_url}/{CONFIG.y('redis.db')}{REDIS_CELERY_TLS_REQUIREMENTS}"
+CELERY_RESULT_BACKEND = f"{_redis_url}/{CONFIG.y('redis.db')}{REDIS_CELERY_TLS_REQUIREMENTS}"
 
 # Sentry integration
 env = get_env()
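Taken together, the settings hunks above point the Django cache, the channel layer, and the Celery broker and result backend at the same redis.db, and rely on key prefixes to keep them apart. A rough sketch of the resulting locations, assuming redis.db is 0 and a plain local Redis without auth or TLS:

# Rough sketch only; the URL and db value are assumptions for illustration.
_redis_url = "redis://localhost:6379"
db = 0
cache_location = f"{_redis_url}/{db}"    # django-redis, KEY_PREFIX "authentik_cache"
channels_hosts = [f"{_redis_url}/{db}"]  # channels_redis, prefix "authentik_channels"
celery_broker = f"{_redis_url}/{db}"     # broker and result backend share the same db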
@@ -166,7 +166,7 @@ class LDAPPropertyMapping(PropertyMapping):
         return LDAPPropertyMappingSerializer
 
     def __str__(self):
-        return self.name
+        return str(self.name)
 
     class Meta:
@@ -99,7 +99,7 @@ class DuoDevice(SerializerModel, Device):
         return DuoDeviceSerializer
 
     def __str__(self):
-        return self.name or str(self.user)
+        return str(self.name) or str(self.user)
 
     class Meta:
@@ -216,7 +216,7 @@ class SMSDevice(SerializerModel, SideChannelDevice):
         return valid
 
     def __str__(self):
-        return self.name or str(self.user)
+        return str(self.name) or str(self.user)
 
     class Meta:
         verbose_name = _("SMS Device")
@@ -146,7 +146,7 @@ class WebAuthnDevice(SerializerModel, Device):
         return WebAuthnDeviceSerializer
 
     def __str__(self):
-        return self.name or str(self.user)
+        return str(self.name) or str(self.user)
 
     class Meta:
@@ -17,10 +17,7 @@ type RedisConfig struct {
 	Password             string `yaml:"password" env:"AUTHENTIK_REDIS__PASSWORD"`
 	TLS                  bool   `yaml:"tls" env:"AUTHENTIK_REDIS__TLS"`
 	TLSReqs              string `yaml:"tls_reqs" env:"AUTHENTIK_REDIS__TLS_REQS"`
-	CacheDB              int    `yaml:"cache_db" env:"AUTHENTIK_REDIS__CACHE_DB"`
-	MessageQueueDB       int    `yaml:"message_queue_db" env:"AUTHENTIK_REDIS__MESSAGE_QUEUE_DB"`
-	WSDB                 int    `yaml:"ws_db" env:"AUTHENTIK_REDIS__WS_DB"`
-	OutpostSessionDB     int    `yaml:"outpost_session_db" env:"AUTHENTIK_REDIS__OUTPOST_SESSION_DB"`
+	DB                   int    `yaml:"cache_db" env:"AUTHENTIK_REDIS__CACHE_DB"`
 	CacheTimeout         int    `yaml:"cache_timeout" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT"`
 	CacheTimeoutFlows    int    `yaml:"cache_timeout_flows" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_FLOWS"`
 	CacheTimeoutPolicies int    `yaml:"cache_timeout_policies" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_POLICIES"`
@@ -16,11 +16,12 @@ import (
 func (a *Application) getStore(p api.ProxyOutpostConfig, externalHost *url.URL) sessions.Store {
 	var store sessions.Store
 	if config.Get().Redis.Host != "" {
-		rs, err := redistore.NewRediStoreWithDB(10, "tcp", fmt.Sprintf("%s:%d", config.Get().Redis.Host, config.Get().Redis.Port), config.Get().Redis.Password, strconv.Itoa(config.Get().Redis.OutpostSessionDB), []byte(*p.CookieSecret))
+		rs, err := redistore.NewRediStoreWithDB(10, "tcp", fmt.Sprintf("%s:%d", config.Get().Redis.Host, config.Get().Redis.Port), config.Get().Redis.Password, strconv.Itoa(config.Get().Redis.DB), []byte(*p.CookieSecret))
 		if err != nil {
 			panic(err)
 		}
 		rs.SetMaxLength(math.MaxInt)
+		rs.SetKeyPrefix("authentik_proxy_session_")
 		if p.TokenValidity.IsSet() {
 			t := p.TokenValidity.Get()
 			// Add one to the validity to ensure we don't have a session with indefinite length
@@ -43,7 +43,7 @@ if CONFIG.y_bool("redis.tls", False):
 REDIS_URL = (
     f"{REDIS_PROTOCOL_PREFIX}:"
     f"{quote_plus(CONFIG.y('redis.password'))}@{quote_plus(CONFIG.y('redis.host'))}:"
-    f"{int(CONFIG.y('redis.port'))}/{CONFIG.y('redis.ws_db')}"
+    f"{int(CONFIG.y('redis.port'))}/{CONFIG.y('redis.db')}"
 )
 while True:
     try:
@@ -11550,7 +11550,6 @@ paths:
         application/json:
           schema:
             $ref: '#/components/schemas/PasswordPolicyRequest'
-      required: true
       security:
       - authentik: []
       responses:
@@ -11625,7 +11624,6 @@ paths:
         application/json:
           schema:
             $ref: '#/components/schemas/PasswordPolicyRequest'
-      required: true
       security:
       - authentik: []
       responses:
@@ -32929,7 +32927,6 @@ components:
       required:
       - bound_to
       - component
-      - error_message
       - meta_model_name
       - pk
       - verbose_name
@@ -32975,7 +32972,6 @@ components:
           minLength: 1
         error_message:
           type: string
-          minLength: 1
         check_static_rules:
           type: boolean
         check_have_i_been_pwned:
@@ -32993,8 +32989,6 @@ components:
           minimum: 0
           description: If the zxcvbn score is equal or less than this value, the policy
             will fail.
-      required:
-      - error_message
     PasswordStage:
       type: object
       description: PasswordStage Serializer
@@ -34254,7 +34248,6 @@ components:
           minLength: 1
         error_message:
           type: string
-          minLength: 1
         check_static_rules:
           type: boolean
         check_have_i_been_pwned:
@@ -39,10 +39,7 @@ kubectl exec -it deployment/authentik-worker -c authentik -- ak dump_config
 - `AUTHENTIK_REDIS__HOST`: Hostname of your Redis Server
 - `AUTHENTIK_REDIS__PORT`: Redis port, defaults to 6379
 - `AUTHENTIK_REDIS__PASSWORD`: Password for your Redis Server
-- `AUTHENTIK_REDIS__CACHE_DB`: Database for caching, defaults to 0
-- `AUTHENTIK_REDIS__MESSAGE_QUEUE_DB`: Database for the message queue, defaults to 1
-- `AUTHENTIK_REDIS__WS_DB`: Database for websocket connections, defaults to 2
-- `AUTHENTIK_REDIS__OUTPOST_SESSION_DB`: Database for sessions for the embedded outpost, defaults to 3
+- `AUTHENTIK_REDIS__DB`: Database, defaults to 0
 - `AUTHENTIK_REDIS__CACHE_TIMEOUT`: Timeout for cached data until it expires in seconds, defaults to 300
 - `AUTHENTIK_REDIS__CACHE_TIMEOUT_FLOWS`: Timeout for cached flow plans until they expire in seconds, defaults to 300
 - `AUTHENTIK_REDIS__CACHE_TIMEOUT_POLICIES`: Timeout for cached policies until they expire in seconds, defaults to 300
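For deployments, only the single `AUTHENTIK_REDIS__DB` variable remains relevant after this change. A hedged sketch of an environment after the upgrade; the host and values are placeholders, only the variable names come from the documentation hunk above:

# Placeholder values; variable names taken from the documentation list above.
import os

os.environ["AUTHENTIK_REDIS__HOST"] = "redis.example.com"
os.environ["AUTHENTIK_REDIS__DB"] = "0"
# AUTHENTIK_REDIS__CACHE_DB, __MESSAGE_QUEUE_DB, __WS_DB and
# __OUTPOST_SESSION_DB are removed from the documentation by this commit.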
@@ -9,6 +9,10 @@ slug: "2022.11"
 
 The policy has been merged with the password policy which provides the same functionality. Existing Have I Been Pwned policies will automatically be migrated.
 
+- Instead of using multiple redis databases, authentik now uses a single redis database
+
+    This will temporarily loose some cached information after the upgrade, like cached system tasks and policy results. This data will be re-cached in the background.
+
 ## New features
 
 - authentik now runs on Python 3.11