logging: improve logging [AUTH-68]

Samir Musali 2023-11-21 21:12:59 +02:00
parent 8831e1d946
commit f93b438e9d
4 changed files with 23 additions and 45 deletions

View File

@@ -19,6 +19,7 @@ from django.utils import timezone
 from django.views.debug import SafeExceptionReporterFilter
 from geoip2.models import ASN, City
 from guardian.utils import get_anonymous_user
+from rest_framework import serializers
 
 from authentik.blueprints.v1.common import YAMLTag
 from authentik.core.models import User
@@ -186,3 +187,17 @@ def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
         if new_value is not ...:
             final_dict[key] = new_value
     return final_dict
+
+
+class LogSerializer(serializers.Serializer):
+    timestamp = serializers.DateTimeField()
+    log_level = serializers.CharField()
+    message = serializers.CharField()
+    attributes = serializers.JSONField(default=dict)
+
+    def to_representation(self, instance):
+        data = super().to_representation(instance)
+        data["attributes"] = {
+            k: v for k, v in instance.items() if k not in ["timestamp", "log_level", "message"]
+        }
+        return data
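
For orientation, a minimal sketch (not part of this commit) of how the new LogSerializer folds extra keys of a log entry into the attributes field. It assumes a configured Django environment (e.g. a Django shell of the project) and a hand-built entry dict that already carries the timestamp, log_level and message keys the serializer declares:

    from datetime import datetime, timezone

    from authentik.events.utils import LogSerializer

    # Hypothetical entry; real entries come from structlog's capture_logs().
    entry = {
        "timestamp": datetime.now(tz=timezone.utc),
        "log_level": "info",
        "message": "Creating deployment",
        "replicas": 1,  # any key outside the declared fields lands in "attributes"
    }

    # Passing the dict as `instance` goes straight through to_representation().
    print(LogSerializer(instance=entry).data)
    # -> {"timestamp": "<iso-8601>", "log_level": "info",
    #     "message": "Creating deployment", "attributes": {"replicas": 1}}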

View File

@@ -6,6 +6,7 @@ from structlog.stdlib import get_logger
 from structlog.testing import capture_logs
 
 from authentik import __version__, get_build_hash
+from authentik.events.utils import LogSerializer
 from authentik.lib.config import CONFIG
 from authentik.lib.sentry import SentryIgnoredException
 from authentik.outposts.models import (
@@ -59,26 +60,17 @@ class BaseController:
         self.logger = get_logger()
         self.deployment_ports = []
 
-    # pylint: disable=invalid-name
-    def up(self):
-        """Called by scheduled task to reconcile deployment/service/etc"""
-        raise NotImplementedError
-
     def up_with_logs(self) -> list[str]:
         """Call .up() but capture all log output and return it."""
         with capture_logs() as logs:
             self.up()
-        return [x["event"] for x in logs]
+        return [LogSerializer(data=log).data for log in logs]
 
-    def down(self):
-        """Handler to delete everything we've created"""
-        raise NotImplementedError
-
     def down_with_logs(self) -> list[str]:
         """Call .down() but capture all log output and return it."""
         with capture_logs() as logs:
             self.down()
-        return [x["event"] for x in logs]
+        return [LogSerializer(data=log).data for log in logs]
 
     def __enter__(self):
         return self
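
With this change, up_with_logs() and down_with_logs() still capture their own log output, but now return one serialized dict per entry instead of the bare event strings. A standalone sketch of the underlying capture pattern, using a hypothetical deploy() helper rather than a real controller:

    from structlog.stdlib import get_logger
    from structlog.testing import capture_logs

    LOGGER = get_logger()

    def deploy():
        # stand-in for the reconcile work normally done by .up()
        LOGGER.info("Creating deployment", replicas=1)

    with capture_logs() as logs:
        deploy()

    # Each captured entry is a plain dict, e.g.
    # {"event": "Creating deployment", "replicas": 1, "log_level": "info"},
    # which is what gets handed to LogSerializer above.
    print(logs)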

View File

@@ -12,6 +12,7 @@ from structlog.testing import capture_logs
 from urllib3.exceptions import HTTPError
 from yaml import dump_all
 
+from authentik.events.utils import LogSerializer
 from authentik.outposts.controllers.base import BaseClient, BaseController, ControllerException
 from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
 from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
@@ -78,18 +79,6 @@ class KubernetesController(BaseController):
             PrometheusServiceMonitorReconciler.reconciler_name(),
         ]
 
-    def up(self):
-        try:
-            for reconcile_key in self.reconcile_order:
-                reconciler_cls = self.reconcilers.get(reconcile_key)
-                if not reconciler_cls:
-                    continue
-                reconciler = reconciler_cls(self)
-                reconciler.up()
-        except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
-            raise ControllerException(str(exc)) from exc
-
     def up_with_logs(self) -> list[str]:
         try:
             all_logs = []
@@ -103,24 +92,11 @@ class KubernetesController(BaseController):
                     continue
                 reconciler = reconciler_cls(self)
                 reconciler.up()
-                all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
+                all_logs += [LogSerializer(data=log).data for log in logs]
             return all_logs
         except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
             raise ControllerException(str(exc)) from exc
 
-    def down(self):
-        try:
-            for reconcile_key in self.reconcile_order:
-                reconciler_cls = self.reconcilers.get(reconcile_key)
-                if not reconciler_cls:
-                    continue
-                reconciler = reconciler_cls(self)
-                self.logger.debug("Tearing down object", name=reconcile_key)
-                reconciler.down()
-        except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
-            raise ControllerException(str(exc)) from exc
-
     def down_with_logs(self) -> list[str]:
         try:
             all_logs = []
@@ -134,7 +110,7 @@ class KubernetesController(BaseController):
                     continue
                 reconciler = reconciler_cls(self)
                 reconciler.down()
-                all_logs += [f"{reconcile_key.title()}: {x['event']}" for x in logs]
+                all_logs += [LogSerializer(data=log).data for log in logs]
             return all_logs
         except (OpenApiException, HTTPError, ServiceConnectionInvalid) as exc:
             raise ControllerException(str(exc)) from exc
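
In the Kubernetes controller the capture happens once per reconciler, so every loop iteration appends its own batch of serialized entries to all_logs (dropping the reconcile_key prefix used before). A rough standalone sketch of that loop shape, with a hypothetical Reconciler stand-in rather than the real KubernetesObjectReconciler subclasses:

    from structlog.stdlib import get_logger
    from structlog.testing import capture_logs

    class Reconciler:
        """Hypothetical stand-in for a single Kubernetes object reconciler."""

        def __init__(self, name: str):
            self.logger = get_logger()
            self.name = name

        def up(self):
            self.logger.info("Reconciled object", name=self.name)

    def up_with_logs(order: list[str]) -> list[dict]:
        all_logs = []
        for key in order:
            with capture_logs() as logs:
                Reconciler(key).up()
            # one batch of structlog event dicts per reconciler
            all_logs += list(logs)
        return all_logs

    print(up_with_logs(["deployment", "service"]))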

View File

@@ -16,7 +16,7 @@ from authentik.api.decorators import permission_required
 from authentik.core.api.applications import user_app_cache_key
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import CacheSerializer, MetaNameSerializer, TypeCreateSerializer
-from authentik.events.utils import sanitize_dict
+from authentik.events.utils import LogSerializer
 from authentik.lib.utils.reflection import all_subclasses
 from authentik.policies.api.exec import PolicyTestResultSerializer, PolicyTestSerializer
 from authentik.policies.models import Policy, PolicyBinding
@@ -163,11 +163,6 @@ class PolicyViewSet(
         proc = PolicyProcess(PolicyBinding(policy=policy), p_request, None)
         with capture_logs() as logs:
             result = proc.execute()
-        log_messages = []
-        for log in logs:
-            if log.get("process", "") == "PolicyProcess":
-                continue
-            log_messages.append(sanitize_dict(log))
-        result.log_messages = log_messages
+        result.log_messages = [LogSerializer(data=log).data for log in logs]
         response = PolicyTestResultSerializer(result)
         return Response(response.data)
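
For the policy test endpoint this means result.log_messages is no longer a filtered, sanitize_dict-processed copy of the raw events (entries from PolicyProcess are no longer skipped); every captured entry is run through LogSerializer. A hypothetical sketch of the shape a single entry is expected to take, assuming it carries the fields the serializer declares:

    # Illustrative only; all field values are made up.
    {
        "timestamp": "2023-11-21T19:12:59Z",
        "log_level": "info",
        "message": "Policy executed",
        "attributes": {"policy": "test-policy"},
    }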