Merge branch 'main' into feature/improve-k8s-config

This commit is contained in:
Philipp Kolberg 2023-11-29 22:45:27 +01:00
commit 569ba024c2
No known key found for this signature in database
GPG key ID: 4C58CB0448FF9061
245 changed files with 20542 additions and 23255 deletions

View file

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.10.1
current_version = 2023.10.4
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

View file

@ -2,36 +2,39 @@ name: "Setup authentik testing environment"
description: "Setup authentik testing environment"
inputs:
postgresql_tag:
postgresql_version:
description: "Optional postgresql image tag"
default: "12"
runs:
using: "composite"
steps:
- name: Install poetry
- name: Install poetry & deps
shell: bash
run: |
pipx install poetry || true
sudo apt update
sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v3
uses: actions/setup-python@v4
with:
python-version: "3.11"
python-version-file: 'pyproject.toml'
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: "20"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Setup go
uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_tag }}
export PSQL_TAG=${{ inputs.postgresql_version }}
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install
cd web && npm ci
- name: Generate config

View file

@ -11,6 +11,7 @@ on:
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
@ -47,25 +48,38 @@ jobs:
- name: run migrations
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
continue-on-error: true
strategy:
fail-fast: false
matrix:
psql:
- 12-alpine
- 15-alpine
- 16-alpine
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: checkout stable
run: |
# Delete all poetry envs
rm -rf /home/runner/.cache/pypoetry
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
git checkout version/$(python -c "from authentik import __version__; print(__version__)")
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable
run: poetry run python -m lifecycle.migrate
- name: checkout current code
@ -75,9 +89,13 @@ jobs:
git reset --hard HEAD
git clean -d -fx .
git checkout $GITHUB_SHA
# Delete previous poetry env
rm -rf $(poetry env info --path)
poetry install
- name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
@ -96,7 +114,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_tag: ${{ matrix.psql }}
postgresql_version: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
@ -185,6 +203,9 @@ jobs:
build:
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
@ -235,6 +256,9 @@ jobs:
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
timeout-minutes: 120
steps:
- uses: actions/checkout@v4

View file

@ -9,6 +9,7 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-golint:
@ -65,6 +66,9 @@ jobs:
- ldap
- radius
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v4
with:
@ -126,7 +130,7 @@ jobs:
go-version-file: "go.mod"
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API

View file

@ -9,6 +9,7 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-eslint:
@ -23,7 +24,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
@ -39,7 +40,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
@ -61,7 +62,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
@ -77,7 +78,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
@ -109,7 +110,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/

View file

@ -9,6 +9,7 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-prettier:
@ -17,7 +18,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
@ -31,7 +32,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
@ -52,7 +53,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/

View file

@ -6,6 +6,10 @@ on:
types:
- closed
permissions:
# Permission to delete cache
actions: write
jobs:
cleanup:
runs-on: ubuntu-latest

View file

@ -6,6 +6,7 @@ on:
workflow_dispatch:
permissions:
# Needed to be able to push to the next branch
contents: write
jobs:

View file

@ -7,6 +7,9 @@ on:
jobs:
build-server:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
@ -52,6 +55,9 @@ jobs:
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
strategy:
fail-fast: false
matrix:
@ -106,6 +112,9 @@ jobs:
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
permissions:
# Needed to upload binaries to the release
contents: write
strategy:
fail-fast: false
matrix:
@ -122,7 +131,7 @@ jobs:
go-version-file: "go.mod"
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Build web

View file

@ -30,7 +30,7 @@ jobs:
private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Extract version number
id: get_version
uses: actions/github-script@v6
uses: actions/github-script@v7
with:
github-token: ${{ steps.generate_token.outputs.token }}
script: |

View file

@ -6,8 +6,8 @@ on:
workflow_dispatch:
permissions:
# Needed to update issues and PRs
issues: write
pull-requests: write
jobs:
stale:

View file

@ -7,7 +7,12 @@ on:
paths:
- "!**"
- "locale/**"
- "web/src/locales/**"
- "!locale/en/**"
- "web/xliff/**"
permissions:
# Permission to write comments
pull-requests: write
jobs:
post-comment:

View file

@ -6,6 +6,10 @@ on:
pull_request:
types: [opened, reopened]
permissions:
# Permission to rename PR
pull-requests: write
jobs:
rename_pr:
runs-on: ubuntu-latest

View file

@ -19,7 +19,7 @@ jobs:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts

View file

@ -14,6 +14,7 @@
"ms-python.pylint",
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx",

View file

@ -19,10 +19,8 @@
"slo",
"scim",
],
"python.linting.pylintEnabled": true,
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"python.formatting.provider": "black",
"yaml.customTags": [
"!Find sequence",
"!KeyOf scalar",

View file

@ -35,7 +35,14 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build
# Stage 3: Build go proxy
FROM docker.io/golang:1.21.3-bookworm AS go-builder
FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder
ARG TARGETOS
ARG TARGETARCH
ARG TARGETVARIANT
ARG GOOS=$TARGETOS
ARG GOARCH=$TARGETARCH
WORKDIR /go/src/goauthentik.io
@ -57,10 +64,10 @@ ENV CGO_ENABLED=0
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target=/root/.cache/go-build \
go build -o /go/authentik ./cmd/server
GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
# Stage 4: MaxMind GeoIP
FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"
@ -74,7 +81,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
# Stage 5: Python dependencies
FROM docker.io/python:3.11.5-bookworm AS python-deps
FROM docker.io/python:3.12.0-slim-bookworm AS python-deps
WORKDIR /ak-root/poetry
@ -97,7 +104,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
poetry install --only=main --no-ansi --no-interaction
# Stage 6: Run
FROM docker.io/python:3.11.5-slim-bookworm AS final-image
FROM docker.io/python:3.12.0-slim-bookworm AS final-image
ARG GIT_BUILD_HASH
ARG VERSION

View file

@ -110,6 +110,8 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
--markdown /local/diff.md \
/local/old_schema.yml /local/schema.yml
rm old_schema.yml
sed -i 's/{/&#123;/g' diff.md
sed -i 's/}/&#125;/g' diff.md
npx prettier --write diff.md
gen-clean:

View file

@ -2,7 +2,7 @@
from os import environ
from typing import Optional
__version__ = "2023.10.1"
__version__ = "2023.10.4"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View file

@ -30,7 +30,7 @@ class RuntimeDict(TypedDict):
uname: str
class SystemSerializer(PassiveSerializer):
class SystemInfoSerializer(PassiveSerializer):
"""Get system information."""
http_headers = SerializerMethodField()
@ -91,14 +91,14 @@ class SystemView(APIView):
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
pagination_class = None
filter_backends = []
serializer_class = SystemSerializer
serializer_class = SystemInfoSerializer
@extend_schema(responses={200: SystemSerializer(many=False)})
@extend_schema(responses={200: SystemInfoSerializer(many=False)})
def get(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemSerializer(request).data)
return Response(SystemInfoSerializer(request).data)
@extend_schema(responses={200: SystemSerializer(many=False)})
@extend_schema(responses={200: SystemInfoSerializer(many=False)})
def post(self, request: Request) -> Response:
"""Get system information."""
return Response(SystemSerializer(request).data)
return Response(SystemInfoSerializer(request).data)

View file

@ -93,10 +93,10 @@ class ConfigView(APIView):
"traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
},
"capabilities": self.get_capabilities(),
"cache_timeout": CONFIG.get_int("redis.cache_timeout"),
"cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
"cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
"cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
"cache_timeout": CONFIG.get_int("cache.timeout"),
"cache_timeout_flows": CONFIG.get_int("cache.timeout_flows"),
"cache_timeout_policies": CONFIG.get_int("cache.timeout_policies"),
"cache_timeout_reputation": CONFIG.get_int("cache.timeout_reputation"),
}
)

View file

@ -171,6 +171,11 @@ class UserSerializer(ModelSerializer):
raise ValidationError("Setting a user to internal service account is not allowed.")
return user_type
def validate(self, attrs: dict) -> dict:
if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
raise ValidationError("Can't modify internal service account users")
return super().validate(attrs)
class Meta:
model = User
fields = [

View file

@ -17,9 +17,15 @@ class Command(BaseCommand):
"""Run worker"""
def add_arguments(self, parser):
parser.add_argument("-b", "--beat", action="store_true")
parser.add_argument(
"-b",
"--beat",
action="store_false",
help="When set, this worker will _not_ run Beat (scheduled) tasks",
)
def handle(self, **options):
LOGGER.debug("Celery options", **options)
close_old_connections()
if CONFIG.get_bool("remote_debug"):
import debugpy
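
Note the inverted semantics this hunk introduces: because the argument uses action="store_false", options["beat"] defaults to True, so Beat (scheduled) tasks run unless -b/--beat is passed. A standalone argparse sketch of the same flag (illustrative, not authentik code):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "-b",
    "--beat",
    action="store_false",
    help="When set, this worker will _not_ run Beat (scheduled) tasks",
)
assert parser.parse_args([]).beat is True  # Beat runs by default
assert parser.parse_args(["-b"]).beat is False  # -b disables Beat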

View file

@ -517,7 +517,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
objects = InheritanceManager()
@property
def get_icon(self) -> Optional[str]:
def icon_url(self) -> Optional[str]:
"""Get the URL to the Icon. If the name is /static or
starts with http it is returned as-is"""
if not self.icon:

View file

@ -27,6 +27,7 @@ from authentik.lib.sentry import before_send
from authentik.lib.utils.errors import exception_to_string
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMGroup, SCIMUser
from authentik.stages.authenticator_static.models import StaticToken
@ -52,11 +53,13 @@ IGNORED_MODELS = (
RefreshToken,
SCIMUser,
SCIMGroup,
Reputation,
)
def should_log_model(model: Model) -> bool:
"""Return true if operation on `model` should be logged"""
# Check for silk by string so this comparison doesn't fail when silk isn't installed
if model.__module__.startswith("silk"):
return False
return model.__class__ not in IGNORED_MODELS
@ -93,21 +96,30 @@ class AuditMiddleware:
of models"""
get_response: Callable[[HttpRequest], HttpResponse]
anonymous_user: User = None
def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
self.get_response = get_response
def _ensure_fallback_user(self):
"""Defer fetching anonymous user until we have to"""
if self.anonymous_user:
return
from guardian.shortcuts import get_anonymous_user
self.anonymous_user = get_anonymous_user()
def connect(self, request: HttpRequest):
"""Connect signal for automatic logging"""
if not hasattr(request, "user"):
return
if not getattr(request.user, "is_authenticated", False):
return
self._ensure_fallback_user()
user = getattr(request, "user", self.anonymous_user)
if not user.is_authenticated:
user = self.anonymous_user
if not hasattr(request, "request_id"):
return
post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
pre_delete_handler = partial(self.pre_delete_handler, user=request.user, request=request)
m2m_changed_handler = partial(self.m2m_changed_handler, user=request.user, request=request)
post_save_handler = partial(self.post_save_handler, user=user, request=request)
pre_delete_handler = partial(self.pre_delete_handler, user=user, request=request)
m2m_changed_handler = partial(self.m2m_changed_handler, user=user, request=request)
post_save.connect(
post_save_handler,
dispatch_uid=request.request_id,

View file

@ -217,6 +217,7 @@ class Event(SerializerModel, ExpiringModel):
"path": request.path,
"method": request.method,
"args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
"user_agent": request.META.get("HTTP_USER_AGENT", ""),
}
# Special case for events created during flow execution
# since they keep the http query within a wrapped query

View file

@ -13,6 +13,7 @@ from authentik.events.tasks import event_notification_handler, gdpr_cleanup
from authentik.flows.models import Stage
from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.config import CONFIG
from authentik.stages.invitation.models import Invitation
from authentik.stages.invitation.signals import invitation_used
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
@ -92,4 +93,5 @@ def event_post_save_notification(sender, instance: Event, **_):
@receiver(pre_delete, sender=User)
def event_user_pre_delete_cleanup(sender, instance: User, **_):
"""If gdpr_compliance is enabled, remove all the user's events"""
gdpr_cleanup.delay(instance.pk)
if CONFIG.get_bool("gdpr_compliance", True):
gdpr_cleanup.delay(instance.pk)

View file

@ -53,7 +53,15 @@ class TestEvents(TestCase):
"""Test plain from_http"""
event = Event.new("unittest").from_http(self.factory.get("/"))
self.assertEqual(
event.context, {"http_request": {"args": {}, "method": "GET", "path": "/"}}
event.context,
{
"http_request": {
"args": {},
"method": "GET",
"path": "/",
"user_agent": "",
}
},
)
def test_from_http_clean_querystring(self):
@ -67,6 +75,7 @@ class TestEvents(TestCase):
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
"user_agent": "",
}
},
)
@ -83,6 +92,7 @@ class TestEvents(TestCase):
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
"user_agent": "",
}
},
)

View file

@ -5,12 +5,13 @@ from dataclasses import asdict, is_dataclass
from datetime import date, datetime, time, timedelta
from enum import Enum
from pathlib import Path
from types import GeneratorType
from types import GeneratorType, NoneType
from typing import Any, Optional
from uuid import UUID
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.db.models.base import Model
from django.http.request import HttpRequest
@ -153,7 +154,20 @@ def sanitize_item(value: Any) -> Any:
return value.isoformat()
if isinstance(value, timedelta):
return str(value.total_seconds())
return value
if callable(value):
return {
"type": "callable",
"name": value.__name__,
"module": value.__module__,
}
# List taken from the stdlib's JSON encoder (_make_iterencode, encoder.py:415)
if isinstance(value, (bool, int, float, NoneType, list, tuple, dict)):
return value
try:
return DjangoJSONEncoder().default(value)
except TypeError:
return str(value)
return str(value)
def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
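
The widened sanitize_item fallback means event payloads can now contain callables and arbitrary Python objects without crashing JSON encoding. A minimal sketch of the resulting behavior, assuming the function is importable as authentik.events.utils.sanitize_item (sample inputs are illustrative):

from authentik.events.utils import sanitize_item

# Callables are reduced to a small descriptor dict
assert sanitize_item(print) == {
    "type": "callable",
    "name": "print",
    "module": "builtins",
}
# JSON-native types pass through unchanged
assert sanitize_item([1, 2.5, None]) == [1, 2.5, None]
# Anything else is tried against DjangoJSONEncoder, falling back to str()
assert sanitize_item(object()).startswith("<object object")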

View file

@ -33,7 +33,7 @@ PLAN_CONTEXT_SOURCE = "source"
# Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
# was restored.
PLAN_CONTEXT_IS_RESTORED = "is_restored"
CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_flows")
CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_flows")
CACHE_PREFIX = "goauthentik.io/flows/planner/"

View file

@ -472,6 +472,7 @@ class TestFlowExecutor(FlowTestCase):
ident_stage = IdentificationStage.objects.create(
name="ident",
user_fields=[UserFields.E_MAIL],
pretend_user_exists=False,
)
FlowStageBinding.objects.create(
target=flow,

View file

@ -154,7 +154,15 @@ def generate_avatar_from_name(
def avatar_mode_generated(user: "User", mode: str) -> Optional[str]:
"""Wrapper that converts generated avatar to base64 svg"""
svg = generate_avatar_from_name(user.name if user.name.strip() != "" else "a k")
# By default generate based off of user's display name
name = user.name.strip()
if name == "":
# Fallback to username
name = user.username.strip()
# If we still don't have anything, fallback to `a k`
if name == "":
name = "a k"
svg = generate_avatar_from_name(name)
return f"data:image/svg+xml;base64,{b64encode(svg.encode('utf-8')).decode('utf-8')}"

View file

@ -1,4 +1,6 @@
"""authentik core config loader"""
import base64
import json
import os
from collections.abc import Mapping
from contextlib import contextmanager
@ -22,6 +24,25 @@ SEARCH_PATHS = ["authentik/lib/default.yml", "/etc/authentik/config.yml", ""] +
ENV_PREFIX = "AUTHENTIK"
ENVIRONMENT = os.getenv(f"{ENV_PREFIX}_ENV", "local")
REDIS_ENV_KEYS = [
f"{ENV_PREFIX}_REDIS__HOST",
f"{ENV_PREFIX}_REDIS__PORT",
f"{ENV_PREFIX}_REDIS__DB",
f"{ENV_PREFIX}_REDIS__USERNAME",
f"{ENV_PREFIX}_REDIS__PASSWORD",
f"{ENV_PREFIX}_REDIS__TLS",
f"{ENV_PREFIX}_REDIS__TLS_REQS",
]
DEPRECATIONS = {
"redis.broker_url": "broker.url",
"redis.broker_transport_options": "broker.transport_options",
"redis.cache_timeout": "cache.timeout",
"redis.cache_timeout_flows": "cache.timeout_flows",
"redis.cache_timeout_policies": "cache.timeout_policies",
"redis.cache_timeout_reputation": "cache.timeout_reputation",
}
def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
"""Recursively walk through `root`, checking each part of `path` separated by `sep`.
@ -81,6 +102,10 @@ class AttrEncoder(JSONEncoder):
return super().default(o)
class UNSET:
"""Used to test whether configuration key has not been set."""
class ConfigLoader:
"""Search through SEARCH_PATHS and load configuration. Environment variables starting with
`ENV_PREFIX` are also applied.
@ -113,6 +138,40 @@ class ConfigLoader:
self.update_from_file(env_file)
self.update_from_env()
self.update(self.__config, kwargs)
self.check_deprecations()
def check_deprecations(self):
"""Warn if any deprecated configuration options are used"""
def _pop_deprecated_key(current_obj, dot_parts, index):
"""Recursive function to remove deprecated keys in configuration"""
dot_part = dot_parts[index]
if index == len(dot_parts) - 1:
return current_obj.pop(dot_part)
value = _pop_deprecated_key(current_obj[dot_part], dot_parts, index + 1)
if not current_obj[dot_part]:
current_obj.pop(dot_part)
return value
for deprecation, replacement in DEPRECATIONS.items():
if self.get(deprecation, default=UNSET) is not UNSET:
message = (
f"'{deprecation}' has been deprecated in favor of '{replacement}'! "
+ "Please update your configuration."
)
self.log(
"warning",
message,
)
try:
from authentik.events.models import Event, EventAction
Event.new(EventAction.CONFIGURATION_ERROR, message=message).save()
except ImportError:
continue
deprecated_attr = _pop_deprecated_key(self.__config, deprecation.split("."), 0)
self.set(replacement, deprecated_attr.value)
def log(self, level: str, message: str, **kwargs):
"""Custom Log method, we want to ensure ConfigLoader always logs JSON even when
@ -180,6 +239,10 @@ class ConfigLoader:
error=str(exc),
)
def update_from_dict(self, update: dict):
"""Update config from dict"""
self.__config.update(update)
def update_from_env(self):
"""Check environment variables"""
outer = {}
@ -188,19 +251,13 @@ class ConfigLoader:
if not key.startswith(ENV_PREFIX):
continue
relative_key = key.replace(f"{ENV_PREFIX}_", "", 1).replace("__", ".").lower()
# Recursively convert path from a.b.c into outer[a][b][c]
current_obj = outer
dot_parts = relative_key.split(".")
for dot_part in dot_parts[:-1]:
if dot_part not in current_obj:
current_obj[dot_part] = {}
current_obj = current_obj[dot_part]
# Check if the value is json, and try to load it
try:
value = loads(value)
except JSONDecodeError:
pass
current_obj[dot_parts[-1]] = Attr(value, Attr.Source.ENV, key)
attr_value = Attr(value, Attr.Source.ENV, relative_key)
set_path_in_dict(outer, relative_key, attr_value)
idx += 1
if idx > 0:
self.log("debug", "Loaded environment variables", count=idx)
@ -241,6 +298,23 @@ class ConfigLoader:
"""Wrapper for get that converts value into boolean"""
return str(self.get(path, default)).lower() == "true"
def get_dict_from_b64_json(self, path: str, default=None) -> dict:
"""Wrapper for get that converts value from Base64 encoded string into dictionary"""
config_value = self.get(path)
if config_value is None:
return {}
try:
b64decoded_str = base64.b64decode(config_value).decode("utf-8")
b64decoded_str = b64decoded_str.strip().lstrip("{").rstrip("}")
b64decoded_str = "{" + b64decoded_str + "}"
return json.loads(b64decoded_str)
except (JSONDecodeError, TypeError, ValueError) as exc:
self.log(
"warning",
f"Ignored invalid configuration for '{path}' due to exception: {str(exc)}",
)
return default if isinstance(default, dict) else {}
def set(self, path: str, value: Any, sep="."):
"""Set value using same syntax as get()"""
set_path_in_dict(self.raw, path, Attr(value), sep=sep)
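
Two of the loader changes are worth calling out together: deprecated redis.* keys are re-written at startup to their broker.*/cache.* replacements (exercised by test_check_deprecations in the test file below), and dict-valued settings such as broker transport options can be supplied as Base64-encoded JSON. A minimal sketch of the latter, mirroring the tests:

import base64

from authentik.lib.config import ConfigLoader

config = ConfigLoader()
# Store a Base64-encoded JSON document under a config key...
encoded = base64.b64encode(b'{"mastername": "mymaster"}').decode()
config.set("broker.transport_options", encoded)
# ...and read it back as a dict
assert config.get_dict_from_b64_json("broker.transport_options") == {
    "mastername": "mymaster"
}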

View file

@ -28,14 +28,28 @@ listen:
redis:
host: localhost
port: 6379
db: 0
username: ""
password: ""
tls: false
tls_reqs: "none"
db: 0
cache_timeout: 300
cache_timeout_flows: 300
cache_timeout_policies: 300
cache_timeout_reputation: 300
# broker:
# url: ""
# transport_options: ""
cache:
# url: ""
timeout: 300
timeout_flows: 300
timeout_policies: 300
timeout_reputation: 300
# channel:
# url: ""
# result_backend:
# url: ""
paths:
media: ./media

View file

@ -1,20 +1,32 @@
"""Test config loader"""
import base64
from json import dumps
from os import chmod, environ, unlink, write
from tempfile import mkstemp
from unittest import mock
from django.conf import ImproperlyConfigured
from django.test import TestCase
from authentik.lib.config import ENV_PREFIX, ConfigLoader
from authentik.lib.config import ENV_PREFIX, UNSET, Attr, AttrEncoder, ConfigLoader
class TestConfig(TestCase):
"""Test config loader"""
check_deprecations_env_vars = {
ENV_PREFIX + "_REDIS__BROKER_URL": "redis://myredis:8327/43",
ENV_PREFIX + "_REDIS__BROKER_TRANSPORT_OPTIONS": "bWFzdGVybmFtZT1teW1hc3Rlcg==",
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT": "124s",
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_FLOWS": "32m",
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_POLICIES": "3920ns",
ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_REPUTATION": "298382us",
}
@mock.patch.dict(environ, {ENV_PREFIX + "_test__test": "bar"})
def test_env(self):
"""Test simple instance"""
config = ConfigLoader()
environ[ENV_PREFIX + "_test__test"] = "bar"
config.update_from_env()
self.assertEqual(config.get("test.test"), "bar")
@ -27,12 +39,20 @@ class TestConfig(TestCase):
self.assertEqual(config.get("foo.bar"), "baz")
self.assertEqual(config.get("foo.bar"), "bar")
@mock.patch.dict(environ, {"foo": "bar"})
def test_uri_env(self):
"""Test URI parsing (environment)"""
config = ConfigLoader()
environ["foo"] = "bar"
self.assertEqual(config.parse_uri("env://foo").value, "bar")
self.assertEqual(config.parse_uri("env://foo?bar").value, "bar")
foo_uri = "env://foo"
foo_parsed = config.parse_uri(foo_uri)
self.assertEqual(foo_parsed.value, "bar")
self.assertEqual(foo_parsed.source_type, Attr.Source.URI)
self.assertEqual(foo_parsed.source, foo_uri)
foo_bar_uri = "env://foo?bar"
foo_bar_parsed = config.parse_uri(foo_bar_uri)
self.assertEqual(foo_bar_parsed.value, "bar")
self.assertEqual(foo_bar_parsed.source_type, Attr.Source.URI)
self.assertEqual(foo_bar_parsed.source, foo_bar_uri)
def test_uri_file(self):
"""Test URI parsing (file load)"""
@ -91,3 +111,60 @@ class TestConfig(TestCase):
config = ConfigLoader()
config.set("foo", "bar")
self.assertEqual(config.get_int("foo", 1234), 1234)
def test_get_dict_from_b64_json(self):
"""Test get_dict_from_b64_json"""
config = ConfigLoader()
test_value = ' { "foo": "bar" } '.encode("utf-8")
b64_value = base64.b64encode(test_value)
config.set("foo", b64_value)
self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
def test_get_dict_from_b64_json_missing_brackets(self):
"""Test get_dict_from_b64_json with missing brackets"""
config = ConfigLoader()
test_value = ' "foo": "bar" '.encode("utf-8")
b64_value = base64.b64encode(test_value)
config.set("foo", b64_value)
self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
def test_get_dict_from_b64_json_invalid(self):
"""Test get_dict_from_b64_json with invalid value"""
config = ConfigLoader()
config.set("foo", "bar")
self.assertEqual(config.get_dict_from_b64_json("foo"), {})
def test_attr_json_encoder(self):
"""Test AttrEncoder"""
test_attr = Attr("foo", Attr.Source.ENV, "AUTHENTIK_REDIS__USERNAME")
json_attr = dumps(test_attr, indent=4, cls=AttrEncoder)
self.assertEqual(json_attr, '"foo"')
def test_attr_json_encoder_no_attr(self):
"""Test AttrEncoder if no Attr is passed"""
class Test:
"""Non Attr class"""
with self.assertRaises(TypeError):
test_obj = Test()
dumps(test_obj, indent=4, cls=AttrEncoder)
@mock.patch.dict(environ, check_deprecations_env_vars)
def test_check_deprecations(self):
"""Test config key re-write for deprecated env vars"""
config = ConfigLoader()
config.update_from_env()
config.check_deprecations()
self.assertEqual(config.get("redis.broker_url", UNSET), UNSET)
self.assertEqual(config.get("redis.broker_transport_options", UNSET), UNSET)
self.assertEqual(config.get("redis.cache_timeout", UNSET), UNSET)
self.assertEqual(config.get("redis.cache_timeout_flows", UNSET), UNSET)
self.assertEqual(config.get("redis.cache_timeout_policies", UNSET), UNSET)
self.assertEqual(config.get("redis.cache_timeout_reputation", UNSET), UNSET)
self.assertEqual(config.get("broker.url"), "redis://myredis:8327/43")
self.assertEqual(config.get("broker.transport_options"), "bWFzdGVybmFtZT1teW1hc3Rlcg==")
self.assertEqual(config.get("cache.timeout"), "124s")
self.assertEqual(config.get("cache.timeout_flows"), "32m")
self.assertEqual(config.get("cache.timeout_policies"), "3920ns")
self.assertEqual(config.get("cache.timeout_reputation"), "298382us")

View file

@ -93,7 +93,7 @@ class OutpostConsumer(AuthJsonConsumer):
expected=self.outpost.config.kubernetes_replicas,
).dec()
def receive_json(self, content: Data):
def receive_json(self, content: Data, **kwargs):
msg = from_dict(WebsocketMessage, content)
uid = msg.args.get("uuid", self.channel_name)
self.last_uid = uid

View file

@ -346,12 +346,22 @@ class Outpost(SerializerModel, ManagedModel):
user_created = False
if not user:
user: User = User.objects.create(username=self.user_identifier)
user.set_unusable_password()
user_created = True
user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
user.name = f"Outpost {self.name} Service-Account"
user.path = USER_PATH_OUTPOSTS
user.save()
attrs = {
"type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
"name": f"Outpost {self.name} Service-Account",
"path": USER_PATH_OUTPOSTS,
}
dirty = False
for key, value in attrs.items():
if getattr(user, key) != value:
dirty = True
setattr(user, key, value)
if user.has_usable_password():
user.set_unusable_password()
dirty = True
if dirty:
user.save()
if user_created:
self.build_user_permissions(user)
return user
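
The rewritten service-account setup only calls save() when an attribute actually changed, avoiding a database write (and the accompanying post_save signals) every time the outpost user is looked up. The pattern in isolation, as a generic sketch:

def apply_attrs(obj, attrs: dict) -> bool:
    """Set attrs on obj; report whether anything changed."""
    dirty = False
    for key, value in attrs.items():
        if getattr(obj, key) != value:
            setattr(obj, key, value)
            dirty = True
    return dirty

# Only persist when needed:
# if apply_attrs(user, attrs): user.save()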

View file

@ -20,7 +20,7 @@ from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
LOGGER = get_logger()
FORK_CTX = get_context("fork")
CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_policies")
CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_policies")
PROCESS_CLASS = FORK_CTX.Process

View file

@ -13,7 +13,7 @@ from authentik.policies.reputation.tasks import save_reputation
from authentik.stages.identification.signals import identification_failed
LOGGER = get_logger()
CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_reputation")
CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_reputation")
def update_score(request: HttpRequest, identifier: str, amount: int):

View file

@ -0,0 +1,187 @@
"""Test token view"""
from base64 import b64encode, urlsafe_b64encode
from hashlib import sha256
from django.test import RequestFactory
from django.urls import reverse
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.flows.challenge import ChallengeTypes
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import GRANT_TYPE_AUTHORIZATION_CODE
from authentik.providers.oauth2.models import AuthorizationCode, OAuth2Provider
from authentik.providers.oauth2.tests.utils import OAuthTestCase
class TestTokenPKCE(OAuthTestCase):
"""Test token view"""
def setUp(self) -> None:
super().setUp()
self.factory = RequestFactory()
self.app = Application.objects.create(name=generate_id(), slug="test")
def test_pkce_missing_in_token(self):
"""Test full with pkce"""
flow = create_test_flow()
provider = OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=flow,
redirect_uris="foo://localhost",
access_code_validity="seconds=100",
)
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
user = create_test_admin_user()
self.client.force_login(user)
challenge = generate_id()
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
# Step 1, initiate params and get redirect to flow
self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"code_challenge": challenge,
"code_challenge_method": "S256",
},
)
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
self.assertJSONEqual(
response.content.decode(),
{
"component": "xak-flow-redirect",
"type": ChallengeTypes.REDIRECT.value,
"to": f"foo://localhost?code={code.code}&state={state}",
},
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
data={
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
"code": code.code,
# Missing the code_verifier here
"redirect_uri": "foo://localhost",
},
HTTP_AUTHORIZATION=f"Basic {header}",
)
self.assertJSONEqual(
response.content,
{"error": "invalid_request", "error_description": "The request is otherwise malformed"},
)
self.assertEqual(response.status_code, 400)
def test_pkce_correct_s256(self):
"""Test full with pkce"""
flow = create_test_flow()
provider = OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=flow,
redirect_uris="foo://localhost",
access_code_validity="seconds=100",
)
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
user = create_test_admin_user()
self.client.force_login(user)
verifier = generate_id()
challenge = (
urlsafe_b64encode(sha256(verifier.encode("ascii")).digest())
.decode("utf-8")
.replace("=", "")
)
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
# Step 1, initiate params and get redirect to flow
self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"code_challenge": challenge,
"code_challenge_method": "S256",
},
)
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
self.assertJSONEqual(
response.content.decode(),
{
"component": "xak-flow-redirect",
"type": ChallengeTypes.REDIRECT.value,
"to": f"foo://localhost?code={code.code}&state={state}",
},
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
data={
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
"code": code.code,
"code_verifier": verifier,
"redirect_uri": "foo://localhost",
},
HTTP_AUTHORIZATION=f"Basic {header}",
)
self.assertEqual(response.status_code, 200)
def test_pkce_correct_plain(self):
"""Test full with pkce"""
flow = create_test_flow()
provider = OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=flow,
redirect_uris="foo://localhost",
access_code_validity="seconds=100",
)
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
user = create_test_admin_user()
self.client.force_login(user)
verifier = generate_id()
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
# Step 1, initiate params and get redirect to flow
self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"code_challenge": verifier,
},
)
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
self.assertJSONEqual(
response.content.decode(),
{
"component": "xak-flow-redirect",
"type": ChallengeTypes.REDIRECT.value,
"to": f"foo://localhost?code={code.code}&state={state}",
},
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
data={
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
"code": code.code,
"code_verifier": verifier,
"redirect_uri": "foo://localhost",
},
HTTP_AUTHORIZATION=f"Basic {header}",
)
self.assertEqual(response.status_code, 200)

View file

@ -188,6 +188,7 @@ def authenticate_provider(request: HttpRequest) -> Optional[OAuth2Provider]:
if client_id != provider.client_id or client_secret != provider.client_secret:
LOGGER.debug("(basic) Provider for basic auth does not exist")
return None
CTX_AUTH_VIA.set("oauth_client_secret")
return provider

View file

@ -17,6 +17,7 @@ from jwt import PyJWK, PyJWT, PyJWTError, decode
from sentry_sdk.hub import Hub
from structlog.stdlib import get_logger
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import (
USER_ATTRIBUTE_EXPIRES,
USER_ATTRIBUTE_GENERATED,
@ -221,7 +222,10 @@ class TokenParams:
raise TokenError("invalid_grant")
# Validate PKCE parameters.
if self.code_verifier:
if self.authorization_code.code_challenge:
# Authorization code was issued with PKCE, but the token request has no verifier
if not self.code_verifier:
raise TokenError("invalid_request")
if self.authorization_code.code_challenge_method == PKCE_METHOD_S256:
new_code_challenge = (
urlsafe_b64encode(sha256(self.code_verifier.encode("ascii")).digest())
@ -448,6 +452,7 @@ class TokenView(View):
if not self.provider:
LOGGER.warning("OAuth2Provider does not exist", client_id=client_id)
raise TokenError("invalid_client")
CTX_AUTH_VIA.set("oauth_client_secret")
self.params = TokenParams.parse(request, self.provider, client_id, client_secret)
with Hub.current.start_span(
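
The tightened check above now rejects token requests that omit code_verifier when the authorization code was issued with a PKCE challenge. For reference, the S256 relationship being verified (a sketch; RFC 7636 defines the transform):

from base64 import urlsafe_b64encode
from hashlib import sha256

def s256_challenge(verifier: str) -> str:
    # code_challenge = BASE64URL(SHA256(code_verifier)), padding stripped
    digest = sha256(verifier.encode("ascii")).digest()
    return urlsafe_b64encode(digest).decode("utf-8").replace("=", "")

# The token endpoint recomputes the challenge from the supplied verifier
# and compares it against the one stored with the authorization code.
assert len(s256_challenge("example-verifier")) == 43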

View file

@ -46,7 +46,9 @@ class SCIMGroupClient(SCIMClient[Group, SCIMGroupSchema]):
def to_scim(self, obj: Group) -> SCIMGroupSchema:
"""Convert authentik user into SCIM"""
raw_scim_group = {}
raw_scim_group = {
"schemas": ("urn:ietf:params:scim:schemas:core:2.0:Group",),
}
for mapping in (
self.provider.property_mappings_group.all().order_by("name").select_subclasses()
):

View file

@ -15,12 +15,14 @@ from pydanticscim.user import User as BaseUser
class User(BaseUser):
"""Modified User schema with added externalId field"""
schemas: tuple[str] = ("urn:ietf:params:scim:schemas:core:2.0:User",)
externalId: Optional[str] = None
class Group(BaseGroup):
"""Modified Group schema with added externalId field"""
schemas: tuple[str] = ("urn:ietf:params:scim:schemas:core:2.0:Group",)
externalId: Optional[str] = None

View file

@ -39,7 +39,9 @@ class SCIMUserClient(SCIMClient[User, SCIMUserSchema]):
def to_scim(self, obj: User) -> SCIMUserSchema:
"""Convert authentik user into SCIM"""
raw_scim_user = {}
raw_scim_user = {
"schemas": ("urn:ietf:params:scim:schemas:core:2.0:User",),
}
for mapping in self.provider.property_mappings.all().order_by("name").select_subclasses():
if not isinstance(mapping, SCIMMapping):
continue

View file

@ -61,7 +61,11 @@ class SCIMGroupTests(TestCase):
self.assertEqual(mock.request_history[1].method, "POST")
self.assertJSONEqual(
mock.request_history[1].body,
{"externalId": str(group.pk), "displayName": group.name},
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
"displayName": group.name,
},
)
@Mocker()
@ -96,7 +100,11 @@ class SCIMGroupTests(TestCase):
validate(body, loads(schema.read()))
self.assertEqual(
body,
{"externalId": str(group.pk), "displayName": group.name},
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
"displayName": group.name,
},
)
group.save()
self.assertEqual(mock.call_count, 4)
@ -129,7 +137,11 @@ class SCIMGroupTests(TestCase):
self.assertEqual(mock.request_history[1].method, "POST")
self.assertJSONEqual(
mock.request_history[1].body,
{"externalId": str(group.pk), "displayName": group.name},
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
"displayName": group.name,
},
)
group.delete()
self.assertEqual(mock.call_count, 4)

View file

@ -89,6 +89,7 @@ class SCIMMembershipTests(TestCase):
self.assertJSONEqual(
mocker.request_history[3].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"emails": [],
"active": True,
"externalId": user.uid,
@ -99,7 +100,11 @@ class SCIMMembershipTests(TestCase):
)
self.assertJSONEqual(
mocker.request_history[5].body,
{"externalId": str(group.pk), "displayName": group.name},
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
"displayName": group.name,
},
)
with Mocker() as mocker:
@ -118,6 +123,7 @@ class SCIMMembershipTests(TestCase):
self.assertJSONEqual(
mocker.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
"Operations": [
{
"op": "add",
@ -125,7 +131,6 @@ class SCIMMembershipTests(TestCase):
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
},
)
@ -174,6 +179,7 @@ class SCIMMembershipTests(TestCase):
self.assertJSONEqual(
mocker.request_history[3].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"active": True,
"displayName": "",
"emails": [],
@ -184,7 +190,11 @@ class SCIMMembershipTests(TestCase):
)
self.assertJSONEqual(
mocker.request_history[5].body,
{"externalId": str(group.pk), "displayName": group.name},
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:Group"],
"externalId": str(group.pk),
"displayName": group.name,
},
)
with Mocker() as mocker:
@ -203,6 +213,7 @@ class SCIMMembershipTests(TestCase):
self.assertJSONEqual(
mocker.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
"Operations": [
{
"op": "add",
@ -210,7 +221,6 @@ class SCIMMembershipTests(TestCase):
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
},
)
@ -230,6 +240,7 @@ class SCIMMembershipTests(TestCase):
self.assertJSONEqual(
mocker.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
"Operations": [
{
"op": "remove",
@ -237,6 +248,5 @@ class SCIMMembershipTests(TestCase):
"value": [{"value": user_scim_id}],
}
],
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
},
)

View file

@ -66,6 +66,7 @@ class SCIMUserTests(TestCase):
self.assertJSONEqual(
mock.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"active": True,
"emails": [
{
@ -121,6 +122,7 @@ class SCIMUserTests(TestCase):
self.assertEqual(
body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"active": True,
"emails": [
{
@ -173,6 +175,7 @@ class SCIMUserTests(TestCase):
self.assertJSONEqual(
mock.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"active": True,
"emails": [
{
@ -240,6 +243,7 @@ class SCIMUserTests(TestCase):
self.assertJSONEqual(
mock.request_history[1].body,
{
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
"active": True,
"emails": [
{

View file

@ -1,5 +1,4 @@
"""root settings for authentik"""
import importlib
import os
from hashlib import sha512
@ -195,8 +194,8 @@ _redis_url = (
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": f"{_redis_url}/{CONFIG.get('redis.db')}",
"TIMEOUT": CONFIG.get_int("redis.cache_timeout", 300),
"LOCATION": CONFIG.get("cache.url") or f"{_redis_url}/{CONFIG.get('redis.db')}",
"TIMEOUT": CONFIG.get_int("cache.timeout", 300),
"OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
"KEY_PREFIX": "authentik_cache",
}
@ -256,7 +255,7 @@ CHANNEL_LAYERS = {
"default": {
"BACKEND": "channels_redis.pubsub.RedisPubSubChannelLayer",
"CONFIG": {
"hosts": [f"{_redis_url}/{CONFIG.get('redis.db')}"],
"hosts": [CONFIG.get("channel.url", f"{_redis_url}/{CONFIG.get('redis.db')}")],
"prefix": "authentik_channels_",
},
},
@ -349,8 +348,11 @@ CELERY = {
},
"task_create_missing_queues": True,
"task_default_queue": "authentik",
"broker_url": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
"result_backend": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
"broker_url": CONFIG.get("broker.url")
or f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
"broker_transport_options": CONFIG.get_dict_from_b64_json("broker.transport_options"),
"result_backend": CONFIG.get("result_backend.url")
or f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
}
# Sentry integration
@ -409,7 +411,6 @@ if DEBUG:
CELERY["task_always_eager"] = True
os.environ[ENV_GIT_HASH_KEY] = "dev"
INSTALLED_APPS.append("silk")
SILKY_PYTHON_PROFILER = True
MIDDLEWARE = ["silk.middleware.SilkyMiddleware"] + MIDDLEWARE
REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"].append(
"rest_framework.renderers.BrowsableAPIRenderer"

View file

@ -1,13 +1,14 @@
"""Source API Views"""
from typing import Any
from typing import Any, Optional
from django.core.cache import cache
from django_filters.filters import AllValuesMultipleFilter
from django_filters.filterset import FilterSet
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, extend_schema_field, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import DictField, ListField
from rest_framework.fields import BooleanField, DictField, ListField, SerializerMethodField
from rest_framework.relations import PrimaryKeyRelatedField
from rest_framework.request import Request
from rest_framework.response import Response
@ -17,15 +18,17 @@ from authentik.admin.api.tasks import TaskSerializer
from authentik.core.api.propertymappings import PropertyMappingSerializer
from authentik.core.api.sources import SourceSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer
from authentik.crypto.models import CertificateKeyPair
from authentik.events.monitored_tasks import TaskInfo
from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
from authentik.sources.ldap.tasks import SYNC_CLASSES
from authentik.sources.ldap.tasks import CACHE_KEY_STATUS, SYNC_CLASSES
class LDAPSourceSerializer(SourceSerializer):
"""LDAP Source Serializer"""
connectivity = SerializerMethodField()
client_certificate = PrimaryKeyRelatedField(
allow_null=True,
help_text="Client certificate to authenticate against the LDAP Server's Certificate.",
@ -35,6 +38,10 @@ class LDAPSourceSerializer(SourceSerializer):
required=False,
)
def get_connectivity(self, source: LDAPSource) -> Optional[dict[str, dict[str, str]]]:
"""Get cached source connectivity"""
return cache.get(CACHE_KEY_STATUS + source.slug, None)
def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
"""Check that only a single source has password_sync on"""
sync_users_password = attrs.get("sync_users_password", True)
@ -75,10 +82,18 @@ class LDAPSourceSerializer(SourceSerializer):
"sync_parent_group",
"property_mappings",
"property_mappings_group",
"connectivity",
]
extra_kwargs = {"bind_password": {"write_only": True}}
class LDAPSyncStatusSerializer(PassiveSerializer):
"""LDAP Source sync status"""
is_running = BooleanField(read_only=True)
tasks = TaskSerializer(many=True, read_only=True)
class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
"""LDAP Source Viewset"""
@ -114,19 +129,19 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
@extend_schema(
responses={
200: TaskSerializer(many=True),
200: LDAPSyncStatusSerializer(),
}
)
@action(methods=["GET"], detail=True, pagination_class=None, filter_backends=[])
def sync_status(self, request: Request, slug: str) -> Response:
"""Get source's sync status"""
source = self.get_object()
results = []
tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*")
if tasks:
for task in tasks:
results.append(task)
return Response(TaskSerializer(results, many=True).data)
source: LDAPSource = self.get_object()
tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*") or []
status = {
"tasks": tasks,
"is_running": source.sync_lock.locked(),
}
return Response(LDAPSyncStatusSerializer(status).data)
@extend_schema(
responses={
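
With this change, sync_status returns a single object instead of a bare task list. The response shape, using the field names from LDAPSyncStatusSerializer above (illustrative):

# GET .../sources/ldap/{slug}/sync_status/
# {
#     "is_running": false,
#     "tasks": [...serialized TaskInfo entries...]
# }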

View file

@ -0,0 +1,24 @@
"""LDAP Connection check"""
from json import dumps
from django.core.management.base import BaseCommand
from structlog.stdlib import get_logger
from authentik.sources.ldap.models import LDAPSource
LOGGER = get_logger()
class Command(BaseCommand):
"""Check connectivity to LDAP servers for a source"""
def add_arguments(self, parser):
parser.add_argument("source_slugs", nargs="?", type=str)
def handle(self, **options):
sources = LDAPSource.objects.filter(enabled=True)
if options["source_slugs"]:
sources = LDAPSource.objects.filter(slug__in=options["source_slugs"])
for source in sources.order_by("slug"):
status = source.check_connection()
self.stdout.write(dumps(status, indent=4))

View file

@ -1,13 +1,17 @@
"""authentik LDAP Models"""
from os import chmod
from os.path import dirname, exists
from shutil import rmtree
from ssl import CERT_REQUIRED
from tempfile import NamedTemporaryFile, mkdtemp
from typing import Optional
from django.core.cache import cache
from django.db import models
from django.utils.translation import gettext_lazy as _
from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
from ldap3.core.exceptions import LDAPInsufficientAccessRightsResult, LDAPSchemaError
from ldap3.core.exceptions import LDAPException, LDAPInsufficientAccessRightsResult, LDAPSchemaError
from redis.lock import Lock
from rest_framework.serializers import Serializer
from authentik.core.models import Group, PropertyMapping, Source
@ -117,7 +121,7 @@ class LDAPSource(Source):
return LDAPSourceSerializer
def server(self, **kwargs) -> Server:
def server(self, **kwargs) -> ServerPool:
"""Get LDAP Server/ServerPool"""
servers = []
tls_kwargs = {}
@ -154,7 +158,10 @@ class LDAPSource(Source):
return ServerPool(servers, RANDOM, active=5, exhaust=True)
def connection(
self, server_kwargs: Optional[dict] = None, connection_kwargs: Optional[dict] = None
self,
server: Optional[Server] = None,
server_kwargs: Optional[dict] = None,
connection_kwargs: Optional[dict] = None,
) -> Connection:
"""Get a fully connected and bound LDAP Connection"""
server_kwargs = server_kwargs or {}
@ -164,7 +171,7 @@ class LDAPSource(Source):
if self.bind_password is not None:
connection_kwargs.setdefault("password", self.bind_password)
connection = Connection(
self.server(**server_kwargs),
server or self.server(**server_kwargs),
raise_exceptions=True,
receive_timeout=LDAP_TIMEOUT,
**connection_kwargs,
@ -183,9 +190,60 @@ class LDAPSource(Source):
if server_kwargs.get("get_info", ALL) == NONE:
raise exc
server_kwargs["get_info"] = NONE
return self.connection(server_kwargs, connection_kwargs)
return self.connection(server, server_kwargs, connection_kwargs)
finally:
if connection.server.tls.certificate_file is not None and exists(
connection.server.tls.certificate_file
):
rmtree(dirname(connection.server.tls.certificate_file))
return RuntimeError("Failed to bind")
@property
def sync_lock(self) -> Lock:
"""Redis lock for syncing LDAP to prevent multiple parallel syncs happening"""
return Lock(
cache.client.get_client(),
name=f"goauthentik.io/sources/ldap/sync-{self.slug}",
# Convert the task timeout from hours to seconds
# (see authentik/sources/ldap/tasks.py:54)
# and multiply by 3 to add extra leeway
timeout=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3,
)
def check_connection(self) -> dict[str, dict[str, str]]:
"""Check LDAP Connection"""
from authentik.sources.ldap.sync.base import flatten
servers = self.server()
server_info = {}
# Check each individual server
for server in servers.servers:
server: Server
try:
connection = self.connection(server=server)
server_info[server.host] = {
"vendor": str(flatten(connection.server.info.vendor_name)),
"version": str(flatten(connection.server.info.vendor_version)),
"status": "ok",
}
except LDAPException as exc:
server_info[server.host] = {
"status": str(exc),
}
# Check server pool
try:
connection = self.connection()
server_info["__all__"] = {
"vendor": str(flatten(connection.server.info.vendor_name)),
"version": str(flatten(connection.server.info.vendor_version)),
"status": "ok",
}
except LDAPException as exc:
server_info["__all__"] = {
"status": str(exc),
}
return server_info
class Meta:
verbose_name = _("LDAP Source")
verbose_name_plural = _("LDAP Sources")
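
check_connection() probes each configured LDAP server individually and then the pool as a whole; the result is cached by the periodic connectivity-check task (scheduled below) and surfaced through the API's get_connectivity above, as well as by the new management command. The returned mapping looks roughly like this (hostnames and versions are illustrative):

status = source.check_connection()
# {
#     "ldap1.example.com": {"vendor": "...", "version": "...", "status": "ok"},
#     "ldap2.example.com": {"status": "LDAPSocketOpenError(...)"},
#     "__all__": {"vendor": "...", "version": "...", "status": "ok"},
# }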

View file

@ -8,5 +8,10 @@ CELERY_BEAT_SCHEDULE = {
"task": "authentik.sources.ldap.tasks.ldap_sync_all",
"schedule": crontab(minute=fqdn_rand("sources_ldap_sync"), hour="*/2"),
"options": {"queue": "authentik_scheduled"},
}
},
"sources_ldap_connectivity_check": {
"task": "authentik.sources.ldap.tasks.ldap_connectivity_check",
"schedule": crontab(minute=fqdn_rand("sources_ldap_connectivity_check"), hour="*"),
"options": {"queue": "authentik_scheduled"},
},
}

View file

@ -14,7 +14,7 @@ from authentik.events.models import Event, EventAction
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.password import LDAPPasswordChanger
from authentik.sources.ldap.tasks import ldap_sync_single
from authentik.sources.ldap.tasks import ldap_connectivity_check, ldap_sync_single
from authentik.stages.prompt.signals import password_validate
LOGGER = get_logger()
@ -32,6 +32,7 @@ def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
if not instance.property_mappings.exists() or not instance.property_mappings_group.exists():
return
ldap_sync_single.delay(instance.pk)
ldap_connectivity_check.delay(instance.pk)
@receiver(password_validate)

View file

@ -17,6 +17,15 @@ from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
LDAP_UNIQUENESS = "ldap_uniq"
def flatten(value: Any) -> Any:
"""Flatten `value` if its a list"""
if isinstance(value, list):
if len(value) < 1:
return None
return value[0]
return value
class BaseLDAPSynchronizer:
"""Sync LDAP Users and groups into authentik"""
@ -122,14 +131,6 @@ class BaseLDAPSynchronizer:
cookie = None
yield self._connection.response
def _flatten(self, value: Any) -> Any:
"""Flatten `value` if its a list"""
if isinstance(value, list):
if len(value) < 1:
return None
return value[0]
return value
def build_user_properties(self, user_dn: str, **kwargs) -> dict[str, Any]:
"""Build attributes for User object based on property mappings."""
props = self._build_object_properties(user_dn, self._source.property_mappings, **kwargs)
@ -163,10 +164,10 @@ class BaseLDAPSynchronizer:
object_field = mapping.object_field
if object_field.startswith("attributes."):
# Because returning a list might be desired, we can't
# rely on self._flatten here. Instead, just save the result as-is
# rely on flatten here. Instead, just save the result as-is
set_path_in_dict(properties, object_field, value)
else:
properties[object_field] = self._flatten(value)
properties[object_field] = flatten(value)
except PropertyMappingExpressionException as exc:
Event.new(
EventAction.CONFIGURATION_ERROR,
@ -177,7 +178,7 @@ class BaseLDAPSynchronizer:
self._logger.warning("Mapping failed to evaluate", exc=exc, mapping=mapping)
continue
if self._source.object_uniqueness_field in kwargs:
properties["attributes"][LDAP_UNIQUENESS] = self._flatten(
properties["attributes"][LDAP_UNIQUENESS] = flatten(
kwargs.get(self._source.object_uniqueness_field)
)
properties["attributes"][LDAP_DISTINGUISHED_NAME] = object_dn

View file

@ -7,7 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE
from authentik.core.models import Group
from authentik.events.models import Event, EventAction
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer, flatten
class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
@ -39,7 +39,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
if "attributes" not in group:
continue
attributes = group.get("attributes", {})
group_dn = self._flatten(self._flatten(group.get("entryDN", group.get("dn"))))
group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
if self._source.object_uniqueness_field not in attributes:
self.message(
f"Cannot find uniqueness field in attributes: '{group_dn}'",
@ -47,7 +47,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
dn=group_dn,
)
continue
uniq = self._flatten(attributes[self._source.object_uniqueness_field])
uniq = flatten(attributes[self._source.object_uniqueness_field])
try:
defaults = self.build_group_properties(group_dn, **attributes)
defaults["parent"] = self._source.sync_parent_group

View file

@ -7,7 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE
from authentik.core.models import User
from authentik.events.models import Event, EventAction
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer, flatten
from authentik.sources.ldap.sync.vendor.freeipa import FreeIPA
from authentik.sources.ldap.sync.vendor.ms_ad import MicrosoftActiveDirectory
@ -41,7 +41,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
if "attributes" not in user:
continue
attributes = user.get("attributes", {})
user_dn = self._flatten(user.get("entryDN", user.get("dn")))
user_dn = flatten(user.get("entryDN", user.get("dn")))
if self._source.object_uniqueness_field not in attributes:
self.message(
f"Cannot find uniqueness field in attributes: '{user_dn}'",
@ -49,7 +49,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
dn=user_dn,
)
continue
uniq = self._flatten(attributes[self._source.object_uniqueness_field])
uniq = flatten(attributes[self._source.object_uniqueness_field])
try:
defaults = self.build_user_properties(user_dn, **attributes)
self._logger.debug("Writing user with attributes", **defaults)

View file

@ -5,7 +5,7 @@ from typing import Any, Generator
from pytz import UTC
from authentik.core.models import User
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer
from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer, flatten
class FreeIPA(BaseLDAPSynchronizer):
@ -47,7 +47,7 @@ class FreeIPA(BaseLDAPSynchronizer):
return
# For some reason, nsaccountlock is not defined properly in the schema as bool
# hence we get it as a list of strings
_is_locked = str(self._flatten(attributes.get("nsaccountlock", ["FALSE"])))
_is_locked = str(flatten(attributes.get("nsaccountlock", ["FALSE"])))
# So we have to attempt to convert it to a bool
is_locked = _is_locked.lower() == "true"
# And then invert it since freeipa saves locked and we save active
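
The quirk handled above in miniature (attribute payloads assumed; the inline flatten is simplified): `nsaccountlock` arrives as a list of strings, so it is flattened, string-compared, and inverted into an "active" flag.

    def is_active(attributes: dict) -> bool:
        raw = attributes.get("nsaccountlock", ["FALSE"])
        value = raw[0] if isinstance(raw, list) and raw else raw  # flatten()
        return str(value).lower() != "true"

    assert is_active({"nsaccountlock": ["TRUE"]}) is False  # locked -> inactive
    assert is_active({}) is True                            # absent -> active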

View file

@ -1,13 +1,14 @@
"""LDAP Sync tasks"""
from typing import Optional
from uuid import uuid4
from celery import chain, group
from django.core.cache import cache
from ldap3.core.exceptions import LDAPException
from redis.exceptions import LockError
from redis.lock import Lock
from structlog.stdlib import get_logger
from authentik.events.monitored_tasks import CACHE_KEY_PREFIX as CACHE_KEY_PREFIX_TASKS
from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
from authentik.lib.config import CONFIG
from authentik.lib.utils.errors import exception_to_string
@ -26,6 +27,7 @@ SYNC_CLASSES = [
MembershipLDAPSynchronizer,
]
CACHE_KEY_PREFIX = "goauthentik.io/sources/ldap/page/"
CACHE_KEY_STATUS = "goauthentik.io/sources/ldap/status/"
@CELERY_APP.task()
@ -35,6 +37,19 @@ def ldap_sync_all():
ldap_sync_single.apply_async(args=[source.pk])
@CELERY_APP.task()
def ldap_connectivity_check(pk: Optional[str] = None):
"""Check connectivity for LDAP Sources"""
# 2 hour timeout, this task should run every hour
timeout = 60 * 60 * 2
sources = LDAPSource.objects.filter(enabled=True)
if pk:
sources = sources.filter(pk=pk)
for source in sources:
status = source.check_connection()
cache.set(CACHE_KEY_STATUS + source.slug, status, timeout=timeout)
@CELERY_APP.task(
# We multiply the configured hours timeout by 2.5: user and
# group sync run in parallel and then membership, so 2x covers the serial tasks,
@ -47,12 +62,15 @@ def ldap_sync_single(source_pk: str):
source: LDAPSource = LDAPSource.objects.filter(pk=source_pk).first()
if not source:
return
lock = Lock(cache.client.get_client(), name=f"goauthentik.io/sources/ldap/sync-{source.slug}")
lock = source.sync_lock
if lock.locked():
LOGGER.debug("LDAP sync locked, skipping task", source=source.slug)
return
try:
with lock:
# Delete all sync tasks from the cache
keys = cache.keys(f"{CACHE_KEY_PREFIX_TASKS}ldap_sync:{source.slug}*")
cache.delete_many(keys)
task = chain(
# User and group sync can happen at once, they have no dependencies on each other
group(
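
For completeness, a sketch (assumed consumer, mirroring the CACHE_KEY_STATUS prefix defined above) of how another component could read the connectivity status the hourly task caches per source slug:

    from django.core.cache import cache

    CACHE_KEY_STATUS = "goauthentik.io/sources/ldap/status/"

    def get_source_status(slug: str):
        # Returns the dict produced by source.check_connection(), or None if
        # the hourly task has not run within the 2-hour cache timeout.
        return cache.get(CACHE_KEY_STATUS + slug)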

View file

@ -74,7 +74,7 @@ class OAuthSource(Source):
def ui_login_button(self, request: HttpRequest) -> UILoginButton:
provider_type = self.source_type
provider = provider_type()
icon = self.get_icon
icon = self.icon_url
if not icon:
icon = provider.icon_url()
return UILoginButton(
@ -85,7 +85,7 @@ class OAuthSource(Source):
def ui_user_settings(self) -> Optional[UserSettingSerializer]:
provider_type = self.source_type
icon = self.get_icon
icon = self.icon_url
if not icon:
icon = provider_type().icon_url()
return UserSettingSerializer(
@ -232,7 +232,7 @@ class UserOAuthSourceConnection(UserSourceConnection):
access_token = models.TextField(blank=True, null=True, default=None)
@property
def serializer(self) -> Serializer:
def serializer(self) -> type[Serializer]:
from authentik.sources.oauth.api.source_connection import (
UserOAuthSourceConnectionSerializer,
)
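
The repeated `icon_url` pattern above in one helper (a sketch, with hypothetical argument names; the hunks use `self.icon_url` and the provider type's `icon_url()`): prefer the configured source icon and fall back to the provider's bundled default.

    def login_button_icon(source_icon_url, default_icon_url):
        # Fallback order per the hunks above: the source's configured
        # icon_url wins; otherwise use the provider type's default.
        return source_icon_url or default_icon_url

    assert login_button_icon(None, "/static/github.svg") == "/static/github.svg"
    assert login_button_icon("https://cdn.example/x.png", "/static/github.svg") == "https://cdn.example/x.png"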

View file

@ -12,8 +12,9 @@ class PatreonOAuthRedirect(OAuthRedirect):
"""Patreon OAuth2 Redirect"""
def get_additional_parameters(self, source: OAuthSource): # pragma: no cover
# https://docs.patreon.com/#scopes
return {
"scope": ["openid", "email", "profile"],
"scope": ["identity", "identity[email]"],
}

View file

@ -62,7 +62,7 @@ class PlexSource(Source):
return PlexSourceSerializer
def ui_login_button(self, request: HttpRequest) -> UILoginButton:
icon = self.get_icon
icon = self.icon_url
if not icon:
icon = static("authentik/sources/plex.svg")
return UILoginButton(
@ -79,7 +79,7 @@ class PlexSource(Source):
)
def ui_user_settings(self) -> Optional[UserSettingSerializer]:
icon = self.get_icon
icon = self.icon_url
if not icon:
icon = static("authentik/sources/plex.svg")
return UserSettingSerializer(

View file

@ -200,11 +200,11 @@ class SAMLSource(Source):
}
),
name=self.name,
icon_url=self.get_icon,
icon_url=self.icon_url,
)
def ui_user_settings(self) -> Optional[UserSettingSerializer]:
icon = self.get_icon
icon = self.icon_url
if not icon:
icon = static(f"authentik/sources/{self.slug}.svg")
return UserSettingSerializer(

View file

@ -69,7 +69,6 @@ class AuthenticatorSMSStageView(ChallengeStageView):
stage: AuthenticatorSMSStage = self.executor.current_stage
hashed_number = hash_phone_number(phone_number)
query = Q(phone_number=hashed_number) | Q(phone_number=phone_number)
print(SMSDevice.objects.filter(query, stage=stage.pk))
if SMSDevice.objects.filter(query, stage=stage.pk).exists():
raise ValidationError(_("Invalid phone number"))
# No code yet, but we have a phone number, so send a verification message

View file

@ -199,11 +199,9 @@ class AuthenticatorSMSStageTests(FlowTestCase):
sms_send_mock,
),
):
print(self.client.session[SESSION_KEY_PLAN])
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
print(response.content.decode())
self.assertStageResponse(
response,
self.flow,

View file

@ -1,6 +1,7 @@
"""AuthenticatorTOTPStage API Views"""
from django_filters.rest_framework.backends import DjangoFilterBackend
from rest_framework import mixins
from rest_framework.fields import ChoiceField
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAdminUser
from rest_framework.serializers import ModelSerializer
@ -9,12 +10,18 @@ from rest_framework.viewsets import GenericViewSet, ModelViewSet
from authentik.api.authorization import OwnerFilter, OwnerPermissions
from authentik.core.api.used_by import UsedByMixin
from authentik.flows.api.stages import StageSerializer
from authentik.stages.authenticator_totp.models import AuthenticatorTOTPStage, TOTPDevice
from authentik.stages.authenticator_totp.models import (
AuthenticatorTOTPStage,
TOTPDevice,
TOTPDigits,
)
class AuthenticatorTOTPStageSerializer(StageSerializer):
"""AuthenticatorTOTPStage Serializer"""
digits = ChoiceField(choices=TOTPDigits.choices)
class Meta:
model = AuthenticatorTOTPStage
fields = StageSerializer.Meta.fields + ["configure_flow", "friendly_name", "digits"]

View file

@ -29,4 +29,14 @@ class Migration(migrations.Migration):
name="totpdevice",
options={"verbose_name": "TOTP Device", "verbose_name_plural": "TOTP Devices"},
),
migrations.AlterField(
model_name="authenticatortotpstage",
name="digits",
field=models.IntegerField(
choices=[
("6", "6 digits, widely compatible"),
("8", "8 digits, not compatible with apps like Google Authenticator"),
]
),
),
]

View file

@ -19,7 +19,7 @@ from authentik.stages.authenticator.oath import TOTP
from authentik.stages.authenticator.util import hex_validator, random_hex
class TOTPDigits(models.IntegerChoices):
class TOTPDigits(models.TextChoices):
"""OTP Time Digits"""
SIX = 6, _("6 digits, widely compatible")
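
The practical effect of switching to `TextChoices` (standalone sketch; labels shortened and gettext omitted): member values are coerced to strings, which is exactly the `"6"`/`"8"` enum the API schema change below reflects.

    from django.db import models

    class TOTPDigitsSketch(models.TextChoices):
        SIX = 6, "6 digits, widely compatible"
        EIGHT = 8, "8 digits, not compatible with apps like Google Authenticator"

    assert TOTPDigitsSketch.SIX.value == "6"  # the int is coerced to str
    assert TOTPDigitsSketch.values == ["6", "8"]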

View file

@ -184,6 +184,7 @@ class AuthenticatorValidateStageDuoTests(FlowTestCase):
"args": {},
"method": "GET",
"path": f"/api/v3/flows/executor/{flow.slug}/",
"user_agent": "",
},
},
)

View file

@ -1,5 +1,6 @@
"""authentik multi-stage authentication engine"""
from datetime import timedelta
from uuid import uuid4
from django.contrib import messages
from django.http import HttpRequest, HttpResponse
@ -52,16 +53,11 @@ class EmailStageView(ChallengeStageView):
kwargs={"flow_slug": self.executor.flow.slug},
)
# Parse query string from current URL (full query string)
query_params = QueryDict(self.request.META.get("QUERY_STRING", ""), mutable=True)
# this view is only run within a flow executor, where we need to get the query string
# from the query= parameter (double encoded); but for the redirect
# we need to expand it since it'll go through the flow interface
query_params = QueryDict(self.request.GET.get(QS_QUERY), mutable=True)
query_params.pop(QS_KEY_TOKEN, None)
# Check for nested query string used by flow executor, and remove any
# kind of flow token from that
if QS_QUERY in query_params:
inner_query_params = QueryDict(query_params.get(QS_QUERY), mutable=True)
inner_query_params.pop(QS_KEY_TOKEN, None)
query_params[QS_QUERY] = inner_query_params.urlencode()
query_params.update(kwargs)
full_url = base_url
if len(query_params) > 0:
@ -75,7 +71,7 @@ class EmailStageView(ChallengeStageView):
valid_delta = timedelta(
minutes=current_stage.token_expiry + 1
) # + 1 because django timesince always rounds down
identifier = slugify(f"ak-email-stage-{current_stage.name}-{pending_user}")
identifier = slugify(f"ak-email-stage-{current_stage.name}-{str(uuid4())}")
# Don't check for validity here, we only care if the token exists
tokens = FlowToken.objects.filter(identifier=identifier)
if not tokens.exists():
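
The double encoding handled above, in miniature (stdlib-only sketch with assumed values): the flow executor carries the original query string inside the `query=` parameter, so the stage decodes that inner string and strips any stale flow token from it.

    from urllib.parse import parse_qs, urlencode

    inner_qs = urlencode({"foo": "bar", "flow_token": "stale-token"})
    outer_qs = urlencode({"query": inner_qs})  # what the executor URL carries
    inner = parse_qs(parse_qs(outer_qs)["query"][0])
    inner.pop("flow_token", None)              # drop any old token
    assert urlencode(inner, doseq=True) == "foo=bar"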

View file

@ -259,7 +259,7 @@ class TestEmailStage(FlowTestCase):
session.save()
url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
url += "?foo=bar"
url += "?query=" + urlencode({"foo": "bar"})
request = self.factory.get(url)
stage_view = EmailStageView(
FlowExecutorView(
@ -273,31 +273,3 @@ class TestEmailStage(FlowTestCase):
stage_view.get_full_url(**{QS_KEY_TOKEN: token}),
f"http://testserver/if/flow/{self.flow.slug}/?foo=bar&flow_token={token}",
)
def test_url_existing_params_nested(self):
"""Test to ensure that URL params are preserved in the URL being sent (including nested)"""
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
url = reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
url += "?foo=bar&"
url += "query=" + urlencode({"nested": "value"})
request = self.factory.get(url)
stage_view = EmailStageView(
FlowExecutorView(
request=request,
flow=self.flow,
),
request=request,
)
token = generate_id()
self.assertEqual(
stage_view.get_full_url(**{QS_KEY_TOKEN: token}),
(
f"http://testserver/if/flow/{self.flow.slug}"
f"/?foo=bar&query=nested%3Dvalue&flow_token={token}"
),
)

View file

@ -33,6 +33,7 @@ class IdentificationStageSerializer(StageSerializer):
"passwordless_flow",
"sources",
"show_source_labels",
"pretend_user_exists",
]

View file

@ -0,0 +1,23 @@
# Generated by Django 4.2.7 on 2023-11-17 16:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_stages_identification",
"0002_auto_20200530_2204_squashed_0013_identificationstage_passwordless_flow",
),
]
operations = [
migrations.AddField(
model_name="identificationstage",
name="pretend_user_exists",
field=models.BooleanField(
default=True,
help_text="When enabled, the stage will succeed and continue even when incorrect user info is entered.",
),
),
]

View file

@ -54,6 +54,13 @@ class IdentificationStage(Stage):
"entered will be shown"
),
)
pretend_user_exists = models.BooleanField(
default=True,
help_text=_(
"When enabled, the stage will succeed and continue even when incorrect user info "
"is entered."
),
)
enrollment_flow = models.ForeignKey(
Flow,

View file

@ -121,8 +121,8 @@ class IdentificationChallengeResponse(ChallengeResponse):
self.pre_user = self.stage.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
if not current_stage.show_matched_user:
self.stage.executor.plan.context[PLAN_CONTEXT_PENDING_USER_IDENTIFIER] = uid_field
if self.stage.executor.flow.designation == FlowDesignation.RECOVERY:
# When used in a recovery flow, always continue to not disclose if a user exists
# when `pretend` is enabled, continue regardless
if current_stage.pretend_user_exists:
return attrs
raise ValidationError("Failed to authenticate.")
self.pre_user = pre_user
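
The branch above as a standalone behavioral sketch (a plain exception stands in for DRF's ValidationError):

    def validate_unknown_user(pretend_user_exists: bool, attrs: dict) -> dict:
        # When the identifier matched no user, either pretend success (so
        # user existence is never disclosed) or fail the stage outright.
        if pretend_user_exists:
            return attrs
        raise ValueError("Failed to authenticate.")

    assert validate_unknown_user(True, {"uid_field": "nobody"}) == {"uid_field": "nobody"}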

View file

@ -28,6 +28,7 @@ class TestIdentificationStage(FlowTestCase):
self.stage = IdentificationStage.objects.create(
name="identification",
user_fields=[UserFields.E_MAIL],
pretend_user_exists=False,
)
self.stage.sources.set([source])
self.stage.save()
@ -106,6 +107,26 @@ class TestIdentificationStage(FlowTestCase):
form_data,
)
self.assertEqual(response.status_code, 200)
self.assertStageResponse(
response,
self.flow,
component="ak-stage-identification",
response_errors={
"non_field_errors": [{"string": "Failed to authenticate.", "code": "invalid"}]
},
)
def test_invalid_with_username_pretend(self):
"""Test invalid with username (user exists but stage only allows email)"""
self.stage.pretend_user_exists = True
self.stage.save()
form_data = {"uid_field": self.user.username}
response = self.client.post(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
form_data,
)
self.assertEqual(response.status_code, 200)
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
def test_invalid_no_fields(self):
"""Test invalid with username (no user fields are enabled)"""

View file

@ -6,6 +6,7 @@ from django.urls import reverse
from authentik.core.models import USER_ATTRIBUTE_SOURCES, Group, Source, User, UserSourceConnection
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.events.models import Event, EventAction
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
@ -58,11 +59,33 @@ class TestUserWriteStage(FlowTestCase):
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
user_qs = User.objects.filter(username=plan.context[PLAN_CONTEXT_PROMPT]["username"])
self.assertTrue(user_qs.exists())
self.assertTrue(user_qs.first().check_password(password))
self.assertEqual(
list(user_qs.first().ak_groups.order_by("name")), [self.other_group, self.group]
)
user = user_qs.first()
self.assertTrue(user.check_password(password))
self.assertEqual(list(user.ak_groups.order_by("name")), [self.other_group, self.group])
self.assertEqual(user.attributes, {USER_ATTRIBUTE_SOURCES: [self.source.name]})
self.assertTrue(
Event.objects.filter(
action=EventAction.MODEL_CREATED,
context__model={
"app": "authentik_core",
"model_name": "user",
"pk": user.pk,
"name": "name",
},
)
)
self.assertTrue(
Event.objects.filter(
action=EventAction.MODEL_UPDATED,
context__model={
"app": "authentik_core",
"model_name": "user",
"pk": user.pk,
"name": "name",
},
)
)
self.assertEqual(user_qs.first().attributes, {USER_ATTRIBUTE_SOURCES: [self.source.name]})
def test_user_update(self):
"""Test update of existing user"""

View file

@ -6241,10 +6241,10 @@
"title": "Friendly name"
},
"digits": {
"type": "integer",
"type": "string",
"enum": [
6,
8
"6",
"8"
],
"title": "Digits"
}
@ -7425,6 +7425,11 @@
"show_source_labels": {
"type": "boolean",
"title": "Show source labels"
},
"pretend_user_exists": {
"type": "boolean",
"title": "Pretend user exists",
"description": "When enabled, the stage will succeed and continue even when incorrect user info is entered."
}
},
"required": []

View file

@ -32,7 +32,7 @@ services:
volumes:
- redis:/data
server:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.1}
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.4}
restart: unless-stopped
command: server
environment:
@ -53,7 +53,7 @@ services:
- postgresql
- redis
worker:
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.1}
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2023.10.4}
restart: unless-stopped
command: worker
environment:

go.mod
View file

@ -13,24 +13,24 @@ require (
github.com/go-openapi/strfmt v0.21.7
github.com/golang-jwt/jwt v3.2.2+incompatible
github.com/google/uuid v1.4.0
github.com/gorilla/handlers v1.5.1
github.com/gorilla/mux v1.8.0
github.com/gorilla/securecookie v1.1.1
github.com/gorilla/sessions v1.2.1
github.com/gorilla/websocket v1.5.0
github.com/gorilla/handlers v1.5.2
github.com/gorilla/mux v1.8.1
github.com/gorilla/securecookie v1.1.2
github.com/gorilla/sessions v1.2.2
github.com/gorilla/websocket v1.5.1
github.com/jellydator/ttlcache/v3 v3.1.0
github.com/mitchellh/mapstructure v1.5.0
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
github.com/pires/go-proxyproto v0.7.0
github.com/prometheus/client_golang v1.17.0
github.com/redis/go-redis/v9 v9.2.1
github.com/redis/go-redis/v9 v9.3.0
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.7.0
github.com/spf13/cobra v1.8.0
github.com/stretchr/testify v1.8.4
goauthentik.io/api/v3 v3.2023101.1
goauthentik.io/api/v3 v3.2023104.1
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
golang.org/x/oauth2 v0.13.0
golang.org/x/sync v0.4.0
golang.org/x/oauth2 v0.15.0
golang.org/x/sync v0.5.0
gopkg.in/yaml.v2 v2.4.0
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
)
@ -42,7 +42,7 @@ require (
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/felixge/httpsnoop v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.3 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.5 // indirect
github.com/go-http-utils/fresh v0.0.0-20161124030543-7231e26a4b27 // indirect
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a // indirect
@ -72,10 +72,10 @@ require (
go.mongodb.org/mongo-driver v1.11.3 // indirect
go.opentelemetry.io/otel v1.14.0 // indirect
go.opentelemetry.io/otel/trace v1.14.0 // indirect
golang.org/x/crypto v0.14.0 // indirect
golang.org/x/net v0.17.0 // indirect
golang.org/x/sys v0.13.0 // indirect
golang.org/x/text v0.13.0 // indirect
golang.org/x/crypto v0.16.0 // indirect
golang.org/x/net v0.19.0 // indirect
golang.org/x/sys v0.15.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/protobuf v1.31.0 // indirect
gopkg.in/square/go-jose.v2 v2.5.1 // indirect

go.sum
View file

@ -62,7 +62,7 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk=
github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@ -73,8 +73,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ=
github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/getsentry/sentry-go v0.25.0 h1:q6Eo+hS+yoJlTO3uu/azhQadsD8V+jQn2D8VvX1eOyI=
github.com/getsentry/sentry-go v0.25.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY=
github.com/go-asn1-ber/asn1-ber v1.5.5 h1:MNHlNMBDgEKD4TcKr36vQN68BA00aDfjIt3/bD50WnA=
@ -200,6 +200,8 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@ -216,16 +218,16 @@ github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4=
github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY=
github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
@ -295,8 +297,8 @@ github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdO
github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI=
github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY=
github.com/redis/go-redis/v9 v9.2.1 h1:WlYJg71ODF0dVspZZCpYmoF1+U1Jjk9Rwd7pq6QmlCg=
github.com/redis/go-redis/v9 v9.2.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
github.com/redis/go-redis/v9 v9.3.0 h1:RiVDjmig62jIWp7Kk4XVLs0hzV6pI3PyTnnL0cnn0u0=
github.com/redis/go-redis/v9 v9.3.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
@ -309,8 +311,8 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
@ -356,8 +358,8 @@ go.opentelemetry.io/otel/trace v1.14.0 h1:wp2Mmvj41tDsyAJXiWDWpfNsOiIyd38fy85pyK
go.opentelemetry.io/otel/trace v1.14.0/go.mod h1:8avnQLK+CG77yNLUae4ea2JDQ6iT+gozhnZjy/rw9G8=
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
goauthentik.io/api/v3 v3.2023101.1 h1:KIQ4wmxjE+geAVB0wBfmxW9Uzo/tA0dbd2hSUJ7YJ3M=
goauthentik.io/api/v3 v3.2023101.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
goauthentik.io/api/v3 v3.2023104.1 h1:cvAsgoKP/fmO4fzifx0OyICknauFeyN88C4Z1LdWXDs=
goauthentik.io/api/v3 v3.2023104.1/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
@ -370,8 +372,8 @@ golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY=
golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -438,16 +440,16 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY=
golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0=
golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ=
golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -460,8 +462,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -502,8 +504,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@ -519,8 +521,9 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

View file

@ -27,14 +27,11 @@ type Config struct {
type RedisConfig struct {
Host string `yaml:"host" env:"AUTHENTIK_REDIS__HOST"`
Port int `yaml:"port" env:"AUTHENTIK_REDIS__PORT"`
DB int `yaml:"db" env:"AUTHENTIK_REDIS__DB"`
Username string `yaml:"username" env:"AUTHENTIK_REDIS__USERNAME"`
Password string `yaml:"password" env:"AUTHENTIK_REDIS__PASSWORD"`
TLS bool `yaml:"tls" env:"AUTHENTIK_REDIS__TLS"`
TLSReqs string `yaml:"tls_reqs" env:"AUTHENTIK_REDIS__TLS_REQS"`
DB int `yaml:"cache_db" env:"AUTHENTIK_REDIS__DB"`
CacheTimeout int `yaml:"cache_timeout" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT"`
CacheTimeoutFlows int `yaml:"cache_timeout_flows" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_FLOWS"`
CacheTimeoutPolicies int `yaml:"cache_timeout_policies" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_POLICIES"`
CacheTimeoutReputation int `yaml:"cache_timeout_reputation" env:"AUTHENTIK_REDIS__CACHE_TIMEOUT_REPUTATION"`
}
type ListenConfig struct {

View file

@ -29,4 +29,4 @@ func UserAgent() string {
return fmt.Sprintf("authentik@%s", FullVersion())
}
const VERSION = "2023.10.1"
const VERSION = "2023.10.4"

View file

@ -29,16 +29,6 @@ var (
Name: "authentik_outpost_flow_timing_post_seconds",
Help: "Duration it took to send a challenge in seconds",
}, []string{"stage", "flow"})
// NOTE: the following metrics are kept for compatibility purpose
FlowTimingGetLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "authentik_outpost_flow_timing_get",
Help: "Duration it took to get a challenge",
}, []string{"stage", "flow"})
FlowTimingPostLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "authentik_outpost_flow_timing_post",
Help: "Duration it took to send a challenge",
}, []string{"stage", "flow"})
)
type SolverFunction func(*api.ChallengeTypes, api.ApiFlowsExecutorSolveRequest) (api.FlowChallengeResponseRequest, error)
@ -198,10 +188,6 @@ func (fe *FlowExecutor) getInitialChallenge() (*api.ChallengeTypes, error) {
"stage": ch.GetComponent(),
"flow": fe.flowSlug,
}).Observe(float64(gcsp.EndTime.Sub(gcsp.StartTime)) / float64(time.Second))
FlowTimingGetLegacy.With(prometheus.Labels{
"stage": ch.GetComponent(),
"flow": fe.flowSlug,
}).Observe(float64(gcsp.EndTime.Sub(gcsp.StartTime)))
return challenge, nil
}
@ -259,10 +245,6 @@ func (fe *FlowExecutor) solveFlowChallenge(challenge *api.ChallengeTypes, depth
"stage": ch.GetComponent(),
"flow": fe.flowSlug,
}).Observe(float64(scsp.EndTime.Sub(scsp.StartTime)) / float64(time.Second))
FlowTimingPostLegacy.With(prometheus.Labels{
"stage": ch.GetComponent(),
"flow": fe.flowSlug,
}).Observe(float64(scsp.EndTime.Sub(scsp.StartTime)))
if depth >= 10 {
return false, errors.New("exceeded stage recursion depth")
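
The metric consolidation in miniature (a Python sketch with `prometheus_client`, not the outpost's Go code): only the `_seconds` histograms remain, and durations are observed in seconds rather than raw duration values.

    from prometheus_client import Histogram

    FLOW_TIMING_GET = Histogram(
        "authentik_outpost_flow_timing_get_seconds",
        "Duration it took to get a challenge in seconds",
        ["stage", "flow"],
    )
    # Labels are illustrative; the Go code uses the stage component and flow slug.
    FLOW_TIMING_GET.labels(stage="identification", flow="default-auth").observe(0.123)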

View file

@ -22,11 +22,6 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
"type": "bind",
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": ls.ac.Outpost.Name,
"type": "bind",
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)))
req.Log().WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Bind request")
}()
@ -55,12 +50,6 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
"reason": "no_provider",
"app": "",
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ls.ac.Outpost.Name,
"type": "bind",
"reason": "no_provider",
"app": "",
}).Inc()
return ldap.LDAPResultInsufficientAccessRights, nil
}

View file

@ -47,12 +47,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
"reason": "flow_error",
"app": db.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "flow_error",
"app": db.si.GetAppSlug(),
}).Inc()
req.Log().WithError(err).Warning("failed to execute flow")
return ldap.LDAPResultInvalidCredentials, nil
}
@ -63,12 +57,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
"reason": "invalid_credentials",
"app": db.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "invalid_credentials",
"app": db.si.GetAppSlug(),
}).Inc()
req.Log().Info("Invalid credentials")
return ldap.LDAPResultInvalidCredentials, nil
}
@ -82,12 +70,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
"reason": "access_denied",
"app": db.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "access_denied",
"app": db.si.GetAppSlug(),
}).Inc()
return ldap.LDAPResultInsufficientAccessRights, nil
}
if err != nil {
@ -97,12 +79,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
"reason": "access_check_fail",
"app": db.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "access_check_fail",
"app": db.si.GetAppSlug(),
}).Inc()
req.Log().WithError(err).Warning("failed to check access")
return ldap.LDAPResultOperationsError, nil
}
@ -117,12 +93,6 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
"reason": "user_info_fail",
"app": db.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": db.si.GetOutpostName(),
"type": "bind",
"reason": "user_info_fail",
"app": db.si.GetAppSlug(),
}).Inc()
req.Log().WithError(err).Warning("failed to get user info")
return ldap.LDAPResultOperationsError, nil
}

View file

@ -22,16 +22,6 @@ var (
Name: "authentik_outpost_ldap_requests_rejected_total",
Help: "Total number of rejected requests",
}, []string{"outpost_name", "type", "reason", "app"})
// NOTE: the following metrics are kept for compatibility purpose
RequestsLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "authentik_outpost_ldap_requests",
Help: "The total number of configured providers",
}, []string{"outpost_name", "type", "app"})
RequestsRejectedLegacy = promauto.NewCounterVec(prometheus.CounterOpts{
Name: "authentik_outpost_ldap_requests_rejected",
Help: "Total number of rejected requests",
}, []string{"outpost_name", "type", "reason", "app"})
)
func RunServer() {

View file

@ -23,11 +23,6 @@ func (ls *LDAPServer) Search(bindDN string, searchReq ldap.SearchRequest, conn n
"type": "search",
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": ls.ac.Outpost.Name,
"type": "search",
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)))
req.Log().WithField("attributes", searchReq.Attributes).WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Search request")
}()

View file

@ -45,12 +45,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "empty_bind_dn",
"app": ds.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ds.si.GetOutpostName(),
"type": "search",
"reason": "empty_bind_dn",
"app": ds.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: Anonymous BindDN not allowed %s", req.BindDN)
}
if !utils.HasSuffixNoCase(req.BindDN, ","+baseDN) {
@ -60,12 +54,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "invalid_bind_dn",
"app": ds.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ds.si.GetOutpostName(),
"type": "search",
"reason": "invalid_bind_dn",
"app": ds.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: BindDN %s not in our BaseDN %s", req.BindDN, ds.si.GetBaseDN())
}
@ -78,12 +66,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "user_info_not_cached",
"app": ds.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ds.si.GetOutpostName(),
"type": "search",
"reason": "user_info_not_cached",
"app": ds.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, errors.New("access denied")
}
accsp.Finish()
@ -96,12 +78,6 @@ func (ds *DirectSearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "filter_parse_fail",
"app": ds.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ds.si.GetOutpostName(),
"type": "search",
"reason": "filter_parse_fail",
"app": ds.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultOperationsError}, fmt.Errorf("Search Error: error parsing filter: %s", req.Filter)
}

View file

@ -62,12 +62,6 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "empty_bind_dn",
"app": ms.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ms.si.GetOutpostName(),
"type": "search",
"reason": "empty_bind_dn",
"app": ms.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: Anonymous BindDN not allowed %s", req.BindDN)
}
if !utils.HasSuffixNoCase(req.BindDN, ","+baseDN) {
@ -77,12 +71,6 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "invalid_bind_dn",
"app": ms.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ms.si.GetOutpostName(),
"type": "search",
"reason": "invalid_bind_dn",
"app": ms.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, fmt.Errorf("Search Error: BindDN %s not in our BaseDN %s", req.BindDN, ms.si.GetBaseDN())
}
@ -95,12 +83,6 @@ func (ms *MemorySearcher) Search(req *search.Request) (ldap.ServerSearchResult,
"reason": "user_info_not_cached",
"app": ms.si.GetAppSlug(),
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ms.si.GetOutpostName(),
"type": "search",
"reason": "user_info_not_cached",
"app": ms.si.GetAppSlug(),
}).Inc()
return ldap.ServerSearchResult{ResultCode: ldap.LDAPResultInsufficientAccessRights}, errors.New("access denied")
}
accsp.Finish()

View file

@ -22,11 +22,6 @@ func (ls *LDAPServer) Unbind(boundDN string, conn net.Conn) (ldap.LDAPResultCode
"type": "unbind",
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": ls.ac.Outpost.Name,
"type": "unbind",
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)))
req.Log().WithField("took-ms", span.EndTime.Sub(span.StartTime).Milliseconds()).Info("Unbind request")
}()
@ -55,11 +50,5 @@ func (ls *LDAPServer) Unbind(boundDN string, conn net.Conn) (ldap.LDAPResultCode
"reason": "no_provider",
"app": "",
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": ls.ac.Outpost.Name,
"type": "unbind",
"reason": "no_provider",
"app": "",
}).Inc()
return ldap.LDAPResultOperationsError, nil
}

View file

@ -173,12 +173,6 @@ func NewApplication(p api.ProxyOutpostConfig, c *http.Client, server Server) (*A
"method": r.Method,
"host": web.GetHost(r),
}).Observe(float64(elapsed) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": a.outpostName,
"type": "app",
"method": r.Method,
"host": web.GetHost(r),
}).Observe(float64(elapsed))
})
})
if server.API().GlobalConfig.ErrorReporting.Enabled {
@ -241,7 +235,10 @@ func (a *Application) Mode() api.ProxyMode {
return *a.proxyConfig.Mode
}
func (a *Application) HasQuerySignature(r *http.Request) bool {
func (a *Application) ShouldHandleURL(r *http.Request) bool {
if strings.HasPrefix(r.URL.Path, "/outpost.goauthentik.io") {
return true
}
if strings.EqualFold(r.URL.Query().Get(CallbackSignature), "true") {
return true
}

View file

@ -64,13 +64,6 @@ func (a *Application) configureProxy() error {
"scheme": r.URL.Scheme,
"host": web.GetHost(r),
}).Observe(float64(elapsed) / float64(time.Second))
metrics.UpstreamTimingLegacy.With(prometheus.Labels{
"outpost_name": a.outpostName,
"upstream_host": r.URL.Host,
"method": r.Method,
"scheme": r.URL.Scheme,
"host": web.GetHost(r),
}).Observe(float64(elapsed))
})
return nil
}

View file

@ -71,7 +71,7 @@ func (a *Application) getStore(p api.ProxyOutpostConfig, externalHost *url.URL)
cs.Options.Domain = *p.CookieDomain
cs.Options.SameSite = http.SameSiteLaxMode
cs.Options.MaxAge = maxAge
cs.Options.Path = externalHost.Path
cs.Options.Path = "/"
a.log.WithField("dir", dir).Trace("using filesystem session backend")
return cs
}
@ -131,7 +131,6 @@ func (a *Application) Logout(ctx context.Context, filter func(c Claims) bool) er
}
if rs, ok := a.sessions.(*redisstore.RedisStore); ok {
client := rs.Client()
defer client.Close()
keys, err := client.Keys(ctx, fmt.Sprintf("%s*", RedisKeyPrefix)).Result()
if err != nil {
return err

View file

@ -26,12 +26,6 @@ func (ps *ProxyServer) HandlePing(rw http.ResponseWriter, r *http.Request) {
"host": web.GetHost(r),
"type": "ping",
}).Observe(float64(elapsed) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": ps.akAPI.Outpost.Name,
"method": r.Method,
"host": web.GetHost(r),
"type": "ping",
}).Observe(float64(elapsed))
}
func (ps *ProxyServer) HandleStatic(rw http.ResponseWriter, r *http.Request) {
@ -44,12 +38,6 @@ func (ps *ProxyServer) HandleStatic(rw http.ResponseWriter, r *http.Request) {
"host": web.GetHost(r),
"type": "static",
}).Observe(float64(elapsed) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": ps.akAPI.Outpost.Name,
"method": r.Method,
"host": web.GetHost(r),
"type": "static",
}).Observe(float64(elapsed))
}
func (ps *ProxyServer) lookupApp(r *http.Request) (*application.Application, string) {

View file

@ -22,16 +22,6 @@ var (
Name: "authentik_outpost_proxy_upstream_response_duration_seconds",
Help: "Proxy upstream response latencies in seconds",
}, []string{"outpost_name", "method", "scheme", "host", "upstream_host"})
// NOTE: the following metric is kept for compatibility purpose
RequestsLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "authentik_outpost_proxy_requests",
Help: "The total number of configured providers",
}, []string{"outpost_name", "method", "host", "type"})
UpstreamTimingLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "authentik_outpost_proxy_upstream_time",
Help: "A summary of the duration we wait for the upstream reply",
}, []string{"outpost_name", "method", "scheme", "host", "upstream_host"})
)
func RunServer() {

View file

@ -74,7 +74,7 @@ func (ps *ProxyServer) HandleHost(rw http.ResponseWriter, r *http.Request) bool
if a == nil {
return false
}
if a.HasQuerySignature(r) || a.Mode() == api.PROXYMODE_PROXY {
if a.ShouldHandleURL(r) || a.Mode() == api.PROXYMODE_PROXY {
a.ServeHTTP(rw, r)
return true
}

View file

@ -35,11 +35,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
"reason": "flow_error",
"app": r.pi.appSlug,
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": rs.ac.Outpost.Name,
"reason": "flow_error",
"app": r.pi.appSlug,
}).Inc()
_ = w.Write(r.Response(radius.CodeAccessReject))
return
}
@ -49,11 +44,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
"reason": "invalid_credentials",
"app": r.pi.appSlug,
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": rs.ac.Outpost.Name,
"reason": "invalid_credentials",
"app": r.pi.appSlug,
}).Inc()
_ = w.Write(r.Response(radius.CodeAccessReject))
return
}
@ -66,11 +56,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
"reason": "access_check_fail",
"app": r.pi.appSlug,
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": rs.ac.Outpost.Name,
"reason": "access_check_fail",
"app": r.pi.appSlug,
}).Inc()
return
}
if !access {
@ -81,11 +66,6 @@ func (rs *RadiusServer) Handle_AccessRequest(w radius.ResponseWriter, r *RadiusR
"reason": "access_denied",
"app": r.pi.appSlug,
}).Inc()
metrics.RequestsRejectedLegacy.With(prometheus.Labels{
"outpost_name": rs.ac.Outpost.Name,
"reason": "access_denied",
"app": r.pi.appSlug,
}).Inc()
return
}
_ = w.Write(r.Response(radius.CodeAccessAccept))

View file

@ -47,10 +47,6 @@ func (rs *RadiusServer) ServeRADIUS(w radius.ResponseWriter, r *radius.Request)
"outpost_name": rs.ac.Outpost.Name,
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)) / float64(time.Second))
metrics.RequestsLegacy.With(prometheus.Labels{
"outpost_name": rs.ac.Outpost.Name,
"app": selectedApp,
}).Observe(float64(span.EndTime.Sub(span.StartTime)))
}()
nr := &RadiusRequest{

View file

@ -22,16 +22,6 @@ var (
Name: "authentik_outpost_radius_requests_rejected_total",
Help: "Total number of rejected requests",
}, []string{"outpost_name", "reason", "app"})
// NOTE: the following metric is kept for compatibility purpose
RequestsLegacy = promauto.NewHistogramVec(prometheus.HistogramOpts{
Name: "authentik_outpost_radius_requests",
Help: "The total number of successful requests",
}, []string{"outpost_name", "app"})
RequestsRejectedLegacy = promauto.NewCounterVec(prometheus.CounterOpts{
Name: "authentik_outpost_radius_requests_rejected",
Help: "Total number of rejected requests",
}, []string{"outpost_name", "reason", "app"})
)
func RunServer() {

Some files were not shown because too many files have changed in this diff.