Compare commits
Comparing root/move-... to trustchain (49 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 1cd000dfe2 |  |
|  | 00ae97944a |  |
|  | 9f3ccfb7c7 |  |
|  | 9ed9c39ac8 |  |
|  | 30b6eeee9f |  |
|  | afe2621783 |  |
|  | 8b12c6a01a |  |
|  | f63adfed96 |  |
|  | 9c8fec21cf |  |
|  | 4776d2bcc5 |  |
|  | a15a040362 |  |
|  | fcd6dc1d60 |  |
|  | acc3b59869 |  |
|  | d9d5ac10e6 |  |
|  | 750669dcab |  |
|  | 88a3eed67e |  |
|  | 6c214fffc4 |  |
|  | 70100fc105 |  |
|  | 3c1163fabd |  |
|  | 539e8242ff |  |
|  | 2648333590 |  |
|  | fe828ef993 |  |
|  | 29a6530742 |  |
|  | a6b9274c4f |  |
|  | a2a67161ac |  |
|  | 2e8263a99b |  |
|  | 6b9afed21f |  |
|  | 1eb1f4e0b8 |  |
|  | 7c3d60ec3a |  |
|  | a494c6b6e8 |  |
|  | 6604d3577f |  |
|  | f8bfa7e16a |  |
|  | ea6cf6eabf |  |
|  | 769ce3ce7b |  |
|  | 3891fb3fa8 |  |
|  | 41eb965350 |  |
|  | 8d95612287 |  |
|  | 82b5274b15 |  |
|  | af56ce3d78 |  |
|  | f5c6e7aeb0 |  |
|  | 3809400e93 |  |
|  | 1def9865cf |  |
|  | 3716298639 |  |
|  | c16317d7cf |  |
|  | bbb8fa8269 |  |
|  | e4c251a178 |  |
|  | 0fefd5f522 |  |
|  | 88057db0b0 |  |
|  | 91cb6c9beb |  |
.bumpversion.cfg

```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.10.5
+current_version = 2023.10.6
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
```
.github/workflows/ci-main.yml (18 changes)

```diff
@@ -61,6 +61,10 @@ jobs:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
+      - name: Setup authentik env
+        uses: ./.github/actions/setup
+        with:
+          postgresql_version: ${{ matrix.psql }}
       - name: checkout stable
         run: |
           # Delete all poetry envs
@@ -72,7 +76,7 @@ jobs:
           git checkout version/$(python -c "from authentik import __version__; print(__version__)")
           rm -rf .github/ scripts/
           mv ../.github ../scripts .
-      - name: Setup authentik env (stable)
+      - name: Setup authentik env (ensure stable deps are installed)
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
@@ -86,20 +90,14 @@ jobs:
           git clean -d -fx .
           git checkout $GITHUB_SHA
           # Delete previous poetry env
-          rm -rf /home/runner/.cache/pypoetry/virtualenvs/*
+          rm -rf $(poetry env info --path)
+          poetry install
       - name: Setup authentik env (ensure latest deps are installed)
         uses: ./.github/actions/setup
         with:
           postgresql_version: ${{ matrix.psql }}
       - name: migrate to latest
-        run: |
-          poetry run python -m lifecycle.migrate
-      - name: run tests
-        env:
-          # Test in the main database that we just migrated from the previous stable version
-          AUTHENTIK_POSTGRESQL__TEST__NAME: authentik
-        run: |
-          poetry run make test
+        run: poetry run python -m lifecycle.migrate
   test-unittest:
     name: test-unittest - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
```
.github/workflows/ci-outpost.yml (6 changes)

```diff
@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - name: Prepare and generate API
@@ -37,7 +37,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - name: Setup authentik env
@@ -125,7 +125,7 @@ jobs:
       - uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.sha }}
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - uses: actions/setup-node@v4
```
.github/workflows/codeql-analysis.yml (6 changes)

```diff
@@ -27,10 +27,10 @@ jobs:
       - name: Setup authentik env
         uses: ./.github/actions/setup
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v2
         with:
           languages: ${{ matrix.language }}
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
+        uses: github/codeql-action/autobuild@v2
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v2
```
.github/workflows/gha-cache-cleanup.yml (4 changes)

```diff
@@ -6,10 +6,6 @@ on:
     types:
       - closed

-permissions:
-  # Permission to delete cache
-  actions: write
-
 jobs:
   cleanup:
     runs-on: ubuntu-latest
```
.github/workflows/release-publish.yml (4 changes)

```diff
@@ -67,7 +67,7 @@ jobs:
           - radius
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - name: Set up QEMU
@@ -126,7 +126,7 @@ jobs:
         goarch: [amd64, arm64]
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@v4
         with:
           go-version-file: "go.mod"
       - uses: actions/setup-node@v4
```
.github/workflows/release-tag.yml (2 changes)

```diff
@@ -30,7 +30,7 @@ jobs:
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
       - name: Extract version number
         id: get_version
-        uses: actions/github-script@v7
+        uses: actions/github-script@v6
         with:
           github-token: ${{ steps.generate_token.outputs.token }}
           script: |
```
.github/workflows/repo-stale.yml (2 changes)

```diff
@@ -18,7 +18,7 @@ jobs:
         with:
           app_id: ${{ secrets.GH_APP_ID }}
           private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/stale@v9
+      - uses: actions/stale@v8
         with:
           repo-token: ${{ steps.generate_token.outputs.token }}
           days-before-stale: 60
```
.github/workflows/translation-advice.yml (7 changes)

```diff
@@ -7,12 +7,7 @@ on:
     paths:
       - "!**"
       - "locale/**"
-      - "!locale/en/**"
-      - "web/xliff/**"
-
-permissions:
-  # Permission to write comment
-  pull-requests: write
+      - "web/src/locales/**"

 jobs:
   post-comment:
```
.github/workflows/translation-rename.yml (4 changes)

```diff
@@ -6,10 +6,6 @@ on:
   pull_request:
     types: [opened, reopened]

-permissions:
-  # Permission to rename PR
-  pull-requests: write
-
 jobs:
   rename_pr:
     runs-on: ubuntu-latest
```
.gitignore (6 changes)

```diff
@@ -7,7 +7,9 @@
 *.pot
 *.pyc
 __pycache__/
 local_settings.py
 db.sqlite3
+media
+
 # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
 # in your Git repository. Update and uncomment the following line accordingly.
@@ -190,8 +192,10 @@ pyvenv.cfg
 pip-selfcheck.json

 # End of https://www.gitignore.io/api/python,django
 *.env.yml
 /static/
-local.env.yml
+
+media/
+*mmdb

 .idea/
```
.vscode/extensions.json (1 change)

```diff
@@ -14,7 +14,6 @@
     "ms-python.pylint",
     "ms-python.python",
     "ms-python.vscode-pylance",
-    "ms-python.black-formatter",
     "redhat.vscode-yaml",
     "Tobermory.es6-string-html",
     "unifiedjs.vscode-mdx",
```
.vscode/settings.json (2 changes)

```diff
@@ -19,8 +19,10 @@
         "slo",
         "scim",
     ],
+    "python.linting.pylintEnabled": true,
     "todo-tree.tree.showCountsInTree": true,
     "todo-tree.tree.showBadges": true,
+    "python.formatting.provider": "black",
     "yaml.customTags": [
         "!Find sequence",
         "!KeyOf scalar",
```
Dockerfile (47 changes)

```diff
@@ -37,7 +37,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 RUN npm run build

 # Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.5-bookworm AS go-builder
+FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder

 ARG TARGETOS
 ARG TARGETARCH
@@ -71,7 +71,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
 # Stage 4: MaxMind GeoIP
 FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip

-ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
+ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
 ENV GEOIPUPDATE_VERBOSE="true"
 ENV GEOIPUPDATE_ACCOUNT_ID_FILE="/run/secrets/GEOIPUPDATE_ACCOUNT_ID"
 ENV GEOIPUPDATE_LICENSE_KEY_FILE="/run/secrets/GEOIPUPDATE_LICENSE_KEY"
@@ -83,7 +83,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

 # Stage 5: Python dependencies
-FROM docker.io/python:3.12.1-slim-bookworm AS python-deps
+FROM docker.io/python:3.11.5-bookworm AS python-deps

 WORKDIR /ak-root/poetry

@@ -108,7 +108,7 @@ RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
     poetry install --only=main --no-ansi --no-interaction

 # Stage 6: Run
-FROM docker.io/python:3.12.1-slim-bookworm AS final-image
+FROM docker.io/python:3.11.5-slim-bookworm AS final-image

 ARG GIT_BUILD_HASH
 ARG VERSION
@@ -120,47 +120,44 @@ LABEL org.opencontainers.image.source https://github.com/goauthentik/authentik
 LABEL org.opencontainers.image.version ${VERSION}
 LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}

-WORKDIR /ak-root
+WORKDIR /

 # We cannot cache this layer otherwise we'll end up with a bigger image
 RUN apt-get update && \
     # Required for runtime
-    apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 ca-certificates && \
+    apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
     # Required for bootstrap & healtcheck
     apt-get install -y --no-install-recommends runit && \
     apt-get clean && \
     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
-    mkdir -p /data/certs /data/media /data/blueprints && \
-    mkdir -p /ak-root/blueprints && \
-    ln -s /data/blueprints /ak-root/blueprints/local && \
+    mkdir -p /certs /media /blueprints && \
     mkdir -p /authentik/.ssh && \
     mkdir -p /ak-root && \
-    chown authentik:authentik /data/certs /data/media /authentik/.ssh /ak-root
+    chown authentik:authentik /certs /media /authentik/.ssh /ak-root

-COPY ./authentik/ /ak-root/authentik
-COPY ./pyproject.toml /ak-root
-COPY ./poetry.lock /ak-root
-COPY ./schemas /ak-root/schemas
-COPY ./locale /ak-root/locale
-COPY ./tests /ak-root/tests
-COPY ./manage.py /ak-root/
-COPY ./blueprints /ak-root/blueprints
-COPY ./lifecycle/ /ak-root/lifecycle
+COPY ./authentik/ /authentik
+COPY ./pyproject.toml /
+COPY ./poetry.lock /
+COPY ./schemas /schemas
+COPY ./locale /locale
+COPY ./tests /tests
+COPY ./manage.py /
+COPY ./blueprints /blueprints
+COPY ./lifecycle/ /lifecycle
 COPY --from=go-builder /go/authentik /bin/authentik
-COPY --from=python-deps /ak-root/venv /ak-root/venv
+COPY --from=python-deps /work/venv /ak-root/venv
-COPY --from=web-builder /work/web/dist/ /ak-root/web/dist/
-COPY --from=web-builder /work/web/authentik/ /ak-root/web/authentik/
-COPY --from=website-builder /work/website/help/ /ak-root/website/help/
-COPY --from=geoip /usr/share/GeoIP /data/geoip
+COPY --from=web-builder /work/web/dist/ /web/dist/
+COPY --from=web-builder /work/web/authentik/ /web/authentik/
+COPY --from=website-builder /work/website/help/ /website/help/
+COPY --from=geoip /usr/share/GeoIP /geoip

 USER 1000

 ENV TMPDIR=/dev/shm/ \
     PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \
-    PATH="/ak-root/venv/bin:/ak-root/lifecycle:$PATH" \
+    PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
     VENV_PATH="/ak-root/venv" \
     POETRY_VIRTUALENVS_CREATE=false
```
Makefile (7 changes)

```diff
@@ -110,14 +110,11 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
		--markdown /local/diff.md \
		/local/old_schema.yml /local/schema.yml
	rm old_schema.yml
-	sed -i 's/{/\&#123;/g' diff.md
-	sed -i 's/}/\&#125;/g' diff.md
	npx prettier --write diff.md

 gen-clean:
-	rm -rf gen-go-api/
-	rm -rf gen-ts-api/
-	rm -rf web/node_modules/@goauthentik/api/
+	rm -rf web/api/src/
+	rm -rf api/

 gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
	docker run \
```
SECURITY.md

```diff
@@ -1,9 +1,5 @@
 authentik takes security very seriously. We follow the rules of [responsible disclosure](https://en.wikipedia.org/wiki/Responsible_disclosure), and we urge our community to do so as well, instead of reporting vulnerabilities publicly. This allows us to patch the issue quickly, announce it's existence and release the fixed version.

-## Independent audits and pentests
-
-In May/June of 2023 [Cure53](https://cure53.de) conducted an audit and pentest. The [results](https://cure53.de/pentest-report_authentik.pdf) are published on the [Cure53 website](https://cure53.de/#publications-2023). For more details about authentik's response to the findings of the audit refer to [2023-06 Cure53 Code audit](https://goauthentik.io/docs/security/2023-06-cure53).
-
 ## What authentik classifies as a CVE

 CVE (Common Vulnerability and Exposure) is a system designed to aggregate all vulnerabilities. As such, a CVE will be issued when there is a either vulnerability or exposure. Per NIST, A vulnerability is:
```
authentik/__init__.py

```diff
@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional

-__version__ = "2023.10.5"
+__version__ = "2023.10.6"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
```
```diff
@@ -30,7 +30,7 @@ class RuntimeDict(TypedDict):
     uname: str


-class SystemInfoSerializer(PassiveSerializer):
+class SystemSerializer(PassiveSerializer):
     """Get system information."""

     http_headers = SerializerMethodField()
@@ -91,14 +91,14 @@ class SystemView(APIView):
     permission_classes = [HasPermission("authentik_rbac.view_system_info")]
     pagination_class = None
     filter_backends = []
-    serializer_class = SystemInfoSerializer
+    serializer_class = SystemSerializer

-    @extend_schema(responses={200: SystemInfoSerializer(many=False)})
+    @extend_schema(responses={200: SystemSerializer(many=False)})
     def get(self, request: Request) -> Response:
         """Get system information."""
-        return Response(SystemInfoSerializer(request).data)
+        return Response(SystemSerializer(request).data)

-    @extend_schema(responses={200: SystemInfoSerializer(many=False)})
+    @extend_schema(responses={200: SystemSerializer(many=False)})
     def post(self, request: Request) -> Response:
         """Get system information."""
-        return Response(SystemInfoSerializer(request).data)
+        return Response(SystemSerializer(request).data)
```
```diff
@@ -12,8 +12,6 @@ from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import Token, TokenIntents, User, UserTypes
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
-from authentik.lib.generators import generate_id
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost
-from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
 from authentik.providers.oauth2.models import AccessToken, OAuth2Provider

@@ -51,12 +49,8 @@ class TestAPIAuth(TestCase):
         with self.assertRaises(AuthenticationFailed):
             bearer_auth(f"Bearer {token.key}".encode())

-    @reconcile_app("authentik_outposts")
-    def test_managed_outpost_fail(self):
+    def test_managed_outpost(self):
         """Test managed outpost"""
-        outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        outpost.user.delete()
-        outpost.delete()
         with self.assertRaises(AuthenticationFailed):
             bearer_auth(f"Bearer {settings.SECRET_KEY}".encode())
```
authentik/api/v3/config.py

```diff
@@ -19,7 +19,7 @@ from rest_framework.response import Response
 from rest_framework.views import APIView

 from authentik.core.api.utils import PassiveSerializer
-from authentik.events.context_processors.base import get_context_processors
+from authentik.events.geo import GEOIP_READER
 from authentik.lib.config import CONFIG

 capabilities = Signal()
@@ -30,7 +30,6 @@ class Capabilities(models.TextChoices):

     CAN_SAVE_MEDIA = "can_save_media"
     CAN_GEO_IP = "can_geo_ip"
-    CAN_ASN = "can_asn"
     CAN_IMPERSONATE = "can_impersonate"
     CAN_DEBUG = "can_debug"
     IS_ENTERPRISE = "is_enterprise"
@@ -69,9 +68,8 @@ class ConfigView(APIView):
         deb_test = settings.DEBUG or settings.TEST
         if Path(settings.MEDIA_ROOT).is_mount() or deb_test:
             caps.append(Capabilities.CAN_SAVE_MEDIA)
-        for processor in get_context_processors():
-            if cap := processor.capability():
-                caps.append(cap)
+        if GEOIP_READER.enabled:
+            caps.append(Capabilities.CAN_GEO_IP)
         if CONFIG.get_bool("impersonation"):
             caps.append(Capabilities.CAN_IMPERSONATE)
         if settings.DEBUG:  # pragma: no cover
@@ -95,10 +93,10 @@ class ConfigView(APIView):
                     "traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
                 },
                 "capabilities": self.get_capabilities(),
-                "cache_timeout": CONFIG.get_int("cache.timeout"),
-                "cache_timeout_flows": CONFIG.get_int("cache.timeout_flows"),
-                "cache_timeout_policies": CONFIG.get_int("cache.timeout_policies"),
-                "cache_timeout_reputation": CONFIG.get_int("cache.timeout_reputation"),
+                "cache_timeout": CONFIG.get_int("redis.cache_timeout"),
+                "cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
+                "cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
+                "cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
             }
         )
```
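Both branches read these values through authentik's dotted-path `CONFIG` loader; only the key names differ (`cache.*` on one side, `redis.*` on the other). As a hedged illustration of how such a dotted lookup resolves against nested YAML-style config, here is a minimal sketch; the helper and the sample dict are assumptions for demonstration, not authentik's actual `ConfigLoader`:

```python
from typing import Any


def get_path(config: dict, path: str, default: Any = None) -> Any:
    """Resolve a dotted key like "cache.timeout" against nested dicts."""
    node: Any = config
    for part in path.split("."):
        if not isinstance(node, dict) or part not in node:
            return default
        node = node[part]
    return node


# Hypothetical config snippet covering both key layouts seen in the diff
config = {"cache": {"timeout": 300}, "redis": {"cache_timeout": 300}}
assert get_path(config, "cache.timeout") == 300        # newer key layout
assert get_path(config, "redis.cache_timeout") == 300  # 2023.10 key layout
```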
```diff
@@ -3,7 +3,7 @@ from django.utils.translation import gettext_lazy as _
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
-from rest_framework.fields import CharField, DateTimeField
+from rest_framework.fields import CharField, DateTimeField, JSONField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer
@@ -15,7 +15,7 @@ from authentik.blueprints.v1.importer import Importer
 from authentik.blueprints.v1.oci import OCI_PREFIX
 from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer


 class ManagedSerializer:
@@ -28,7 +28,7 @@ class MetadataSerializer(PassiveSerializer):
     """Serializer for blueprint metadata"""

     name = CharField()
-    labels = JSONDictField()
+    labels = JSONField()


 class BlueprintInstanceSerializer(ModelSerializer):
```
authentik/blueprints/apps.py

```diff
@@ -40,7 +40,7 @@ class ManagedAppConfig(AppConfig):
                 meth()
                 self._logger.debug("Successfully reconciled", name=name)
             except (DatabaseError, ProgrammingError, InternalError) as exc:
-                self._logger.debug("Failed to run reconcile", name=name, exc=exc)
+                self._logger.warning("Failed to run reconcile", name=name, exc=exc)


 class AuthentikBlueprintsConfig(ManagedAppConfig):
```
```diff
@@ -30,7 +30,7 @@ def check_blueprint_v1_file(BlueprintInstance: type, path: Path):
         return
     blueprint_file.seek(0)
     instance: BlueprintInstance = BlueprintInstance.objects.filter(path=path).first()
-    rel_path = path.relative_to(Path(CONFIG.get("paths.blueprints")))
+    rel_path = path.relative_to(Path(CONFIG.get("blueprints_dir")))
     meta = None
     if metadata:
         meta = from_dict(BlueprintMetadata, metadata)
@@ -55,7 +55,7 @@ def migration_blueprint_import(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
     Flow = apps.get_model("authentik_flows", "Flow")

     db_alias = schema_editor.connection.alias
-    for file in glob(f"{CONFIG.get('paths.blueprints')}/**/*.yaml", recursive=True):
+    for file in glob(f"{CONFIG.get('blueprints_dir')}/**/*.yaml", recursive=True):
         check_blueprint_v1_file(BlueprintInstance, Path(file))

     for blueprint in BlueprintInstance.objects.using(db_alias).all():
```
authentik/blueprints/models.py

```diff
@@ -82,7 +82,7 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
     def retrieve_file(self) -> str:
         """Get blueprint from path"""
         try:
-            base = Path(CONFIG.get("paths.blueprints"))
+            base = Path(CONFIG.get("blueprints_dir"))
             full_path = base.joinpath(Path(self.path)).resolve()
             if not str(full_path).startswith(str(base.resolve())):
                 raise BlueprintRetrievalFailed("Invalid blueprint path")
```
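The context lines around this change show the path-traversal guard that both versions share: the requested blueprint path is resolved under the base directory and rejected if it escapes it. A standalone sketch of that same check, with an illustrative function name and error type (the real code raises `BlueprintRetrievalFailed`):

```python
from pathlib import Path


def resolve_inside(base: Path, relative: str) -> Path:
    """Resolve `relative` under `base`, refusing paths that escape it."""
    full_path = base.joinpath(Path(relative)).resolve()
    # resolve() collapses ".." components, so comparing against the
    # resolved base prefix catches traversal attempts
    if not str(full_path).startswith(str(base.resolve())):
        raise ValueError("Invalid blueprint path")
    return full_path


base = Path("/blueprints")
print(resolve_inside(base, "default/flows.yaml"))  # /blueprints/default/flows.yaml
try:
    resolve_inside(base, "../../etc/passwd")
except ValueError as exc:
    print(exc)  # Invalid blueprint path
```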
```diff
@@ -19,7 +19,7 @@ class TestBlueprintsV1API(APITestCase):
         self.user = create_test_admin_user()
         self.client.force_login(self.user)

-    @CONFIG.patch("paths.blueprints", TMP)
+    @CONFIG.patch("blueprints_dir", TMP)
     def test_api_available(self):
         """Test valid file"""
         with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
```
```diff
@@ -16,7 +16,7 @@ TMP = mkdtemp("authentik-blueprints")
 class TestBlueprintsV1Tasks(TransactionTestCase):
     """Test Blueprints v1 Tasks"""

-    @CONFIG.patch("paths.blueprints", TMP)
+    @CONFIG.patch("blueprints_dir", TMP)
     def test_invalid_file_syntax(self):
         """Test syntactically invalid file"""
         with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
@@ -25,7 +25,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
         blueprints = blueprints_find()
         self.assertEqual(blueprints, [])

-    @CONFIG.patch("paths.blueprints", TMP)
+    @CONFIG.patch("blueprints_dir", TMP)
     def test_invalid_file_version(self):
         """Test invalid file"""
         with NamedTemporaryFile(suffix=".yaml", dir=TMP) as file:
@@ -34,7 +34,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
         blueprints = blueprints_find()
         self.assertEqual(blueprints, [])

-    @CONFIG.patch("paths.blueprints", TMP)
+    @CONFIG.patch("blueprints_dir", TMP)
     def test_valid(self):
         """Test valid file"""
         blueprint_id = generate_id()
@@ -64,7 +64,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
         },
     )

-    @CONFIG.patch("paths.blueprints", TMP)
+    @CONFIG.patch("blueprints_dir", TMP)
     def test_valid_updated(self):
         """Test valid file"""
         BlueprintInstance.objects.filter(name="foo").delete()
@@ -123,7 +123,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
         },
     )

-    @CONFIG.patch("paths.blueprints", TMP)
+    @CONFIG.patch("blueprints_dir", TMP)
     def test_valid_disabled(self):
         """Test valid file"""
         with NamedTemporaryFile(mode="w+", suffix=".yaml", dir=TMP) as file:
```
|
|||
from typing import TYPE_CHECKING
|
||||
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import BooleanField
|
||||
from rest_framework.fields import BooleanField, JSONField
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.blueprints.v1.meta.registry import BaseMetaModel, MetaResult, registry
|
||||
from authentik.core.api.utils import JSONDictField, PassiveSerializer
|
||||
from authentik.core.api.utils import PassiveSerializer, is_dict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.blueprints.models import BlueprintInstance
|
||||
|
@ -17,7 +17,7 @@ LOGGER = get_logger()
|
|||
class ApplyBlueprintMetaSerializer(PassiveSerializer):
|
||||
"""Serializer for meta apply blueprint model"""
|
||||
|
||||
identifiers = JSONDictField()
|
||||
identifiers = JSONField(validators=[is_dict])
|
||||
required = BooleanField(default=True)
|
||||
|
||||
# We cannot override `instance` as that will confuse rest_framework
|
||||
|
|
|
authentik/blueprints/v1/tasks.py

```diff
@@ -62,7 +62,7 @@ def start_blueprint_watcher():
     if _file_watcher_started:
         return
     observer = Observer()
-    observer.schedule(BlueprintEventHandler(), CONFIG.get("paths.blueprints"), recursive=True)
+    observer.schedule(BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True)
     observer.start()
     _file_watcher_started = True

@@ -75,7 +75,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
             return
         if event.is_directory:
             return
-        root = Path(CONFIG.get("paths.blueprints")).absolute()
+        root = Path(CONFIG.get("blueprints_dir")).absolute()
         path = Path(event.src_path).absolute()
         rel_path = str(path.relative_to(root))
         if isinstance(event, FileCreatedEvent):
@@ -101,7 +101,7 @@ def blueprints_find_dict():
 def blueprints_find() -> list[BlueprintFile]:
     """Find blueprints and return valid ones"""
     blueprints = []
-    root = Path(CONFIG.get("paths.blueprints"))
+    root = Path(CONFIG.get("blueprints_dir"))
     for path in root.rglob("**/*.yaml"):
         rel_path = path.relative_to(root)
         # Check if any part in the path starts with a dot and assume a hidden file
```
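Apart from the config key, both sides watch the blueprint directory the same way. A self-contained sketch of the watchdog wiring shown above, assuming a placeholder directory and a simplified handler (the real handler dispatches blueprint discovery and apply tasks):

```python
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer


class BlueprintEventHandler(FileSystemEventHandler):
    """Simplified stand-in for the handler in the diff above."""

    def on_created(self, event):
        if not event.is_directory:
            print(f"new blueprint: {event.src_path}")

    def on_modified(self, event):
        if not event.is_directory:
            print(f"blueprint changed: {event.src_path}")


observer = Observer()
# recursive=True matches the observer.schedule() call in both branches
observer.schedule(BlueprintEventHandler(), "/blueprints", recursive=True)
observer.start()
```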
```diff
@@ -14,8 +14,7 @@ from ua_parser import user_agent_parser
 from authentik.api.authorization import OwnerSuperuserPermissions
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.models import AuthenticatedSession
-from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict
-from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR, GeoIPDict
+from authentik.events.geo import GEOIP_READER, GeoIPDict


 class UserAgentDeviceDict(TypedDict):
@@ -60,7 +59,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
     current = SerializerMethodField()
     user_agent = SerializerMethodField()
     geo_ip = SerializerMethodField()
-    asn = SerializerMethodField()

     def get_current(self, instance: AuthenticatedSession) -> bool:
         """Check if session is currently active session"""
@@ -72,12 +70,8 @@ class AuthenticatedSessionSerializer(ModelSerializer):
         return user_agent_parser.Parse(instance.last_user_agent)

     def get_geo_ip(self, instance: AuthenticatedSession) -> Optional[GeoIPDict]:  # pragma: no cover
-        """Get GeoIP Data"""
-        return GEOIP_CONTEXT_PROCESSOR.city_dict(instance.last_ip)
-
-    def get_asn(self, instance: AuthenticatedSession) -> Optional[ASNDict]:  # pragma: no cover
-        """Get ASN Data"""
-        return ASN_CONTEXT_PROCESSOR.asn_dict(instance.last_ip)
+        """Get parsed user agent"""
+        return GEOIP_READER.city_dict(instance.last_ip)

     class Meta:
         model = AuthenticatedSession
@@ -86,7 +80,6 @@ class AuthenticatedSessionSerializer(ModelSerializer):
             "current",
             "user_agent",
             "geo_ip",
-            "asn",
             "user",
             "last_ip",
             "last_user_agent",
```
```diff
@@ -8,7 +8,7 @@ from django_filters.filterset import FilterSet
 from drf_spectacular.utils import OpenApiResponse, extend_schema
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.decorators import action
-from rest_framework.fields import CharField, IntegerField
+from rest_framework.fields import CharField, IntegerField, JSONField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
@@ -16,7 +16,7 @@ from rest_framework.viewsets import ModelViewSet

 from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Group, User
 from authentik.rbac.api.roles import RoleSerializer

@@ -24,7 +24,7 @@ from authentik.rbac.api.roles import RoleSerializer
 class GroupMemberSerializer(ModelSerializer):
     """Stripped down user serializer to show relevant users for groups"""

-    attributes = JSONDictField(required=False)
+    attributes = JSONField(validators=[is_dict], required=False)
     uid = CharField(read_only=True)

     class Meta:
@@ -44,7 +44,7 @@ class GroupMemberSerializer(ModelSerializer):
 class GroupSerializer(ModelSerializer):
     """Group Serializer"""

-    attributes = JSONDictField(required=False)
+    attributes = JSONField(validators=[is_dict], required=False)
     users_obj = ListSerializer(
         child=GroupMemberSerializer(), read_only=True, source="users", required=False
     )
```
```diff
@@ -32,7 +32,13 @@ from drf_spectacular.utils import (
 )
 from guardian.shortcuts import get_anonymous_user, get_objects_for_user
 from rest_framework.decorators import action
-from rest_framework.fields import CharField, IntegerField, ListField, SerializerMethodField
+from rest_framework.fields import (
+    CharField,
+    IntegerField,
+    JSONField,
+    ListField,
+    SerializerMethodField,
+)
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import (
@@ -51,7 +57,7 @@ from authentik.admin.api.metrics import CoordinateSerializer
 from authentik.api.decorators import permission_required
 from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, LinkSerializer, PassiveSerializer
+from authentik.core.api.utils import LinkSerializer, PassiveSerializer, is_dict
 from authentik.core.middleware import (
     SESSION_KEY_IMPERSONATE_ORIGINAL_USER,
     SESSION_KEY_IMPERSONATE_USER,
@@ -83,7 +89,7 @@ LOGGER = get_logger()
 class UserGroupSerializer(ModelSerializer):
     """Simplified Group Serializer for user's groups"""

-    attributes = JSONDictField(required=False)
+    attributes = JSONField(required=False)
     parent_name = CharField(source="parent.name", read_only=True)

     class Meta:
@@ -104,7 +110,7 @@ class UserSerializer(ModelSerializer):

     is_superuser = BooleanField(read_only=True)
     avatar = CharField(read_only=True)
-    attributes = JSONDictField(required=False)
+    attributes = JSONField(validators=[is_dict], required=False)
     groups = PrimaryKeyRelatedField(
         allow_empty=True, many=True, source="ak_groups", queryset=Group.objects.all(), default=list
     )
```
authentik/core/api/utils.py

```diff
@@ -2,9 +2,6 @@
 from typing import Any

 from django.db.models import Model
-from drf_spectacular.extensions import OpenApiSerializerFieldExtension
-from drf_spectacular.plumbing import build_basic_type
-from drf_spectacular.types import OpenApiTypes
 from rest_framework.fields import CharField, IntegerField, JSONField
 from rest_framework.serializers import Serializer, SerializerMethodField, ValidationError

@@ -16,21 +13,6 @@ def is_dict(value: Any):
     raise ValidationError("Value must be a dictionary, and not have any duplicate keys.")


-class JSONDictField(JSONField):
-    """JSON Field which only allows dictionaries"""
-
-    default_validators = [is_dict]
-
-
-class JSONExtension(OpenApiSerializerFieldExtension):
-    """Generate API Schema for JSON fields as"""
-
-    target_class = "authentik.core.api.utils.JSONDictField"
-
-    def map_serializer_field(self, auto_schema, direction):
-        return build_basic_type(OpenApiTypes.OBJECT)
-
-
 class PassiveSerializer(Serializer):
     """Base serializer class which doesn't implement create/update methods"""

@@ -44,7 +26,7 @@
 class PropertyMappingPreviewSerializer(PassiveSerializer):
     """Preview how the current user is mapped via the property mappings selected in a provider"""

-    preview = JSONDictField(read_only=True)
+    preview = JSONField(read_only=True)


 class MetaNameSerializer(PassiveSerializer):
```
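The two branches enforce the same constraint in different ways: the newer side subclasses `JSONField` with a default validator (plus an OpenAPI extension), while the 2023.10 side attaches `is_dict` per field. A minimal sketch of why both spellings reject non-dict payloads; the serializer and settings bootstrap below are illustrative assumptions, not code from the repository:

```python
import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # minimal setup so DRF fields work outside a project

from rest_framework.exceptions import ValidationError
from rest_framework.fields import JSONField
from rest_framework.serializers import Serializer


def is_dict(value):
    """Reject any JSON value that is not an object."""
    if not isinstance(value, dict):
        raise ValidationError("Value must be a dictionary, and not have any duplicate keys.")


class JSONDictField(JSONField):
    """JSON Field which only allows dictionaries (newer variant)."""

    default_validators = [is_dict]


class ExampleSerializer(Serializer):
    subclassed = JSONDictField(required=False)
    # Equivalent 2023.10 spelling, with the validator attached explicitly:
    explicit = JSONField(validators=[is_dict], required=False)


serializer = ExampleSerializer(
    data={"subclassed": {"ok": True}, "explicit": ["not", "a", "dict"]}
)
print(serializer.is_valid())     # False - the list fails is_dict
print(serializer.errors.keys())  # dict_keys(['explicit'])
```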
authentik/core/models.py

```diff
@@ -30,6 +30,7 @@ from authentik.lib.models import (
     DomainlessFormattedURLValidator,
     SerializerModel,
 )
+from authentik.lib.utils.http import get_client_ip
 from authentik.policies.models import PolicyBindingModel
 from authentik.root.install_id import get_install_id

@@ -516,7 +517,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
     objects = InheritanceManager()

     @property
-    def icon_url(self) -> Optional[str]:
+    def get_icon(self) -> Optional[str]:
         """Get the URL to the Icon. If the name is /static or
         starts with http it is returned as-is"""
         if not self.icon:
@@ -747,14 +748,12 @@ class AuthenticatedSession(ExpiringModel):
     @staticmethod
     def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]:
         """Create a new session from a http request"""
-        from authentik.root.middleware import ClientIPMiddleware
-
         if not hasattr(request, "session") or not request.session.session_key:
             return None
         return AuthenticatedSession(
             session_key=request.session.session_key,
             user=user,
-            last_ip=ClientIPMiddleware.get_client_ip(request),
+            last_ip=get_client_ip(request),
             last_user_agent=request.META.get("HTTP_USER_AGENT", ""),
             expires=request.session.get_expiry_date(),
         )
```
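The 2023.10 side resolves the client address with `get_client_ip` from `authentik.lib.utils.http`, the newer side with `ClientIPMiddleware.get_client_ip`. Both boil down to reading the forwarded header with a remote-address fallback; the simplified sketch below is an assumption about that shape (header precedence and the sentinel mirror the `255.255.255.255` fallback comment seen elsewhere in this diff, not authentik's exact logic):

```python
def get_client_ip(meta: dict) -> str:
    """Best-effort client IP from WSGI-style request metadata."""
    forwarded = meta.get("HTTP_X_FORWARDED_FOR")
    if forwarded:
        # The first entry is the originating client when proxies append
        return forwarded.split(",")[0].strip()
    # Fall back to a sentinel if the IP cannot be determined
    return meta.get("REMOTE_ADDR", "255.255.255.255")


print(get_client_ip({"HTTP_X_FORWARDED_FOR": "203.0.113.7, 10.0.0.1"}))  # 203.0.113.7
```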
```diff
@@ -27,7 +27,7 @@ window.authentik.flow = {

 {% block body %}
 <ak-message-container></ak-message-container>
-<ak-flow-executor flowSlug="{{ flow.slug }}">
+<ak-flow-executor>
     <ak-loading></ak-loading>
 </ak-flow-executor>
 {% endblock %}
```
```diff
@@ -44,14 +44,28 @@

 {% block body %}
+<div class="pf-c-background-image">
+    <svg xmlns="http://www.w3.org/2000/svg" class="pf-c-background-image__filter" width="0" height="0">
+        <filter id="image_overlay">
+            <feColorMatrix in="SourceGraphic" type="matrix" values="1.3 0 0 0 0 0 1.3 0 0 0 0 0 1.3 0 0 0 0 0 1 0" />
+            <feComponentTransfer color-interpolation-filters="sRGB" result="duotone">
+                <feFuncR type="table" tableValues="0.086274509803922 0.43921568627451"></feFuncR>
+                <feFuncG type="table" tableValues="0.086274509803922 0.43921568627451"></feFuncG>
+                <feFuncB type="table" tableValues="0.086274509803922 0.43921568627451"></feFuncB>
+                <feFuncA type="table" tableValues="0 1"></feFuncA>
+            </feComponentTransfer>
+        </filter>
+    </svg>
+</div>
 <ak-message-container></ak-message-container>
-<div class="pf-c-login stacked">
+<div class="pf-c-login">
     <div class="ak-login-container">
-        <main class="pf-c-login__main">
-            <div class="pf-c-login__main-header pf-c-brand ak-brand">
+        <header class="pf-c-login__header">
+            <div class="pf-c-brand ak-brand">
                 <img src="{{ tenant.branding_logo }}" alt="authentik Logo" />
             </div>
+        </header>
+        {% block main_container %}
+        <main class="pf-c-login__main">
+            <header class="pf-c-login__main-header">
                 <h1 class="pf-c-title pf-m-3xl">
                     {% block card_title %}
@@ -63,6 +77,7 @@
                 {% endblock %}
             </div>
+        </main>
         {% endblock %}
         <footer class="pf-c-login__footer">
             <ul class="pf-c-list pf-m-inline">
             {% for link in footer_links %}
```
```diff
@@ -46,7 +46,7 @@ def certificate_discovery(self: MonitoredTask):
     certs = {}
     private_keys = {}
     discovered = 0
-    for file in glob(CONFIG.get("path.cert_discovery") + "/**", recursive=True):
+    for file in glob(CONFIG.get("cert_discovery_dir") + "/**", recursive=True):
         path = Path(file)
         if not path.exists():
             continue
```

```diff
@@ -265,7 +265,7 @@ class TestCrypto(APITestCase):
             _cert.write(builder.certificate)
         with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
             _key.write(builder.private_key)
-        with CONFIG.patch("path.cert_discovery", temp_dir):
+        with CONFIG.patch("cert_discovery_dir", temp_dir):
             certificate_discovery()  # pylint: disable=no-value-for-parameter
             keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
                 managed=MANAGED_DISCOVERED % "foo"
```
```diff
@@ -5,7 +5,7 @@ from json import loads
 import django_filters
 from django.db.models.aggregates import Count
 from django.db.models.fields.json import KeyTextTransform, KeyTransform
-from django.db.models.functions import ExtractDay, ExtractHour
+from django.db.models.functions import ExtractDay
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from guardian.shortcuts import get_objects_for_user
@@ -149,15 +149,7 @@ class EventViewSet(ModelViewSet):
         return Response(EventTopPerUserSerializer(instance=events, many=True).data)

-    @extend_schema(
-        responses={200: CoordinateSerializer(many=True)},
-    )
-    @action(detail=False, methods=["GET"], pagination_class=None)
-    def volume(self, request: Request) -> Response:
-        """Get event volume for specified filters and timeframe"""
-        queryset = self.filter_queryset(self.get_queryset())
-        return Response(queryset.get_events_per(timedelta(days=7), ExtractHour, 7 * 3))
-
     @extend_schema(
+        methods=["GET"],
         responses={200: CoordinateSerializer(many=True)},
         filters=[],
         parameters=[
```
authentik/events/apps.py

```diff
@@ -2,7 +2,6 @@
 from prometheus_client import Gauge

 from authentik.blueprints.apps import ManagedAppConfig
-from authentik.lib.config import CONFIG, ENV_PREFIX

 GAUGE_TASKS = Gauge(
     "authentik_system_tasks",
@@ -22,24 +21,3 @@ class AuthentikEventsConfig(ManagedAppConfig):
     def reconcile_load_events_signals(self):
         """Load events signals"""
         self.import_module("authentik.events.signals")
-
-    def reconcile_check_deprecations(self):
-        """Check for config deprecations"""
-        from authentik.events.models import Event, EventAction
-
-        for key_replace, msg in CONFIG.deprecations.items():
-            key, replace = key_replace
-            key_env = f"{ENV_PREFIX}_{key.replace('.', '__')}".upper()
-            replace_env = f"{ENV_PREFIX}_{replace.replace('.', '__')}".upper()
-            if Event.objects.filter(
-                action=EventAction.CONFIGURATION_ERROR, context__deprecated_option=key
-            ).exists():
-                continue
-            Event.new(
-                EventAction.CONFIGURATION_ERROR,
-                deprecated_option=key,
-                deprecated_env=key_env,
-                replacement_option=replace,
-                replacement_env=replace_env,
-                message=msg,
-            ).save()
```
authentik/events/context_processors/asn.py (deleted)

```diff
@@ -1,81 +0,0 @@
-"""ASN Enricher"""
-from typing import TYPE_CHECKING, Optional, TypedDict
-
-from django.http import HttpRequest
-from geoip2.errors import GeoIP2Error
-from geoip2.models import ASN
-from sentry_sdk import Hub
-
-from authentik.events.context_processors.mmdb import MMDBContextProcessor
-from authentik.lib.config import CONFIG
-from authentik.root.middleware import ClientIPMiddleware
-
-if TYPE_CHECKING:
-    from authentik.api.v3.config import Capabilities
-    from authentik.events.models import Event
-
-
-class ASNDict(TypedDict):
-    """ASN Details"""
-
-    asn: int
-    as_org: str | None
-    network: str | None
-
-
-class ASNContextProcessor(MMDBContextProcessor):
-    """ASN Database reader wrapper"""
-
-    def capability(self) -> Optional["Capabilities"]:
-        from authentik.api.v3.config import Capabilities
-
-        return Capabilities.CAN_ASN
-
-    def path(self) -> str | None:
-        return CONFIG.get("events.context_processors.asn")
-
-    def enrich_event(self, event: "Event"):
-        asn = self.asn_dict(event.client_ip)
-        if not asn:
-            return
-        event.context["asn"] = asn
-
-    def enrich_context(self, request: HttpRequest) -> dict:
-        return {
-            "asn": self.asn_dict(ClientIPMiddleware.get_client_ip(request)),
-        }
-
-    def asn(self, ip_address: str) -> Optional[ASN]:
-        """Wrapper for Reader.asn"""
-        with Hub.current.start_span(
-            op="authentik.events.asn.asn",
-            description=ip_address,
-        ):
-            if not self.configured():
-                return None
-            self.check_expired()
-            try:
-                return self.reader.asn(ip_address)
-            except (GeoIP2Error, ValueError):
-                return None
-
-    def asn_to_dict(self, asn: ASN | None) -> ASNDict:
-        """Convert ASN to dict"""
-        if not asn:
-            return {}
-        asn_dict: ASNDict = {
-            "asn": asn.autonomous_system_number,
-            "as_org": asn.autonomous_system_organization,
-            "network": str(asn.network) if asn.network else None,
-        }
-        return asn_dict
-
-    def asn_dict(self, ip_address: str) -> Optional[ASNDict]:
-        """Wrapper for self.asn that returns a dict"""
-        asn = self.asn(ip_address)
-        if not asn:
-            return None
-        return self.asn_to_dict(asn)
-
-
-ASN_CONTEXT_PROCESSOR = ASNContextProcessor()
```
authentik/events/context_processors/base.py (deleted)

```diff
@@ -1,43 +0,0 @@
-"""Base event enricher"""
-from functools import cache
-from typing import TYPE_CHECKING, Optional
-
-from django.http import HttpRequest
-
-if TYPE_CHECKING:
-    from authentik.api.v3.config import Capabilities
-    from authentik.events.models import Event
-
-
-class EventContextProcessor:
-    """Base event enricher"""
-
-    def capability(self) -> Optional["Capabilities"]:
-        """Return the capability this context processor provides"""
-        return None
-
-    def configured(self) -> bool:
-        """Return true if this context processor is configured"""
-        return False
-
-    def enrich_event(self, event: "Event"):
-        """Modify event"""
-        raise NotImplementedError
-
-    def enrich_context(self, request: HttpRequest) -> dict:
-        """Modify context"""
-        raise NotImplementedError
-
-
-@cache
-def get_context_processors() -> list[EventContextProcessor]:
-    """Get a list of all configured context processors"""
-    from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
-    from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
-
-    processors_types = [ASN_CONTEXT_PROCESSOR, GEOIP_CONTEXT_PROCESSOR]
-    processors = []
-    for _type in processors_types:
-        if _type.configured():
-            processors.append(_type)
-    return processors
```
authentik/events/context_processors/geoip.py (deleted)

```diff
@@ -1,86 +0,0 @@
-"""events GeoIP Reader"""
-from typing import TYPE_CHECKING, Optional, TypedDict
-
-from django.http import HttpRequest
-from geoip2.errors import GeoIP2Error
-from geoip2.models import City
-from sentry_sdk.hub import Hub
-
-from authentik.events.context_processors.mmdb import MMDBContextProcessor
-from authentik.lib.config import CONFIG
-from authentik.root.middleware import ClientIPMiddleware
-
-if TYPE_CHECKING:
-    from authentik.api.v3.config import Capabilities
-    from authentik.events.models import Event
-
-
-class GeoIPDict(TypedDict):
-    """GeoIP Details"""
-
-    continent: str
-    country: str
-    lat: float
-    long: float
-    city: str
-
-
-class GeoIPContextProcessor(MMDBContextProcessor):
-    """Slim wrapper around GeoIP API"""
-
-    def capability(self) -> Optional["Capabilities"]:
-        from authentik.api.v3.config import Capabilities
-
-        return Capabilities.CAN_GEO_IP
-
-    def path(self) -> str | None:
-        return CONFIG.get("events.context_processors.geoip")
-
-    def enrich_event(self, event: "Event"):
-        city = self.city_dict(event.client_ip)
-        if not city:
-            return
-        event.context["geo"] = city
-
-    def enrich_context(self, request: HttpRequest) -> dict:
-        # Different key `geoip` vs `geo` for legacy reasons
-        return {"geoip": self.city(ClientIPMiddleware.get_client_ip(request))}
-
-    def city(self, ip_address: str) -> Optional[City]:
-        """Wrapper for Reader.city"""
-        with Hub.current.start_span(
-            op="authentik.events.geo.city",
-            description=ip_address,
-        ):
-            if not self.configured():
-                return None
-            self.check_expired()
-            try:
-                return self.reader.city(ip_address)
-            except (GeoIP2Error, ValueError):
-                return None
-
-    def city_to_dict(self, city: City | None) -> GeoIPDict:
-        """Convert City to dict"""
-        if not city:
-            return {}
-        city_dict: GeoIPDict = {
-            "continent": city.continent.code,
-            "country": city.country.iso_code,
-            "lat": city.location.latitude,
-            "long": city.location.longitude,
-            "city": "",
-        }
-        if city.city.name:
-            city_dict["city"] = city.city.name
-        return city_dict
-
-    def city_dict(self, ip_address: str) -> Optional[GeoIPDict]:
-        """Wrapper for self.city that returns a dict"""
-        city = self.city(ip_address)
-        if not city:
-            return None
-        return self.city_to_dict(city)
-
-
-GEOIP_CONTEXT_PROCESSOR = GeoIPContextProcessor()
```
authentik/events/context_processors/mmdb.py (deleted)

```diff
@@ -1,53 +0,0 @@
-"""Common logic for reading MMDB files"""
-from pathlib import Path
-from typing import Optional
-
-from geoip2.database import Reader
-from structlog.stdlib import get_logger
-
-from authentik.events.context_processors.base import EventContextProcessor
-
-
-class MMDBContextProcessor(EventContextProcessor):
-    """Common logic for reading MaxMind DB files, including re-loading if the file has changed"""
-
-    def __init__(self):
-        self.reader: Optional[Reader] = None
-        self._last_mtime: float = 0.0
-        self.logger = get_logger()
-        self.open()
-
-    def path(self) -> str | None:
-        """Get the path to the MMDB file to load"""
-        raise NotImplementedError
-
-    def open(self):
-        """Get GeoIP Reader, if configured, otherwise none"""
-        path = self.path()
-        if path == "" or not path:
-            return
-        try:
-            self.reader = Reader(path)
-            self._last_mtime = Path(path).stat().st_mtime
-            self.logger.info("Loaded MMDB database", last_write=self._last_mtime, file=path)
-        except OSError as exc:
-            self.logger.warning("Failed to load MMDB database", path=path, exc=exc)
-
-    def check_expired(self):
-        """Check if the modification date of the MMDB database has
-        changed, and reload it if so"""
-        path = self.path()
-        if path == "" or not path:
-            return
-        try:
-            mtime = Path(path).stat().st_mtime
-            diff = self._last_mtime < mtime
-            if diff > 0:
-                self.logger.info("Found new MMDB Database, reopening", diff=diff, path=path)
-                self.open()
-        except OSError as exc:
-            self.logger.warning("Failed to check MMDB age", exc=exc)
-
-    def configured(self) -> bool:
-        """Return true if this context processor is configured"""
-        return bool(self.reader)
```
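Both the removed `MMDBContextProcessor` and the `geo.py` reader that replaces it below rely on the same hot-reload idea: remember the database file's mtime and reopen the reader when it advances. A generic sketch of that pattern, with no geoip2 dependency and illustrative names:

```python
from pathlib import Path


class HotReloadingFile:
    """Re-read a file whenever its modification time advances."""

    def __init__(self, path: str):
        self.path = path
        self._last_mtime = 0.0
        self._content: str | None = None
        self._open()

    def _open(self):
        try:
            self._content = Path(self.path).read_text()
            self._last_mtime = Path(self.path).stat().st_mtime
        except OSError:
            # Missing file just means "not configured", as in the diff
            self._content = None

    def get(self) -> str | None:
        # Reload if the file was rewritten since we last opened it,
        # mirroring check_expired() above
        try:
            if Path(self.path).stat().st_mtime > self._last_mtime:
                self._open()
        except OSError:
            pass
        return self._content
```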
authentik/events/geo.py (new file, 100 lines)

```diff
@@ -0,0 +1,100 @@
+"""events GeoIP Reader"""
+from os import stat
+from typing import Optional, TypedDict
+
+from geoip2.database import Reader
+from geoip2.errors import GeoIP2Error
+from geoip2.models import City
+from sentry_sdk.hub import Hub
+from structlog.stdlib import get_logger
+
+from authentik.lib.config import CONFIG
+
+LOGGER = get_logger()
+
+
+class GeoIPDict(TypedDict):
+    """GeoIP Details"""
+
+    continent: str
+    country: str
+    lat: float
+    long: float
+    city: str
+
+
+class GeoIPReader:
+    """Slim wrapper around GeoIP API"""
+
+    def __init__(self):
+        self.__reader: Optional[Reader] = None
+        self.__last_mtime: float = 0.0
+        self.__open()
+
+    def __open(self):
+        """Get GeoIP Reader, if configured, otherwise none"""
+        path = CONFIG.get("geoip")
+        if path == "" or not path:
+            return
+        try:
+            self.__reader = Reader(path)
+            self.__last_mtime = stat(path).st_mtime
+            LOGGER.info("Loaded GeoIP database", last_write=self.__last_mtime)
+        except OSError as exc:
+            LOGGER.warning("Failed to load GeoIP database", exc=exc)
+
+    def __check_expired(self):
+        """Check if the modification date of the GeoIP database has
+        changed, and reload it if so"""
+        path = CONFIG.get("geoip")
+        try:
+            mtime = stat(path).st_mtime
+            diff = self.__last_mtime < mtime
+            if diff > 0:
+                LOGGER.info("Found new GeoIP Database, reopening", diff=diff)
+                self.__open()
+        except OSError as exc:
+            LOGGER.warning("Failed to check GeoIP age", exc=exc)
+            return
+
+    @property
+    def enabled(self) -> bool:
+        """Check if GeoIP is enabled"""
+        return bool(self.__reader)
+
+    def city(self, ip_address: str) -> Optional[City]:
+        """Wrapper for Reader.city"""
+        with Hub.current.start_span(
+            op="authentik.events.geo.city",
+            description=ip_address,
+        ):
+            if not self.enabled:
+                return None
+            self.__check_expired()
+            try:
+                return self.__reader.city(ip_address)
+            except (GeoIP2Error, ValueError):
+                return None
+
+    def city_to_dict(self, city: City) -> GeoIPDict:
+        """Convert City to dict"""
+        city_dict: GeoIPDict = {
+            "continent": city.continent.code,
+            "country": city.country.iso_code,
+            "lat": city.location.latitude,
+            "long": city.location.longitude,
+            "city": "",
+        }
+        if city.city.name:
+            city_dict["city"] = city.city.name
+        return city_dict
+
+    def city_dict(self, ip_address: str) -> Optional[GeoIPDict]:
+        """Wrapper for self.city that returns a dict"""
+        city = self.city(ip_address)
+        if not city:
+            return None
+        return self.city_to_dict(city)
+
+
+GEOIP_READER = GeoIPReader()
```
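Callers elsewhere in this diff (the capabilities endpoint, session serializers, event enrichment) all consume this module-level singleton the same way: check `enabled`, then accept that the dict helpers may return `None`. A short usage sketch against the API added above; the IP is just an example value:

```python
from authentik.events.geo import GEOIP_READER

# city_dict() returns None when the database is missing or the lookup
# fails, so callers only need one None check after the enabled guard
if GEOIP_READER.enabled:
    geo = GEOIP_READER.city_dict("8.8.8.8")
    if geo:
        print(geo["country"], geo["city"])
```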
authentik/events/models.py

```diff
@@ -26,7 +26,7 @@ from authentik.core.middleware import (
     SESSION_KEY_IMPERSONATE_USER,
 )
 from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
-from authentik.events.context_processors.base import get_context_processors
+from authentik.events.geo import GEOIP_READER
 from authentik.events.utils import (
     cleanse_dict,
     get_user,
@@ -36,10 +36,9 @@ from authentik.events.utils import (
 )
 from authentik.lib.models import DomainlessURLValidator, SerializerModel
 from authentik.lib.sentry import SentryIgnoredException
-from authentik.lib.utils.http import get_http_session
+from authentik.lib.utils.http import get_client_ip, get_http_session
 from authentik.lib.utils.time import timedelta_from_string
 from authentik.policies.models import PolicyBindingModel
-from authentik.root.middleware import ClientIPMiddleware
 from authentik.stages.email.utils import TemplateEmailMessage
 from authentik.tenants.models import Tenant
 from authentik.tenants.utils import DEFAULT_TENANT
@@ -245,16 +244,22 @@ class Event(SerializerModel, ExpiringModel):
             self.user = get_user(request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER])
             self.user["on_behalf_of"] = get_user(request.session[SESSION_KEY_IMPERSONATE_USER])
-        # User 255.255.255.255 as fallback if IP cannot be determined
-        self.client_ip = ClientIPMiddleware.get_client_ip(request)
-        # Enrich event data
-        for processor in get_context_processors():
-            processor.enrich_event(self)
+        self.client_ip = get_client_ip(request)
+        # Apply GeoIP Data, when enabled
+        self.with_geoip()
         # If there's no app set, we get it from the requests too
         if not self.app:
             self.app = Event._get_app_from_request(request)
         self.save()
         return self

+    def with_geoip(self):  # pragma: no cover
+        """Apply GeoIP Data, when enabled"""
+        city = GEOIP_READER.city_dict(self.client_ip)
+        if not city:
+            return
+        self.context["geo"] = city

     def save(self, *args, **kwargs):
         if self._state.adding:
             LOGGER.info(
@@ -461,7 +466,7 @@ class NotificationTransport(SerializerModel):
         }
         mail = TemplateEmailMessage(
             subject=subject_prefix + context["title"],
-            to=[f"{notification.user.name} <{notification.user.email}>"],
+            to=[notification.user.email],
             language=notification.user.locale(),
             template_name="email/event_notification.html",
             template_context=context,
```
authentik/events/signals.py

```diff
@@ -45,14 +45,9 @@ def get_login_event(request: HttpRequest) -> Optional[Event]:


 @receiver(user_logged_out)
-def on_user_logged_out(sender, request: HttpRequest, user: User, **kwargs):
+def on_user_logged_out(sender, request: HttpRequest, user: User, **_):
     """Log successfully logout"""
-    # Check if this even comes from the user_login stage's middleware, which will set an extra
-    # argument
-    event = Event.new(EventAction.LOGOUT)
-    if "event_extra" in kwargs:
-        event.context.update(kwargs["event_extra"])
-    event.from_http(request, user=user)
+    Event.new(EventAction.LOGOUT).from_http(request, user=user)


 @receiver(user_write)
```
```diff
@@ -1,24 +0,0 @@
-"""Test ASN Wrapper"""
-from django.test import TestCase
-
-from authentik.events.context_processors.asn import ASNContextProcessor
-
-
-class TestASN(TestCase):
-    """Test ASN Wrapper"""
-
-    def setUp(self) -> None:
-        self.reader = ASNContextProcessor()
-
-    def test_simple(self):
-        """Test simple asn wrapper"""
-        # IPs from
-        # https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-ASN-Test.json
-        self.assertEqual(
-            self.reader.asn_dict("1.0.0.1"),
-            {
-                "asn": 15169,
-                "as_org": "Google Inc.",
-                "network": "1.0.0.0/24",
-            },
-        )
```
@@ -1,14 +1,14 @@
 """Test GeoIP Wrapper"""
 from django.test import TestCase

-from authentik.events.context_processors.geoip import GeoIPContextProcessor
+from authentik.events.geo import GeoIPReader


 class TestGeoIP(TestCase):
     """Test GeoIP Wrapper"""

     def setUp(self) -> None:
-        self.reader = GeoIPContextProcessor()
+        self.reader = GeoIPReader()

     def test_simple(self):
         """Test simple city wrapper"""
@@ -17,13 +17,12 @@ from django.db.models.base import Model
 from django.http.request import HttpRequest
 from django.utils import timezone
 from django.views.debug import SafeExceptionReporterFilter
-from geoip2.models import ASN, City
+from geoip2.models import City
 from guardian.utils import get_anonymous_user

 from authentik.blueprints.v1.common import YAMLTag
 from authentik.core.models import User
-from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
-from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
+from authentik.events.geo import GEOIP_READER
 from authentik.policies.types import PolicyRequest

 # Special keys which are *not* cleaned, even when the default filter
@@ -124,9 +123,7 @@ def sanitize_item(value: Any) -> Any:
     if isinstance(value, (HttpRequest, WSGIRequest)):
         return ...
     if isinstance(value, City):
-        return GEOIP_CONTEXT_PROCESSOR.city_to_dict(value)
-    if isinstance(value, ASN):
-        return ASN_CONTEXT_PROCESSOR.asn_to_dict(value)
+        return GEOIP_READER.city_to_dict(value)
     if isinstance(value, Path):
         return str(value)
     if isinstance(value, Exception):
@@ -1,7 +1,7 @@
 # Generated by Django 4.2.6 on 2023-10-28 14:24

 from django.apps.registry import Apps
-from django.db import migrations, models
+from django.db import migrations
 from django.db.backends.base.schema import BaseDatabaseSchemaEditor


@@ -31,19 +31,4 @@ class Migration(migrations.Migration):

     operations = [
         migrations.RunPython(set_oobe_flow_authentication),
-        migrations.AlterField(
-            model_name="flow",
-            name="authentication",
-            field=models.TextField(
-                choices=[
-                    ("none", "None"),
-                    ("require_authenticated", "Require Authenticated"),
-                    ("require_unauthenticated", "Require Unauthenticated"),
-                    ("require_superuser", "Require Superuser"),
-                    ("require_outpost", "Require Outpost"),
-                ],
-                default="none",
-                help_text="Required level of authentication and authorization to access a flow.",
-            ),
-        ),
     ]
@@ -31,7 +31,6 @@ class FlowAuthenticationRequirement(models.TextChoices):
     REQUIRE_AUTHENTICATED = "require_authenticated"
     REQUIRE_UNAUTHENTICATED = "require_unauthenticated"
     REQUIRE_SUPERUSER = "require_superuser"
-    REQUIRE_OUTPOST = "require_outpost"


 class NotConfiguredAction(models.TextChoices):
@@ -23,7 +23,6 @@ from authentik.flows.models import (
 )
 from authentik.lib.config import CONFIG
 from authentik.policies.engine import PolicyEngine
-from authentik.root.middleware import ClientIPMiddleware

 LOGGER = get_logger()
 PLAN_CONTEXT_PENDING_USER = "pending_user"
@@ -34,7 +33,7 @@ PLAN_CONTEXT_SOURCE = "source"
 # Is set by the Flow Planner when a FlowToken was used, and the currently active flow plan
 # was restored.
 PLAN_CONTEXT_IS_RESTORED = "is_restored"
-CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_flows")
+CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_flows")
 CACHE_PREFIX = "goauthentik.io/flows/planner/"


@@ -142,10 +141,6 @@ class FlowPlanner:
             and not request.user.is_superuser
         ):
             raise FlowNonApplicableException()
-        if self.flow.authentication == FlowAuthenticationRequirement.REQUIRE_OUTPOST:
-            outpost_user = ClientIPMiddleware.get_outpost_user(request)
-            if not outpost_user:
-                raise FlowNonApplicableException()

     def plan(
         self, request: HttpRequest, default_context: Optional[dict[str, Any]] = None
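Note: the removed planner branch gated flows marked `REQUIRE_OUTPOST` on the request carrying an outpost-authenticated user. A hedged sketch of that gate; the exception and the lookup are simplified stand-ins, not authentik's own API:

from typing import Optional


class FlowNonApplicableException(Exception):
    """Raised when a flow's authentication requirement is not met."""


def check_outpost_requirement(requires_outpost: bool, outpost_user: Optional[str]) -> None:
    # Mirrors the removed branch: a flow that demands an outpost rejects
    # any request without an outpost service-account identity attached.
    if requires_outpost and not outpost_user:
        raise FlowNonApplicableException()


check_outpost_requirement(requires_outpost=True, outpost_user="embedded-outpost")
try:
    check_outpost_requirement(requires_outpost=True, outpost_user=None)
except FlowNonApplicableException:
    pass  # rejected, matching the deleted test's expectation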
@@ -472,7 +472,6 @@ class TestFlowExecutor(FlowTestCase):
         ident_stage = IdentificationStage.objects.create(
             name="ident",
             user_fields=[UserFields.E_MAIL],
-            pretend_user_exists=False,
         )
         FlowStageBinding.objects.create(
             target=flow,
@@ -8,7 +8,6 @@ from django.test import RequestFactory, TestCase
 from django.urls import reverse
 from guardian.shortcuts import get_anonymous_user

-from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import User
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
@@ -16,12 +15,9 @@ from authentik.flows.markers import ReevaluateMarker, StageMarker
 from authentik.flows.models import FlowAuthenticationRequirement, FlowDesignation, FlowStageBinding
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner, cache_key
-from authentik.lib.tests.utils import dummy_get_response
-from authentik.outposts.apps import MANAGED_OUTPOST
-from authentik.outposts.models import Outpost
 from authentik.policies.dummy.models import DummyPolicy
 from authentik.policies.models import PolicyBinding
 from authentik.policies.types import PolicyResult
-from authentik.root.middleware import ClientIPMiddleware
 from authentik.stages.dummy.models import DummyStage

 POLICY_RETURN_FALSE = PropertyMock(return_value=PolicyResult(False))
@@ -72,34 +68,6 @@ class TestFlowPlanner(TestCase):
             planner.allow_empty_flows = True
             planner.plan(request)

-    @reconcile_app("authentik_outposts")
-    def test_authentication_outpost(self):
-        """Test flow authentication (outpost)"""
-        flow = create_test_flow()
-        flow.authentication = FlowAuthenticationRequirement.REQUIRE_OUTPOST
-        request = self.request_factory.get(
-            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
-        )
-        request.user = AnonymousUser()
-        with self.assertRaises(FlowNonApplicableException):
-            planner = FlowPlanner(flow)
-            planner.allow_empty_flows = True
-            planner.plan(request)
-
-        outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        request = self.request_factory.get(
-            reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
-            HTTP_X_AUTHENTIK_OUTPOST_TOKEN=outpost.token.key,
-            HTTP_X_AUTHENTIK_REMOTE_IP="1.2.3.4",
-        )
-        request.user = AnonymousUser()
-        middleware = ClientIPMiddleware(dummy_get_response)
-        middleware(request)
-
-        planner = FlowPlanner(flow)
-        planner.allow_empty_flows = True
-        planner.plan(request)
-
     @patch(
         "authentik.policies.engine.PolicyEngine.result",
         POLICY_RETURN_FALSE,
@@ -154,15 +154,7 @@ def generate_avatar_from_name(

 def avatar_mode_generated(user: "User", mode: str) -> Optional[str]:
     """Wrapper that converts generated avatar to base64 svg"""
-    # By default generate based off of user's display name
-    name = user.name.strip()
-    if name == "":
-        # Fallback to username
-        name = user.username.strip()
-    # If we still don't have anything, fallback to `a k`
-    if name == "":
-        name = "a k"
-    svg = generate_avatar_from_name(name)
+    svg = generate_avatar_from_name(user.name if user.name.strip() != "" else "a k")
    return f"data:image/svg+xml;base64,{b64encode(svg.encode('utf-8')).decode('utf-8')}"

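Note: the two variants are not equivalent: the removed code fell back to the username before the literal `a k`, while the restored one-liner never consults the username. A small sketch contrasting the two; the helper names are hypothetical:

def avatar_name_verbose(name: str, username: str) -> str:
    # Removed variant: display name, then username, then the literal "a k".
    name = name.strip()
    if name == "":
        name = username.strip()
    if name == "":
        name = "a k"
    return name


def avatar_name_terse(name: str) -> str:
    # Restored variant: display name or "a k"; the username is never used.
    return name if name.strip() != "" else "a k"


assert avatar_name_verbose("", "jdoe") == "jdoe"
assert avatar_name_terse("") == "a k"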
@@ -1,6 +1,4 @@
 """authentik core config loader"""
-import base64
-import json
 import os
 from collections.abc import Mapping
 from contextlib import contextmanager
@@ -24,26 +22,6 @@ SEARCH_PATHS = ["authentik/lib/default.yml", "/etc/authentik/config.yml", ""] +
 ENV_PREFIX = "AUTHENTIK"
 ENVIRONMENT = os.getenv(f"{ENV_PREFIX}_ENV", "local")

-REDIS_ENV_KEYS = [
-    f"{ENV_PREFIX}_REDIS__HOST",
-    f"{ENV_PREFIX}_REDIS__PORT",
-    f"{ENV_PREFIX}_REDIS__DB",
-    f"{ENV_PREFIX}_REDIS__USERNAME",
-    f"{ENV_PREFIX}_REDIS__PASSWORD",
-    f"{ENV_PREFIX}_REDIS__TLS",
-    f"{ENV_PREFIX}_REDIS__TLS_REQS",
-]
-
-DEPRECATIONS = {
-    "geoip": "events.context_processors.geoip",
-    "redis.broker_url": "broker.url",
-    "redis.broker_transport_options": "broker.transport_options",
-    "redis.cache_timeout": "cache.timeout",
-    "redis.cache_timeout_flows": "cache.timeout_flows",
-    "redis.cache_timeout_policies": "cache.timeout_policies",
-    "redis.cache_timeout_reputation": "cache.timeout_reputation",
-}
-

 def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
     """Recursively walk through `root`, checking each part of `path` separated by `sep`.
@@ -103,18 +81,12 @@ class AttrEncoder(JSONEncoder):
         return super().default(o)


-class UNSET:
-    """Used to test whether configuration key has not been set."""
-
-
 class ConfigLoader:
     """Search through SEARCH_PATHS and load configuration. Environment variables starting with
     `ENV_PREFIX` are also applied.

     A variable like AUTHENTIK_POSTGRESQL__HOST would translate to postgresql.host"""

-    deprecations: dict[tuple[str, str], str] = {}
-
     def __init__(self, **kwargs):
         super().__init__()
         self.__config = {}
@@ -141,38 +113,6 @@ class ConfigLoader:
             self.update_from_file(env_file)
         self.update_from_env()
         self.update(self.__config, kwargs)
-        self.deprecations = self.check_deprecations()
-
-    def check_deprecations(self) -> dict[str, str]:
-        """Warn if any deprecated configuration options are used"""
-
-        def _pop_deprecated_key(current_obj, dot_parts, index):
-            """Recursive function to remove deprecated keys in configuration"""
-            dot_part = dot_parts[index]
-            if index == len(dot_parts) - 1:
-                return current_obj.pop(dot_part)
-            value = _pop_deprecated_key(current_obj[dot_part], dot_parts, index + 1)
-            if not current_obj[dot_part]:
-                current_obj.pop(dot_part)
-            return value
-
-        deprecation_replacements = {}
-        for deprecation, replacement in DEPRECATIONS.items():
-            if self.get(deprecation, default=UNSET) is UNSET:
-                continue
-            message = (
-                f"'{deprecation}' has been deprecated in favor of '{replacement}'! "
-                + "Please update your configuration."
-            )
-            self.log(
-                "warning",
-                message,
-            )
-            deprecation_replacements[(deprecation, replacement)] = message
-
-            deprecated_attr = _pop_deprecated_key(self.__config, deprecation.split("."), 0)
-            self.set(replacement, deprecated_attr)
-        return deprecation_replacements
-
     def log(self, level: str, message: str, **kwargs):
         """Custom Log method, we want to ensure ConfigLoader always logs JSON even when
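Note: the removed `check_deprecations` leans on a recursive pop that prunes emptied parents, so rewriting `redis.cache_timeout_flows` to `cache.timeout_flows` leaves no hollow `redis: {}` behind. A self-contained sketch of that helper:

def pop_deprecated_key(current_obj: dict, dot_parts: list[str], index: int = 0):
    """Pop a dotted key out of a nested dict, pruning parents that become empty."""
    dot_part = dot_parts[index]
    if index == len(dot_parts) - 1:
        return current_obj.pop(dot_part)
    value = pop_deprecated_key(current_obj[dot_part], dot_parts, index + 1)
    if not current_obj[dot_part]:
        current_obj.pop(dot_part)
    return value


config = {"redis": {"cache_timeout_flows": "32m"}}
value = pop_deprecated_key(config, "redis.cache_timeout_flows".split("."))
assert value == "32m"
assert config == {}  # the emptied "redis" parent was pruned as well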
@@ -240,10 +180,6 @@ class ConfigLoader:
                 error=str(exc),
             )

-    def update_from_dict(self, update: dict):
-        """Update config from dict"""
-        self.__config.update(update)
-
     def update_from_env(self):
         """Check environment variables"""
         outer = {}
@@ -252,13 +188,19 @@ class ConfigLoader:
             if not key.startswith(ENV_PREFIX):
                 continue
             relative_key = key.replace(f"{ENV_PREFIX}_", "", 1).replace("__", ".").lower()
+            # Recursively convert path from a.b.c into outer[a][b][c]
+            current_obj = outer
+            dot_parts = relative_key.split(".")
+            for dot_part in dot_parts[:-1]:
+                if dot_part not in current_obj:
+                    current_obj[dot_part] = {}
+                current_obj = current_obj[dot_part]
             # Check if the value is json, and try to load it
             try:
                 value = loads(value)
             except JSONDecodeError:
                 pass
-            attr_value = Attr(value, Attr.Source.ENV, relative_key)
-            set_path_in_dict(outer, relative_key, attr_value)
+            current_obj[dot_parts[-1]] = Attr(value, Attr.Source.ENV, key)
             idx += 1
         if idx > 0:
             self.log("debug", "Loaded environment variables", count=idx)
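Note: both sides expand `AUTHENTIK_A__B=value` into nested keys; the restored variant walks the dict by hand instead of going through a `set_path_in_dict` helper. A runnable sketch of that walk, keeping values as plain strings (the real loader additionally attempts a JSON parse and wraps each value in `Attr`):

def env_to_nested(env: dict[str, str], prefix: str = "AUTHENTIK") -> dict:
    outer: dict = {}
    for key, value in env.items():
        if not key.startswith(prefix):
            continue
        relative_key = key.replace(f"{prefix}_", "", 1).replace("__", ".").lower()
        # Walk/create outer[a][b] for a key a.b.c, then assign at the leaf.
        current_obj = outer
        dot_parts = relative_key.split(".")
        for dot_part in dot_parts[:-1]:
            current_obj = current_obj.setdefault(dot_part, {})
        current_obj[dot_parts[-1]] = value
    return outer


nested = env_to_nested({"AUTHENTIK_POSTGRESQL__HOST": "db.local"})
assert nested == {"postgresql": {"host": "db.local"}}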
@@ -299,28 +241,9 @@ class ConfigLoader:
         """Wrapper for get that converts value into boolean"""
         return str(self.get(path, default)).lower() == "true"

-    def get_dict_from_b64_json(self, path: str, default=None) -> dict:
-        """Wrapper for get that converts value from Base64 encoded string into dictionary"""
-        config_value = self.get(path)
-        if config_value is None:
-            return {}
-        try:
-            b64decoded_str = base64.b64decode(config_value).decode("utf-8")
-            b64decoded_str = b64decoded_str.strip().lstrip("{").rstrip("}")
-            b64decoded_str = "{" + b64decoded_str + "}"
-            return json.loads(b64decoded_str)
-        except (JSONDecodeError, TypeError, ValueError) as exc:
-            self.log(
-                "warning",
-                f"Ignored invalid configuration for '{path}' due to exception: {str(exc)}",
-            )
-            return default if isinstance(default, dict) else {}
-
     def set(self, path: str, value: Any, sep="."):
         """Set value using same syntax as get()"""
-        if not isinstance(value, Attr):
-            value = Attr(value)
-        set_path_in_dict(self.raw, path, value, sep=sep)
+        set_path_in_dict(self.raw, path, Attr(value), sep=sep)


 CONFIG = ConfigLoader()
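Note: the removed `get_dict_from_b64_json` deliberately tolerates payloads missing their outer braces, which is exactly what the deleted `test_get_dict_from_b64_json*` tests further down exercised. A standalone sketch of the normalization, minus the logging and default fallback:

import base64
import json


def dict_from_b64_json(config_value: str) -> dict:
    # Decode, strip any outer braces, then re-add exactly one pair, so both
    # '{"foo": "bar"}' and '"foo": "bar"' payloads parse to the same dict.
    decoded = base64.b64decode(config_value).decode("utf-8")
    decoded = decoded.strip().lstrip("{").rstrip("}")
    return json.loads("{" + decoded + "}")


with_braces = base64.b64encode(b' { "foo": "bar" } ').decode()
without_braces = base64.b64encode(b' "foo": "bar" ').decode()
assert dict_from_b64_json(with_braces) == {"foo": "bar"}
assert dict_from_b64_json(without_braces) == {"foo": "bar"}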
@@ -8,8 +8,6 @@ postgresql:
   password: "env://POSTGRES_PASSWORD"
-  use_pgbouncer: false
-  use_pgpool: false
   test:
     name: test_authentik

 listen:
   listen_http: 0.0.0.0:9000
@@ -30,34 +28,17 @@ listen:
 redis:
   host: localhost
   port: 6379
-  db: 0
-  username: ""
   password: ""
   tls: false
   tls_reqs: "none"
-
-# broker:
-#   url: ""
-#   transport_options: ""
-
-cache:
-  # url: ""
-  timeout: 300
-  timeout_flows: 300
-  timeout_policies: 300
-  timeout_reputation: 300
-
-# channel:
-#   url: ""
-
-# result_backend:
-#   url: ""
+  db: 0
+  cache_timeout: 300
+  cache_timeout_flows: 300
+  cache_timeout_policies: 300
+  cache_timeout_reputation: 300

 paths:
-  media: /data/media
-  cert_discovery: /data/certs
-  email_templates: /data/email-templates
-  blueprints: /ak-root/blueprints
+  media: ./media

 debug: false
 remote_debug: false
@@ -81,6 +62,7 @@ email:
   use_ssl: false
   timeout: 10
   from: authentik@localhost
+  template_dir: /templates

 throttle:
   providers:
@@ -110,10 +92,7 @@ cookie_domain: null
 disable_update_check: false
 disable_startup_analytics: false
 avatars: env://AUTHENTIK_AUTHENTIK__AVATARS?gravatar,initials
-events:
-  context_processors:
-    geoip: "/data/geoip/GeoLite2-City.mmdb"
-    asn: "/data/geoip/GeoLite2-ASN.mmdb"
+geoip: "/geoip/GeoLite2-City.mmdb"

 footer_links: []

@@ -122,9 +101,12 @@ default_user_change_email: false
 default_user_change_username: false

 gdpr_compliance: true
+cert_discovery_dir: /certs
 default_token_length: 60
 impersonation: true

+blueprints_dir: /blueprints
+
 web:
   # No default here as it's set dynamically
   # workers: 2
@@ -1,32 +1,20 @@
 """Test config loader"""
-import base64
-from json import dumps
 from os import chmod, environ, unlink, write
 from tempfile import mkstemp
-from unittest import mock

 from django.conf import ImproperlyConfigured
 from django.test import TestCase

-from authentik.lib.config import ENV_PREFIX, UNSET, Attr, AttrEncoder, ConfigLoader
+from authentik.lib.config import ENV_PREFIX, ConfigLoader


 class TestConfig(TestCase):
     """Test config loader"""

-    check_deprecations_env_vars = {
-        ENV_PREFIX + "_REDIS__BROKER_URL": "redis://myredis:8327/43",
-        ENV_PREFIX + "_REDIS__BROKER_TRANSPORT_OPTIONS": "bWFzdGVybmFtZT1teW1hc3Rlcg==",
-        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT": "124s",
-        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_FLOWS": "32m",
-        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_POLICIES": "3920ns",
-        ENV_PREFIX + "_REDIS__CACHE_TIMEOUT_REPUTATION": "298382us",
-    }
-
-    @mock.patch.dict(environ, {ENV_PREFIX + "_test__test": "bar"})
     def test_env(self):
         """Test simple instance"""
         config = ConfigLoader()
+        environ[ENV_PREFIX + "_test__test"] = "bar"
         config.update_from_env()
         self.assertEqual(config.get("test.test"), "bar")
@@ -39,20 +27,12 @@ class TestConfig(TestCase):
-        self.assertEqual(config.get("foo.bar"), "baz")
+        self.assertEqual(config.get("foo.bar"), "bar")

-    @mock.patch.dict(environ, {"foo": "bar"})
     def test_uri_env(self):
         """Test URI parsing (environment)"""
         config = ConfigLoader()
-        foo_uri = "env://foo"
-        foo_parsed = config.parse_uri(foo_uri)
-        self.assertEqual(foo_parsed.value, "bar")
-        self.assertEqual(foo_parsed.source_type, Attr.Source.URI)
-        self.assertEqual(foo_parsed.source, foo_uri)
-        foo_bar_uri = "env://foo?bar"
-        foo_bar_parsed = config.parse_uri(foo_bar_uri)
-        self.assertEqual(foo_bar_parsed.value, "bar")
-        self.assertEqual(foo_bar_parsed.source_type, Attr.Source.URI)
-        self.assertEqual(foo_bar_parsed.source, foo_bar_uri)
+        environ["foo"] = "bar"
+        self.assertEqual(config.parse_uri("env://foo").value, "bar")
+        self.assertEqual(config.parse_uri("env://foo?bar").value, "bar")

     def test_uri_file(self):
         """Test URI parsing (file load)"""
@@ -111,60 +91,3 @@ class TestConfig(TestCase):
         config = ConfigLoader()
         config.set("foo", "bar")
         self.assertEqual(config.get_int("foo", 1234), 1234)
-
-    def test_get_dict_from_b64_json(self):
-        """Test get_dict_from_b64_json"""
-        config = ConfigLoader()
-        test_value = ' { "foo": "bar" } '.encode("utf-8")
-        b64_value = base64.b64encode(test_value)
-        config.set("foo", b64_value)
-        self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
-
-    def test_get_dict_from_b64_json_missing_brackets(self):
-        """Test get_dict_from_b64_json with missing brackets"""
-        config = ConfigLoader()
-        test_value = ' "foo": "bar" '.encode("utf-8")
-        b64_value = base64.b64encode(test_value)
-        config.set("foo", b64_value)
-        self.assertEqual(config.get_dict_from_b64_json("foo"), {"foo": "bar"})
-
-    def test_get_dict_from_b64_json_invalid(self):
-        """Test get_dict_from_b64_json with invalid value"""
-        config = ConfigLoader()
-        config.set("foo", "bar")
-        self.assertEqual(config.get_dict_from_b64_json("foo"), {})
-
-    def test_attr_json_encoder(self):
-        """Test AttrEncoder"""
-        test_attr = Attr("foo", Attr.Source.ENV, "AUTHENTIK_REDIS__USERNAME")
-        json_attr = dumps(test_attr, indent=4, cls=AttrEncoder)
-        self.assertEqual(json_attr, '"foo"')
-
-    def test_attr_json_encoder_no_attr(self):
-        """Test AttrEncoder if no Attr is passed"""
-
-        class Test:
-            """Non Attr class"""
-
-        with self.assertRaises(TypeError):
-            test_obj = Test()
-            dumps(test_obj, indent=4, cls=AttrEncoder)
-
-    @mock.patch.dict(environ, check_deprecations_env_vars)
-    def test_check_deprecations(self):
-        """Test config key re-write for deprecated env vars"""
-        config = ConfigLoader()
-        config.update_from_env()
-        config.check_deprecations()
-        self.assertEqual(config.get("redis.broker_url", UNSET), UNSET)
-        self.assertEqual(config.get("redis.broker_transport_options", UNSET), UNSET)
-        self.assertEqual(config.get("redis.cache_timeout", UNSET), UNSET)
-        self.assertEqual(config.get("redis.cache_timeout_flows", UNSET), UNSET)
-        self.assertEqual(config.get("redis.cache_timeout_policies", UNSET), UNSET)
-        self.assertEqual(config.get("redis.cache_timeout_reputation", UNSET), UNSET)
-        self.assertEqual(config.get("broker.url"), "redis://myredis:8327/43")
-        self.assertEqual(config.get("broker.transport_options"), "bWFzdGVybmFtZT1teW1hc3Rlcg==")
-        self.assertEqual(config.get("cache.timeout"), "124s")
-        self.assertEqual(config.get("cache.timeout_flows"), "32m")
-        self.assertEqual(config.get("cache.timeout_policies"), "3920ns")
-        self.assertEqual(config.get("cache.timeout_reputation"), "298382us")
@@ -3,8 +3,8 @@ from django.test import RequestFactory, TestCase

 from authentik.core.models import Token, TokenIntents, UserTypes
 from authentik.core.tests.utils import create_test_admin_user
+from authentik.lib.utils.http import OUTPOST_REMOTE_IP_HEADER, OUTPOST_TOKEN_HEADER, get_client_ip
 from authentik.lib.views import bad_request_message
-from authentik.root.middleware import ClientIPMiddleware


 class TestHTTP(TestCase):
@@ -22,12 +22,12 @@ class TestHTTP(TestCase):
     def test_normal(self):
         """Test normal request"""
         request = self.factory.get("/")
-        self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
+        self.assertEqual(get_client_ip(request), "127.0.0.1")

     def test_forward_for(self):
         """Test x-forwarded-for request"""
         request = self.factory.get("/", HTTP_X_FORWARDED_FOR="127.0.0.2")
-        self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.2")
+        self.assertEqual(get_client_ip(request), "127.0.0.2")

     def test_fake_outpost(self):
         """Test faked IP which is overridden by an outpost"""
@@ -38,28 +38,28 @@ class TestHTTP(TestCase):
         request = self.factory.get(
             "/",
             **{
-                ClientIPMiddleware.outpost_remote_ip_header: "1.2.3.4",
-                ClientIPMiddleware.outpost_token_header: "abc",
+                OUTPOST_REMOTE_IP_HEADER: "1.2.3.4",
+                OUTPOST_TOKEN_HEADER: "abc",
             },
         )
-        self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
+        self.assertEqual(get_client_ip(request), "127.0.0.1")
         # Invalid, user doesn't have permissions
         request = self.factory.get(
             "/",
             **{
-                ClientIPMiddleware.outpost_remote_ip_header: "1.2.3.4",
-                ClientIPMiddleware.outpost_token_header: token.key,
+                OUTPOST_REMOTE_IP_HEADER: "1.2.3.4",
+                OUTPOST_TOKEN_HEADER: token.key,
             },
         )
-        self.assertEqual(ClientIPMiddleware.get_client_ip(request), "127.0.0.1")
+        self.assertEqual(get_client_ip(request), "127.0.0.1")
         # Valid
         self.user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
         self.user.save()
         request = self.factory.get(
             "/",
             **{
-                ClientIPMiddleware.outpost_remote_ip_header: "1.2.3.4",
-                ClientIPMiddleware.outpost_token_header: token.key,
+                OUTPOST_REMOTE_IP_HEADER: "1.2.3.4",
+                OUTPOST_TOKEN_HEADER: token.key,
             },
         )
-        self.assertEqual(ClientIPMiddleware.get_client_ip(request), "1.2.3.4")
+        self.assertEqual(get_client_ip(request), "1.2.3.4")
@@ -1,39 +1,89 @@
 """http helpers"""
-from uuid import uuid4
+from typing import Any, Optional

-from django.conf import settings
-from requests.sessions import PreparedRequest, Session
+from django.http import HttpRequest
+from requests.sessions import Session
+from sentry_sdk.hub import Hub
 from structlog.stdlib import get_logger

 from authentik import get_full_version

+OUTPOST_REMOTE_IP_HEADER = "HTTP_X_AUTHENTIK_REMOTE_IP"
+OUTPOST_TOKEN_HEADER = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN"  # nosec
+DEFAULT_IP = "255.255.255.255"
 LOGGER = get_logger()


+def _get_client_ip_from_meta(meta: dict[str, Any]) -> str:
+    """Attempt to get the client's IP by checking common HTTP Headers.
+    Returns none if no IP Could be found
+
+    No additional validation is done here as requests are expected to only arrive here
+    via the go proxy, which deals with validating these headers for us"""
+    headers = (
+        "HTTP_X_FORWARDED_FOR",
+        "REMOTE_ADDR",
+    )
+    for _header in headers:
+        if _header in meta:
+            ips: list[str] = meta.get(_header).split(",")
+            return ips[0].strip()
+    return DEFAULT_IP
+
+
+def _get_outpost_override_ip(request: HttpRequest) -> Optional[str]:
+    """Get the actual remote IP when set by an outpost. Only
+    allowed when the request is authenticated, by an outpost internal service account"""
+    from authentik.core.models import Token, TokenIntents, UserTypes
+
+    if OUTPOST_REMOTE_IP_HEADER not in request.META or OUTPOST_TOKEN_HEADER not in request.META:
+        return None
+    fake_ip = request.META[OUTPOST_REMOTE_IP_HEADER]
+    token = (
+        Token.filter_not_expired(
+            key=request.META.get(OUTPOST_TOKEN_HEADER), intent=TokenIntents.INTENT_API
+        )
+        .select_related("user")
+        .first()
+    )
+    if not token:
+        LOGGER.warning("Attempted remote-ip override without token", fake_ip=fake_ip)
+        return None
+    user = token.user
+    if user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
+        LOGGER.warning(
+            "Remote-IP override: user doesn't have permission",
+            user=user,
+            fake_ip=fake_ip,
+        )
+        return None
+    # Update sentry scope to include correct IP
+    user = Hub.current.scope._user
+    if not user:
+        user = {}
+    user["ip_address"] = fake_ip
+    Hub.current.scope.set_user(user)
+    return fake_ip
+
+
+def get_client_ip(request: Optional[HttpRequest]) -> str:
+    """Attempt to get the client's IP by checking common HTTP Headers.
+    Returns none if no IP Could be found"""
+    if not request:
+        return DEFAULT_IP
+    override = _get_outpost_override_ip(request)
+    if override:
+        return override
+    return _get_client_ip_from_meta(request.META)
+
+
 def authentik_user_agent() -> str:
     """Get a common user agent"""
     return f"authentik@{get_full_version()}"


-class DebugSession(Session):
-    """requests session which logs http requests and responses"""
-
-    def send(self, req: PreparedRequest, *args, **kwargs):
-        request_id = str(uuid4())
-        LOGGER.debug("HTTP request sent", uid=request_id, path=req.path_url, headers=req.headers)
-        resp = super().send(req, *args, **kwargs)
-        LOGGER.debug(
-            "HTTP response received",
-            uid=request_id,
-            status=resp.status_code,
-            body=resp.text,
-            headers=resp.headers,
-        )
-        return resp
-
-
 def get_http_session() -> Session:
     """Get a requests session with common headers"""
-    session = DebugSession() if settings.DEBUG else Session()
+    session = Session()
     session.headers["User-Agent"] = authentik_user_agent()
     return session
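Note: the restored `get_client_ip` resolves the address in a fixed order: outpost override first, then the first hop of `X-Forwarded-For`, then `REMOTE_ADDR`, then a sentinel. A sketch of the header-parsing step alone; the override path needs a database token lookup and is omitted here:

DEFAULT_IP = "255.255.255.255"


def client_ip_from_meta(meta: dict[str, str]) -> str:
    # The first hop of X-Forwarded-For wins, then REMOTE_ADDR, then the sentinel.
    for header in ("HTTP_X_FORWARDED_FOR", "REMOTE_ADDR"):
        if header in meta:
            return meta[header].split(",")[0].strip()
    return DEFAULT_IP


assert client_ip_from_meta({"HTTP_X_FORWARDED_FOR": "10.0.0.1, 172.16.0.1"}) == "10.0.0.1"
assert client_ip_from_meta({"REMOTE_ADDR": "127.0.0.1"}) == "127.0.0.1"
assert client_ip_from_meta({}) == DEFAULT_IP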
@@ -9,16 +9,16 @@ from rest_framework.fields import BooleanField, CharField, DateTimeField
 from rest_framework.relations import PrimaryKeyRelatedField
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.serializers import ModelSerializer, ValidationError
+from rest_framework.serializers import JSONField, ModelSerializer, ValidationError
 from rest_framework.viewsets import ModelViewSet

 from authentik import get_build_hash
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Provider
 from authentik.outposts.api.service_connections import ServiceConnectionSerializer
-from authentik.outposts.apps import MANAGED_OUTPOST
+from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
 from authentik.outposts.models import (
     Outpost,
     OutpostConfig,
@@ -34,7 +34,7 @@ from authentik.providers.radius.models import RadiusProvider
 class OutpostSerializer(ModelSerializer):
     """Outpost Serializer"""

-    config = JSONDictField(source="_config")
+    config = JSONField(validators=[is_dict], source="_config")
     # Need to set allow_empty=True for the embedded outpost with no providers
     # is checked for other providers in the API Viewset
     providers = PrimaryKeyRelatedField(
@@ -47,6 +47,16 @@ class OutpostSerializer(ModelSerializer):
         source="service_connection", read_only=True
     )

+    def validate_name(self, name: str) -> str:
+        """Validate name (especially for embedded outpost)"""
+        if not self.instance:
+            return name
+        if self.instance.managed == MANAGED_OUTPOST and name != MANAGED_OUTPOST_NAME:
+            raise ValidationError("Embedded outpost's name cannot be changed")
+        if self.instance.name == MANAGED_OUTPOST_NAME:
+            self.instance.managed = MANAGED_OUTPOST
+        return name
+
     def validate_providers(self, providers: list[Provider]) -> list[Provider]:
         """Check that all providers match the type of the outpost"""
         type_map = {
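Note: `validate_name` enforces two rules: the embedded outpost cannot be renamed, and an outpost that still carries the embedded name but lost its `managed` marker is re-adopted on save. A sketch of the rejection rule only, with `ValueError` standing in for DRF's `ValidationError`:

MANAGED_OUTPOST = "goauthentik.io/outposts/embedded"
MANAGED_OUTPOST_NAME = "authentik Embedded Outpost"


def validate_outpost_name(managed: str, new_name: str) -> str:
    # Renaming the embedded (managed) outpost is rejected outright.
    if managed == MANAGED_OUTPOST and new_name != MANAGED_OUTPOST_NAME:
        raise ValueError("Embedded outpost's name cannot be changed")
    return new_name


validate_outpost_name(MANAGED_OUTPOST, MANAGED_OUTPOST_NAME)  # accepted
try:
    validate_outpost_name(MANAGED_OUTPOST, "foo")
except ValueError:
    pass  # rejected, as the new test_managed_name_change expects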
@@ -95,7 +105,7 @@ class OutpostSerializer(ModelSerializer):
 class OutpostDefaultConfigSerializer(PassiveSerializer):
     """Global default outpost config"""

-    config = JSONDictField(read_only=True)
+    config = JSONField(read_only=True)


 class OutpostHealthSerializer(PassiveSerializer):
@@ -15,6 +15,7 @@ GAUGE_OUTPOSTS_LAST_UPDATE = Gauge(
     ["outpost", "uid", "version"],
 )
 MANAGED_OUTPOST = "goauthentik.io/outposts/embedded"
+MANAGED_OUTPOST_NAME = "authentik Embedded Outpost"


 class AuthentikOutpostConfig(ManagedAppConfig):
@@ -35,14 +36,17 @@ class AuthentikOutpostConfig(ManagedAppConfig):
             DockerServiceConnection,
             KubernetesServiceConnection,
             Outpost,
-            OutpostConfig,
             OutpostType,
         )

+        if outpost := Outpost.objects.filter(name=MANAGED_OUTPOST_NAME, managed="").first():
+            outpost.managed = MANAGED_OUTPOST
+            outpost.save()
+            return
         outpost, updated = Outpost.objects.update_or_create(
             defaults={
-                "name": "authentik Embedded Outpost",
                 "type": OutpostType.PROXY,
+                "name": MANAGED_OUTPOST_NAME,
             },
             managed=MANAGED_OUTPOST,
         )
@@ -51,10 +55,4 @@ class AuthentikOutpostConfig(ManagedAppConfig):
                 outpost.service_connection = KubernetesServiceConnection.objects.first()
             elif DockerServiceConnection.objects.exists():
                 outpost.service_connection = DockerServiceConnection.objects.first()
-            outpost.config = OutpostConfig(
-                kubernetes_disabled_components=[
-                    "deployment",
-                    "secret",
-                ]
-            )
             outpost.save()
@@ -93,7 +93,7 @@ class OutpostConsumer(AuthJsonConsumer):
                 expected=self.outpost.config.kubernetes_replicas,
             ).dec()

-    def receive_json(self, content: Data, **kwargs):
+    def receive_json(self, content: Data):
         msg = from_dict(WebsocketMessage, content)
         uid = msg.args.get("uuid", self.channel_name)
         self.last_uid = uid
@@ -43,6 +43,10 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         self.api = AppsV1Api(controller.client)
         self.outpost = self.controller.outpost

+    @property
+    def noop(self) -> bool:
+        return self.is_embedded
+
     @staticmethod
     def reconciler_name() -> str:
         return "deployment"
@@ -24,6 +24,10 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)

+    @property
+    def noop(self) -> bool:
+        return self.is_embedded
+
     @staticmethod
     def reconciler_name() -> str:
         return "secret"
@@ -77,7 +77,10 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe

     @property
     def noop(self) -> bool:
-        return (not self._crd_exists()) or (self.is_embedded)
+        if not self._crd_exists():
+            self.logger.debug("CRD doesn't exist")
+            return True
+        return self.is_embedded

     def _crd_exists(self) -> bool:
         """Check if the Prometheus ServiceMonitor exists"""
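Note: all three reconcilers now share one short-circuit: a truthy `noop` makes the reconciler skip its work for the embedded outpost (and, for the ServiceMonitor, when the CRD is absent). A toy sketch of the guard; authentik's base class drives this differently, so this only illustrates the pattern:

class ReconcilerSketch:
    """Toy reconciler showing the noop short-circuit added on this branch."""

    def __init__(self, is_embedded: bool) -> None:
        self.is_embedded = is_embedded
        self.applied = False

    @property
    def noop(self) -> bool:
        # The embedded outpost runs inside the main deployment, so creating
        # a Deployment or Secret for it would be redundant.
        return self.is_embedded

    def reconcile(self) -> None:
        if self.noop:
            return
        self.applied = True


assert ReconcilerSketch(is_embedded=True).noop
external = ReconcilerSketch(is_embedded=False)
external.reconcile()
assert external.applied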
@@ -2,11 +2,13 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase

+from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import PropertyMapping
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.lib.generators import generate_id
 from authentik.outposts.api.outposts import OutpostSerializer
-from authentik.outposts.models import OutpostType, default_outpost_config
+from authentik.outposts.apps import MANAGED_OUTPOST
+from authentik.outposts.models import Outpost, OutpostType, default_outpost_config
 from authentik.providers.ldap.models import LDAPProvider
 from authentik.providers.proxy.models import ProxyProvider
@@ -22,7 +24,36 @@ class TestOutpostServiceConnectionsAPI(APITestCase):
         self.user = create_test_admin_user()
         self.client.force_login(self.user)

+    @reconcile_app("authentik_outposts")
+    def test_managed_name_change(self):
+        """Test name change for embedded outpost"""
+        embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
+        self.assertIsNotNone(embedded_outpost)
+        response = self.client.patch(
+            reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
+            {"name": "foo"},
+        )
+        self.assertEqual(response.status_code, 400)
+        self.assertJSONEqual(
+            response.content, {"name": ["Embedded outpost's name cannot be changed"]}
+        )
+
+    @reconcile_app("authentik_outposts")
+    def test_managed_without_managed(self):
+        """Test name change for embedded outpost"""
+        embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
+        self.assertIsNotNone(embedded_outpost)
+        embedded_outpost.managed = ""
+        embedded_outpost.save()
+        response = self.client.patch(
+            reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
+            {"name": "foo"},
+        )
+        self.assertEqual(response.status_code, 200)
+        embedded_outpost.refresh_from_db()
+        self.assertEqual(embedded_outpost.managed, MANAGED_OUTPOST)
+
-    def test_outpost_validaton(self):
+    def test_outpost_validation(self):
         """Test Outpost validation"""
         valid = OutpostSerializer(
             data={
@@ -1,8 +1,8 @@
 """Serializer for policy execution"""
-from rest_framework.fields import BooleanField, CharField, DictField, ListField
+from rest_framework.fields import BooleanField, CharField, DictField, JSONField, ListField
 from rest_framework.relations import PrimaryKeyRelatedField

-from authentik.core.api.utils import JSONDictField, PassiveSerializer
+from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import User


@@ -10,7 +10,7 @@ class PolicyTestSerializer(PassiveSerializer):
     """Test policy execution for a user with context"""

     user = PrimaryKeyRelatedField(queryset=User.objects.all())
-    context = JSONDictField(required=False)
+    context = JSONField(required=False, validators=[is_dict])


 class PolicyTestResultSerializer(PassiveSerializer):
@@ -7,9 +7,9 @@ from structlog.stdlib import get_logger

 from authentik.flows.planner import PLAN_CONTEXT_SSO
 from authentik.lib.expression.evaluator import BaseEvaluator
+from authentik.lib.utils.http import get_client_ip
 from authentik.policies.exceptions import PolicyException
 from authentik.policies.types import PolicyRequest, PolicyResult
-from authentik.root.middleware import ClientIPMiddleware

 LOGGER = get_logger()
 if TYPE_CHECKING:
@@ -49,7 +49,7 @@ class PolicyEvaluator(BaseEvaluator):
         """Update context based on http request"""
         # update website/docs/expressions/_objects.md
         # update website/docs/expressions/_functions.md
-        self._context["ak_client_ip"] = ip_address(ClientIPMiddleware.get_client_ip(request))
+        self._context["ak_client_ip"] = ip_address(get_client_ip(request))
         self._context["http_request"] = request

     def handle_error(self, exc: Exception, expression_source: str):
@@ -20,7 +20,7 @@ from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
 LOGGER = get_logger()

 FORK_CTX = get_context("fork")
-CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_policies")
+CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_policies")
 PROCESS_CLASS = FORK_CTX.Process


@@ -47,7 +47,6 @@ class ReputationSerializer(ModelSerializer):
             "identifier",
             "ip",
             "ip_geo_data",
-            "ip_asn_data",
             "score",
             "updated",
         ]
@@ -1,17 +0,0 @@
-# Generated by Django 4.2.7 on 2023-12-05 22:20
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_policies_reputation", "0005_reputation_expires_reputation_expiring"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="reputation",
-            name="ip_asn_data",
-            field=models.JSONField(default=dict),
-        ),
-    ]
@@ -13,9 +13,9 @@ from structlog import get_logger
 from authentik.core.models import ExpiringModel
 from authentik.lib.config import CONFIG
 from authentik.lib.models import SerializerModel
+from authentik.lib.utils.http import get_client_ip
 from authentik.policies.models import Policy
 from authentik.policies.types import PolicyRequest, PolicyResult
-from authentik.root.middleware import ClientIPMiddleware

 LOGGER = get_logger()
 CACHE_KEY_PREFIX = "goauthentik.io/policies/reputation/scores/"
@@ -44,7 +44,7 @@ class ReputationPolicy(Policy):
         return "ak-policy-reputation-form"

     def passes(self, request: PolicyRequest) -> PolicyResult:
-        remote_ip = ClientIPMiddleware.get_client_ip(request.http_request)
+        remote_ip = get_client_ip(request.http_request)
         query = Q()
         if self.check_ip:
             query |= Q(ip=remote_ip)
@@ -76,7 +76,6 @@ class Reputation(ExpiringModel, SerializerModel):
     identifier = models.TextField()
     ip = models.GenericIPAddressField()
     ip_geo_data = models.JSONField(default=dict)
-    ip_asn_data = models.JSONField(default=dict)
     score = models.BigIntegerField(default=0)

     expires = models.DateTimeField(default=reputation_expiry)
@@ -7,18 +7,18 @@ from structlog.stdlib import get_logger

 from authentik.core.signals import login_failed
 from authentik.lib.config import CONFIG
+from authentik.lib.utils.http import get_client_ip
 from authentik.policies.reputation.models import CACHE_KEY_PREFIX
 from authentik.policies.reputation.tasks import save_reputation
-from authentik.root.middleware import ClientIPMiddleware
 from authentik.stages.identification.signals import identification_failed

 LOGGER = get_logger()
-CACHE_TIMEOUT = CONFIG.get_int("cache.timeout_reputation")
+CACHE_TIMEOUT = CONFIG.get_int("redis.cache_timeout_reputation")


 def update_score(request: HttpRequest, identifier: str, amount: int):
     """Update score for IP and User"""
-    remote_ip = ClientIPMiddleware.get_client_ip(request)
+    remote_ip = get_client_ip(request)

     try:
         # We only update the cache here, as its faster than writing to the DB
@@ -2,8 +2,7 @@
 from django.core.cache import cache
 from structlog.stdlib import get_logger

-from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
-from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
+from authentik.events.geo import GEOIP_READER
 from authentik.events.monitored_tasks import (
     MonitoredTask,
     TaskResult,
@@ -27,8 +26,7 @@ def save_reputation(self: MonitoredTask):
             ip=score["ip"],
             identifier=score["identifier"],
         )
-        rep.ip_geo_data = GEOIP_CONTEXT_PROCESSOR.city_dict(score["ip"]) or {}
-        rep.ip_asn_data = ASN_CONTEXT_PROCESSOR.asn_dict(score["ip"]) or {}
+        rep.ip_geo_data = GEOIP_READER.city_dict(score["ip"]) or {}
         rep.score = score["score"]
         objects_to_update.append(rep)
     Reputation.objects.bulk_update(objects_to_update, ["score", "ip_geo_data"])
@@ -8,7 +8,8 @@ from django.db.models import Model
 from django.http import HttpRequest
 from structlog.stdlib import get_logger

-from authentik.events.context_processors.base import get_context_processors
+from authentik.events.geo import GEOIP_READER
+from authentik.lib.utils.http import get_client_ip

 if TYPE_CHECKING:
     from authentik.core.models import User
@@ -38,8 +39,12 @@ class PolicyRequest:
     def set_http_request(self, request: HttpRequest):  # pragma: no cover
         """Load data from HTTP request, including geoip when enabled"""
         self.http_request = request
-        for processor in get_context_processors():
-            self.context.update(processor.enrich_context(request))
+        if not GEOIP_READER.enabled:
+            return
+        client_ip = get_client_ip(request)
+        if not client_ip:
+            return
+        self.context["geoip"] = GEOIP_READER.city(client_ip)

     @property
     def should_cache(self) -> bool:
@@ -85,6 +85,25 @@ class TestAuthorize(OAuthTestCase):
             )
             OAuthAuthorizationParams.from_request(request)

+    def test_blocked_redirect_uri(self):
+        """test missing/invalid redirect URI"""
+        OAuth2Provider.objects.create(
+            name=generate_id(),
+            client_id="test",
+            authorization_flow=create_test_flow(),
+            redirect_uris="data:local.invalid",
+        )
+        with self.assertRaises(RedirectUriError):
+            request = self.factory.get(
+                "/",
+                data={
+                    "response_type": "code",
+                    "client_id": "test",
+                    "redirect_uri": "data:localhost",
+                },
+            )
+            OAuthAuthorizationParams.from_request(request)
+
     def test_invalid_redirect_uri_empty(self):
         """test missing/invalid redirect URI"""
         provider = OAuth2Provider.objects.create(
@@ -75,6 +75,7 @@ PLAN_CONTEXT_PARAMS = "goauthentik.io/providers/oauth2/params"
 SESSION_KEY_LAST_LOGIN_UID = "authentik/providers/oauth2/last_login_uid"

 ALLOWED_PROMPT_PARAMS = {PROMPT_NONE, PROMPT_CONSENT, PROMPT_LOGIN}
+FORBIDDEN_URI_SCHEMES = {"javascript", "data", "vbscript"}


 @dataclass(slots=True)
@@ -175,6 +176,10 @@ class OAuthAuthorizationParams:
         self.check_scope()
         self.check_nonce()
         self.check_code_challenge()
+        if self.request:
+            raise AuthorizeError(
+                self.redirect_uri, "request_not_supported", self.grant_type, self.state
+            )

     def check_redirect_uri(self):
         """Redirect URI validation."""
@@ -212,10 +217,9 @@ class OAuthAuthorizationParams:
                 expected=allowed_redirect_urls,
             )
             raise RedirectUriError(self.redirect_uri, allowed_redirect_urls)
-        if self.request:
-            raise AuthorizeError(
-                self.redirect_uri, "request_not_supported", self.grant_type, self.state
-            )
+        # Check against forbidden schemes
+        if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:
+            raise RedirectUriError(self.redirect_uri, allowed_redirect_urls)

     def check_scope(self):
         """Ensure openid scope is set in Hybrid flows, or when requesting an id_token"""
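Note: the added check rejects dangerous pseudo-URL schemes on the redirect URI; the same set is reused by the token endpoint below. A runnable sketch; `urlparse` lowercases the scheme, so mixed-case variants are caught as well:

from urllib.parse import urlparse

FORBIDDEN_URI_SCHEMES = {"javascript", "data", "vbscript"}


def redirect_uri_allowed(redirect_uri: str) -> bool:
    # Scheme comparison only; the allow-list matching happens before this.
    return urlparse(redirect_uri).scheme not in FORBIDDEN_URI_SCHEMES


assert not redirect_uri_allowed("data:localhost")
assert not redirect_uri_allowed("JavaScript:alert(1)")
assert redirect_uri_allowed("https://app.example.com/callback")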
@@ -6,6 +6,7 @@ from hashlib import sha256
 from re import error as RegexError
 from re import fullmatch
 from typing import Any, Optional
+from urllib.parse import urlparse

 from django.http import HttpRequest, HttpResponse
 from django.utils import timezone
@@ -54,6 +55,7 @@ from authentik.providers.oauth2.models import (
     RefreshToken,
 )
 from authentik.providers.oauth2.utils import TokenResponse, cors_allow, extract_client_auth
+from authentik.providers.oauth2.views.authorize import FORBIDDEN_URI_SCHEMES
 from authentik.sources.oauth.models import OAuthSource
 from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS

@@ -205,6 +207,10 @@ class TokenParams:
                 ).from_http(request)
                 raise TokenError("invalid_client")

+        # Check against forbidden schemes
+        if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:
+            raise TokenError("invalid_request")
+
         self.authorization_code = AuthorizationCode.objects.filter(code=raw_code).first()
         if not self.authorization_code:
             LOGGER.warning("Code does not exist", code=raw_code)
@@ -2,7 +2,6 @@
 from django.utils.text import slugify
 from drf_spectacular.utils import OpenApiResponse, extend_schema
 from rest_framework.decorators import action
-from rest_framework.fields import BooleanField
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ModelViewSet
@@ -10,7 +9,6 @@ from rest_framework.viewsets import ModelViewSet
 from authentik.admin.api.tasks import TaskSerializer
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import PassiveSerializer
 from authentik.events.monitored_tasks import TaskInfo
 from authentik.providers.scim.models import SCIMProvider

@@ -39,13 +37,6 @@ class SCIMProviderSerializer(ProviderSerializer):
         extra_kwargs = {}


-class SCIMSyncStatusSerializer(PassiveSerializer):
-    """SCIM Provider sync status"""
-
-    is_running = BooleanField(read_only=True)
-    tasks = TaskSerializer(many=True, read_only=True)
-
-
 class SCIMProviderViewSet(UsedByMixin, ModelViewSet):
     """SCIMProvider Viewset"""

@@ -57,18 +48,15 @@ class SCIMProviderViewSet(UsedByMixin, ModelViewSet):

     @extend_schema(
         responses={
-            200: SCIMSyncStatusSerializer(),
+            200: TaskSerializer(),
             404: OpenApiResponse(description="Task not found"),
         }
     )
     @action(methods=["GET"], detail=True, pagination_class=None, filter_backends=[])
     def sync_status(self, request: Request, pk: int) -> Response:
         """Get provider's sync status"""
-        provider: SCIMProvider = self.get_object()
+        provider = self.get_object()
         task = TaskInfo.by_name(f"scim_sync:{slugify(provider.name)}")
-        tasks = [task] if task else []
-        status = {
-            "tasks": tasks,
-            "is_running": provider.sync_lock.locked(),
-        }
-        return Response(SCIMSyncStatusSerializer(status).data)
+        if not task:
+            return Response(status=404)
+        return Response(TaskSerializer(task).data)
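Note: the endpoint's response shape changes: instead of a `{tasks, is_running}` wrapper it returns the task serializer directly, with a bare 404 when no sync task has run yet. A trivial sketch of that branch:

from typing import Optional


def sync_status_response(task: Optional[dict]) -> tuple[int, Optional[dict]]:
    # No wrapper object any more; a missing task is a plain 404.
    if not task:
        return 404, None
    return 200, task


assert sync_status_response(None) == (404, None)
assert sync_status_response({"name": "scim_sync:provider"})[0] == 200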
@@ -1,3 +1,2 @@
 """SCIM constants"""
 PAGE_SIZE = 100
-PAGE_TIMEOUT = 60 * 60 * 0.5  # Half an hour
@@ -1,14 +1,11 @@
 """SCIM Provider models"""
-from django.core.cache import cache
 from django.db import models
 from django.db.models import QuerySet
 from django.utils.translation import gettext_lazy as _
 from guardian.shortcuts import get_anonymous_user
-from redis.lock import Lock
 from rest_framework.serializers import Serializer

 from authentik.core.models import BackchannelProvider, Group, PropertyMapping, User, UserTypes
-from authentik.providers.scim.clients import PAGE_TIMEOUT


 class SCIMProvider(BackchannelProvider):
@@ -30,15 +27,6 @@ class SCIMProvider(BackchannelProvider):
         help_text=_("Property mappings used for group creation/updating."),
     )

-    @property
-    def sync_lock(self) -> Lock:
-        """Redis lock for syncing SCIM to prevent multiple parallel syncs happening"""
-        return Lock(
-            cache.client.get_client(),
-            name=f"goauthentik.io/providers/scim/sync-{str(self.pk)}",
-            timeout=(60 * 60 * PAGE_TIMEOUT) * 3,
-        )
-
     def get_user_qs(self) -> QuerySet[User]:
         """Get queryset of all users with consistent ordering
         according to the provider's settings"""
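Note: the removed `sync_lock` builds a redis-py `Lock` keyed per provider so concurrent syncs can detect each other and bail out. A hedged sketch against redis-py directly; the localhost connection and the placeholder lock name are assumptions, and authentik takes the client from Django's cache backend instead:

from redis import Redis
from redis.lock import Lock

PAGE_TIMEOUT = 60 * 60 * 0.5  # half an hour, as in the removed constant

client = Redis(host="localhost", port=6379)
lock = Lock(
    client,
    name="goauthentik.io/providers/scim/sync-<provider-pk>",  # placeholder name
    timeout=(60 * 60 * PAGE_TIMEOUT) * 3,
)
if not lock.locked():  # the removed task skipped syncing while another run held this
    with lock:
        pass  # run the actual sync here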
@@ -12,7 +12,7 @@ from structlog.stdlib import get_logger
 from authentik.core.models import Group, User
 from authentik.events.monitored_tasks import MonitoredTask, TaskResult, TaskResultStatus
 from authentik.lib.utils.reflection import path_to_class
-from authentik.providers.scim.clients import PAGE_SIZE, PAGE_TIMEOUT
+from authentik.providers.scim.clients import PAGE_SIZE
 from authentik.providers.scim.clients.base import SCIMClient
 from authentik.providers.scim.clients.exceptions import SCIMRequestException, StopSync
 from authentik.providers.scim.clients.group import SCIMGroupClient
@@ -47,19 +47,12 @@ def scim_sync(self: MonitoredTask, provider_pk: int) -> None:
     ).first()
     if not provider:
         return
-    lock = provider.sync_lock
-    if lock.locked():
-        LOGGER.debug("SCIM sync locked, skipping task", source=provider.name)
-        return
     self.set_uid(slugify(provider.name))
     result = TaskResult(TaskResultStatus.SUCCESSFUL, [])
     result.messages.append(_("Starting full SCIM sync"))
     LOGGER.debug("Starting SCIM sync")
     users_paginator = Paginator(provider.get_user_qs(), PAGE_SIZE)
     groups_paginator = Paginator(provider.get_group_qs(), PAGE_SIZE)
-    self.soft_time_limit = self.time_limit = (
-        users_paginator.count + groups_paginator.count
-    ) * PAGE_TIMEOUT
     with allow_join_result():
         try:
             for page in users_paginator.page_range:
@@ -76,10 +69,7 @@ def scim_sync(self: MonitoredTask, provider_pk: int) -> None:
     self.set_status(result)


-@CELERY_APP.task(
-    soft_time_limit=PAGE_TIMEOUT,
-    task_time_limit=PAGE_TIMEOUT,
-)
+@CELERY_APP.task()
 def scim_sync_users(page: int, provider_pk: int):
     """Sync single or multiple users to SCIM"""
     messages = []
@@ -24,7 +24,10 @@ class ExtraRoleObjectPermissionSerializer(RoleObjectPermissionSerializer):

     def get_app_label_verbose(self, instance: GroupObjectPermission) -> str:
         """Get app label from permission's model"""
-        return apps.get_app_config(instance.content_type.app_label).verbose_name
+        try:
+            return apps.get_app_config(instance.content_type.app_label).verbose_name
+        except LookupError:
+            return instance.content_type.app_label

     def get_model_verbose(self, instance: GroupObjectPermission) -> str:
         """Get model label from permission's model"""
@@ -24,7 +24,10 @@ class ExtraUserObjectPermissionSerializer(UserObjectPermissionSerializer):

     def get_app_label_verbose(self, instance: UserObjectPermission) -> str:
         """Get app label from permission's model"""
-        return apps.get_app_config(instance.content_type.app_label).verbose_name
+        try:
+            return apps.get_app_config(instance.content_type.app_label).verbose_name
+        except LookupError:
+            return instance.content_type.app_label

     def get_model_verbose(self, instance: UserObjectPermission) -> str:
         """Get model label from permission's model"""
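Note: both RBAC serializers now degrade gracefully when a content type references an app that is no longer installed. A generic sketch of the fallback; `KeyError` is a subclass of `LookupError`, which is also what Django's app registry raises:

def app_label_verbose(installed_apps: dict[str, str], app_label: str) -> str:
    # Stale content types for uninstalled apps fall back to the raw label
    # instead of bubbling a LookupError up into the API response.
    try:
        return installed_apps[app_label]
    except LookupError:
        return app_label


apps = {"authentik_flows": "Flows"}
assert app_label_verbose(apps, "authentik_flows") == "Flows"
assert app_label_verbose(apps, "removed_app") == "removed_app"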
@@ -2,7 +2,7 @@
 from hashlib import sha512
 from time import time
 from timeit import default_timer
-from typing import Any, Callable, Optional
+from typing import Callable

 from django.conf import settings
 from django.contrib.sessions.backends.base import UpdateError
@@ -15,10 +15,9 @@ from django.middleware.csrf import CsrfViewMiddleware as UpstreamCsrfViewMiddlew
 from django.utils.cache import patch_vary_headers
 from django.utils.http import http_date
 from jwt import PyJWTError, decode, encode
-from sentry_sdk.hub import Hub
 from structlog.stdlib import get_logger

-from authentik.core.models import Token, TokenIntents, User, UserTypes
+from authentik.lib.utils.http import get_client_ip

 LOGGER = get_logger("authentik.asgi")
 ACR_AUTHENTIK_SESSION = "goauthentik.io/core/default"
@@ -56,7 +55,7 @@ class SessionMiddleware(UpstreamSessionMiddleware):
             pass
         return session_key

-    def process_request(self, request: HttpRequest):
+    def process_request(self, request):
         raw_session = request.COOKIES.get(settings.SESSION_COOKIE_NAME)
         session_key = SessionMiddleware.decode_session_key(raw_session)
         request.session = self.SessionStore(session_key)
@@ -157,111 +156,6 @@ class CsrfViewMiddleware(UpstreamCsrfViewMiddleware):
             patch_vary_headers(response, ("Cookie",))


-class ClientIPMiddleware:
-    """Set a "known-good" client IP on the request, by default based off of x-forwarded-for
-    which is set by the go proxy, but also allowing the remote IP to be overridden by an outpost
-    for protocols like LDAP"""
-
-    get_response: Callable[[HttpRequest], HttpResponse]
-    outpost_remote_ip_header = "HTTP_X_AUTHENTIK_REMOTE_IP"
-    outpost_token_header = "HTTP_X_AUTHENTIK_OUTPOST_TOKEN"  # nosec
-    default_ip = "255.255.255.255"
-
-    request_attr_client_ip = "client_ip"
-    request_attr_outpost_user = "outpost_user"
-
-    def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
-        self.get_response = get_response
-
-    def _get_client_ip_from_meta(self, meta: dict[str, Any]) -> str:
-        """Attempt to get the client's IP by checking common HTTP Headers.
-        Returns none if no IP Could be found
-
-        No additional validation is done here as requests are expected to only arrive here
-        via the go proxy, which deals with validating these headers for us"""
-        headers = (
-            "HTTP_X_FORWARDED_FOR",
-            "REMOTE_ADDR",
-        )
-        for _header in headers:
-            if _header in meta:
-                ips: list[str] = meta.get(_header).split(",")
-                return ips[0].strip()
-        return self.default_ip
-
-    # FIXME: this should probably not be in `root` but rather in a middleware in `outposts`
-    # but for now it's fine
-    def _get_outpost_override_ip(self, request: HttpRequest) -> Optional[str]:
-        """Get the actual remote IP when set by an outpost. Only
-        allowed when the request is authenticated, by an outpost internal service account"""
-        if (
-            self.outpost_remote_ip_header not in request.META
-            or self.outpost_token_header not in request.META
-        ):
-            return None
-        delegated_ip = request.META[self.outpost_remote_ip_header]
-        token = (
-            Token.filter_not_expired(
-                key=request.META.get(self.outpost_token_header), intent=TokenIntents.INTENT_API
-            )
-            .select_related("user")
-            .first()
-        )
-        if not token:
-            LOGGER.warning("Attempted remote-ip override without token", delegated_ip=delegated_ip)
-            return None
-        user: User = token.user
-        if user.type != UserTypes.INTERNAL_SERVICE_ACCOUNT:
-            LOGGER.warning(
-                "Remote-IP override: user doesn't have permission",
-                user=user,
-                delegated_ip=delegated_ip,
-            )
-            return None
-        # Update sentry scope to include correct IP
-        user = Hub.current.scope._user
-        if not user:
-            user = {}
-        user["ip_address"] = delegated_ip
-        Hub.current.scope.set_user(user)
-        # Set the outpost service account on the request
-        setattr(request, self.request_attr_outpost_user, user)
-        return delegated_ip
-
-    def _get_client_ip(self, request: Optional[HttpRequest]) -> str:
-        """Attempt to get the client's IP by checking common HTTP Headers.
-        Returns none if no IP Could be found"""
-        if not request:
-            return self.default_ip
-        override = self._get_outpost_override_ip(request)
-        if override:
-            return override
-        return self._get_client_ip_from_meta(request.META)
-
-    @staticmethod
-    def get_outpost_user(request: HttpRequest) -> Optional[User]:
-        """Get outpost user that authenticated this request"""
-        return getattr(request, ClientIPMiddleware.request_attr_outpost_user, None)
-
-    @staticmethod
-    def get_client_ip(request: HttpRequest) -> str:
-        """Get correct client IP, including any overrides from outposts that
-        have the permission to do so"""
-        if request and not hasattr(request, ClientIPMiddleware.request_attr_client_ip):
-            ClientIPMiddleware(lambda request: request).set_ip(request)
-        return getattr(
-            request, ClientIPMiddleware.request_attr_client_ip, ClientIPMiddleware.default_ip
-        )
-
-    def set_ip(self, request: HttpRequest):
-        """Set the IP"""
-        setattr(request, self.request_attr_client_ip, self._get_client_ip(request))
-
-    def __call__(self, request: HttpRequest) -> HttpResponse:
-        self.set_ip(request)
-        return self.get_response(request)
-
-
 class ChannelsLoggingMiddleware:
     """Logging middleware for channels"""

@ -297,7 +191,7 @@ class LoggingMiddleware:
|
|||
response = self.get_response(request)
|
||||
status_code = response.status_code
|
||||
kwargs = {
|
||||
"request_id": getattr(request, "request_id", None),
|
||||
"request_id": request.request_id,
|
||||
}
|
||||
kwargs.update(getattr(response, "ak_context", {}))
|
||||
self.log(request, status_code, int((default_timer() - start) * 1000), **kwargs)
|
||||
|
@ -307,7 +201,7 @@ class LoggingMiddleware:
|
|||
"""Log request"""
|
||||
LOGGER.info(
|
||||
request.get_full_path(),
|
||||
remote=ClientIPMiddleware.get_client_ip(request),
|
||||
remote=get_client_ip(request),
|
||||
method=request.method,
|
||||
scheme=request.scheme,
|
||||
status=status_code,
|
||||
|
|
|
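The removed ClientIPMiddleware ultimately falls back to the first entry of X-Forwarded-For, trusting the go proxy in front to validate the header; the get_client_ip helper it is swapped for follows the same idea. A minimal sketch of that header-based lookup, independent of either implementation:

from django.http import HttpRequest

FALLBACK_IP = "255.255.255.255"


def client_ip_from_headers(request: HttpRequest) -> str:
    """Return the first X-Forwarded-For entry, then REMOTE_ADDR, then a fallback."""
    for header in ("HTTP_X_FORWARDED_FOR", "REMOTE_ADDR"):
        if header in request.META:
            # X-Forwarded-For can hold a comma-separated proxy chain; take the client end
            return request.META[header].split(",")[0].strip()
    return FALLBACK_IP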
@@ -1,22 +0,0 @@
-"""
-Module for abstract serializer/unserializer base classes.
-"""
-import pickle  # nosec
-
-
-class PickleSerializer:
-    """
-    Simple wrapper around pickle to be used in signing.dumps()/loads() and
-    cache backends.
-    """
-
-    def __init__(self, protocol=None):
-        self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol
-
-    def dumps(self, obj):
-        """Pickle data to be stored in redis"""
-        return pickle.dumps(obj, self.protocol)
-
-    def loads(self, data):
-        """Unpickle data to be loaded from redis"""
-        return pickle.loads(data)  # nosec
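The deleted module is essentially a vendored copy of Django's own django.contrib.sessions.serializers.PickleSerializer, which the settings change below switches back to. A quick round-trip sketch, assuming a Django version that still ships that class:

from django.contrib.sessions.serializers import PickleSerializer

serializer = PickleSerializer()
payload = serializer.dumps({"user_id": 42})  # bytes suitable for the cache backend
assert serializer.loads(payload) == {"user_id": 42}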
@@ -1,4 +1,5 @@
 """root settings for authentik"""
+
 import importlib
 import os
 from hashlib import sha512

@@ -17,9 +18,7 @@ from authentik.stages.password import BACKEND_APP_PASSWORD, BACKEND_INBUILT, BAC

 BASE_DIR = Path(__file__).absolute().parent.parent.parent
 STATICFILES_DIRS = [BASE_DIR / Path("web")]
-MEDIA_ROOT = Path(CONFIG.get("paths.media"))
-if not MEDIA_ROOT.exists():
-    MEDIA_ROOT.mkdir(parents=True, exist_ok=True)
+MEDIA_ROOT = BASE_DIR / Path("media")

 DEBUG = CONFIG.get_bool("debug")
 SECRET_KEY = CONFIG.get("secret_key")

@@ -140,7 +139,6 @@ SPECTACULAR_SETTINGS = {
     "EventActions": "authentik.events.models.EventAction",
     "ChallengeChoices": "authentik.flows.challenge.ChallengeTypes",
     "FlowDesignationEnum": "authentik.flows.models.FlowDesignation",
-    "FlowLayoutEnum": "authentik.flows.models.FlowLayout",
     "PolicyEngineMode": "authentik.policies.models.PolicyEngineMode",
     "ProxyMode": "authentik.providers.proxy.models.ProxyMode",
     "PromptTypeEnum": "authentik.stages.prompt.models.FieldTypes",

@@ -197,8 +195,8 @@ _redis_url = (
 CACHES = {
     "default": {
         "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": CONFIG.get("cache.url") or f"{_redis_url}/{CONFIG.get('redis.db')}",
-        "TIMEOUT": CONFIG.get_int("cache.timeout", 300),
+        "LOCATION": f"{_redis_url}/{CONFIG.get('redis.db')}",
+        "TIMEOUT": CONFIG.get_int("redis.cache_timeout", 300),
         "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
         "KEY_PREFIX": "authentik_cache",
     }

@@ -207,7 +205,7 @@ DJANGO_REDIS_SCAN_ITERSIZE = 1000
 DJANGO_REDIS_IGNORE_EXCEPTIONS = True
 DJANGO_REDIS_LOG_IGNORED_EXCEPTIONS = True
 SESSION_ENGINE = "django.contrib.sessions.backends.cache"
-SESSION_SERIALIZER = "authentik.root.sessions.pickle.PickleSerializer"
+SESSION_SERIALIZER = "django.contrib.sessions.serializers.PickleSerializer"
 SESSION_CACHE_ALIAS = "default"
 # Configured via custom SessionMiddleware
 # SESSION_COOKIE_SAMESITE = "None"

@@ -219,8 +217,7 @@ MESSAGE_STORAGE = "authentik.root.messages.storage.ChannelsStorage"
 MIDDLEWARE = [
     "authentik.root.middleware.LoggingMiddleware",
     "django_prometheus.middleware.PrometheusBeforeMiddleware",
-    "authentik.root.middleware.ClientIPMiddleware",
     "authentik.stages.user_login.middleware.BoundSessionMiddleware",
     "authentik.root.middleware.SessionMiddleware",
     "django.contrib.auth.middleware.AuthenticationMiddleware",
     "authentik.core.middleware.RequestIDMiddleware",
     "authentik.tenants.middleware.TenantMiddleware",

@@ -239,7 +236,7 @@ ROOT_URLCONF = "authentik.root.urls"
 TEMPLATES = [
     {
         "BACKEND": "django.template.backends.django.DjangoTemplates",
-        "DIRS": [CONFIG.get("paths.email_templates")],
+        "DIRS": [CONFIG.get("email.template_dir")],
         "APP_DIRS": True,
         "OPTIONS": {
             "context_processors": [

@@ -259,7 +256,7 @@ CHANNEL_LAYERS = {
     "default": {
         "BACKEND": "channels_redis.pubsub.RedisPubSubChannelLayer",
         "CONFIG": {
-            "hosts": [CONFIG.get("channel.url", f"{_redis_url}/{CONFIG.get('redis.db')}")],
+            "hosts": [f"{_redis_url}/{CONFIG.get('redis.db')}"],
             "prefix": "authentik_channels_",
         },
     },

@@ -281,9 +278,6 @@ DATABASES = {
         "SSLROOTCERT": CONFIG.get("postgresql.sslrootcert"),
         "SSLCERT": CONFIG.get("postgresql.sslcert"),
         "SSLKEY": CONFIG.get("postgresql.sslkey"),
-        "TEST": {
-            "NAME": CONFIG.get("postgresql.test.name"),
-        },
     }
 }

@@ -355,11 +349,8 @@ CELERY = {
     },
     "task_create_missing_queues": True,
     "task_default_queue": "authentik",
-    "broker_url": CONFIG.get("broker.url")
-    or f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
-    "broker_transport_options": CONFIG.get_dict_from_b64_json("broker.transport_options"),
-    "result_backend": CONFIG.get("result_backend.url")
-    or f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
+    "broker_url": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
+    "result_backend": f"{_redis_url}/{CONFIG.get('redis.db')}{_redis_celery_tls_requirements}",
 }

 # Sentry integration

@@ -418,6 +409,7 @@ if DEBUG:
     CELERY["task_always_eager"] = True
     os.environ[ENV_GIT_HASH_KEY] = "dev"
     INSTALLED_APPS.append("silk")
     SILKY_PYTHON_PROFILER = True
+    MIDDLEWARE = ["silk.middleware.SilkyMiddleware"] + MIDDLEWARE
     REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"].append(
         "rest_framework.renderers.BrowsableAPIRenderer"
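Both sides of the settings diff derive cache, channel-layer, and Celery locations from a shared _redis_url plus a database index; the left side additionally lets explicit URLs (cache.url, channel.url, broker.url) take precedence. A sketch of the composition, with placeholder values standing in for the CONFIG lookups:

# Placeholder values; the real _redis_url is assembled from CONFIG (host, credentials, TLS)
redis_host = "localhost"
redis_db = 0

_redis_url = f"redis://{redis_host}:6379"
default_location = f"{_redis_url}/{redis_db}"  # e.g. redis://localhost:6379/0

# Pre-change behavior: an explicit per-component URL wins over the composed default
cache_url_override = None  # stand-in for CONFIG.get("cache.url")
location = cache_url_override or default_location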
@@ -32,9 +32,8 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover
         settings.TEST = True
         settings.CELERY["task_always_eager"] = True
         CONFIG.set("avatars", "none")
-        CONFIG.set("events.context_processors.geoip", "tests/GeoLite2-City-Test.mmdb")
-        CONFIG.set("events.context_processors.asn", "tests/GeoLite2-ASN-Test.mmdb")
-        CONFIG.set("paths.blueprints", "./blueprints")
+        CONFIG.set("geoip", "tests/GeoLite2-City-Test.mmdb")
+        CONFIG.set("blueprints_dir", "./blueprints")
         CONFIG.set(
             "outposts.container_image_base",
             f"ghcr.io/goauthentik/dev-%(type)s:{get_docker_tag()}",
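The renamed keys reflect authentik's dotted-path config loader, where set("events.context_processors.geoip", ...) writes into nested config rather than a flat "geoip" key. A toy illustration of how such dotted set/get resolution typically behaves (not authentik's actual ConfigLoader):

class DottedConfig:
    """Toy config store resolving dotted paths into nested dicts."""

    def __init__(self):
        self._data = {}

    def set(self, path: str, value):
        node = self._data
        *parents, leaf = path.split(".")
        for key in parents:
            node = node.setdefault(key, {})
        node[leaf] = value

    def get(self, path: str, default=None):
        node = self._data
        for key in path.split("."):
            if not isinstance(node, dict) or key not in node:
                return default
            node = node[key]
        return node


cfg = DottedConfig()
cfg.set("events.context_processors.geoip", "tests/GeoLite2-City-Test.mmdb")
assert cfg.get("events.context_processors.geoip").endswith(".mmdb")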
@@ -1,14 +1,13 @@
 """Source API Views"""
-from typing import Any, Optional
+from typing import Any

-from django.core.cache import cache
 from django_filters.filters import AllValuesMultipleFilter
 from django_filters.filterset import FilterSet
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import extend_schema, extend_schema_field, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
-from rest_framework.fields import BooleanField, DictField, ListField, SerializerMethodField
+from rest_framework.fields import DictField, ListField
 from rest_framework.relations import PrimaryKeyRelatedField
 from rest_framework.request import Request
 from rest_framework.response import Response

@@ -18,17 +17,15 @@ from authentik.admin.api.tasks import TaskSerializer
 from authentik.core.api.propertymappings import PropertyMappingSerializer
 from authentik.core.api.sources import SourceSerializer
 from authentik.core.api.used_by import UsedByMixin
-from authentik.core.api.utils import PassiveSerializer
 from authentik.crypto.models import CertificateKeyPair
 from authentik.events.monitored_tasks import TaskInfo
 from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
-from authentik.sources.ldap.tasks import CACHE_KEY_STATUS, SYNC_CLASSES
+from authentik.sources.ldap.tasks import SYNC_CLASSES


 class LDAPSourceSerializer(SourceSerializer):
     """LDAP Source Serializer"""

-    connectivity = SerializerMethodField()
     client_certificate = PrimaryKeyRelatedField(
         allow_null=True,
         help_text="Client certificate to authenticate against the LDAP Server's Certificate.",

@@ -38,10 +35,6 @@ class LDAPSourceSerializer(SourceSerializer):
         required=False,
     )

-    def get_connectivity(self, source: LDAPSource) -> Optional[dict[str, dict[str, str]]]:
-        """Get cached source connectivity"""
-        return cache.get(CACHE_KEY_STATUS + source.slug, None)
-
     def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
         """Check that only a single source has password_sync on"""
         sync_users_password = attrs.get("sync_users_password", True)

@@ -82,18 +75,10 @@ class LDAPSourceSerializer(SourceSerializer):
             "sync_parent_group",
             "property_mappings",
             "property_mappings_group",
-            "connectivity",
         ]
         extra_kwargs = {"bind_password": {"write_only": True}}


-class LDAPSyncStatusSerializer(PassiveSerializer):
-    """LDAP Source sync status"""
-
-    is_running = BooleanField(read_only=True)
-    tasks = TaskSerializer(many=True, read_only=True)
-
-
 class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
     """LDAP Source Viewset"""

@@ -129,19 +114,19 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):

     @extend_schema(
         responses={
-            200: LDAPSyncStatusSerializer(),
+            200: TaskSerializer(many=True),
         }
     )
     @action(methods=["GET"], detail=True, pagination_class=None, filter_backends=[])
     def sync_status(self, request: Request, slug: str) -> Response:
         """Get source's sync status"""
-        source: LDAPSource = self.get_object()
-        tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*") or []
-        status = {
-            "tasks": tasks,
-            "is_running": source.sync_lock.locked(),
-        }
-        return Response(LDAPSyncStatusSerializer(status).data)
+        source = self.get_object()
+        results = []
+        tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*")
+        if tasks:
+            for task in tasks:
+                results.append(task)
+        return Response(TaskSerializer(results, many=True).data)

     @extend_schema(
         responses={
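After this change the endpoint returns a plain list of tasks instead of the {is_running, tasks} wrapper, so sync_lock is no longer consulted. The explicit accumulation loop can also be written more compactly; a behaviorally equivalent sketch of the post-change handler body:

def sync_status(self, request, slug):
    """Get source's sync status"""
    source = self.get_object()
    # TaskInfo.by_name may return None when no matching tasks exist
    tasks = TaskInfo.by_name(f"ldap_sync:{source.slug}:*") or []
    return Response(TaskSerializer(list(tasks), many=True).data)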
@@ -1,24 +0,0 @@
-"""LDAP Connection check"""
-from json import dumps
-
-from django.core.management.base import BaseCommand
-from structlog.stdlib import get_logger
-
-from authentik.sources.ldap.models import LDAPSource
-
-LOGGER = get_logger()
-
-
-class Command(BaseCommand):
-    """Check connectivity to LDAP servers for a source"""
-
-    def add_arguments(self, parser):
-        parser.add_argument("source_slugs", nargs="?", type=str)
-
-    def handle(self, **options):
-        sources = LDAPSource.objects.filter(enabled=True)
-        if options["source_slugs"]:
-            sources = LDAPSource.objects.filter(slug__in=options["source_slugs"])
-        for source in sources.order_by("slug"):
-            status = source.check_connection()
-            self.stdout.write(dumps(status, indent=4))
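The deleted command is a thin wrapper: it iterates enabled sources and prints the per-server status dict from check_connection() (shown in the models diff below). The same check can be run from a Django shell; a sketch assuming that method exists:

from json import dumps

from authentik.sources.ldap.models import LDAPSource

for source in LDAPSource.objects.filter(enabled=True).order_by("slug"):
    # check_connection() returns {host: {"status": ..., "vendor": ..., "version": ...}}
    print(dumps(source.check_connection(), indent=4))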
@@ -1,17 +1,13 @@
 """authentik LDAP Models"""
 from os import chmod
-from os.path import dirname, exists
-from shutil import rmtree
 from ssl import CERT_REQUIRED
 from tempfile import NamedTemporaryFile, mkdtemp
 from typing import Optional

-from django.core.cache import cache
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
-from ldap3.core.exceptions import LDAPException, LDAPInsufficientAccessRightsResult, LDAPSchemaError
-from redis.lock import Lock
+from ldap3.core.exceptions import LDAPInsufficientAccessRightsResult, LDAPSchemaError
 from rest_framework.serializers import Serializer

 from authentik.core.models import Group, PropertyMapping, Source

@@ -121,7 +117,7 @@ class LDAPSource(Source):

         return LDAPSourceSerializer

-    def server(self, **kwargs) -> ServerPool:
+    def server(self, **kwargs) -> Server:
         """Get LDAP Server/ServerPool"""
         servers = []
         tls_kwargs = {}

@@ -158,10 +154,7 @@ class LDAPSource(Source):
         return ServerPool(servers, RANDOM, active=5, exhaust=True)

     def connection(
-        self,
-        server: Optional[Server] = None,
-        server_kwargs: Optional[dict] = None,
-        connection_kwargs: Optional[dict] = None,
+        self, server_kwargs: Optional[dict] = None, connection_kwargs: Optional[dict] = None
     ) -> Connection:
         """Get a fully connected and bound LDAP Connection"""
         server_kwargs = server_kwargs or {}

@@ -171,7 +164,7 @@ class LDAPSource(Source):
         if self.bind_password is not None:
             connection_kwargs.setdefault("password", self.bind_password)
         connection = Connection(
-            server or self.server(**server_kwargs),
+            self.server(**server_kwargs),
             raise_exceptions=True,
             receive_timeout=LDAP_TIMEOUT,
             **connection_kwargs,

@@ -190,60 +183,9 @@ class LDAPSource(Source):
             if server_kwargs.get("get_info", ALL) == NONE:
                 raise exc
             server_kwargs["get_info"] = NONE
-            return self.connection(server, server_kwargs, connection_kwargs)
-        finally:
-            if connection.server.tls.certificate_file is not None and exists(
-                connection.server.tls.certificate_file
-            ):
-                rmtree(dirname(connection.server.tls.certificate_file))
+            return self.connection(server_kwargs, connection_kwargs)
         return RuntimeError("Failed to bind")

-    @property
-    def sync_lock(self) -> Lock:
-        """Redis lock for syncing LDAP to prevent multiple parallel syncs happening"""
-        return Lock(
-            cache.client.get_client(),
-            name=f"goauthentik.io/sources/ldap/sync-{self.slug}",
-            # Convert task timeout hours to seconds, and multiply times 3
-            # (see authentik/sources/ldap/tasks.py:54)
-            # multiply by 3 to add even more leeway
-            timeout=(60 * 60 * CONFIG.get_int("ldap.task_timeout_hours")) * 3,
-        )
-
-    def check_connection(self) -> dict[str, dict[str, str]]:
-        """Check LDAP Connection"""
-        from authentik.sources.ldap.sync.base import flatten
-
-        servers = self.server()
-        server_info = {}
-        # Check each individual server
-        for server in servers.servers:
-            server: Server
-            try:
-                connection = self.connection(server=server)
-                server_info[server.host] = {
-                    "vendor": str(flatten(connection.server.info.vendor_name)),
-                    "version": str(flatten(connection.server.info.vendor_version)),
-                    "status": "ok",
-                }
-            except LDAPException as exc:
-                server_info[server.host] = {
-                    "status": str(exc),
-                }
-        # Check server pool
-        try:
-            connection = self.connection()
-            server_info["__all__"] = {
-                "vendor": str(flatten(connection.server.info.vendor_name)),
-                "version": str(flatten(connection.server.info.vendor_version)),
-                "status": "ok",
-            }
-        except LDAPException as exc:
-            server_info["__all__"] = {
-                "status": str(exc),
-            }
-        return server_info
-
     class Meta:
         verbose_name = _("LDAP Source")
         verbose_name_plural = _("LDAP Sources")
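The removed sync_lock property wraps redis.lock.Lock so that at most one sync per source slug runs at a time, with a timeout padded to three times the task timeout. A minimal sketch of that locking pattern against a plain redis-py client (name and timeout are illustrative):

from redis import Redis
from redis.lock import Lock

client = Redis()  # assumes a local Redis; authentik reuses the cache's client instead
lock = Lock(client, name="goauthentik.io/sources/ldap/sync-example-slug", timeout=3 * 60 * 60)

if lock.acquire(blocking=False):
    try:
        ...  # run the sync while holding the lock
    finally:
        lock.release()
else:
    print("A sync for this source is already running")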
@@ -8,10 +8,5 @@ CELERY_BEAT_SCHEDULE = {
         "task": "authentik.sources.ldap.tasks.ldap_sync_all",
         "schedule": crontab(minute=fqdn_rand("sources_ldap_sync"), hour="*/2"),
         "options": {"queue": "authentik_scheduled"},
     },
-    "sources_ldap_connectivity_check": {
-        "task": "authentik.sources.ldap.tasks.ldap_connectivity_check",
-        "schedule": crontab(minute=fqdn_rand("sources_ldap_connectivity_check"), hour="*"),
-        "options": {"queue": "authentik_scheduled"},
-    },
 }
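Each beat entry pairs a task path with a crontab whose minute comes from fqdn_rand, so different installs fire at different minutes instead of stampeding together. A sketch of one entry, with an illustrative stand-in for fqdn_rand (the real helper lives elsewhere in authentik's codebase):

from hashlib import sha256
from socket import getfqdn

from celery.schedules import crontab


def fqdn_rand(seed: str) -> int:
    """Illustrative stand-in: derive a stable 0-59 minute from the host FQDN and a seed."""
    return int(sha256(f"{getfqdn()}{seed}".encode()).hexdigest(), 16) % 60


CELERY_BEAT_SCHEDULE = {
    "sources_ldap_sync": {
        "task": "authentik.sources.ldap.tasks.ldap_sync_all",
        "schedule": crontab(minute=fqdn_rand("sources_ldap_sync"), hour="*/2"),
        "options": {"queue": "authentik_scheduled"},
    },
}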
@@ -14,7 +14,7 @@ from authentik.events.models import Event, EventAction
 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
 from authentik.sources.ldap.models import LDAPSource
 from authentik.sources.ldap.password import LDAPPasswordChanger
-from authentik.sources.ldap.tasks import ldap_connectivity_check, ldap_sync_single
+from authentik.sources.ldap.tasks import ldap_sync_single
 from authentik.stages.prompt.signals import password_validate

 LOGGER = get_logger()

@@ -32,7 +32,6 @@ def sync_ldap_source_on_save(sender, instance: LDAPSource, **_):
     if not instance.property_mappings.exists() or not instance.property_mappings_group.exists():
         return
     ldap_sync_single.delay(instance.pk)
-    ldap_connectivity_check.delay(instance.pk)


 @receiver(password_validate)
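For reference, sync_ldap_source_on_save is a post_save receiver that defers work to Celery instead of syncing inline; a simplified sketch of the pattern (the actual receiver also checks group mappings, as the hunk above shows):

from django.db.models.signals import post_save
from django.dispatch import receiver

from authentik.sources.ldap.models import LDAPSource
from authentik.sources.ldap.tasks import ldap_sync_single


@receiver(post_save, sender=LDAPSource)
def sync_on_save(sender, instance: LDAPSource, **_):
    # Skip sources that are not fully configured yet
    if not instance.property_mappings.exists():
        return
    ldap_sync_single.delay(instance.pk)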
@@ -17,15 +17,6 @@ from authentik.sources.ldap.models import LDAPPropertyMapping, LDAPSource
 LDAP_UNIQUENESS = "ldap_uniq"


-def flatten(value: Any) -> Any:
-    """Flatten `value` if its a list"""
-    if isinstance(value, list):
-        if len(value) < 1:
-            return None
-        return value[0]
-    return value
-
-
 class BaseLDAPSynchronizer:
     """Sync LDAP Users and groups into authentik"""

@@ -131,6 +122,14 @@ class BaseLDAPSynchronizer:
             cookie = None
             yield self._connection.response

+    def _flatten(self, value: Any) -> Any:
+        """Flatten `value` if its a list"""
+        if isinstance(value, list):
+            if len(value) < 1:
+                return None
+            return value[0]
+        return value
+
     def build_user_properties(self, user_dn: str, **kwargs) -> dict[str, Any]:
         """Build attributes for User object based on property mappings."""
         props = self._build_object_properties(user_dn, self._source.property_mappings, **kwargs)

@@ -164,10 +163,10 @@ class BaseLDAPSynchronizer:
                 object_field = mapping.object_field
                 if object_field.startswith("attributes."):
                     # Because returning a list might desired, we can't
-                    # rely on flatten here. Instead, just save the result as-is
+                    # rely on self._flatten here. Instead, just save the result as-is
                     set_path_in_dict(properties, object_field, value)
                 else:
-                    properties[object_field] = flatten(value)
+                    properties[object_field] = self._flatten(value)
             except PropertyMappingExpressionException as exc:
                 Event.new(
                     EventAction.CONFIGURATION_ERROR,

@@ -178,7 +177,7 @@ class BaseLDAPSynchronizer:
                 self._logger.warning("Mapping failed to evaluate", exc=exc, mapping=mapping)
                 continue
         if self._source.object_uniqueness_field in kwargs:
-            properties["attributes"][LDAP_UNIQUENESS] = flatten(
+            properties["attributes"][LDAP_UNIQUENESS] = self._flatten(
                 kwargs.get(self._source.object_uniqueness_field)
            )
         properties["attributes"][LDAP_DISTINGUISHED_NAME] = object_dn
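flatten and _flatten share the same body; the change only moves the helper from module level onto the synchronizer class. Its semantics, spelled out:

from typing import Any


def flatten(value: Any) -> Any:
    """Collapse a list to its first element; empty lists become None."""
    if isinstance(value, list):
        if len(value) < 1:
            return None
        return value[0]
    return value


assert flatten(["cn=admin"]) == "cn=admin"  # single-valued LDAP attribute
assert flatten([]) is None                  # missing attribute
assert flatten("plain") == "plain"          # non-list passes through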
@@ -7,7 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

 from authentik.core.models import Group
 from authentik.events.models import Event, EventAction
-from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer, flatten
+from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer


 class GroupLDAPSynchronizer(BaseLDAPSynchronizer):

@@ -39,7 +39,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
             if "attributes" not in group:
                 continue
             attributes = group.get("attributes", {})
-            group_dn = flatten(flatten(group.get("entryDN", group.get("dn"))))
+            group_dn = self._flatten(self._flatten(group.get("entryDN", group.get("dn"))))
             if self._source.object_uniqueness_field not in attributes:
                 self.message(
                     f"Cannot find uniqueness field in attributes: '{group_dn}'",

@@ -47,7 +47,7 @@ class GroupLDAPSynchronizer(BaseLDAPSynchronizer):
                     dn=group_dn,
                 )
                 continue
-            uniq = flatten(attributes[self._source.object_uniqueness_field])
+            uniq = self._flatten(attributes[self._source.object_uniqueness_field])
             try:
                 defaults = self.build_group_properties(group_dn, **attributes)
                 defaults["parent"] = self._source.sync_parent_group
@@ -7,7 +7,7 @@ from ldap3 import ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, SUBTREE

 from authentik.core.models import User
 from authentik.events.models import Event, EventAction
-from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer, flatten
+from authentik.sources.ldap.sync.base import LDAP_UNIQUENESS, BaseLDAPSynchronizer
 from authentik.sources.ldap.sync.vendor.freeipa import FreeIPA
 from authentik.sources.ldap.sync.vendor.ms_ad import MicrosoftActiveDirectory

@@ -41,7 +41,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
             if "attributes" not in user:
                 continue
             attributes = user.get("attributes", {})
-            user_dn = flatten(user.get("entryDN", user.get("dn")))
+            user_dn = self._flatten(user.get("entryDN", user.get("dn")))
             if self._source.object_uniqueness_field not in attributes:
                 self.message(
                     f"Cannot find uniqueness field in attributes: '{user_dn}'",

@@ -49,7 +49,7 @@ class UserLDAPSynchronizer(BaseLDAPSynchronizer):
                     dn=user_dn,
                 )
                 continue
-            uniq = flatten(attributes[self._source.object_uniqueness_field])
+            uniq = self._flatten(attributes[self._source.object_uniqueness_field])
             try:
                defaults = self.build_user_properties(user_dn, **attributes)
                 self._logger.debug("Writing user with attributes", **defaults)
@@ -5,7 +5,7 @@ from typing import Any, Generator
 from pytz import UTC

 from authentik.core.models import User
-from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer, flatten
+from authentik.sources.ldap.sync.base import BaseLDAPSynchronizer


 class FreeIPA(BaseLDAPSynchronizer):

@@ -47,7 +47,7 @@ class FreeIPA(BaseLDAPSynchronizer):
             return
         # For some reason, nsaccountlock is not defined properly in the schema as bool
         # hence we get it as a list of strings
-        _is_locked = str(flatten(attributes.get("nsaccountlock", ["FALSE"])))
+        _is_locked = str(self._flatten(attributes.get("nsaccountlock", ["FALSE"])))
         # So we have to attempt to convert it to a bool
         is_locked = _is_locked.lower() == "true"
         # And then invert it since freeipa saves locked and we save active
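Spelled out, the nsaccountlock handling above takes a one-element list of strings, flattens it, parses the boolean, and inverts it (FreeIPA stores "locked" while authentik stores "active"). A self-contained sketch:

def freeipa_is_active(attributes: dict) -> bool:
    """Derive is_active from FreeIPA's nsaccountlock, which arrives as a list of strings."""
    raw = attributes.get("nsaccountlock", ["FALSE"])
    value = raw[0] if isinstance(raw, list) and raw else raw
    # FreeIPA stores "locked"; authentik stores "active", so invert
    return not (str(value).lower() == "true")


assert freeipa_is_active({"nsaccountlock": ["TRUE"]}) is False
assert freeipa_is_active({}) is True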
Some files were not shown because too many files have changed in this diff.