Compare commits

2 Commits

Author: Jens Langhammer    SHA1: 310801dd2f    Date: 2023-09-22 18:29:57 +02:00
    initial fix with class-name-updater
    https://github.com/patternfly/pf-codemods/tree/main/packages/class-name-updater
    Signed-off-by: Jens Langhammer <jens@goauthentik.io>

Author: Jens Langhammer    SHA1: e1fddedfb4    Date: 2023-09-22 18:29:42 +02:00
    fix build
    Signed-off-by: Jens Langhammer <jens@goauthentik.io>
725 changed files with 12613 additions and 44332 deletions

View File

@ -1,5 +1,5 @@
[bumpversion]
current_version = 2023.10.6
current_version = 2023.8.3
tag = True
commit = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

View File

@ -2,39 +2,36 @@ name: "Setup authentik testing environment"
description: "Setup authentik testing environment"
inputs:
postgresql_version:
postgresql_tag:
description: "Optional postgresql image tag"
default: "12"
runs:
using: "composite"
steps:
- name: Install poetry & deps
- name: Install poetry
shell: bash
run: |
pipx install poetry || true
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
sudo apt update
sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
- name: Setup python and restore poetry
uses: actions/setup-python@v4
uses: actions/setup-python@v3
with:
python-version-file: 'pyproject.toml'
python-version: "3.11"
cache: "poetry"
- name: Setup node
uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Setup go
uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- name: Setup dependencies
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
export PSQL_TAG=${{ inputs.postgresql_tag }}
docker-compose -f .github/actions/setup/docker-compose.yml up -d
poetry env use python3.11
poetry install
cd web && npm ci
- name: Generate config

.github/codecov.yml vendored (4 changed lines)
View File

@ -6,5 +6,5 @@ coverage:
# adjust accordingly based on how flaky your tests are
# this allows a 1% drop from the previous base commit coverage
threshold: 1%
comment:
after_n_builds: 3
notify:
after_n_builds: 3

View File

@ -30,7 +30,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
# TODO: deduplicate these groups
groups:
sentry:
patterns:
@ -41,7 +40,7 @@ updates:
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/*"
- "@typescript-eslint/eslint-*"
- "eslint"
- "eslint-*"
storybook:
@ -51,40 +50,6 @@ updates:
esbuild:
patterns:
- "@esbuild/*"
- package-ecosystem: npm
directory: "/tests/wdio"
schedule:
interval: daily
time: "04:00"
labels:
- dependencies
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
# TODO: deduplicate these groups
groups:
sentry:
patterns:
- "@sentry/*"
babel:
patterns:
- "@babel/*"
- "babel-*"
eslint:
patterns:
- "@typescript-eslint/*"
- "eslint"
- "eslint-*"
storybook:
patterns:
- "@storybook/*"
- "*storybook*"
esbuild:
patterns:
- "@esbuild/*"
wdio:
patterns:
- "@wdio/*"
- package-ecosystem: npm
directory: "/website"
schedule:

View File

@ -11,7 +11,6 @@ on:
pull_request:
branches:
- main
- version-*
env:
POSTGRES_DB: authentik
@ -48,38 +47,25 @@ jobs:
- name: run migrations
run: poetry run python -m lifecycle.migrate
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
psql:
- 12-alpine
- 15-alpine
- 16-alpine
continue-on-error: true
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: checkout stable
run: |
# Delete all poetry envs
rm -rf /home/runner/.cache/pypoetry
# Copy current, latest config to local
cp authentik/lib/default.yml local.env.yml
cp -R .github ..
cp -R scripts ..
git checkout version/$(python -c "from authentik import __version__; print(__version__)")
git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
rm -rf .github/ scripts/
mv ../.github ../scripts .
- name: Setup authentik env (ensure stable deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable
run: poetry run python -m lifecycle.migrate
- name: checkout current code
@ -89,13 +75,9 @@ jobs:
git reset --hard HEAD
git clean -d -fx .
git checkout $GITHUB_SHA
# Delete previous poetry env
rm -rf $(poetry env info --path)
poetry install
- name: Setup authentik env (ensure latest deps are installed)
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
- name: migrate to latest
run: poetry run python -m lifecycle.migrate
test-unittest:
@ -108,13 +90,12 @@ jobs:
psql:
- 12-alpine
- 15-alpine
- 16-alpine
steps:
- uses: actions/checkout@v4
- name: Setup authentik env
uses: ./.github/actions/setup
with:
postgresql_version: ${{ matrix.psql }}
postgresql_tag: ${{ matrix.psql }}
- name: run unittest
run: |
poetry run make test
@ -203,9 +184,6 @@ jobs:
build:
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
timeout-minutes: 120
steps:
- uses: actions/checkout@v4
@ -256,9 +234,6 @@ jobs:
build-arm64:
needs: ci-core-mark
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
timeout-minutes: 120
steps:
- uses: actions/checkout@v4

View File

@ -9,7 +9,6 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-golint:
@ -30,7 +29,7 @@ jobs:
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
version: v1.54.2
version: v1.52.2
args: --timeout 5000s --verbose
skip-cache: true
test-unittest:
@ -40,8 +39,6 @@ jobs:
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-go
- name: Go unittests
@ -66,9 +63,6 @@ jobs:
- ldap
- radius
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v4
with:
@ -128,9 +122,9 @@ jobs:
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API

View File

@ -9,38 +9,31 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-eslint:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
project:
- web
- tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: ${{ matrix.project }}/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: Eslint
working-directory: ${{ matrix.project }}/
working-directory: web/
run: npm run lint
lint-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
@ -52,33 +45,27 @@ jobs:
run: npm run tsc
lint-prettier:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
project:
- web
- tests/wdio
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: ${{ matrix.project }}/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: prettier
working-directory: ${{ matrix.project }}/
working-directory: web/
run: npm run prettier-check
lint-lit-analyse:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
@ -108,9 +95,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/

View File

@ -9,16 +9,15 @@ on:
pull_request:
branches:
- main
- version-*
jobs:
lint-prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: website/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
@ -30,9 +29,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: website/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
@ -51,9 +50,9 @@ jobs:
- build-docs-only
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: website/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/

View File

@ -1,8 +1,8 @@
name: ghcr-retention
on:
# schedule:
# - cron: "0 0 * * *" # every day at midnight
schedule:
- cron: "0 0 * * *" # every day at midnight
workflow_dispatch:
jobs:

View File

@ -6,7 +6,6 @@ on:
workflow_dispatch:
permissions:
# Needed to be able to push to the next branch
contents: write
jobs:

View File

@ -7,9 +7,6 @@ on:
jobs:
build-server:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
@ -30,10 +27,8 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: make empty ts client
run: mkdir -p ./gen-ts-client
- name: Build Docker Image
uses: docker/build-push-action@v5
with:
@ -55,9 +50,6 @@ jobs:
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost:
runs-on: ubuntu-latest
permissions:
# Needed to upload container images to ghcr.io
packages: write
strategy:
fail-fast: false
matrix:
@ -77,10 +69,6 @@ jobs:
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
- name: make empty clients
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Docker Login Registry
uses: docker/login-action@v3
with:
@ -105,16 +93,12 @@ jobs:
ghcr.io/goauthentik/${{ matrix.type }}:latest
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
build-args: |
VERSION=${{ steps.ev.outputs.version }}
VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
build-outpost-binary:
timeout-minutes: 120
runs-on: ubuntu-latest
permissions:
# Needed to upload binaries to the release
contents: write
strategy:
fail-fast: false
matrix:
@ -129,9 +113,9 @@ jobs:
- uses: actions/setup-go@v4
with:
go-version-file: "go.mod"
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Build web

View File

@ -16,7 +16,6 @@ jobs:
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker buildx install
mkdir -p ./gen-ts-api
docker build -t testing:latest .
echo "AUTHENTIK_IMAGE=testing" >> .env
echo "AUTHENTIK_TAG=latest" >> .env

View File

@ -6,8 +6,8 @@ on:
workflow_dispatch:
permissions:
# Needed to update issues and PRs
issues: write
pull-requests: write
jobs:
stale:

View File

@ -17,9 +17,9 @@ jobs:
- uses: actions/checkout@v4
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version-file: web/package.json
node-version: "20"
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts

.gitignore vendored (3 changed lines)
View File

@ -206,6 +206,3 @@ data/
.netlify
.ruff_cache
source_docs/
### Golang ###
/vendor/

View File

@ -9,8 +9,6 @@ lifecycle/ @goauthentik/backend
schemas/ @goauthentik/backend
scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
poetry.lock @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
@ -19,7 +17,6 @@ Dockerfile @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
# Docs & Website
website/ @goauthentik/docs
# Security

View File

@ -1,7 +1,5 @@
# syntax=docker/dockerfile:1
# Stage 1: Build website
FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
ENV NODE_ENV=production
@ -9,7 +7,7 @@ WORKDIR /work/website
RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
--mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
--mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
--mount=type=cache,target=/root/.npm \
npm ci --include=dev
COPY ./website /work/website/
@ -19,7 +17,7 @@ COPY ./SECURITY.md /work/
RUN npm run build-docs-only
# Stage 2: Build webui
FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
ENV NODE_ENV=production
@ -27,7 +25,7 @@ WORKDIR /work/web
RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
--mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
--mount=type=cache,target=/root/.npm \
npm ci --include=dev
COPY ./web /work/web/
@ -37,14 +35,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
RUN npm run build
# Stage 3: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder
ARG TARGETOS
ARG TARGETARCH
ARG TARGETVARIANT
ARG GOOS=$TARGETOS
ARG GOARCH=$TARGETARCH
FROM docker.io/golang:1.21.1-bookworm AS go-builder
WORKDIR /go/src/goauthentik.io
@ -64,12 +55,12 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
ENV CGO_ENABLED=0
RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
--mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target=/root/.cache/go-build \
go build -o /go/authentik ./cmd/server
# Stage 4: MaxMind GeoIP
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
ENV GEOIPUPDATE_VERBOSE="true"
@ -91,9 +82,7 @@ ENV VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false \
PATH="/ak-root/venv/bin:$PATH"
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
RUN --mount=type=cache,target=/var/cache/apt \
apt-get update && \
# Required for installing pip packages
apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
@ -157,10 +146,10 @@ USER 1000
ENV TMPDIR=/dev/shm/ \
PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
PATH="/ak-root/venv/bin:$PATH" \
VENV_PATH="/ak-root/venv" \
POETRY_VIRTUALENVS_CREATE=false
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
ENTRYPOINT [ "dumb-init", "--", "ak" ]
ENTRYPOINT [ "dumb-init", "--", "/lifecycle/ak" ]

View File

@ -1,16 +1,9 @@
.PHONY: gen dev-reset all clean test web website
.SHELLFLAGS += ${SHELLFLAGS} -e
.SHELLFLAGS += -x -e
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle
DOCKER_IMAGE ?= "authentik:test"
pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)
CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
-I .github/codespell-words.txt \
@ -26,82 +19,57 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
website/integrations \
website/src
all: lint-fix lint test gen web ## Lint, build, and test everything
HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)
help: ## Show this help
@echo "\nSpecify a command. The choices are:\n"
@grep -Eh '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
awk 'BEGIN {FS = ":.*?## "}; {printf " \033[0;36m%-$(HELP_WIDTH)s \033[m %s\n", $$1, $$2}' | \
sort
@echo ""
all: lint-fix lint test gen web
test-go:
go test -timeout 0 -v -race -cover ./...
test-docker: ## Run all tests in a docker-compose
test-docker:
echo "PG_PASS=$(openssl rand -base64 32)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
docker-compose pull -q
docker-compose up --no-start
docker-compose start postgresql redis
docker-compose run -u root server test-all
docker-compose run -u root server test
rm -f .env
test: ## Run the server tests and produce a coverage report (locally)
test:
coverage run manage.py test --keepdb authentik
coverage html
coverage report
lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
isort $(PY_SOURCES)
black $(PY_SOURCES)
ruff $(PY_SOURCES)
lint-fix:
isort authentik $(PY_SOURCES)
black authentik $(PY_SOURCES)
ruff authentik $(PY_SOURCES)
codespell -w $(CODESPELL_ARGS)
lint: ## Lint the python and golang sources
bandit -r $(PY_SOURCES) -x node_modules
./web/node_modules/.bin/pyright $(PY_SOURCES)
lint:
pylint $(PY_SOURCES)
bandit -r $(PY_SOURCES) -x node_modules
golangci-lint run -v
migrate: ## Run the Authentik Django server's migrations
migrate:
python -m lifecycle.migrate
i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service
i18n-extract: i18n-extract-core web-i18n-extract
i18n-extract-core:
ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en
install: web-install website-install ## Install all requires dependencies for `web`, `website` and `core`
poetry install
dev-drop-db:
dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
# Also remove the test-db if it exists
dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
redis-cli -n 0 flushall
dev-create-db:
createdb -U ${pg_user} -h ${pg_host} ${pg_name}
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
#########################
## API Schema
#########################
gen-build: ## Extract the schema from the database
gen-build:
AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
AUTHENTIK_DEBUG=true ak spectacular --file schema.yml
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
gen-changelog:
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md
gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
gen-diff:
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
docker run \
--rm -v ${PWD}:/local \
@ -116,7 +84,7 @@ gen-clean:
rm -rf web/api/src/
rm -rf api/
gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
gen-client-ts:
docker run \
--rm -v ${PWD}:/local \
--user ${UID}:${GID} \
@ -132,7 +100,7 @@ gen-client-ts: ## Build and install the authentik API for Typescript into the a
cd gen-ts-api && npm i
\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api
gen-client-go: ## Build and install the authentik API for Golang
gen-client-go:
mkdir -p ./gen-go-api ./gen-go-api/templates
wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache
@ -149,7 +117,7 @@ gen-client-go: ## Build and install the authentik API for Golang
go mod edit -replace goauthentik.io/api/v3=./gen-go-api
rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/
gen-dev-config: ## Generate a local development config file
gen-dev-config:
python -m scripts.generate_config
gen: gen-build gen-clean gen-client-ts
@ -158,21 +126,21 @@ gen: gen-build gen-clean gen-client-ts
## Web
#########################
web-build: web-install ## Build the Authentik UI
web-build: web-install
cd web && npm run build
web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
web: web-lint-fix web-lint web-check-compile
web-install: ## Install the necessary libraries to build the Authentik UI
web-install:
cd web && npm ci
web-watch: ## Build and watch the Authentik UI for changes, updating automatically
web-watch:
rm -rf web/dist/
mkdir web/dist/
touch web/dist/.gitkeep
cd web && npm run watch
web-storybook-watch: ## Build and run the storybook documentation server
web-storybook-watch:
cd web && npm run storybook
web-lint-fix:
@ -192,7 +160,7 @@ web-i18n-extract:
## Website
#########################
website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
website: website-lint-fix website-build
website-install:
cd website && npm ci
@ -203,15 +171,15 @@ website-lint-fix:
website-build:
cd website && npm run build
website-watch: ## Build and watch the documentation website, updating automatically
website-watch:
cd website && npm run watch
#########################
## Docker
#########################
docker: ## Build a docker image of the current source tree
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
docker:
DOCKER_BUILDKIT=1 docker build . --progress plain --tag authentik:test
#########################
## CI
@ -246,3 +214,14 @@ ci-pyright: ci--meta-debug
ci-pending-migrations: ci--meta-debug
ak makemigrations --check
install: web-install website-install
poetry install
dev-reset:
dropdb -U postgres -h localhost authentik
# Also remove the test-db if it exists
dropdb -U postgres -h localhost test_authentik || true
createdb -U postgres -h localhost authentik
redis-cli -n 0 flushall
make migrate

View File

@ -41,3 +41,15 @@ See [SECURITY.md](SECURITY.md)
## Adoption and Contributions
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
## Sponsors
This project is proudly sponsored by:
<p>
<a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
<img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
</a>
</p>
DigitalOcean provides development and testing resources for authentik.

View File

@ -2,7 +2,7 @@
from os import environ
from typing import Optional
__version__ = "2023.10.6"
__version__ = "2023.8.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@ -1,7 +1,7 @@
"""Meta API"""
from drf_spectacular.utils import extend_schema
from rest_framework.fields import CharField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
@ -21,7 +21,7 @@ class AppSerializer(PassiveSerializer):
class AppsViewSet(ViewSet):
"""Read-only view list all installed apps"""
permission_classes = [IsAuthenticated]
permission_classes = [IsAdminUser]
@extend_schema(responses={200: AppSerializer(many=True)})
def list(self, request: Request) -> Response:
@ -35,7 +35,7 @@ class AppsViewSet(ViewSet):
class ModelViewSet(ViewSet):
"""Read-only view list all installed models"""
permission_classes = [IsAuthenticated]
permission_classes = [IsAdminUser]
@extend_schema(responses={200: AppSerializer(many=True)})
def list(self, request: Request) -> Response:

View File

@ -5,7 +5,7 @@ from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
@ -68,7 +68,7 @@ class LoginMetricsSerializer(PassiveSerializer):
class AdministrationMetricsViewSet(APIView):
"""Login Metrics per 1h"""
permission_classes = [IsAuthenticated]
permission_classes = [IsAdminUser]
@extend_schema(responses={200: LoginMetricsSerializer(many=False)})
def get(self, request: Request) -> Response:

View File

@ -8,6 +8,7 @@ from django.utils.timezone import now
from drf_spectacular.utils import extend_schema
from gunicorn import version_info as gunicorn_version
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
@ -16,7 +17,6 @@ from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission
class RuntimeDict(TypedDict):
@ -88,7 +88,7 @@ class SystemSerializer(PassiveSerializer):
class SystemView(APIView):
"""Get system information."""
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
permission_classes = [IsAdminUser]
pagination_class = None
filter_backends = []
serializer_class = SystemSerializer
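
The hunk above (like the admin API hunks before it) swaps authentik's granular HasPermission("authentik_rbac.view_system_info") gate for DRF's built-in IsAdminUser, which only checks request.user.is_staff. As a rough, minimal sketch of what such a permission-class factory can look like, assuming nothing beyond DRF's BasePermission API and Django's has_perms check (the diff imports the real implementation from authentik/rbac/permissions.py):

from rest_framework.permissions import BasePermission


def HasPermission(*perm: str) -> type[BasePermission]:
    """Minimal sketch: build a DRF permission class requiring the given global permissions."""

    class Checker(BasePermission):
        def has_permission(self, request, view) -> bool:
            # Grant access only when the user holds every listed Django permission.
            return bool(request.user and request.user.has_perms(perm))

    return Checker

IsAdminUser, by contrast, passes for any staff user regardless of which individual permissions were assigned, which is why the older revision needs no RBAC-specific permission classes.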

View File

@ -14,15 +14,14 @@ from rest_framework.fields import (
ListField,
SerializerMethodField,
)
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from structlog.stdlib import get_logger
from authentik.api.decorators import permission_required
from authentik.core.api.utils import PassiveSerializer
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
from authentik.rbac.permissions import HasPermission
LOGGER = get_logger()
@ -64,7 +63,7 @@ class TaskSerializer(PassiveSerializer):
class TaskViewSet(ViewSet):
"""Read-only view set that returns all background tasks"""
permission_classes = [HasPermission("authentik_rbac.view_system_tasks")]
permission_classes = [IsAdminUser]
serializer_class = TaskSerializer
@extend_schema(
@ -94,7 +93,6 @@ class TaskViewSet(ViewSet):
tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
return Response(TaskSerializer(tasks, many=True).data)
@permission_required(None, ["authentik_rbac.run_system_tasks"])
@extend_schema(
request=OpenApiTypes.NONE,
responses={

View File

@ -2,18 +2,18 @@
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.fields import IntegerField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP
class WorkerView(APIView):
"""Get currently connected worker count."""
permission_classes = [HasPermission("authentik_rbac.view_system_info")]
permission_classes = [IsAdminUser]
@extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
def get(self, request: Request) -> Response:

View File

@ -7,9 +7,9 @@ from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework_guardian.filters import ObjectPermissionsFilter
from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter
class OwnerFilter(BaseFilterBackend):
@ -26,14 +26,14 @@ class OwnerFilter(BaseFilterBackend):
class SecretKeyFilter(DjangoFilterBackend):
"""Allow access to all objects when authenticated with secret key as token.
Replaces both DjangoFilterBackend and ObjectFilter"""
Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""
def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
auth_header = get_authorization_header(request)
token = validate_auth(auth_header)
if token and token == settings.SECRET_KEY:
return queryset
queryset = ObjectFilter().filter_queryset(request, queryset, view)
queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
return super().filter_queryset(request, queryset, view)

View File

@ -10,7 +10,7 @@ from structlog.stdlib import get_logger
LOGGER = get_logger()
def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None):
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
"""Check permissions for a single custom action"""
def wrapper_outter(func: Callable):
@ -18,17 +18,15 @@ def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[l
@wraps(func)
def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
if obj_perm:
if perm:
obj = self.get_object()
if not request.user.has_perm(obj_perm, obj):
LOGGER.debug(
"denying access for object", user=request.user, perm=obj_perm, obj=obj
)
if not request.user.has_perm(perm, obj):
LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
return self.permission_denied(request)
if global_perms:
for other_perm in global_perms:
if other_perms:
for other_perm in other_perms:
if not request.user.has_perm(other_perm):
LOGGER.debug("denying access for other", user=request.user, perm=other_perm)
LOGGER.debug("denying access for other", user=request.user, perm=perm)
return self.permission_denied(request)
return func(self, request, *args, **kwargs)
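
For context on the decorator above: the first argument (obj_perm in the newer signature) names an object-level permission checked against self.get_object(), while the second (global_perms) lists plain user permissions checked directly on the request user. A hypothetical usage on a detail action, with the viewset name and permission strings chosen only for illustration:

from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required


class ExampleUserViewSet(ModelViewSet):
    # Hypothetical action: requires change permission on the specific object
    # returned by self.get_object(), plus a global permission to view events.
    @permission_required("authentik_core.change_user", ["authentik_events.view_event"])
    @action(detail=True, methods=["post"])
    def example(self, request: Request, pk: str) -> Response:
        return Response(status=204)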

View File

@ -77,10 +77,3 @@ class Pagination(pagination.PageNumberPagination):
},
"required": ["pagination", "results"],
}
class SmallerPagination(Pagination):
"""Smaller pagination for objects which might require a lot of queries
to retrieve all data for."""
max_page_size = 10

View File

@ -16,7 +16,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
def tester(self: TestModelViewSets):
self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
self.assertIsNotNone(getattr(test_viewset, "ordering", None))
filterset_class = getattr(test_viewset, "filterset_class", None)
if not filterset_class:
self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))

View File

@ -21,9 +21,7 @@ _other_urls = []
for _authentik_app in get_apps():
try:
api_urls = import_module(f"{_authentik_app.name}.urls")
except ModuleNotFoundError:
continue
except ImportError as exc:
except (ModuleNotFoundError, ImportError) as exc:
LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
continue
if not hasattr(api_urls, "api_urlpatterns"):

View File

@ -4,6 +4,7 @@ from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField, JSONField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer
@ -86,11 +87,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
permission_classes = [IsAdminUser]
serializer_class = BlueprintInstanceSerializer
queryset = BlueprintInstance.objects.all()
search_fields = ["name", "path"]
filterset_fields = ["name", "path"]
ordering = ["name"]
@extend_schema(
responses={

View File

@ -40,7 +40,7 @@ class ManagedAppConfig(AppConfig):
meth()
self._logger.debug("Successfully reconciled", name=name)
except (DatabaseError, ProgrammingError, InternalError) as exc:
self._logger.warning("Failed to run reconcile", name=name, exc=exc)
self._logger.debug("Failed to run reconcile", name=name, exc=exc)
class AuthentikBlueprintsConfig(ManagedAppConfig):

View File

@ -6,7 +6,6 @@ from django.test import TestCase
from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken
class TestModels(TestCase):
@ -22,9 +21,6 @@ def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
model_class = test_model()
self.assertTrue(isinstance(model_class, SerializerModel))
self.assertIsNotNone(model_class.serializer)
if model_class.serializer.Meta().model == RefreshToken:
return
self.assertEqual(model_class.serializer.Meta().model, test_model)
return tester

View File

@ -584,17 +584,12 @@ class EntryInvalidError(SentryIgnoredException):
entry_model: Optional[str]
entry_id: Optional[str]
validation_error: Optional[ValidationError]
serializer: Optional[Serializer] = None
def __init__(
self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs
) -> None:
def __init__(self, *args: object, validation_error: Optional[ValidationError] = None) -> None:
super().__init__(*args)
self.entry_model = None
self.entry_id = None
self.validation_error = validation_error
for key, value in kwargs.items():
setattr(self, key, value)
@staticmethod
def from_entry(

View File

@ -35,28 +35,25 @@ from authentik.core.models import (
Source,
UserSourceConnection,
)
from authentik.enterprise.models import LicenseUsage
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.providers.scim.models import SCIMGroup, SCIMUser
# Context set when the serializer is created in a blueprint context
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
def excluded_models() -> list[type[Model]]:
"""Return a list of all excluded models that shouldn't be exposed via API
or other means (internal only, base classes, non-used objects, etc)"""
def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
# pylint: disable=imported-auth-user
from django.contrib.auth.models import Group as DjangoGroup
from django.contrib.auth.models import User as DjangoUser
return (
excluded_models = (
DjangoUser,
DjangoGroup,
# Base classes
@ -72,15 +69,8 @@ def excluded_models() -> list[type[Model]]:
AuthenticatedSession,
# Classes which are only internally managed
FlowToken,
LicenseUsage,
SCIMGroup,
SCIMUser,
)
def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel))
return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
class DoRollback(SentryIgnoredException):
@ -255,10 +245,7 @@ class Importer:
try:
full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
except ValueError as exc:
raise EntryInvalidError.from_entry(
exc,
entry,
) from exc
raise EntryInvalidError.from_entry(exc, entry) from exc
always_merger.merge(full_data, updated_identifiers)
serializer_kwargs["data"] = full_data
@ -275,7 +262,6 @@ class Importer:
f"Serializer errors {serializer.errors}",
validation_error=exc,
entry=entry,
serializer=serializer,
) from exc
return serializer
@ -304,18 +290,16 @@ class Importer:
)
return False
# Validate each single entry
serializer = None
try:
serializer = self._validate_single(entry)
except EntryInvalidError as exc:
# For deleting objects we don't need the serializer to be valid
if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
serializer = exc.serializer
else:
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
continue
self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
if raise_errors:
raise exc
return False
if not serializer:
continue

View File

@ -75,14 +75,14 @@ class BlueprintEventHandler(FileSystemEventHandler):
return
if event.is_directory:
return
root = Path(CONFIG.get("blueprints_dir")).absolute()
path = Path(event.src_path).absolute()
rel_path = str(path.relative_to(root))
if isinstance(event, FileCreatedEvent):
LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
blueprints_discovery.delay(rel_path)
LOGGER.debug("new blueprint file created, starting discovery")
blueprints_discovery.delay()
if isinstance(event, FileModifiedEvent):
for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
path = Path(event.src_path)
root = Path(CONFIG.get("blueprints_dir")).absolute()
rel_path = str(path.relative_to(root))
for instance in BlueprintInstance.objects.filter(path=rel_path):
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
apply_blueprint.delay(instance.pk.hex)
@ -98,32 +98,39 @@ def blueprints_find_dict():
return blueprints
def blueprints_find() -> list[BlueprintFile]:
def blueprints_find():
"""Find blueprints and return valid ones"""
blueprints = []
root = Path(CONFIG.get("blueprints_dir"))
for path in root.rglob("**/*.yaml"):
rel_path = path.relative_to(root)
# Check if any part in the path starts with a dot and assume a hidden file
if any(part for part in path.parts if part.startswith(".")):
continue
LOGGER.debug("found blueprint", path=str(path))
with open(path, "r", encoding="utf-8") as blueprint_file:
try:
raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
except YAMLError as exc:
raw_blueprint = None
LOGGER.warning("failed to parse blueprint", exc=exc, path=str(rel_path))
LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
if not raw_blueprint:
continue
metadata = raw_blueprint.get("metadata", None)
version = raw_blueprint.get("version", 1)
if version != 1:
LOGGER.warning("invalid blueprint version", version=version, path=str(rel_path))
LOGGER.warning("invalid blueprint version", version=version, path=str(path))
continue
file_hash = sha512(path.read_bytes()).hexdigest()
blueprint = BlueprintFile(str(rel_path), version, file_hash, int(path.stat().st_mtime))
blueprint = BlueprintFile(
str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
)
blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
blueprints.append(blueprint)
LOGGER.debug(
"parsed & loaded blueprint",
hash=file_hash,
path=str(path),
)
return blueprints
@ -131,12 +138,10 @@ def blueprints_find() -> list[BlueprintFile]:
throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
def blueprints_discovery(self: MonitoredTask):
"""Find blueprints and check if they need to be created in the database"""
count = 0
for blueprint in blueprints_find():
if path and blueprint.path != path:
continue
check_blueprint_v1_file(blueprint)
count += 1
self.set_status(
@ -166,11 +171,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
metadata={},
)
instance.save()
LOGGER.info(
"Creating new blueprint instance from file", instance=instance, path=instance.path
)
if instance.last_applied_hash != blueprint.hash:
LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
apply_blueprint.delay(str(instance.pk))

View File

@ -17,6 +17,7 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
from structlog.testing import capture_logs
@ -37,7 +38,6 @@ from authentik.lib.utils.file import (
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
from authentik.rbac.filters import ObjectFilter
LOGGER = get_logger()
@ -98,7 +98,6 @@ class ApplicationSerializer(ModelSerializer):
class ApplicationViewSet(UsedByMixin, ModelViewSet):
"""Application Viewset"""
# pylint: disable=no-member
queryset = Application.objects.all().prefetch_related("provider")
serializer_class = ApplicationSerializer
search_fields = [
@ -123,7 +122,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
"""Custom filter_queryset method which ignores guardian, but still supports sorting"""
for backend in list(self.filter_backends):
if backend == ObjectFilter:
if backend == ObjectPermissionsFilter:
continue
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset

View File

@ -2,6 +2,7 @@
from json import loads
from typing import Optional
from django.db.models.query import QuerySet
from django.http import Http404
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
@ -13,12 +14,12 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer, is_dict
from authentik.core.models import Group, User
from authentik.rbac.api.roles import RoleSerializer
class GroupMemberSerializer(ModelSerializer):
@ -48,12 +49,6 @@ class GroupSerializer(ModelSerializer):
users_obj = ListSerializer(
child=GroupMemberSerializer(), read_only=True, source="users", required=False
)
roles_obj = ListSerializer(
child=RoleSerializer(),
read_only=True,
source="roles",
required=False,
)
parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
num_pk = IntegerField(read_only=True)
@ -76,10 +71,8 @@ class GroupSerializer(ModelSerializer):
"parent",
"parent_name",
"users",
"users_obj",
"attributes",
"roles",
"roles_obj",
"users_obj",
]
extra_kwargs = {
"users": {
@ -139,13 +132,25 @@ class UserAccountSerializer(PassiveSerializer):
class GroupViewSet(UsedByMixin, ModelViewSet):
"""Group Viewset"""
# pylint: disable=no-member
queryset = Group.objects.all().select_related("parent").prefetch_related("users")
serializer_class = GroupSerializer
search_fields = ["name", "is_superuser"]
filterset_class = GroupFilter
ordering = ["name"]
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
"""Custom filter_queryset method which ignores guardian, but still supports sorting"""
for backend in list(self.filter_backends):
if backend == ObjectPermissionsFilter:
continue
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset
def filter_queryset(self, queryset):
if self.request.user.has_perm("authentik_core.view_group"):
return self._filter_queryset_for_list(queryset)
return super().filter_queryset(queryset)
@permission_required(None, ["authentik_core.add_user"])
@extend_schema(
request=UserAccountSerializer,

View File

@ -38,7 +38,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
managed = ReadOnlyField()
component = SerializerMethodField()
icon = ReadOnlyField(source="icon_url")
icon = ReadOnlyField(source="get_icon")
def get_component(self, obj: Source) -> str:
"""Get object component so that we know how to edit the object"""

View File

@ -119,7 +119,6 @@ class TransactionApplicationResponseSerializer(PassiveSerializer):
class TransactionalApplicationView(APIView):
"""Create provider and application and attach them in a single transaction"""
# TODO: Migrate to a more specific permission
permission_classes = [IsAdminUser]
@extend_schema(

View File

@ -73,11 +73,6 @@ class UsedByMixin:
# but so we only apply them once, have a simple flag for the first object
first_object = True
# TODO: This will only return the used-by references that the user can see
# Either we have to leak model information here to not make the list
# useless if the user doesn't have all permissions, or we need to double
# query and check if there is a difference between modes the user can see
# and can't see and add a warning
for obj in get_objects_for_user(
request.user, f"{app}.view_{model_name}", manager
).all():

View File

@ -7,6 +7,7 @@ from django.contrib.auth import update_session_auth_hash
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.db.models.functions import ExtractHour
from django.db.models.query import QuerySet
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy
@ -51,6 +52,7 @@ from rest_framework.serializers import (
)
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
from authentik.admin.api.metrics import CoordinateSerializer
@ -171,11 +173,6 @@ class UserSerializer(ModelSerializer):
raise ValidationError("Setting a user to internal service account is not allowed.")
return user_type
def validate(self, attrs: dict) -> dict:
if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
raise ValidationError("Can't modify internal service account users")
return super().validate(attrs)
class Meta:
model = User
fields = [
@ -193,7 +190,6 @@ class UserSerializer(ModelSerializer):
"uid",
"path",
"type",
"uuid",
]
extra_kwargs = {
"name": {"allow_blank": True},
@ -208,7 +204,6 @@ class UserSelfSerializer(ModelSerializer):
groups = SerializerMethodField()
uid = CharField(read_only=True)
settings = SerializerMethodField()
system_permissions = SerializerMethodField()
@extend_schema_field(
ListSerializer(
@ -230,14 +225,6 @@ class UserSelfSerializer(ModelSerializer):
"""Get user settings with tenant and group settings applied"""
return user.group_attributes(self._context["request"]).get("settings", {})
def get_system_permissions(self, user: User) -> list[str]:
"""Get all system permissions assigned to the user"""
return list(
user.user_permissions.filter(
content_type__app_label="authentik_rbac", content_type__model="systempermission"
).values_list("codename", flat=True)
)
class Meta:
model = User
fields = [
@ -252,7 +239,6 @@ class UserSelfSerializer(ModelSerializer):
"uid",
"settings",
"type",
"system_permissions",
]
extra_kwargs = {
"is_active": {"read_only": True},
@ -630,10 +616,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
if not request.user.has_perm("impersonate"):
LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
return Response(status=401)
user_to_be = self.get_object()
if user_to_be.pk == self.request.user.pk:
LOGGER.debug("User attempted to impersonate themselves", user=request.user)
return Response(status=401)
request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
@ -667,6 +651,19 @@ class UserViewSet(UsedByMixin, ModelViewSet):
return Response(status=204)
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
"""Custom filter_queryset method which ignores guardian, but still supports sorting"""
for backend in list(self.filter_backends):
if backend == ObjectPermissionsFilter:
continue
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset
def filter_queryset(self, queryset):
if self.request.user.has_perm("authentik_core.view_user"):
return self._filter_queryset_for_list(queryset)
return super().filter_queryset(queryset)
@extend_schema(
responses={
200: inline_serializer(

View File

@ -44,7 +44,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
if request:
req.http_request = request
self._context["request"] = req
req.context.update(**kwargs)
self._context.update(**kwargs)
self.dry_run = dry_run

View File

@ -17,15 +17,9 @@ class Command(BaseCommand):
"""Run worker"""
def add_arguments(self, parser):
parser.add_argument(
"-b",
"--beat",
action="store_false",
help="When set, this worker will _not_ run Beat (scheduled) tasks",
)
parser.add_argument("-b", "--beat", action="store_true")
def handle(self, **options):
LOGGER.debug("Celery options", **options)
close_old_connections()
if CONFIG.get_bool("remote_debug"):
import debugpy
@ -35,7 +29,7 @@ class Command(BaseCommand):
no_color=False,
quiet=True,
optimization="fair",
autoscale=(CONFIG.get_int("worker.concurrency"), 1),
autoscale=(3, 1),
task_events=True,
beat=options.get("beat", True),
schedule_filename=f"{tempdir}/celerybeat-schedule",

View File

@ -1,45 +0,0 @@
# Generated by Django 4.2.6 on 2023-10-11 13:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0031_alter_user_type"),
("authentik_rbac", "0001_initial"),
]
operations = [
migrations.AlterModelOptions(
name="group",
options={"verbose_name": "Group", "verbose_name_plural": "Groups"},
),
migrations.AlterModelOptions(
name="token",
options={
"permissions": [("view_token_key", "View token's key")],
"verbose_name": "Token",
"verbose_name_plural": "Tokens",
},
),
migrations.AlterModelOptions(
name="user",
options={
"permissions": [
("reset_user_password", "Reset Password"),
("impersonate", "Can impersonate other users"),
("assign_user_permissions", "Can assign permissions to users"),
("unassign_user_permissions", "Can unassign permissions from users"),
],
"verbose_name": "User",
"verbose_name_plural": "Users",
},
),
migrations.AddField(
model_name="group",
name="roles",
field=models.ManyToManyField(
blank=True, related_name="ak_groups", to="authentik_rbac.role"
),
),
]

View File

@ -1,7 +1,7 @@
"""authentik core models"""
from datetime import timedelta
from hashlib import sha256
from typing import Any, Optional, Self
from typing import Any, Optional
from uuid import uuid4
from deepmerge import always_merger
@ -88,8 +88,6 @@ class Group(SerializerModel):
default=False, help_text=_("Users added to this group will be superusers.")
)
roles = models.ManyToManyField("authentik_rbac.Role", related_name="ak_groups", blank=True)
parent = models.ForeignKey(
"Group",
blank=True,
@ -117,38 +115,6 @@ class Group(SerializerModel):
"""Recursively check if `user` is member of us, or any parent."""
return user.all_groups().filter(group_uuid=self.group_uuid).exists()
def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]:
"""Recursively get all groups that have this as parent or are indirectly related"""
direct_groups = []
if isinstance(self, QuerySet):
direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator())
else:
direct_groups = [self.pk]
if len(direct_groups) < 1:
return Group.objects.none()
query = """
WITH RECURSIVE parents AS (
SELECT authentik_core_group.*, 0 AS relative_depth
FROM authentik_core_group
WHERE authentik_core_group.group_uuid = ANY(%s)
UNION ALL
SELECT authentik_core_group.*, parents.relative_depth + 1
FROM authentik_core_group, parents
WHERE (
authentik_core_group.group_uuid = parents.parent_id and
parents.relative_depth < 20
)
)
SELECT group_uuid
FROM parents
GROUP BY group_uuid, name
ORDER BY name;
"""
group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
return Group.objects.filter(pk__in=group_pks)
def __str__(self):
return f"Group {self.name}"
@ -159,8 +125,6 @@ class Group(SerializerModel):
"parent",
),
)
verbose_name = _("Group")
verbose_name_plural = _("Groups")
class UserManager(DjangoUserManager):
@ -196,7 +160,33 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
"""Recursively get all groups this user is a member of.
At least one query is done to get the direct groups of the user, with groups
there are at most 3 queries done"""
return Group.children_recursive(self.ak_groups.all())
direct_groups = list(
x for x in self.ak_groups.all().values_list("pk", flat=True).iterator()
)
if len(direct_groups) < 1:
return Group.objects.none()
query = """
WITH RECURSIVE parents AS (
SELECT authentik_core_group.*, 0 AS relative_depth
FROM authentik_core_group
WHERE authentik_core_group.group_uuid = ANY(%s)
UNION ALL
SELECT authentik_core_group.*, parents.relative_depth + 1
FROM authentik_core_group, parents
WHERE (
authentik_core_group.group_uuid = parents.parent_id and
parents.relative_depth < 20
)
)
SELECT group_uuid
FROM parents
GROUP BY group_uuid, name
ORDER BY name;
"""
group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
return Group.objects.filter(pk__in=group_pks)
def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]:
"""Get a dictionary containing the attributes from all groups the user belongs to,
@ -271,14 +261,12 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
return get_avatar(self)
class Meta:
permissions = (
("reset_user_password", "Reset Password"),
("impersonate", "Can impersonate other users"),
)
verbose_name = _("User")
verbose_name_plural = _("Users")
permissions = [
("reset_user_password", _("Reset Password")),
("impersonate", _("Can impersonate other users")),
("assign_user_permissions", _("Can assign permissions to users")),
("unassign_user_permissions", _("Can unassign permissions from users")),
]
class Provider(SerializerModel):
@ -687,7 +675,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
models.Index(fields=["identifier"]),
models.Index(fields=["key"]),
]
permissions = [("view_token_key", _("View token's key"))]
permissions = (("view_token_key", "View token's key"),)
class PropertyMapping(SerializerModel, ManagedModel):

View File

@ -7,7 +7,6 @@ from django.db.models import Model
from django.db.models.signals import post_save, pre_delete, pre_save
from django.dispatch import receiver
from django.http.request import HttpRequest
from structlog.stdlib import get_logger
from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User
@ -16,8 +15,6 @@ password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal()
LOGGER = get_logger()
@receiver(post_save, sender=Application)
def post_save_application(sender: type[Model], instance, created: bool, **_):

View File

@ -97,7 +97,6 @@ class SourceFlowManager:
if self.request.user.is_authenticated:
new_connection.user = self.request.user
new_connection = self.update_connection(new_connection, **kwargs)
# pylint: disable=no-member
new_connection.save()
return Action.LINK, new_connection

View File

@ -13,6 +13,7 @@
{% block head_before %}
{% endblock %}
<link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
<link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
<script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script>
<script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script>

View File

@ -16,8 +16,8 @@ You've logged out of {{ application }}.
{% block card %}
<form method="POST" class="pf-c-form">
<p>
{% blocktrans with application=application.name branding_title=tenant.branding_title %}
You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
{% blocktrans with application=application.name %}
You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your authentik account.
{% endblocktrans %}
</p>

View File

@ -6,7 +6,6 @@
{% block head_before %}
<link rel="prefetch" href="/static/dist/assets/images/flow_background.jpg" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %}
{% endblock %}

View File

@ -6,7 +6,6 @@ from rest_framework.test import APITestCase
from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG
class TestImpersonation(APITestCase):
@ -47,42 +46,12 @@ class TestImpersonation(APITestCase):
"""test impersonation without permissions"""
self.client.force_login(self.other_user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 403)
self.client.get(reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}))
response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.other_user.username)
@CONFIG.patch("impersonation", False)
def test_impersonate_disabled(self):
"""test impersonation that is disabled"""
self.client.force_login(self.user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk})
)
self.assertEqual(response.status_code, 401)
response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.user.username)
def test_impersonate_self(self):
"""test impersonation that user can't impersonate themselves"""
self.client.force_login(self.user)
response = self.client.post(
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 401)
response = self.client.get(reverse("authentik_api:user-me"))
response_body = loads(response.content.decode())
self.assertEqual(response_body["user"]["username"], self.user.username)
def test_un_impersonate_empty(self):
"""test un-impersonation without impersonating first"""
self.client.force_login(self.other_user)

View File

@ -21,9 +21,10 @@ def create_test_flow(
)
def create_test_user(name: Optional[str] = None, **kwargs) -> User:
"""Generate a test user"""
def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
"""Generate a test-admin user"""
uid = generate_id(20) if not name else name
group = Group.objects.create(name=uid, is_superuser=True)
kwargs.setdefault("email", f"{uid}@goauthentik.io")
kwargs.setdefault("username", uid)
user: User = User.objects.create(
@ -32,13 +33,6 @@ def create_test_user(name: Optional[str] = None, **kwargs) -> User:
)
user.set_password(uid)
user.save()
return user
def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
"""Generate a test-admin user"""
user = create_test_user(name, **kwargs)
group = Group.objects.create(name=user.name or name, is_superuser=True)
group.users.add(user)
return user

View File

@ -1,10 +1,13 @@
"""authentik crypto app config"""
from datetime import datetime
from typing import Optional
from typing import TYPE_CHECKING, Optional
from authentik.blueprints.apps import ManagedAppConfig
from authentik.lib.generators import generate_id
if TYPE_CHECKING:
from authentik.crypto.models import CertificateKeyPair
MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"
@ -20,37 +23,33 @@ class AuthentikCryptoConfig(ManagedAppConfig):
"""Load crypto tasks"""
self.import_module("authentik.crypto.tasks")
def _create_update_cert(self):
def _create_update_cert(self, cert: Optional["CertificateKeyPair"] = None):
from authentik.crypto.builder import CertificateBuilder
from authentik.crypto.models import CertificateKeyPair
common_name = "authentik Internal JWT Certificate"
builder = CertificateBuilder(common_name)
builder = CertificateBuilder("authentik Internal JWT Certificate")
builder.build(
subject_alt_names=["goauthentik.io"],
validity_days=360,
)
CertificateKeyPair.objects.update_or_create(
managed=MANAGED_KEY,
defaults={
"name": common_name,
"certificate_data": builder.certificate,
"key_data": builder.private_key,
},
)
if not cert:
cert = CertificateKeyPair()
builder.cert = cert
builder.cert.managed = MANAGED_KEY
builder.save()
def reconcile_managed_jwt_cert(self):
"""Ensure managed JWT certificate"""
from authentik.crypto.models import CertificateKeyPair
cert: Optional[CertificateKeyPair] = CertificateKeyPair.objects.filter(
managed=MANAGED_KEY
).first()
now = datetime.now()
if not cert or (
now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after
):
certs = CertificateKeyPair.objects.filter(managed=MANAGED_KEY)
if not certs.exists():
self._create_update_cert()
return
cert: CertificateKeyPair = certs.first()
now = datetime.now()
if now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after:
self._create_update_cert(cert)
def reconcile_self_signed(self):
"""Create self-signed keypair"""
@ -62,10 +61,4 @@ class AuthentikCryptoConfig(ManagedAppConfig):
return
builder = CertificateBuilder(name)
builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
CertificateKeyPair.objects.get_or_create(
name=name,
defaults={
"certificate_data": builder.certificate,
"key_data": builder.private_key,
},
)
builder.save()
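
Both reconcile paths above delegate generation to authentik's CertificateBuilder, which is not shown in this diff. As a rough illustration of the underlying technique only, this is how a self-signed certificate with a SAN and a fixed validity window can be produced with the cryptography library; it is a sketch, not authentik's implementation:

from datetime import datetime, timedelta, timezone
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

def build_self_signed(common_name: str, san: str, validity_days: int = 360):
    """Return (certificate_pem, private_key_pem) for a self-signed certificate."""
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    subject = issuer = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, common_name)])
    not_before = datetime.now(timezone.utc)
    cert = (
        x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(not_before)
        .not_valid_after(not_before + timedelta(days=validity_days))
        .add_extension(x509.SubjectAlternativeName([x509.DNSName(san)]), critical=False)
        .sign(key, hashes.SHA256())
    )
    # PEM-encode both halves so they can be stored on a model, as the code above does
    cert_pem = cert.public_bytes(serialization.Encoding.PEM).decode()
    key_pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode()
    return cert_pem, key_pem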

View File

@ -6,7 +6,7 @@ from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.fields import BooleanField, CharField, DateTimeField, IntegerField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
@ -84,7 +84,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
200: inline_serializer("InstallIDSerializer", {"install_id": CharField(required=True)}),
},
)
@action(detail=False, methods=["GET"])
@action(detail=False, methods=["GET"], permission_classes=[IsAdminUser])
def get_install_id(self, request: Request) -> Response:
"""Get install_id"""
return Response(

View File

@ -33,8 +33,4 @@ class Migration(migrations.Migration):
"verbose_name_plural": "License Usage Records",
},
),
migrations.AlterModelOptions(
name="license",
options={"verbose_name": "License", "verbose_name_plural": "Licenses"},
),
]

View File

@ -19,10 +19,8 @@ from django.utils.translation import gettext as _
from guardian.shortcuts import get_anonymous_user
from jwt import PyJWTError, decode, get_unverified_header
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer
from authentik.core.models import ExpiringModel, User, UserTypes
from authentik.lib.models import SerializerModel
from authentik.root.install_id import get_install_id
@ -136,9 +134,6 @@ class LicenseKey:
def record_usage(self):
"""Capture the current validity status and metrics and save them"""
threshold = now() - timedelta(hours=8)
if LicenseUsage.objects.filter(record_date__gte=threshold).exists():
return
LicenseUsage.objects.create(
user_count=self.get_default_user_count(),
external_user_count=self.get_external_user_count(),
@ -156,7 +151,7 @@ class LicenseKey:
return usage.record_date
class License(SerializerModel):
class License(models.Model):
"""An authentik enterprise license"""
license_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
@ -167,12 +162,6 @@ class License(SerializerModel):
internal_users = models.BigIntegerField()
external_users = models.BigIntegerField()
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.api import LicenseSerializer
return LicenseSerializer
@property
def status(self) -> LicenseKey:
"""Get parsed license status"""
@ -180,8 +169,6 @@ class License(SerializerModel):
class Meta:
indexes = (HashIndex(fields=("key",)),)
verbose_name = _("License")
verbose_name_plural = _("Licenses")
def usage_expiry():
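
The guard added to record_usage above throttles usage records to roughly one row per 8-hour window. A minimal sketch of that time-window dedup with the Django ORM; the model and field names below are placeholders:

from datetime import timedelta

from django.utils.timezone import now

def record_once_per_window(model, window=timedelta(hours=8), **fields):
    """Create a new record only if none exists inside the trailing window."""
    threshold = now() - window
    if model.objects.filter(record_date__gte=threshold).exists():
        return None
    return model.objects.create(**fields)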

View File

@ -6,7 +6,7 @@ from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
"enterprise_calculate_license": {
"task": "authentik.enterprise.tasks.calculate_license",
"schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/2"),
"schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/8"),
"options": {"queue": "authentik_scheduled"},
}
}

View File

@ -6,4 +6,5 @@ from authentik.root.celery import CELERY_APP
@CELERY_APP.task()
def calculate_license():
"""Calculate licensing status"""
LicenseKey.get_total().record_usage()
total = LicenseKey.get_total()
total.record_usage()

View File

@ -27,7 +27,6 @@ from authentik.lib.sentry import before_send
from authentik.lib.utils.errors import exception_to_string
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMGroup, SCIMUser
from authentik.stages.authenticator_static.models import StaticToken
@ -53,13 +52,11 @@ IGNORED_MODELS = (
RefreshToken,
SCIMUser,
SCIMGroup,
Reputation,
)
def should_log_model(model: Model) -> bool:
"""Return true if operation on `model` should be logged"""
# Check for silk by string so this comparison doesn't fail when silk isn't installed
if model.__module__.startswith("silk"):
return False
return model.__class__ not in IGNORED_MODELS
@ -96,30 +93,21 @@ class AuditMiddleware:
of models"""
get_response: Callable[[HttpRequest], HttpResponse]
anonymous_user: User = None
def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
self.get_response = get_response
def _ensure_fallback_user(self):
"""Defer fetching anonymous user until we have to"""
if self.anonymous_user:
return
from guardian.shortcuts import get_anonymous_user
self.anonymous_user = get_anonymous_user()
def connect(self, request: HttpRequest):
"""Connect signal for automatic logging"""
self._ensure_fallback_user()
user = getattr(request, "user", self.anonymous_user)
if not user.is_authenticated:
user = self.anonymous_user
if not hasattr(request, "user"):
return
if not getattr(request.user, "is_authenticated", False):
return
if not hasattr(request, "request_id"):
return
post_save_handler = partial(self.post_save_handler, user=user, request=request)
pre_delete_handler = partial(self.pre_delete_handler, user=user, request=request)
m2m_changed_handler = partial(self.m2m_changed_handler, user=user, request=request)
post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
pre_delete_handler = partial(self.pre_delete_handler, user=request.user, request=request)
m2m_changed_handler = partial(self.m2m_changed_handler, user=request.user, request=request)
post_save.connect(
post_save_handler,
dispatch_uid=request.request_id,

View File

@ -217,7 +217,6 @@ class Event(SerializerModel, ExpiringModel):
"path": request.path,
"method": request.method,
"args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
"user_agent": request.META.get("HTTP_USER_AGENT", ""),
}
# Special case for events created during flow execution
# since they keep the http query within a wrapped query
@ -437,39 +436,32 @@ class NotificationTransport(SerializerModel):
def send_email(self, notification: "Notification") -> list[str]:
"""Send notification via global email configuration"""
subject_prefix = "authentik Notification: "
context = {
"key_value": {
"user_email": notification.user.email,
"user_username": notification.user.username,
},
"body": notification.body,
"title": "",
subject = "authentik Notification: "
key_value = {
"user_email": notification.user.email,
"user_username": notification.user.username,
}
if notification.event and notification.event.user:
context["key_value"]["event_user_email"] = notification.event.user.get("email", None)
context["key_value"]["event_user_username"] = notification.event.user.get(
"username", None
)
key_value["event_user_email"] = notification.event.user.get("email", None)
key_value["event_user_username"] = notification.event.user.get("username", None)
if notification.event:
context["title"] += notification.event.action
subject += notification.event.action
for key, value in notification.event.context.items():
if not isinstance(value, str):
continue
context["key_value"][key] = value
key_value[key] = value
else:
context["title"] += notification.body[:75]
# TODO: improve permission check
if notification.user.is_superuser:
context["source"] = {
"from": self.name,
}
subject += notification.body[:75]
mail = TemplateEmailMessage(
subject=subject_prefix + context["title"],
subject=subject,
to=[notification.user.email],
language=notification.user.locale(),
template_name="email/event_notification.html",
template_context=context,
template_name="email/generic.html",
template_context={
"title": subject,
"body": notification.body,
"key_value": key_value,
},
)
# Email is sent directly here, as the call to send() should have been from a task.
try:

View File

@ -206,8 +206,8 @@ def prefill_task(func):
task_call_module=func.__module__,
task_call_func=func.__name__,
# We don't have real values for these attributes but they cannot be null
start_timestamp=0,
finish_timestamp=0,
start_timestamp=default_timer(),
finish_timestamp=default_timer(),
finish_time=datetime.now(),
).save(86400)
LOGGER.debug("prefilled task", task_name=func.__name__)

View File

@ -13,7 +13,6 @@ from authentik.events.tasks import event_notification_handler, gdpr_cleanup
from authentik.flows.models import Stage
from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.config import CONFIG
from authentik.stages.invitation.models import Invitation
from authentik.stages.invitation.signals import invitation_used
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
@ -93,5 +92,4 @@ def event_post_save_notification(sender, instance: Event, **_):
@receiver(pre_delete, sender=User)
def event_user_pre_delete_cleanup(sender, instance: User, **_):
"""If gdpr_compliance is enabled, remove all the user's events"""
if CONFIG.get_bool("gdpr_compliance", True):
gdpr_cleanup.delay(instance.pk)
gdpr_cleanup.delay(instance.pk)

View File

@ -53,15 +53,7 @@ class TestEvents(TestCase):
"""Test plain from_http"""
event = Event.new("unittest").from_http(self.factory.get("/"))
self.assertEqual(
event.context,
{
"http_request": {
"args": {},
"method": "GET",
"path": "/",
"user_agent": "",
}
},
event.context, {"http_request": {"args": {}, "method": "GET", "path": "/"}}
)
def test_from_http_clean_querystring(self):
@ -75,7 +67,6 @@ class TestEvents(TestCase):
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
"user_agent": "",
}
},
)
@ -92,7 +83,6 @@ class TestEvents(TestCase):
"args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
"method": "GET",
"path": "/",
"user_agent": "",
}
},
)

View File

@ -2,20 +2,17 @@
import re
from copy import copy
from dataclasses import asdict, is_dataclass
from datetime import date, datetime, time, timedelta
from enum import Enum
from pathlib import Path
from types import GeneratorType, NoneType
from types import GeneratorType
from typing import Any, Optional
from uuid import UUID
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.db.models.base import Model
from django.http.request import HttpRequest
from django.utils import timezone
from django.views.debug import SafeExceptionReporterFilter
from geoip2.models import City
from guardian.utils import get_anonymous_user
@ -87,7 +84,7 @@ def get_user(user: User, original_user: Optional[User] = None) -> dict[str, Any]
return user_data
# pylint: disable=too-many-return-statements,too-many-branches
# pylint: disable=too-many-return-statements
def sanitize_item(value: Any) -> Any:
"""Sanitize a single item, ensure it is JSON parsable"""
if is_dataclass(value):
@ -137,37 +134,7 @@ def sanitize_item(value: Any) -> Any:
"type": value.__name__,
"module": value.__module__,
}
# See
# https://github.com/encode/django-rest-framework/blob/master/rest_framework/utils/encoders.py
# For Date Time string spec, see ECMA 262
# https://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
if isinstance(value, datetime):
representation = value.isoformat()
if representation.endswith("+00:00"):
representation = representation[:-6] + "Z"
return representation
if isinstance(value, date):
return value.isoformat()
if isinstance(value, time):
if timezone and timezone.is_aware(value):
raise ValueError("JSON can't represent timezone-aware times.")
return value.isoformat()
if isinstance(value, timedelta):
return str(value.total_seconds())
if callable(value):
return {
"type": "callable",
"name": value.__name__,
"module": value.__module__,
}
# List taken from the stdlib's JSON encoder (_make_iterencode, encoder.py:415)
if isinstance(value, (bool, int, float, NoneType, list, tuple, dict)):
return value
try:
return DjangoJSONEncoder().default(value)
except TypeError:
return str(value)
return str(value)
return value
def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
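
The removed branch above serializes datetimes to ECMA-262 style strings (trailing Z instead of +00:00) before falling back to DjangoJSONEncoder. A small worked example of that normalization, assuming timezone-aware values:

from datetime import datetime, timezone

def to_ecma262(value: datetime) -> str:
    """ISO 8601 with a trailing Z for UTC, the form JavaScript's Date parses."""
    representation = value.isoformat()
    if representation.endswith("+00:00"):
        representation = representation[:-6] + "Z"
    return representation

# to_ecma262(datetime(2023, 9, 22, 18, 29, 57, tzinfo=timezone.utc))
# -> "2023-09-22T18:29:57Z"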

View File

@ -45,4 +45,3 @@ class FlowStageBindingViewSet(UsedByMixin, ModelViewSet):
serializer_class = FlowStageBindingSerializer
filterset_fields = "__all__"
search_fields = ["stage__name"]
ordering = ["order"]

View File

@ -8,11 +8,6 @@ GAUGE_FLOWS_CACHED = Gauge(
"authentik_flows_cached",
"Cached flows",
)
HIST_FLOW_EXECUTION_STAGE_TIME = Histogram(
"authentik_flows_execution_stage_time",
"Duration each stage took to execute.",
["stage_type", "method"],
)
HIST_FLOWS_PLAN_TIME = Histogram(
"authentik_flows_plan_time",
"Duration to build a plan for a flow",

View File

@ -132,6 +132,13 @@ class PermissionDict(TypedDict):
name: str
class PermissionSerializer(PassiveSerializer):
"""Permission used for consent"""
name = CharField(allow_blank=True)
id = CharField()
class ChallengeResponse(PassiveSerializer):
"""Base class for all challenge responses"""

View File

@ -1,25 +0,0 @@
# Generated by Django 4.2.6 on 2023-10-10 17:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
]
operations = [
migrations.AlterModelOptions(
name="flow",
options={
"permissions": [
("export_flow", "Can export a Flow"),
("inspect_flow", "Can inspect a Flow's execution"),
("view_flow_cache", "View Flow's cache metrics"),
("clear_flow_cache", "Clear Flow's cache metrics"),
],
"verbose_name": "Flow",
"verbose_name_plural": "Flows",
},
),
]

View File

@ -1,34 +0,0 @@
# Generated by Django 4.2.6 on 2023-10-28 14:24
from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
def set_oobe_flow_authentication(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
from guardian.shortcuts import get_anonymous_user
Flow = apps.get_model("authentik_flows", "Flow")
User = apps.get_model("authentik_core", "User")
db_alias = schema_editor.connection.alias
users = User.objects.using(db_alias).exclude(username="akadmin")
try:
users = users.exclude(pk=get_anonymous_user().pk)
# pylint: disable=broad-except
except Exception: # nosec
pass
if users.exists():
Flow.objects.filter(slug="initial-setup").update(authentication="require_superuser")
class Migration(migrations.Migration):
dependencies = [
("authentik_flows", "0026_alter_flow_options"),
]
operations = [
migrations.RunPython(set_oobe_flow_authentication),
]

View File

@ -194,10 +194,9 @@ class Flow(SerializerModel, PolicyBindingModel):
verbose_name_plural = _("Flows")
permissions = [
("export_flow", _("Can export a Flow")),
("inspect_flow", _("Can inspect a Flow's execution")),
("view_flow_cache", _("View Flow's cache metrics")),
("clear_flow_cache", _("Clear Flow's cache metrics")),
("export_flow", "Can export a Flow"),
("view_flow_cache", "View Flow's cache metrics"),
("clear_flow_cache", "Clear Flow's cache metrics"),
]

View File

@ -167,11 +167,7 @@ class ChallengeStageView(StageView):
stage_type=self.__class__.__name__, method="get_challenge"
).time(),
):
try:
challenge = self.get_challenge(*args, **kwargs)
except StageInvalidException as exc:
self.logger.debug("Got StageInvalidException", exc=exc)
return self.executor.stage_invalid()
challenge = self.get_challenge(*args, **kwargs)
with Hub.current.start_span(
op="authentik.flow.stage._get_challenge",
description=self.__class__.__name__,

View File

@ -24,7 +24,6 @@ from structlog.stdlib import BoundLogger, get_logger
from authentik.core.models import Application
from authentik.events.models import Event, EventAction, cleanse_dict
from authentik.flows.apps import HIST_FLOW_EXECUTION_STAGE_TIME
from authentik.flows.challenge import (
Challenge,
ChallengeResponse,
@ -43,7 +42,6 @@ from authentik.flows.models import (
FlowDesignation,
FlowStageBinding,
FlowToken,
InvalidResponseAction,
Stage,
)
from authentik.flows.planner import (
@ -107,7 +105,7 @@ class FlowExecutorView(APIView):
flow: Flow
plan: Optional[FlowPlan] = None
current_binding: Optional[FlowStageBinding] = None
current_binding: FlowStageBinding
current_stage: Stage
current_stage_view: View
@ -267,21 +265,17 @@ class FlowExecutorView(APIView):
)
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Get the next pending challenge from the currently active flow."""
class_path = class_to_path(self.current_stage_view.__class__)
self._logger.debug(
"f(exec): Passing GET",
view_class=class_path,
view_class=class_to_path(self.current_stage_view.__class__),
stage=self.current_stage,
)
try:
with Hub.current.start_span(
op="authentik.flow.executor.stage",
description=class_path,
) as span, HIST_FLOW_EXECUTION_STAGE_TIME.labels(
method=request.method.upper(),
stage_type=class_path,
).time():
span.set_data("Method", request.method.upper())
description=class_to_path(self.current_stage_view.__class__),
) as span:
span.set_data("Method", "GET")
span.set_data("authentik Stage", self.current_stage_view)
span.set_data("authentik Flow", self.flow.slug)
stage_response = self.current_stage_view.dispatch(request)
@ -315,21 +309,17 @@ class FlowExecutorView(APIView):
)
def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Solve the previously retrieved challenge and advanced to the next stage."""
class_path = class_to_path(self.current_stage_view.__class__)
self._logger.debug(
"f(exec): Passing POST",
view_class=class_path,
view_class=class_to_path(self.current_stage_view.__class__),
stage=self.current_stage,
)
try:
with Hub.current.start_span(
op="authentik.flow.executor.stage",
description=class_path,
) as span, HIST_FLOW_EXECUTION_STAGE_TIME.labels(
method=request.method.upper(),
stage_type=class_path,
).time():
span.set_data("Method", request.method.upper())
description=class_to_path(self.current_stage_view.__class__),
) as span:
span.set_data("Method", "POST")
span.set_data("authentik Stage", self.current_stage_view)
span.set_data("authentik Flow", self.flow.slug)
stage_response = self.current_stage_view.dispatch(request)
@ -421,19 +411,6 @@ class FlowExecutorView(APIView):
Optionally, an exception can be passed, which will be shown if the current user
is a superuser."""
self._logger.debug("f(exec): Stage invalid")
if self.current_binding and self.current_binding.invalid_response_action in [
InvalidResponseAction.RESTART,
InvalidResponseAction.RESTART_WITH_CONTEXT,
]:
keep_context = (
self.current_binding.invalid_response_action
== InvalidResponseAction.RESTART_WITH_CONTEXT
)
self._logger.debug(
"f(exec): Invalid response, restarting flow",
keep_context=keep_context,
)
return self.restart_flow(keep_context)
self.cancel()
challenge_view = AccessDeniedChallengeView(self, error_message)
challenge_view.request = self.request
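
The GET and POST changes above wrap each stage dispatch in a Sentry span and, in one version, additionally in a labelled Prometheus histogram timer, combined in a single with-statement. A condensed sketch of combining the two context managers; the metric name, span op, and helper are illustrative assumptions:

from prometheus_client import Histogram
from sentry_sdk import Hub

HIST_STAGE_TIME = Histogram(
    "example_stage_execution_seconds",
    "Duration each stage took to execute",
    ["stage_type", "method"],
)

def dispatch_stage(view, request):
    """Time the stage dispatch and annotate the surrounding Sentry span."""
    class_path = f"{view.__class__.__module__}.{view.__class__.__name__}"
    with Hub.current.start_span(
        op="example.flow.executor.stage",
        description=class_path,
    ) as span, HIST_STAGE_TIME.labels(
        stage_type=class_path,
        method=request.method.upper(),
    ).time():
        span.set_data("Method", request.method.upper())
        return view.dispatch(request)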

View File

@ -3,7 +3,6 @@ from hashlib import sha256
from typing import Any
from django.conf import settings
from django.http import Http404
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404
@ -12,6 +11,7 @@ from django.views.decorators.clickjacking import xframe_options_sameorigin
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.fields import BooleanField, ListField, SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
@ -68,19 +68,21 @@ class FlowInspectionSerializer(PassiveSerializer):
class FlowInspectorView(APIView):
"""Flow inspector API"""
permission_classes = [IsAdminUser]
flow: Flow
_logger: BoundLogger
permission_classes = []
def check_permissions(self, request):
"""Always allow access when in debug mode"""
if settings.DEBUG:
return None
return super().check_permissions(request)
def setup(self, request: HttpRequest, flow_slug: str):
super().setup(request, flow_slug=flow_slug)
self._logger = get_logger().bind(flow_slug=flow_slug)
self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
if settings.DEBUG:
return
if request.user.has_perm("authentik_flow.inspect_flow", self.flow):
return
raise Http404
self._logger = get_logger().bind(flow_slug=flow_slug)
@extend_schema(
responses={

View File

@ -24,7 +24,7 @@ ENVIRONMENT = os.getenv(f"{ENV_PREFIX}_ENV", "local")
def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
"""Recursively walk through `root`, checking each part of `path` separated by `sep`.
"""Recursively walk through `root`, checking each part of `path` split by `sep`.
If at any point a dict does not exist, return default"""
for comp in path.split(sep):
if root and comp in root:
@ -34,19 +34,7 @@ def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
return root
def set_path_in_dict(root: dict, path: str, value: Any, sep="."):
"""Recursively walk through `root`, checking each part of `path` separated by `sep`
and setting the last value to `value`"""
# Walk each component of the path
path_parts = path.split(sep)
for comp in path_parts[:-1]:
if comp not in root:
root[comp] = {}
root = root.get(comp, {})
root[path_parts[-1]] = value
@dataclass(slots=True)
@dataclass
class Attr:
"""Single configuration attribute"""
@ -67,10 +55,6 @@ class Attr:
# to the config file containing this change or the file containing this value
source: Optional[str] = field(default=None)
def __post_init__(self):
if isinstance(self.value, Attr):
raise RuntimeError(f"config Attr with nested Attr for source {self.source}")
class AttrEncoder(JSONEncoder):
"""JSON encoder that can deal with `Attr` classes"""
@ -243,7 +227,15 @@ class ConfigLoader:
def set(self, path: str, value: Any, sep="."):
"""Set value using same syntax as get()"""
set_path_in_dict(self.raw, path, Attr(value), sep=sep)
# Walk sub_dicts before parsing path
root = self.raw
# Walk each component of the path
path_parts = path.split(sep)
for comp in path_parts[:-1]:
if comp not in root:
root[comp] = {}
root = root.get(comp, {})
root[path_parts[-1]] = Attr(value)
CONFIG = ConfigLoader()
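
Both versions above implement the same dotted-path walk over nested dicts; one simply factors the setter out into set_path_in_dict. A minimal sketch of the pair, assuming plain dicts without the Attr wrapper:

from typing import Any

def get_path_from_dict(root: dict, path: str, sep: str = ".", default: Any = None) -> Any:
    """Walk root one path component at a time, returning default on a miss."""
    for comp in path.split(sep):
        if isinstance(root, dict) and comp in root:
            root = root[comp]
        else:
            return default
    return root

def set_path_in_dict(root: dict, path: str, value: Any, sep: str = ".") -> None:
    """Create intermediate dicts as needed, then set the leaf."""
    parts = path.split(sep)
    for comp in parts[:-1]:
        root = root.setdefault(comp, {})
    root[parts[-1]] = value

# config: dict = {}
# set_path_in_dict(config, "web.threads", 4)
# get_path_from_dict(config, "web.threads")  -> 4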

View File

@ -111,6 +111,3 @@ web:
# No default here as it's set dynamically
# workers: 2
threads: 4
worker:
concurrency: 2

View File

@ -141,7 +141,7 @@ class BaseEvaluator:
"""Create event with supplied data and try to extract as much relevant data
from the context"""
context = self._context.copy()
# If the result was a complex variable, we don't want to reuse it
# If the result was a complex variable, we don't want to re-use it
context.pop("result", None)
context.pop("handler", None)
event_kwargs = context

View File

@ -1,32 +0,0 @@
"""Serializer validators"""
from typing import Optional
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import Serializer
from rest_framework.utils.representation import smart_repr
class RequiredTogetherValidator:
"""Serializer-level validator that ensures all fields in `fields` are only
used together"""
fields: list[str]
requires_context = True
message = _("The fields {field_names} must be used together.")
def __init__(self, fields: list[str], message: Optional[str] = None) -> None:
self.fields = fields
self.message = message or self.message
def __call__(self, attrs: dict, serializer: Serializer):
"""Check that if any of the fields in `self.fields` are set, all of them must be set"""
if any(field in attrs for field in self.fields) and not all(
field in attrs for field in self.fields
):
field_names = ", ".join(self.fields)
message = self.message.format(field_names=field_names)
raise ValidationError(message, code="required")
def __repr__(self):
return "<%s(fields=%s)>" % (self.__class__.__name__, smart_repr(self.fields))

View File

@ -18,7 +18,7 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer, is_dict
from authentik.core.models import Provider
from authentik.outposts.api.service_connections import ServiceConnectionSerializer
from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import (
Outpost,
OutpostConfig,
@ -47,16 +47,6 @@ class OutpostSerializer(ModelSerializer):
source="service_connection", read_only=True
)
def validate_name(self, name: str) -> str:
"""Validate name (especially for embedded outpost)"""
if not self.instance:
return name
if self.instance.managed == MANAGED_OUTPOST and name != MANAGED_OUTPOST_NAME:
raise ValidationError("Embedded outpost's name cannot be changed")
if self.instance.name == MANAGED_OUTPOST_NAME:
self.instance.managed = MANAGED_OUTPOST
return name
def validate_providers(self, providers: list[Provider]) -> list[Provider]:
"""Check that all providers match the type of the outpost"""
type_map = {

View File

@ -15,7 +15,6 @@ GAUGE_OUTPOSTS_LAST_UPDATE = Gauge(
["outpost", "uid", "version"],
)
MANAGED_OUTPOST = "goauthentik.io/outposts/embedded"
MANAGED_OUTPOST_NAME = "authentik Embedded Outpost"
class AuthentikOutpostConfig(ManagedAppConfig):
@ -36,17 +35,14 @@ class AuthentikOutpostConfig(ManagedAppConfig):
DockerServiceConnection,
KubernetesServiceConnection,
Outpost,
OutpostConfig,
OutpostType,
)
if outpost := Outpost.objects.filter(name=MANAGED_OUTPOST_NAME, managed="").first():
outpost.managed = MANAGED_OUTPOST
outpost.save()
return
outpost, updated = Outpost.objects.update_or_create(
defaults={
"name": "authentik Embedded Outpost",
"type": OutpostType.PROXY,
"name": MANAGED_OUTPOST_NAME,
},
managed=MANAGED_OUTPOST,
)
@ -55,4 +51,10 @@ class AuthentikOutpostConfig(ManagedAppConfig):
outpost.service_connection = KubernetesServiceConnection.objects.first()
elif DockerServiceConnection.objects.exists():
outpost.service_connection = DockerServiceConnection.objects.first()
outpost.config = OutpostConfig(
kubernetes_disabled_components=[
"deployment",
"secret",
]
)
outpost.save()

View File

@ -4,7 +4,6 @@ from datetime import datetime
from enum import IntEnum
from typing import Any, Optional
from asgiref.sync import async_to_sync
from channels.exceptions import DenyConnection
from dacite.core import from_dict
from dacite.data import Data
@ -15,8 +14,6 @@ from authentik.core.channels import AuthJsonConsumer
from authentik.outposts.apps import GAUGE_OUTPOSTS_CONNECTED, GAUGE_OUTPOSTS_LAST_UPDATE
from authentik.outposts.models import OUTPOST_HELLO_INTERVAL, Outpost, OutpostState
OUTPOST_GROUP = "group_outpost_%(outpost_pk)s"
class WebsocketMessageInstruction(IntEnum):
"""Commands which can be triggered over Websocket"""
@ -30,9 +27,6 @@ class WebsocketMessageInstruction(IntEnum):
# Message sent by us to trigger an Update
TRIGGER_UPDATE = 2
# Provider specific message
PROVIDER_SPECIFIC = 3
@dataclass(slots=True)
class WebsocketMessage:
@ -50,6 +44,8 @@ class OutpostConsumer(AuthJsonConsumer):
last_uid: Optional[str] = None
first_msg = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.logger = get_logger()
@ -72,26 +68,22 @@ class OutpostConsumer(AuthJsonConsumer):
raise DenyConnection()
self.outpost = outpost
self.last_uid = self.channel_name
async_to_sync(self.channel_layer.group_add)(
OUTPOST_GROUP % {"outpost_pk": str(self.outpost.pk)}, self.channel_name
)
GAUGE_OUTPOSTS_CONNECTED.labels(
outpost=self.outpost.name,
uid=self.last_uid,
expected=self.outpost.config.kubernetes_replicas,
).inc()
def disconnect(self, code):
if self.outpost:
async_to_sync(self.channel_layer.group_discard)(
OUTPOST_GROUP % {"outpost_pk": str(self.outpost.pk)}, self.channel_name
)
if self.outpost and self.last_uid:
state = OutpostState.for_instance_uid(self.outpost, self.last_uid)
if self.channel_name in state.channel_ids:
state.channel_ids.remove(self.channel_name)
state.save()
GAUGE_OUTPOSTS_CONNECTED.labels(
outpost=self.outpost.name,
uid=self.last_uid,
expected=self.outpost.config.kubernetes_replicas,
).dec()
self.logger.debug(
"removed outpost instance from cache",
instance_uuid=self.last_uid,
)
def receive_json(self, content: Data):
msg = from_dict(WebsocketMessage, content)
@ -102,13 +94,26 @@ class OutpostConsumer(AuthJsonConsumer):
raise DenyConnection()
state = OutpostState.for_instance_uid(self.outpost, uid)
if self.channel_name not in state.channel_ids:
state.channel_ids.append(self.channel_name)
state.last_seen = datetime.now()
state.hostname = msg.args.pop("hostname", "")
state.hostname = msg.args.get("hostname", "")
if not self.first_msg:
GAUGE_OUTPOSTS_CONNECTED.labels(
outpost=self.outpost.name,
uid=self.last_uid,
expected=self.outpost.config.kubernetes_replicas,
).inc()
self.logger.debug(
"added outpost instance to cache",
instance_uuid=self.last_uid,
)
self.first_msg = True
if msg.instruction == WebsocketMessageInstruction.HELLO:
state.version = msg.args.pop("version", None)
state.build_hash = msg.args.pop("buildHash", "")
state.args = msg.args
state.version = msg.args.get("version", None)
state.build_hash = msg.args.get("buildHash", "")
elif msg.instruction == WebsocketMessageInstruction.ACK:
return
GAUGE_OUTPOSTS_LAST_UPDATE.labels(
@ -126,14 +131,3 @@ class OutpostConsumer(AuthJsonConsumer):
self.send_json(
asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE))
)
def event_provider_specific(self, event):
"""Event handler which can be called by provider-specific
implementations to send specific messages to the outpost"""
self.send_json(
asdict(
WebsocketMessage(
instruction=WebsocketMessageInstruction.PROVIDER_SPECIFIC, args=event
)
)
)

View File

@ -43,10 +43,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
self.api = AppsV1Api(controller.client)
self.outpost = self.controller.outpost
@property
def noop(self) -> bool:
return self.is_embedded
@staticmethod
def reconciler_name() -> str:
return "deployment"

View File

@ -24,10 +24,6 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
super().__init__(controller)
self.api = CoreV1Api(controller.client)
@property
def noop(self) -> bool:
return self.is_embedded
@staticmethod
def reconciler_name() -> str:
return "secret"

View File

@ -77,10 +77,7 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe
@property
def noop(self) -> bool:
if not self._crd_exists():
self.logger.debug("CRD doesn't exist")
return True
return self.is_embedded
return (not self._crd_exists()) or (self.is_embedded)
def _crd_exists(self) -> bool:
"""Check if the Prometheus ServiceMonitor exists"""

View File

@ -28,8 +28,4 @@ class Migration(migrations.Migration):
verbose_name="Managed by authentik",
),
),
migrations.AlterModelOptions(
name="outpost",
options={"verbose_name": "Outpost", "verbose_name_plural": "Outposts"},
),
]

View File

@ -344,22 +344,12 @@ class Outpost(SerializerModel, ManagedModel):
user_created = False
if not user:
user: User = User.objects.create(username=self.user_identifier)
user_created = True
attrs = {
"type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
"name": f"Outpost {self.name} Service-Account",
"path": USER_PATH_OUTPOSTS,
}
dirty = False
for key, value in attrs.items():
if getattr(user, key) != value:
dirty = True
setattr(user, key, value)
if user.has_usable_password():
user.set_unusable_password()
dirty = True
if dirty:
user.save()
user_created = True
user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
user.name = f"Outpost {self.name} Service-Account"
user.path = USER_PATH_OUTPOSTS
user.save()
if user_created:
self.build_user_permissions(user)
return user
@ -390,7 +380,7 @@ class Outpost(SerializerModel, ManagedModel):
managed=managed,
)
except IntegrityError:
# Integrity error happens mostly when managed is reused
# Integrity error happens mostly when managed is re-used
Token.objects.filter(managed=managed).delete()
Token.objects.filter(identifier=self.token_identifier).delete()
return self.token
@ -415,22 +405,18 @@ class Outpost(SerializerModel, ManagedModel):
def __str__(self) -> str:
return f"Outpost {self.name}"
class Meta:
verbose_name = _("Outpost")
verbose_name_plural = _("Outposts")
@dataclass
class OutpostState:
"""Outpost instance state, last_seen and version"""
uid: str
channel_ids: list[str] = field(default_factory=list)
last_seen: Optional[datetime] = field(default=None)
version: Optional[str] = field(default=None)
version_should: Version = field(default=OUR_VERSION)
build_hash: str = field(default="")
hostname: str = field(default="")
args: dict = field(default_factory=dict)
_outpost: Optional[Outpost] = field(default=None)

View File

@ -5,6 +5,7 @@ from socket import gethostname
from typing import Any, Optional
from urllib.parse import urlparse
import yaml
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.core.cache import cache
@ -15,7 +16,6 @@ from docker.constants import DEFAULT_UNIX_SOCKET
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from structlog.stdlib import get_logger
from yaml import safe_load
from authentik.events.monitored_tasks import (
MonitoredTask,
@ -25,7 +25,6 @@ from authentik.events.monitored_tasks import (
)
from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import path_to_class
from authentik.outposts.consumer import OUTPOST_GROUP
from authentik.outposts.controllers.base import BaseController, ControllerException
from authentik.outposts.controllers.docker import DockerClient
from authentik.outposts.controllers.kubernetes import KubernetesClient
@ -35,6 +34,7 @@ from authentik.outposts.models import (
Outpost,
OutpostModel,
OutpostServiceConnection,
OutpostState,
OutpostType,
ServiceConnectionInvalid,
)
@ -243,9 +243,10 @@ def _outpost_single_update(outpost: Outpost, layer=None):
outpost.build_user_permissions(outpost.user)
if not layer: # pragma: no cover
layer = get_channel_layer()
group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)}
LOGGER.debug("sending update", channel=group, outpost=outpost)
async_to_sync(layer.group_send)(group, {"type": "event.update"})
for state in OutpostState.for_outpost(outpost):
for channel in state.channel_ids:
LOGGER.debug("sending update", channel=channel, instance=state.uid, outpost=outpost)
async_to_sync(layer.send)(channel, {"type": "event.update"})
@CELERY_APP.task(
@ -278,7 +279,7 @@ def outpost_connection_discovery(self: MonitoredTask):
with kubeconfig_path.open("r", encoding="utf8") as _kubeconfig:
KubernetesServiceConnection.objects.create(
name=kubeconfig_local_name,
kubeconfig=safe_load(_kubeconfig),
kubeconfig=yaml.safe_load(_kubeconfig),
)
unix_socket_path = urlparse(DEFAULT_UNIX_SOCKET).path
socket = Path(unix_socket_path)
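
The update path above switches between sending to each recorded channel individually and broadcasting through a per-outpost channel-layer group. A condensed sketch of the group-based broadcast with Django Channels, reusing the OUTPOST_GROUP format shown earlier in this diff; the helper name is illustrative:

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer

OUTPOST_GROUP = "group_outpost_%(outpost_pk)s"

def send_outpost_update(outpost_pk: str) -> None:
    """Fan out an update event to every consumer subscribed to the outpost's group."""
    layer = get_channel_layer()
    group = OUTPOST_GROUP % {"outpost_pk": outpost_pk}
    # Consumers that called group_add(group, channel_name) receive this as event_update()
    async_to_sync(layer.group_send)(group, {"type": "event.update"})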

View File

@ -2,13 +2,11 @@
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.blueprints.tests import reconcile_app
from authentik.core.models import PropertyMapping
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.outposts.api.outposts import OutpostSerializer
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost, OutpostType, default_outpost_config
from authentik.outposts.models import OutpostType, default_outpost_config
from authentik.providers.ldap.models import LDAPProvider
from authentik.providers.proxy.models import ProxyProvider
@ -24,36 +22,7 @@ class TestOutpostServiceConnectionsAPI(APITestCase):
self.user = create_test_admin_user()
self.client.force_login(self.user)
@reconcile_app("authentik_outposts")
def test_managed_name_change(self):
"""Test name change for embedded outpost"""
embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
self.assertIsNotNone(embedded_outpost)
response = self.client.patch(
reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
{"name": "foo"},
)
self.assertEqual(response.status_code, 400)
self.assertJSONEqual(
response.content, {"name": ["Embedded outpost's name cannot be changed"]}
)
@reconcile_app("authentik_outposts")
def test_managed_without_managed(self):
"""Test name change for embedded outpost"""
embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
self.assertIsNotNone(embedded_outpost)
embedded_outpost.managed = ""
embedded_outpost.save()
response = self.client.patch(
reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
{"name": "foo"},
)
self.assertEqual(response.status_code, 200)
embedded_outpost.refresh_from_db()
self.assertEqual(embedded_outpost.managed, MANAGED_OUTPOST)
def test_outpost_validation(self):
def test_outpost_validaton(self):
"""Test Outpost validation"""
valid = OutpostSerializer(
data={

View File

@ -7,7 +7,7 @@ from django.test import TransactionTestCase
from authentik import __version__
from authentik.core.tests.utils import create_test_flow
from authentik.outposts.consumer import WebsocketMessage, WebsocketMessageInstruction
from authentik.outposts.channels import WebsocketMessage, WebsocketMessageInstruction
from authentik.outposts.models import Outpost, OutpostType
from authentik.providers.proxy.models import ProxyProvider
from authentik.root import websocket

View File

@ -7,7 +7,7 @@ from authentik.outposts.api.service_connections import (
KubernetesServiceConnectionViewSet,
ServiceConnectionViewSet,
)
from authentik.outposts.consumer import OutpostConsumer
from authentik.outposts.channels import OutpostConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware
websocket_urlpatterns = [

View File

@ -7,11 +7,7 @@ GAUGE_POLICIES_CACHED = Gauge(
"authentik_policies_cached",
"Cached Policies",
)
HIST_POLICIES_ENGINE_TOTAL_TIME = Histogram(
"authentik_policies_engine_time_total_seconds",
"(Total) Duration the policy engine took to evaluate a result.",
["obj_type", "obj_pk"],
)
HIST_POLICIES_EXECUTION_TIME = Histogram(
"authentik_policies_execution_time",
"Execution times for single policies",
@ -21,7 +17,6 @@ HIST_POLICIES_EXECUTION_TIME = Histogram(
"binding_target_name",
"object_pk",
"object_type",
"mode",
],
)

View File

@ -1,7 +1,6 @@
"""authentik policy engine"""
from multiprocessing import Pipe, current_process
from multiprocessing.connection import Connection
from timeit import default_timer
from typing import Iterator, Optional
from django.core.cache import cache
@ -11,8 +10,6 @@ from sentry_sdk.tracing import Span
from structlog.stdlib import BoundLogger, get_logger
from authentik.core.models import User
from authentik.lib.utils.reflection import class_to_path
from authentik.policies.apps import HIST_POLICIES_ENGINE_TOTAL_TIME, HIST_POLICIES_EXECUTION_TIME
from authentik.policies.exceptions import PolicyEngineException
from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel, PolicyEngineMode
from authentik.policies.process import PolicyProcess, cache_key
@ -80,33 +77,6 @@ class PolicyEngine:
if binding.policy is not None and binding.policy.__class__ == Policy:
raise PolicyEngineException(f"Policy '{binding.policy}' is root type")
def _check_cache(self, binding: PolicyBinding):
if not self.use_cache:
return False
before = default_timer()
key = cache_key(binding, self.request)
cached_policy = cache.get(key, None)
duration = max(default_timer() - before, 0)
if not cached_policy:
return False
self.logger.debug(
"P_ENG: Taking result from cache",
binding=binding,
cache_key=key,
request=self.request,
)
HIST_POLICIES_EXECUTION_TIME.labels(
binding_order=binding.order,
binding_target_type=binding.target_type,
binding_target_name=binding.target_name,
object_pk=str(self.request.obj.pk),
object_type=class_to_path(self.request.obj.__class__),
mode="cache_retrieve",
).observe(duration)
# It's a bit silly to time this, but
self.__cached_policies.append(cached_policy)
return True
def build(self) -> "PolicyEngine":
"""Build wrapper which monitors performance"""
with (
@ -114,10 +84,6 @@ class PolicyEngine:
op="authentik.policy.engine.build",
description=self.__pbm,
) as span,
HIST_POLICIES_ENGINE_TOTAL_TIME.labels(
obj_type=class_to_path(self.__pbm.__class__),
obj_pk=str(self.__pbm.pk),
).time(),
):
span: Span
span.set_data("pbm", self.__pbm)
@ -126,7 +92,16 @@ class PolicyEngine:
self.__expected_result_count += 1
self._check_policy_type(binding)
if self._check_cache(binding):
key = cache_key(binding, self.request)
cached_policy = cache.get(key, None)
if cached_policy and self.use_cache:
self.logger.debug(
"P_ENG: Taking result from cache",
binding=binding,
cache_key=key,
request=self.request,
)
self.__cached_policies.append(cached_policy)
continue
self.logger.debug("P_ENG: Evaluating policy", binding=binding, request=self.request)
our_end, task_end = Pipe(False)

View File

@ -190,8 +190,8 @@ class Policy(SerializerModel, CreatedUpdatedModel):
verbose_name_plural = _("Policies")
permissions = [
("view_policy_cache", _("View Policy's cache metrics")),
("clear_policy_cache", _("Clear Policy's cache metrics")),
("view_policy_cache", "View Policy's cache metrics"),
("clear_policy_cache", "Clear Policy's cache metrics"),
]
class PolicyMeta:

View File

@ -11,7 +11,6 @@ from structlog.stdlib import get_logger
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
from authentik.lib.utils.errors import exception_to_string
from authentik.lib.utils.reflection import class_to_path
from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
from authentik.policies.exceptions import PolicyException
from authentik.policies.models import PolicyBinding
@ -129,8 +128,9 @@ class PolicyProcess(PROCESS_CLASS):
binding_target_type=self.binding.target_type,
binding_target_name=self.binding.target_name,
object_pk=str(self.request.obj.pk),
object_type=class_to_path(self.request.obj.__class__),
mode="execute_process",
object_type=(
f"{self.request.obj._meta.app_label}.{self.request.obj._meta.model_name}"
),
).time(),
):
span: Span

View File

@ -17,7 +17,7 @@ LOGGER = get_logger()
@receiver(monitoring_set)
def monitoring_set_policies(sender, **kwargs):
"""set policy gauges"""
GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}*") or []))
GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}_*") or []))
@receiver(post_save, sender=Policy)

View File

@ -1,27 +0,0 @@
# Generated by Django 5.0 on 2023-12-22 23:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_providers_oauth2", "0016_alter_refreshtoken_token"),
]
operations = [
migrations.AddField(
model_name="accesstoken",
name="session_id",
field=models.CharField(blank=True, default=""),
),
migrations.AddField(
model_name="authorizationcode",
name="session_id",
field=models.CharField(blank=True, default=""),
),
migrations.AddField(
model_name="refreshtoken",
name="session_id",
field=models.CharField(blank=True, default=""),
),
]

View File

@ -296,7 +296,6 @@ class BaseGrantModel(models.Model):
revoked = models.BooleanField(default=False)
_scope = models.TextField(default="", verbose_name=_("Scopes"))
auth_time = models.DateTimeField(verbose_name="Authentication time")
session_id = models.CharField(default="", blank=True)
@property
def scope(self) -> list[str]:

View File

@ -85,25 +85,6 @@ class TestAuthorize(OAuthTestCase):
)
OAuthAuthorizationParams.from_request(request)
def test_blocked_redirect_uri(self):
"""test missing/invalid redirect URI"""
OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=create_test_flow(),
redirect_uris="data:local.invalid",
)
with self.assertRaises(RedirectUriError):
request = self.factory.get(
"/",
data={
"response_type": "code",
"client_id": "test",
"redirect_uri": "data:localhost",
},
)
OAuthAuthorizationParams.from_request(request)
def test_invalid_redirect_uri_empty(self):
"""test missing/invalid redirect URI"""
provider = OAuth2Provider.objects.create(

View File

@ -1,187 +0,0 @@
"""Test token view"""
from base64 import b64encode, urlsafe_b64encode
from hashlib import sha256
from django.test import RequestFactory
from django.urls import reverse
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.flows.challenge import ChallengeTypes
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.constants import GRANT_TYPE_AUTHORIZATION_CODE
from authentik.providers.oauth2.models import AuthorizationCode, OAuth2Provider
from authentik.providers.oauth2.tests.utils import OAuthTestCase
class TestTokenPKCE(OAuthTestCase):
"""Test token view"""
def setUp(self) -> None:
super().setUp()
self.factory = RequestFactory()
self.app = Application.objects.create(name=generate_id(), slug="test")
def test_pkce_missing_in_token(self):
"""Test full with pkce"""
flow = create_test_flow()
provider = OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=flow,
redirect_uris="foo://localhost",
access_code_validity="seconds=100",
)
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
user = create_test_admin_user()
self.client.force_login(user)
challenge = generate_id()
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
# Step 1, initiate params and get redirect to flow
self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"code_challenge": challenge,
"code_challenge_method": "S256",
},
)
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
self.assertJSONEqual(
response.content.decode(),
{
"component": "xak-flow-redirect",
"type": ChallengeTypes.REDIRECT.value,
"to": f"foo://localhost?code={code.code}&state={state}",
},
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
data={
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
"code": code.code,
# Missing the code_verifier here
"redirect_uri": "foo://localhost",
},
HTTP_AUTHORIZATION=f"Basic {header}",
)
self.assertJSONEqual(
response.content,
{"error": "invalid_request", "error_description": "The request is otherwise malformed"},
)
self.assertEqual(response.status_code, 400)
def test_pkce_correct_s256(self):
"""Test full with pkce"""
flow = create_test_flow()
provider = OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=flow,
redirect_uris="foo://localhost",
access_code_validity="seconds=100",
)
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
user = create_test_admin_user()
self.client.force_login(user)
verifier = generate_id()
challenge = (
urlsafe_b64encode(sha256(verifier.encode("ascii")).digest())
.decode("utf-8")
.replace("=", "")
)
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
# Step 1, initiate params and get redirect to flow
self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"code_challenge": challenge,
"code_challenge_method": "S256",
},
)
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
self.assertJSONEqual(
response.content.decode(),
{
"component": "xak-flow-redirect",
"type": ChallengeTypes.REDIRECT.value,
"to": f"foo://localhost?code={code.code}&state={state}",
},
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
data={
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
"code": code.code,
"code_verifier": verifier,
"redirect_uri": "foo://localhost",
},
HTTP_AUTHORIZATION=f"Basic {header}",
)
self.assertEqual(response.status_code, 200)
def test_pkce_correct_plain(self):
"""Test full with pkce"""
flow = create_test_flow()
provider = OAuth2Provider.objects.create(
name=generate_id(),
client_id="test",
authorization_flow=flow,
redirect_uris="foo://localhost",
access_code_validity="seconds=100",
)
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
user = create_test_admin_user()
self.client.force_login(user)
verifier = generate_id()
header = b64encode(f"{provider.client_id}:{provider.client_secret}".encode()).decode()
# Step 1, initiate params and get redirect to flow
self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"code_challenge": verifier,
},
)
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
code: AuthorizationCode = AuthorizationCode.objects.filter(user=user).first()
self.assertJSONEqual(
response.content.decode(),
{
"component": "xak-flow-redirect",
"type": ChallengeTypes.REDIRECT.value,
"to": f"foo://localhost?code={code.code}&state={state}",
},
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
data={
"grant_type": GRANT_TYPE_AUTHORIZATION_CODE,
"code": code.code,
"code_verifier": verifier,
"redirect_uri": "foo://localhost",
},
HTTP_AUTHORIZATION=f"Basic {header}",
)
self.assertEqual(response.status_code, 200)
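
The S256 test above derives the code_challenge from the verifier exactly as RFC 7636 specifies: the unpadded URL-safe Base64 encoding of SHA256(verifier). A small worked example of that derivation and the matching check the token endpoint performs:

from base64 import urlsafe_b64encode
from hashlib import sha256

def s256_challenge(verifier: str) -> str:
    """RFC 7636: code_challenge = BASE64URL(SHA256(ASCII(code_verifier))), no padding."""
    return urlsafe_b64encode(sha256(verifier.encode("ascii")).digest()).decode("utf-8").rstrip("=")

def verify_s256(verifier: str, challenge: str) -> bool:
    """What the token endpoint checks when the client later presents code_verifier."""
    return s256_challenge(verifier) == challenge

# challenge = s256_challenge("some-high-entropy-verifier")
# verify_s256("some-high-entropy-verifier", challenge)  -> True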

View File

@ -188,7 +188,6 @@ def authenticate_provider(request: HttpRequest) -> Optional[OAuth2Provider]:
if client_id != provider.client_id or client_secret != provider.client_secret:
LOGGER.debug("(basic) Provider for basic auth does not exist")
return None
CTX_AUTH_VIA.set("oauth_client_secret")
return provider

Some files were not shown because too many files have changed in this diff.