Compare commits: trustchain...expression (1 commit)

Commit: 5945b36200
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.10.6
+current_version = 2023.8.3
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
.github/actions/setup/action.yml (21 changes)

@@ -2,39 +2,36 @@ name: "Setup authentik testing environment"
 description: "Setup authentik testing environment"
 
 inputs:
-  postgresql_version:
+  postgresql_tag:
     description: "Optional postgresql image tag"
    default: "12"
 
 runs:
   using: "composite"
   steps:
-    - name: Install poetry & deps
+    - name: Install poetry
      shell: bash
      run: |
        pipx install poetry || true
-        sudo apt-get update
-        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
+        sudo apt update
+        sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
    - name: Setup python and restore poetry
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v3
      with:
-        python-version-file: 'pyproject.toml'
+        python-version: "3.11"
        cache: "poetry"
    - name: Setup node
      uses: actions/setup-node@v3
      with:
-        node-version-file: web/package.json
+        node-version: "20"
        cache: "npm"
        cache-dependency-path: web/package-lock.json
-    - name: Setup go
-      uses: actions/setup-go@v4
-      with:
-        go-version-file: "go.mod"
    - name: Setup dependencies
      shell: bash
      run: |
-        export PSQL_TAG=${{ inputs.postgresql_version }}
+        export PSQL_TAG=${{ inputs.postgresql_tag }}
        docker-compose -f .github/actions/setup/docker-compose.yml up -d
+        poetry env use python3.11
        poetry install
        cd web && npm ci
    - name: Generate config
.github/codecov.yml (2 changes)

@@ -6,5 +6,5 @@ coverage:
        # adjust accordingly based on how flaky your tests are
        # this allows a 1% drop from the previous base commit coverage
        threshold: 1%
-comment:
+notify:
   after_n_builds: 3
.github/dependabot.yml (37 changes)

@@ -30,7 +30,6 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "web:"
-    # TODO: deduplicate these groups
     groups:
       sentry:
         patterns:
@@ -41,7 +40,7 @@ updates:
           - "babel-*"
       eslint:
         patterns:
-          - "@typescript-eslint/*"
+          - "@typescript-eslint/eslint-*"
           - "eslint"
           - "eslint-*"
       storybook:
@@ -51,40 +50,6 @@ updates:
       esbuild:
         patterns:
           - "@esbuild/*"
-  - package-ecosystem: npm
-    directory: "/tests/wdio"
-    schedule:
-      interval: daily
-      time: "04:00"
-    labels:
-      - dependencies
-    open-pull-requests-limit: 10
-    commit-message:
-      prefix: "web:"
-    # TODO: deduplicate these groups
-    groups:
-      sentry:
-        patterns:
-          - "@sentry/*"
-      babel:
-        patterns:
-          - "@babel/*"
-          - "babel-*"
-      eslint:
-        patterns:
-          - "@typescript-eslint/*"
-          - "eslint"
-          - "eslint-*"
-      storybook:
-        patterns:
-          - "@storybook/*"
-          - "*storybook*"
-      esbuild:
-        patterns:
-          - "@esbuild/*"
-      wdio:
-        patterns:
-          - "@wdio/*"
   - package-ecosystem: npm
     directory: "/website"
     schedule:
.github/workflows/ci-main.yml (31 changes)

@@ -11,7 +11,6 @@ on:
   pull_request:
     branches:
       - main
-      - version-*
 
 env:
   POSTGRES_DB: authentik
@@ -48,38 +47,25 @@ jobs:
      - name: run migrations
        run: poetry run python -m lifecycle.migrate
   test-migrations-from-stable:
-    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        psql:
-          - 12-alpine
-          - 15-alpine
-          - 16-alpine
+    continue-on-error: true
     steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Setup authentik env
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: checkout stable
        run: |
-          # Delete all poetry envs
-          rm -rf /home/runner/.cache/pypoetry
          # Copy current, latest config to local
          cp authentik/lib/default.yml local.env.yml
          cp -R .github ..
          cp -R scripts ..
-          git checkout version/$(python -c "from authentik import __version__; print(__version__)")
+          git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
          rm -rf .github/ scripts/
          mv ../.github ../scripts .
      - name: Setup authentik env (ensure stable deps are installed)
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: run migrations to stable
        run: poetry run python -m lifecycle.migrate
      - name: checkout current code
@@ -89,13 +75,9 @@ jobs:
          git reset --hard HEAD
          git clean -d -fx .
          git checkout $GITHUB_SHA
-          # Delete previous poetry env
-          rm -rf $(poetry env info --path)
          poetry install
      - name: Setup authentik env (ensure latest deps are installed)
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: migrate to latest
        run: poetry run python -m lifecycle.migrate
   test-unittest:
@@ -108,13 +90,12 @@ jobs:
        psql:
          - 12-alpine
          - 15-alpine
-          - 16-alpine
     steps:
      - uses: actions/checkout@v4
      - name: Setup authentik env
        uses: ./.github/actions/setup
        with:
-          postgresql_version: ${{ matrix.psql }}
+          postgresql_tag: ${{ matrix.psql }}
      - name: run unittest
        run: |
          poetry run make test
@@ -203,9 +184,6 @@ jobs:
   build:
     needs: ci-core-mark
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
     timeout-minutes: 120
     steps:
      - uses: actions/checkout@v4
@@ -256,9 +234,6 @@ jobs:
   build-arm64:
     needs: ci-core-mark
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
     timeout-minutes: 120
     steps:
      - uses: actions/checkout@v4
.github/workflows/ci-outpost.yml (10 changes)

@@ -9,7 +9,6 @@ on:
   pull_request:
     branches:
       - main
-      - version-*
 
 jobs:
   lint-golint:
@@ -30,7 +29,7 @@ jobs:
      - name: golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
-          version: v1.54.2
+          version: v1.52.2
          args: --timeout 5000s --verbose
          skip-cache: true
   test-unittest:
@@ -66,9 +65,6 @@ jobs:
          - ldap
          - radius
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
     steps:
      - uses: actions/checkout@v4
        with:
@@ -128,9 +124,9 @@ jobs:
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Generate API
.github/workflows/ci-web.yml (45 changes)

@@ -9,38 +9,31 @@ on:
   pull_request:
     branches:
       - main
-      - version-*
 
 jobs:
   lint-eslint:
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
        run: npm ci
      - name: Generate API
        run: make gen-client-ts
      - name: Eslint
-        working-directory: ${{ matrix.project }}/
+        working-directory: web/
        run: npm run lint
   lint-build:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/
@@ -52,33 +45,27 @@ jobs:
        run: npm run tsc
   lint-prettier:
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
        run: npm ci
      - name: Generate API
        run: make gen-client-ts
      - name: prettier
-        working-directory: ${{ matrix.project }}/
+        working-directory: web/
        run: npm run prettier-check
   lint-lit-analyse:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/
@@ -108,9 +95,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/
.github/workflows/ci-website.yml (13 changes)

@@ -9,16 +9,15 @@ on:
   pull_request:
     branches:
       - main
-      - version-*
 
 jobs:
   lint-prettier:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
@@ -30,9 +29,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
@@ -51,9 +50,9 @@ jobs:
      - build-docs-only
     steps:
      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
.github/workflows/ghcr-retention.yml (4 changes)

@@ -1,8 +1,8 @@
 name: ghcr-retention
 
 on:
-  # schedule:
-  #   - cron: "0 0 * * *" # every day at midnight
+  schedule:
+    - cron: "0 0 * * *" # every day at midnight
   workflow_dispatch:
 
 jobs:
.github/workflows/release-next-branch.yml (1 change)

@@ -6,7 +6,6 @@ on:
   workflow_dispatch:
 
 permissions:
-  # Needed to be able to push to the next branch
   contents: write
 
 jobs:
.github/workflows/release-publish.yml (24 changes)

@@ -7,9 +7,6 @@ on:
 jobs:
   build-server:
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
     steps:
      - uses: actions/checkout@v4
      - name: Set up QEMU
@@ -30,10 +27,8 @@ jobs:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: make empty clients
-        run: |
-          mkdir -p ./gen-ts-api
-          mkdir -p ./gen-go-api
+      - name: make empty ts client
+        run: mkdir -p ./gen-ts-client
      - name: Build Docker Image
        uses: docker/build-push-action@v5
        with:
@@ -55,9 +50,6 @@ jobs:
          VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
   build-outpost:
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
     strategy:
       fail-fast: false
       matrix:
@@ -77,10 +69,6 @@ jobs:
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-      - name: make empty clients
-        run: |
-          mkdir -p ./gen-ts-api
-          mkdir -p ./gen-go-api
      - name: Docker Login Registry
        uses: docker/login-action@v3
        with:
@@ -105,16 +93,12 @@ jobs:
            ghcr.io/goauthentik/${{ matrix.type }}:latest
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
-          context: .
          build-args: |
            VERSION=${{ steps.ev.outputs.version }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
   build-outpost-binary:
     timeout-minutes: 120
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload binaries to the release
-      contents: write
     strategy:
       fail-fast: false
       matrix:
@@ -129,9 +113,9 @@ jobs:
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Build web
.github/workflows/release-tag.yml (1 change)

@@ -16,7 +16,6 @@ jobs:
          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
          docker buildx install
-          mkdir -p ./gen-ts-api
          docker build -t testing:latest .
          echo "AUTHENTIK_IMAGE=testing" >> .env
          echo "AUTHENTIK_TAG=latest" >> .env
.github/workflows/repo-stale.yml (2 changes)

@@ -6,8 +6,8 @@ on:
   workflow_dispatch:
 
 permissions:
-  # Needed to update issues and PRs
   issues: write
+  pull-requests: write
 
 jobs:
   stale:
.github/workflows/web-api-publish.yml (4 changes)

@@ -17,9 +17,9 @@ jobs:
      - uses: actions/checkout@v4
        with:
          token: ${{ steps.generate_token.outputs.token }}
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          registry-url: "https://registry.npmjs.org"
      - name: Generate API Client
        run: make gen-client-ts
.gitignore (4 changes)

@@ -194,6 +194,7 @@ pip-selfcheck.json
 # End of https://www.gitignore.io/api/python,django
 /static/
 local.env.yml
+/variables/
 
 media/
 *mmdb
@@ -206,6 +207,3 @@ data/
 .netlify
 .ruff_cache
 source_docs/
-
-### Golang ###
-/vendor/
@@ -9,8 +9,6 @@ lifecycle/ @goauthentik/backend
 schemas/ @goauthentik/backend
 scripts/ @goauthentik/backend
 tests/ @goauthentik/backend
-pyproject.toml @goauthentik/backend
-poetry.lock @goauthentik/backend
 # Infrastructure
 .github/ @goauthentik/infrastructure
 Dockerfile @goauthentik/infrastructure
@@ -19,7 +17,6 @@ Dockerfile @goauthentik/infrastructure
 docker-compose.yml @goauthentik/infrastructure
 # Web
 web/ @goauthentik/frontend
-tests/wdio/ @goauthentik/frontend
 # Docs & Website
 website/ @goauthentik/docs
 # Security
Dockerfile (31 changes)

@@ -1,7 +1,5 @@
-# syntax=docker/dockerfile:1
-
 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder
 
 ENV NODE_ENV=production
 
@@ -9,7 +7,7 @@ WORKDIR /work/website
 
 RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
     --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
-    --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
+    --mount=type=cache,target=/root/.npm \
     npm ci --include=dev
 
 COPY ./website /work/website/
@@ -19,7 +17,7 @@ COPY ./SECURITY.md /work/
 RUN npm run build-docs-only
 
 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder
 
 ENV NODE_ENV=production
 
@@ -27,7 +25,7 @@ WORKDIR /work/web
 
 RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
     --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
-    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
+    --mount=type=cache,target=/root/.npm \
     npm ci --include=dev
 
 COPY ./web /work/web/
@@ -37,14 +35,7 @@ COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api
 RUN npm run build
 
 # Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder
-
-ARG TARGETOS
-ARG TARGETARCH
-ARG TARGETVARIANT
-
-ARG GOOS=$TARGETOS
-ARG GOARCH=$TARGETARCH
+FROM docker.io/golang:1.21.1-bookworm AS go-builder
 
 WORKDIR /go/src/goauthentik.io
 
@@ -64,12 +55,12 @@ COPY ./go.sum /go/src/goauthentik.io/go.sum
 
 ENV CGO_ENABLED=0
 
-RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
-    --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
-    GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
+RUN --mount=type=cache,target=/go/pkg/mod \
+    --mount=type=cache,target=/root/.cache/go-build \
+    go build -o /go/authentik ./cmd/server
 
 # Stage 4: MaxMind GeoIP
-FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
+FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip
 
 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
 ENV GEOIPUPDATE_VERBOSE="true"
@@ -91,9 +82,7 @@ ENV VENV_PATH="/ak-root/venv" \
     POETRY_VIRTUALENVS_CREATE=false \
     PATH="/ak-root/venv/bin:$PATH"
 
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
+RUN --mount=type=cache,target=/var/cache/apt \
     apt-get update && \
     # Required for installing pip packages
     apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
Makefile (20 changes)

@@ -28,13 +28,10 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 
 all: lint-fix lint test gen web ## Lint, build, and test everything
 
-HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
-    cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)
-
 help: ## Show this help
     @echo "\nSpecify a command. The choices are:\n"
-    @grep -Eh '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
-    awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[0;36m%-$(HELP_WIDTH)s \033[m %s\n", $$1, $$2}' | \
+    @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
+    awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[0;36m%-24s\033[m %s\n", $$1, $$2}' | \
     sort
     @echo ""
 
@@ -56,15 +53,14 @@ test: ## Run the server tests and produce a coverage report (locally)
     coverage report
 
 lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
-    isort $(PY_SOURCES)
-    black $(PY_SOURCES)
-    ruff $(PY_SOURCES)
+    isort authentik $(PY_SOURCES)
+    black authentik $(PY_SOURCES)
+    ruff authentik $(PY_SOURCES)
     codespell -w $(CODESPELL_ARGS)
 
 lint: ## Lint the python and golang sources
-    bandit -r $(PY_SOURCES) -x node_modules
-    ./web/node_modules/.bin/pyright $(PY_SOURCES)
     pylint $(PY_SOURCES)
+    bandit -r $(PY_SOURCES) -x node_modules
     golangci-lint run -v
 
 migrate: ## Run the Authentik Django server's migrations
@@ -79,10 +75,10 @@ install: web-install website-install ## Install all requires dependencies for
     poetry install
 
 dev-drop-db:
-    dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
+    echo dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
     # Also remove the test-db if it exists
     dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
-    redis-cli -n 0 flushall
+    echo redis-cli -n 0 flushall
 
 dev-create-db:
     createdb -U ${pg_user} -h ${pg_host} ${pg_name}
@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional
 
-__version__ = "2023.10.6"
+__version__ = "2023.8.3"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
 
 
@@ -1,7 +1,7 @@
 """Meta API"""
 from drf_spectacular.utils import extend_schema
 from rest_framework.fields import CharField
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet
@@ -21,7 +21,7 @@ class AppSerializer(PassiveSerializer):
 class AppsViewSet(ViewSet):
     """Read-only view list all installed apps"""
 
-    permission_classes = [IsAuthenticated]
+    permission_classes = [IsAdminUser]
 
     @extend_schema(responses={200: AppSerializer(many=True)})
     def list(self, request: Request) -> Response:
@@ -35,7 +35,7 @@ class AppsViewSet(ViewSet):
 class ModelViewSet(ViewSet):
     """Read-only view list all installed models"""
 
-    permission_classes = [IsAuthenticated]
+    permission_classes = [IsAdminUser]
 
     @extend_schema(responses={200: AppSerializer(many=True)})
     def list(self, request: Request) -> Response:
@@ -5,7 +5,7 @@ from django.db.models.functions import ExtractHour
 from drf_spectacular.utils import extend_schema, extend_schema_field
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.fields import IntegerField, SerializerMethodField
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
@@ -68,7 +68,7 @@ class LoginMetricsSerializer(PassiveSerializer):
 class AdministrationMetricsViewSet(APIView):
     """Login Metrics per 1h"""
 
-    permission_classes = [IsAuthenticated]
+    permission_classes = [IsAdminUser]
 
     @extend_schema(responses={200: LoginMetricsSerializer(many=False)})
     def get(self, request: Request) -> Response:
@@ -8,6 +8,7 @@ from django.utils.timezone import now
 from drf_spectacular.utils import extend_schema
 from gunicorn import version_info as gunicorn_version
 from rest_framework.fields import SerializerMethodField
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
@@ -16,7 +17,6 @@ from authentik.core.api.utils import PassiveSerializer
 from authentik.lib.utils.reflection import get_env
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost
-from authentik.rbac.permissions import HasPermission
 
 
 class RuntimeDict(TypedDict):
@@ -88,7 +88,7 @@ class SystemSerializer(PassiveSerializer):
 class SystemView(APIView):
     """Get system information."""
 
-    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
+    permission_classes = [IsAdminUser]
     pagination_class = None
     filter_backends = []
     serializer_class = SystemSerializer
@@ -14,15 +14,14 @@ from rest_framework.fields import (
     ListField,
     SerializerMethodField,
 )
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet
 from structlog.stdlib import get_logger
 
-from authentik.api.decorators import permission_required
 from authentik.core.api.utils import PassiveSerializer
 from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
-from authentik.rbac.permissions import HasPermission
 
 LOGGER = get_logger()
 
@@ -64,7 +63,7 @@ class TaskSerializer(PassiveSerializer):
 class TaskViewSet(ViewSet):
     """Read-only view set that returns all background tasks"""
 
-    permission_classes = [HasPermission("authentik_rbac.view_system_tasks")]
+    permission_classes = [IsAdminUser]
     serializer_class = TaskSerializer
 
     @extend_schema(
@@ -94,7 +93,6 @@ class TaskViewSet(ViewSet):
         tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
         return Response(TaskSerializer(tasks, many=True).data)
 
-    @permission_required(None, ["authentik_rbac.run_system_tasks"])
     @extend_schema(
         request=OpenApiTypes.NONE,
         responses={
@@ -2,18 +2,18 @@
 from django.conf import settings
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.fields import IntegerField
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
-from authentik.rbac.permissions import HasPermission
 from authentik.root.celery import CELERY_APP
 
 
 class WorkerView(APIView):
     """Get currently connected worker count."""
 
-    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
+    permission_classes = [IsAdminUser]
 
     @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
     def get(self, request: Request) -> Response:
@@ -7,9 +7,9 @@ from rest_framework.authentication import get_authorization_header
 from rest_framework.filters import BaseFilterBackend
 from rest_framework.permissions import BasePermission
 from rest_framework.request import Request
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 
 from authentik.api.authentication import validate_auth
-from authentik.rbac.filters import ObjectFilter
 
 
 class OwnerFilter(BaseFilterBackend):
@@ -26,14 +26,14 @@ class OwnerFilter(BaseFilterBackend):
 class SecretKeyFilter(DjangoFilterBackend):
     """Allow access to all objects when authenticated with secret key as token.
 
-    Replaces both DjangoFilterBackend and ObjectFilter"""
+    Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""
 
     def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
         auth_header = get_authorization_header(request)
         token = validate_auth(auth_header)
         if token and token == settings.SECRET_KEY:
             return queryset
-        queryset = ObjectFilter().filter_queryset(request, queryset, view)
+        queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
         return super().filter_queryset(request, queryset, view)
 
 
@@ -10,7 +10,7 @@ from structlog.stdlib import get_logger
 LOGGER = get_logger()
 
 
-def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None):
+def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
     """Check permissions for a single custom action"""
 
     def wrapper_outter(func: Callable):
@@ -18,17 +18,15 @@ def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[l
 
         @wraps(func)
         def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
-            if obj_perm:
+            if perm:
                 obj = self.get_object()
-                if not request.user.has_perm(obj_perm, obj):
-                    LOGGER.debug(
-                        "denying access for object", user=request.user, perm=obj_perm, obj=obj
-                    )
+                if not request.user.has_perm(perm, obj):
+                    LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
                     return self.permission_denied(request)
-            if global_perms:
-                for other_perm in global_perms:
+            if other_perms:
+                for other_perm in other_perms:
                     if not request.user.has_perm(other_perm):
-                        LOGGER.debug("denying access for other", user=request.user, perm=other_perm)
+                        LOGGER.debug("denying access for other", user=request.user, perm=perm)
                         return self.permission_denied(request)
             return func(self, request, *args, **kwargs)
 
@@ -77,10 +77,3 @@ class Pagination(pagination.PageNumberPagination):
             },
             "required": ["pagination", "results"],
         }
-
-
-class SmallerPagination(Pagination):
-    """Smaller pagination for objects which might require a lot of queries
-    to retrieve all data for."""
-
-    max_page_size = 10
@@ -16,7 +16,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
 
     def tester(self: TestModelViewSets):
         self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
-        self.assertIsNotNone(getattr(test_viewset, "ordering", None))
         filterset_class = getattr(test_viewset, "filterset_class", None)
         if not filterset_class:
             self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
@@ -21,9 +21,7 @@ _other_urls = []
 for _authentik_app in get_apps():
     try:
         api_urls = import_module(f"{_authentik_app.name}.urls")
-    except ModuleNotFoundError:
-        continue
-    except ImportError as exc:
+    except (ModuleNotFoundError, ImportError) as exc:
         LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
         continue
     if not hasattr(api_urls, "api_urlpatterns"):
@@ -4,6 +4,7 @@ from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.exceptions import ValidationError
 from rest_framework.fields import CharField, DateTimeField, JSONField
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer
@@ -86,11 +87,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
 class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
     """Blueprint instances"""
 
+    permission_classes = [IsAdminUser]
     serializer_class = BlueprintInstanceSerializer
     queryset = BlueprintInstance.objects.all()
     search_fields = ["name", "path"]
     filterset_fields = ["name", "path"]
-    ordering = ["name"]
 
     @extend_schema(
         responses={
@@ -40,7 +40,7 @@ class ManagedAppConfig(AppConfig):
                 meth()
                 self._logger.debug("Successfully reconciled", name=name)
             except (DatabaseError, ProgrammingError, InternalError) as exc:
-                self._logger.warning("Failed to run reconcile", name=name, exc=exc)
+                self._logger.debug("Failed to run reconcile", name=name, exc=exc)
 
 
 class AuthentikBlueprintsConfig(ManagedAppConfig):
@@ -6,7 +6,6 @@ from django.test import TestCase
 
 from authentik.blueprints.v1.importer import is_model_allowed
 from authentik.lib.models import SerializerModel
-from authentik.providers.oauth2.models import RefreshToken
 
 
 class TestModels(TestCase):
@@ -22,9 +21,6 @@ def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
         model_class = test_model()
         self.assertTrue(isinstance(model_class, SerializerModel))
         self.assertIsNotNone(model_class.serializer)
-        if model_class.serializer.Meta().model == RefreshToken:
-            return
-        self.assertEqual(model_class.serializer.Meta().model, test_model)
 
     return tester
 
@@ -584,17 +584,12 @@ class EntryInvalidError(SentryIgnoredException):
     entry_model: Optional[str]
     entry_id: Optional[str]
     validation_error: Optional[ValidationError]
-    serializer: Optional[Serializer] = None
 
-    def __init__(
-        self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs
-    ) -> None:
+    def __init__(self, *args: object, validation_error: Optional[ValidationError] = None) -> None:
         super().__init__(*args)
         self.entry_model = None
         self.entry_id = None
         self.validation_error = validation_error
-        for key, value in kwargs.items():
-            setattr(self, key, value)
 
     @staticmethod
     def from_entry(
@ -35,28 +35,25 @@ from authentik.core.models import (
|
||||||
Source,
|
Source,
|
||||||
UserSourceConnection,
|
UserSourceConnection,
|
||||||
)
|
)
|
||||||
from authentik.enterprise.models import LicenseUsage
|
|
||||||
from authentik.events.utils import cleanse_dict
|
from authentik.events.utils import cleanse_dict
|
||||||
from authentik.flows.models import FlowToken, Stage
|
from authentik.flows.models import FlowToken, Stage
|
||||||
from authentik.lib.models import SerializerModel
|
from authentik.lib.models import SerializerModel
|
||||||
from authentik.lib.sentry import SentryIgnoredException
|
from authentik.lib.sentry import SentryIgnoredException
|
||||||
from authentik.outposts.models import OutpostServiceConnection
|
from authentik.outposts.models import OutpostServiceConnection
|
||||||
 from authentik.policies.models import Policy, PolicyBindingModel
-from authentik.providers.scim.models import SCIMGroup, SCIMUser
 
 # Context set when the serializer is created in a blueprint context
 # Update website/developer-docs/blueprints/v1/models.md when used
 SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"
 
 
-def excluded_models() -> list[type[Model]]:
-    """Return a list of all excluded models that shouldn't be exposed via API
-    or other means (internal only, base classes, non-used objects, etc)"""
+def is_model_allowed(model: type[Model]) -> bool:
+    """Check if model is allowed"""
     # pylint: disable=imported-auth-user
     from django.contrib.auth.models import Group as DjangoGroup
     from django.contrib.auth.models import User as DjangoUser
 
-    return (
+    excluded_models = (
        DjangoUser,
        DjangoGroup,
        # Base classes

@@ -72,15 +69,8 @@ def excluded_models() -> list[type[Model]]:
        AuthenticatedSession,
        # Classes which are only internally managed
        FlowToken,
-       LicenseUsage,
-       SCIMGroup,
-       SCIMUser,
    )
+    return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))
 
 
-def is_model_allowed(model: type[Model]) -> bool:
-    """Check if model is allowed"""
-    return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel))
-
-
 class DoRollback(SentryIgnoredException):

@@ -255,10 +245,7 @@ class Importer:
         try:
             full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
         except ValueError as exc:
-            raise EntryInvalidError.from_entry(
-                exc,
-                entry,
-            ) from exc
+            raise EntryInvalidError.from_entry(exc, entry) from exc
         always_merger.merge(full_data, updated_identifiers)
         serializer_kwargs["data"] = full_data

@@ -275,7 +262,6 @@ class Importer:
                 f"Serializer errors {serializer.errors}",
                 validation_error=exc,
                 entry=entry,
-                serializer=serializer,
             ) from exc
         return serializer

@@ -304,14 +290,12 @@ class Importer:
                 )
                 return False
             # Validate each single entry
-            serializer = None
             try:
                 serializer = self._validate_single(entry)
             except EntryInvalidError as exc:
                 # For deleting objects we don't need the serializer to be valid
                 if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
-                    serializer = exc.serializer
-                else:
-                    self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
-                    if raise_errors:
-                        raise exc
+                    continue
+                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
+                if raise_errors:
+                    raise exc
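# --- Illustrative sketch (not part of the diff) ---
# The hunk above folds the old excluded_models() helper into a single
# is_model_allowed() predicate: an explicit exclusion tuple combined with an
# opt-in base-class check. A minimal, self-contained rendering of that pattern,
# using hypothetical stand-in classes instead of the authentik models:

class SerializerModel:  # stand-in for the opt-in base class
    pass


class PublicThing(SerializerModel):
    pass


class InternalToken(SerializerModel):  # internally managed, stays hidden
    pass


EXCLUDED_MODELS = (InternalToken,)


def is_model_allowed(model: type) -> bool:
    # allowed = not explicitly excluded AND opted in via the base class
    return model not in EXCLUDED_MODELS and issubclass(model, SerializerModel)


assert is_model_allowed(PublicThing)
assert not is_model_allowed(InternalToken)
assert not is_model_allowed(dict)  # never opted in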
@@ -75,14 +75,14 @@ class BlueprintEventHandler(FileSystemEventHandler):
             return
         if event.is_directory:
             return
-        root = Path(CONFIG.get("blueprints_dir")).absolute()
-        path = Path(event.src_path).absolute()
-        rel_path = str(path.relative_to(root))
         if isinstance(event, FileCreatedEvent):
-            LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
-            blueprints_discovery.delay(rel_path)
+            LOGGER.debug("new blueprint file created, starting discovery")
+            blueprints_discovery.delay()
         if isinstance(event, FileModifiedEvent):
-            for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
+            path = Path(event.src_path)
+            root = Path(CONFIG.get("blueprints_dir")).absolute()
+            rel_path = str(path.relative_to(root))
+            for instance in BlueprintInstance.objects.filter(path=rel_path):
                 LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                 apply_blueprint.delay(instance.pk.hex)

@@ -98,32 +98,39 @@ def blueprints_find_dict():
     return blueprints
 
 
-def blueprints_find() -> list[BlueprintFile]:
+def blueprints_find():
     """Find blueprints and return valid ones"""
     blueprints = []
     root = Path(CONFIG.get("blueprints_dir"))
     for path in root.rglob("**/*.yaml"):
-        rel_path = path.relative_to(root)
         # Check if any part in the path starts with a dot and assume a hidden file
         if any(part for part in path.parts if part.startswith(".")):
             continue
+        LOGGER.debug("found blueprint", path=str(path))
         with open(path, "r", encoding="utf-8") as blueprint_file:
             try:
                 raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
             except YAMLError as exc:
                 raw_blueprint = None
-                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(rel_path))
+                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
             if not raw_blueprint:
                 continue
             metadata = raw_blueprint.get("metadata", None)
             version = raw_blueprint.get("version", 1)
             if version != 1:
-                LOGGER.warning("invalid blueprint version", version=version, path=str(rel_path))
+                LOGGER.warning("invalid blueprint version", version=version, path=str(path))
                 continue
         file_hash = sha512(path.read_bytes()).hexdigest()
-        blueprint = BlueprintFile(str(rel_path), version, file_hash, int(path.stat().st_mtime))
+        blueprint = BlueprintFile(
+            str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
+        )
         blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
         blueprints.append(blueprint)
+        LOGGER.debug(
+            "parsed & loaded blueprint",
+            hash=file_hash,
+            path=str(path),
+        )
     return blueprints

@@ -131,12 +138,10 @@ def blueprints_find() -> list[BlueprintFile]:
     throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
 )
 @prefill_task
-def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
+def blueprints_discovery(self: MonitoredTask):
     """Find blueprints and check if they need to be created in the database"""
     count = 0
     for blueprint in blueprints_find():
-        if path and blueprint.path != path:
-            continue
         check_blueprint_v1_file(blueprint)
         count += 1
     self.set_status(

@@ -166,11 +171,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
             metadata={},
         )
         instance.save()
-        LOGGER.info(
-            "Creating new blueprint instance from file", instance=instance, path=instance.path
-        )
     if instance.last_applied_hash != blueprint.hash:
-        LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
         apply_blueprint.delay(str(instance.pk))
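# --- Illustrative sketch (not part of the diff) ---
# Hedged sketch of the watcher pattern changed above: a watchdog handler that
# resolves an event's path relative to a watched root before dispatching work.
# Assumes the third-party `watchdog` package is installed; the print calls
# stand in for the Celery tasks (blueprints_discovery / apply_blueprint).
from pathlib import Path

from watchdog.events import FileCreatedEvent, FileModifiedEvent, FileSystemEventHandler
from watchdog.observers import Observer

WATCH_ROOT = Path("./blueprints").absolute()  # hypothetical directory


class BlueprintWatcher(FileSystemEventHandler):
    def on_any_event(self, event):
        if event.is_directory:
            return
        rel_path = str(Path(event.src_path).absolute().relative_to(WATCH_ROOT))
        if isinstance(event, FileCreatedEvent):
            print("created, would trigger discovery for", rel_path)
        if isinstance(event, FileModifiedEvent):
            print("modified, would re-apply instances at", rel_path)


if __name__ == "__main__":
    WATCH_ROOT.mkdir(parents=True, exist_ok=True)
    observer = Observer()
    observer.schedule(BlueprintWatcher(), str(WATCH_ROOT), recursive=True)
    observer.start()  # a real service would block here and call observer.stop() on exit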
@@ -17,6 +17,7 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 from structlog.stdlib import get_logger
 from structlog.testing import capture_logs

@@ -37,7 +38,6 @@ from authentik.lib.utils.file import (
 from authentik.policies.api.exec import PolicyTestResultSerializer
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.types import PolicyResult
-from authentik.rbac.filters import ObjectFilter
 
 LOGGER = get_logger()

@@ -98,7 +98,6 @@ class ApplicationSerializer(ModelSerializer):
 class ApplicationViewSet(UsedByMixin, ModelViewSet):
     """Application Viewset"""
 
-    # pylint: disable=no-member
     queryset = Application.objects.all().prefetch_related("provider")
     serializer_class = ApplicationSerializer
     search_fields = [

@@ -123,7 +122,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
     def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
         """Custom filter_queryset method which ignores guardian, but still supports sorting"""
         for backend in list(self.filter_backends):
-            if backend == ObjectFilter:
+            if backend == ObjectPermissionsFilter:
                 continue
             queryset = backend().filter_queryset(self.request, queryset, self)
         return queryset
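# --- Illustrative sketch (not part of the diff) ---
# The _filter_queryset_for_list() hunk above runs every configured filter
# backend except the object-permission one, so list views keep sorting and
# search while permission scoping is decided elsewhere. The same control flow
# with plain stand-in classes instead of the DRF backends:
class PermissionBackend:  # the backend being skipped
    ...


class OrderingBackend:
    def filter_queryset(self, request, queryset, view):
        return sorted(queryset)


FILTER_BACKENDS = [PermissionBackend, OrderingBackend]


def filter_for_list(request, queryset, view):
    for backend in list(FILTER_BACKENDS):
        if backend is PermissionBackend:
            continue  # skip permission filtering, keep everything else
        queryset = backend().filter_queryset(request, queryset, view)
    return queryset


assert filter_for_list(None, [3, 1, 2], None) == [1, 2, 3]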
@@ -2,6 +2,7 @@
 from json import loads
 from typing import Optional
 
+from django.db.models.query import QuerySet
 from django.http import Http404
 from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
 from django_filters.filterset import FilterSet

@@ -13,12 +14,12 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 
 from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Group, User
-from authentik.rbac.api.roles import RoleSerializer
 
 
 class GroupMemberSerializer(ModelSerializer):

@@ -48,12 +49,6 @@ class GroupSerializer(ModelSerializer):
     users_obj = ListSerializer(
         child=GroupMemberSerializer(), read_only=True, source="users", required=False
     )
-    roles_obj = ListSerializer(
-        child=RoleSerializer(),
-        read_only=True,
-        source="roles",
-        required=False,
-    )
     parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
 
     num_pk = IntegerField(read_only=True)

@@ -76,10 +71,8 @@ class GroupSerializer(ModelSerializer):
             "parent",
             "parent_name",
             "users",
-            "users_obj",
             "attributes",
-            "roles",
-            "roles_obj",
+            "users_obj",
         ]
         extra_kwargs = {
             "users": {

@@ -139,13 +132,25 @@ class UserAccountSerializer(PassiveSerializer):
 class GroupViewSet(UsedByMixin, ModelViewSet):
     """Group Viewset"""
 
-    # pylint: disable=no-member
     queryset = Group.objects.all().select_related("parent").prefetch_related("users")
     serializer_class = GroupSerializer
     search_fields = ["name", "is_superuser"]
     filterset_class = GroupFilter
     ordering = ["name"]
 
+    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
+        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
+        for backend in list(self.filter_backends):
+            if backend == ObjectPermissionsFilter:
+                continue
+            queryset = backend().filter_queryset(self.request, queryset, self)
+        return queryset
+
+    def filter_queryset(self, queryset):
+        if self.request.user.has_perm("authentik_core.view_group"):
+            return self._filter_queryset_for_list(queryset)
+        return super().filter_queryset(queryset)
+
     @permission_required(None, ["authentik_core.add_user"])
     @extend_schema(
         request=UserAccountSerializer,
@@ -38,7 +38,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
 
     managed = ReadOnlyField()
     component = SerializerMethodField()
-    icon = ReadOnlyField(source="icon_url")
+    icon = ReadOnlyField(source="get_icon")
 
     def get_component(self, obj: Source) -> str:
         """Get object component so that we know how to edit the object"""
@@ -119,7 +119,6 @@ class TransactionApplicationResponseSerializer(PassiveSerializer):
 class TransactionalApplicationView(APIView):
     """Create provider and application and attach them in a single transaction"""
 
-    # TODO: Migrate to a more specific permission
     permission_classes = [IsAdminUser]
 
     @extend_schema(
@@ -73,11 +73,6 @@ class UsedByMixin:
         # but so we only apply them once, have a simple flag for the first object
         first_object = True
 
-        # TODO: This will only return the used-by references that the user can see
-        # Either we have to leak model information here to not make the list
-        # useless if the user doesn't have all permissions, or we need to double
-        # query and check if there is a difference between modes the user can see
-        # and can't see and add a warning
         for obj in get_objects_for_user(
             request.user, f"{app}.view_{model_name}", manager
         ).all():
@@ -7,6 +7,7 @@ from django.contrib.auth import update_session_auth_hash
 from django.contrib.sessions.backends.cache import KEY_PREFIX
 from django.core.cache import cache
 from django.db.models.functions import ExtractHour
+from django.db.models.query import QuerySet
 from django.db.transaction import atomic
 from django.db.utils import IntegrityError
 from django.urls import reverse_lazy

@@ -51,6 +52,7 @@ from rest_framework.serializers import (
 )
 from rest_framework.validators import UniqueValidator
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 from structlog.stdlib import get_logger
 
 from authentik.admin.api.metrics import CoordinateSerializer

@@ -171,11 +173,6 @@ class UserSerializer(ModelSerializer):
             raise ValidationError("Setting a user to internal service account is not allowed.")
         return user_type
 
-    def validate(self, attrs: dict) -> dict:
-        if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
-            raise ValidationError("Can't modify internal service account users")
-        return super().validate(attrs)
-
     class Meta:
         model = User
         fields = [

@@ -193,7 +190,6 @@ class UserSerializer(ModelSerializer):
             "uid",
             "path",
             "type",
-            "uuid",
         ]
         extra_kwargs = {
             "name": {"allow_blank": True},

@@ -208,7 +204,6 @@ class UserSelfSerializer(ModelSerializer):
     groups = SerializerMethodField()
     uid = CharField(read_only=True)
     settings = SerializerMethodField()
-    system_permissions = SerializerMethodField()
 
     @extend_schema_field(
         ListSerializer(

@@ -230,14 +225,6 @@ class UserSelfSerializer(ModelSerializer):
         """Get user settings with tenant and group settings applied"""
         return user.group_attributes(self._context["request"]).get("settings", {})
 
-    def get_system_permissions(self, user: User) -> list[str]:
-        """Get all system permissions assigned to the user"""
-        return list(
-            user.user_permissions.filter(
-                content_type__app_label="authentik_rbac", content_type__model="systempermission"
-            ).values_list("codename", flat=True)
-        )
-
     class Meta:
         model = User
         fields = [

@@ -252,7 +239,6 @@ class UserSelfSerializer(ModelSerializer):
             "uid",
             "settings",
             "type",
-            "system_permissions",
         ]
         extra_kwargs = {
             "is_active": {"read_only": True},

@@ -667,6 +653,19 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 
         return Response(status=204)
 
+    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
+        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
+        for backend in list(self.filter_backends):
+            if backend == ObjectPermissionsFilter:
+                continue
+            queryset = backend().filter_queryset(self.request, queryset, self)
+        return queryset
+
+    def filter_queryset(self, queryset):
+        if self.request.user.has_perm("authentik_core.view_user"):
+            return self._filter_queryset_for_list(queryset)
+        return super().filter_queryset(queryset)
+
     @extend_schema(
         responses={
             200: inline_serializer(
@@ -44,7 +44,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
         if request:
             req.http_request = request
         self._context["request"] = req
-        req.context.update(**kwargs)
         self._context.update(**kwargs)
         self.dry_run = dry_run
@@ -17,15 +17,9 @@ class Command(BaseCommand):
     """Run worker"""
 
     def add_arguments(self, parser):
-        parser.add_argument(
-            "-b",
-            "--beat",
-            action="store_false",
-            help="When set, this worker will _not_ run Beat (scheduled) tasks",
-        )
+        parser.add_argument("-b", "--beat", action="store_true")
 
     def handle(self, **options):
-        LOGGER.debug("Celery options", **options)
         close_old_connections()
         if CONFIG.get_bool("remote_debug"):
             import debugpy
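# --- Illustrative sketch (not part of the diff) ---
# The two sides of the add_arguments hunk differ in argparse semantics:
# action="store_true" gives a default of False (the flag opts in), while
# action="store_false" gives a default of True (the flag opts out), which is
# why the left side's help text says the flag disables Beat. Demonstration:
from argparse import ArgumentParser

opt_in = ArgumentParser()
opt_in.add_argument("-b", "--beat", action="store_true")
assert opt_in.parse_args([]).beat is False
assert opt_in.parse_args(["-b"]).beat is True

opt_out = ArgumentParser()
opt_out.add_argument("-b", "--beat", action="store_false")
assert opt_out.parse_args([]).beat is True
assert opt_out.parse_args(["-b"]).beat is False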
@@ -1,45 +0,0 @@
-# Generated by Django 4.2.6 on 2023-10-11 13:37
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_core", "0031_alter_user_type"),
-        ("authentik_rbac", "0001_initial"),
-    ]
-
-    operations = [
-        migrations.AlterModelOptions(
-            name="group",
-            options={"verbose_name": "Group", "verbose_name_plural": "Groups"},
-        ),
-        migrations.AlterModelOptions(
-            name="token",
-            options={
-                "permissions": [("view_token_key", "View token's key")],
-                "verbose_name": "Token",
-                "verbose_name_plural": "Tokens",
-            },
-        ),
-        migrations.AlterModelOptions(
-            name="user",
-            options={
-                "permissions": [
-                    ("reset_user_password", "Reset Password"),
-                    ("impersonate", "Can impersonate other users"),
-                    ("assign_user_permissions", "Can assign permissions to users"),
-                    ("unassign_user_permissions", "Can unassign permissions from users"),
-                ],
-                "verbose_name": "User",
-                "verbose_name_plural": "Users",
-            },
-        ),
-        migrations.AddField(
-            model_name="group",
-            name="roles",
-            field=models.ManyToManyField(
-                blank=True, related_name="ak_groups", to="authentik_rbac.role"
-            ),
-        ),
-    ]
@@ -1,7 +1,7 @@
 """authentik core models"""
 from datetime import timedelta
 from hashlib import sha256
-from typing import Any, Optional, Self
+from typing import Any, Optional
 from uuid import uuid4
 
 from deepmerge import always_merger

@@ -88,8 +88,6 @@ class Group(SerializerModel):
         default=False, help_text=_("Users added to this group will be superusers.")
     )
 
-    roles = models.ManyToManyField("authentik_rbac.Role", related_name="ak_groups", blank=True)
-
     parent = models.ForeignKey(
         "Group",
         blank=True,

@@ -117,38 +115,6 @@ class Group(SerializerModel):
         """Recursively check if `user` is member of us, or any parent."""
         return user.all_groups().filter(group_uuid=self.group_uuid).exists()
 
-    def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]:
-        """Recursively get all groups that have this as parent or are indirectly related"""
-        direct_groups = []
-        if isinstance(self, QuerySet):
-            direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator())
-        else:
-            direct_groups = [self.pk]
-        if len(direct_groups) < 1:
-            return Group.objects.none()
-        query = """
-        WITH RECURSIVE parents AS (
-            SELECT authentik_core_group.*, 0 AS relative_depth
-            FROM authentik_core_group
-            WHERE authentik_core_group.group_uuid = ANY(%s)
-
-            UNION ALL
-
-            SELECT authentik_core_group.*, parents.relative_depth + 1
-            FROM authentik_core_group, parents
-            WHERE (
-                authentik_core_group.group_uuid = parents.parent_id and
-                parents.relative_depth < 20
-            )
-        )
-        SELECT group_uuid
-        FROM parents
-        GROUP BY group_uuid, name
-        ORDER BY name;
-        """
-        group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
-        return Group.objects.filter(pk__in=group_pks)
-
     def __str__(self):
         return f"Group {self.name}"

@@ -159,8 +125,6 @@ class Group(SerializerModel):
                 "parent",
             ),
         )
-        verbose_name = _("Group")
-        verbose_name_plural = _("Groups")
 
 
 class UserManager(DjangoUserManager):

@@ -196,7 +160,33 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
         """Recursively get all groups this user is a member of.
         At least one query is done to get the direct groups of the user, with groups
         there are at most 3 queries done"""
-        return Group.children_recursive(self.ak_groups.all())
+        direct_groups = list(
+            x for x in self.ak_groups.all().values_list("pk", flat=True).iterator()
+        )
+        if len(direct_groups) < 1:
+            return Group.objects.none()
+        query = """
+        WITH RECURSIVE parents AS (
+            SELECT authentik_core_group.*, 0 AS relative_depth
+            FROM authentik_core_group
+            WHERE authentik_core_group.group_uuid = ANY(%s)
+
+            UNION ALL
+
+            SELECT authentik_core_group.*, parents.relative_depth + 1
+            FROM authentik_core_group, parents
+            WHERE (
+                authentik_core_group.group_uuid = parents.parent_id and
+                parents.relative_depth < 20
+            )
+        )
+        SELECT group_uuid
+        FROM parents
+        GROUP BY group_uuid, name
+        ORDER BY name;
+        """
+        group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
+        return Group.objects.filter(pk__in=group_pks)
 
     def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]:
         """Get a dictionary containing the attributes from all groups the user belongs to,

@@ -271,14 +261,12 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
         return get_avatar(self)
 
     class Meta:
+        permissions = (
+            ("reset_user_password", "Reset Password"),
+            ("impersonate", "Can impersonate other users"),
+        )
         verbose_name = _("User")
         verbose_name_plural = _("Users")
-        permissions = [
-            ("reset_user_password", _("Reset Password")),
-            ("impersonate", _("Can impersonate other users")),
-            ("assign_user_permissions", _("Can assign permissions to users")),
-            ("unassign_user_permissions", _("Can unassign permissions from users")),
-        ]
 
 
 class Provider(SerializerModel):

@@ -687,7 +675,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
         models.Index(fields=["identifier"]),
         models.Index(fields=["key"]),
     ]
-    permissions = [("view_token_key", _("View token's key"))]
+    permissions = (("view_token_key", "View token's key"),)
 
 
 class PropertyMapping(SerializerModel, ManagedModel):
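# --- Illustrative sketch (not part of the diff) ---
# Standalone demonstration of the recursive CTE that the hunks above move from
# Group.children_recursive() into User.all_groups(): starting from the direct
# groups, follow parent links up to a bounded depth. The real query targets
# PostgreSQL (ANY(%s) on authentik_core_group); sqlite3 is used here only so
# the sketch runs anywhere.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE grp (id INTEGER PRIMARY KEY, name TEXT, parent_id INTEGER);
    INSERT INTO grp VALUES (1, 'root', NULL), (2, 'staff', 1), (3, 'dev', 2);
    """
)
# the user is a direct member of group 3; expand 3 -> 2 -> 1
rows = conn.execute(
    """
    WITH RECURSIVE parents(id, name, parent_id, depth) AS (
        SELECT id, name, parent_id, 0 FROM grp WHERE id = ?
        UNION ALL
        SELECT g.id, g.name, g.parent_id, p.depth + 1
        FROM grp g JOIN parents p ON g.id = p.parent_id
        WHERE p.depth < 20
    )
    SELECT id, name FROM parents ORDER BY name;
    """,
    (3,),
).fetchall()
assert rows == [(3, "dev"), (1, "root"), (2, "staff")]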
@@ -7,7 +7,6 @@ from django.db.models import Model
 from django.db.models.signals import post_save, pre_delete, pre_save
 from django.dispatch import receiver
 from django.http.request import HttpRequest
-from structlog.stdlib import get_logger
 
 from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User

@@ -16,8 +15,6 @@ password_changed = Signal()
 # Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
 login_failed = Signal()
 
-LOGGER = get_logger()
-
 
 @receiver(post_save, sender=Application)
 def post_save_application(sender: type[Model], instance, created: bool, **_):
@@ -97,7 +97,6 @@ class SourceFlowManager:
         if self.request.user.is_authenticated:
             new_connection.user = self.request.user
             new_connection = self.update_connection(new_connection, **kwargs)
-            # pylint: disable=no-member
             new_connection.save()
             return Action.LINK, new_connection
@@ -13,6 +13,7 @@
 {% block head_before %}
 {% endblock %}
 <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
+<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
 <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
 <script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script>
 <script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script>
@@ -16,8 +16,8 @@ You've logged out of {{ application }}.
 {% block card %}
 <form method="POST" class="pf-c-form">
     <p>
-        {% blocktrans with application=application.name branding_title=tenant.branding_title %}
-        You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
+        {% blocktrans with application=application.name %}
+        You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your authentik account.
         {% endblocktrans %}
     </p>
@@ -6,7 +6,6 @@
 {% block head_before %}
 <link rel="prefetch" href="/static/dist/assets/images/flow_background.jpg" />
 <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
-<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
 {% include "base/header_js.html" %}
 {% endblock %}
@@ -21,9 +21,10 @@ def create_test_flow(
     )
 
 
-def create_test_user(name: Optional[str] = None, **kwargs) -> User:
-    """Generate a test user"""
+def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
+    """Generate a test-admin user"""
     uid = generate_id(20) if not name else name
+    group = Group.objects.create(name=uid, is_superuser=True)
     kwargs.setdefault("email", f"{uid}@goauthentik.io")
     kwargs.setdefault("username", uid)
     user: User = User.objects.create(

@@ -32,13 +33,6 @@ def create_test_user(name: Optional[str] = None, **kwargs) -> User:
     )
     user.set_password(uid)
     user.save()
-    return user
-
-
-def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
-    """Generate a test-admin user"""
-    user = create_test_user(name, **kwargs)
-    group = Group.objects.create(name=user.name or name, is_superuser=True)
     group.users.add(user)
     return user
@@ -1,10 +1,13 @@
 """authentik crypto app config"""
 from datetime import datetime
-from typing import Optional
+from typing import TYPE_CHECKING, Optional
 
 from authentik.blueprints.apps import ManagedAppConfig
 from authentik.lib.generators import generate_id
 
+if TYPE_CHECKING:
+    from authentik.crypto.models import CertificateKeyPair
+
 MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"

@@ -20,37 +23,33 @@ class AuthentikCryptoConfig(ManagedAppConfig):
         """Load crypto tasks"""
         self.import_module("authentik.crypto.tasks")
 
-    def _create_update_cert(self):
+    def _create_update_cert(self, cert: Optional["CertificateKeyPair"] = None):
         from authentik.crypto.builder import CertificateBuilder
         from authentik.crypto.models import CertificateKeyPair
 
-        common_name = "authentik Internal JWT Certificate"
-        builder = CertificateBuilder(common_name)
+        builder = CertificateBuilder("authentik Internal JWT Certificate")
         builder.build(
             subject_alt_names=["goauthentik.io"],
             validity_days=360,
         )
-        CertificateKeyPair.objects.update_or_create(
-            managed=MANAGED_KEY,
-            defaults={
-                "name": common_name,
-                "certificate_data": builder.certificate,
-                "key_data": builder.private_key,
-            },
-        )
+        if not cert:
+            cert = CertificateKeyPair()
+        builder.cert = cert
+        builder.cert.managed = MANAGED_KEY
+        builder.save()
 
     def reconcile_managed_jwt_cert(self):
         """Ensure managed JWT certificate"""
         from authentik.crypto.models import CertificateKeyPair
 
-        cert: Optional[CertificateKeyPair] = CertificateKeyPair.objects.filter(
-            managed=MANAGED_KEY
-        ).first()
-        now = datetime.now()
-        if not cert or (
-            now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after
-        ):
+        certs = CertificateKeyPair.objects.filter(managed=MANAGED_KEY)
+        if not certs.exists():
             self._create_update_cert()
+            return
+        cert: CertificateKeyPair = certs.first()
+        now = datetime.now()
+        if now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after:
+            self._create_update_cert(cert)
 
     def reconcile_self_signed(self):
         """Create self-signed keypair"""

@@ -62,10 +61,4 @@ class AuthentikCryptoConfig(ManagedAppConfig):
             return
         builder = CertificateBuilder(name)
         builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
-        CertificateKeyPair.objects.get_or_create(
-            name=name,
-            defaults={
-                "certificate_data": builder.certificate,
-                "key_data": builder.private_key,
-            },
-        )
+        builder.save()
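# --- Illustrative sketch (not part of the diff) ---
# The reconcile_managed_jwt_cert() hunk above boils down to one decision:
# regenerate when no managed certificate exists, or when "now" falls outside
# the certificate's validity window. The same check with plain datetimes
# standing in for the x509 certificate object:
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Optional


@dataclass
class CertWindow:
    not_valid_before: datetime
    not_valid_after: datetime


def needs_rotation(cert: Optional[CertWindow], now: Optional[datetime] = None) -> bool:
    now = now or datetime.now()
    if cert is None:
        return True
    return now < cert.not_valid_before or now > cert.not_valid_after


fresh = CertWindow(datetime.now() - timedelta(days=1), datetime.now() + timedelta(days=359))
assert needs_rotation(None)
assert not needs_rotation(fresh)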
@@ -6,7 +6,7 @@ from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.fields import BooleanField, CharField, DateTimeField, IntegerField
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import IsAdminUser, IsAuthenticated
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer

@@ -84,7 +84,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
             200: inline_serializer("InstallIDSerializer", {"install_id": CharField(required=True)}),
         },
     )
-    @action(detail=False, methods=["GET"])
+    @action(detail=False, methods=["GET"], permission_classes=[IsAdminUser])
     def get_install_id(self, request: Request) -> Response:
         """Get install_id"""
         return Response(
@@ -33,8 +33,4 @@ class Migration(migrations.Migration):
                 "verbose_name_plural": "License Usage Records",
             },
         ),
-        migrations.AlterModelOptions(
-            name="license",
-            options={"verbose_name": "License", "verbose_name_plural": "Licenses"},
-        ),
     ]
@@ -19,10 +19,8 @@ from django.utils.translation import gettext as _
 from guardian.shortcuts import get_anonymous_user
 from jwt import PyJWTError, decode, get_unverified_header
 from rest_framework.exceptions import ValidationError
-from rest_framework.serializers import BaseSerializer
 
 from authentik.core.models import ExpiringModel, User, UserTypes
-from authentik.lib.models import SerializerModel
 from authentik.root.install_id import get_install_id

@@ -136,9 +134,6 @@ class LicenseKey:
 
     def record_usage(self):
         """Capture the current validity status and metrics and save them"""
-        threshold = now() - timedelta(hours=8)
-        if LicenseUsage.objects.filter(record_date__gte=threshold).exists():
-            return
         LicenseUsage.objects.create(
             user_count=self.get_default_user_count(),
             external_user_count=self.get_external_user_count(),

@@ -156,7 +151,7 @@ class LicenseKey:
         return usage.record_date
 
 
-class License(SerializerModel):
+class License(models.Model):
     """An authentik enterprise license"""
 
     license_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)

@@ -167,12 +162,6 @@ class License(SerializerModel):
     internal_users = models.BigIntegerField()
     external_users = models.BigIntegerField()
 
-    @property
-    def serializer(self) -> type[BaseSerializer]:
-        from authentik.enterprise.api import LicenseSerializer
-
-        return LicenseSerializer
-
     @property
     def status(self) -> LicenseKey:
         """Get parsed license status"""

@@ -180,8 +169,6 @@ class License(SerializerModel):
 
     class Meta:
         indexes = (HashIndex(fields=("key",)),)
-        verbose_name = _("License")
-        verbose_name_plural = _("Licenses")
 
 
 def usage_expiry():
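# --- Illustrative sketch (not part of the diff) ---
# The left-hand side of the record_usage() hunk throttles writes: skip creating
# a usage record when one newer than 8 hours already exists. A framework-free
# sketch of that threshold guard (the list stands in for LicenseUsage rows):
from datetime import datetime, timedelta
from typing import Optional

records: list[datetime] = []


def record_usage(now: Optional[datetime] = None) -> bool:
    now = now or datetime.now()
    threshold = now - timedelta(hours=8)
    if any(record_date >= threshold for record_date in records):
        return False  # a recent record exists, do nothing
    records.append(now)
    return True


assert record_usage() is True
assert record_usage() is False  # second call within 8 hours is throttled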
@@ -6,7 +6,7 @@ from authentik.lib.utils.time import fqdn_rand
 CELERY_BEAT_SCHEDULE = {
     "enterprise_calculate_license": {
         "task": "authentik.enterprise.tasks.calculate_license",
-        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/2"),
+        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/8"),
         "options": {"queue": "authentik_scheduled"},
     }
 }
@@ -6,4 +6,5 @@ from authentik.root.celery import CELERY_APP
 @CELERY_APP.task()
 def calculate_license():
     """Calculate licensing status"""
-    LicenseKey.get_total().record_usage()
+    total = LicenseKey.get_total()
+    total.record_usage()
@@ -27,7 +27,6 @@ from authentik.lib.sentry import before_send
 from authentik.lib.utils.errors import exception_to_string
 from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
-from authentik.policies.reputation.models import Reputation
 from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
 from authentik.providers.scim.models import SCIMGroup, SCIMUser
 from authentik.stages.authenticator_static.models import StaticToken

@@ -53,13 +52,11 @@ IGNORED_MODELS = (
     RefreshToken,
     SCIMUser,
     SCIMGroup,
-    Reputation,
 )
 
 
 def should_log_model(model: Model) -> bool:
     """Return true if operation on `model` should be logged"""
-    # Check for silk by string so this comparison doesn't fail when silk isn't installed
     if model.__module__.startswith("silk"):
         return False
     return model.__class__ not in IGNORED_MODELS

@@ -96,30 +93,21 @@ class AuditMiddleware:
     of models"""
 
     get_response: Callable[[HttpRequest], HttpResponse]
-    anonymous_user: User = None
 
     def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
         self.get_response = get_response
 
-    def _ensure_fallback_user(self):
-        """Defer fetching anonymous user until we have to"""
-        if self.anonymous_user:
-            return
-        from guardian.shortcuts import get_anonymous_user
-
-        self.anonymous_user = get_anonymous_user()
-
     def connect(self, request: HttpRequest):
         """Connect signal for automatic logging"""
-        self._ensure_fallback_user()
-        user = getattr(request, "user", self.anonymous_user)
-        if not user.is_authenticated:
-            user = self.anonymous_user
+        if not hasattr(request, "user"):
+            return
+        if not getattr(request.user, "is_authenticated", False):
+            return
         if not hasattr(request, "request_id"):
             return
-        post_save_handler = partial(self.post_save_handler, user=user, request=request)
-        pre_delete_handler = partial(self.pre_delete_handler, user=user, request=request)
-        m2m_changed_handler = partial(self.m2m_changed_handler, user=user, request=request)
+        post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
+        pre_delete_handler = partial(self.pre_delete_handler, user=request.user, request=request)
+        m2m_changed_handler = partial(self.m2m_changed_handler, user=request.user, request=request)
         post_save.connect(
             post_save_handler,
             dispatch_uid=request.request_id,
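# --- Illustrative sketch (not part of the diff) ---
# The AuditMiddleware hunk binds the request's user into signal handlers with
# functools.partial and registers them under a per-request dispatch uid, so
# concurrent requests don't observe each other's handlers. A framework-free
# registry mimicking that dispatch_uid behaviour:
from functools import partial
from typing import Callable

handlers: dict[str, Callable] = {}


def audit_handler(instance, *, user):
    print(f"{user} touched {instance}")


def connect(request_id: str, user: str):
    # registering twice under the same uid replaces, never duplicates
    handlers[request_id] = partial(audit_handler, user=user)


def disconnect(request_id: str):
    handlers.pop(request_id, None)


connect("req-1", "alice")
handlers["req-1"]("Application(foo)")  # -> alice touched Application(foo)
disconnect("req-1")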
@@ -217,7 +217,6 @@ class Event(SerializerModel, ExpiringModel):
                 "path": request.path,
                 "method": request.method,
                 "args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
-                "user_agent": request.META.get("HTTP_USER_AGENT", ""),
             }
             # Special case for events created during flow execution
             # since they keep the http query within a wrapped query

@@ -437,39 +436,32 @@ class NotificationTransport(SerializerModel):
 
     def send_email(self, notification: "Notification") -> list[str]:
         """Send notification via global email configuration"""
-        subject_prefix = "authentik Notification: "
-        context = {
-            "key_value": {
-                "user_email": notification.user.email,
-                "user_username": notification.user.username,
-            },
-            "body": notification.body,
-            "title": "",
+        subject = "authentik Notification: "
+        key_value = {
+            "user_email": notification.user.email,
+            "user_username": notification.user.username,
         }
         if notification.event and notification.event.user:
-            context["key_value"]["event_user_email"] = notification.event.user.get("email", None)
-            context["key_value"]["event_user_username"] = notification.event.user.get(
-                "username", None
-            )
+            key_value["event_user_email"] = notification.event.user.get("email", None)
+            key_value["event_user_username"] = notification.event.user.get("username", None)
         if notification.event:
-            context["title"] += notification.event.action
+            subject += notification.event.action
             for key, value in notification.event.context.items():
                 if not isinstance(value, str):
                     continue
-                context["key_value"][key] = value
+                key_value[key] = value
         else:
-            context["title"] += notification.body[:75]
-        # TODO: improve permission check
-        if notification.user.is_superuser:
-            context["source"] = {
-                "from": self.name,
-            }
+            subject += notification.body[:75]
         mail = TemplateEmailMessage(
-            subject=subject_prefix + context["title"],
+            subject=subject,
             to=[notification.user.email],
             language=notification.user.locale(),
-            template_name="email/event_notification.html",
-            template_context=context,
+            template_name="email/generic.html",
+            template_context={
+                "title": subject,
+                "body": notification.body,
+                "key_value": key_value,
+            },
         )
         # Email is sent directly here, as the call to send() should have been from a task.
         try:
@@ -206,8 +206,8 @@ def prefill_task(func):
         task_call_module=func.__module__,
         task_call_func=func.__name__,
         # We don't have real values for these attributes but they cannot be null
-        start_timestamp=0,
-        finish_timestamp=0,
+        start_timestamp=default_timer(),
+        finish_timestamp=default_timer(),
         finish_time=datetime.now(),
     ).save(86400)
     LOGGER.debug("prefilled task", task_name=func.__name__)
@@ -13,7 +13,6 @@ from authentik.events.tasks import event_notification_handler, gdpr_cleanup
 from authentik.flows.models import Stage
 from authentik.flows.planner import PLAN_CONTEXT_SOURCE, FlowPlan
 from authentik.flows.views.executor import SESSION_KEY_PLAN
-from authentik.lib.config import CONFIG
 from authentik.stages.invitation.models import Invitation
 from authentik.stages.invitation.signals import invitation_used
 from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS

@@ -93,5 +92,4 @@ def event_post_save_notification(sender, instance: Event, **_):
 @receiver(pre_delete, sender=User)
 def event_user_pre_delete_cleanup(sender, instance: User, **_):
     """If gdpr_compliance is enabled, remove all the user's events"""
-    if CONFIG.get_bool("gdpr_compliance", True):
-        gdpr_cleanup.delay(instance.pk)
+    gdpr_cleanup.delay(instance.pk)
@@ -53,15 +53,7 @@ class TestEvents(TestCase):
         """Test plain from_http"""
         event = Event.new("unittest").from_http(self.factory.get("/"))
         self.assertEqual(
-            event.context,
-            {
-                "http_request": {
-                    "args": {},
-                    "method": "GET",
-                    "path": "/",
-                    "user_agent": "",
-                }
-            },
+            event.context, {"http_request": {"args": {}, "method": "GET", "path": "/"}}
         )
 
     def test_from_http_clean_querystring(self):

@@ -75,7 +67,6 @@ class TestEvents(TestCase):
                     "args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
                     "method": "GET",
                     "path": "/",
-                    "user_agent": "",
                 }
             },
         )

@@ -92,7 +83,6 @@ class TestEvents(TestCase):
                     "args": {"token": SafeExceptionReporterFilter.cleansed_substitute},
                     "method": "GET",
                     "path": "/",
-                    "user_agent": "",
                 }
             },
         )
@@ -2,20 +2,17 @@
 import re
 from copy import copy
 from dataclasses import asdict, is_dataclass
-from datetime import date, datetime, time, timedelta
 from enum import Enum
 from pathlib import Path
-from types import GeneratorType, NoneType
+from types import GeneratorType
 from typing import Any, Optional
 from uuid import UUID

 from django.contrib.auth.models import AnonymousUser
 from django.core.handlers.wsgi import WSGIRequest
-from django.core.serializers.json import DjangoJSONEncoder
 from django.db import models
 from django.db.models.base import Model
 from django.http.request import HttpRequest
-from django.utils import timezone
 from django.views.debug import SafeExceptionReporterFilter
 from geoip2.models import City
 from guardian.utils import get_anonymous_user

@@ -87,7 +84,7 @@ def get_user(user: User, original_user: Optional[User] = None) -> dict[str, Any]
     return user_data


-# pylint: disable=too-many-return-statements,too-many-branches
+# pylint: disable=too-many-return-statements
 def sanitize_item(value: Any) -> Any:
     """Sanitize a single item, ensure it is JSON parsable"""
     if is_dataclass(value):

@@ -137,37 +134,7 @@ def sanitize_item(value: Any) -> Any:
             "type": value.__name__,
             "module": value.__module__,
         }
-    # See
-    # https://github.com/encode/django-rest-framework/blob/master/rest_framework/utils/encoders.py
-    # For Date Time string spec, see ECMA 262
-    # https://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
-    if isinstance(value, datetime):
-        representation = value.isoformat()
-        if representation.endswith("+00:00"):
-            representation = representation[:-6] + "Z"
-        return representation
-    if isinstance(value, date):
-        return value.isoformat()
-    if isinstance(value, time):
-        if timezone and timezone.is_aware(value):
-            raise ValueError("JSON can't represent timezone-aware times.")
-        return value.isoformat()
-    if isinstance(value, timedelta):
-        return str(value.total_seconds())
-    if callable(value):
-        return {
-            "type": "callable",
-            "name": value.__name__,
-            "module": value.__module__,
-        }
-    # List taken from the stdlib's JSON encoder (_make_iterencode, encoder.py:415)
-    if isinstance(value, (bool, int, float, NoneType, list, tuple, dict)):
-        return value
-    try:
-        return DjangoJSONEncoder().default(value)
-    except TypeError:
-        return str(value)
-    return str(value)
+    return value


 def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
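
The removed branch above followed DRF's encoder conventions for date/time values; a self-contained sketch of the ECMA 262 "Z"-suffix rule it applied to UTC datetimes:

from datetime import datetime, timezone

def iso_ecma262(value: datetime) -> str:
    # ISO 8601 with the "+00:00" offset rewritten as "Z", as in the removed branch.
    representation = value.isoformat()
    if representation.endswith("+00:00"):
        representation = representation[:-6] + "Z"
    return representation

print(iso_ecma262(datetime(2023, 10, 10, 17, 18, tzinfo=timezone.utc)))  # 2023-10-10T17:18:00Z
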
@@ -45,4 +45,3 @@ class FlowStageBindingViewSet(UsedByMixin, ModelViewSet):
     serializer_class = FlowStageBindingSerializer
     filterset_fields = "__all__"
     search_fields = ["stage__name"]
-    ordering = ["order"]
@@ -8,11 +8,6 @@ GAUGE_FLOWS_CACHED = Gauge(
     "authentik_flows_cached",
     "Cached flows",
 )
-HIST_FLOW_EXECUTION_STAGE_TIME = Histogram(
-    "authentik_flows_execution_stage_time",
-    "Duration each stage took to execute.",
-    ["stage_type", "method"],
-)
 HIST_FLOWS_PLAN_TIME = Histogram(
     "authentik_flows_plan_time",
     "Duration to build a plan for a flow",
@@ -132,6 +132,13 @@ class PermissionDict(TypedDict):
     name: str


+class PermissionSerializer(PassiveSerializer):
+    """Permission used for consent"""
+
+    name = CharField(allow_blank=True)
+    id = CharField()
+
+
 class ChallengeResponse(PassiveSerializer):
     """Base class for all challenge responses"""
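
`PassiveSerializer` is authentik's read-only serializer base; as a rough sketch, the added class maps onto a plain DRF serializer like this (stock rest_framework only, class name illustrative):

from rest_framework import serializers

class PermissionSketch(serializers.Serializer):
    """Rough DRF-only equivalent of the added PermissionSerializer."""
    name = serializers.CharField(allow_blank=True)
    id = serializers.CharField()
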
@@ -1,25 +0,0 @@
-# Generated by Django 4.2.6 on 2023-10-10 17:18
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_flows", "0025_alter_flowstagebinding_evaluate_on_plan_and_more"),
-    ]
-
-    operations = [
-        migrations.AlterModelOptions(
-            name="flow",
-            options={
-                "permissions": [
-                    ("export_flow", "Can export a Flow"),
-                    ("inspect_flow", "Can inspect a Flow's execution"),
-                    ("view_flow_cache", "View Flow's cache metrics"),
-                    ("clear_flow_cache", "Clear Flow's cache metrics"),
-                ],
-                "verbose_name": "Flow",
-                "verbose_name_plural": "Flows",
-            },
-        ),
-    ]

@@ -1,34 +0,0 @@
-# Generated by Django 4.2.6 on 2023-10-28 14:24
-
-from django.apps.registry import Apps
-from django.db import migrations
-from django.db.backends.base.schema import BaseDatabaseSchemaEditor
-
-
-def set_oobe_flow_authentication(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
-    from guardian.shortcuts import get_anonymous_user
-
-    Flow = apps.get_model("authentik_flows", "Flow")
-    User = apps.get_model("authentik_core", "User")
-
-    db_alias = schema_editor.connection.alias
-
-    users = User.objects.using(db_alias).exclude(username="akadmin")
-    try:
-        users = users.exclude(pk=get_anonymous_user().pk)
-    # pylint: disable=broad-except
-    except Exception:  # nosec
-        pass
-
-    if users.exists():
-        Flow.objects.filter(slug="initial-setup").update(authentication="require_superuser")
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_flows", "0026_alter_flow_options"),
-    ]
-
-    operations = [
-        migrations.RunPython(set_oobe_flow_authentication),
-    ]
@@ -194,10 +194,9 @@ class Flow(SerializerModel, PolicyBindingModel):
         verbose_name_plural = _("Flows")

         permissions = [
-            ("export_flow", _("Can export a Flow")),
-            ("inspect_flow", _("Can inspect a Flow's execution")),
-            ("view_flow_cache", _("View Flow's cache metrics")),
-            ("clear_flow_cache", _("Clear Flow's cache metrics")),
+            ("export_flow", "Can export a Flow"),
+            ("view_flow_cache", "View Flow's cache metrics"),
+            ("clear_flow_cache", "Clear Flow's cache metrics"),
         ]

@@ -167,11 +167,7 @@ class ChallengeStageView(StageView):
                 stage_type=self.__class__.__name__, method="get_challenge"
             ).time(),
         ):
-            try:
-                challenge = self.get_challenge(*args, **kwargs)
-            except StageInvalidException as exc:
-                self.logger.debug("Got StageInvalidException", exc=exc)
-                return self.executor.stage_invalid()
+            challenge = self.get_challenge(*args, **kwargs)
         with Hub.current.start_span(
             op="authentik.flow.stage._get_challenge",
             description=self.__class__.__name__,

@@ -24,7 +24,6 @@ from structlog.stdlib import BoundLogger, get_logger

 from authentik.core.models import Application
 from authentik.events.models import Event, EventAction, cleanse_dict
-from authentik.flows.apps import HIST_FLOW_EXECUTION_STAGE_TIME
 from authentik.flows.challenge import (
     Challenge,
     ChallengeResponse,
@@ -267,21 +266,17 @@ class FlowExecutorView(APIView):
     )
     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """Get the next pending challenge from the currently active flow."""
-        class_path = class_to_path(self.current_stage_view.__class__)
         self._logger.debug(
             "f(exec): Passing GET",
-            view_class=class_path,
+            view_class=class_to_path(self.current_stage_view.__class__),
             stage=self.current_stage,
         )
         try:
             with Hub.current.start_span(
                 op="authentik.flow.executor.stage",
-                description=class_path,
-            ) as span, HIST_FLOW_EXECUTION_STAGE_TIME.labels(
-                method=request.method.upper(),
-                stage_type=class_path,
-            ).time():
-                span.set_data("Method", request.method.upper())
+                description=class_to_path(self.current_stage_view.__class__),
+            ) as span:
+                span.set_data("Method", "GET")
                 span.set_data("authentik Stage", self.current_stage_view)
                 span.set_data("authentik Flow", self.flow.slug)
                 stage_response = self.current_stage_view.dispatch(request)

@@ -315,21 +310,17 @@ class FlowExecutorView(APIView):
     )
     def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """Solve the previously retrieved challenge and advanced to the next stage."""
-        class_path = class_to_path(self.current_stage_view.__class__)
         self._logger.debug(
             "f(exec): Passing POST",
-            view_class=class_path,
+            view_class=class_to_path(self.current_stage_view.__class__),
             stage=self.current_stage,
         )
         try:
             with Hub.current.start_span(
                 op="authentik.flow.executor.stage",
-                description=class_path,
-            ) as span, HIST_FLOW_EXECUTION_STAGE_TIME.labels(
-                method=request.method.upper(),
-                stage_type=class_path,
-            ).time():
-                span.set_data("Method", request.method.upper())
+                description=class_to_path(self.current_stage_view.__class__),
+            ) as span:
+                span.set_data("Method", "POST")
                 span.set_data("authentik Stage", self.current_stage_view)
                 span.set_data("authentik Flow", self.flow.slug)
                 stage_response = self.current_stage_view.dispatch(request)
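
Both GET and POST lose the `HIST_FLOW_EXECUTION_STAGE_TIME` histogram on the right, keeping only the Sentry span. A sketch of the combined context-manager pattern the left side used, with stand-ins for the span and histogram (names illustrative):

from contextlib import contextmanager
from time import perf_counter

@contextmanager
def start_span(description: str):
    # Stand-in for Hub.current.start_span(...)
    yield description

@contextmanager
def hist_time(metrics: dict, key: str):
    # Stand-in for Histogram.labels(...).time(): accumulate elapsed seconds per key.
    start = perf_counter()
    try:
        yield
    finally:
        metrics[key] = metrics.get(key, 0.0) + (perf_counter() - start)

metrics: dict = {}
with start_span("IdentificationStageView") as span, hist_time(metrics, "GET"):
    pass  # stage dispatch would run here, traced and timed together
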
@@ -3,7 +3,6 @@ from hashlib import sha256
 from typing import Any

 from django.conf import settings
-from django.http import Http404
 from django.http.request import HttpRequest
 from django.http.response import HttpResponse
 from django.shortcuts import get_object_or_404

@@ -12,6 +11,7 @@ from django.views.decorators.clickjacking import xframe_options_sameorigin
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiResponse, extend_schema
 from rest_framework.fields import BooleanField, ListField, SerializerMethodField
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -68,19 +68,21 @@ class FlowInspectionSerializer(PassiveSerializer):
 class FlowInspectorView(APIView):
     """Flow inspector API"""

+    permission_classes = [IsAdminUser]
+
     flow: Flow
     _logger: BoundLogger
-    permission_classes = []
+
+    def check_permissions(self, request):
+        """Always allow access when in debug mode"""
+        if settings.DEBUG:
+            return None
+        return super().check_permissions(request)

     def setup(self, request: HttpRequest, flow_slug: str):
         super().setup(request, flow_slug=flow_slug)
-        self._logger = get_logger().bind(flow_slug=flow_slug)
         self.flow = get_object_or_404(Flow.objects.select_related(), slug=flow_slug)
-        if settings.DEBUG:
-            return
-        if request.user.has_perm("authentik_flow.inspect_flow", self.flow):
-            return
-        raise Http404
+        self._logger = get_logger().bind(flow_slug=flow_slug)

     @extend_schema(
         responses={
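
The permission model flips here: the left side checks the object-level `inspect_flow` permission in `setup()` and raises Http404 otherwise, while the right side relies on DRF's `IsAdminUser` plus a DEBUG bypass. The bypass pattern in isolation (stock Django/DRF APIs, class name illustrative):

from django.conf import settings
from rest_framework.permissions import IsAdminUser
from rest_framework.views import APIView

class DebugBypassAPIView(APIView):
    """Admin-only view that skips permission checks when DEBUG is on."""

    permission_classes = [IsAdminUser]

    def check_permissions(self, request):
        if settings.DEBUG:
            return None
        return super().check_permissions(request)
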
@@ -24,7 +24,7 @@ ENVIRONMENT = os.getenv(f"{ENV_PREFIX}_ENV", "local")


 def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
-    """Recursively walk through `root`, checking each part of `path` separated by `sep`.
+    """Recursively walk through `root`, checking each part of `path` split by `sep`.
     If at any point a dict does not exist, return default"""
     for comp in path.split(sep):
         if root and comp in root:

@@ -34,19 +34,7 @@ def get_path_from_dict(root: dict, path: str, sep=".", default=None) -> Any:
     return root


-def set_path_in_dict(root: dict, path: str, value: Any, sep="."):
-    """Recursively walk through `root`, checking each part of `path` separated by `sep`
-    and setting the last value to `value`"""
-    # Walk each component of the path
-    path_parts = path.split(sep)
-    for comp in path_parts[:-1]:
-        if comp not in root:
-            root[comp] = {}
-        root = root.get(comp, {})
-    root[path_parts[-1]] = value
-
-
-@dataclass(slots=True)
+@dataclass
 class Attr:
     """Single configuration attribute"""

@@ -67,10 +55,6 @@ class Attr:
     # to the config file containing this change or the file containing this value
     source: Optional[str] = field(default=None)

-    def __post_init__(self):
-        if isinstance(self.value, Attr):
-            raise RuntimeError(f"config Attr with nested Attr for source {self.source}")
-

 class AttrEncoder(JSONEncoder):
     """JSON encoder that can deal with `Attr` classes"""

@@ -243,7 +227,15 @@ class ConfigLoader:

     def set(self, path: str, value: Any, sep="."):
         """Set value using same syntax as get()"""
-        set_path_in_dict(self.raw, path, Attr(value), sep=sep)
+        # Walk sub_dicts before parsing path
+        root = self.raw
+        # Walk each component of the path
+        path_parts = path.split(sep)
+        for comp in path_parts[:-1]:
+            if comp not in root:
+                root[comp] = {}
+            root = root.get(comp, {})
+        root[path_parts[-1]] = Attr(value)


 CONFIG = ConfigLoader()
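
Functionally the two sides agree: `set()` walks the dotted path and writes an `Attr` at the leaf; the left side merely kept that walk in a reusable `set_path_in_dict()` helper. The traversal on its own (example path and value are illustrative):

from typing import Any

def set_path_in_dict(root: dict, path: str, value: Any, sep: str = ".") -> None:
    # Walk each component of the path, creating intermediate dicts as needed.
    path_parts = path.split(sep)
    for comp in path_parts[:-1]:
        root = root.setdefault(comp, {})
    root[path_parts[-1]] = value

cfg: dict = {}
set_path_in_dict(cfg, "outposts.container_image_base", "example-image")  # illustrative
assert cfg == {"outposts": {"container_image_base": "example-image"}}
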
@@ -106,6 +106,7 @@ default_token_length: 60
 impersonation: true

 blueprints_dir: /blueprints
+variables_discovery_dir: /data/variables

 web:
   # No default here as it's set dynamically
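
The new `variables_discovery_dir` key points at a directory from which expression variables are discovered (see the `ExpressionVariable.reload()` change and the new tasks module later in this diff). A hypothetical sketch of such a scan, with the function and mapping shape assumed rather than taken from the branch:

from pathlib import Path

def discover_variables(base: str = "/data/variables") -> dict[str, str]:
    """Map each regular file under `base` to its contents, keyed by file name (sketch)."""
    variables: dict[str, str] = {}
    for file in Path(base).glob("**/*"):
        if file.is_file():
            variables[file.name] = file.read_text(encoding="utf-8")
    return variables
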
@@ -141,7 +141,7 @@ class BaseEvaluator:
         """Create event with supplied data and try to extract as much relevant data
         from the context"""
         context = self._context.copy()
-        # If the result was a complex variable, we don't want to reuse it
+        # If the result was a complex variable, we don't want to re-use it
         context.pop("result", None)
         context.pop("handler", None)
         event_kwargs = context
@@ -1,32 +0,0 @@
-"""Serializer validators"""
-from typing import Optional
-
-from django.utils.translation import gettext_lazy as _
-from rest_framework.exceptions import ValidationError
-from rest_framework.serializers import Serializer
-from rest_framework.utils.representation import smart_repr
-
-
-class RequiredTogetherValidator:
-    """Serializer-level validator that ensures all fields in `fields` are only
-    used together"""
-
-    fields: list[str]
-    requires_context = True
-    message = _("The fields {field_names} must be used together.")
-
-    def __init__(self, fields: list[str], message: Optional[str] = None) -> None:
-        self.fields = fields
-        self.message = message or self.message
-
-    def __call__(self, attrs: dict, serializer: Serializer):
-        """Check that if any of the fields in `self.fields` are set, all of them must be set"""
-        if any(field in attrs for field in self.fields) and not all(
-            field in attrs for field in self.fields
-        ):
-            field_names = ", ".join(self.fields)
-            message = self.message.format(field_names=field_names)
-            raise ValidationError(message, code="required")
-
-    def __repr__(self):
-        return "<%s(fields=%s)>" % (self.__class__.__name__, smart_repr(self.fields))
@@ -18,7 +18,7 @@ from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Provider
 from authentik.outposts.api.service_connections import ServiceConnectionSerializer
-from authentik.outposts.apps import MANAGED_OUTPOST, MANAGED_OUTPOST_NAME
+from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import (
     Outpost,
     OutpostConfig,

@@ -47,16 +47,6 @@ class OutpostSerializer(ModelSerializer):
         source="service_connection", read_only=True
     )

-    def validate_name(self, name: str) -> str:
-        """Validate name (especially for embedded outpost)"""
-        if not self.instance:
-            return name
-        if self.instance.managed == MANAGED_OUTPOST and name != MANAGED_OUTPOST_NAME:
-            raise ValidationError("Embedded outpost's name cannot be changed")
-        if self.instance.name == MANAGED_OUTPOST_NAME:
-            self.instance.managed = MANAGED_OUTPOST
-        return name
-
     def validate_providers(self, providers: list[Provider]) -> list[Provider]:
         """Check that all providers match the type of the outpost"""
         type_map = {
@@ -15,7 +15,6 @@ GAUGE_OUTPOSTS_LAST_UPDATE = Gauge(
     ["outpost", "uid", "version"],
 )
 MANAGED_OUTPOST = "goauthentik.io/outposts/embedded"
-MANAGED_OUTPOST_NAME = "authentik Embedded Outpost"


 class AuthentikOutpostConfig(ManagedAppConfig):

@@ -36,17 +35,14 @@ class AuthentikOutpostConfig(ManagedAppConfig):
             DockerServiceConnection,
             KubernetesServiceConnection,
             Outpost,
+            OutpostConfig,
             OutpostType,
         )

-        if outpost := Outpost.objects.filter(name=MANAGED_OUTPOST_NAME, managed="").first():
-            outpost.managed = MANAGED_OUTPOST
-            outpost.save()
-            return
         outpost, updated = Outpost.objects.update_or_create(
             defaults={
+                "name": "authentik Embedded Outpost",
                 "type": OutpostType.PROXY,
-                "name": MANAGED_OUTPOST_NAME,
             },
             managed=MANAGED_OUTPOST,
         )

@@ -55,4 +51,10 @@ class AuthentikOutpostConfig(ManagedAppConfig):
                 outpost.service_connection = KubernetesServiceConnection.objects.first()
             elif DockerServiceConnection.objects.exists():
                 outpost.service_connection = DockerServiceConnection.objects.first()
+            outpost.config = OutpostConfig(
+                kubernetes_disabled_components=[
+                    "deployment",
+                    "secret",
+                ]
+            )
             outpost.save()
@@ -4,7 +4,6 @@ from datetime import datetime
 from enum import IntEnum
 from typing import Any, Optional

-from asgiref.sync import async_to_sync
 from channels.exceptions import DenyConnection
 from dacite.core import from_dict
 from dacite.data import Data

@@ -15,8 +14,6 @@ from authentik.core.channels import AuthJsonConsumer
 from authentik.outposts.apps import GAUGE_OUTPOSTS_CONNECTED, GAUGE_OUTPOSTS_LAST_UPDATE
 from authentik.outposts.models import OUTPOST_HELLO_INTERVAL, Outpost, OutpostState

-OUTPOST_GROUP = "group_outpost_%(outpost_pk)s"
-

 class WebsocketMessageInstruction(IntEnum):
     """Commands which can be triggered over Websocket"""

@@ -30,9 +27,6 @@ class WebsocketMessageInstruction(IntEnum):
     # Message sent by us to trigger an Update
     TRIGGER_UPDATE = 2

-    # Provider specific message
-    PROVIDER_SPECIFIC = 3
-

 @dataclass(slots=True)
 class WebsocketMessage:

@@ -50,6 +44,8 @@ class OutpostConsumer(AuthJsonConsumer):

     last_uid: Optional[str] = None

+    first_msg = False
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.logger = get_logger()
@@ -72,26 +68,22 @@ class OutpostConsumer(AuthJsonConsumer):
             raise DenyConnection()
         self.outpost = outpost
         self.last_uid = self.channel_name
-        async_to_sync(self.channel_layer.group_add)(
-            OUTPOST_GROUP % {"outpost_pk": str(self.outpost.pk)}, self.channel_name
-        )
-        GAUGE_OUTPOSTS_CONNECTED.labels(
-            outpost=self.outpost.name,
-            uid=self.last_uid,
-            expected=self.outpost.config.kubernetes_replicas,
-        ).inc()

     def disconnect(self, code):
-        if self.outpost:
-            async_to_sync(self.channel_layer.group_discard)(
-                OUTPOST_GROUP % {"outpost_pk": str(self.outpost.pk)}, self.channel_name
-            )
         if self.outpost and self.last_uid:
+            state = OutpostState.for_instance_uid(self.outpost, self.last_uid)
+            if self.channel_name in state.channel_ids:
+                state.channel_ids.remove(self.channel_name)
+                state.save()
             GAUGE_OUTPOSTS_CONNECTED.labels(
                 outpost=self.outpost.name,
                 uid=self.last_uid,
                 expected=self.outpost.config.kubernetes_replicas,
             ).dec()
+            self.logger.debug(
+                "removed outpost instance from cache",
+                instance_uuid=self.last_uid,
+            )

     def receive_json(self, content: Data):
         msg = from_dict(WebsocketMessage, content)

@@ -102,13 +94,26 @@ class OutpostConsumer(AuthJsonConsumer):
             raise DenyConnection()

         state = OutpostState.for_instance_uid(self.outpost, uid)
+        if self.channel_name not in state.channel_ids:
+            state.channel_ids.append(self.channel_name)
         state.last_seen = datetime.now()
-        state.hostname = msg.args.pop("hostname", "")
+        state.hostname = msg.args.get("hostname", "")
+
+        if not self.first_msg:
+            GAUGE_OUTPOSTS_CONNECTED.labels(
+                outpost=self.outpost.name,
+                uid=self.last_uid,
+                expected=self.outpost.config.kubernetes_replicas,
+            ).inc()
+            self.logger.debug(
+                "added outpost instance to cache",
+                instance_uuid=self.last_uid,
+            )
+            self.first_msg = True

         if msg.instruction == WebsocketMessageInstruction.HELLO:
-            state.version = msg.args.pop("version", None)
-            state.build_hash = msg.args.pop("buildHash", "")
-            state.args = msg.args
+            state.version = msg.args.get("version", None)
+            state.build_hash = msg.args.get("buildHash", "")
         elif msg.instruction == WebsocketMessageInstruction.ACK:
             return
         GAUGE_OUTPOSTS_LAST_UPDATE.labels(

@@ -126,14 +131,3 @@ class OutpostConsumer(AuthJsonConsumer):
         self.send_json(
             asdict(WebsocketMessage(instruction=WebsocketMessageInstruction.TRIGGER_UPDATE))
         )
-
-    def event_provider_specific(self, event):
-        """Event handler which can be called by provider-specific
-        implementations to send specific messages to the outpost"""
-        self.send_json(
-            asdict(
-                WebsocketMessage(
-                    instruction=WebsocketMessageInstruction.PROVIDER_SPECIFIC, args=event
-                )
-            )
-        )
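
Instead of channel-layer group membership, the right side records each live connection's channel name in `OutpostState.channel_ids`. The list bookkeeping in isolation (names illustrative):

from dataclasses import dataclass, field

@dataclass
class StateSketch:
    # Stand-in for OutpostState, reduced to the channel_ids bookkeeping.
    channel_ids: list[str] = field(default_factory=list)

def register(state: StateSketch, channel_name: str) -> None:
    if channel_name not in state.channel_ids:
        state.channel_ids.append(channel_name)

def unregister(state: StateSketch, channel_name: str) -> None:
    if channel_name in state.channel_ids:
        state.channel_ids.remove(channel_name)

state = StateSketch()
register(state, "specific.abc123")
register(state, "specific.abc123")  # idempotent
unregister(state, "specific.abc123")
assert state.channel_ids == []
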
@@ -43,10 +43,6 @@ class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
         self.api = AppsV1Api(controller.client)
         self.outpost = self.controller.outpost

-    @property
-    def noop(self) -> bool:
-        return self.is_embedded
-
     @staticmethod
     def reconciler_name() -> str:
         return "deployment"

@@ -24,10 +24,6 @@ class SecretReconciler(KubernetesObjectReconciler[V1Secret]):
         super().__init__(controller)
         self.api = CoreV1Api(controller.client)

-    @property
-    def noop(self) -> bool:
-        return self.is_embedded
-
     @staticmethod
     def reconciler_name() -> str:
         return "secret"

@@ -77,10 +77,7 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusServiceMonitor]):

     @property
     def noop(self) -> bool:
-        if not self._crd_exists():
-            self.logger.debug("CRD doesn't exist")
-            return True
-        return self.is_embedded
+        return (not self._crd_exists()) or (self.is_embedded)

     def _crd_exists(self) -> bool:
         """Check if the Prometheus ServiceMonitor exists"""
@@ -28,8 +28,4 @@ class Migration(migrations.Migration):
                 verbose_name="Managed by authentik",
             ),
         ),
-        migrations.AlterModelOptions(
-            name="outpost",
-            options={"verbose_name": "Outpost", "verbose_name_plural": "Outposts"},
-        ),
     ]
@@ -344,21 +344,11 @@ class Outpost(SerializerModel, ManagedModel):
         user_created = False
         if not user:
             user: User = User.objects.create(username=self.user_identifier)
-            user_created = True
-        attrs = {
-            "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
-            "name": f"Outpost {self.name} Service-Account",
-            "path": USER_PATH_OUTPOSTS,
-        }
-        dirty = False
-        for key, value in attrs.items():
-            if getattr(user, key) != value:
-                dirty = True
-                setattr(user, key, value)
-        if user.has_usable_password():
-            user.set_unusable_password()
-            dirty = True
-        if dirty:
-            user.save()
+            user.set_unusable_password()
+            user_created = True
+        user.type = UserTypes.INTERNAL_SERVICE_ACCOUNT
+        user.name = f"Outpost {self.name} Service-Account"
+        user.path = USER_PATH_OUTPOSTS
+        user.save()
         if user_created:
             self.build_user_permissions(user)
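
The left side only saved the service account when an attribute actually changed; the right side assigns and saves unconditionally. The dirty-check pattern the left used, reduced to a function:

def apply_attrs(obj: object, attrs: dict) -> bool:
    """Set each attribute on obj, returning True if anything changed."""
    dirty = False
    for key, value in attrs.items():
        if getattr(obj, key) != value:
            setattr(obj, key, value)
            dirty = True
    return dirty

class _User:
    name = "old"

user = _User()
assert apply_attrs(user, {"name": "new"}) is True
assert apply_attrs(user, {"name": "new"}) is False  # second pass is a no-op
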
@@ -390,7 +380,7 @@ class Outpost(SerializerModel, ManagedModel):
                 managed=managed,
             )
         except IntegrityError:
-            # Integrity error happens mostly when managed is reused
+            # Integrity error happens mostly when managed is re-used
             Token.objects.filter(managed=managed).delete()
             Token.objects.filter(identifier=self.token_identifier).delete()
             return self.token

@@ -415,22 +405,18 @@ class Outpost(SerializerModel, ManagedModel):
     def __str__(self) -> str:
         return f"Outpost {self.name}"

-    class Meta:
-        verbose_name = _("Outpost")
-        verbose_name_plural = _("Outposts")
-

 @dataclass
 class OutpostState:
     """Outpost instance state, last_seen and version"""

     uid: str
+    channel_ids: list[str] = field(default_factory=list)
     last_seen: Optional[datetime] = field(default=None)
     version: Optional[str] = field(default=None)
     version_should: Version = field(default=OUR_VERSION)
     build_hash: str = field(default="")
     hostname: str = field(default="")
-    args: dict = field(default_factory=dict)

     _outpost: Optional[Outpost] = field(default=None)
@@ -5,6 +5,7 @@ from socket import gethostname
 from typing import Any, Optional
 from urllib.parse import urlparse

+import yaml
 from asgiref.sync import async_to_sync
 from channels.layers import get_channel_layer
 from django.core.cache import cache

@@ -15,7 +16,6 @@ from docker.constants import DEFAULT_UNIX_SOCKET
 from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
 from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
 from structlog.stdlib import get_logger
-from yaml import safe_load

 from authentik.events.monitored_tasks import (
     MonitoredTask,

@@ -25,7 +25,6 @@ from authentik.events.monitored_tasks import (
 )
 from authentik.lib.config import CONFIG
 from authentik.lib.utils.reflection import path_to_class
-from authentik.outposts.consumer import OUTPOST_GROUP
 from authentik.outposts.controllers.base import BaseController, ControllerException
 from authentik.outposts.controllers.docker import DockerClient
 from authentik.outposts.controllers.kubernetes import KubernetesClient

@@ -35,6 +34,7 @@ from authentik.outposts.models import (
     Outpost,
     OutpostModel,
     OutpostServiceConnection,
+    OutpostState,
     OutpostType,
     ServiceConnectionInvalid,
 )
@@ -243,9 +243,10 @@ def _outpost_single_update(outpost: Outpost, layer=None):
     outpost.build_user_permissions(outpost.user)
     if not layer:  # pragma: no cover
         layer = get_channel_layer()
-    group = OUTPOST_GROUP % {"outpost_pk": str(outpost.pk)}
-    LOGGER.debug("sending update", channel=group, outpost=outpost)
-    async_to_sync(layer.group_send)(group, {"type": "event.update"})
+    for state in OutpostState.for_outpost(outpost):
+        for channel in state.channel_ids:
+            LOGGER.debug("sending update", channel=channel, instance=state.uid, outpost=outpost)
+            async_to_sync(layer.send)(channel, {"type": "event.update"})


 @CELERY_APP.task(

@@ -278,7 +279,7 @@ def outpost_connection_discovery(self: MonitoredTask):
         with kubeconfig_path.open("r", encoding="utf8") as _kubeconfig:
             KubernetesServiceConnection.objects.create(
                 name=kubeconfig_local_name,
-                kubeconfig=safe_load(_kubeconfig),
+                kubeconfig=yaml.safe_load(_kubeconfig),
             )
         unix_socket_path = urlparse(DEFAULT_UNIX_SOCKET).path
         socket = Path(unix_socket_path)
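
Update delivery changes shape accordingly: one `group_send` per outpost on the left versus an explicit fan-out over the channel ids stored in `OutpostState` on the right. Both styles side by side, assuming a channels-style layer object is passed in:

from asgiref.sync import async_to_sync

def send_update_via_group(layer, outpost_pk: str) -> None:
    # Left side: the channel layer tracks membership in a per-outpost group.
    group = "group_outpost_%(outpost_pk)s" % {"outpost_pk": outpost_pk}
    async_to_sync(layer.group_send)(group, {"type": "event.update"})

def send_update_via_channels(layer, channel_ids: list[str]) -> None:
    # Right side: fan out over every channel id recorded in OutpostState.
    for channel in channel_ids:
        async_to_sync(layer.send)(channel, {"type": "event.update"})
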
@@ -2,13 +2,11 @@
 from django.urls import reverse
 from rest_framework.test import APITestCase

-from authentik.blueprints.tests import reconcile_app
 from authentik.core.models import PropertyMapping
 from authentik.core.tests.utils import create_test_admin_user, create_test_flow
 from authentik.lib.generators import generate_id
 from authentik.outposts.api.outposts import OutpostSerializer
-from authentik.outposts.apps import MANAGED_OUTPOST
-from authentik.outposts.models import Outpost, OutpostType, default_outpost_config
+from authentik.outposts.models import OutpostType, default_outpost_config
 from authentik.providers.ldap.models import LDAPProvider
 from authentik.providers.proxy.models import ProxyProvider

@@ -24,36 +22,7 @@ class TestOutpostServiceConnectionsAPI(APITestCase):
         self.user = create_test_admin_user()
         self.client.force_login(self.user)

-    @reconcile_app("authentik_outposts")
-    def test_managed_name_change(self):
-        """Test name change for embedded outpost"""
-        embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        self.assertIsNotNone(embedded_outpost)
-        response = self.client.patch(
-            reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
-            {"name": "foo"},
-        )
-        self.assertEqual(response.status_code, 400)
-        self.assertJSONEqual(
-            response.content, {"name": ["Embedded outpost's name cannot be changed"]}
-        )
-
-    @reconcile_app("authentik_outposts")
-    def test_managed_without_managed(self):
-        """Test name change for embedded outpost"""
-        embedded_outpost = Outpost.objects.filter(managed=MANAGED_OUTPOST).first()
-        self.assertIsNotNone(embedded_outpost)
-        embedded_outpost.managed = ""
-        embedded_outpost.save()
-        response = self.client.patch(
-            reverse("authentik_api:outpost-detail", kwargs={"pk": embedded_outpost.pk}),
-            {"name": "foo"},
-        )
-        self.assertEqual(response.status_code, 200)
-        embedded_outpost.refresh_from_db()
-        self.assertEqual(embedded_outpost.managed, MANAGED_OUTPOST)
-
-    def test_outpost_validation(self):
+    def test_outpost_validaton(self):
         """Test Outpost validation"""
         valid = OutpostSerializer(
             data={
@@ -7,7 +7,7 @@ from django.test import TransactionTestCase

 from authentik import __version__
 from authentik.core.tests.utils import create_test_flow
-from authentik.outposts.consumer import WebsocketMessage, WebsocketMessageInstruction
+from authentik.outposts.channels import WebsocketMessage, WebsocketMessageInstruction
 from authentik.outposts.models import Outpost, OutpostType
 from authentik.providers.proxy.models import ProxyProvider
 from authentik.root import websocket

@@ -7,7 +7,7 @@ from authentik.outposts.api.service_connections import (
     KubernetesServiceConnectionViewSet,
     ServiceConnectionViewSet,
 )
-from authentik.outposts.consumer import OutpostConsumer
+from authentik.outposts.channels import OutpostConsumer
 from authentik.root.middleware import ChannelsLoggingMiddleware

 websocket_urlpatterns = [
@@ -7,11 +7,7 @@ GAUGE_POLICIES_CACHED = Gauge(
     "authentik_policies_cached",
     "Cached Policies",
 )
-HIST_POLICIES_ENGINE_TOTAL_TIME = Histogram(
-    "authentik_policies_engine_time_total_seconds",
-    "(Total) Duration the policy engine took to evaluate a result.",
-    ["obj_type", "obj_pk"],
-)
 HIST_POLICIES_EXECUTION_TIME = Histogram(
     "authentik_policies_execution_time",
     "Execution times for single policies",

@@ -21,7 +17,6 @@ HIST_POLICIES_EXECUTION_TIME = Histogram(
         "binding_target_name",
         "object_pk",
         "object_type",
-        "mode",
     ],
 )

@@ -1,7 +1,6 @@
 """authentik policy engine"""
 from multiprocessing import Pipe, current_process
 from multiprocessing.connection import Connection
-from timeit import default_timer
 from typing import Iterator, Optional

 from django.core.cache import cache

@@ -11,8 +10,6 @@ from sentry_sdk.tracing import Span
 from structlog.stdlib import BoundLogger, get_logger

 from authentik.core.models import User
-from authentik.lib.utils.reflection import class_to_path
-from authentik.policies.apps import HIST_POLICIES_ENGINE_TOTAL_TIME, HIST_POLICIES_EXECUTION_TIME
 from authentik.policies.exceptions import PolicyEngineException
 from authentik.policies.models import Policy, PolicyBinding, PolicyBindingModel, PolicyEngineMode
 from authentik.policies.process import PolicyProcess, cache_key
@@ -80,33 +77,6 @@ class PolicyEngine:
         if binding.policy is not None and binding.policy.__class__ == Policy:
             raise PolicyEngineException(f"Policy '{binding.policy}' is root type")

-    def _check_cache(self, binding: PolicyBinding):
-        if not self.use_cache:
-            return False
-        before = default_timer()
-        key = cache_key(binding, self.request)
-        cached_policy = cache.get(key, None)
-        duration = max(default_timer() - before, 0)
-        if not cached_policy:
-            return False
-        self.logger.debug(
-            "P_ENG: Taking result from cache",
-            binding=binding,
-            cache_key=key,
-            request=self.request,
-        )
-        HIST_POLICIES_EXECUTION_TIME.labels(
-            binding_order=binding.order,
-            binding_target_type=binding.target_type,
-            binding_target_name=binding.target_name,
-            object_pk=str(self.request.obj.pk),
-            object_type=class_to_path(self.request.obj.__class__),
-            mode="cache_retrieve",
-        ).observe(duration)
-        # It's a bit silly to time this, but
-        self.__cached_policies.append(cached_policy)
-        return True
-
     def build(self) -> "PolicyEngine":
         """Build wrapper which monitors performance"""
         with (

@@ -114,10 +84,6 @@ class PolicyEngine:
                 op="authentik.policy.engine.build",
                 description=self.__pbm,
             ) as span,
-            HIST_POLICIES_ENGINE_TOTAL_TIME.labels(
-                obj_type=class_to_path(self.__pbm.__class__),
-                obj_pk=str(self.__pbm.pk),
-            ).time(),
         ):
             span: Span
             span.set_data("pbm", self.__pbm)
@@ -126,7 +92,16 @@ class PolicyEngine:
             self.__expected_result_count += 1

             self._check_policy_type(binding)
-            if self._check_cache(binding):
+            key = cache_key(binding, self.request)
+            cached_policy = cache.get(key, None)
+            if cached_policy and self.use_cache:
+                self.logger.debug(
+                    "P_ENG: Taking result from cache",
+                    binding=binding,
+                    cache_key=key,
+                    request=self.request,
+                )
+                self.__cached_policies.append(cached_policy)
                 continue
             self.logger.debug("P_ENG: Evaluating policy", binding=binding, request=self.request)
             our_end, task_end = Pipe(False)
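
The logic is equivalent either way: compute `cache_key(binding, request)`, take a hit from the cache when `use_cache` is set, otherwise spawn a `PolicyProcess`. Reduced to its core, with a plain dict standing in for Django's cache and an illustrative key format:

cache: dict = {}

def check_cache(key: str, use_cache: bool) -> object | None:
    """Return the cached result for `key`, or None to force evaluation (sketch)."""
    if not use_cache:
        return None
    return cache.get(key)

cache["policy_abc"] = {"passing": True}
assert check_cache("policy_abc", use_cache=True) == {"passing": True}
assert check_cache("policy_abc", use_cache=False) is None
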
@@ -1,10 +1,32 @@
 """Expression Policy API"""
+from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import ModelViewSet

 from authentik.core.api.used_by import UsedByMixin
 from authentik.policies.api.policies import PolicySerializer
 from authentik.policies.expression.evaluator import PolicyEvaluator
-from authentik.policies.expression.models import ExpressionPolicy
+from authentik.policies.expression.models import ExpressionPolicy, ExpressionVariable
+
+
+class ExpressionVariableSerializer(ModelSerializer):
+    """Expression Variable Serializer"""
+
+    class Meta:
+        model = ExpressionVariable
+        fields = "__all__"
+        extra_kwargs = {
+            "managed": {"read_only": True},
+        }
+
+
+class ExpressionVariableViewSet(UsedByMixin, ModelViewSet):
+    """Expression Variable Viewset"""
+
+    queryset = ExpressionVariable.objects.all()
+    serializer_class = ExpressionVariableSerializer
+    filterset_fields = "__all__"
+    ordering = ["name"]
+    search_fields = ["name"]


 class ExpressionPolicySerializer(PolicySerializer):

@@ -18,7 +40,7 @@ class ExpressionPolicySerializer(PolicySerializer):

     class Meta:
         model = ExpressionPolicy
-        fields = PolicySerializer.Meta.fields + ["expression"]
+        fields = PolicySerializer.Meta.fields + ["expression", "variables"]


 class ExpressionPolicyViewSet(UsedByMixin, ModelViewSet):
@@ -13,7 +13,7 @@ from authentik.policies.types import PolicyRequest, PolicyResult

 LOGGER = get_logger()
 if TYPE_CHECKING:
-    from authentik.policies.expression.models import ExpressionPolicy
+    from authentik.policies.expression.models import ExpressionPolicy, ExpressionVariable


 class PolicyEvaluator(BaseEvaluator):

@@ -30,6 +30,7 @@ class PolicyEvaluator(BaseEvaluator):
         # update website/docs/expressions/_functions.md
         self._context["ak_message"] = self.expr_func_message
         self._context["ak_user_has_authenticator"] = self.expr_func_user_has_authenticator
+        self._context["ak_variables"] = {}

     def expr_func_message(self, message: str):
         """Wrapper to append to messages list, which is returned with PolicyResult"""

@@ -52,6 +53,12 @@ class PolicyEvaluator(BaseEvaluator):
         self._context["ak_client_ip"] = ip_address(get_client_ip(request))
         self._context["http_request"] = request

+    def set_variables(self, variables: list["ExpressionVariable"]):
+        """Update context base on expression policy variables"""
+        for variable in variables:
+            variable.reload()
+            self._context["ak_variables"][variable.name] = variable.value
+
     def handle_error(self, exc: Exception, expression_source: str):
         """Exception Handler"""
         raise PolicyException(exc)
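
With `set_variables()` filling `ak_variables`, a policy expression can read centrally managed values. A hypothetical policy body with stand-ins for the evaluator-injected names (the variable name and check are invented for illustration):

# Stand-ins for names the evaluator injects; the final expression is the result.
class _User:
    email = "jane@example.com"

class _Request:
    user = _User()

request = _Request()
ak_variables = {"allowed_domain": "example.com"}

# Hypothetical policy body: allow only users from the configured domain.
result = request.user.email.endswith(ak_variables["allowed_domain"])
assert result is True
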
@@ -0,0 +1,48 @@
+# Generated by Django 4.2.5 on 2023-09-29 00:25
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentik_policies_expression", "0004_expressionpolicy_authentik_p_policy__fb6feb_idx"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="ExpressionVariable",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
+                    ),
+                ),
+                ("created", models.DateTimeField(auto_now_add=True)),
+                ("last_updated", models.DateTimeField(auto_now=True)),
+                (
+                    "managed",
+                    models.TextField(
+                        default=None,
+                        help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
+                        null=True,
+                        unique=True,
+                        verbose_name="Managed by authentik",
+                    ),
+                ),
+                ("name", models.TextField(unique=True)),
+                ("value", models.TextField()),
+            ],
+            options={
+                "verbose_name": "Expression Variable",
+                "verbose_name_plural": "Expression Variables",
+            },
+        ),
+        migrations.AddField(
+            model_name="expressionpolicy",
+            name="variables",
+            field=models.ManyToManyField(
+                blank=True, to="authentik_policies_expression.expressionvariable"
+            ),
+        ),
+    ]
@@ -1,18 +1,66 @@
 """authentik expression Policy Models"""
+from pathlib import Path
+
 from django.db import models
 from django.utils.translation import gettext as _
 from rest_framework.serializers import BaseSerializer
+from structlog.stdlib import get_logger

+from authentik.blueprints.models import ManagedModel
+from authentik.lib.config import CONFIG
+from authentik.lib.models import CreatedUpdatedModel, SerializerModel
 from authentik.policies.expression.evaluator import PolicyEvaluator
 from authentik.policies.models import Policy
 from authentik.policies.types import PolicyRequest, PolicyResult

+LOGGER = get_logger()
+
+MANAGED_DISCOVERED = "goauthentik.io/variables/discovered/%s"
+
+
+class ExpressionVariable(SerializerModel, ManagedModel, CreatedUpdatedModel):
+    """Variable that can be given to expression policies"""
+
+    name = models.TextField(unique=True)
+    value = models.TextField()
+
+    @property
+    def serializer(self) -> type[BaseSerializer]:
+        from authentik.policies.expression.api import ExpressionVariableSerializer
+
+        return ExpressionVariableSerializer
+
+    def reload(self):
+        """Reload a variable from disk if it's managed"""
+        if self.managed != MANAGED_DISCOVERED % self.name:
+            return
+        path = Path(CONFIG.get("variables_discovery_dir")) / Path(self.name)
+        try:
+            with open(path, "r", encoding="utf-8") as _file:
+                body = _file.read()
+                if body != self.value:
+                    self.value = body
+                    self.save()
+        except (OSError, ValueError) as exc:
+            LOGGER.warning(
+                "Failed to reload variable, continuing anyway",
+                exc=exc,
+                file=path,
+                variable=self.name,
+            )
+
+    class Meta:
+        verbose_name = _("Expression Variable")
+        verbose_name_plural = _("Expression Variables")
+
+
 class ExpressionPolicy(Policy):
     """Execute arbitrary Python code to implement custom checks and validation."""

     expression = models.TextField()

+    variables = models.ManyToManyField(ExpressionVariable, blank=True)
+
     @property
     def serializer(self) -> type[BaseSerializer]:
         from authentik.policies.expression.api import ExpressionPolicySerializer
@@ -28,6 +76,7 @@ class ExpressionPolicy(Policy):
         evaluator = PolicyEvaluator(self.name)
         evaluator.policy = self
         evaluator.set_policy_request(request)
+        evaluator.set_variables(self.variables.all())
         return evaluator.evaluate(self.expression)

     def save(self, *args, **kwargs):
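To see how the pieces fit together, here is a hedged ORM sketch (not part of the diff; names and values are hypothetical) that creates a variable and binds it to an expression policy:

# Hypothetical usage, e.g. from a Django shell:
from authentik.policies.expression.models import ExpressionPolicy, ExpressionVariable

var = ExpressionVariable.objects.create(name="allowed_domain", value="example.com")
policy = ExpressionPolicy.objects.create(
    name="email-domain-check",
    expression='return request.user.email.endswith(ak_variables["allowed_domain"])',
)
policy.variables.add(var)
# At evaluation time, passes() calls evaluator.set_variables(), which exposes
# the value as ak_variables["allowed_domain"].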
86 authentik/policies/expression/tasks.py Normal file
@@ -0,0 +1,86 @@
+"""Expression tasks"""
+from glob import glob
+from pathlib import Path
+
+from django.utils.translation import gettext_lazy as _
+from structlog.stdlib import get_logger
+from watchdog.events import (
+    FileCreatedEvent,
+    FileModifiedEvent,
+    FileSystemEvent,
+    FileSystemEventHandler,
+)
+from watchdog.observers import Observer
+
+from authentik.events.monitored_tasks import (
+    MonitoredTask,
+    TaskResult,
+    TaskResultStatus,
+    prefill_task,
+)
+from authentik.lib.config import CONFIG
+from authentik.policies.expression.models import MANAGED_DISCOVERED, ExpressionVariable
+from authentik.root.celery import CELERY_APP
+
+LOGGER = get_logger()
+_file_watcher_started = False
+
+
+@CELERY_APP.task(bind=True, base=MonitoredTask)
+@prefill_task
+def variable_discovery(self: MonitoredTask):
+    """Discover, import and update variables from the filesystem"""
+    variables = {}
+    discovered = 0
+    base_path = Path(CONFIG.get("variables_discovery_dir")).absolute()
+    for file in glob(str(base_path) + "/**", recursive=True):
+        path = Path(file)
+        if not path.exists():
+            continue
+        if path.is_dir():
+            continue
+        try:
+            with open(path, "r", encoding="utf-8") as _file:
+                body = _file.read()
+                variables[str(path.relative_to(base_path))] = body
+                discovered += 1
+        except (OSError, ValueError) as exc:
+            LOGGER.warning("Failed to open file", exc=exc, file=path)
+    for name, value in variables.items():
+        variable = ExpressionVariable.objects.filter(managed=MANAGED_DISCOVERED % name).first()
+        if not variable:
+            variable = ExpressionVariable(name=name, managed=MANAGED_DISCOVERED % name)
+        if variable.value != value:
+            variable.value = value
+            variable.save()
+    self.set_status(
+        TaskResult(
+            TaskResultStatus.SUCCESSFUL,
+            messages=[_("Successfully imported %(count)d files." % {"count": discovered})],
+        )
+    )
+
+
+class VariableEventHandler(FileSystemEventHandler):
+    """Event handler for variable events"""
+
+    def on_any_event(self, event: FileSystemEvent):
+        if not isinstance(event, (FileCreatedEvent, FileModifiedEvent)):
+            return
+        if event.is_directory:
+            return
+        LOGGER.debug("variable file changed, starting discovery", file=event.src_path)
+        variable_discovery.delay()
+
+
+def start_variables_watcher():
+    """Start variables watcher, if it's not running already."""
+    # This function might be called twice since it's called on celery startup
+    # pylint: disable=global-statement
+    global _file_watcher_started
+    if _file_watcher_started:
+        return
+    observer = Observer()
+    observer.schedule(VariableEventHandler(), CONFIG.get("variables_discovery_dir"), recursive=True)
+    observer.start()
+    _file_watcher_started = True
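The diff does not show where `start_variables_watcher` is called from; a minimal sketch of one plausible wiring via Celery's `worker_ready` signal (the hook point is an assumption, not shown in this compare):

# Hypothetical startup hook; authentik's actual call site is not in this diff.
from celery.signals import worker_ready

from authentik.policies.expression.tasks import start_variables_watcher


@worker_ready.connect
def start_watcher(sender=None, **kwargs):
    # Safe to call repeatedly: the function returns early once started.
    start_variables_watcher()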
authentik/policies/expression/urls.py

@@ -1,4 +1,7 @@
 """API URLs"""
-from authentik.policies.expression.api import ExpressionPolicyViewSet
+from authentik.policies.expression.api import ExpressionPolicyViewSet, ExpressionVariableViewSet

-api_urlpatterns = [("policies/expression", ExpressionPolicyViewSet)]
+api_urlpatterns = [
+    ("policies/expression/variables", ExpressionVariableViewSet),
+    ("policies/expression", ExpressionPolicyViewSet),
+]
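Note the ordering: the more specific `policies/expression/variables` prefix comes before `policies/expression`. These `(prefix, viewset)` tuples are presumably collected into a DRF router elsewhere; a hedged sketch of that registration step:

# Hypothetical consumer of api_urlpatterns; the real mounting code lives
# elsewhere in authentik and is not shown in this compare.
from rest_framework.routers import DefaultRouter

from authentik.policies.expression.urls import api_urlpatterns

router = DefaultRouter()
for prefix, viewset in api_urlpatterns:
    router.register(prefix, viewset)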
authentik/policies/models.py

@@ -190,8 +190,8 @@ class Policy(SerializerModel, CreatedUpdatedModel):
         verbose_name_plural = _("Policies")

         permissions = [
-            ("view_policy_cache", _("View Policy's cache metrics")),
-            ("clear_policy_cache", _("Clear Policy's cache metrics")),
+            ("view_policy_cache", "View Policy's cache metrics"),
+            ("clear_policy_cache", "Clear Policy's cache metrics"),
         ]

     class PolicyMeta:
@ -11,7 +11,6 @@ from structlog.stdlib import get_logger
|
||||||
from authentik.events.models import Event, EventAction
|
from authentik.events.models import Event, EventAction
|
||||||
from authentik.lib.config import CONFIG
|
from authentik.lib.config import CONFIG
|
||||||
from authentik.lib.utils.errors import exception_to_string
|
from authentik.lib.utils.errors import exception_to_string
|
||||||
from authentik.lib.utils.reflection import class_to_path
|
|
||||||
from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
|
from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
|
||||||
from authentik.policies.exceptions import PolicyException
|
from authentik.policies.exceptions import PolicyException
|
||||||
from authentik.policies.models import PolicyBinding
|
from authentik.policies.models import PolicyBinding
|
||||||
|
@ -129,8 +128,9 @@ class PolicyProcess(PROCESS_CLASS):
|
||||||
binding_target_type=self.binding.target_type,
|
binding_target_type=self.binding.target_type,
|
||||||
binding_target_name=self.binding.target_name,
|
binding_target_name=self.binding.target_name,
|
||||||
object_pk=str(self.request.obj.pk),
|
object_pk=str(self.request.obj.pk),
|
||||||
object_type=class_to_path(self.request.obj.__class__),
|
object_type=(
|
||||||
mode="execute_process",
|
f"{self.request.obj._meta.app_label}.{self.request.obj._meta.model_name}"
|
||||||
|
),
|
||||||
).time(),
|
).time(),
|
||||||
):
|
):
|
||||||
span: Span
|
span: Span
|
||||||
|
|
|
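The metric's `object_type` label switches from a dotted import path to Django's `app_label.model_name` form. A small sketch contrasting the two (the `class_to_path` body below is a stand-in for `authentik.lib.utils.reflection.class_to_path`, whose behavior is assumed here):

# Stand-in with assumed behavior; not authentik's actual helper.
def class_to_path(cls: type) -> str:
    return f"{cls.__module__}.{cls.__name__}"

# For a hypothetical core User model:
#   old label: class_to_path(User)                              -> "authentik.core.models.User"
#   new label: f"{obj._meta.app_label}.{obj._meta.model_name}"  -> "authentik_core.user"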
authentik/policies/signals.py

@@ -17,7 +17,7 @@ LOGGER = get_logger()
 @receiver(monitoring_set)
 def monitoring_set_policies(sender, **kwargs):
     """set policy gauges"""
-    GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}*") or []))
+    GAUGE_POLICIES_CACHED.set(len(cache.keys(f"{CACHE_PREFIX}_*") or []))


 @receiver(post_save, sender=Policy)
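The added underscore narrows the glob that `cache.keys()` matches against. A self-contained demo with hypothetical keys (the real `CACHE_PREFIX` value is not shown in this diff):

# fnmatch approximates the glob matching used for cache key patterns.
from fnmatch import filter as match

CACHE_PREFIX = "policy"  # hypothetical value
keys = ["policy_0_abcd", "policy_pk_list", "policyresult_xyz"]

print(match(keys, f"{CACHE_PREFIX}*"))   # all three keys, including "policyresult_xyz"
print(match(keys, f"{CACHE_PREFIX}_*"))  # only the two "policy_" keys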
@ -1,27 +0,0 @@
|
||||||
# Generated by Django 5.0 on 2023-12-22 23:20
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
dependencies = [
|
|
||||||
("authentik_providers_oauth2", "0016_alter_refreshtoken_token"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name="accesstoken",
|
|
||||||
name="session_id",
|
|
||||||
field=models.CharField(blank=True, default=""),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name="authorizationcode",
|
|
||||||
name="session_id",
|
|
||||||
field=models.CharField(blank=True, default=""),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name="refreshtoken",
|
|
||||||
name="session_id",
|
|
||||||
field=models.CharField(blank=True, default=""),
|
|
||||||
),
|
|
||||||
]
|
|
Some files were not shown because too many files have changed in this diff.