Compare commits: trustchain...blueprints (12 commits)

Commits (SHA1):
a331affd42
9d3bd8418d
9ee77993a9
42e2eb1529
e2d18f6011
d811aabd38
c592599633
8b13da354f
79175266cc
629af26742
edc7f2fdb0
a95c33f1ca

.bumpversion.cfg

@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.10.6
+current_version = 2023.6.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)

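Note: the `parse` setting above is an ordinary regular expression with named groups. A minimal Python sketch (the version string is only an example) of how bumpversion-style tooling splits a version with it:

import re

# Same named-group pattern as the `parse` setting in .bumpversion.cfg.
VERSION_PATTERN = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

match = VERSION_PATTERN.match("2023.10.6")
assert match is not None
print(match.group("major"), match.group("minor"), match.group("patch"))  # 2023 10 6
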
.dockerignore

@@ -1,11 +1,9 @@
 env
 htmlcov
 *.env.yml
 **/node_modules
 dist/**
-*Dockerfile
-blueprints/local
+build/**
+build_docs/**
+Dockerfile
 .git
-!gen-ts-api/node_modules
-!gen-ts-api/dist/**
-authentik/enterprise

.github/actions/setup/action.yml

@@ -2,39 +2,36 @@ name: "Setup authentik testing environment"
 description: "Setup authentik testing environment"

 inputs:
-  postgresql_version:
+  postgresql_tag:
     description: "Optional postgresql image tag"
     default: "12"

 runs:
   using: "composite"
   steps:
-    - name: Install poetry & deps
+    - name: Install poetry
      shell: bash
      run: |
        pipx install poetry || true
-        sudo apt-get update
-        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
+        sudo apt update
+        sudo apt install -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
    - name: Setup python and restore poetry
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v3
      with:
-        python-version-file: 'pyproject.toml'
+        python-version: "3.11"
        cache: "poetry"
    - name: Setup node
      uses: actions/setup-node@v3
      with:
-        node-version-file: web/package.json
+        node-version: "20"
        cache: "npm"
        cache-dependency-path: web/package-lock.json
    - name: Setup go
      uses: actions/setup-go@v4
      with:
        go-version-file: "go.mod"
    - name: Setup dependencies
      shell: bash
      run: |
-        export PSQL_TAG=${{ inputs.postgresql_version }}
+        export PSQL_TAG=${{ inputs.postgresql_tag }}
        docker-compose -f .github/actions/setup/docker-compose.yml up -d
        poetry env use python3.11
        poetry install
        cd web && npm ci
    - name: Generate config

.github/codecov.yml

@@ -6,5 +6,5 @@ coverage:
        # adjust accordingly based on how flaky your tests are
        # this allows a 1% drop from the previous base commit coverage
        threshold: 1%
-comment:
-  after_n_builds: 3
+notify:
+  after_n_builds: 3

.github/dependabot.yml

@@ -30,7 +30,6 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "web:"
-    # TODO: deduplicate these groups
     groups:
       sentry:
         patterns:

@@ -41,50 +40,13 @@ updates:
           - "babel-*"
       eslint:
         patterns:
-          - "@typescript-eslint/*"
+          - "@typescript-eslint/eslint-*"
           - "eslint"
           - "eslint-*"
       storybook:
         patterns:
           - "@storybook/*"
           - "*storybook*"
       esbuild:
         patterns:
           - "@esbuild/*"
-  - package-ecosystem: npm
-    directory: "/tests/wdio"
-    schedule:
-      interval: daily
-      time: "04:00"
-    labels:
-      - dependencies
-    open-pull-requests-limit: 10
-    commit-message:
-      prefix: "web:"
-    # TODO: deduplicate these groups
-    groups:
-      sentry:
-        patterns:
-          - "@sentry/*"
-      babel:
-        patterns:
-          - "@babel/*"
-          - "babel-*"
-      eslint:
-        patterns:
-          - "@typescript-eslint/*"
-          - "eslint"
-          - "eslint-*"
-      storybook:
-        patterns:
-          - "@storybook/*"
-          - "*storybook*"
-      esbuild:
-        patterns:
-          - "@esbuild/*"
-      wdio:
-        patterns:
-          - "@wdio/*"
   - package-ecosystem: npm
     directory: "/website"
     schedule:

.github/workflows/ci-main.yml

@@ -11,7 +11,6 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 env:
   POSTGRES_DB: authentik

@@ -34,7 +33,7 @@ jobs:
           - ruff
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run job

@@ -42,44 +41,31 @@ jobs:
   test-migrations:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run migrations
        run: poetry run python -m lifecycle.migrate
  test-migrations-from-stable:
-    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        psql:
-          - 12-alpine
-          - 15-alpine
-          - 16-alpine
    continue-on-error: true
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Setup authentik env
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: checkout stable
        run: |
-          # Delete all poetry envs
-          rm -rf /home/runner/.cache/pypoetry
          # Copy current, latest config to local
          cp authentik/lib/default.yml local.env.yml
          cp -R .github ..
          cp -R scripts ..
-          git checkout version/$(python -c "from authentik import __version__; print(__version__)")
+          git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
          rm -rf .github/ scripts/
          mv ../.github ../scripts .
      - name: Setup authentik env (ensure stable deps are installed)
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: run migrations to stable
        run: poetry run python -m lifecycle.migrate
      - name: checkout current code

@@ -89,13 +75,9 @@ jobs:
          git reset --hard HEAD
          git clean -d -fx .
          git checkout $GITHUB_SHA
-          # Delete previous poetry env
-          rm -rf $(poetry env info --path)
          poetry install
      - name: Setup authentik env (ensure latest deps are installed)
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: migrate to latest
        run: poetry run python -m lifecycle.migrate
  test-unittest:

@@ -108,13 +90,12 @@ jobs:
        psql:
          - 12-alpine
          - 15-alpine
-          - 16-alpine
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
        with:
-          postgresql_version: ${{ matrix.psql }}
+          postgresql_tag: ${{ matrix.psql }}
      - name: run unittest
        run: |
          poetry run make test

@@ -127,7 +108,7 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Create k8s Kind Cluster

@@ -163,7 +144,7 @@ jobs:
          - name: flows
            glob: tests/e2e/test_flows*
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Setup e2e env (chrome, etc)

@@ -203,36 +184,30 @@ jobs:
  build:
    needs: ci-core-mark
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: generate ts client
        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}

@@ -245,8 +220,6 @@ jobs:
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Comment on PR
        if: github.event_name == 'pull_request'
        continue-on-error: true

@@ -256,36 +229,30 @@ jobs:
  build-arm64:
    needs: ci-core-mark
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: generate ts client
        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}

@@ -299,5 +266,3 @@ jobs:
-            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/arm64
          cache-from: type=gha
          cache-to: type=gha,mode=max

.github/workflows/ci-outpost.yml

@@ -9,13 +9,12 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 jobs:
   lint-golint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"

@@ -30,18 +29,16 @@ jobs:
      - name: golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
-          version: v1.54.2
+          version: v1.52.2
          args: --timeout 5000s --verbose
-          skip-cache: true
+          skip-pkg-cache: true
  test-unittest:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Generate API
        run: make gen-client-go
      - name: Go unittests

@@ -66,24 +63,21 @@ jobs:
          - ldap
          - radius
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-        env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io

@@ -92,7 +86,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |

@@ -105,8 +99,6 @@ jobs:
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/amd64,linux/arm64
-          context: .
          cache-from: type=gha
          cache-to: type=gha,mode=max
  build-binary:
    timeout-minutes: 120
    needs:

@@ -122,15 +114,15 @@ jobs:
        goos: [linux]
        goarch: [amd64, arm64]
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Generate API

.github/workflows/ci-web.yml

@@ -9,38 +9,31 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 jobs:
   lint-eslint:
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
        run: npm ci
      - name: Generate API
        run: make gen-client-ts
      - name: Eslint
-        working-directory: ${{ matrix.project }}/
+        working-directory: web/
        run: npm run lint
  lint-build:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/

@@ -52,33 +45,27 @@ jobs:
        run: npm run tsc
  lint-prettier:
    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
        run: npm ci
      - name: Generate API
        run: make gen-client-ts
      - name: prettier
-        working-directory: ${{ matrix.project }}/
+        working-directory: web/
        run: npm run prettier-check
  lint-lit-analyse:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/

@@ -107,10 +94,10 @@ jobs:
      - ci-web-mark
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/

.github/workflows/ci-website.yml

@@ -9,16 +9,15 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 jobs:
   lint-prettier:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/

@@ -29,10 +28,10 @@ jobs:
  test:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/

@@ -50,10 +49,10 @@ jobs:
      - build
      - build-docs-only
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/

.github/workflows/codeql-analysis.yml

@@ -23,7 +23,7 @@ jobs:
        language: ["go", "javascript", "python"]
    steps:
      - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Initialize CodeQL

.github/workflows/gha-cache-cleanup.yml (deleted)

@@ -1,34 +0,0 @@
----
-# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
-name: Cleanup cache after PR is closed
-on:
-  pull_request:
-    types:
-      - closed
-
-jobs:
-  cleanup:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v4
-
-      - name: Cleanup
-        run: |
-          gh extension install actions/gh-actions-cache
-
-          REPO=${{ github.repository }}
-          BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
-
-          echo "Fetching list of cache key"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
-
-          # Setting this to not fail the workflow while deleting cache keys.
-          set +e
-          echo "Deleting caches..."
-          for cacheKey in $cacheKeysForPR; do
-            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
-          done
-          echo "Done"
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/ghcr-retention.yml

@@ -1,8 +1,8 @@
 name: ghcr-retention

 on:
-  # schedule:
-  #   - cron: "0 0 * * *" # every day at midnight
+  schedule:
+    - cron: "0 0 * * *" # every day at midnight
   workflow_dispatch:

 jobs:

@@ -11,7 +11,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}

.github/workflows/image-compress.yml

@@ -29,11 +29,11 @@ jobs:
        github.event.pull_request.head.repo.full_name == github.repository)
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Compress images

.github/workflows/publish-source-docs.yml

@@ -5,22 +5,16 @@ on:
     branches:
       - main

-env:
-  POSTGRES_DB: authentik
-  POSTGRES_USER: authentik
-  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
-
 jobs:
   publish-source-docs:
     runs-on: ubuntu-latest
-    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: generate docs
        run: |
          poetry run make migrate
          poetry run ak build_source_docs
      - name: Publish
        uses: netlify/actions/cli@master

.github/workflows/release-next-branch.yml

@@ -6,7 +6,6 @@ on:
   workflow_dispatch:

 permissions:
-  # Needed to be able to push to the next branch
   contents: write

 jobs:

@@ -14,7 +13,7 @@ jobs:
    runs-on: ubuntu-latest
    environment: internal-production
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: main
      - run: |

.github/workflows/release-publish.yml

@@ -7,37 +7,29 @@ on:
 jobs:
   build-server:
     runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
      - name: Docker Login Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: make empty clients
        run: |
          mkdir -p ./gen-ts-api
          mkdir -p ./gen-go-api
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name == 'release' }}
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}

@@ -55,9 +47,6 @@ jobs:
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost:
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    strategy:
      fail-fast: false
      matrix:

@@ -66,34 +55,30 @@ jobs:
          - ldap
          - radius
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
      - name: make empty clients
        run: |
          mkdir -p ./gen-ts-api
          mkdir -p ./gen-go-api
      - name: Docker Login Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |

@@ -105,16 +90,12 @@ jobs:
            ghcr.io/goauthentik/${{ matrix.type }}:latest
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
          context: .
          build-args: |
            VERSION=${{ steps.ev.outputs.version }}
-            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost-binary:
    timeout-minutes: 120
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload binaries to the release
-      contents: write
    strategy:
      fail-fast: false
      matrix:

@@ -125,13 +106,13 @@ jobs:
        goos: [linux, darwin]
        goarch: [amd64, arm64]
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Build web

@@ -160,7 +141,7 @@ jobs:
      - build-outpost-binary
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Run test suite in final docker images
        run: |
          echo "PG_PASS=$(openssl rand -base64 32)" >> .env

@@ -176,7 +157,7 @@ jobs:
      - build-outpost-binary
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev

.github/workflows/release-tag.yml

@@ -10,13 +10,12 @@ jobs:
    name: Create Release from Tag
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Pre-release test
        run: |
          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
          docker buildx install
-          mkdir -p ./gen-ts-api
          docker build -t testing:latest .
          echo "AUTHENTIK_IMAGE=testing" >> .env
          echo "AUTHENTIK_TAG=latest" >> .env

@@ -24,7 +23,7 @@ jobs:
          docker-compose start postgresql redis
          docker-compose run -u root server test-all
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}

.github/workflows/repo-stale.yml

@@ -6,15 +6,15 @@ on:
   workflow_dispatch:

 permissions:
   # Needed to update issues and PRs
   issues: write
   pull-requests: write

 jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
       - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}

.github/workflows/translation-compile.yml

@@ -16,11 +16,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Setup authentik env

.github/workflows/translation-rename.yml

@@ -1,5 +1,4 @@
 # Rename transifex pull requests to have a correct naming
-# Also enables auto squash-merge
 name: authentik-translation-transifex-rename

 on:

@@ -12,7 +11,7 @@ jobs:
    if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}

@@ -38,8 +37,3 @@ jobs:
            -H "X-GitHub-Api-Version: 2022-11-28" \
            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
            -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
-      - uses: peter-evans/enable-pull-request-automerge@v3
-        with:
-          token: ${{ steps.generate_token.outputs.token }}
-          pull-request-number: ${{ github.event.pull_request.number }}
-          merge-method: squash

.github/workflows/web-api-publish.yml

@@ -10,16 +10,16 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          registry-url: "https://registry.npmjs.org"
      - name: Generate API Client
        run: make gen-client-ts

.gitignore

@@ -206,6 +206,3 @@ data/
 .netlify
-.ruff_cache
-source_docs/
-
 ### Golang ###
 /vendor/

CODEOWNERS

@@ -1,26 +1,2 @@
-# Fallback
-* @goauthentik/backend @goauthentik/frontend
-# Backend
-authentik/ @goauthentik/backend
-blueprints/ @goauthentik/backend
-cmd/ @goauthentik/backend
-internal/ @goauthentik/backend
-lifecycle/ @goauthentik/backend
-schemas/ @goauthentik/backend
-scripts/ @goauthentik/backend
-tests/ @goauthentik/backend
-pyproject.toml @goauthentik/backend
-poetry.lock @goauthentik/backend
-# Infrastructure
-.github/ @goauthentik/infrastructure
-Dockerfile @goauthentik/infrastructure
-*Dockerfile @goauthentik/infrastructure
-.dockerignore @goauthentik/infrastructure
-docker-compose.yml @goauthentik/infrastructure
-# Web
-web/ @goauthentik/frontend
-tests/wdio/ @goauthentik/frontend
-# Docs & Website
-website/ @goauthentik/docs
-# Security
-website/docs/security/ @goauthentik/security
+* @goauthentik/core
+website/docs/security/** @goauthentik/security

Dockerfile

@@ -1,75 +1,53 @@
 # syntax=docker/dockerfile:1

 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
-
-ENV NODE_ENV=production
-
-WORKDIR /work/website
-
-RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
-    --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
-    --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
-    npm ci --include=dev
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder

 COPY ./website /work/website/
 COPY ./blueprints /work/blueprints/
 COPY ./SECURITY.md /work/

-RUN npm run build-docs-only
+ENV NODE_ENV=production
+WORKDIR /work/website
+RUN npm ci --include=dev && npm run build-docs-only

 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
-
-ENV NODE_ENV=production
-
-WORKDIR /work/web
-
-RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
-    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
-    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
-    npm ci --include=dev
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder

 COPY ./web /work/web/
 COPY ./website /work/website/
 COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api

-RUN npm run build
+ENV NODE_ENV=production
+WORKDIR /work/web
+RUN npm ci --include=dev && npm run build

-# Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder
+# Stage 3: Poetry to requirements.txt export
+FROM docker.io/python:3.11.4-slim-bullseye AS poetry-locker

-ARG TARGETOS
-ARG TARGETARCH
-ARG TARGETVARIANT
+WORKDIR /work
+COPY ./pyproject.toml /work
+COPY ./poetry.lock /work

-ARG GOOS=$TARGETOS
-ARG GOARCH=$TARGETARCH
+RUN pip install --no-cache-dir poetry && \
+    poetry export -f requirements.txt --output requirements.txt && \
+    poetry export -f requirements.txt --dev --output requirements-dev.txt

-WORKDIR /go/src/goauthentik.io
+# Stage 4: Build go proxy
+FROM docker.io/golang:1.20.7-bullseye AS go-builder

-RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
-    --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
-    --mount=type=cache,target=/go/pkg/mod \
-    go mod download
+WORKDIR /work

-COPY ./cmd /go/src/goauthentik.io/cmd
-COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
-COPY ./web/static.go /go/src/goauthentik.io/web/static.go
-COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
-COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
-COPY ./internal /go/src/goauthentik.io/internal
-COPY ./go.mod /go/src/goauthentik.io/go.mod
-COPY ./go.sum /go/src/goauthentik.io/go.sum
+COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
+COPY --from=web-builder /work/web/security.txt /work/web/security.txt

 ENV CGO_ENABLED=0
+COPY ./cmd /work/cmd
+COPY ./web/static.go /work/web/static.go
+COPY ./internal /work/internal
+COPY ./go.mod /work/go.mod
+COPY ./go.sum /work/go.sum

-RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
-    --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
-    GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
+RUN go build -o /work/authentik ./cmd/server/

-# Stage 4: MaxMind GeoIP
-FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
+# Stage 5: MaxMind GeoIP
+FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip

 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
 ENV GEOIPUPDATE_VERBOSE="true"

@@ -82,33 +60,8 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     mkdir -p /usr/share/GeoIP && \
     /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

-# Stage 5: Python dependencies
-FROM docker.io/python:3.11.5-bookworm AS python-deps
-
-WORKDIR /ak-root/poetry
-
-ENV VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false \
-    PATH="/ak-root/venv/bin:$PATH"
-
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
-    apt-get update && \
-    # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
-
-RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
-    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
-    --mount=type=cache,target=/root/.cache/pip \
-    --mount=type=cache,target=/root/.cache/pypoetry \
-    python -m venv /ak-root/venv/ && \
-    pip3 install --upgrade pip && \
-    pip3 install poetry && \
-    poetry install --only=main --no-ansi --no-interaction
-
 # Stage 6: Run
-FROM docker.io/python:3.11.5-slim-bookworm AS final-image
+FROM docker.io/python:3.11.4-slim-bullseye AS final-image

 ARG GIT_BUILD_HASH
 ARG VERSION

@@ -122,45 +75,46 @@ LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}

 WORKDIR /

 # We cannot cache this layer otherwise we'll end up with a bigger image
+COPY --from=poetry-locker /work/requirements.txt /
+COPY --from=poetry-locker /work/requirements-dev.txt /
+COPY --from=geoip /usr/share/GeoIP /geoip
+
 RUN apt-get update && \
+    # Required for installing pip packages
+    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev python3-dev && \
     # Required for runtime
     apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
     # Required for bootstrap & healtcheck
     apt-get install -y --no-install-recommends runit && \
+    pip install --no-cache-dir -r /requirements.txt && \
+    apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev libpq-dev python3-dev && \
+    apt-get autoremove --purge -y && \
     apt-get clean && \
     rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
     adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
     mkdir -p /certs /media /blueprints && \
     mkdir -p /authentik/.ssh && \
-    mkdir -p /ak-root && \
-    chown authentik:authentik /certs /media /authentik/.ssh /ak-root
+    chown authentik:authentik /certs /media /authentik/.ssh

 COPY ./authentik/ /authentik
 COPY ./pyproject.toml /
 COPY ./poetry.lock /
 COPY ./schemas /schemas
 COPY ./locale /locale
 COPY ./tests /tests
 COPY ./manage.py /
 COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
-COPY --from=go-builder /go/authentik /bin/authentik
-COPY --from=python-deps /ak-root/venv /ak-root/venv
+COPY --from=go-builder /work/authentik /bin/authentik
 COPY --from=web-builder /work/web/dist/ /web/dist/
 COPY --from=web-builder /work/web/authentik/ /web/authentik/
 COPY --from=website-builder /work/website/help/ /website/help/
-COPY --from=geoip /usr/share/GeoIP /geoip

 USER 1000

-ENV TMPDIR=/dev/shm/ \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1 \
-    PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
-    VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false
+ENV TMPDIR /dev/shm/
+ENV PYTHONUNBUFFERED 1
+ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"

-HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
+HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]

-ENTRYPOINT [ "dumb-init", "--", "ak" ]
+ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]

Makefile

@@ -1,16 +1,9 @@
 .PHONY: gen dev-reset all clean test web website

-.SHELLFLAGS += ${SHELLFLAGS} -e
+.SHELLFLAGS += -x -e
 PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.npm_version)
 PY_SOURCES = authentik tests scripts lifecycle
-DOCKER_IMAGE ?= "authentik:test"
-
-pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
-pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
-pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)

 CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 	-I .github/codespell-words.txt \

@@ -26,82 +19,57 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
 	website/integrations \
 	website/src

-all: lint-fix lint test gen web  ## Lint, build, and test everything
-
-HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
-	cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)
-
-help: ## Show this help
-	@echo "\nSpecify a command. The choices are:\n"
-	@grep -Eh '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
-		awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[0;36m%-$(HELP_WIDTH)s \033[m %s\n", $$1, $$2}' | \
-		sort
-	@echo ""
+all: lint-fix lint test gen web

 test-go:
 	go test -timeout 0 -v -race -cover ./...

-test-docker: ## Run all tests in a docker-compose
+test-docker:
 	echo "PG_PASS=$(openssl rand -base64 32)" >> .env
 	echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
 	docker-compose pull -q
 	docker-compose up --no-start
 	docker-compose start postgresql redis
-	docker-compose run -u root server test-all
+	docker-compose run -u root server test
 	rm -f .env

-test: ## Run the server tests and produce a coverage report (locally)
+test:
 	coverage run manage.py test --keepdb authentik
 	coverage html
 	coverage report

-lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
-	isort $(PY_SOURCES)
-	black $(PY_SOURCES)
-	ruff $(PY_SOURCES)
+lint-fix:
+	isort authentik $(PY_SOURCES)
+	black authentik $(PY_SOURCES)
+	ruff authentik $(PY_SOURCES)
 	codespell -w $(CODESPELL_ARGS)

-lint: ## Lint the python and golang sources
-	bandit -r $(PY_SOURCES) -x node_modules
-	./web/node_modules/.bin/pyright $(PY_SOURCES)
+lint:
+	pylint $(PY_SOURCES)
+	bandit -r $(PY_SOURCES) -x node_modules
 	golangci-lint run -v

-migrate: ## Run the Authentik Django server's migrations
+migrate:
 	python -m lifecycle.migrate

-i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service
+i18n-extract: i18n-extract-core web-i18n-extract

 i18n-extract-core:
 	ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en

-install: web-install website-install ## Install all requires dependencies for `web`, `website` and `core`
-	poetry install
-
-dev-drop-db:
-	dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
-	# Also remove the test-db if it exists
-	dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
-	redis-cli -n 0 flushall
-
-dev-create-db:
-	createdb -U ${pg_user} -h ${pg_host} ${pg_name}
-
-dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
-
 #########################
 ## API Schema
 #########################

-gen-build: ## Extract the schema from the database
+gen-build:
 	AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
 	AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

-gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
+gen-changelog:
 	git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
 	npx prettier --write changelog.md

-gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
+gen-diff:
 	git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
 	docker run \
 		--rm -v ${PWD}:/local \

@@ -116,7 +84,7 @@ gen-clean:
 	rm -rf web/api/src/
 	rm -rf api/

-gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
+gen-client-ts:
 	docker run \
 		--rm -v ${PWD}:/local \
 		--user ${UID}:${GID} \

@@ -132,7 +100,7 @@ gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
 	cd gen-ts-api && npm i
 	\cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

-gen-client-go: ## Build and install the authentik API for Golang
+gen-client-go:
 	mkdir -p ./gen-go-api ./gen-go-api/templates
 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
 	wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache

@@ -149,7 +117,7 @@ gen-client-go: ## Build and install the authentik API for Golang
 	go mod edit -replace goauthentik.io/api/v3=./gen-go-api
 	rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/

-gen-dev-config: ## Generate a local development config file
+gen-dev-config:
 	python -m scripts.generate_config

 gen: gen-build gen-clean gen-client-ts

@@ -158,21 +126,21 @@ gen: gen-build gen-clean gen-client-ts
 ## Web
 #########################

-web-build: web-install ## Build the Authentik UI
+web-build: web-install
 	cd web && npm run build

-web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
+web: web-lint-fix web-lint web-check-compile

-web-install: ## Install the necessary libraries to build the Authentik UI
+web-install:
 	cd web && npm ci

-web-watch: ## Build and watch the Authentik UI for changes, updating automatically
+web-watch:
 	rm -rf web/dist/
 	mkdir web/dist/
 	touch web/dist/.gitkeep
 	cd web && npm run watch

-web-storybook-watch: ## Build and run the storybook documentation server
+web-storybook-watch:
 	cd web && npm run storybook

 web-lint-fix:

@@ -180,7 +148,8 @@ web-lint-fix:

 web-lint:
 	cd web && npm run lint
-	cd web && npm run lit-analyse
+	# TODO: The analyzer hasn't run correctly in awhile.
+	# cd web && npm run lit-analyse

 web-check-compile:
 	cd web && npm run tsc

@@ -192,7 +161,7 @@ web-i18n-extract:
 ## Website
 #########################

-website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
+website: website-lint-fix website-build

 website-install:
 	cd website && npm ci

@@ -203,22 +172,11 @@ website-lint-fix:
 website-build:
 	cd website && npm run build

-website-watch: ## Build and watch the documentation website, updating automatically
+website-watch:
 	cd website && npm run watch

-#########################
-## Docker
-#########################
-
-docker: ## Build a docker image of the current source tree
-	DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}
-
 #########################
 ## CI
 #########################
 # These targets are use by GitHub actions to allow usage of matrix
 # which makes the YAML File a lot smaller

 ci--meta-debug:
 	python -V
 	node --version

@@ -246,3 +204,14 @@ ci-pyright: ci--meta-debug

 ci-pending-migrations: ci--meta-debug
 	ak makemigrations --check
+
+install: web-install website-install
+	poetry install
+
+dev-reset:
+	dropdb -U postgres -h localhost authentik
+	# Also remove the test-db if it exists
+	dropdb -U postgres -h localhost test_authentik || true
+	createdb -U postgres -h localhost authentik
+	redis-cli -n 0 flushall
+	make migrate

README.md

@@ -41,3 +41,15 @@ See [SECURITY.md](SECURITY.md)
 ## Adoption and Contributions

 Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).
+
+## Sponsors
+
+This project is proudly sponsored by:
+
+<p>
+    <a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
+        <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
+    </a>
+</p>
+
+DigitalOcean provides development and testing resources for authentik.

SECURITY.md

@@ -16,8 +16,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening authentik

 | Version  | Supported |
 | -------- | --------- |
+| 2023.5.x | ✅        |
 | 2023.6.x | ✅        |
-| 2023.8.x | ✅        |

 ## Reporting a Vulnerability

@@ -27,8 +27,6 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io)

-authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:
-
 | Score     | Severity |
 | --------- | -------- |
 | 0.0       | None     |
 | 0.1 – 3.9 | Low      |
 | 4.0 – 6.9 | Medium   |

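Note: the severity table follows the standard CVSS v3 qualitative rating scale. A small Python sketch makes the bucketing concrete (the High and Critical thresholds are completed from the CVSS specification, since the table above is cut off by the diff):

def cvss_severity(score: float) -> str:
    """Map a CVSS v3 base score to its qualitative severity rating."""
    if score == 0.0:
        return "None"
    if score <= 3.9:
        return "Low"
    if score <= 6.9:
        return "Medium"
    if score <= 8.9:
        return "High"  # standard CVSS range, not shown in the truncated table
    return "Critical"  # 9.0 - 10.0, likewise from the CVSS specification

assert cvss_severity(5.4) == "Medium"
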
authentik/__init__.py

@@ -2,7 +2,7 @@
 from os import environ
 from typing import Optional

-__version__ = "2023.10.6"
+__version__ = "2023.6.1"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

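Note: `ENV_GIT_HASH_KEY` names the environment variable that carries the git build hash into the image (it is also passed as the `GIT_BUILD_HASH` build-arg in the CI workflows above). A hedged sketch of how the two values can be combined (the `get_full_version` helper is hypothetical, not part of this diff):

from os import environ

__version__ = "2023.6.1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

def get_full_version() -> str:
    """Hypothetical helper: append the git build hash, when the
    GIT_BUILD_HASH environment variable is set, to the base version."""
    build_hash = environ.get(ENV_GIT_HASH_KEY, "")
    return f"{__version__}+{build_hash}" if build_hash else __version__
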
authentik/admin/api/meta.py

@@ -1,7 +1,7 @@
 """Meta API"""
 from drf_spectacular.utils import extend_schema
 from rest_framework.fields import CharField
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet

@@ -21,7 +21,7 @@ class AppSerializer(PassiveSerializer):
 class AppsViewSet(ViewSet):
     """Read-only view list all installed apps"""

-    permission_classes = [IsAuthenticated]
+    permission_classes = [IsAdminUser]

     @extend_schema(responses={200: AppSerializer(many=True)})
     def list(self, request: Request) -> Response:

@@ -35,7 +35,7 @@ class AppsViewSet(ViewSet):
 class ModelViewSet(ViewSet):
     """Read-only view list all installed models"""

-    permission_classes = [IsAuthenticated]
+    permission_classes = [IsAdminUser]

     @extend_schema(responses={200: AppSerializer(many=True)})
     def list(self, request: Request) -> Response:

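Note: both sides of this diff use the standard Django REST Framework pattern where `permission_classes` on a view gates every request; `IsAdminUser` passes only users with `is_staff` set. A minimal self-contained sketch (the viewset name and payload are illustrative, not from the diff):

from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet


class ExampleViewSet(ViewSet):
    """Illustrative read-only viewset guarded by IsAdminUser."""

    permission_classes = [IsAdminUser]

    def list(self, request: Request) -> Response:
        # DRF has already enforced permission_classes by the time this runs.
        return Response([{"name": "example"}])
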
authentik/admin/api/metrics.py

@@ -5,7 +5,7 @@ from django.db.models.functions import ExtractHour
 from drf_spectacular.utils import extend_schema, extend_schema_field
 from guardian.shortcuts import get_objects_for_user
 from rest_framework.fields import IntegerField, SerializerMethodField
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -68,7 +68,7 @@ class LoginMetricsSerializer(PassiveSerializer):
 class AdministrationMetricsViewSet(APIView):
     """Login Metrics per 1h"""

-    permission_classes = [IsAuthenticated]
+    permission_classes = [IsAdminUser]

     @extend_schema(responses={200: LoginMetricsSerializer(many=False)})
     def get(self, request: Request) -> Response:

authentik/admin/api/system.py

@@ -8,6 +8,7 @@ from django.utils.timezone import now
 from drf_spectacular.utils import extend_schema
 from gunicorn import version_info as gunicorn_version
 from rest_framework.fields import SerializerMethodField
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -16,7 +17,6 @@ from authentik.core.api.utils import PassiveSerializer
 from authentik.lib.utils.reflection import get_env
 from authentik.outposts.apps import MANAGED_OUTPOST
 from authentik.outposts.models import Outpost
-from authentik.rbac.permissions import HasPermission


 class RuntimeDict(TypedDict):

@@ -88,7 +88,7 @@ class SystemSerializer(PassiveSerializer):
 class SystemView(APIView):
     """Get system information."""

-    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
+    permission_classes = [IsAdminUser]
     pagination_class = None
     filter_backends = []
     serializer_class = SystemSerializer

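Note: `HasPermission` is authentik's RBAC helper, which this diff replaces with DRF's built-in `IsAdminUser`. As a hedged sketch of the general shape of such a helper (an assumption for illustration, not authentik's actual implementation), a parameterized global-permission class in DRF can be built like this:

from rest_framework.permissions import BasePermission


def HasPermission(*perms: str) -> type[BasePermission]:
    """Assumed shape: build a permission class requiring the given
    global Django permission strings ("app_label.codename")."""

    class _HasPermission(BasePermission):
        def has_permission(self, request, view) -> bool:
            # Django's has_perms() checks "app_label.codename" strings.
            return request.user.has_perms(perms)

    return _HasPermission
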
authentik/admin/api/tasks.py

@@ -14,15 +14,14 @@ from rest_framework.fields import (
     ListField,
     SerializerMethodField,
 )
+from rest_framework.permissions import IsAdminUser
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet
 from structlog.stdlib import get_logger

 from authentik.api.decorators import permission_required
 from authentik.core.api.utils import PassiveSerializer
 from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
-from authentik.rbac.permissions import HasPermission

 LOGGER = get_logger()

@@ -64,7 +63,7 @@ class TaskSerializer(PassiveSerializer):
 class TaskViewSet(ViewSet):
     """Read-only view set that returns all background tasks"""

-    permission_classes = [HasPermission("authentik_rbac.view_system_tasks")]
+    permission_classes = [IsAdminUser]
     serializer_class = TaskSerializer

     @extend_schema(

@@ -94,7 +93,6 @@ class TaskViewSet(ViewSet):
         tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
         return Response(TaskSerializer(tasks, many=True).data)

-    @permission_required(None, ["authentik_rbac.run_system_tasks"])
     @extend_schema(
         request=OpenApiTypes.NONE,
         responses={

@ -2,18 +2,18 @@
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.fields import IntegerField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP


class WorkerView(APIView):
    """Get currently connected worker count."""

    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
    permission_classes = [IsAdminUser]

    @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
    def get(self, request: Request) -> Response:
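The recurring change in the hunks above swaps authentik's RBAC checks for DRF's stock IsAdminUser, which only tests request.user.is_staff. For orientation, a minimal sketch of how a factory like HasPermission (defined in authentik.rbac.permissions, not shown in this diff) can be assumed to work, based purely on how it is called above:

    # Hedged sketch only -- the real HasPermission may differ in detail.
    from rest_framework.permissions import BasePermission


    def HasPermission(*perm: str) -> type[BasePermission]:
        """Build a permission class requiring all listed global permissions."""

        class checker(BasePermission):
            def has_permission(self, request, view):
                user = getattr(request, "user", None)
                return bool(user and user.has_perms(perm))

        return checker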
@ -31,5 +31,5 @@ class AuthentikAPIConfig(AppConfig):
            "type": "apiKey",
            "in": "header",
            "name": "Authorization",
            "scheme": "bearer",
            "scheme": "Bearer",
        }
@ -7,9 +7,9 @@ from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter


class OwnerFilter(BaseFilterBackend):

@ -26,14 +26,14 @@ class OwnerFilter(BaseFilterBackend):
class SecretKeyFilter(DjangoFilterBackend):
    """Allow access to all objects when authenticated with secret key as token.

    Replaces both DjangoFilterBackend and ObjectFilter"""
    Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""

    def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
        auth_header = get_authorization_header(request)
        token = validate_auth(auth_header)
        if token and token == settings.SECRET_KEY:
            return queryset
        queryset = ObjectFilter().filter_queryset(request, queryset, view)
        queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
        return super().filter_queryset(request, queryset, view)
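Seen from a client, SecretKeyFilter short-circuits object-level filtering when the request authenticates with the Django SECRET_KEY as a bearer token. A hedged sketch; the host, route and token are placeholders, not taken from this diff:

    import requests

    resp = requests.get(
        "https://authentik.example.com/api/v3/core/applications/",  # placeholder URL
        headers={"Authorization": "Bearer <SECRET_KEY>"},  # placeholder token value
        timeout=5,
    )
    # With the secret key, filter_queryset() returns the queryset unfiltered;
    # any other token falls through to the object-permission filter instead.
    print(resp.status_code)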
@ -10,7 +10,7 @@ from structlog.stdlib import get_logger
LOGGER = get_logger()


def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None):
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
    """Check permissions for a single custom action"""

    def wrapper_outter(func: Callable):

@ -18,17 +18,15 @@ def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[l

        @wraps(func)
        def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
            if obj_perm:
            if perm:
                obj = self.get_object()
                if not request.user.has_perm(obj_perm, obj):
                    LOGGER.debug(
                        "denying access for object", user=request.user, perm=obj_perm, obj=obj
                    )
                if not request.user.has_perm(perm, obj):
                    LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
                    return self.permission_denied(request)
            if global_perms:
                for other_perm in global_perms:
            if other_perms:
                for other_perm in other_perms:
                    if not request.user.has_perm(other_perm):
                        LOGGER.debug("denying access for other", user=request.user, perm=other_perm)
                        LOGGER.debug("denying access for other", user=request.user, perm=perm)
                        return self.permission_denied(request)
            return func(self, request, *args, **kwargs)
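A hedged usage sketch for the decorator defined above; the viewset and the permission strings are invented for illustration, matching the call shape seen elsewhere in this diff (object permission first, extra global permissions as a list):

    from rest_framework.decorators import action
    from rest_framework.request import Request
    from rest_framework.response import Response
    from rest_framework.viewsets import ModelViewSet

    from authentik.api.decorators import permission_required


    class ExampleViewSet(ModelViewSet):  # hypothetical viewset
        @permission_required("authentik_core.view_example", ["authentik_rbac.run_system_tasks"])
        @action(detail=True, methods=["POST"])
        def run(self, request: Request, pk: str) -> Response:
            # the first argument is checked against self.get_object(),
            # the list carries additional global permissions
            return Response({})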
@ -77,10 +77,3 @@ class Pagination(pagination.PageNumberPagination):
            },
            "required": ["pagination", "results"],
        }


class SmallerPagination(Pagination):
    """Smaller pagination for objects which might require a lot of queries
    to retrieve all data for."""

    max_page_size = 10
@ -9,7 +9,6 @@ from drf_spectacular.plumbing import (
)
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings

from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA

@ -32,7 +31,7 @@ GENERIC_ERROR = build_object_type(
VALIDATION_ERROR = build_object_type(
    description=_("Validation Error"),
    properties={
        api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)),
        "non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
        "code": build_standard_type(OpenApiTypes.STR),
    },
    required=[],
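Both spellings above produce the same schema under stock DRF configuration, since NON_FIELD_ERRORS_KEY defaults to "non_field_errors"; the hard-coded string only diverges when a deployment overrides that setting:

    # Requires a configured Django settings module to evaluate
    from rest_framework.settings import api_settings

    assert api_settings.NON_FIELD_ERRORS_KEY == "non_field_errors"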
@ -16,7 +16,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:

    def tester(self: TestModelViewSets):
        self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
        self.assertIsNotNone(getattr(test_viewset, "ordering", None))
        filterset_class = getattr(test_viewset, "filterset_class", None)
        if not filterset_class:
            self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
@ -21,9 +21,7 @@ _other_urls = []
for _authentik_app in get_apps():
    try:
        api_urls = import_module(f"{_authentik_app.name}.urls")
    except ModuleNotFoundError:
        continue
    except ImportError as exc:
    except (ModuleNotFoundError, ImportError) as exc:
        LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
        continue
    if not hasattr(api_urls, "api_urlpatterns"):
@ -1,9 +1,11 @@
"""Serializer mixin for managed models"""
from django.apps import apps
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from drf_spectacular.utils import OpenApiParameter, extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField, JSONField
from rest_framework.fields import BooleanField, CharField, DateTimeField, DictField, JSONField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer

@ -11,7 +13,9 @@ from rest_framework.viewsets import ModelViewSet

from authentik.api.decorators import permission_required
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.common import Blueprint, BlueprintEntry, BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import Importer, YAMLStringImporter, is_model_allowed
from authentik.blueprints.v1.json_parser import BlueprintJSONParser
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin

@ -48,7 +52,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
        if content == "":
            return content
        context = self.instance.context if self.instance else {}
        valid, logs = Importer.from_string(content, context).validate()
        valid, logs = YAMLStringImporter(content, context).validate()
        if not valid:
            text_logs = "\n".join([x["event"] for x in logs])
            raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))

@ -83,14 +87,51 @@ class BlueprintInstanceSerializer(ModelSerializer):
        }


class BlueprintEntrySerializer(PassiveSerializer):
    """Validate a single blueprint entry, similar to a subset of regular blueprints"""

    model = CharField()
    id = CharField(required=False, allow_blank=True)
    identifiers = DictField()
    attrs = DictField()

    def validate_model(self, fq_model: str) -> str:
        """Validate model is allowed"""
        if "." not in fq_model:
            raise ValidationError("Invalid model")
        app, model_name = fq_model.split(".")
        try:
            model = apps.get_model(app, model_name)
            if not is_model_allowed(model):
                raise ValidationError("Invalid model")
        except LookupError:
            raise ValidationError("Invalid model")
        return fq_model


class BlueprintSerializer(PassiveSerializer):
    """Validate a procedural blueprint, which is a subset of a regular blueprint"""

    entries = ListSerializer(child=BlueprintEntrySerializer())
    context = DictField(required=False)


class BlueprintProceduralResultSerializer(PassiveSerializer):
    """Result of applying a procedural blueprint"""

    valid = BooleanField()
    applied = BooleanField()
    logs = ListSerializer(child=CharField())


class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
    """Blueprint instances"""

    permission_classes = [IsAdminUser]
    serializer_class = BlueprintInstanceSerializer
    queryset = BlueprintInstance.objects.all()
    search_fields = ["name", "path"]
    filterset_fields = ["name", "path"]
    ordering = ["name"]

    @extend_schema(
        responses={

@ -126,3 +167,55 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
        blueprint = self.get_object()
        apply_blueprint.delay(str(blueprint.pk)).get()
        return self.retrieve(request, *args, **kwargs)

    @extend_schema(
        request=BlueprintSerializer,
        responses=BlueprintProceduralResultSerializer,
        parameters=[
            OpenApiParameter("validate_only", bool),
        ],
    )
    @action(
        detail=False,
        pagination_class=None,
        filter_backends=[],
        methods=["PUT"],
        permission_classes=[IsAdminUser],
        parser_classes=[BlueprintJSONParser],
    )
    def procedural(self, request: Request) -> Response:
        """Run a client-provided blueprint once, as-is. Blueprint is not kept in memory/database
        and will not be continuously applied"""
        blueprint = Blueprint()
        data = BlueprintSerializer(data=request.data)
        data.is_valid(raise_exception=True)
        blueprint.context = data.validated_data.get("context", {})
        for raw_entry in data.validated_data["entries"]:
            entry = BlueprintEntrySerializer(data=raw_entry)
            entry.is_valid(raise_exception=True)
            blueprint.entries.append(
                BlueprintEntry(
                    model=entry.data["model"],
                    state=BlueprintEntryDesiredState.MUST_CREATED,
                    identifiers=entry.data["identifiers"],
                    attrs=entry.data["attrs"],
                    id=entry.data.get("id", None),
                )
            )
        importer = Importer(blueprint)
        valid, logs = importer.validate()
        result = {
            "valid": valid,
            "applied": False,
            # TODO: Better way to handle logs
            "logs": [x["event"] for x in logs],
        }
        response = BlueprintProceduralResultSerializer(data=result)
        response.is_valid()
        if request.query_params.get("validate_only", False):
            return Response(response.validated_data)
        applied = importer.apply()
        result["applied"] = applied
        response = BlueprintProceduralResultSerializer(data=result)
        response.is_valid()
        return Response(response.validated_data)
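A hedged sketch of exercising the new procedural action. The /api/v3/managed/blueprints/ prefix follows authentik's usual URL layout but is an assumption here, since the router registration is not part of this diff:

    import requests

    payload = {
        "context": {},
        "entries": [
            {
                "model": "authentik_core.group",  # must pass is_model_allowed()
                "identifiers": {"name": "procedural-example"},
                "attrs": {},
            }
        ],
    }
    resp = requests.put(
        "https://authentik.example.com/api/v3/managed/blueprints/procedural/",  # assumed route
        params={"validate_only": "true"},  # validate without applying
        json=payload,
        headers={"Authorization": "Bearer <token>"},  # placeholder token
        timeout=10,
    )
    print(resp.json())  # {"valid": ..., "applied": false, "logs": [...]}

Note that every submitted entry is applied with state MUST_CREATED, so re-running the same payload against existing objects is reported as invalid.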
@ -40,7 +40,7 @@ class ManagedAppConfig(AppConfig):
                meth()
                self._logger.debug("Successfully reconciled", name=name)
            except (DatabaseError, ProgrammingError, InternalError) as exc:
                self._logger.warning("Failed to run reconcile", name=name, exc=exc)
                self._logger.debug("Failed to run reconcile", name=name, exc=exc)


class AuthentikBlueprintsConfig(ManagedAppConfig):
@ -5,7 +5,7 @@ from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger

from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.importer import YAMLStringImporter

LOGGER = get_logger()

@ -18,7 +18,7 @@ class Command(BaseCommand):
        """Apply all blueprints in order, abort when one fails to import"""
        for blueprint_path in options.get("blueprints", []):
            content = BlueprintInstance(path=blueprint_path).retrieve()
            importer = Importer.from_string(content)
            importer = YAMLStringImporter(content)
            valid, _ = importer.validate()
            if not valid:
                self.stderr.write("blueprint invalid")
@ -11,7 +11,7 @@ from authentik.blueprints.models import BlueprintInstance
def apply_blueprint(*files: str):
    """Apply blueprint before test"""

    from authentik.blueprints.v1.importer import Importer
    from authentik.blueprints.v1.importer import YAMLStringImporter

    def wrapper_outer(func: Callable):
        """Apply blueprint before test"""

@ -20,7 +20,7 @@ def apply_blueprint(*files: str):
        def wrapper(*args, **kwargs):
            for file in files:
                content = BlueprintInstance(path=file).retrieve()
                Importer.from_string(content).apply()
                YAMLStringImporter(content).apply()
            return func(*args, **kwargs)

        return wrapper
@ -45,8 +45,3 @@ entries:
    attrs:
      name: "%(uid)s"
      password: "%(uid)s"
  - model: authentik_core.user
    identifiers:
      username: "%(uid)s-no-password"
    attrs:
      name: "%(uid)s"
@ -36,7 +36,6 @@ entries:
    model: authentik_policies_expression.expressionpolicy
  - attrs:
      attributes:
        env_null: !Env [bar-baz, null]
        policy_pk1:
          !Format [
            "%s-%s",
44 authentik/blueprints/tests/fixtures/test.json vendored Normal file

@ -0,0 +1,44 @@
{
    "$schema": "https://goauthentik.io/blueprints/schema.json",
    "version": 1,
    "metadata": {
        "name": "test-json"
    },
    "entries": [
        {
            "model": "authentik_providers_oauth2.oauth2provider",
            "id": "provider",
            "identifiers": {
                "name": "grafana-json"
            },
            "attrs": {
                "authorization_flow": {
                    "goauthentik.io/yaml-key": "!Find",
                    "args": [
                        "authentik_flows.flow",
                        [
                            "pk",
                            {
                                "goauthentik.io/yaml-key": "!Context",
                                "args": "flow"
                            }
                        ]
                    ]
                }
            }
        },
        {
            "model": "authentik_core.application",
            "identifiers": {
                "slug": "test-json"
            },
            "attrs": {
                "name": "test-json",
                "provider": {
                    "goauthentik.io/yaml-key": "!KeyOf",
                    "args": "provider"
                }
            }
        }
    ]
}
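The fixture exercises the JSON stand-in for YAML tags: a mapping carrying the goauthentik.io/yaml-key marker plus args decodes into the same tag object the YAML syntax produces. A small sketch of that equivalence, using the decoder added later in this diff (JSON is a subset of YAML, so the string parses with a YAML loader):

    from yaml import load

    from authentik.blueprints.v1.json_parser import BlueprintJSONDecoder

    raw = '{"provider": {"goauthentik.io/yaml-key": "!KeyOf", "args": "provider"}}'
    parsed = load(raw, BlueprintJSONDecoder)
    # parsed["provider"] is a KeyOf tag instance, equivalent to the YAML form:
    #   provider: !KeyOf provider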
@ -6,7 +6,7 @@ from django.test import TransactionTestCase

from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.tests import apply_blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.importer import YAMLStringImporter
from authentik.tenants.models import Tenant


@ -25,7 +25,7 @@ def blueprint_tester(file_name: Path) -> Callable:
    def tester(self: TestPackaged):
        base = Path("blueprints/")
        rel_path = Path(file_name).relative_to(base)
        importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
        importer = YAMLStringImporter(BlueprintInstance(path=str(rel_path)).retrieve())
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
@ -6,7 +6,6 @@ from django.test import TestCase

from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken


class TestModels(TestCase):

@ -22,9 +21,6 @@ def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
        model_class = test_model()
        self.assertTrue(isinstance(model_class, SerializerModel))
        self.assertIsNotNone(model_class.serializer)
        if model_class.serializer.Meta().model == RefreshToken:
            return
        self.assertEqual(model_class.serializer.Meta().model, test_model)

    return tester
@ -4,7 +4,7 @@ from os import environ
from django.test import TransactionTestCase

from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer, transaction_rollback
from authentik.blueprints.v1.importer import YAMLStringImporter, transaction_rollback
from authentik.core.models import Group
from authentik.flows.models import Flow, FlowDesignation, FlowStageBinding
from authentik.lib.generators import generate_id

@ -21,14 +21,14 @@ class TestBlueprintsV1(TransactionTestCase):

    def test_blueprint_invalid_format(self):
        """Test blueprint with invalid format"""
        importer = Importer.from_string('{"version": 3}')
        importer = YAMLStringImporter('{"version": 3}')
        self.assertFalse(importer.validate()[0])
        importer = Importer.from_string(
        importer = YAMLStringImporter(
            '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
            '"model": "authentik_core.User"}]}'
        )
        self.assertFalse(importer.validate()[0])
        importer = Importer.from_string(
        importer = YAMLStringImporter(
            '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
            '"identifiers": {}, '
            '"model": "authentik_core.Group"}]}'

@ -54,7 +54,7 @@ class TestBlueprintsV1(TransactionTestCase):
            },
        )

        importer = Importer.from_string(
        importer = YAMLStringImporter(
            '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
            '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
            '["other_value"]}}, "model": "authentik_core.Group"}]}'

@ -103,7 +103,7 @@ class TestBlueprintsV1(TransactionTestCase):
        self.assertEqual(len(export.entries), 3)
        export_yaml = exporter.export_to_string()

        importer = Importer.from_string(export_yaml)
        importer = YAMLStringImporter(export_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

@ -113,14 +113,14 @@ class TestBlueprintsV1(TransactionTestCase):
        """Test export and import it twice"""
        count_initial = Prompt.objects.filter(field_key="username").count()

        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
        importer = YAMLStringImporter(load_fixture("fixtures/static_prompt_export.yaml"))
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

        count_before = Prompt.objects.filter(field_key="username").count()
        self.assertEqual(count_initial + 1, count_before)

        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
        importer = YAMLStringImporter(load_fixture("fixtures/static_prompt_export.yaml"))
        self.assertTrue(importer.apply())

        self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)

@ -130,7 +130,7 @@ class TestBlueprintsV1(TransactionTestCase):
        ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
        Group.objects.filter(name="test").delete()
        environ["foo"] = generate_id()
        importer = Importer.from_string(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
        importer = YAMLStringImporter(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()

@ -213,9 +213,8 @@ class TestBlueprintsV1(TransactionTestCase):
                    },
                },
                "nested_context": "context-nested-value",
                "env_null": None,
            }
            ).exists()
        )
        )
        self.assertTrue(
            OAuthSource.objects.filter(

@ -248,7 +247,7 @@ class TestBlueprintsV1(TransactionTestCase):
        exporter = FlowExporter(flow)
        export_yaml = exporter.export_to_string()

        importer = Importer.from_string(export_yaml)
        importer = YAMLStringImporter(export_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())

@ -297,7 +296,7 @@ class TestBlueprintsV1(TransactionTestCase):
        exporter = FlowExporter(flow)
        export_yaml = exporter.export_to_string()

        importer = Importer.from_string(export_yaml)
        importer = YAMLStringImporter(export_yaml)

        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
@ -1,7 +1,7 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.importer import YAMLStringImporter
from authentik.core.models import Application, Token, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.flows.models import Flow

@ -18,7 +18,7 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
        self.uid = generate_id()
        import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)

        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

@ -51,9 +51,3 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
        user: User = User.objects.filter(username=self.uid).first()
        self.assertIsNotNone(user)
        self.assertTrue(user.check_password(self.uid))

    def test_user_null(self):
        """Test user"""
        user: User = User.objects.filter(username=f"{self.uid}-no-password").first()
        self.assertIsNotNone(user)
        self.assertFalse(user.has_usable_password())
@ -1,7 +1,7 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.importer import YAMLStringImporter
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture

@ -18,7 +18,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
            "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects exist

@ -35,7 +35,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
            "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
        )

        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure objects do not exist
22 authentik/blueprints/tests/test_v1_json.py Normal file

@ -0,0 +1,22 @@
"""Test blueprints v1 JSON"""
from django.test import TransactionTestCase

from authentik.blueprints.v1.importer import JSONStringImporter
from authentik.core.tests.utils import create_test_flow
from authentik.lib.tests.utils import load_fixture


class TestBlueprintsV1JSON(TransactionTestCase):
    """Test Blueprints"""

    def test_import(self):
        """Test JSON Import"""
        test_flow = create_test_flow()
        importer = JSONStringImporter(
            load_fixture("fixtures/test.json"),
            {
                "flow": str(test_flow.pk),
            },
        )
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
@ -1,7 +1,7 @@
"""Test blueprints v1"""
from django.test import TransactionTestCase

from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.importer import YAMLStringImporter
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture

@ -15,7 +15,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        flow_slug = generate_id()
        import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)

        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists

@ -30,7 +30,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        self.assertEqual(flow.title, "bar")

        # Ensure importer updates it
        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@ -41,7 +41,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        flow_slug = generate_id()
        import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists

@ -56,7 +56,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        self.assertEqual(flow.title, "bar")

        # Ensure importer doesn't update it
        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@ -67,7 +67,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        flow_slug = generate_id()
        import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)

        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        # Ensure object exists

@ -75,7 +75,7 @@ class TestBlueprintsV1State(TransactionTestCase):
        self.assertEqual(flow.slug, flow_slug)

        import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
        importer = Importer.from_string(import_yaml)
        importer = YAMLStringImporter(import_yaml)
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())
        flow: Flow = Flow.objects.filter(slug=flow_slug).first()
@ -12,7 +12,6 @@ from uuid import UUID
from deepmerge import always_merger
from django.apps import apps
from django.db.models import Model, Q
from rest_framework.exceptions import ValidationError
from rest_framework.fields import Field
from rest_framework.serializers import Serializer
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode

@ -208,8 +207,8 @@ class KeyOf(YAMLTag):
        ):
            return _entry._state.instance.pbm_uuid
        return _entry._state.instance.pk
        raise EntryInvalidError.from_entry(
            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance", entry
        raise EntryInvalidError(
            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
        )

@ -225,11 +224,11 @@ class Env(YAMLTag):
        if isinstance(node, ScalarNode):
            self.key = node.value
        if isinstance(node, SequenceNode):
            self.key = loader.construct_object(node.value[0])
            self.default = loader.construct_object(node.value[1])
            self.key = node.value[0].value
            self.default = node.value[1].value

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        return getenv(self.key) or self.default
        return getenv(self.key, self.default)
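The two resolve() variants above differ on empty values, not just in style: getenv(key, default) falls back only when the variable is unset, while getenv(key) or default also falls back when the variable is set but empty. A quick illustration in plain Python:

    import os

    os.environ["EXAMPLE_EMPTY"] = ""
    assert os.getenv("EXAMPLE_EMPTY", "fallback") == ""  # two-arg form keeps ""
    assert (os.getenv("EXAMPLE_EMPTY") or "fallback") == "fallback"  # `or` form replaces it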
class Context(YAMLTag):

@ -244,8 +243,8 @@ class Context(YAMLTag):
        if isinstance(node, ScalarNode):
            self.key = node.value
        if isinstance(node, SequenceNode):
            self.key = loader.construct_object(node.value[0])
            self.default = loader.construct_object(node.value[1])
            self.key = node.value[0].value
            self.default = node.value[1].value

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        value = self.default

@ -264,7 +263,7 @@ class Format(YAMLTag):

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.format_string = loader.construct_object(node.value[0])
        self.format_string = node.value[0].value
        self.args = []
        for raw_node in node.value[1:]:
            self.args.append(loader.construct_object(raw_node))

@ -280,7 +279,7 @@ class Format(YAMLTag):
        try:
            return self.format_string % tuple(args)
        except TypeError as exc:
            raise EntryInvalidError.from_entry(exc, entry)
            raise EntryInvalidError(exc)


class Find(YAMLTag):

@ -343,7 +342,7 @@ class Condition(YAMLTag):

    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.mode = loader.construct_object(node.value[0])
        self.mode = node.value[0].value
        self.args = []
        for raw_node in node.value[1:]:
            self.args.append(loader.construct_object(raw_node))

@ -357,15 +356,13 @@ class Condition(YAMLTag):
            args.append(arg)

        if not args:
            raise EntryInvalidError.from_entry(
                "At least one value is required after mode selection.", entry
            )
            raise EntryInvalidError("At least one value is required after mode selection.")

        try:
            comparator = self._COMPARATORS[self.mode.upper()]
            return comparator(tuple(bool(x) for x in args))
        except (TypeError, KeyError) as exc:
            raise EntryInvalidError.from_entry(exc, entry)
            raise EntryInvalidError(exc)


class If(YAMLTag):

@ -397,7 +394,7 @@ class If(YAMLTag):
                blueprint,
            )
        except TypeError as exc:
            raise EntryInvalidError.from_entry(exc, entry)
            raise EntryInvalidError(exc)


class Enumerate(YAMLTag, YAMLTagContext):

@ -420,7 +417,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
    def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
        super().__init__()
        self.iterable = loader.construct_object(node.value[0])
        self.output_body = loader.construct_object(node.value[1])
        self.output_body = node.value[1].value
        self.item_body = loader.construct_object(node.value[2])
        self.__current_context: tuple[Any, Any] = tuple()

@ -429,10 +426,9 @@ class Enumerate(YAMLTag, YAMLTagContext):

    def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
        if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
            raise EntryInvalidError.from_entry(
            raise EntryInvalidError(
                f"{self.__class__.__name__} tag's iterable references this tag's context. "
                "This is a noop. Check you are setting depth bigger than 0.",
                entry,
                "This is a noop. Check you are setting depth bigger than 0."
            )

        if isinstance(self.iterable, YAMLTag):

@ -441,10 +437,9 @@ class Enumerate(YAMLTag, YAMLTagContext):
            iterable = self.iterable

        if not isinstance(iterable, Iterable):
            raise EntryInvalidError.from_entry(
            raise EntryInvalidError(
                f"{self.__class__.__name__}'s iterable must be an iterable "
                "such as a sequence or a mapping",
                entry,
                "such as a sequence or a mapping"
            )

        if isinstance(iterable, Mapping):

@ -455,7 +450,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
        try:
            output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
        except KeyError as exc:
            raise EntryInvalidError.from_entry(exc, entry)
            raise EntryInvalidError(exc)

        result = output_class()

@ -467,8 +462,8 @@ class Enumerate(YAMLTag, YAMLTagContext):
            resolved_body = entry.tag_resolver(self.item_body, blueprint)
            result = add_fn(result, resolved_body)
            if not isinstance(result, output_class):
                raise EntryInvalidError.from_entry(
                    f"Invalid {self.__class__.__name__} item found: {resolved_body}", entry
                raise EntryInvalidError(
                    f"Invalid {self.__class__.__name__} item found: {resolved_body}"
                )
        finally:
            self.__current_context = tuple()

@ -495,13 +490,12 @@ class EnumeratedItem(YAMLTag):
            )
        except ValueError as exc:
            if self.depth == 0:
                raise EntryInvalidError.from_entry(
                raise EntryInvalidError(
                    f"{self.__class__.__name__} tags are only usable "
                    f"inside an {Enumerate.__name__} tag",
                    entry,
                    f"inside an {Enumerate.__name__} tag"
                )

            raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)
            raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")

        return context_tag.get_context(entry, blueprint)

@ -515,7 +509,7 @@ class Index(EnumeratedItem):
        try:
            return context[0]
        except IndexError:  # pragma: no cover
            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
            raise EntryInvalidError(f"Empty/invalid context: {context}")


class Value(EnumeratedItem):

@ -527,7 +521,7 @@ class Value(EnumeratedItem):
        try:
            return context[1]
        except IndexError:  # pragma: no cover
            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
            raise EntryInvalidError(f"Empty/invalid context: {context}")


class BlueprintDumper(SafeDumper):

@ -561,51 +555,37 @@ class BlueprintDumper(SafeDumper):
        return super().represent(data)


def yaml_key_map() -> dict[str, type[YAMLTag]]:
    """get a dict of all yaml tags, key being the actual tag
    and the value is the class"""
    return {
        "!KeyOf": KeyOf,
        "!Find": Find,
        "!Context": Context,
        "!Format": Format,
        "!Condition": Condition,
        "!If": If,
        "!Env": Env,
        "!Enumerate": Enumerate,
        "!Value": Value,
        "!Index": Index,
    }


class BlueprintLoader(SafeLoader):
    """Loader for blueprints with custom tag support"""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_constructor("!KeyOf", KeyOf)
        self.add_constructor("!Find", Find)
        self.add_constructor("!Context", Context)
        self.add_constructor("!Format", Format)
        self.add_constructor("!Condition", Condition)
        self.add_constructor("!If", If)
        self.add_constructor("!Env", Env)
        self.add_constructor("!Enumerate", Enumerate)
        self.add_constructor("!Value", Value)
        self.add_constructor("!Index", Index)
        for tag, cls in yaml_key_map().items():
            self.add_constructor(tag, cls)
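The loader refactor is behaviour-preserving: every tag from yaml_key_map() is still registered, so any document using those tags parses into tag instances. A minimal sketch:

    from yaml import load

    from authentik.blueprints.v1.common import BlueprintLoader

    doc = load('name: !Env [EXAMPLE_VAR, "fallback"]', BlueprintLoader)
    # doc["name"] is an Env tag instance; it resolves against the environment
    # (or to the fallback) only when the blueprint entry is applied.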
class EntryInvalidError(SentryIgnoredException):
    """Error raised when an entry is invalid"""

    entry_model: Optional[str]
    entry_id: Optional[str]
    validation_error: Optional[ValidationError]
    serializer: Optional[Serializer] = None
    serializer_errors: Optional[dict]

    def __init__(
        self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs
    ) -> None:
    def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
        super().__init__(*args)
        self.entry_model = None
        self.entry_id = None
        self.validation_error = validation_error
        for key, value in kwargs.items():
            setattr(self, key, value)

    @staticmethod
    def from_entry(
        msg_or_exc: str | Exception, entry: BlueprintEntry, *args, **kwargs
    ) -> "EntryInvalidError":
        """Create EntryInvalidError with the context of an entry"""
        error = EntryInvalidError(msg_or_exc, *args, **kwargs)
        if isinstance(msg_or_exc, ValidationError):
            error.validation_error = msg_or_exc
        # Make sure the model and id are strings, depending where the error happens
        # they might still be YAMLTag instances
        error.entry_model = str(entry.model)
        error.entry_id = str(entry.id)
        return error
        self.serializer_errors = serializer_errors
@ -8,9 +8,9 @@ from dacite.core import from_dict
from dacite.exceptions import DaciteError
from deepmerge import always_merger
from django.core.exceptions import FieldError
from django.db import transaction
from django.db.models import Model
from django.db.models.query_utils import Q
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer

@ -27,6 +27,7 @@ from authentik.blueprints.v1.common import (
    BlueprintLoader,
    EntryInvalidError,
)
from authentik.blueprints.v1.json_parser import BlueprintJSONDecoder
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.core.models import (
    AuthenticatedSession,

@ -35,28 +36,23 @@ from authentik.core.models import (
    Source,
    UserSourceConnection,
)
from authentik.enterprise.models import LicenseUsage
from authentik.events.utils import cleanse_dict
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.providers.scim.models import SCIMGroup, SCIMUser

# Context set when the serializer is created in a blueprint context
# Update website/developer-docs/blueprints/v1/models.md when used
SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"


def excluded_models() -> list[type[Model]]:
    """Return a list of all excluded models that shouldn't be exposed via API
    or other means (internal only, base classes, non-used objects, etc)"""
def is_model_allowed(model: type[Model]) -> bool:
    """Check if model is allowed"""
    # pylint: disable=imported-auth-user
    from django.contrib.auth.models import Group as DjangoGroup
    from django.contrib.auth.models import User as DjangoUser

    return (
    excluded_models = (
        DjangoUser,
        DjangoGroup,
        # Base classes

@ -72,34 +68,22 @@ def excluded_models() -> list[type[Model]]:
        AuthenticatedSession,
        # Classes which are only internally managed
        FlowToken,
        LicenseUsage,
        SCIMGroup,
        SCIMUser,
    )


def is_model_allowed(model: type[Model]) -> bool:
    """Check if model is allowed"""
    return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel))


class DoRollback(SentryIgnoredException):
    """Exception to trigger a rollback"""
    return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))


@contextmanager
def transaction_rollback():
    """Enters an atomic transaction and always triggers a rollback at the end of the block."""
    try:
        with atomic():
            yield
            raise DoRollback()
    except DoRollback:
        pass
    atomic = transaction.atomic()
    # pylint: disable=unnecessary-dunder-call
    atomic.__enter__()
    yield
    atomic.__exit__(IntegrityError, None, None)
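Both implementations of transaction_rollback() give callers a scratch transaction that is discarded afterwards; validate() below relies on that to dry-run a blueprint. A hedged usage sketch (the model is chosen purely for illustration):

    from authentik.blueprints.v1.importer import transaction_rollback
    from authentik.core.models import Group

    with transaction_rollback():
        Group.objects.create(name="scratch")  # visible only inside the block
    # the enclosing transaction was rolled back; nothing was committed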
class Importer:
    """Import Blueprint from raw dict or YAML/JSON"""
    """Import Blueprint from YAML"""

    logger: BoundLogger
    _import: Blueprint

@ -114,18 +98,6 @@ class Importer:
        always_merger.merge(ctx, context)
        self._import.context = ctx

    @staticmethod
    def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
        """Parse YAML string and create blueprint importer from it"""
        import_dict = load(yaml_input, BlueprintLoader)
        try:
            _import = from_dict(
                Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
            )
        except DaciteError as exc:
            raise EntryInvalidError from exc
        return Importer(_import, context)

    @property
    def blueprint(self) -> Blueprint:
        """Get imported blueprint"""

@ -183,7 +155,7 @@ class Importer:
        model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
        # Don't use isinstance since we don't want to check for inheritance
        if not is_model_allowed(model):
            raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry)
            raise EntryInvalidError(f"Model {model} not allowed")
        if issubclass(model, BaseMetaModel):
            serializer_class: type[Serializer] = model.serializer()
            serializer = serializer_class(

@ -195,10 +167,8 @@ class Importer:
            try:
                serializer.is_valid(raise_exception=True)
            except ValidationError as exc:
                raise EntryInvalidError.from_entry(
                    f"Serializer errors {serializer.errors}",
                    validation_error=exc,
                    entry=entry,
                raise EntryInvalidError(
                    f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
                ) from exc
            return serializer

@ -215,12 +185,12 @@ class Importer:

        query = self.__query_from_identifier(updated_identifiers)
        if not query:
            raise EntryInvalidError.from_entry("No or invalid identifiers", entry)
            raise EntryInvalidError("No or invalid identifiers")

        try:
            existing_models = model.objects.filter(query)
        except FieldError as exc:
            raise EntryInvalidError.from_entry(f"Invalid identifier field: {exc}", entry) from exc
            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc

        serializer_kwargs = {}
        model_instance = existing_models.first()

@ -234,18 +204,15 @@ class Importer:
            serializer_kwargs["instance"] = model_instance
            serializer_kwargs["partial"] = True
        elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
            raise EntryInvalidError.from_entry(
            raise EntryInvalidError(
                (
                    f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
                    "and object exists already",
                ),
                entry,
                    f"state is set to {BlueprintEntryDesiredState.MUST_CREATED}"
                    " and object exists already"
                )
            )
        else:
            self.logger.debug(
                "initialised new serializer instance",
                model=model,
                **cleanse_dict(updated_identifiers),
                "initialised new serializer instance", model=model, **updated_identifiers
            )
            model_instance = model()
            # pk needs to be set on the model instance otherwise a new one will be generated

@ -255,10 +222,7 @@ class Importer:
        try:
            full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
        except ValueError as exc:
            raise EntryInvalidError.from_entry(
                exc,
                entry,
            ) from exc
            raise EntryInvalidError(exc) from exc
        always_merger.merge(full_data, updated_identifiers)
        serializer_kwargs["data"] = full_data

@ -271,18 +235,16 @@ class Importer:
        try:
            serializer.is_valid(raise_exception=True)
        except ValidationError as exc:
            raise EntryInvalidError.from_entry(
                f"Serializer errors {serializer.errors}",
                validation_error=exc,
                entry=entry,
                serializer=serializer,
            raise EntryInvalidError(
                f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
            ) from exc
        return serializer

    def apply(self) -> bool:
        """Apply (create/update) models yaml, in database transaction"""
        self.logger.debug("Starting blueprint import")
        try:
            with atomic():
            with transaction.atomic():
                if not self._apply_models():
                    self.logger.debug("Reverting changes due to error")
                    raise IntegrityError

@ -291,7 +253,7 @@ class Importer:
            self.logger.debug("Committing changes")
            return True

    def _apply_models(self, raise_errors=False) -> bool:
    def _apply_models(self) -> bool:
        """Apply (create/update) models yaml"""
        self.__pk_map = {}
        for entry in self._import.entries:

@ -304,18 +266,14 @@ class Importer:
                )
                return False
            # Validate each single entry
            serializer = None
            try:
                serializer = self._validate_single(entry)
            except EntryInvalidError as exc:
                # For deleting objects we don't need the serializer to be valid
                if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
                    serializer = exc.serializer
                else:
                    self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                    if raise_errors:
                        raise exc
                    return False
                continue
                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
                return False
            if not serializer:
                continue

@ -352,7 +310,7 @@ class Importer:
            self.logger.debug("entry to delete with no instance, skipping")
            return True

    def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
    def validate(self) -> tuple[bool, list[EventDict]]:
        """Validate loaded blueprint export, ensure all models are allowed
        and serializers have no errors"""
        self.logger.debug("Starting blueprint import validation")

@ -364,7 +322,7 @@ class Importer:
            transaction_rollback(),
            capture_logs() as logs,
        ):
            successful = self._apply_models(raise_errors=raise_validation_errors)
            successful = self._apply_models()
            if not successful:
                self.logger.debug("Blueprint validation failed")
        for log in logs:

@ -372,3 +330,31 @@ class Importer:
        self.logger.debug("Finished blueprint import validation")
        self._import = orig_import
        return successful, logs


class YAMLStringImporter(Importer):
    """Importer that also parses from YAML string"""

    def __init__(self, yaml_input: str, context: dict | None = None):
        import_dict = load(yaml_input, BlueprintLoader)
        try:
            _import = from_dict(
                Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
            )
        except DaciteError as exc:
            raise EntryInvalidError from exc
        super().__init__(_import, context)


class JSONStringImporter(Importer):
    """Importer that also parses from JSON string"""

    def __init__(self, json_import: str, context: dict | None = None):
        import_dict = load(json_import, BlueprintJSONDecoder)
        try:
            _import = from_dict(
                Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
            )
        except DaciteError as exc:
            raise EntryInvalidError from exc
        super().__init__(_import, context)
77 authentik/blueprints/v1/json_parser.py Normal file

@ -0,0 +1,77 @@
"""Blueprint JSON decoder"""
import codecs
from collections.abc import Hashable
from typing import Any

from django.conf import settings
from rest_framework.exceptions import ParseError
from rest_framework.parsers import JSONParser
from yaml import load
from yaml.nodes import MappingNode

from authentik.blueprints.v1.common import BlueprintLoader, YAMLTag, yaml_key_map

TAG_KEY = "goauthentik.io/yaml-key"
ARGS_KEY = "args"


class BlueprintJSONDecoder(BlueprintLoader):
    """Blueprint JSON decoder, allows using tag logic when using JSON data (e.g. through the API,
    when YAML tags are not available).

    This is still based on a YAML Loader, since all the YAML Tag constructors expect *Node objects
    from YAML, this makes things a lot easier."""

    tag_map: dict[str, type[YAMLTag]]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.tag_map = yaml_key_map()
        self.add_constructor("tag:yaml.org,2002:map", BlueprintJSONDecoder.construct_yaml_map)

    def construct_yaml_map(self, node):
        """The original construct_yaml_map creates a dict, yields it, then updates it,
        which is probably some sort of performance optimisation, however it breaks here
        when we don't return a dict from the `construct_mapping` function"""
        value = self.construct_mapping(node)
        yield value

    def construct_mapping(self, node: MappingNode, deep: bool = False) -> dict[Hashable, Any]:
        """Check if the mapping has a special key and create an in-place YAML tag for it,
        and return that instead of the actual dict"""
        parsed = super().construct_mapping(node, deep=deep)
        if TAG_KEY not in parsed:
            return parsed
        tag_cls = self.parse_yaml_tag(parsed)
        if not tag_cls:
            return parsed
        # MappingNode's value is a list of tuples where the tuples
        # consist of (KeyNode, ValueNode)
        # so this filters out the value node for `args`
        raw_args_pair = [x for x in node.value if x[0].value == ARGS_KEY]
        if len(raw_args_pair) < 1:
            return parsed
        # Get the value of the first Node in the pair we get from above
        # where the value isn't `args`, i.e. the actual argument data
        raw_args_data = [x for x in raw_args_pair[0] if x.value != ARGS_KEY][0]
        return tag_cls(self, raw_args_data)

    def parse_yaml_tag(self, data: dict) -> YAMLTag | None:
        """parse the tag"""
        yaml_tag = data.get(TAG_KEY)
        tag_cls = self.tag_map.get(yaml_tag)
        if not tag_cls:
            return None
        return tag_cls


class BlueprintJSONParser(JSONParser):
    """Wrapper around the rest_framework JSON parser that uses the `BlueprintJSONDecoder`"""

    def parse(self, stream, media_type=None, parser_context=None):
        encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET)
        try:
            decoded_stream = codecs.getreader(encoding)(stream)
            return load(decoded_stream, BlueprintJSONDecoder)
        except ValueError as exc:
            raise ParseError("JSON parse error") from exc
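A hedged sketch of the parser in isolation, mirroring what DRF does when the procedural endpoint declares parser_classes=[BlueprintJSONParser]; it needs a configured Django settings module for DEFAULT_CHARSET:

    import io

    from authentik.blueprints.v1.json_parser import BlueprintJSONParser

    stream = io.BytesIO(b'{"flow": {"goauthentik.io/yaml-key": "!Context", "args": "flow"}}')
    data = BlueprintJSONParser().parse(stream, parser_context={})
    # data["flow"] is a Context tag instance, resolved later during import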
@ -31,7 +31,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
        required = attrs["required"]
        instance = BlueprintInstance.objects.filter(**identifiers).first()
        if not instance and required:
            raise ValidationError({"identifiers": "Required blueprint does not exist"})
            raise ValidationError("Required blueprint does not exist")
        self.blueprint_instance = instance
        return super().validate(attrs)
@ -26,7 +26,7 @@ from authentik.blueprints.models import (
    BlueprintRetrievalFailed,
)
from authentik.blueprints.v1.common import BlueprintLoader, BlueprintMetadata, EntryInvalidError
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.importer import YAMLStringImporter
from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.monitored_tasks import (
@ -75,14 +75,14 @@ class BlueprintEventHandler(FileSystemEventHandler):
|
|||
return
|
||||
if event.is_directory:
|
||||
return
|
||||
root = Path(CONFIG.get("blueprints_dir")).absolute()
|
||||
path = Path(event.src_path).absolute()
|
||||
rel_path = str(path.relative_to(root))
|
||||
if isinstance(event, FileCreatedEvent):
|
||||
LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
|
||||
blueprints_discovery.delay(rel_path)
|
||||
LOGGER.debug("new blueprint file created, starting discovery")
|
||||
blueprints_discovery.delay()
|
||||
if isinstance(event, FileModifiedEvent):
|
||||
for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
|
||||
path = Path(event.src_path)
|
||||
root = Path(CONFIG.get("blueprints_dir")).absolute()
|
||||
rel_path = str(path.relative_to(root))
|
||||
for instance in BlueprintInstance.objects.filter(path=rel_path):
|
||||
LOGGER.debug("modified blueprint file, starting apply", instance=instance)
|
||||
apply_blueprint.delay(instance.pk.hex)
|
||||
|
||||
|
@@ -98,32 +98,39 @@ def blueprints_find_dict():
    return blueprints


def blueprints_find() -> list[BlueprintFile]:
def blueprints_find():
    """Find blueprints and return valid ones"""
    blueprints = []
    root = Path(CONFIG.get("blueprints_dir"))
    for path in root.rglob("**/*.yaml"):
        rel_path = path.relative_to(root)
        # Check if any part in the path starts with a dot and assume a hidden file
        if any(part for part in path.parts if part.startswith(".")):
            continue
        LOGGER.debug("found blueprint", path=str(path))
        with open(path, "r", encoding="utf-8") as blueprint_file:
            try:
                raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
            except YAMLError as exc:
                raw_blueprint = None
                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(rel_path))
                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
            if not raw_blueprint:
                continue
            metadata = raw_blueprint.get("metadata", None)
            version = raw_blueprint.get("version", 1)
            if version != 1:
                LOGGER.warning("invalid blueprint version", version=version, path=str(rel_path))
                LOGGER.warning("invalid blueprint version", version=version, path=str(path))
                continue
        file_hash = sha512(path.read_bytes()).hexdigest()
        blueprint = BlueprintFile(str(rel_path), version, file_hash, int(path.stat().st_mtime))
        blueprint = BlueprintFile(
            str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
        )
        blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
        blueprints.append(blueprint)
        LOGGER.debug(
            "parsed & loaded blueprint",
            hash=file_hash,
            path=str(path),
        )
    return blueprints
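The discovery loop's hashing and hidden-file check are standalone; a minimal sketch of just that part (directory name is illustrative):

    from hashlib import sha512
    from pathlib import Path

    root = Path("blueprints")  # illustrative directory
    for path in root.rglob("**/*.yaml"):
        # Skip files under any dot-prefixed path component (hidden files/dirs)
        if any(part.startswith(".") for part in path.parts):
            continue
        file_hash = sha512(path.read_bytes()).hexdigest()
        print(path.relative_to(root), file_hash, int(path.stat().st_mtime))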
@@ -131,12 +138,10 @@ def blueprints_find() -> list[BlueprintFile]:
    throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
)
@prefill_task
def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
def blueprints_discovery(self: MonitoredTask):
    """Find blueprints and check if they need to be created in the database"""
    count = 0
    for blueprint in blueprints_find():
        if path and blueprint.path != path:
            continue
        check_blueprint_v1_file(blueprint)
        count += 1
    self.set_status(
@@ -166,11 +171,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
            metadata={},
        )
        instance.save()
        LOGGER.info(
            "Creating new blueprint instance from file", instance=instance, path=instance.path
        )
    if instance.last_applied_hash != blueprint.hash:
        LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
        apply_blueprint.delay(str(instance.pk))
@@ -189,7 +190,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
        self.set_uid(slugify(instance.name))
        blueprint_content = instance.retrieve()
        file_hash = sha512(blueprint_content.encode()).hexdigest()
        importer = Importer.from_string(blueprint_content, instance.context)
        importer = YAMLStringImporter(blueprint_content, instance.context)
        if importer.blueprint.metadata:
            instance.metadata = asdict(importer.blueprint.metadata)
        valid, logs = importer.validate()
@@ -17,6 +17,7 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger
from structlog.testing import capture_logs

@@ -37,7 +38,6 @@ from authentik.lib.utils.file import (
from authentik.policies.api.exec import PolicyTestResultSerializer
from authentik.policies.engine import PolicyEngine
from authentik.policies.types import PolicyResult
from authentik.rbac.filters import ObjectFilter

LOGGER = get_logger()

@@ -98,7 +98,6 @@ class ApplicationSerializer(ModelSerializer):
class ApplicationViewSet(UsedByMixin, ModelViewSet):
    """Application Viewset"""

    # pylint: disable=no-member
    queryset = Application.objects.all().prefetch_related("provider")
    serializer_class = ApplicationSerializer
    search_fields = [

@@ -123,7 +122,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
        for backend in list(self.filter_backends):
            if backend == ObjectFilter:
            if backend == ObjectPermissionsFilter:
                continue
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset
@@ -1,4 +1,6 @@
"""Authenticator Devices API Views"""
from django_otp import device_classes, devices_for_user
from django_otp.models import Device
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField

@@ -8,8 +10,6 @@ from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

from authentik.core.api.utils import MetaNameSerializer
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device


class DeviceSerializer(MetaNameSerializer):
@@ -2,6 +2,7 @@
from json import loads
from typing import Optional

from django.db.models.query import QuerySet
from django.http import Http404
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet

@@ -13,12 +14,12 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.decorators import permission_required
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import PassiveSerializer, is_dict
from authentik.core.models import Group, User
from authentik.rbac.api.roles import RoleSerializer


class GroupMemberSerializer(ModelSerializer):

@@ -48,13 +49,7 @@ class GroupSerializer(ModelSerializer):
    users_obj = ListSerializer(
        child=GroupMemberSerializer(), read_only=True, source="users", required=False
    )
    roles_obj = ListSerializer(
        child=RoleSerializer(),
        read_only=True,
        source="roles",
        required=False,
    )
    parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
    parent_name = CharField(source="parent.name", read_only=True)

    num_pk = IntegerField(read_only=True)

@@ -76,10 +71,8 @@ class GroupSerializer(ModelSerializer):
            "parent",
            "parent_name",
            "users",
            "users_obj",
            "attributes",
            "roles",
            "roles_obj",
            "users_obj",
        ]
        extra_kwargs = {
            "users": {

@@ -139,13 +132,25 @@ class UserAccountSerializer(PassiveSerializer):
class GroupViewSet(UsedByMixin, ModelViewSet):
    """Group Viewset"""

    # pylint: disable=no-member
    queryset = Group.objects.all().select_related("parent").prefetch_related("users")
    serializer_class = GroupSerializer
    search_fields = ["name", "is_superuser"]
    filterset_class = GroupFilter
    ordering = ["name"]

    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
        for backend in list(self.filter_backends):
            if backend == ObjectPermissionsFilter:
                continue
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

    def filter_queryset(self, queryset):
        if self.request.user.has_perm("authentik_core.view_group"):
            return self._filter_queryset_for_list(queryset)
        return super().filter_queryset(queryset)

    @permission_required(None, ["authentik_core.add_user"])
    @extend_schema(
        request=UserAccountSerializer,
@@ -38,7 +38,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):

    managed = ReadOnlyField()
    component = SerializerMethodField()
    icon = ReadOnlyField(source="icon_url")
    icon = ReadOnlyField(source="get_icon")

    def get_component(self, obj: Source) -> str:
        """Get object component so that we know how to edit the object"""
@@ -47,7 +47,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
        attrs.setdefault("user", request.user)
        attrs.setdefault("intent", TokenIntents.INTENT_API)
        if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
            raise ValidationError({"intent": f"Invalid intent {attrs.get('intent')}"})
            raise ValidationError(f"Invalid intent {attrs.get('intent')}")
        return attrs

    class Meta:
@@ -1,140 +0,0 @@
"""transactional application and provider creation"""
from django.apps import apps
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from yaml import ScalarNode

from authentik.blueprints.v1.common import (
    Blueprint,
    BlueprintEntry,
    BlueprintEntryDesiredState,
    EntryInvalidError,
    KeyOf,
)
from authentik.blueprints.v1.importer import Importer
from authentik.core.api.applications import ApplicationSerializer
from authentik.core.api.utils import PassiveSerializer
from authentik.core.models import Provider
from authentik.lib.utils.reflection import all_subclasses


def get_provider_serializer_mapping():
    """Get a mapping of all providers' model names and their serializers"""
    mapping = {}
    for model in all_subclasses(Provider):
        if model._meta.abstract:
            continue
        mapping[f"{model._meta.app_label}.{model._meta.model_name}"] = model().serializer
    return mapping


@extend_schema_field(
    PolymorphicProxySerializer(
        component_name="model",
        serializers=get_provider_serializer_mapping,
        resource_type_field_name="provider_model",
    )
)
class TransactionProviderField(DictField):
    """Dictionary field which can hold provider creation data"""


class TransactionApplicationSerializer(PassiveSerializer):
    """Serializer for creating a provider and an application in one transaction"""

    app = ApplicationSerializer()
    provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
    provider = TransactionProviderField()

    _provider_model: type[Provider] = None

    def validate_provider_model(self, fq_model_name: str) -> str:
        """Validate that the model exists and is a provider"""
        if "." not in fq_model_name:
            raise ValidationError("Invalid provider model")
        try:
            app, _, model_name = fq_model_name.partition(".")
            model = apps.get_model(app, model_name)
            if not issubclass(model, Provider):
                raise ValidationError("Invalid provider model")
            self._provider_model = model
        except LookupError:
            raise ValidationError("Invalid provider model")
        return fq_model_name

    def validate(self, attrs: dict) -> dict:
        blueprint = Blueprint()
        blueprint.entries.append(
            BlueprintEntry(
                model=attrs["provider_model"],
                state=BlueprintEntryDesiredState.MUST_CREATED,
                identifiers={
                    "name": attrs["provider"]["name"],
                },
                # Must match the name of the field on `self`
                id="provider",
                attrs=attrs["provider"],
            )
        )
        app_data = attrs["app"]
        app_data["provider"] = KeyOf(None, ScalarNode(tag="", value="provider"))
        blueprint.entries.append(
            BlueprintEntry(
                model="authentik_core.application",
                state=BlueprintEntryDesiredState.MUST_CREATED,
                identifiers={
                    "slug": attrs["app"]["slug"],
                },
                attrs=app_data,
                # Must match the name of the field on `self`
                id="app",
            )
        )
        importer = Importer(blueprint, {})
        try:
            valid, _ = importer.validate(raise_validation_errors=True)
            if not valid:
                raise ValidationError("Invalid blueprint")
        except EntryInvalidError as exc:
            raise ValidationError(
                {
                    exc.entry_id: exc.validation_error.detail,
                }
            )
        return blueprint


class TransactionApplicationResponseSerializer(PassiveSerializer):
    """Transactional creation response"""

    applied = BooleanField()
    logs = ListField(child=CharField())


class TransactionalApplicationView(APIView):
    """Create provider and application and attach them in a single transaction"""

    # TODO: Migrate to a more specific permission
    permission_classes = [IsAdminUser]

    @extend_schema(
        request=TransactionApplicationSerializer(),
        responses={
            200: TransactionApplicationResponseSerializer(),
        },
    )
    def put(self, request: Request) -> Response:
        """Convert data into a blueprint, validate it and apply it"""
        data = TransactionApplicationSerializer(data=request.data)
        data.is_valid(raise_exception=True)

        importer = Importer(data.validated_data, {})
        applied = importer.apply()
        response = {"applied": False, "logs": []}
        response["applied"] = applied
        return Response(response, status=200)
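A hedged usage sketch for this endpoint, mirroring the payload of the test case further down; the host and the token are placeholders, and the URL prefix assumes the usual /api/v3/ mount point:

    import requests

    resp = requests.put(
        "https://authentik.example.com/api/v3/core/transactional/applications/",
        headers={"Authorization": "Bearer <api-token>"},  # placeholder token
        json={
            "app": {"name": "my-app", "slug": "my-app"},
            "provider_model": "authentik_providers_oauth2.oauth2provider",
            "provider": {
                "name": "my-app",
                "authorization_flow": "<flow-uuid>",  # placeholder UUID
            },
        },
    )
    # On success the view returns {"applied": true, "logs": []}
    print(resp.status_code, resp.json())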
@@ -73,11 +73,6 @@ class UsedByMixin:
        # but so we only apply them once, have a simple flag for the first object
        first_object = True

        # TODO: This will only return the used-by references that the user can see.
        # Either we have to leak model information here to not make the list
        # useless if the user doesn't have all permissions, or we need to double
        # query and check if there is a difference between models the user can see
        # and can't see, and add a warning
        for obj in get_objects_for_user(
            request.user, f"{app}.view_{model_name}", manager
        ).all():
@@ -7,6 +7,7 @@ from django.contrib.auth import update_session_auth_hash
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.core.cache import cache
from django.db.models.functions import ExtractHour
from django.db.models.query import QuerySet
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.urls import reverse_lazy

@@ -51,6 +52,7 @@ from rest_framework.serializers import (
)
from rest_framework.validators import UniqueValidator
from rest_framework.viewsets import ModelViewSet
from rest_framework_guardian.filters import ObjectPermissionsFilter
from structlog.stdlib import get_logger

from authentik.admin.api.metrics import CoordinateSerializer

@@ -121,35 +123,27 @@ class UserSerializer(ModelSerializer):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
            self.fields["password"] = CharField(required=False, allow_null=True)
            self.fields["password"] = CharField(required=False)

    def create(self, validated_data: dict) -> User:
        """If this serializer is used in the blueprint context, we allow for
        directly setting a password. However, this should be done via the `set_password`
        method instead of directly setting it like rest_framework."""
        password = validated_data.pop("password", None)
        instance: User = super().create(validated_data)
        self._set_password(instance, password)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context and "password" in validated_data:
            instance.set_password(validated_data["password"])
            instance.save()
        return instance

    def update(self, instance: User, validated_data: dict) -> User:
        """Same as `create` above, set the password directly if we're in a blueprint
        context"""
        password = validated_data.pop("password", None)
        instance = super().update(instance, validated_data)
        self._set_password(instance, password)
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context and "password" in validated_data:
            instance.set_password(validated_data["password"])
            instance.save()
        return instance

    def _set_password(self, instance: User, password: Optional[str]):
        """Set password of user if we're in a blueprint context, and if it's an empty
        string then use an unusable password"""
        if SERIALIZER_CONTEXT_BLUEPRINT in self.context and password:
            instance.set_password(password)
            instance.save()
        if len(instance.password) == 0:
            instance.set_unusable_password()
            instance.save()

    def validate_path(self, path: str) -> str:
        """Validate path"""
        if path[:1] == "/" or path[-1] == "/":
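The password semantics that `_set_password` relies on are Django's standard ones: set_password stores a salted hash, while set_unusable_password stores a marker that can never verify. A minimal sketch using the underlying hasher helpers:

    from django.contrib.auth.hashers import check_password, make_password

    # set_password(pw) stores make_password(pw); verification round-trips
    hashed = make_password("s3cret")
    assert check_password("s3cret", hashed)

    # set_unusable_password() stores make_password(None); nothing verifies against it
    unusable = make_password(None)
    assert not check_password("", unusable)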
@@ -171,11 +165,6 @@ class UserSerializer(ModelSerializer):
            raise ValidationError("Setting a user to internal service account is not allowed.")
        return user_type

    def validate(self, attrs: dict) -> dict:
        if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
            raise ValidationError("Can't modify internal service account users")
        return super().validate(attrs)

    class Meta:
        model = User
        fields = [

@@ -193,7 +182,6 @@ class UserSerializer(ModelSerializer):
            "uid",
            "path",
            "type",
            "uuid",
        ]
        extra_kwargs = {
            "name": {"allow_blank": True},

@@ -208,7 +196,6 @@ class UserSelfSerializer(ModelSerializer):
    groups = SerializerMethodField()
    uid = CharField(read_only=True)
    settings = SerializerMethodField()
    system_permissions = SerializerMethodField()

    @extend_schema_field(
        ListSerializer(

@@ -220,7 +207,7 @@ class UserSelfSerializer(ModelSerializer):
    )
    def get_groups(self, _: User):
        """Return only the group names a user is member of"""
        for group in self.instance.all_groups().order_by("name"):
        for group in self.instance.ak_groups.all():
            yield {
                "name": group.name,
                "pk": group.pk,

@@ -230,14 +217,6 @@ class UserSelfSerializer(ModelSerializer):
        """Get user settings with tenant and group settings applied"""
        return user.group_attributes(self._context["request"]).get("settings", {})

    def get_system_permissions(self, user: User) -> list[str]:
        """Get all system permissions assigned to the user"""
        return list(
            user.user_permissions.filter(
                content_type__app_label="authentik_rbac", content_type__model="systempermission"
            ).values_list("codename", flat=True)
        )

    class Meta:
        model = User
        fields = [

@@ -252,7 +231,6 @@ class UserSelfSerializer(ModelSerializer):
            "uid",
            "settings",
            "type",
            "system_permissions",
        ]
        extra_kwargs = {
            "is_active": {"read_only": True},

@@ -331,7 +309,7 @@ class UsersFilter(FilterSet):
    path = CharFilter(field_name="path")
    path_startswith = CharFilter(field_name="path", lookup_expr="startswith")

    type = MultipleChoiceFilter(choices=UserTypes.choices, field_name="type")
    type = MultipleChoiceFilter(field_name="type")

    groups_by_name = ModelMultipleChoiceFilter(
        field_name="ak_groups__name",

@@ -630,10 +608,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
        if not request.user.has_perm("impersonate"):
            LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
            return Response(status=401)

        user_to_be = self.get_object()
        if user_to_be.pk == self.request.user.pk:
            LOGGER.debug("User attempted to impersonate themselves", user=request.user)
            return Response(status=401)

        request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
        request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be

@@ -667,6 +643,19 @@ class UserViewSet(UsedByMixin, ModelViewSet):

        return Response(status=204)

    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
        for backend in list(self.filter_backends):
            if backend == ObjectPermissionsFilter:
                continue
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

    def filter_queryset(self, queryset):
        if self.request.user.has_perm("authentik_core.view_user"):
            return self._filter_queryset_for_list(queryset)
        return super().filter_queryset(queryset)

    @extend_schema(
        responses={
            200: inline_serializer(
@@ -44,7 +44,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
        if request:
            req.http_request = request
        self._context["request"] = req
        req.context.update(**kwargs)
        self._context.update(**kwargs)
        self.dry_run = dry_run
@@ -1,9 +0,0 @@
"""custom runserver command"""
from daphne.management.commands.runserver import Command as RunServer


class Command(RunServer):
    """custom runserver command, which doesn't show the misleading django startup message"""

    def on_bind(self, server_port):
        pass
@@ -16,16 +16,7 @@ LOGGER = get_logger()
class Command(BaseCommand):
    """Run worker"""

    def add_arguments(self, parser):
        parser.add_argument(
            "-b",
            "--beat",
            action="store_false",
            help="When set, this worker will _not_ run Beat (scheduled) tasks",
        )

    def handle(self, **options):
        LOGGER.debug("Celery options", **options)
        close_old_connections()
        if CONFIG.get_bool("remote_debug"):
            import debugpy

@@ -35,9 +26,10 @@ class Command(BaseCommand):
            no_color=False,
            quiet=True,
            optimization="fair",
            autoscale=(CONFIG.get_int("worker.concurrency"), 1),
            max_tasks_per_child=1,
            autoscale=(3, 1),
            task_events=True,
            beat=options.get("beat", True),
            beat=True,
            schedule_filename=f"{tempdir}/celerybeat-schedule",
            queues=["authentik", "authentik_scheduled", "authentik_events"],
        )
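For reference, autoscale is a (max, min) pair of concurrent worker processes. A hedged sketch of launching an equivalent worker through Celery's Python entry point; the app name is illustrative, authentik wires up its own Celery instance:

    from celery import Celery

    app = Celery("authentik")  # illustrative; normally the configured app is imported
    app.worker_main(
        argv=[
            "worker",
            "--autoscale=3,1",  # scale between 1 and 3 processes
            "--queues=authentik,authentik_scheduled,authentik_events",
            "--task-events",
        ]
    )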
@@ -1,45 +0,0 @@
# Generated by Django 4.2.6 on 2023-10-11 13:37

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_core", "0031_alter_user_type"),
        ("authentik_rbac", "0001_initial"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="group",
            options={"verbose_name": "Group", "verbose_name_plural": "Groups"},
        ),
        migrations.AlterModelOptions(
            name="token",
            options={
                "permissions": [("view_token_key", "View token's key")],
                "verbose_name": "Token",
                "verbose_name_plural": "Tokens",
            },
        ),
        migrations.AlterModelOptions(
            name="user",
            options={
                "permissions": [
                    ("reset_user_password", "Reset Password"),
                    ("impersonate", "Can impersonate other users"),
                    ("assign_user_permissions", "Can assign permissions to users"),
                    ("unassign_user_permissions", "Can unassign permissions from users"),
                ],
                "verbose_name": "User",
                "verbose_name_plural": "Users",
            },
        ),
        migrations.AddField(
            model_name="group",
            name="roles",
            field=models.ManyToManyField(
                blank=True, related_name="ak_groups", to="authentik_rbac.role"
            ),
        ),
    ]
@@ -1,7 +1,7 @@
"""authentik core models"""
from datetime import timedelta
from hashlib import sha256
from typing import Any, Optional, Self
from typing import Any, Optional
from uuid import uuid4

from deepmerge import always_merger

@@ -88,8 +88,6 @@ class Group(SerializerModel):
        default=False, help_text=_("Users added to this group will be superusers.")
    )

    roles = models.ManyToManyField("authentik_rbac.Role", related_name="ak_groups", blank=True)

    parent = models.ForeignKey(
        "Group",
        blank=True,
@@ -115,39 +113,27 @@ class Group(SerializerModel):

    def is_member(self, user: "User") -> bool:
        """Recursively check if `user` is member of us, or any parent."""
        return user.all_groups().filter(group_uuid=self.group_uuid).exists()

    def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]:
        """Recursively get all groups that have this as parent or are indirectly related"""
        direct_groups = []
        if isinstance(self, QuerySet):
            direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator())
        else:
            direct_groups = [self.pk]
        if len(direct_groups) < 1:
            return Group.objects.none()
        query = """
        WITH RECURSIVE parents AS (
            SELECT authentik_core_group.*, 0 AS relative_depth
            FROM authentik_core_group
            WHERE authentik_core_group.group_uuid = ANY(%s)
            WHERE authentik_core_group.group_uuid = %s

            UNION ALL

            SELECT authentik_core_group.*, parents.relative_depth + 1
            FROM authentik_core_group, parents
            SELECT authentik_core_group.*, parents.relative_depth - 1
            FROM authentik_core_group, parents
            WHERE (
                authentik_core_group.group_uuid = parents.parent_id and
                parents.relative_depth < 20
                authentik_core_group.parent_id = parents.group_uuid and
                parents.relative_depth > -20
            )
        )
        SELECT group_uuid
        FROM parents
        GROUP BY group_uuid, name
        ORDER BY name;
        GROUP BY group_uuid;
        """
        group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
        return Group.objects.filter(pk__in=group_pks)
        groups = Group.objects.raw(query, [self.group_uuid])
        return user.ak_groups.filter(pk__in=[group.pk for group in groups]).exists()

    def __str__(self):
        return f"Group {self.name}"
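In plain terms, the recursive CTE starts from a set of groups and repeatedly joins each row to the group named by its parent_id, with the relative_depth counter capping the walk at 20 levels. A minimal pure-Python illustration of the same traversal (the parent map and names are illustrative):

    def ancestors(parent_of: dict[str, str | None], start: set[str], max_depth: int = 20) -> set[str]:
        """Collect `start` plus every group reachable by following parent edges."""
        seen = set(start)
        frontier = set(start)
        depth = 0
        while frontier and depth < max_depth:
            # One step up the parent chain, skipping roots and already-seen groups
            frontier = {parent_of[g] for g in frontier if parent_of.get(g)} - seen
            seen |= frontier
            depth += 1
        return seen

    parent_of = {"child": "parent", "parent": None}
    assert ancestors(parent_of, {"child"}) == {"child", "parent"}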
@@ -159,8 +145,6 @@ class Group(SerializerModel):
                "parent",
            ),
        )
        verbose_name = _("Group")
        verbose_name_plural = _("Groups")


class UserManager(DjangoUserManager):
@@ -192,19 +176,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
        """Get the default user path"""
        return User._meta.get_field("path").default

    def all_groups(self) -> QuerySet[Group]:
        """Recursively get all groups this user is a member of.
        At least one query is done to get the direct groups of the user; with groups
        there are at most 3 queries done"""
        return Group.children_recursive(self.ak_groups.all())

    def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]:
        """Get a dictionary containing the attributes from all groups the user belongs to,
        including the user's attributes"""
        final_attributes = {}
        if request and hasattr(request, "tenant"):
            always_merger.merge(final_attributes, request.tenant.attributes)
        for group in self.all_groups().order_by("name"):
        for group in self.ak_groups.all().order_by("name"):
            always_merger.merge(final_attributes, group.attributes)
        always_merger.merge(final_attributes, self.attributes)
        return final_attributes
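The merge order matters here: tenant attributes first, then each group in name order, then the user's own attributes, so later sources win on conflicts while nested keys are combined. A minimal sketch with deepmerge (values are illustrative):

    from deepmerge import always_merger

    final = {}
    sources = (
        {"theme": {"base": "dark"}},     # e.g. tenant attributes
        {"theme": {"accent": "blue"}},   # e.g. a group's attributes
        {"theme": {"base": "light"}},    # e.g. the user's own attributes
    )
    for source in sources:
        always_merger.merge(final, source)  # mutates `final` in place
    assert final == {"theme": {"base": "light", "accent": "blue"}}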
@@ -218,7 +196,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
    @cached_property
    def is_superuser(self) -> bool:
        """Get superuser status based on membership in a group with superuser status"""
        return self.all_groups().filter(is_superuser=True).exists()
        return self.ak_groups.filter(is_superuser=True).exists()

    @property
    def is_staff(self) -> bool:

@@ -271,14 +249,12 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
        return get_avatar(self)

    class Meta:
        permissions = (
            ("reset_user_password", "Reset Password"),
            ("impersonate", "Can impersonate other users"),
        )
        verbose_name = _("User")
        verbose_name_plural = _("Users")
        permissions = [
            ("reset_user_password", _("Reset Password")),
            ("impersonate", _("Can impersonate other users")),
            ("assign_user_permissions", _("Can assign permissions to users")),
            ("unassign_user_permissions", _("Can unassign permissions from users")),
        ]


class Provider(SerializerModel):

@@ -687,7 +663,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
            models.Index(fields=["identifier"]),
            models.Index(fields=["key"]),
        ]
        permissions = [("view_token_key", _("View token's key"))]
        permissions = (("view_token_key", "View token's key"),)


class PropertyMapping(SerializerModel, ManagedModel):
@@ -7,7 +7,6 @@ from django.db.models import Model
from django.db.models.signals import post_save, pre_delete, pre_save
from django.dispatch import receiver
from django.http.request import HttpRequest
from structlog.stdlib import get_logger

from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User


@@ -16,8 +15,6 @@ password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal()

LOGGER = get_logger()


@receiver(post_save, sender=Application)
def post_save_application(sender: type[Model], instance, created: bool, **_):
@@ -48,7 +48,7 @@ class Action(Enum):
class MessageStage(StageView):
    """Show a pre-configured message after the flow is done"""

    def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Show a pre-configured message after the flow is done"""
        message = getattr(self.executor.current_stage, "message", "")
        level = getattr(self.executor.current_stage, "level", messages.SUCCESS)

@@ -59,6 +59,10 @@ class MessageStage(StageView):
        )
        return self.executor.stage_ok()

    def post(self, request: HttpRequest) -> HttpResponse:
        """Wrapper for post requests"""
        return self.get(request)


class SourceFlowManager:
    """Help sources decide what they should do after authorization. Based on source settings and

@@ -97,7 +101,6 @@ class SourceFlowManager:
        if self.request.user.is_authenticated:
            new_connection.user = self.request.user
            new_connection = self.update_connection(new_connection, **kwargs)
            # pylint: disable=no-member
            new_connection.save()
            return Action.LINK, new_connection
@@ -13,7 +13,7 @@ class PostUserEnrollmentStage(StageView):
    """Dynamically injected stage which saves the Connection after
    the user has been enrolled."""

    def dispatch(self, request: HttpRequest) -> HttpResponse:
    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Stage used after the user has been enrolled"""
        connection: UserSourceConnection = self.executor.plan.context[
            PLAN_CONTEXT_SOURCES_CONNECTION

@@ -27,3 +27,7 @@ class PostUserEnrollmentStage(StageView):
            source=connection.source,
        ).from_http(self.request)
        return self.executor.stage_ok()

    def post(self, request: HttpRequest) -> HttpResponse:
        """Wrapper for post requests"""
        return self.get(request)
@@ -13,6 +13,7 @@
    {% block head_before %}
    {% endblock %}
    <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
    <link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
    <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
    <script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script>
    <script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script>

@@ -16,8 +16,8 @@ You've logged out of {{ application }}.
{% block card %}
<form method="POST" class="pf-c-form">
    <p>
        {% blocktrans with application=application.name branding_title=tenant.branding_title %}
        You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
        {% blocktrans with application=application.name %}
        You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your authentik account.
        {% endblocktrans %}
    </p>

@@ -6,7 +6,6 @@
{% block head_before %}
<link rel="prefetch" href="/static/dist/assets/images/flow_background.jpg" />
<link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %}
{% endblock %}

@@ -79,6 +78,7 @@
</main>
{% endblock %}
<footer class="pf-c-login__footer">
    <p></p>
    <ul class="pf-c-list pf-m-inline">
        {% for link in footer_links %}
        <li>
@@ -13,9 +13,7 @@ class TestGroups(TestCase):
        user = User.objects.create(username=generate_id())
        user2 = User.objects.create(username=generate_id())
        group = Group.objects.create(name=generate_id())
        other_group = Group.objects.create(name=generate_id())
        group.users.add(user)
        other_group.users.add(user)
        self.assertTrue(group.is_member(user))
        self.assertFalse(group.is_member(user2))

@@ -23,26 +21,22 @@ class TestGroups(TestCase):
        """Test parent membership"""
        user = User.objects.create(username=generate_id())
        user2 = User.objects.create(username=generate_id())
        parent = Group.objects.create(name=generate_id())
        child = Group.objects.create(name=generate_id(), parent=parent)
        child.users.add(user)
        self.assertTrue(child.is_member(user))
        self.assertTrue(parent.is_member(user))
        self.assertFalse(child.is_member(user2))
        self.assertFalse(parent.is_member(user2))
        first = Group.objects.create(name=generate_id())
        second = Group.objects.create(name=generate_id(), parent=first)
        second.users.add(user)
        self.assertTrue(first.is_member(user))
        self.assertFalse(first.is_member(user2))

    def test_group_membership_parent_extra(self):
        """Test parent membership"""
        user = User.objects.create(username=generate_id())
        user2 = User.objects.create(username=generate_id())
        parent = Group.objects.create(name=generate_id())
        second = Group.objects.create(name=generate_id(), parent=parent)
        first = Group.objects.create(name=generate_id())
        second = Group.objects.create(name=generate_id(), parent=first)
        third = Group.objects.create(name=generate_id(), parent=second)
        second.users.add(user)
        self.assertTrue(parent.is_member(user))
        self.assertFalse(parent.is_member(user2))
        self.assertTrue(second.is_member(user))
        self.assertFalse(second.is_member(user2))
        self.assertTrue(first.is_member(user))
        self.assertFalse(first.is_member(user2))
        self.assertFalse(third.is_member(user))
        self.assertFalse(third.is_member(user2))
@@ -6,7 +6,6 @@ from rest_framework.test import APITestCase

from authentik.core.models import User
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.config import CONFIG


class TestImpersonation(APITestCase):

@@ -47,42 +46,12 @@ class TestImpersonation(APITestCase):
        """test impersonation without permissions"""
        self.client.force_login(self.other_user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
        )
        self.assertEqual(response.status_code, 403)
        self.client.get(reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}))

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
        self.assertEqual(response_body["user"]["username"], self.other_user.username)

    @CONFIG.patch("impersonation", False)
    def test_impersonate_disabled(self):
        """test impersonation that is disabled"""
        self.client.force_login(self.user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk})
        )
        self.assertEqual(response.status_code, 401)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
        self.assertEqual(response_body["user"]["username"], self.user.username)

    def test_impersonate_self(self):
        """test impersonation that user can't impersonate themselves"""
        self.client.force_login(self.user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
        )
        self.assertEqual(response.status_code, 401)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
        self.assertEqual(response_body["user"]["username"], self.user.username)

    def test_un_impersonate_empty(self):
        """test un-impersonation without impersonating first"""
        self.client.force_login(self.other_user)
@@ -1,64 +0,0 @@
"""Test Transactional API"""
from django.urls import reverse
from rest_framework.test import APITestCase

from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import OAuth2Provider


class TestTransactionalApplicationsAPI(APITestCase):
    """Test Transactional API"""

    def setUp(self) -> None:
        self.user = create_test_admin_user()

    def test_create_transactional(self):
        """Test transactional Application + provider creation"""
        self.client.force_login(self.user)
        uid = generate_id()
        authorization_flow = create_test_flow()
        response = self.client.put(
            reverse("authentik_api:core-transactional-application"),
            data={
                "app": {
                    "name": uid,
                    "slug": uid,
                },
                "provider_model": "authentik_providers_oauth2.oauth2provider",
                "provider": {
                    "name": uid,
                    "authorization_flow": str(authorization_flow.pk),
                },
            },
        )
        self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
        provider = OAuth2Provider.objects.filter(name=uid).first()
        self.assertIsNotNone(provider)
        app = Application.objects.filter(slug=uid).first()
        self.assertIsNotNone(app)
        self.assertEqual(app.provider.pk, provider.pk)

    def test_create_transactional_invalid(self):
        """Test transactional Application + provider creation"""
        self.client.force_login(self.user)
        uid = generate_id()
        response = self.client.put(
            reverse("authentik_api:core-transactional-application"),
            data={
                "app": {
                    "name": uid,
                    "slug": uid,
                },
                "provider_model": "authentik_providers_oauth2.oauth2provider",
                "provider": {
                    "name": uid,
                    "authorization_flow": "",
                },
            },
        )
        self.assertJSONEqual(
            response.content.decode(),
            {"provider": {"authorization_flow": ["This field may not be null."]}},
        )
@@ -28,19 +28,6 @@ class TestUsersAPI(APITestCase):
        self.admin = create_test_admin_user()
        self.user = User.objects.create(username="test-user")

    def test_filter_type(self):
        """Test API filtering by type"""
        self.client.force_login(self.admin)
        user = create_test_admin_user(type=UserTypes.EXTERNAL)
        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "type": UserTypes.EXTERNAL,
                "username": user.username,
            },
        )
        self.assertEqual(response.status_code, 200)

    def test_metrics(self):
        """Test user's metrics"""
        self.client.force_login(self.admin)
@@ -21,34 +21,27 @@ def create_test_flow(
    )


def create_test_user(name: Optional[str] = None, **kwargs) -> User:
    """Generate a test user"""
def create_test_admin_user(name: Optional[str] = None) -> User:
    """Generate a test-admin user"""
    uid = generate_id(20) if not name else name
    kwargs.setdefault("email", f"{uid}@goauthentik.io")
    kwargs.setdefault("username", uid)
    group = Group.objects.create(name=uid, is_superuser=True)
    user: User = User.objects.create(
        username=uid,
        name=uid,
        **kwargs,
        email=f"{uid}@goauthentik.io",
    )
    user.set_password(uid)
    user.save()
    return user


def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
    """Generate a test-admin user"""
    user = create_test_user(name, **kwargs)
    group = Group.objects.create(name=user.name or name, is_superuser=True)
    group.users.add(user)
    return user


def create_test_tenant(**kwargs) -> Tenant:
def create_test_tenant() -> Tenant:
    """Generate a test tenant, removing all other tenants to make sure this one
    matches."""
    uid = generate_id(20)
    Tenant.objects.all().delete()
    return Tenant.objects.create(domain=uid, default=True, **kwargs)
    return Tenant.objects.create(domain=uid, default=True)


def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair:
@@ -15,7 +15,6 @@ from authentik.core.api.propertymappings import PropertyMappingViewSet
from authentik.core.api.providers import ProviderViewSet
from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.transactional_applications import TransactionalApplicationView
from authentik.core.api.users import UserViewSet
from authentik.core.views import apps
from authentik.core.views.debug import AccessDeniedView

@@ -71,11 +70,6 @@ urlpatterns = [
api_urlpatterns = [
    ("core/authenticated_sessions", AuthenticatedSessionViewSet),
    ("core/applications", ApplicationViewSet),
    path(
        "core/transactional/applications/",
        TransactionalApplicationView.as_view(),
        name="core-transactional-application",
    ),
    ("core/groups", GroupViewSet),
    ("core/users", UserViewSet),
    ("core/tokens", TokenViewSet),
@@ -189,8 +189,6 @@ class CertificateKeyPairFilter(FilterSet):

    def filter_has_key(self, queryset, name, value):  # pragma: no cover
        """Only return certificate-key pairs with keys"""
        if not value:
            return queryset
        return queryset.exclude(key_data__exact="")

    class Meta:
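For context, this is the standard django-filter method-filter pattern: a declared filter dispatches the query parameter to a method on the FilterSet. A minimal sketch (the FilterSet name is illustrative):

    from django_filters.filters import BooleanFilter
    from django_filters.filterset import FilterSet


    class ExampleFilter(FilterSet):
        # ?has_key=true routes the parsed boolean to filter_has_key below
        has_key = BooleanFilter(method="filter_has_key")

        def filter_has_key(self, queryset, name, value):
            if not value:
                return queryset
            return queryset.exclude(key_data__exact="")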
@@ -1,10 +1,13 @@
"""authentik crypto app config"""
from datetime import datetime
from typing import Optional
from typing import TYPE_CHECKING, Optional

from authentik.blueprints.apps import ManagedAppConfig
from authentik.lib.generators import generate_id

if TYPE_CHECKING:
    from authentik.crypto.models import CertificateKeyPair

MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"


@@ -20,37 +23,33 @@ class AuthentikCryptoConfig(ManagedAppConfig):
        """Load crypto tasks"""
        self.import_module("authentik.crypto.tasks")

    def _create_update_cert(self):
    def _create_update_cert(self, cert: Optional["CertificateKeyPair"] = None):
        from authentik.crypto.builder import CertificateBuilder
        from authentik.crypto.models import CertificateKeyPair

        common_name = "authentik Internal JWT Certificate"
        builder = CertificateBuilder(common_name)
        builder = CertificateBuilder("authentik Internal JWT Certificate")
        builder.build(
            subject_alt_names=["goauthentik.io"],
            validity_days=360,
        )
        CertificateKeyPair.objects.update_or_create(
            managed=MANAGED_KEY,
            defaults={
                "name": common_name,
                "certificate_data": builder.certificate,
                "key_data": builder.private_key,
            },
        )
        if not cert:
            cert = CertificateKeyPair()
        builder.cert = cert
        builder.cert.managed = MANAGED_KEY
        builder.save()

    def reconcile_managed_jwt_cert(self):
        """Ensure managed JWT certificate"""
        from authentik.crypto.models import CertificateKeyPair

        cert: Optional[CertificateKeyPair] = CertificateKeyPair.objects.filter(
            managed=MANAGED_KEY
        ).first()
        now = datetime.now()
        if not cert or (
            now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after
        ):
        certs = CertificateKeyPair.objects.filter(managed=MANAGED_KEY)
        if not certs.exists():
            self._create_update_cert()
            return
        cert: CertificateKeyPair = certs.first()
        now = datetime.now()
        if now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after:
            self._create_update_cert(cert)

    def reconcile_self_signed(self):
        """Create self-signed keypair"""
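The validity-window test above is a plain comparison against the certificate's not_valid_before/not_valid_after attributes. A self-contained sketch using the cryptography package, run against a freshly generated self-signed certificate (all names and lifetimes illustrative):

    from datetime import datetime, timedelta

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example")])
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.now() - timedelta(days=1))
        .not_valid_after(datetime.now() + timedelta(days=360))
        .sign(key, hashes.SHA256())
    )

    now = datetime.now()
    # Rotate if we are outside the certificate's validity window
    needs_rotation = now < cert.not_valid_before or now > cert.not_valid_after
    assert not needs_rotation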
@@ -62,10 +61,4 @@ class AuthentikCryptoConfig(ManagedAppConfig):
            return
        builder = CertificateBuilder(name)
        builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
        CertificateKeyPair.objects.get_or_create(
            name=name,
            defaults={
                "certificate_data": builder.certificate,
                "key_data": builder.private_key,
            },
        )
        builder.save()
@@ -128,26 +128,8 @@ class TestCrypto(APITestCase):
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name},
        )
        self.assertEqual(200, response.status_code)
        body = loads(response.content.decode())
        api_cert = [x for x in body["results"] if x["name"] == cert.name][0]
        self.assertEqual(api_cert["fingerprint_sha1"], cert.fingerprint_sha1)
        self.assertEqual(api_cert["fingerprint_sha256"], cert.fingerprint_sha256)

    def test_list_has_key_false(self):
        """Test API List with has_key set to false"""
        cert = create_test_cert()
        cert.key_data = ""
        cert.save()
        self.client.force_login(create_test_admin_user())
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name, "has_key": False},
        )
            )
            + f"?name={cert.name}"
        )
        self.assertEqual(200, response.status_code)
        body = loads(response.content.decode())

@@ -162,8 +144,8 @@ class TestCrypto(APITestCase):
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name, "include_details": False},
        )
            )
            + f"?name={cert.name}&include_details=false"
        )
        self.assertEqual(200, response.status_code)
        body = loads(response.content.decode())

@@ -186,8 +168,8 @@ class TestCrypto(APITestCase):
            reverse(
                "authentik_api:certificatekeypair-view-certificate",
                kwargs={"pk": keypair.pk},
            ),
            data={"download": True},
        )
            )
            + "?download",
        )
        self.assertEqual(200, response.status_code)
        self.assertIn("Content-Disposition", response)

@@ -207,8 +189,8 @@ class TestCrypto(APITestCase):
            reverse(
                "authentik_api:certificatekeypair-view-private-key",
                kwargs={"pk": keypair.pk},
            ),
            data={"download": True},
        )
            )
            + "?download",
        )
        self.assertEqual(200, response.status_code)
        self.assertIn("Content-Disposition", response)

@@ -218,7 +200,7 @@ class TestCrypto(APITestCase):
        self.client.force_login(create_test_admin_user())
        keypair = create_test_cert()
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            name="test",
            client_id="test",
            client_secret=generate_key(),
            authorization_flow=create_test_flow(),
@@ -6,7 +6,7 @@ from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.fields import BooleanField, CharField, DateTimeField, IntegerField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ModelSerializer

@@ -35,13 +35,13 @@ class LicenseSerializer(ModelSerializer):
            "name",
            "key",
            "expiry",
            "internal_users",
            "users",
            "external_users",
        ]
        extra_kwargs = {
            "name": {"read_only": True},
            "expiry": {"read_only": True},
            "internal_users": {"read_only": True},
            "users": {"read_only": True},
            "external_users": {"read_only": True},
        }

@@ -49,7 +49,7 @@ class LicenseSerializer(ModelSerializer):
class LicenseSummary(PassiveSerializer):
    """Serializer for license status"""

    internal_users = IntegerField(required=True)
    users = IntegerField(required=True)
    external_users = IntegerField(required=True)
    valid = BooleanField()
    show_admin_warning = BooleanField()

@@ -62,9 +62,9 @@ class LicenseSummary(PassiveSerializer):
class LicenseForecastSerializer(PassiveSerializer):
    """Serializer for license forecast"""

    internal_users = IntegerField(required=True)
    users = IntegerField(required=True)
    external_users = IntegerField(required=True)
    forecasted_internal_users = IntegerField(required=True)
    forecasted_users = IntegerField(required=True)
    forecasted_external_users = IntegerField(required=True)


@@ -84,7 +84,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
            200: inline_serializer("InstallIDSerializer", {"install_id": CharField(required=True)}),
        },
    )
    @action(detail=False, methods=["GET"])
    @action(detail=False, methods=["GET"], permission_classes=[IsAdminUser])
    def get_install_id(self, request: Request) -> Response:
        """Get install_id"""
        return Response(

@@ -111,7 +111,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
        latest_valid = datetime.fromtimestamp(total.exp)
        response = LicenseSummary(
            data={
                "internal_users": total.internal_users,
                "users": total.users,
                "external_users": total.external_users,
                "valid": total.is_valid(),
                "show_admin_warning": show_admin_warning,

@@ -135,8 +135,8 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
    def forecast(self, request: Request) -> Response:
        """Forecast how many users will be required in a year"""
        last_month = now() - timedelta(days=30)
        # Forecast for internal users
        internal_in_last_month = User.objects.filter(
        # Forecast for default users
        users_in_last_month = User.objects.filter(
            type=UserTypes.INTERNAL, date_joined__gte=last_month
        ).count()
        # Forecast for external users

@@ -144,9 +144,9 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
        forecast_for_months = 12
        response = LicenseForecastSerializer(
            data={
                "internal_users": LicenseKey.get_default_user_count(),
                "users": LicenseKey.get_default_user_count(),
                "external_users": LicenseKey.get_external_user_count(),
                "forecasted_internal_users": (internal_in_last_month * forecast_for_months),
                "forecasted_users": (users_in_last_month * forecast_for_months),
                "forecasted_external_users": (external_in_last_month * forecast_for_months),
            }
        )
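The forecast is simple linear extrapolation: count sign-ups in the trailing 30 days and multiply by 12 months. A minimal sketch of the arithmetic:

    from datetime import datetime, timedelta

    def forecast(joined: list[datetime], now: datetime, months: int = 12) -> int:
        """Extrapolate a year of growth from the last 30 days of sign-ups."""
        last_month = now - timedelta(days=30)
        return sum(1 for dt in joined if dt >= last_month) * months

    # e.g. 5 users joined in the last 30 days -> forecast of 60 for the year
    now = datetime.now()
    assert forecast([now - timedelta(days=i) for i in range(5)], now) == 60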
@@ -1,40 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-23 10:06

import django.contrib.postgres.indexes
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("authentik_enterprise", "0001_initial"),
    ]

    operations = [
        migrations.RenameField(
            model_name="license",
            old_name="users",
            new_name="internal_users",
        ),
        migrations.AlterField(
            model_name="license",
            name="key",
            field=models.TextField(),
        ),
        migrations.AddIndex(
            model_name="license",
            index=django.contrib.postgres.indexes.HashIndex(
                fields=["key"], name="authentik_e_key_523e13_hash"
            ),
        ),
        migrations.AlterModelOptions(
            name="licenseusage",
            options={
                "verbose_name": "License Usage",
                "verbose_name_plural": "License Usage Records",
            },
        ),
        migrations.AlterModelOptions(
            name="license",
            options={"verbose_name": "License", "verbose_name_plural": "Licenses"},
        ),
    ]
@ -11,18 +11,14 @@ from uuid import uuid4
from cryptography.exceptions import InvalidSignature
from cryptography.x509 import Certificate, load_der_x509_certificate, load_pem_x509_certificate
from dacite import from_dict
from django.contrib.postgres.indexes import HashIndex
from django.db import models
from django.db.models.query import QuerySet
from django.utils.timezone import now
from django.utils.translation import gettext as _
from guardian.shortcuts import get_anonymous_user
from jwt import PyJWTError, decode, get_unverified_header
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer

from authentik.core.models import ExpiringModel, User, UserTypes
from authentik.lib.models import SerializerModel
from authentik.root.install_id import get_install_id
@ -50,8 +46,8 @@ class LicenseKey:
    exp: int

    name: str
    internal_users: int = 0
    external_users: int = 0
    users: int
    external_users: int
    flags: list[LicenseFlags] = field(default_factory=list)

    @staticmethod
@ -91,7 +87,7 @@ class LicenseKey:
        active_licenses = License.objects.filter(expiry__gte=now())
        total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
        for lic in active_licenses:
            total.internal_users += lic.internal_users
            total.users += lic.users
            total.external_users += lic.external_users
            exp_ts = int(mktime(lic.expiry.timetuple()))
            if total.exp == 0:
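
get_total() folds every active license into one synthetic LicenseKey, summing the per-license user counts and merging the expiry timestamps. A self-contained sketch of the same fold, with simplified types and an illustrative expiry rule (the real merge logic is cut off by the hunk):

from dataclasses import dataclass

@dataclass
class Entitlement:
    exp: int  # unix timestamp
    users: int
    external_users: int

def summarize(licenses: list[Entitlement]) -> Entitlement:
    """Fold a list of active licenses into one summary entitlement."""
    total = Entitlement(exp=0, users=0, external_users=0)
    for lic in licenses:
        total.users += lic.users
        total.external_users += lic.external_users
        # Assumed rule: keep the soonest expiry, so the summary is conservative.
        total.exp = lic.exp if total.exp == 0 else min(total.exp, lic.exp)
    return total
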
@ -127,7 +123,7 @@ class LicenseKey:

        Only checks the current count, no historical data is checked"""
        default_users = self.get_default_user_count()
        if default_users > self.internal_users:
        if default_users > self.users:
            return False
        active_users = self.get_external_user_count()
        if active_users > self.external_users:
@ -136,9 +132,6 @@ class LicenseKey:

    def record_usage(self):
        """Capture the current validity status and metrics and save them"""
        threshold = now() - timedelta(hours=8)
        if LicenseUsage.objects.filter(record_date__gte=threshold).exists():
            return
        LicenseUsage.objects.create(
            user_count=self.get_default_user_count(),
            external_user_count=self.get_external_user_count(),
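
The threshold check is a simple write throttle: skip the insert when a usage record newer than eight hours already exists, so the periodic task cannot flood the table. The same guard as a standalone sketch (illustrative names):

from datetime import datetime, timedelta, timezone

def should_record(last_record: datetime | None, min_interval: timedelta = timedelta(hours=8)) -> bool:
    """Return True when enough time has passed since the last usage record."""
    if last_record is None:
        return True
    return datetime.now(timezone.utc) - last_record >= min_interval
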
@ -156,33 +149,22 @@ class LicenseKey:
        return usage.record_date


class License(SerializerModel):
class License(models.Model):
    """An authentik enterprise license"""

    license_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    key = models.TextField()
    key = models.TextField(unique=True)

    name = models.TextField()
    expiry = models.DateTimeField()
    internal_users = models.BigIntegerField()
    users = models.BigIntegerField()
    external_users = models.BigIntegerField()

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.api import LicenseSerializer

        return LicenseSerializer

    @property
    def status(self) -> LicenseKey:
        """Get parsed license status"""
        return LicenseKey.validate(self.key)

    class Meta:
        indexes = (HashIndex(fields=("key",)),)
        verbose_name = _("License")
        verbose_name_plural = _("Licenses")


def usage_expiry():
    """Keep license usage records for 3 months"""
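
usage_expiry() feeds the expiry default of the ExpiringModel base, which is how the three-month retention is enforced. A hedged sketch of what such a callable plausibly looks like (its body is cut off by the hunk, so this is an assumption):

from datetime import timedelta

from django.utils.timezone import now

def usage_expiry():
    """Keep license usage records for 3 months (assumed: 30-day months)."""
    return now() + timedelta(days=30 * 3)
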
@ -201,7 +183,3 @@ class LicenseUsage(ExpiringModel):
    within_limits = models.BooleanField()

    record_date = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name = _("License Usage")
        verbose_name_plural = _("License Usage Records")
@ -1,30 +1,44 @@
"""Enterprise license policies"""
from typing import Optional

from rest_framework.serializers import BaseSerializer

from authentik.core.models import User, UserTypes
from authentik.enterprise.models import LicenseKey
from authentik.policies.models import Policy
from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.policies.views import PolicyAccessView


class EnterprisePolicy(Policy):
    """Check that a user is correctly licensed for the request"""

    @property
    def component(self) -> str:
        return ""

    @property
    def serializer(self) -> type[BaseSerializer]:
        raise NotImplementedError

    def passes(self, request: PolicyRequest) -> PolicyResult:
        if not LicenseKey.get_total().is_valid():
            return PolicyResult(False)
        if request.user.type != UserTypes.INTERNAL:
            return PolicyResult(False)
        return PolicyResult(True)


class EnterprisePolicyAccessView(PolicyAccessView):
    """PolicyAccessView which also checks enterprise licensing"""

    def check_license(self):
        """Check license"""
        if not LicenseKey.get_total().is_valid():
            return False
        if self.request.user.type != UserTypes.INTERNAL:
            return False
        return True

    def user_has_access(self, user: Optional[User] = None) -> PolicyResult:
        user = user or self.request.user
        request = PolicyRequest(user)
        request.http_request = self.request
        result = super().user_has_access(user)
        enterprise_result = self.check_license()
        if not enterprise_result:
        enterprise_result = EnterprisePolicy().passes(request)
        if not enterprise_result.passing:
            return enterprise_result
        return result
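
Both variants layer the same two checks on top of the normal policy engine result: the summed license must be valid, and the requesting user must be an internal user. One side expresses this as a plain boolean helper, the other as a reusable Policy returning a PolicyResult. A standalone sketch of the layered check (simplified types, illustrative names):

from dataclasses import dataclass

@dataclass
class CheckResult:
    passing: bool
    reason: str = ""

def licensed_access(base: CheckResult, license_valid: bool, is_internal: bool) -> CheckResult:
    """Deny early on licensing, otherwise defer to the base policy result."""
    if not license_valid:
        return CheckResult(False, "license invalid or user limits exceeded")
    if not is_internal:
        return CheckResult(False, "external users are not covered by this view")
    return base
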
@ -6,7 +6,7 @@ from authentik.lib.utils.time import fqdn_rand
CELERY_BEAT_SCHEDULE = {
    "enterprise_calculate_license": {
        "task": "authentik.enterprise.tasks.calculate_license",
        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/2"),
        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/8"),
        "options": {"queue": "authentik_scheduled"},
    }
}
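
fqdn_rand derives the cron minute from the host's FQDN, so multiple instances sharing a database do not all fire the task at the same moment. A sketch of one way such a helper can work (an assumption about the implementation, not authentik's actual code):

from hashlib import sha256
from socket import getfqdn

def fqdn_rand(seed: str, stop: int = 60) -> int:
    """Deterministic per-host minute: hash the FQDN plus a task seed."""
    digest = sha256(f"{getfqdn()}-{seed}".encode()).hexdigest()
    return int(digest, 16) % stop
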
@ -13,6 +13,6 @@ def pre_save_license(sender: type[License], instance: License, **_):
    """Extract data from license jwt and save it into model"""
    status = instance.status
    instance.name = status.name
    instance.internal_users = status.internal_users
    instance.users = status.users
    instance.external_users = status.external_users
    instance.expiry = datetime.fromtimestamp(status.exp, tz=get_current_timezone())
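
This receiver denormalizes the signed JWT into plain model columns on every save, so list views never need to re-verify the key. A minimal sketch of wiring such a pre_save receiver (generic names, illustrative fields):

from django.db.models.signals import pre_save
from django.dispatch import receiver

# `sender` would normally pin this to the concrete model class; it is
# omitted here so the sketch stays self-contained.
@receiver(pre_save)
def extract_claims(sender, instance, **_):
    """Copy verified claims onto the instance before it hits the database."""
    # assumed: `status` is a property that validates the stored JWT and
    # returns its parsed claims
    instance.name = instance.status.name
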
@ -6,4 +6,5 @@ from authentik.root.celery import CELERY_APP
@CELERY_APP.task()
def calculate_license():
    """Calculate licensing status"""
    LicenseKey.get_total().record_usage()
    total = LicenseKey.get_total()
    total.record_usage()
@ -23,7 +23,7 @@ class TestEnterpriseLicense(TestCase):
                    aud="",
                    exp=_exp,
                    name=generate_id(),
                    internal_users=100,
                    users=100,
                    external_users=100,
                )
            ),
@ -32,7 +32,7 @@ class TestEnterpriseLicense(TestCase):
        """Check license verification"""
        lic = License.objects.create(key=generate_id())
        self.assertTrue(lic.status.is_valid())
        self.assertEqual(lic.internal_users, 100)
        self.assertEqual(lic.users, 100)

    def test_invalid(self):
        """Test invalid license"""
@ -46,7 +46,7 @@ class TestEnterpriseLicense(TestCase):
                    aud="",
                    exp=_exp,
                    name=generate_id(),
                    internal_users=100,
                    users=100,
                    external_users=100,
                )
            ),
@ -58,7 +58,7 @@ class TestEnterpriseLicense(TestCase):
        lic2 = License.objects.create(key=generate_id())
        self.assertTrue(lic2.status.is_valid())
        total = LicenseKey.get_total()
        self.assertEqual(total.internal_users, 200)
        self.assertEqual(total.users, 200)
        self.assertEqual(total.external_users, 200)
        self.assertEqual(total.exp, _exp)
        self.assertTrue(total.is_valid())
@ -4,7 +4,7 @@ from json import loads

import django_filters
from django.db.models.aggregates import Count
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import ExtractDay
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
@ -134,11 +134,11 @@ class EventViewSet(ModelViewSet):
        """Get the top_n events grouped by user count"""
        filtered_action = request.query_params.get("action", EventAction.LOGIN)
        top_n = int(request.query_params.get("top_n", "15"))
        events = (
        return Response(
            get_objects_for_user(request.user, "authentik_events.view_event")
            .filter(action=filtered_action)
            .exclude(context__authorized_application=None)
            .annotate(application=KeyTransform("authorized_application", "context"))
            .annotate(application=KeyTextTransform("authorized_application", "context"))
            .annotate(user_pk=KeyTextTransform("pk", "user"))
            .values("application")
            .annotate(counted_events=Count("application"))
@ -146,7 +146,6 @@ class EventViewSet(ModelViewSet):
            .values("unique_users", "application", "counted_events")
            .order_by("-counted_events")[:top_n]
        )
        return Response(EventTopPerUserSerializer(instance=events, many=True).data)

    @extend_schema(
        methods=["GET"],
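
The queryset above is a group-by: extract the application name from the JSON context, count events per application, count distinct users per application, and keep the busiest top_n. (KeyTransform maps to Postgres ->, returning JSON; KeyTextTransform maps to ->>, returning text, which groups and compares cleanly.) The same pipeline restated in plain Python for clarity:

from collections import defaultdict

def top_per_user(events: list[dict], top_n: int = 15) -> list[dict]:
    """Group events by application; count events and distinct users per group."""
    counted: dict[str, int] = defaultdict(int)
    users: dict[str, set] = defaultdict(set)
    for event in events:
        app = event["context"].get("authorized_application")
        if app is None:
            continue  # mirrors .exclude(context__authorized_application=None)
        counted[app] += 1
        users[app].add(event["user"]["pk"])
    rows = [
        {"application": app, "counted_events": counted[app], "unique_users": len(users[app])}
        for app in counted
    ]
    return sorted(rows, key=lambda row: -row["counted_events"])[:top_n]
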
@ -39,7 +39,7 @@ class NotificationTransportSerializer(ModelSerializer):
        mode = attrs.get("mode")
        if mode in [TransportMode.WEBHOOK, TransportMode.WEBHOOK_SLACK]:
            if "webhook_url" not in attrs or attrs.get("webhook_url", "") == "":
                raise ValidationError({"webhook_url": "Webhook URL may not be empty."})
                raise ValidationError("Webhook URL may not be empty.")
        return attrs

    class Meta:
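
The difference between the two raise lines matters for API consumers: DRF renders a ValidationError raised with a dict as a field-scoped error ({"webhook_url": [...]}), while a bare string lands under non_field_errors, which a form UI cannot attach to the offending input. A minimal sketch of the field-scoped form:

from rest_framework.exceptions import ValidationError

def validate_webhook(attrs: dict) -> dict:
    """Reject empty webhook URLs with an error keyed to the field."""
    if not attrs.get("webhook_url"):
        raise ValidationError({"webhook_url": "Webhook URL may not be empty."})
    return attrs
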
@ -9,6 +9,7 @@ from django.core.exceptions import SuspiciousOperation
from django.db.models import Model
from django.db.models.signals import m2m_changed, post_save, pre_delete
from django.http import HttpRequest, HttpResponse
from django_otp.plugins.otp_static.models import StaticToken
from guardian.models import UserObjectPermission

from authentik.core.models import (
@ -27,10 +28,8 @@ from authentik.lib.sentry import before_send
from authentik.lib.utils.errors import exception_to_string
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import AccessToken, AuthorizationCode, RefreshToken
from authentik.providers.scim.models import SCIMGroup, SCIMUser
from authentik.stages.authenticator_static.models import StaticToken

IGNORED_MODELS = (
    Event,
@ -53,13 +52,11 @@ IGNORED_MODELS = (
    RefreshToken,
    SCIMUser,
    SCIMGroup,
    Reputation,
)


def should_log_model(model: Model) -> bool:
    """Return true if operation on `model` should be logged"""
    # Check for silk by string so this comparison doesn't fail when silk isn't installed
    if model.__module__.startswith("silk"):
        return False
    return model.__class__ not in IGNORED_MODELS
@ -96,30 +93,21 @@ class AuditMiddleware:
    of models"""

    get_response: Callable[[HttpRequest], HttpResponse]
    anonymous_user: User = None

    def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
        self.get_response = get_response

    def _ensure_fallback_user(self):
        """Defer fetching anonymous user until we have to"""
        if self.anonymous_user:
            return
        from guardian.shortcuts import get_anonymous_user

        self.anonymous_user = get_anonymous_user()

    def connect(self, request: HttpRequest):
        """Connect signal for automatic logging"""
        self._ensure_fallback_user()
        user = getattr(request, "user", self.anonymous_user)
        if not user.is_authenticated:
            user = self.anonymous_user
        if not hasattr(request, "user"):
            return
        if not getattr(request.user, "is_authenticated", False):
            return
        if not hasattr(request, "request_id"):
            return
        post_save_handler = partial(self.post_save_handler, user=user, request=request)
        pre_delete_handler = partial(self.pre_delete_handler, user=user, request=request)
        m2m_changed_handler = partial(self.m2m_changed_handler, user=user, request=request)
        post_save_handler = partial(self.post_save_handler, user=request.user, request=request)
        pre_delete_handler = partial(self.pre_delete_handler, user=request.user, request=request)
        m2m_changed_handler = partial(self.m2m_changed_handler, user=request.user, request=request)
        post_save.connect(
            post_save_handler,
            dispatch_uid=request.request_id,
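
The middleware binds request context into signal handlers with functools.partial and keys the connection on the request ID, so each request gets its own handler instance that can be disconnected with the same dispatch_uid afterwards. A standalone sketch of that pattern (illustrative handler, not authentik's):

from functools import partial

from django.db.models.signals import post_save

def log_save(sender, instance, user=None, request_id=None, **_):
    """Handler that knows which user and request triggered the save."""
    print(f"{user} saved {instance!r} during request {request_id}")

def connect_for_request(user, request_id: str):
    handler = partial(log_save, user=user, request_id=request_id)
    # weak=False: a partial object would otherwise be garbage-collected
    # immediately; dispatch_uid lets the response phase disconnect exactly
    # this handler again.
    post_save.connect(handler, dispatch_uid=request_id, weak=False)
    return handler
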
@ -217,7 +217,6 @@ class Event(SerializerModel, ExpiringModel):
            "path": request.path,
            "method": request.method,
            "args": cleanse_dict(QueryDict(request.META.get("QUERY_STRING", ""))),
            "user_agent": request.META.get("HTTP_USER_AGENT", ""),
        }
        # Special case for events created during flow execution
        # since they keep the http query within a wrapped query
@ -437,39 +436,32 @@ class NotificationTransport(SerializerModel):

    def send_email(self, notification: "Notification") -> list[str]:
        """Send notification via global email configuration"""
        subject_prefix = "authentik Notification: "
        context = {
            "key_value": {
                "user_email": notification.user.email,
                "user_username": notification.user.username,
            },
            "body": notification.body,
            "title": "",
        subject = "authentik Notification: "
        key_value = {
            "user_email": notification.user.email,
            "user_username": notification.user.username,
        }
        if notification.event and notification.event.user:
            context["key_value"]["event_user_email"] = notification.event.user.get("email", None)
            context["key_value"]["event_user_username"] = notification.event.user.get(
                "username", None
            )
            key_value["event_user_email"] = notification.event.user.get("email", None)
            key_value["event_user_username"] = notification.event.user.get("username", None)
        if notification.event:
            context["title"] += notification.event.action
            subject += notification.event.action
            for key, value in notification.event.context.items():
                if not isinstance(value, str):
                    continue
                context["key_value"][key] = value
                key_value[key] = value
        else:
            context["title"] += notification.body[:75]
        # TODO: improve permission check
        if notification.user.is_superuser:
            context["source"] = {
                "from": self.name,
            }
            subject += notification.body[:75]
        mail = TemplateEmailMessage(
            subject=subject_prefix + context["title"],
            subject=subject,
            to=[notification.user.email],
            language=notification.user.locale(),
            template_name="email/event_notification.html",
            template_context=context,
            template_name="email/generic.html",
            template_context={
                "title": subject,
                "body": notification.body,
                "key_value": key_value,
            },
        )
        # Email is sent directly here, as the call to send() should have been from a task.
        try:
@ -206,8 +206,8 @@ def prefill_task(func):
        task_call_module=func.__module__,
        task_call_func=func.__name__,
        # We don't have real values for these attributes but they cannot be null
        start_timestamp=0,
        finish_timestamp=0,
        start_timestamp=default_timer(),
        finish_timestamp=default_timer(),
        finish_time=datetime.now(),
    ).save(86400)
    LOGGER.debug("prefilled task", task_name=func.__name__)
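
default_timer (from the timeit module, an alias for time.perf_counter on Python 3) seeds both timestamps with the same clock the real task runs use, so a prefilled record reports a zero duration instead of a bogus epoch-zero one. A small sketch of the measurement pattern it belongs to:

from timeit import default_timer

start = default_timer()
# ... the actual work being timed ...
duration = default_timer() - start  # float seconds from a monotonic clock
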