Compare commits: trustchain...hack-close
1 commit: 87bf75e51c
.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2023.10.6
+current_version = 2023.6.1
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)
.dockerignore
@@ -1,11 +1,9 @@
+env
 htmlcov
 *.env.yml
 **/node_modules
 dist/**
 build/**
 build_docs/**
-*Dockerfile
-blueprints/local
-.git
-!gen-ts-api/node_modules
-!gen-ts-api/dist/**
+Dockerfile
+authentik/enterprise
.github/actions/setup/action.yml (21 changes)
@@ -2,39 +2,36 @@ name: "Setup authentik testing environment"
 description: "Setup authentik testing environment"

 inputs:
-  postgresql_version:
+  postgresql_tag:
     description: "Optional postgresql image tag"
     default: "12"

 runs:
   using: "composite"
   steps:
-    - name: Install poetry & deps
+    - name: Install poetry
      shell: bash
      run: |
        pipx install poetry || true
-        sudo apt-get update
-        sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
+        sudo apt update
+        sudo apt install -y libxmlsec1-dev pkg-config gettext
    - name: Setup python and restore poetry
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v3
      with:
-        python-version-file: 'pyproject.toml'
+        python-version: "3.11"
        cache: "poetry"
    - name: Setup node
      uses: actions/setup-node@v3
      with:
-        node-version-file: web/package.json
+        node-version: "20"
        cache: "npm"
        cache-dependency-path: web/package-lock.json
-    - name: Setup go
-      uses: actions/setup-go@v4
-      with:
-        go-version-file: "go.mod"
    - name: Setup dependencies
      shell: bash
      run: |
-        export PSQL_TAG=${{ inputs.postgresql_version }}
+        export PSQL_TAG=${{ inputs.postgresql_tag }}
        docker-compose -f .github/actions/setup/docker-compose.yml up -d
+        poetry env use python3.11
        poetry install
        cd web && npm ci
    - name: Generate config
.github/cherry-pick-bot.yml (2 changes)
@@ -1,2 +0,0 @@
-enabled: true
-preservePullRequestTitle: true
.github/codecov.yml (2 changes)
@@ -6,5 +6,5 @@ coverage:
        # adjust accordingly based on how flaky your tests are
        # this allows a 1% drop from the previous base commit coverage
        threshold: 1%
-comment:
+notify:
   after_n_builds: 3
.github/dependabot.yml (55 changes)
@@ -8,8 +8,6 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "ci:"
-    labels:
-      - dependencies
   - package-ecosystem: gomod
     directory: "/"
     schedule:
@@ -18,19 +16,14 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "core:"
-    labels:
-      - dependencies
   - package-ecosystem: npm
     directory: "/web"
     schedule:
       interval: daily
       time: "04:00"
-    labels:
-      - dependencies
     open-pull-requests-limit: 10
     commit-message:
       prefix: "web:"
-    # TODO: deduplicate these groups
     groups:
       sentry:
         patterns:
@@ -39,52 +32,10 @@ updates:
         patterns:
           - "@babel/*"
           - "babel-*"
-      eslint:
-        patterns:
-          - "@typescript-eslint/*"
-          - "eslint"
-          - "eslint-*"
       storybook:
         patterns:
           - "@storybook/*"
           - "*storybook*"
-      esbuild:
-        patterns:
-          - "@esbuild/*"
-  - package-ecosystem: npm
-    directory: "/tests/wdio"
-    schedule:
-      interval: daily
-      time: "04:00"
-    labels:
-      - dependencies
-    open-pull-requests-limit: 10
-    commit-message:
-      prefix: "web:"
-    # TODO: deduplicate these groups
-    groups:
-      sentry:
-        patterns:
-          - "@sentry/*"
-      babel:
-        patterns:
-          - "@babel/*"
-          - "babel-*"
-      eslint:
-        patterns:
-          - "@typescript-eslint/*"
-          - "eslint"
-          - "eslint-*"
-      storybook:
-        patterns:
-          - "@storybook/*"
-          - "*storybook*"
-      esbuild:
-        patterns:
-          - "@esbuild/*"
-      wdio:
-        patterns:
-          - "@wdio/*"
   - package-ecosystem: npm
     directory: "/website"
     schedule:
@@ -93,8 +44,6 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "website:"
-    labels:
-      - dependencies
     groups:
       docusaurus:
         patterns:
@@ -107,8 +56,6 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "core:"
-    labels:
-      - dependencies
   - package-ecosystem: docker
     directory: "/"
     schedule:
@@ -117,5 +64,3 @@ updates:
     open-pull-requests-limit: 10
     commit-message:
       prefix: "core:"
-    labels:
-      - dependencies
.github/pull_request_template.md (20 changes)
@@ -1,19 +1,23 @@
 <!--
-👋 Hi there! Welcome.
+👋 Hello there! Welcome.

-Please check the Contributing guidelines: https://goauthentik.io/developer-docs/#how-can-i-contribute
+Please check the [Contributing guidelines](https://goauthentik.io/developer-docs/#how-can-i-contribute).
 -->

 ## Details

-<!--
-Explain what this PR changes, what the rationale behind the change is, if any new requirements are introduced or any breaking changes caused by this PR.
-
-Ideally also link an Issue for context that this PR will close using `closes #`
--->
-REPLACE ME
-
----
+- **Does this resolve an issue?**
+Resolves #
+
+## Changes
+
+### New Features
+
+- Adds feature which does x, y, and z.
+
+### Breaking Changes
+
+- Adds breaking change which causes \<issue\>.

 ## Checklist
.github/workflows/ci-main.yml (75 changes)
@@ -11,7 +11,6 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 env:
   POSTGRES_DB: authentik
@@ -34,7 +33,7 @@ jobs:
          - ruff
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run job
@@ -42,44 +41,31 @@ jobs:
  test-migrations:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: run migrations
        run: poetry run python -m lifecycle.migrate
  test-migrations-from-stable:
-    name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }}
    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        psql:
-          - 12-alpine
-          - 15-alpine
-          - 16-alpine
+    continue-on-error: true
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Setup authentik env
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: checkout stable
        run: |
-          # Delete all poetry envs
-          rm -rf /home/runner/.cache/pypoetry
          # Copy current, latest config to local
          cp authentik/lib/default.yml local.env.yml
          cp -R .github ..
          cp -R scripts ..
-          git checkout version/$(python -c "from authentik import __version__; print(__version__)")
+          git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
          rm -rf .github/ scripts/
          mv ../.github ../scripts .
      - name: Setup authentik env (ensure stable deps are installed)
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: run migrations to stable
        run: poetry run python -m lifecycle.migrate
      - name: checkout current code
@@ -89,13 +75,9 @@ jobs:
          git reset --hard HEAD
          git clean -d -fx .
          git checkout $GITHUB_SHA
-          # Delete previous poetry env
-          rm -rf $(poetry env info --path)
          poetry install
      - name: Setup authentik env (ensure latest deps are installed)
        uses: ./.github/actions/setup
-        with:
-          postgresql_version: ${{ matrix.psql }}
      - name: migrate to latest
        run: poetry run python -m lifecycle.migrate
  test-unittest:
@@ -106,15 +88,14 @@ jobs:
      fail-fast: false
      matrix:
        psql:
+          - 11-alpine
          - 12-alpine
-          - 15-alpine
-          - 16-alpine
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
        with:
-          postgresql_version: ${{ matrix.psql }}
+          postgresql_tag: ${{ matrix.psql }}
      - name: run unittest
        run: |
          poetry run make test
@@ -127,7 +108,7 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Create k8s Kind Cluster
@@ -163,7 +144,7 @@ jobs:
          - name: flows
            glob: tests/e2e/test_flows*
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Setup e2e env (chrome, etc)
@@ -203,36 +184,30 @@ jobs:
  build:
    needs: ci-core-mark
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: generate ts client
-        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
-          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -245,8 +220,6 @@ jobs:
            GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
            VERSION=${{ steps.ev.outputs.version }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
      - name: Comment on PR
        if: github.event_name == 'pull_request'
        continue-on-error: true
@@ -256,36 +229,30 @@ jobs:
  build-arm64:
    needs: ci-core-mark
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    timeout-minutes: 120
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: generate ts client
-        run: make gen-client-ts
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
-          context: .
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
            GEOIPUPDATE_LICENSE_KEY=${{ secrets.GEOIPUPDATE_LICENSE_KEY }}
@@ -299,5 +266,3 @@ jobs:
            VERSION=${{ steps.ev.outputs.version }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/arm64
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
.github/workflows/ci-outpost.yml (32 changes)
@@ -9,13 +9,12 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 jobs:
   lint-golint:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
@@ -30,18 +29,16 @@ jobs:
      - name: golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
-          version: v1.54.2
+          version: v1.52.2
          args: --timeout 5000s --verbose
-          skip-cache: true
+          skip-pkg-cache: true
  test-unittest:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - name: Setup authentik env
-        uses: ./.github/actions/setup
      - name: Generate API
        run: make gen-client-go
      - name: Go unittests
@@ -66,24 +63,21 @@ jobs:
          - ldap
          - radius
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      - name: Login to Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        if: ${{ steps.ev.outputs.shouldBuild == 'true' }}
        with:
          registry: ghcr.io
@@ -92,7 +86,7 @@ jobs:
      - name: Generate API
        run: make gen-client-go
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          push: ${{ steps.ev.outputs.shouldBuild == 'true' }}
          tags: |
@@ -105,8 +99,6 @@ jobs:
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
          platforms: linux/amd64,linux/arm64
          context: .
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
  build-binary:
    timeout-minutes: 120
    needs:
@@ -122,15 +114,15 @@ jobs:
        goos: [linux]
        goarch: [amd64, arm64]
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Generate API
.github/workflows/ci-web.yml (55 changes)
@@ -9,38 +9,31 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 jobs:
   lint-eslint:
    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
        run: npm ci
      - name: Generate API
        run: make gen-client-ts
      - name: Eslint
-        working-directory: ${{ matrix.project }}/
+        working-directory: web/
        run: npm run lint
  lint-build:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/
@@ -52,33 +45,27 @@ jobs:
        run: npm run tsc
  lint-prettier:
    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        project:
-          - web
-          - tests/wdio
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: ${{ matrix.project }}/package.json
+          node-version: "20"
          cache: "npm"
-          cache-dependency-path: ${{ matrix.project }}/package-lock.json
-      - working-directory: ${{ matrix.project }}/
+          cache-dependency-path: web/package-lock.json
+      - working-directory: web/
        run: npm ci
      - name: Generate API
        run: make gen-client-ts
      - name: prettier
-        working-directory: ${{ matrix.project }}/
+        working-directory: web/
        run: npm run prettier-check
  lint-lit-analyse:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/
@@ -107,10 +94,10 @@ jobs:
      - ci-web-mark
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - working-directory: web/
.github/workflows/ci-website.yml (19 changes)
@@ -9,16 +9,15 @@ on:
   pull_request:
     branches:
       - main
-      - version-*

 jobs:
   lint-prettier:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
@@ -29,10 +28,10 @@ jobs:
  test:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
@@ -50,10 +49,10 @@ jobs:
      - build
      - build-docs-only
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: website/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: website/package-lock.json
      - working-directory: website/
.github/workflows/codeql-analysis.yml (2 changes)
@@ -23,7 +23,7 @@ jobs:
        language: ["go", "javascript", "python"]
    steps:
      - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - name: Initialize CodeQL
.github/workflows/gha-cache-cleanup.yml (34 changes)
@@ -1,34 +0,0 @@
----
-# See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
-name: Cleanup cache after PR is closed
-on:
-  pull_request:
-    types:
-      - closed
-
-jobs:
-  cleanup:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v4
-
-      - name: Cleanup
-        run: |
-          gh extension install actions/gh-actions-cache
-
-          REPO=${{ github.repository }}
-          BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
-
-          echo "Fetching list of cache key"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
-
-          # Setting this to not fail the workflow while deleting cache keys.
-          set +e
-          echo "Deleting caches..."
-          for cacheKey in $cacheKeysForPR; do
-            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
-          done
-          echo "Done"
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ghcr-retention.yml (6 changes)
@@ -1,8 +1,8 @@
 name: ghcr-retention

 on:
-  # schedule:
-  # - cron: "0 0 * * *" # every day at midnight
+  schedule:
+    - cron: "0 0 * * *" # every day at midnight
   workflow_dispatch:

 jobs:
@@ -11,7 +11,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/image-compress.yml (61 changes)
@@ -1,61 +0,0 @@
----
-name: authentik-compress-images
-
-on:
-  push:
-    branches:
-      - main
-    paths:
-      - "**.jpg"
-      - "**.jpeg"
-      - "**.png"
-      - "**.webp"
-  pull_request:
-    paths:
-      - "**.jpg"
-      - "**.jpeg"
-      - "**.png"
-      - "**.webp"
-  workflow_dispatch:
-
-jobs:
-  compress:
-    name: compress
-    runs-on: ubuntu-latest
-    # Don't run on forks. Token will not be available. Will run on main and open a PR anyway
-    if: |
-      github.repository == 'goauthentik/authentik' &&
-      (github.event_name != 'pull_request' ||
-      github.event.pull_request.head.repo.full_name == github.repository)
-    steps:
-      - id: generate_token
-        uses: tibdex/github-app-token@v2
-        with:
-          app_id: ${{ secrets.GH_APP_ID }}
-          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
-        with:
-          token: ${{ steps.generate_token.outputs.token }}
-      - name: Compress images
-        id: compress
-        uses: calibreapp/image-actions@main
-        with:
-          githubToken: ${{ steps.generate_token.outputs.token }}
-          compressOnly: ${{ github.event_name != 'pull_request' }}
-      - uses: peter-evans/create-pull-request@v5
-        if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
-        id: cpr
-        with:
-          token: ${{ steps.generate_token.outputs.token }}
-          title: "*: Auto compress images"
-          branch-suffix: timestamp
-          commit-messsage: "*: compress images"
-          body: ${{ steps.compress.outputs.markdown }}
-          delete-branch: true
-          signoff: true
-      - uses: peter-evans/enable-pull-request-automerge@v3
-        if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
-        with:
-          token: ${{ steps.generate_token.outputs.token }}
-          pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
-          merge-method: squash
.github/workflows/publish-source-docs.yml (31 changes)
@@ -1,31 +0,0 @@
-name: authentik-publish-source-docs
-
-on:
-  push:
-    branches:
-      - main
-
-env:
-  POSTGRES_DB: authentik
-  POSTGRES_USER: authentik
-  POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
-
-jobs:
-  publish-source-docs:
-    runs-on: ubuntu-latest
-    timeout-minutes: 120
-    steps:
-      - uses: actions/checkout@v4
-      - name: Setup authentik env
-        uses: ./.github/actions/setup
-      - name: generate docs
-        run: |
-          poetry run make migrate
-          poetry run ak build_source_docs
-      - name: Publish
-        uses: netlify/actions/cli@master
-        with:
-          args: deploy --dir=source_docs --prod
-        env:
-          NETLIFY_SITE_ID: eb246b7b-1d83-4f69-89f7-01a936b4ca59
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
.github/workflows/release-next-branch.yml (3 changes)
@@ -6,7 +6,6 @@ on:
   workflow_dispatch:

 permissions:
-  # Needed to be able to push to the next branch
   contents: write

 jobs:
@@ -14,7 +13,7 @@ jobs:
    runs-on: ubuntu-latest
    environment: internal-production
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: main
      - run: |
.github/workflows/release-publish.yml (53 changes)
@@ -7,37 +7,29 @@ on:
 jobs:
   build-server:
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
      - name: Docker Login Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: make empty clients
-        run: |
-          mkdir -p ./gen-ts-api
-          mkdir -p ./gen-go-api
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
-          context: .
          push: ${{ github.event_name == 'release' }}
          secrets: |
            GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}
@@ -55,9 +47,6 @@ jobs:
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost:
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload contianer images to ghcr.io
-      packages: write
    strategy:
      fail-fast: false
      matrix:
@@ -66,34 +55,30 @@ jobs:
          - ldap
          - radius
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v2.2.0
      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
-      - name: make empty clients
-        run: |
-          mkdir -p ./gen-ts-api
-          mkdir -p ./gen-go-api
      - name: Docker Login Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build Docker Image
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
        with:
          push: ${{ github.event_name == 'release' }}
          tags: |
@@ -105,16 +90,12 @@ jobs:
            ghcr.io/goauthentik/${{ matrix.type }}:latest
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
-          context: .
          build-args: |
            VERSION=${{ steps.ev.outputs.version }}
            VERSION_FAMILY=${{ steps.ev.outputs.versionFamily }}
  build-outpost-binary:
    timeout-minutes: 120
    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload binaries to the release
-      contents: write
    strategy:
      fail-fast: false
      matrix:
@@ -125,13 +106,13 @@ jobs:
        goos: [linux, darwin]
        goarch: [amd64, arm64]
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-go@v4
        with:
          go-version-file: "go.mod"
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: Build web
@@ -160,7 +141,7 @@ jobs:
      - build-outpost-binary
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Run test suite in final docker images
        run: |
          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
@@ -176,7 +157,7 @@ jobs:
      - build-outpost-binary
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: prepare variables
        uses: ./.github/actions/docker-push-variables
        id: ev
.github/workflows/release-tag.yml (5 changes)
@@ -10,13 +10,12 @@ jobs:
    name: Create Release from Tag
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
      - name: Pre-release test
        run: |
          echo "PG_PASS=$(openssl rand -base64 32)" >> .env
          echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
          docker buildx install
-          mkdir -p ./gen-ts-api
          docker build -t testing:latest .
          echo "AUTHENTIK_IMAGE=testing" >> .env
          echo "AUTHENTIK_TAG=latest" >> .env
@@ -24,7 +23,7 @@ jobs:
          docker-compose start postgresql redis
          docker-compose run -u root server test-all
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/repo-stale.yml (4 changes)
@@ -6,15 +6,15 @@ on:
   workflow_dispatch:

 permissions:
-  # Needed to update issues and PRs
   issues: write
+  pull-requests: write

 jobs:
   stale:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
.github/workflows/translation-compile.yml (4 changes)
@@ -16,11 +16,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Setup authentik env
.github/workflows/translation-rename.yml (8 changes)
@@ -1,5 +1,4 @@
 # Rename transifex pull requests to have a correct naming
-# Also enables auto squash-merge
 name: authentik-translation-transifex-rename

 on:
@@ -12,7 +11,7 @@ jobs:
    if: ${{ github.event.pull_request.user.login == 'transifex-integration[bot]'}}
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -38,8 +37,3 @@ jobs:
            -H "X-GitHub-Api-Version: 2022-11-28" \
            https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${{ github.event.pull_request.number }} \
            -d "{\"title\":\"translate: ${{ steps.title.outputs.title }}\"}"
-      - uses: peter-evans/enable-pull-request-automerge@v3
-        with:
-          token: ${{ steps.generate_token.outputs.token }}
-          pull-request-number: ${{ github.event.pull_request.number }}
-          merge-method: squash
.github/workflows/web-api-publish.yml (8 changes)
@@ -10,16 +10,16 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - id: generate_token
-        uses: tibdex/github-app-token@v2
+        uses: tibdex/github-app-token@v1
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          token: ${{ steps.generate_token.outputs.token }}
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3.7.0
        with:
-          node-version-file: web/package.json
+          node-version: "20"
          registry-url: "https://registry.npmjs.org"
      - name: Generate API Client
        run: make gen-client-ts
.gitignore (4 changes)
@@ -205,7 +205,3 @@ data/
 # Local Netlify folder
 .netlify
 .ruff_cache
-source_docs/
-
-### Golang ###
-/vendor/
.vscode/settings.json (3 changes)
@@ -31,8 +31,7 @@
        "!Format sequence",
        "!Condition sequence",
        "!Env sequence",
-        "!Env scalar",
-        "!If sequence"
+        "!Env scalar"
    ],
    "typescript.preferences.importModuleSpecifier": "non-relative",
    "typescript.preferences.importModuleSpecifierEnding": "index",
CODEOWNERS (28 changes)
@@ -1,26 +1,2 @@
-# Fallback
-* @goauthentik/backend @goauthentik/frontend
-# Backend
-authentik/ @goauthentik/backend
-blueprints/ @goauthentik/backend
-cmd/ @goauthentik/backend
-internal/ @goauthentik/backend
-lifecycle/ @goauthentik/backend
-schemas/ @goauthentik/backend
-scripts/ @goauthentik/backend
-tests/ @goauthentik/backend
-pyproject.toml @goauthentik/backend
-poetry.lock @goauthentik/backend
-# Infrastructure
-.github/ @goauthentik/infrastructure
-Dockerfile @goauthentik/infrastructure
-*Dockerfile @goauthentik/infrastructure
-.dockerignore @goauthentik/infrastructure
-docker-compose.yml @goauthentik/infrastructure
-# Web
-web/ @goauthentik/frontend
-tests/wdio/ @goauthentik/frontend
-# Docs & Website
-website/ @goauthentik/docs
-# Security
-website/docs/security/ @goauthentik/security
+* @goauthentik/core
+website/docs/security/** @goauthentik/security
Dockerfile (140 changes)
@@ -1,75 +1,53 @@
-# syntax=docker/dockerfile:1
-
 # Stage 1: Build website
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as website-builder
-
-ENV NODE_ENV=production
-
-WORKDIR /work/website
-
-RUN --mount=type=bind,target=/work/website/package.json,src=./website/package.json \
-    --mount=type=bind,target=/work/website/package-lock.json,src=./website/package-lock.json \
-    --mount=type=cache,id=npm-website,sharing=shared,target=/root/.npm \
-    npm ci --include=dev
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as website-builder

 COPY ./website /work/website/
 COPY ./blueprints /work/blueprints/
 COPY ./SECURITY.md /work/

-RUN npm run build-docs-only
+ENV NODE_ENV=production
+WORKDIR /work/website
+RUN npm ci --include=dev && npm run build-docs-only

 # Stage 2: Build webui
-FROM --platform=${BUILDPLATFORM} docker.io/node:21 as web-builder
-
-ENV NODE_ENV=production
-
-WORKDIR /work/web
-
-RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
-    --mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
-    --mount=type=cache,id=npm-web,sharing=shared,target=/root/.npm \
-    npm ci --include=dev
+FROM --platform=${BUILDPLATFORM} docker.io/node:20 as web-builder

 COPY ./web /work/web/
 COPY ./website /work/website/
-COPY ./gen-ts-api /work/web/node_modules/@goauthentik/api

-RUN npm run build
+ENV NODE_ENV=production
+WORKDIR /work/web
+RUN npm ci --include=dev && npm run build

-# Stage 3: Build go proxy
-FROM --platform=${BUILDPLATFORM} docker.io/golang:1.21.4-bookworm AS go-builder
+# Stage 3: Poetry to requirements.txt export
+FROM docker.io/python:3.11.4-slim-bullseye AS poetry-locker

-ARG TARGETOS
-ARG TARGETARCH
-ARG TARGETVARIANT
+WORKDIR /work
+COPY ./pyproject.toml /work
+COPY ./poetry.lock /work

-ARG GOOS=$TARGETOS
-ARG GOARCH=$TARGETARCH
+RUN pip install --no-cache-dir poetry && \
+    poetry export -f requirements.txt --output requirements.txt && \
+    poetry export -f requirements.txt --dev --output requirements-dev.txt

-WORKDIR /go/src/goauthentik.io
+# Stage 4: Build go proxy
+FROM docker.io/golang:1.20.6-bullseye AS go-builder

-RUN --mount=type=bind,target=/go/src/goauthentik.io/go.mod,src=./go.mod \
-    --mount=type=bind,target=/go/src/goauthentik.io/go.sum,src=./go.sum \
-    --mount=type=cache,target=/go/pkg/mod \
-    go mod download
+WORKDIR /work

-COPY ./cmd /go/src/goauthentik.io/cmd
-COPY ./authentik/lib /go/src/goauthentik.io/authentik/lib
-COPY ./web/static.go /go/src/goauthentik.io/web/static.go
-COPY --from=web-builder /work/web/robots.txt /go/src/goauthentik.io/web/robots.txt
-COPY --from=web-builder /work/web/security.txt /go/src/goauthentik.io/web/security.txt
-COPY ./internal /go/src/goauthentik.io/internal
-COPY ./go.mod /go/src/goauthentik.io/go.mod
-COPY ./go.sum /go/src/goauthentik.io/go.sum
+COPY --from=web-builder /work/web/robots.txt /work/web/robots.txt
+COPY --from=web-builder /work/web/security.txt /work/web/security.txt

-ENV CGO_ENABLED=0
+COPY ./cmd /work/cmd
+COPY ./web/static.go /work/web/static.go
+COPY ./internal /work/internal
+COPY ./go.mod /work/go.mod
+COPY ./go.sum /work/go.sum

-RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
-    --mount=type=cache,id=go-build-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/root/.cache/go-build \
-    GOARM="${TARGETVARIANT#v}" go build -o /go/authentik ./cmd/server
+RUN go build -o /work/authentik ./cmd/server/

-# Stage 4: MaxMind GeoIP
-FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v6.0 as geoip
+# Stage 5: MaxMind GeoIP
+FROM ghcr.io/maxmind/geoipupdate:v6.0 as geoip

 ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City"
 ENV GEOIPUPDATE_VERBOSE="true"
@@ -82,33 +60,8 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    mkdir -p /usr/share/GeoIP && \
    /bin/sh -c "/usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

-# Stage 5: Python dependencies
-FROM docker.io/python:3.11.5-bookworm AS python-deps
-
-WORKDIR /ak-root/poetry
-
-ENV VENV_PATH="/ak-root/venv" \
-    POETRY_VIRTUALENVS_CREATE=false \
-    PATH="/ak-root/venv/bin:$PATH"
-
-RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
-
-RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/var/cache/apt \
-    apt-get update && \
-    # Required for installing pip packages
-    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev libpq-dev
-
-RUN --mount=type=bind,target=./pyproject.toml,src=./pyproject.toml \
-    --mount=type=bind,target=./poetry.lock,src=./poetry.lock \
-    --mount=type=cache,target=/root/.cache/pip \
-    --mount=type=cache,target=/root/.cache/pypoetry \
-    python -m venv /ak-root/venv/ && \
-    pip3 install --upgrade pip && \
-    pip3 install poetry && \
-    poetry install --only=main --no-ansi --no-interaction
-
 # Stage 6: Run
-FROM docker.io/python:3.11.5-slim-bookworm AS final-image
+FROM docker.io/python:3.11.4-slim-bullseye AS final-image

 ARG GIT_BUILD_HASH
 ARG VERSION
@@ -122,45 +75,46 @@ LABEL org.opencontainers.image.revision ${GIT_BUILD_HASH}

 WORKDIR /

-# We cannot cache this layer otherwise we'll end up with a bigger image
+COPY --from=poetry-locker /work/requirements.txt /
+COPY --from=poetry-locker /work/requirements-dev.txt /
+COPY --from=geoip /usr/share/GeoIP /geoip

 RUN apt-get update && \
+    # Required for installing pip packages
+    apt-get install -y --no-install-recommends build-essential pkg-config libxmlsec1-dev zlib1g-dev && \
    # Required for runtime
|
# Required for runtime
|
||||||
apt-get install -y --no-install-recommends libpq5 openssl libxmlsec1-openssl libmaxminddb0 && \
|
apt-get install -y --no-install-recommends libxmlsec1-openssl libmaxminddb0 && \
|
||||||
# Required for bootstrap & healtcheck
|
# Required for bootstrap & healtcheck
|
||||||
apt-get install -y --no-install-recommends runit && \
|
apt-get install -y --no-install-recommends runit && \
|
||||||
|
pip install --no-cache-dir -r /requirements.txt && \
|
||||||
|
apt-get remove --purge -y build-essential pkg-config libxmlsec1-dev && \
|
||||||
|
apt-get autoremove --purge -y && \
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
|
rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
|
||||||
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
|
adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
|
||||||
mkdir -p /certs /media /blueprints && \
|
mkdir -p /certs /media /blueprints && \
|
||||||
mkdir -p /authentik/.ssh && \
|
mkdir -p /authentik/.ssh && \
|
||||||
mkdir -p /ak-root && \
|
chown authentik:authentik /certs /media /authentik/.ssh
|
||||||
chown authentik:authentik /certs /media /authentik/.ssh /ak-root
|
|
||||||
|
|
||||||
COPY ./authentik/ /authentik
|
COPY ./authentik/ /authentik
|
||||||
COPY ./pyproject.toml /
|
COPY ./pyproject.toml /
|
||||||
COPY ./poetry.lock /
|
|
||||||
COPY ./schemas /schemas
|
COPY ./schemas /schemas
|
||||||
COPY ./locale /locale
|
COPY ./locale /locale
|
||||||
COPY ./tests /tests
|
COPY ./tests /tests
|
||||||
COPY ./manage.py /
|
COPY ./manage.py /
|
||||||
COPY ./blueprints /blueprints
|
COPY ./blueprints /blueprints
|
||||||
COPY ./lifecycle/ /lifecycle
|
COPY ./lifecycle/ /lifecycle
|
||||||
COPY --from=go-builder /go/authentik /bin/authentik
|
COPY --from=go-builder /work/authentik /bin/authentik
|
||||||
COPY --from=python-deps /ak-root/venv /ak-root/venv
|
|
||||||
COPY --from=web-builder /work/web/dist/ /web/dist/
|
COPY --from=web-builder /work/web/dist/ /web/dist/
|
||||||
COPY --from=web-builder /work/web/authentik/ /web/authentik/
|
COPY --from=web-builder /work/web/authentik/ /web/authentik/
|
||||||
COPY --from=website-builder /work/website/help/ /website/help/
|
COPY --from=website-builder /work/website/help/ /website/help/
|
||||||
COPY --from=geoip /usr/share/GeoIP /geoip
|
|
||||||
|
|
||||||
USER 1000
|
USER 1000
|
||||||
|
|
||||||
ENV TMPDIR=/dev/shm/ \
|
ENV TMPDIR /dev/shm/
|
||||||
PYTHONDONTWRITEBYTECODE=1 \
|
ENV PYTHONUNBUFFERED 1
|
||||||
PYTHONUNBUFFERED=1 \
|
ENV PATH "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/lifecycle"
|
||||||
PATH="/ak-root/venv/bin:/lifecycle:$PATH" \
|
|
||||||
VENV_PATH="/ak-root/venv" \
|
|
||||||
POETRY_VIRTUALENVS_CREATE=false
|
|
||||||
|
|
||||||
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]
|
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "/lifecycle/ak", "healthcheck" ]
|
||||||
|
|
||||||
ENTRYPOINT [ "dumb-init", "--", "ak" ]
|
ENTRYPOINT [ "/usr/local/bin/dumb-init", "--", "/lifecycle/ak" ]
|
||||||
|
|
110 Makefile

@ -1,16 +1,9 @@
.PHONY: gen dev-reset all clean test web website
.SHELLFLAGS += -x -e

.SHELLFLAGS += ${SHELLFLAGS} -e

PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.npm_version)
PY_SOURCES = authentik tests scripts lifecycle
DOCKER_IMAGE ?= "authentik:test"

pg_user := $(shell python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell python -m authentik.lib.config postgresql.name 2>/dev/null)

CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
    -I .github/codespell-words.txt \

@ -26,82 +19,57 @@ CODESPELL_ARGS = -D - -D .github/codespell-dictionary.txt \
    website/integrations \
    website/src

all: lint-fix lint test gen web ## Lint, build, and test everything
all: lint-fix lint test gen web

HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
    cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)

help: ## Show this help
    @echo "\nSpecify a command. The choices are:\n"
    @grep -Eh '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \
    awk 'BEGIN {FS = ":.*?## "}; {printf " \033[0;36m%-$(HELP_WIDTH)s \033[m %s\n", $$1, $$2}' | \
    sort
    @echo ""

test-go:
    go test -timeout 0 -v -race -cover ./...

test-docker: ## Run all tests in a docker-compose
test-docker:
    echo "PG_PASS=$(openssl rand -base64 32)" >> .env
    echo "AUTHENTIK_SECRET_KEY=$(openssl rand -base64 32)" >> .env
    docker-compose pull -q
    docker-compose up --no-start
    docker-compose start postgresql redis
    docker-compose run -u root server test-all
    docker-compose run -u root server test
    rm -f .env

test: ## Run the server tests and produce a coverage report (locally)
test:
    coverage run manage.py test --keepdb authentik
    coverage html
    coverage report

lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
lint-fix:
    isort $(PY_SOURCES)
    isort authentik $(PY_SOURCES)
    black $(PY_SOURCES)
    black authentik $(PY_SOURCES)
    ruff $(PY_SOURCES)
    ruff authentik $(PY_SOURCES)
    codespell -w $(CODESPELL_ARGS)

lint: ## Lint the python and golang sources
lint:
    bandit -r $(PY_SOURCES) -x node_modules
    ./web/node_modules/.bin/pyright $(PY_SOURCES)
    pylint $(PY_SOURCES)
    bandit -r $(PY_SOURCES) -x node_modules
    golangci-lint run -v

migrate: ## Run the Authentik Django server's migrations
migrate:
    python -m lifecycle.migrate

i18n-extract: i18n-extract-core web-i18n-extract ## Extract strings that require translation into files to send to a translation service
i18n-extract: i18n-extract-core web-i18n-extract

i18n-extract-core:
    ak makemessages --ignore web --ignore internal --ignore web --ignore web-api --ignore website -l en

install: web-install website-install ## Install all requires dependencies for `web`, `website` and `core`
    poetry install

dev-drop-db:
    dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
    # Also remove the test-db if it exists
    dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
    redis-cli -n 0 flushall

dev-create-db:
    createdb -U ${pg_user} -h ${pg_host} ${pg_name}

dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.

#########################
## API Schema
#########################

gen-build: ## Extract the schema from the database
gen-build:
    AUTHENTIK_DEBUG=true ak make_blueprint_schema > blueprints/schema.json
    AUTHENTIK_DEBUG=true ak spectacular --file schema.yml

gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
gen-changelog:
    git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
    npx prettier --write changelog.md

gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
gen-diff:
    git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > old_schema.yml
    docker run \
        --rm -v ${PWD}:/local \

@ -116,7 +84,7 @@ gen-clean:
    rm -rf web/api/src/
    rm -rf api/

gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
gen-client-ts:
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \

@ -132,7 +100,7 @@ gen-client-ts: ## Build and install the authentik API for Typescript into the a
    cd gen-ts-api && npm i
    \cp -rfv gen-ts-api/* web/node_modules/@goauthentik/api

gen-client-go: ## Build and install the authentik API for Golang
gen-client-go:
    mkdir -p ./gen-go-api ./gen-go-api/templates
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/config.yaml -O ./gen-go-api/config.yaml
    wget https://raw.githubusercontent.com/goauthentik/client-go/main/templates/README.mustache -O ./gen-go-api/templates/README.mustache

@ -149,7 +117,7 @@ gen-client-go: ## Build and install the authentik API for Golang
    go mod edit -replace goauthentik.io/api/v3=./gen-go-api
    rm -rf ./gen-go-api/config.yaml ./gen-go-api/templates/

gen-dev-config: ## Generate a local development config file
gen-dev-config:
    python -m scripts.generate_config

gen: gen-build gen-clean gen-client-ts

@ -158,29 +126,27 @@ gen: gen-build gen-clean gen-client-ts
## Web
#########################

web-build: web-install ## Build the Authentik UI
web-build: web-install
    cd web && npm run build

web: web-lint-fix web-lint web-check-compile web-i18n-extract ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
web: web-lint-fix web-lint web-check-compile

web-install: ## Install the necessary libraries to build the Authentik UI
web-install:
    cd web && npm ci

web-watch: ## Build and watch the Authentik UI for changes, updating automatically
web-watch:
    rm -rf web/dist/
    mkdir web/dist/
    touch web/dist/.gitkeep
    cd web && npm run watch

web-storybook-watch: ## Build and run the storybook documentation server
    cd web && npm run storybook

web-lint-fix:
    cd web && npm run prettier

web-lint:
    cd web && npm run lint
    cd web && npm run lit-analyse
    # TODO: The analyzer hasn't run correctly in awhile.
    # cd web && npm run lit-analyse

web-check-compile:
    cd web && npm run tsc

@ -192,7 +158,7 @@ web-i18n-extract:
## Website
#########################

website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
website: website-lint-fix website-build

website-install:
    cd website && npm ci

@ -203,22 +169,11 @@ website-lint-fix:
website-build:
    cd website && npm run build

website-watch: ## Build and watch the documentation website, updating automatically
website-watch:
    cd website && npm run watch

#########################
## Docker
#########################

docker: ## Build a docker image of the current source tree
    DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

#########################
## CI
#########################
# These targets are use by GitHub actions to allow usage of matrix
# which makes the YAML File a lot smaller

ci--meta-debug:
    python -V
    node --version

@ -246,3 +201,14 @@ ci-pyright: ci--meta-debug
ci-pending-migrations: ci--meta-debug
    ak makemigrations --check

install: web-install website-install
    poetry install

dev-reset:
    dropdb -U postgres -h localhost authentik
    # Also remove the test-db if it exists
    dropdb -U postgres -h localhost test_authentik || true
    createdb -U postgres -h localhost authentik
    redis-cli -n 0 flushall
    make migrate

12 README.md

@ -41,3 +41,15 @@ See [SECURITY.md](SECURITY.md)
## Adoption and Contributions

Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [CONTRIBUTING.md file](./CONTRIBUTING.md).

## Sponsors

This project is proudly sponsored by:

<p>
    <a href="https://www.digitalocean.com/?utm_medium=opensource&utm_source=goauthentik.io">
        <img src="https://opensource.nyc3.cdn.digitaloceanspaces.com/attribution/assets/SVG/DO_Logo_horizontal_blue.svg" width="201px">
    </a>
</p>

DigitalOcean provides development and testing resources for authentik.

@ -16,8 +16,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported |
| --- | --- |
| 2023.5.x | ✅ |
| 2023.6.x | ✅ |
| 2023.8.x | ✅ |

## Reporting a Vulnerability

@ -27,8 +27,6 @@ To report a vulnerability, send an email to [security@goauthentik.io](mailto:se
authentik reserves the right to reclassify CVSS as necessary. To determine severity, we will use the CVSS calculator from NVD (https://nvd.nist.gov/vuln-metrics/cvss/v3-calculator). The calculated CVSS score will then be translated into one of the following categories:

| Score | Severity |
| --- | --- |
| 0.0 | None |
| 0.1 – 3.9 | Low |
| 4.0 – 6.9 | Medium |

@ -1,8 +1,8 @@
"""authentik root module"""
"""authentik"""
from os import environ
from typing import Optional

__version__ = "2023.10.6"
__version__ = "2023.6.1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

@ -1,7 +1,7 @@
"""Meta API"""
from drf_spectacular.utils import extend_schema
from rest_framework.fields import CharField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

@ -21,7 +21,7 @@ class AppSerializer(PassiveSerializer):
class AppsViewSet(ViewSet):
    """Read-only view list all installed apps"""

    permission_classes = [IsAuthenticated]
    permission_classes = [IsAdminUser]

    @extend_schema(responses={200: AppSerializer(many=True)})
    def list(self, request: Request) -> Response:

@ -35,7 +35,7 @@ class AppsViewSet(ViewSet):
class ModelViewSet(ViewSet):
    """Read-only view list all installed models"""

    permission_classes = [IsAuthenticated]
    permission_classes = [IsAdminUser]

    @extend_schema(responses={200: AppSerializer(many=True)})
    def list(self, request: Request) -> Response:

@ -5,7 +5,7 @@ from django.db.models.functions import ExtractHour
from drf_spectacular.utils import extend_schema, extend_schema_field
from guardian.shortcuts import get_objects_for_user
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

@ -68,7 +68,7 @@ class LoginMetricsSerializer(PassiveSerializer):
class AdministrationMetricsViewSet(APIView):
    """Login Metrics per 1h"""

    permission_classes = [IsAuthenticated]
    permission_classes = [IsAdminUser]

    @extend_schema(responses={200: LoginMetricsSerializer(many=False)})
    def get(self, request: Request) -> Response:

@ -8,6 +8,7 @@ from django.utils.timezone import now
from drf_spectacular.utils import extend_schema
from gunicorn import version_info as gunicorn_version
from rest_framework.fields import SerializerMethodField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

@ -16,7 +17,6 @@ from authentik.core.api.utils import PassiveSerializer
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission


class RuntimeDict(TypedDict):

@ -88,7 +88,7 @@ class SystemSerializer(PassiveSerializer):
class SystemView(APIView):
    """Get system information."""

    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
    permission_classes = [IsAdminUser]
    pagination_class = None
    filter_backends = []
    serializer_class = SystemSerializer

@ -14,15 +14,14 @@ from rest_framework.fields import (
    ListField,
    SerializerMethodField,
)
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from structlog.stdlib import get_logger

from authentik.api.decorators import permission_required
from authentik.core.api.utils import PassiveSerializer
from authentik.events.monitored_tasks import TaskInfo, TaskResultStatus
from authentik.rbac.permissions import HasPermission

LOGGER = get_logger()

@ -64,7 +63,7 @@ class TaskSerializer(PassiveSerializer):
class TaskViewSet(ViewSet):
    """Read-only view set that returns all background tasks"""

    permission_classes = [HasPermission("authentik_rbac.view_system_tasks")]
    permission_classes = [IsAdminUser]
    serializer_class = TaskSerializer

    @extend_schema(

@ -94,7 +93,6 @@ class TaskViewSet(ViewSet):
        tasks = sorted(TaskInfo.all().values(), key=lambda task: task.task_name)
        return Response(TaskSerializer(tasks, many=True).data)

    @permission_required(None, ["authentik_rbac.run_system_tasks"])
    @extend_schema(
        request=OpenApiTypes.NONE,
        responses={

@ -2,18 +2,18 @@
from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.fields import IntegerField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP


class WorkerView(APIView):
    """Get currently connected worker count."""

    permission_classes = [HasPermission("authentik_rbac.view_system_info")]
    permission_classes = [IsAdminUser]

    @extend_schema(responses=inline_serializer("Workers", fields={"count": IntegerField()}))
    def get(self, request: Request) -> Response:

@ -7,9 +7,9 @@ from rest_framework.authentication import get_authorization_header
from rest_framework.filters import BaseFilterBackend
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
from rest_framework_guardian.filters import ObjectPermissionsFilter

from authentik.api.authentication import validate_auth
from authentik.rbac.filters import ObjectFilter


class OwnerFilter(BaseFilterBackend):

@ -26,14 +26,14 @@ class OwnerFilter(BaseFilterBackend):
class SecretKeyFilter(DjangoFilterBackend):
    """Allow access to all objects when authenticated with secret key as token.

    Replaces both DjangoFilterBackend and ObjectFilter"""
    Replaces both DjangoFilterBackend and ObjectPermissionsFilter"""

    def filter_queryset(self, request: Request, queryset: QuerySet, view) -> QuerySet:
        auth_header = get_authorization_header(request)
        token = validate_auth(auth_header)
        if token and token == settings.SECRET_KEY:
            return queryset
        queryset = ObjectFilter().filter_queryset(request, queryset, view)
        queryset = ObjectPermissionsFilter().filter_queryset(request, queryset, view)
        return super().filter_queryset(request, queryset, view)

@ -10,7 +10,7 @@ from structlog.stdlib import get_logger
LOGGER = get_logger()


def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[list[str]] = None):
def permission_required(perm: Optional[str] = None, other_perms: Optional[list[str]] = None):
    """Check permissions for a single custom action"""

    def wrapper_outter(func: Callable):

@ -18,17 +18,15 @@ def permission_required(obj_perm: Optional[str] = None, global_perms: Optional[l

        @wraps(func)
        def wrapper(self: ModelViewSet, request: Request, *args, **kwargs) -> Response:
            if obj_perm:
            if perm:
                obj = self.get_object()
                if not request.user.has_perm(obj_perm, obj):
                if not request.user.has_perm(perm, obj):
                    LOGGER.debug(
                    LOGGER.debug("denying access for object", user=request.user, perm=perm, obj=obj)
                        "denying access for object", user=request.user, perm=obj_perm, obj=obj
                    )
                    return self.permission_denied(request)
            if global_perms:
            if other_perms:
                for other_perm in global_perms:
                for other_perm in other_perms:
                    if not request.user.has_perm(other_perm):
                        LOGGER.debug("denying access for other", user=request.user, perm=other_perm)
                        LOGGER.debug("denying access for other", user=request.user, perm=perm)
                        return self.permission_denied(request)
            return func(self, request, *args, **kwargs)

@ -2,43 +2,6 @@
from rest_framework import pagination
from rest_framework.response import Response

PAGINATION_COMPONENT_NAME = "Pagination"
PAGINATION_SCHEMA = {
    "type": "object",
    "properties": {
        "next": {
            "type": "number",
        },
        "previous": {
            "type": "number",
        },
        "count": {
            "type": "number",
        },
        "current": {
            "type": "number",
        },
        "total_pages": {
            "type": "number",
        },
        "start_index": {
            "type": "number",
        },
        "end_index": {
            "type": "number",
        },
    },
    "required": [
        "next",
        "previous",
        "count",
        "current",
        "total_pages",
        "start_index",
        "end_index",
    ],
}


class Pagination(pagination.PageNumberPagination):
    """Pagination which includes total pages and current page"""

@ -72,15 +35,42 @@ class Pagination(pagination.PageNumberPagination):
        return {
            "type": "object",
            "properties": {
                "pagination": {"$ref": f"#/components/schemas/{PAGINATION_COMPONENT_NAME}"},
                "pagination": {
                    "type": "object",
                    "properties": {
                        "next": {
                            "type": "number",
                        },
                        "previous": {
                            "type": "number",
                        },
                        "count": {
                            "type": "number",
                        },
                        "current": {
                            "type": "number",
                        },
                        "total_pages": {
                            "type": "number",
                        },
                        "start_index": {
                            "type": "number",
                        },
                        "end_index": {
                            "type": "number",
                        },
                    },
                    "required": [
                        "next",
                        "previous",
                        "count",
                        "current",
                        "total_pages",
                        "start_index",
                        "end_index",
                    ],
                },
                "results": schema,
            },
            "required": ["pagination", "results"],
        }


class SmallerPagination(Pagination):
    """Smaller pagination for objects which might require a lot of queries
    to retrieve all data for."""

    max_page_size = 10

@ -1,6 +1,5 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
    ResolvedComponent,
    build_array_type,

@ -9,9 +8,6 @@ from drf_spectacular.plumbing import (
)
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings

from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA


def build_standard_type(obj, **kwargs):

@ -32,7 +28,7 @@ GENERIC_ERROR = build_object_type(
VALIDATION_ERROR = build_object_type(
    description=_("Validation Error"),
    properties={
        api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)),
        "non_field_errors": build_array_type(build_standard_type(OpenApiTypes.STR)),
        "code": build_standard_type(OpenApiTypes.STR),
    },
    required=[],

@ -40,7 +36,15 @@ VALIDATION_ERROR = build_object_type(
)


def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedComponent.SCHEMA):
def postprocess_schema_responses(result, generator, **kwargs):  # noqa: W0613
    """Workaround to set a default response for endpoints.
    Workaround suggested at
    <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
    for the missing drf-spectacular feature discussed in
    <https://github.com/tfranzel/drf-spectacular/issues/101>.
    """

    def create_component(name, schema, type_=ResolvedComponent.SCHEMA):
    """Register a component and return a reference to it."""
    component = ResolvedComponent(
        name=name,

@ -51,19 +55,8 @@ def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedCom
    generator.registry.register_on_missing(component)
    return component

    generic_error = create_component("GenericError", GENERIC_ERROR)
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):  # noqa: W0613
    validation_error = create_component("ValidationError", VALIDATION_ERROR)
    """Workaround to set a default response for endpoints.
    Workaround suggested at
    <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
    for the missing drf-spectacular feature discussed in
    <https://github.com/tfranzel/drf-spectacular/issues/101>.
    """

    create_component(generator, PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA)

    generic_error = create_component(generator, "GenericError", GENERIC_ERROR)
    validation_error = create_component(generator, "ValidationError", VALIDATION_ERROR)

    for path in result["paths"].values():
        for method in path.values():

@ -16,7 +16,6 @@ def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:

    def tester(self: TestModelViewSets):
        self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
        self.assertIsNotNone(getattr(test_viewset, "ordering", None))
        filterset_class = getattr(test_viewset, "filterset_class", None)
        if not filterset_class:
            self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))

@ -93,10 +93,10 @@ class ConfigView(APIView):
                "traces_sample_rate": float(CONFIG.get("error_reporting.sample_rate", 0.4)),
                },
                "capabilities": self.get_capabilities(),
                "cache_timeout": CONFIG.get_int("redis.cache_timeout"),
                "cache_timeout": int(CONFIG.get("redis.cache_timeout")),
                "cache_timeout_flows": CONFIG.get_int("redis.cache_timeout_flows"),
                "cache_timeout_flows": int(CONFIG.get("redis.cache_timeout_flows")),
                "cache_timeout_policies": CONFIG.get_int("redis.cache_timeout_policies"),
                "cache_timeout_policies": int(CONFIG.get("redis.cache_timeout_policies")),
                "cache_timeout_reputation": CONFIG.get_int("redis.cache_timeout_reputation"),
                "cache_timeout_reputation": int(CONFIG.get("redis.cache_timeout_reputation")),
            }
        )

@ -21,9 +21,7 @@ _other_urls = []
for _authentik_app in get_apps():
    try:
        api_urls = import_module(f"{_authentik_app.name}.urls")
    except ModuleNotFoundError:
    except (ModuleNotFoundError, ImportError) as exc:
        continue
    except ImportError as exc:
        LOGGER.warning("Could not import app's URLs", app_name=_authentik_app.name, exc=exc)
        continue
    if not hasattr(api_urls, "api_urlpatterns"):

@ -4,6 +4,7 @@ from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField, JSONField
from rest_framework.permissions import IsAdminUser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer, ModelSerializer

@ -48,7 +49,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
        if content == "":
            return content
        context = self.instance.context if self.instance else {}
        valid, logs = Importer.from_string(content, context).validate()
        valid, logs = Importer(content, context).validate()
        if not valid:
            text_logs = "\n".join([x["event"] for x in logs])
            raise ValidationError(_("Failed to validate blueprint: %(logs)s" % {"logs": text_logs}))

@ -86,11 +87,11 @@ class BlueprintInstanceSerializer(ModelSerializer):
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
    """Blueprint instances"""

    permission_classes = [IsAdminUser]
    serializer_class = BlueprintInstanceSerializer
    queryset = BlueprintInstance.objects.all()
    search_fields = ["name", "path"]
    filterset_fields = ["name", "path"]
    ordering = ["name"]

    @extend_schema(
        responses={

@ -40,7 +40,7 @@ class ManagedAppConfig(AppConfig):
                meth()
                self._logger.debug("Successfully reconciled", name=name)
            except (DatabaseError, ProgrammingError, InternalError) as exc:
                self._logger.warning("Failed to run reconcile", name=name, exc=exc)
                self._logger.debug("Failed to run reconcile", name=name, exc=exc)


class AuthentikBlueprintsConfig(ManagedAppConfig):

@ -18,7 +18,7 @@ class Command(BaseCommand):
        """Apply all blueprints in order, abort when one fails to import"""
        for blueprint_path in options.get("blueprints", []):
            content = BlueprintInstance(path=blueprint_path).retrieve()
            importer = Importer.from_string(content)
            importer = Importer(content)
            valid, _ = importer.validate()
            if not valid:
                self.stderr.write("blueprint invalid")

@ -9,7 +9,6 @@ from rest_framework.fields import Field, JSONField, UUIDField
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger

from authentik.blueprints.v1.common import BlueprintEntryDesiredState
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT, is_model_allowed
from authentik.blueprints.v1.meta.registry import BaseMetaModel, registry
from authentik.lib.models import SerializerModel

@ -111,7 +110,7 @@ class Command(BaseCommand):
                "id": {"type": "string"},
                "state": {
                    "type": "string",
                    "enum": [s.value for s in BlueprintEntryDesiredState],
                    "enum": ["absent", "present", "created"],
                    "default": "present",
                },
                "conditions": {"type": "array", "items": {"type": "boolean"}},

@ -20,7 +20,7 @@ def apply_blueprint(*files: str):
        def wrapper(*args, **kwargs):
            for file in files:
                content = BlueprintInstance(path=file).retrieve()
                Importer.from_string(content).apply()
                Importer(content).apply()
            return func(*args, **kwargs)

        return wrapper

@ -45,8 +45,3 @@ entries:
    attrs:
      name: "%(uid)s"
      password: "%(uid)s"
  - model: authentik_core.user
    identifiers:
      username: "%(uid)s-no-password"
    attrs:
      name: "%(uid)s"

@ -7,5 +7,7 @@ entries:
    state: absent
  - identifiers:
      name: "%(id)s"
    expression: |
      return True
    model: authentik_policies_expression.expressionpolicy
    state: absent

@ -9,8 +9,6 @@ context:
  mapping:
    key1: value
    key2: 2
  context1: context-nested-value
  context2: !Context context1
entries:
  - model: !Format ["%s", authentik_sources_oauth.oauthsource]
    state: !Format ["%s", present]

@ -36,7 +34,6 @@ entries:
    model: authentik_policies_expression.expressionpolicy
  - attrs:
      attributes:
        env_null: !Env [bar-baz, null]
        policy_pk1:
          !Format [
            "%s-%s",

@ -100,7 +97,6 @@ entries:
            [list, with, items, !Format ["foo-%s", !Context foo]],
          ]
        if_true_simple: !If [!Context foo, true, text]
        if_short: !If [!Context foo]
        if_false_simple: !If [null, false, 2]
        enumerate_mapping_to_mapping: !Enumerate [
          !Context mapping,

@ -145,7 +141,6 @@ entries:
            ]
          ]
        ]
        nested_context: !Context context2
    identifiers:
      name: test
    conditions:

@ -25,7 +25,7 @@ def blueprint_tester(file_name: Path) -> Callable:

    def tester(self: TestPackaged):
        base = Path("blueprints/")
        rel_path = Path(file_name).relative_to(base)
        importer = Importer.from_string(BlueprintInstance(path=str(rel_path)).retrieve())
        importer = Importer(BlueprintInstance(path=str(rel_path)).retrieve())
        self.assertTrue(importer.validate()[0])
        self.assertTrue(importer.apply())

@ -6,7 +6,6 @@ from django.test import TestCase

from authentik.blueprints.v1.importer import is_model_allowed
from authentik.lib.models import SerializerModel
from authentik.providers.oauth2.models import RefreshToken


class TestModels(TestCase):

@ -22,9 +21,6 @@ def serializer_tester_factory(test_model: Type[SerializerModel]) -> Callable:
        model_class = test_model()
        self.assertTrue(isinstance(model_class, SerializerModel))
        self.assertIsNotNone(model_class.serializer)
        if model_class.serializer.Meta().model == RefreshToken:
            return
        self.assertEqual(model_class.serializer.Meta().model, test_model)

    return tester

@@ -21,14 +21,14 @@ class TestBlueprintsV1(TransactionTestCase):

     def test_blueprint_invalid_format(self):
         """Test blueprint with invalid format"""
-        importer = Importer.from_string('{"version": 3}')
+        importer = Importer('{"version": 3}')
         self.assertFalse(importer.validate()[0])
-        importer = Importer.from_string(
+        importer = Importer(
             '{"version": 1,"entries":[{"identifiers":{},"attrs":{},'
             '"model": "authentik_core.User"}]}'
         )
         self.assertFalse(importer.validate()[0])
-        importer = Importer.from_string(
+        importer = Importer(
             '{"version": 1, "entries": [{"attrs": {"name": "test"}, '
             '"identifiers": {}, '
             '"model": "authentik_core.Group"}]}'

@@ -54,7 +54,7 @@ class TestBlueprintsV1(TransactionTestCase):
             },
         )

-        importer = Importer.from_string(
+        importer = Importer(
             '{"version": 1, "entries": [{"attrs": {"name": "test999", "attributes": '
             '{"key": ["updated_value"]}}, "identifiers": {"attributes": {"other_key": '
             '["other_value"]}}, "model": "authentik_core.Group"}]}'

@@ -103,7 +103,7 @@ class TestBlueprintsV1(TransactionTestCase):
         self.assertEqual(len(export.entries), 3)
         export_yaml = exporter.export_to_string()

-        importer = Importer.from_string(export_yaml)
+        importer = Importer(export_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())

@@ -113,14 +113,14 @@ class TestBlueprintsV1(TransactionTestCase):
         """Test export and import it twice"""
         count_initial = Prompt.objects.filter(field_key="username").count()

-        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())

         count_before = Prompt.objects.filter(field_key="username").count()
         self.assertEqual(count_initial + 1, count_before)

-        importer = Importer.from_string(load_fixture("fixtures/static_prompt_export.yaml"))
+        importer = Importer(load_fixture("fixtures/static_prompt_export.yaml"))
         self.assertTrue(importer.apply())

         self.assertEqual(Prompt.objects.filter(field_key="username").count(), count_before)
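Note on the API change running through these test hunks: the trustchain side builds the importer through a named constructor (`Importer.from_string(yaml)`), while the hack-close side passes the YAML string straight to `__init__`. A minimal sketch of the two call shapes, using a simplified stand-in class rather than the project's real Importer:

from typing import Optional

from yaml import safe_load


class Importer:
    """Stand-in showing the two construction styles seen in the diff."""

    def __init__(self, blueprint: dict, context: Optional[dict] = None) -> None:
        # newer style: __init__ receives an already-parsed blueprint dict
        self.blueprint = blueprint
        self.context = context or {}

    @staticmethod
    def from_string(yaml_input: str, context: Optional[dict] = None) -> "Importer":
        # newer style: parsing happens inside the named constructor
        return Importer(safe_load(yaml_input), context)


# the older style folds the parsing into __init__ instead: Importer('{"version": 1}')
importer = Importer.from_string('{"version": 1, "entries": []}')
print(importer.blueprint["version"])  # 1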
@@ -130,7 +130,7 @@ class TestBlueprintsV1(TransactionTestCase):
         ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
         Group.objects.filter(name="test").delete()
         environ["foo"] = generate_id()
-        importer = Importer.from_string(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
+        importer = Importer(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()

@@ -155,7 +155,6 @@ class TestBlueprintsV1(TransactionTestCase):
                    },
                    "if_false_complex": ["list", "with", "items", "foo-bar"],
                    "if_true_simple": True,
-                    "if_short": True,
                    "if_false_simple": 2,
                    "enumerate_mapping_to_mapping": {
                        "prefix-key1": "other-prefix-value",

@@ -212,10 +211,8 @@ class TestBlueprintsV1(TransactionTestCase):
                            ],
                        },
                    },
-                    "nested_context": "context-nested-value",
-                    "env_null": None,
                }
-            ).exists()
+            )
        )
        self.assertTrue(
            OAuthSource.objects.filter(

@@ -248,7 +245,7 @@ class TestBlueprintsV1(TransactionTestCase):
         exporter = FlowExporter(flow)
         export_yaml = exporter.export_to_string()

-        importer = Importer.from_string(export_yaml)
+        importer = Importer(export_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         self.assertTrue(UserLoginStage.objects.filter(name=stage_name).exists())

@@ -297,7 +294,7 @@ class TestBlueprintsV1(TransactionTestCase):
         exporter = FlowExporter(flow)
         export_yaml = exporter.export_to_string()

-        importer = Importer.from_string(export_yaml)
+        importer = Importer(export_yaml)

         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())

@@ -18,7 +18,7 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
         self.uid = generate_id()
         import_yaml = load_fixture("fixtures/conditional_fields.yaml", uid=self.uid, user=user.pk)

-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())


@@ -51,9 +51,3 @@ class TestBlueprintsV1ConditionalFields(TransactionTestCase):
         user: User = User.objects.filter(username=self.uid).first()
         self.assertIsNotNone(user)
         self.assertTrue(user.check_password(self.uid))
-
-    def test_user_null(self):
-        """Test user"""
-        user: User = User.objects.filter(username=f"{self.uid}-no-password").first()
-        self.assertIsNotNone(user)
-        self.assertFalse(user.has_usable_password())

@@ -18,7 +18,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
             "fixtures/conditions_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
         )

-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure objects exist

@@ -35,7 +35,7 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
             "fixtures/conditions_not_fulfilled.yaml", id1=flow_slug1, id2=flow_slug2
         )

-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure objects do not exist

@@ -15,7 +15,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_present.yaml", id=flow_slug)

-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists

@@ -30,7 +30,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.title, "bar")

         # Ensure importer updates it
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@@ -41,7 +41,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)

-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists

@@ -56,7 +56,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.title, "bar")

         # Ensure importer doesn't update it
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@@ -67,7 +67,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         flow_slug = generate_id()
         import_yaml = load_fixture("fixtures/state_created.yaml", id=flow_slug)

-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         # Ensure object exists

@@ -75,7 +75,7 @@ class TestBlueprintsV1State(TransactionTestCase):
         self.assertEqual(flow.slug, flow_slug)

         import_yaml = load_fixture("fixtures/state_absent.yaml", id=flow_slug)
-        importer = Importer.from_string(import_yaml)
+        importer = Importer(import_yaml)
         self.assertTrue(importer.validate()[0])
         self.assertTrue(importer.apply())
         flow: Flow = Flow.objects.filter(slug=flow_slug).first()

@@ -12,7 +12,6 @@ from uuid import UUID
 from deepmerge import always_merger
 from django.apps import apps
 from django.db.models import Model, Q
-from rest_framework.exceptions import ValidationError
 from rest_framework.fields import Field
 from rest_framework.serializers import Serializer
 from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode

@@ -53,7 +52,6 @@ class BlueprintEntryDesiredState(Enum):
     ABSENT = "absent"
     PRESENT = "present"
     CREATED = "created"
-    MUST_CREATED = "must_created"


 @dataclass

@@ -208,8 +206,8 @@ class KeyOf(YAMLTag):
                 ):
                     return _entry._state.instance.pbm_uuid
                 return _entry._state.instance.pk
-        raise EntryInvalidError.from_entry(
-            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance", entry
+        raise EntryInvalidError(
+            f"KeyOf: failed to find entry with `id` of `{self.id_from}` and a model instance"
         )

@@ -225,11 +223,11 @@ class Env(YAMLTag):
         if isinstance(node, ScalarNode):
             self.key = node.value
         if isinstance(node, SequenceNode):
-            self.key = loader.construct_object(node.value[0])
-            self.default = loader.construct_object(node.value[1])
+            self.key = node.value[0].value
+            self.default = node.value[1].value

     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
-        return getenv(self.key) or self.default
+        return getenv(self.key, self.default)


 class Context(YAMLTag):

@@ -244,15 +242,13 @@ class Context(YAMLTag):
         if isinstance(node, ScalarNode):
             self.key = node.value
         if isinstance(node, SequenceNode):
-            self.key = loader.construct_object(node.value[0])
-            self.default = loader.construct_object(node.value[1])
+            self.key = node.value[0].value
+            self.default = node.value[1].value

     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
         value = self.default
         if self.key in blueprint.context:
             value = blueprint.context[self.key]
-        if isinstance(value, YAMLTag):
-            return value.resolve(entry, blueprint)
         return value

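The `Env` and `Context` hunks above trade `loader.construct_object(node.value[0])` for `node.value[0].value`. The practical difference: `construct_object` resolves a sequence item that is itself a tagged node, while `.value` only reads its raw scalar text. A rough PyYAML sketch of the pattern, with an `!Env` tag written from scratch here to illustrate (it is not the project's actual loader):

from os import getenv

import yaml


class Env:
    """Custom tag that reads an environment variable with an optional default."""

    def __init__(self, loader: yaml.SafeLoader, node: yaml.Node) -> None:
        self.default = None
        if isinstance(node, yaml.ScalarNode):
            self.key = node.value
        else:
            # construct_object resolves nested tags in the sequence items;
            # node.value[0].value would only return their raw scalar text
            self.key = loader.construct_object(node.value[0])
            self.default = loader.construct_object(node.value[1])

    def resolve(self):
        return getenv(self.key, self.default)


yaml.SafeLoader.add_constructor("!Env", Env)
doc = yaml.load('editor: !Env ["EDITOR", "vi"]', Loader=yaml.SafeLoader)
print(doc["editor"].resolve())  # value of $EDITOR, or "vi" if unset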
@@ -264,7 +260,7 @@ class Format(YAMLTag):

     def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
         super().__init__()
-        self.format_string = loader.construct_object(node.value[0])
+        self.format_string = node.value[0].value
         self.args = []
         for raw_node in node.value[1:]:
             self.args.append(loader.construct_object(raw_node))

@@ -280,7 +276,7 @@ class Format(YAMLTag):
         try:
             return self.format_string % tuple(args)
         except TypeError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)


 class Find(YAMLTag):

@@ -343,7 +339,7 @@ class Condition(YAMLTag):

     def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
         super().__init__()
-        self.mode = loader.construct_object(node.value[0])
+        self.mode = node.value[0].value
         self.args = []
         for raw_node in node.value[1:]:
             self.args.append(loader.construct_object(raw_node))

@@ -357,15 +353,13 @@ class Condition(YAMLTag):
             args.append(arg)

         if not args:
-            raise EntryInvalidError.from_entry(
-                "At least one value is required after mode selection.", entry
-            )
+            raise EntryInvalidError("At least one value is required after mode selection.")

         try:
             comparator = self._COMPARATORS[self.mode.upper()]
             return comparator(tuple(bool(x) for x in args))
         except (TypeError, KeyError) as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)


 class If(YAMLTag):

@@ -378,10 +372,6 @@ class If(YAMLTag):
     def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
         super().__init__()
         self.condition = loader.construct_object(node.value[0])
-        if len(node.value) == 1:
-            self.when_true = True
-            self.when_false = False
-        else:
-            self.when_true = loader.construct_object(node.value[1])
-            self.when_false = loader.construct_object(node.value[2])
+        self.when_true = loader.construct_object(node.value[1])
+        self.when_false = loader.construct_object(node.value[2])


@@ -397,7 +387,7 @@ class If(YAMLTag):
                 blueprint,
             )
         except TypeError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)


 class Enumerate(YAMLTag, YAMLTagContext):

@@ -420,7 +410,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
     def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
         super().__init__()
         self.iterable = loader.construct_object(node.value[0])
-        self.output_body = loader.construct_object(node.value[1])
+        self.output_body = node.value[1].value
         self.item_body = loader.construct_object(node.value[2])
         self.__current_context: tuple[Any, Any] = tuple()

@@ -429,10 +419,9 @@ class Enumerate(YAMLTag, YAMLTagContext):

     def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
         if isinstance(self.iterable, EnumeratedItem) and self.iterable.depth == 0:
-            raise EntryInvalidError.from_entry(
+            raise EntryInvalidError(
                 f"{self.__class__.__name__} tag's iterable references this tag's context. "
-                "This is a noop. Check you are setting depth bigger than 0.",
-                entry,
+                "This is a noop. Check you are setting depth bigger than 0."
             )

         if isinstance(self.iterable, YAMLTag):

@@ -441,10 +430,9 @@ class Enumerate(YAMLTag, YAMLTagContext):
             iterable = self.iterable

         if not isinstance(iterable, Iterable):
-            raise EntryInvalidError.from_entry(
+            raise EntryInvalidError(
                 f"{self.__class__.__name__}'s iterable must be an iterable "
-                "such as a sequence or a mapping",
-                entry,
+                "such as a sequence or a mapping"
             )

         if isinstance(iterable, Mapping):

@@ -455,7 +443,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
         try:
             output_class, add_fn = self._OUTPUT_BODIES[self.output_body.upper()]
         except KeyError as exc:
-            raise EntryInvalidError.from_entry(exc, entry)
+            raise EntryInvalidError(exc)

         result = output_class()

@@ -467,8 +455,8 @@ class Enumerate(YAMLTag, YAMLTagContext):
                 resolved_body = entry.tag_resolver(self.item_body, blueprint)
                 result = add_fn(result, resolved_body)
                 if not isinstance(result, output_class):
-                    raise EntryInvalidError.from_entry(
-                        f"Invalid {self.__class__.__name__} item found: {resolved_body}", entry
+                    raise EntryInvalidError(
+                        f"Invalid {self.__class__.__name__} item found: {resolved_body}"
                     )
         finally:
             self.__current_context = tuple()

@@ -495,13 +483,12 @@ class EnumeratedItem(YAMLTag):
             )
         except ValueError as exc:
             if self.depth == 0:
-                raise EntryInvalidError.from_entry(
+                raise EntryInvalidError(
                     f"{self.__class__.__name__} tags are only usable "
-                    f"inside an {Enumerate.__name__} tag",
-                    entry,
+                    f"inside an {Enumerate.__name__} tag"
                 )

-            raise EntryInvalidError.from_entry(f"{self.__class__.__name__} tag: {exc}", entry)
+            raise EntryInvalidError(f"{self.__class__.__name__} tag: {exc}")

         return context_tag.get_context(entry, blueprint)

@@ -515,7 +502,7 @@ class Index(EnumeratedItem):
         try:
             return context[0]
         except IndexError:  # pragma: no cover
-            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
+            raise EntryInvalidError(f"Empty/invalid context: {context}")


 class Value(EnumeratedItem):

@@ -527,7 +514,7 @@ class Value(EnumeratedItem):
         try:
             return context[1]
         except IndexError:  # pragma: no cover
-            raise EntryInvalidError.from_entry(f"Empty/invalid context: {context}", entry)
+            raise EntryInvalidError(f"Empty/invalid context: {context}")


 class BlueprintDumper(SafeDumper):

@@ -581,31 +568,8 @@ class BlueprintLoader(SafeLoader):
 class EntryInvalidError(SentryIgnoredException):
     """Error raised when an entry is invalid"""

-    entry_model: Optional[str]
-    entry_id: Optional[str]
-    validation_error: Optional[ValidationError]
-    serializer: Optional[Serializer] = None
+    serializer_errors: Optional[dict]

-    def __init__(
-        self, *args: object, validation_error: Optional[ValidationError] = None, **kwargs
-    ) -> None:
+    def __init__(self, *args: object, serializer_errors: Optional[dict] = None) -> None:
         super().__init__(*args)
-        self.entry_model = None
-        self.entry_id = None
-        self.validation_error = validation_error
-        for key, value in kwargs.items():
-            setattr(self, key, value)
-
-    @staticmethod
-    def from_entry(
-        msg_or_exc: str | Exception, entry: BlueprintEntry, *args, **kwargs
-    ) -> "EntryInvalidError":
-        """Create EntryInvalidError with the context of an entry"""
-        error = EntryInvalidError(msg_or_exc, *args, **kwargs)
-        if isinstance(msg_or_exc, ValidationError):
-            error.validation_error = msg_or_exc
-        # Make sure the model and id are strings, depending where the error happens
-        # they might still be YAMLTag instances
-        error.entry_model = str(entry.model)
-        error.entry_id = str(entry.id)
-        return error
+        self.serializer_errors = serializer_errors

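The `EntryInvalidError` hunk above is the heart of the change: the trustchain side carries the failing entry's model and id on the exception through a `from_entry` factory, while the hack-close side only keeps `serializer_errors`. A compact sketch of the factory idea, using a simplified entry type whose field names are illustrative:

from dataclasses import dataclass
from typing import Optional, Union


@dataclass
class BlueprintEntry:
    model: str
    id: Optional[str] = None


class EntryInvalidError(Exception):
    """Error raised when a blueprint entry is invalid."""

    entry_model: Optional[str] = None
    entry_id: Optional[str] = None

    @staticmethod
    def from_entry(msg_or_exc: Union[str, Exception], entry: BlueprintEntry) -> "EntryInvalidError":
        # Copy the entry's identifying fields onto the error so callers can
        # report which entry failed without holding the entry object itself.
        error = EntryInvalidError(msg_or_exc)
        error.entry_model = str(entry.model)
        error.entry_id = str(entry.id)
        return error


err = EntryInvalidError.from_entry("No or invalid identifiers", BlueprintEntry("authentik_core.group"))
print(err.entry_model, err.entry_id)  # authentik_core.group None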
@@ -8,9 +8,9 @@ from dacite.core import from_dict
 from dacite.exceptions import DaciteError
 from deepmerge import always_merger
 from django.core.exceptions import FieldError
+from django.db import transaction
 from django.db.models import Model
 from django.db.models.query_utils import Q
-from django.db.transaction import atomic
 from django.db.utils import IntegrityError
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import BaseSerializer, Serializer

@@ -35,28 +35,23 @@ from authentik.core.models import (
     Source,
     UserSourceConnection,
 )
-from authentik.enterprise.models import LicenseUsage
-from authentik.events.utils import cleanse_dict
 from authentik.flows.models import FlowToken, Stage
 from authentik.lib.models import SerializerModel
-from authentik.lib.sentry import SentryIgnoredException
 from authentik.outposts.models import OutpostServiceConnection
 from authentik.policies.models import Policy, PolicyBindingModel
-from authentik.providers.scim.models import SCIMGroup, SCIMUser

 # Context set when the serializer is created in a blueprint context
 # Update website/developer-docs/blueprints/v1/models.md when used
 SERIALIZER_CONTEXT_BLUEPRINT = "blueprint_entry"


-def excluded_models() -> list[type[Model]]:
-    """Return a list of all excluded models that shouldn't be exposed via API
-    or other means (internal only, base classes, non-used objects, etc)"""
+def is_model_allowed(model: type[Model]) -> bool:
+    """Check if model is allowed"""
     # pylint: disable=imported-auth-user
     from django.contrib.auth.models import Group as DjangoGroup
     from django.contrib.auth.models import User as DjangoUser

-    return (
+    excluded_models = (
         DjangoUser,
         DjangoGroup,
         # Base classes

@@ -72,64 +67,45 @@ def excluded_models() -> list[type[Model]]:
         AuthenticatedSession,
         # Classes which are only internally managed
         FlowToken,
-        LicenseUsage,
-        SCIMGroup,
-        SCIMUser,
     )
+    return model not in excluded_models and issubclass(model, (SerializerModel, BaseMetaModel))


-def is_model_allowed(model: type[Model]) -> bool:
-    """Check if model is allowed"""
-    return model not in excluded_models() and issubclass(model, (SerializerModel, BaseMetaModel))
-
-
-class DoRollback(SentryIgnoredException):
-    """Exception to trigger a rollback"""
-
-
 @contextmanager
 def transaction_rollback():
     """Enters an atomic transaction and always triggers a rollback at the end of the block."""
-    try:
-        with atomic():
-            yield
-            raise DoRollback()
-    except DoRollback:
-        pass
+    atomic = transaction.atomic()
+    # pylint: disable=unnecessary-dunder-call
+    atomic.__enter__()
+    yield
+    atomic.__exit__(IntegrityError, None, None)


 class Importer:
-    """Import Blueprint from raw dict or YAML/JSON"""
+    """Import Blueprint from YAML"""

     logger: BoundLogger
-    _import: Blueprint

-    def __init__(self, blueprint: Blueprint, context: Optional[dict] = None):
+    def __init__(self, yaml_input: str, context: Optional[dict] = None):
         self.__pk_map: dict[Any, Model] = {}
-        self._import = blueprint
         self.logger = get_logger()
-        ctx = {}
-        always_merger.merge(ctx, self._import.context)
-        if context:
-            always_merger.merge(ctx, context)
-        self._import.context = ctx
-
-    @staticmethod
-    def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
-        """Parse YAML string and create blueprint importer from it"""
         import_dict = load(yaml_input, BlueprintLoader)
         try:
-            _import = from_dict(
+            self.__import = from_dict(
                 Blueprint, import_dict, config=Config(cast=[BlueprintEntryDesiredState])
             )
         except DaciteError as exc:
             raise EntryInvalidError from exc
-        return Importer(_import, context)
+        ctx = {}
+        always_merger.merge(ctx, self.__import.context)
+        if context:
+            always_merger.merge(ctx, context)
+        self.__import.context = ctx

     @property
     def blueprint(self) -> Blueprint:
         """Get imported blueprint"""
-        return self._import
+        return self.__import

     def __update_pks_for_attrs(self, attrs: dict[str, Any]) -> dict[str, Any]:
         """Replace any value if it is a known primary key of an other object"""
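Both versions of `transaction_rollback` in the hunk above force a rollback when the wrapped block ends; the newer one does it by raising a sentinel exception inside `atomic()` instead of calling the context manager's dunder methods by hand. A self-contained sketch of the sentinel approach, with a fake `atomic()` standing in for Django's so it runs without a database:

from contextlib import contextmanager


class DoRollback(Exception):
    """Sentinel used only to unwind the atomic block."""


@contextmanager
def fake_atomic():
    # Stand-in for django.db.transaction.atomic: commits on clean exit,
    # rolls back when an exception escapes the block.
    try:
        yield
        print("commit")
    except Exception:
        print("rollback")
        raise


@contextmanager
def transaction_rollback():
    """Run the wrapped block inside a transaction, then always roll it back."""
    try:
        with fake_atomic():
            yield
            raise DoRollback()
    except DoRollback:
        pass


with transaction_rollback():
    print("work that must not persist")
# prints: work that must not persist / rollback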
@@ -175,19 +151,19 @@ class Importer:
     # pylint: disable-msg=too-many-locals
     def _validate_single(self, entry: BlueprintEntry) -> Optional[BaseSerializer]:
         """Validate a single entry"""
-        if not entry.check_all_conditions_match(self._import):
+        if not entry.check_all_conditions_match(self.__import):
             self.logger.debug("One or more conditions of this entry are not fulfilled, skipping")
             return None

-        model_app_label, model_name = entry.get_model(self._import).split(".")
+        model_app_label, model_name = entry.get_model(self.__import).split(".")
         model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
         # Don't use isinstance since we don't want to check for inheritance
         if not is_model_allowed(model):
-            raise EntryInvalidError.from_entry(f"Model {model} not allowed", entry)
+            raise EntryInvalidError(f"Model {model} not allowed")
         if issubclass(model, BaseMetaModel):
             serializer_class: type[Serializer] = model.serializer()
             serializer = serializer_class(
-                data=entry.get_attrs(self._import),
+                data=entry.get_attrs(self.__import),
                 context={
                     SERIALIZER_CONTEXT_BLUEPRINT: entry,
                 },

@@ -195,10 +171,8 @@ class Importer:
             try:
                 serializer.is_valid(raise_exception=True)
             except ValidationError as exc:
-                raise EntryInvalidError.from_entry(
-                    f"Serializer errors {serializer.errors}",
-                    validation_error=exc,
-                    entry=entry,
+                raise EntryInvalidError(
+                    f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
                 ) from exc
             return serializer

@@ -207,7 +181,7 @@ class Importer:
         # the full serializer for later usage
         # Because a model might have multiple unique columns, we chain all identifiers together
         # to create an OR query.
-        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self._import))
+        updated_identifiers = self.__update_pks_for_attrs(entry.get_identifiers(self.__import))
         for key, value in list(updated_identifiers.items()):
             if isinstance(value, dict) and "pk" in value:
                 del updated_identifiers[key]
@@ -215,16 +189,19 @@ class Importer:

         query = self.__query_from_identifier(updated_identifiers)
         if not query:
-            raise EntryInvalidError.from_entry("No or invalid identifiers", entry)
+            raise EntryInvalidError("No or invalid identifiers")

         try:
             existing_models = model.objects.filter(query)
         except FieldError as exc:
-            raise EntryInvalidError.from_entry(f"Invalid identifier field: {exc}", entry) from exc
+            raise EntryInvalidError(f"Invalid identifier field: {exc}") from exc

         serializer_kwargs = {}
         model_instance = existing_models.first()
         if not isinstance(model(), BaseMetaModel) and model_instance:
+            if entry.get_state(self.__import) == BlueprintEntryDesiredState.CREATED:
+                self.logger.debug("instance exists, skipping")
+                return None
             self.logger.debug(
                 "initialise serializer with instance",
                 model=model,

@@ -233,19 +210,9 @@ class Importer:
             )
             serializer_kwargs["instance"] = model_instance
             serializer_kwargs["partial"] = True
-        elif model_instance and entry.state == BlueprintEntryDesiredState.MUST_CREATED:
-            raise EntryInvalidError.from_entry(
-                (
-                    f"state is set to {BlueprintEntryDesiredState.MUST_CREATED} "
-                    "and object exists already",
-                ),
-                entry,
-            )
         else:
             self.logger.debug(
-                "initialised new serializer instance",
-                model=model,
-                **cleanse_dict(updated_identifiers),
+                "initialised new serializer instance", model=model, **updated_identifiers
             )
             model_instance = model()
             # pk needs to be set on the model instance otherwise a new one will be generated
@@ -253,12 +220,9 @@ class Importer:
                 model_instance.pk = updated_identifiers["pk"]
             serializer_kwargs["instance"] = model_instance
         try:
-            full_data = self.__update_pks_for_attrs(entry.get_attrs(self._import))
+            full_data = self.__update_pks_for_attrs(entry.get_attrs(self.__import))
         except ValueError as exc:
-            raise EntryInvalidError.from_entry(
-                exc,
-                entry,
-            ) from exc
+            raise EntryInvalidError(exc) from exc
         always_merger.merge(full_data, updated_identifiers)
         serializer_kwargs["data"] = full_data

@@ -271,18 +235,15 @@ class Importer:
         try:
             serializer.is_valid(raise_exception=True)
         except ValidationError as exc:
-            raise EntryInvalidError.from_entry(
-                f"Serializer errors {serializer.errors}",
-                validation_error=exc,
-                entry=entry,
-                serializer=serializer,
+            raise EntryInvalidError(
+                f"Serializer errors {serializer.errors}", serializer_errors=serializer.errors
             ) from exc
         return serializer

     def apply(self) -> bool:
         """Apply (create/update) models yaml, in database transaction"""
         try:
-            with atomic():
+            with transaction.atomic():
                 if not self._apply_models():
                     self.logger.debug("Reverting changes due to error")
                     raise IntegrityError
@@ -291,11 +252,11 @@ class Importer:
             self.logger.debug("Committing changes")
             return True

-    def _apply_models(self, raise_errors=False) -> bool:
+    def _apply_models(self) -> bool:
         """Apply (create/update) models yaml"""
         self.__pk_map = {}
-        for entry in self._import.entries:
-            model_app_label, model_name = entry.get_model(self._import).split(".")
+        for entry in self.__import.entries:
+            model_app_label, model_name = entry.get_model(self.__import).split(".")
             try:
                 model: type[SerializerModel] = registry.get_model(model_app_label, model_name)
             except LookupError:

@@ -304,45 +265,24 @@ class Importer:
                 )
                 return False
             # Validate each single entry
-            serializer = None
             try:
                 serializer = self._validate_single(entry)
             except EntryInvalidError as exc:
-                # For deleting objects we don't need the serializer to be valid
-                if entry.get_state(self._import) == BlueprintEntryDesiredState.ABSENT:
-                    serializer = exc.serializer
-                else:
-                    self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
-                    if raise_errors:
-                        raise exc
-                    return False
+                self.logger.warning(f"entry invalid: {exc}", entry=entry, error=exc)
+                return False
             if not serializer:
                 continue

-            state = entry.get_state(self._import)
+            state = entry.get_state(self.__import)
             if state in [
                 BlueprintEntryDesiredState.PRESENT,
                 BlueprintEntryDesiredState.CREATED,
-                BlueprintEntryDesiredState.MUST_CREATED,
             ]:
-                instance = serializer.instance
-                if (
-                    instance
-                    and not instance._state.adding
-                    and state == BlueprintEntryDesiredState.CREATED
-                ):
-                    self.logger.debug(
-                        "instance exists, skipping",
-                        model=model,
-                        instance=instance,
-                        pk=instance.pk,
-                    )
-                else:
-                    instance = serializer.save()
-                    self.logger.debug("updated model", model=instance)
+                model = serializer.save()
                 if "pk" in entry.identifiers:
-                    self.__pk_map[entry.identifiers["pk"]] = instance.pk
-                entry._state = BlueprintEntryState(instance)
+                    self.__pk_map[entry.identifiers["pk"]] = model.pk
+                entry._state = BlueprintEntryState(model)
+                self.logger.debug("updated model", model=model)
             elif state == BlueprintEntryDesiredState.ABSENT:
                 instance: Optional[Model] = serializer.instance
                 if instance.pk:
@@ -352,23 +292,22 @@ class Importer:
                     self.logger.debug("entry to delete with no instance, skipping")
         return True

-    def validate(self, raise_validation_errors=False) -> tuple[bool, list[EventDict]]:
+    def validate(self) -> tuple[bool, list[EventDict]]:
         """Validate loaded blueprint export, ensure all models are allowed
         and serializers have no errors"""
         self.logger.debug("Starting blueprint import validation")
-        orig_import = deepcopy(self._import)
-        if self._import.version != 1:
+        orig_import = deepcopy(self.__import)
+        if self.__import.version != 1:
             self.logger.warning("Invalid blueprint version")
             return False, [{"event": "Invalid blueprint version"}]
         with (
             transaction_rollback(),
             capture_logs() as logs,
         ):
-            successful = self._apply_models(raise_errors=raise_validation_errors)
+            successful = self._apply_models()
             if not successful:
                 self.logger.debug("Blueprint validation failed")
         for log in logs:
             getattr(self.logger, log.get("log_level"))(**log)
-        self.logger.debug("Finished blueprint import validation")
-        self._import = orig_import
+        self.__import = orig_import
         return successful, logs

@@ -31,7 +31,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
         required = attrs["required"]
         instance = BlueprintInstance.objects.filter(**identifiers).first()
         if not instance and required:
-            raise ValidationError({"identifiers": "Required blueprint does not exist"})
+            raise ValidationError("Required blueprint does not exist")
         self.blueprint_instance = instance
         return super().validate(attrs)

@@ -75,14 +75,14 @@ class BlueprintEventHandler(FileSystemEventHandler):
             return
         if event.is_directory:
             return
-        root = Path(CONFIG.get("blueprints_dir")).absolute()
-        path = Path(event.src_path).absolute()
-        rel_path = str(path.relative_to(root))
         if isinstance(event, FileCreatedEvent):
-            LOGGER.debug("new blueprint file created, starting discovery", path=rel_path)
-            blueprints_discovery.delay(rel_path)
+            LOGGER.debug("new blueprint file created, starting discovery")
+            blueprints_discovery.delay()
         if isinstance(event, FileModifiedEvent):
-            for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
+            path = Path(event.src_path)
+            root = Path(CONFIG.get("blueprints_dir")).absolute()
+            rel_path = str(path.relative_to(root))
+            for instance in BlueprintInstance.objects.filter(path=rel_path):
                 LOGGER.debug("modified blueprint file, starting apply", instance=instance)
                 apply_blueprint.delay(instance.pk.hex)

@@ -98,32 +98,39 @@ def blueprints_find_dict():
     return blueprints


-def blueprints_find() -> list[BlueprintFile]:
+def blueprints_find():
     """Find blueprints and return valid ones"""
     blueprints = []
     root = Path(CONFIG.get("blueprints_dir"))
     for path in root.rglob("**/*.yaml"):
-        rel_path = path.relative_to(root)
         # Check if any part in the path starts with a dot and assume a hidden file
         if any(part for part in path.parts if part.startswith(".")):
             continue
+        LOGGER.debug("found blueprint", path=str(path))
         with open(path, "r", encoding="utf-8") as blueprint_file:
             try:
                 raw_blueprint = load(blueprint_file.read(), BlueprintLoader)
             except YAMLError as exc:
                 raw_blueprint = None
-                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(rel_path))
+                LOGGER.warning("failed to parse blueprint", exc=exc, path=str(path))
             if not raw_blueprint:
                 continue
             metadata = raw_blueprint.get("metadata", None)
             version = raw_blueprint.get("version", 1)
             if version != 1:
-                LOGGER.warning("invalid blueprint version", version=version, path=str(rel_path))
+                LOGGER.warning("invalid blueprint version", version=version, path=str(path))
                 continue
         file_hash = sha512(path.read_bytes()).hexdigest()
-        blueprint = BlueprintFile(str(rel_path), version, file_hash, int(path.stat().st_mtime))
+        blueprint = BlueprintFile(
+            str(path.relative_to(root)), version, file_hash, int(path.stat().st_mtime)
+        )
         blueprint.meta = from_dict(BlueprintMetadata, metadata) if metadata else None
         blueprints.append(blueprint)
+        LOGGER.debug(
+            "parsed & loaded blueprint",
+            hash=file_hash,
+            path=str(path),
+        )
     return blueprints

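Both sides of `blueprints_find` store the blueprint's path relative to the configured root; the newer side just computes `rel_path` once at the top of the loop. For reference, the `pathlib` call involved behaves like this (directory names below are placeholders):

from pathlib import Path

root = Path("/blueprints")
path = root / "default" / "flow-oobe.yaml"

# relative_to() strips the root prefix and raises ValueError
# if `path` does not live underneath `root`.
rel_path = path.relative_to(root)
print(str(rel_path))  # default/flow-oobe.yaml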
@@ -131,12 +138,10 @@ def blueprints_find() -> list[BlueprintFile]:
     throws=(DatabaseError, ProgrammingError, InternalError), base=MonitoredTask, bind=True
 )
 @prefill_task
-def blueprints_discovery(self: MonitoredTask, path: Optional[str] = None):
+def blueprints_discovery(self: MonitoredTask):
     """Find blueprints and check if they need to be created in the database"""
     count = 0
     for blueprint in blueprints_find():
-        if path and blueprint.path != path:
-            continue
         check_blueprint_v1_file(blueprint)
         count += 1
     self.set_status(

@@ -166,11 +171,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
             metadata={},
         )
         instance.save()
-        LOGGER.info(
-            "Creating new blueprint instance from file", instance=instance, path=instance.path
-        )
     if instance.last_applied_hash != blueprint.hash:
-        LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
         apply_blueprint.delay(str(instance.pk))

@@ -189,7 +190,7 @@ def apply_blueprint(self: MonitoredTask, instance_pk: str):
         self.set_uid(slugify(instance.name))
         blueprint_content = instance.retrieve()
         file_hash = sha512(blueprint_content.encode()).hexdigest()
-        importer = Importer.from_string(blueprint_content, instance.context)
+        importer = Importer(blueprint_content, instance.context)
         if importer.blueprint.metadata:
             instance.metadata = asdict(importer.blueprint.metadata)
         valid, logs = importer.validate()

@@ -17,6 +17,7 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter
 from structlog.stdlib import get_logger
 from structlog.testing import capture_logs

@@ -37,7 +38,6 @@ from authentik.lib.utils.file import (
 from authentik.policies.api.exec import PolicyTestResultSerializer
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.types import PolicyResult
-from authentik.rbac.filters import ObjectFilter

 LOGGER = get_logger()

@@ -98,7 +98,6 @@ class ApplicationSerializer(ModelSerializer):
 class ApplicationViewSet(UsedByMixin, ModelViewSet):
     """Application Viewset"""

-    # pylint: disable=no-member
     queryset = Application.objects.all().prefetch_related("provider")
     serializer_class = ApplicationSerializer
     search_fields = [

@@ -123,7 +122,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
     def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
         """Custom filter_queryset method which ignores guardian, but still supports sorting"""
         for backend in list(self.filter_backends):
-            if backend == ObjectFilter:
+            if backend == ObjectPermissionsFilter:
                 continue
             queryset = backend().filter_queryset(self.request, queryset, self)
         return queryset

@@ -1,4 +1,6 @@
 """Authenticator Devices API Views"""
+from django_otp import device_classes, devices_for_user
+from django_otp.models import Device
 from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import OpenApiParameter, extend_schema
 from rest_framework.fields import BooleanField, CharField, IntegerField, SerializerMethodField

@@ -8,8 +10,6 @@ from rest_framework.response import Response
 from rest_framework.viewsets import ViewSet

 from authentik.core.api.utils import MetaNameSerializer
-from authentik.stages.authenticator import device_classes, devices_for_user
-from authentik.stages.authenticator.models import Device


 class DeviceSerializer(MetaNameSerializer):

@@ -2,6 +2,7 @@
 from json import loads
 from typing import Optional

+from django.db.models.query import QuerySet
 from django.http import Http404
 from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
 from django_filters.filterset import FilterSet

@@ -13,12 +14,12 @@ from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ListSerializer, ModelSerializer, ValidationError
 from rest_framework.viewsets import ModelViewSet
+from rest_framework_guardian.filters import ObjectPermissionsFilter

 from authentik.api.decorators import permission_required
 from authentik.core.api.used_by import UsedByMixin
 from authentik.core.api.utils import PassiveSerializer, is_dict
 from authentik.core.models import Group, User
-from authentik.rbac.api.roles import RoleSerializer


 class GroupMemberSerializer(ModelSerializer):
@@ -48,13 +49,7 @@ class GroupSerializer(ModelSerializer):
     users_obj = ListSerializer(
         child=GroupMemberSerializer(), read_only=True, source="users", required=False
     )
-    roles_obj = ListSerializer(
-        child=RoleSerializer(),
-        read_only=True,
-        source="roles",
-        required=False,
-    )
-    parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
+    parent_name = CharField(source="parent.name", read_only=True)

     num_pk = IntegerField(read_only=True)

@@ -76,10 +71,8 @@ class GroupSerializer(ModelSerializer):
             "parent",
             "parent_name",
             "users",
-            "users_obj",
             "attributes",
-            "roles",
-            "roles_obj",
+            "users_obj",
         ]
         extra_kwargs = {
             "users": {
@ -139,13 +132,25 @@ class UserAccountSerializer(PassiveSerializer):
|
||||||
class GroupViewSet(UsedByMixin, ModelViewSet):
|
class GroupViewSet(UsedByMixin, ModelViewSet):
|
||||||
"""Group Viewset"""
|
"""Group Viewset"""
|
||||||
|
|
||||||
# pylint: disable=no-member
|
|
||||||
queryset = Group.objects.all().select_related("parent").prefetch_related("users")
|
queryset = Group.objects.all().select_related("parent").prefetch_related("users")
|
||||||
serializer_class = GroupSerializer
|
serializer_class = GroupSerializer
|
||||||
search_fields = ["name", "is_superuser"]
|
search_fields = ["name", "is_superuser"]
|
||||||
filterset_class = GroupFilter
|
filterset_class = GroupFilter
|
||||||
ordering = ["name"]
|
ordering = ["name"]
|
||||||
|
|
||||||
|
def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
|
||||||
|
"""Custom filter_queryset method which ignores guardian, but still supports sorting"""
|
||||||
|
for backend in list(self.filter_backends):
|
||||||
|
if backend == ObjectPermissionsFilter:
|
||||||
|
continue
|
||||||
|
queryset = backend().filter_queryset(self.request, queryset, self)
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
def filter_queryset(self, queryset):
|
||||||
|
if self.request.user.has_perm("authentik_core.view_group"):
|
||||||
|
return self._filter_queryset_for_list(queryset)
|
||||||
|
return super().filter_queryset(queryset)
|
||||||
|
|
||||||
@permission_required(None, ["authentik_core.add_user"])
|
@permission_required(None, ["authentik_core.add_user"])
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
request=UserAccountSerializer,
|
request=UserAccountSerializer,
|
||||||
|
|
|
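Aside: the viewset hunks above (and the matching UserViewSet hunk further down) all follow one pattern: fall back to guardian's ObjectPermissionsFilter for object-level filtering, but skip it entirely when the requesting user already holds the global view permission, so that ordering and search backends still run. A minimal sketch of that pattern, assuming a hypothetical Widget model and an "app.view_widget" permission string (neither is part of this diff):

    from django.db.models import QuerySet
    from rest_framework.filters import OrderingFilter
    from rest_framework.viewsets import ModelViewSet
    from rest_framework_guardian.filters import ObjectPermissionsFilter


    class WidgetViewSet(ModelViewSet):
        # Hypothetical viewset; queryset/serializer_class omitted for brevity.
        filter_backends = [ObjectPermissionsFilter, OrderingFilter]

        def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
            # Run every backend except the guardian object filter, so ordering
            # and search still apply to the unrestricted queryset.
            for backend in list(self.filter_backends):
                if backend == ObjectPermissionsFilter:
                    continue
                queryset = backend().filter_queryset(self.request, queryset, self)
            return queryset

        def filter_queryset(self, queryset):
            # Users with the global model permission see every object; everyone
            # else is limited to objects they hold per-object permissions on.
            if self.request.user.has_perm("app.view_widget"):
                return self._filter_queryset_for_list(queryset)
            return super().filter_queryset(queryset)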
@@ -38,7 +38,7 @@ class SourceSerializer(ModelSerializer, MetaNameSerializer):
 
     managed = ReadOnlyField()
     component = SerializerMethodField()
-    icon = ReadOnlyField(source="icon_url")
+    icon = ReadOnlyField(source="get_icon")
 
     def get_component(self, obj: Source) -> str:
         """Get object component so that we know how to edit the object"""

@@ -47,7 +47,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
         attrs.setdefault("user", request.user)
         attrs.setdefault("intent", TokenIntents.INTENT_API)
         if attrs.get("intent") not in [TokenIntents.INTENT_API, TokenIntents.INTENT_APP_PASSWORD]:
-            raise ValidationError({"intent": f"Invalid intent {attrs.get('intent')}"})
+            raise ValidationError(f"Invalid intent {attrs.get('intent')}")
         return attrs
 
     class Meta:
@ -1,140 +0,0 @@
|
||||||
"""transactional application and provider creation"""
|
|
||||||
from django.apps import apps
|
|
||||||
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
|
|
||||||
from rest_framework.exceptions import ValidationError
|
|
||||||
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
|
|
||||||
from rest_framework.permissions import IsAdminUser
|
|
||||||
from rest_framework.request import Request
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.views import APIView
|
|
||||||
from yaml import ScalarNode
|
|
||||||
|
|
||||||
from authentik.blueprints.v1.common import (
|
|
||||||
Blueprint,
|
|
||||||
BlueprintEntry,
|
|
||||||
BlueprintEntryDesiredState,
|
|
||||||
EntryInvalidError,
|
|
||||||
KeyOf,
|
|
||||||
)
|
|
||||||
from authentik.blueprints.v1.importer import Importer
|
|
||||||
from authentik.core.api.applications import ApplicationSerializer
|
|
||||||
from authentik.core.api.utils import PassiveSerializer
|
|
||||||
from authentik.core.models import Provider
|
|
||||||
from authentik.lib.utils.reflection import all_subclasses
|
|
||||||
|
|
||||||
|
|
||||||
def get_provider_serializer_mapping():
|
|
||||||
"""Get a mapping of all providers' model names and their serializers"""
|
|
||||||
mapping = {}
|
|
||||||
for model in all_subclasses(Provider):
|
|
||||||
if model._meta.abstract:
|
|
||||||
continue
|
|
||||||
mapping[f"{model._meta.app_label}.{model._meta.model_name}"] = model().serializer
|
|
||||||
return mapping
|
|
||||||
|
|
||||||
|
|
||||||
@extend_schema_field(
|
|
||||||
PolymorphicProxySerializer(
|
|
||||||
component_name="model",
|
|
||||||
serializers=get_provider_serializer_mapping,
|
|
||||||
resource_type_field_name="provider_model",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
class TransactionProviderField(DictField):
|
|
||||||
"""Dictionary field which can hold provider creation data"""
|
|
||||||
|
|
||||||
|
|
||||||
class TransactionApplicationSerializer(PassiveSerializer):
|
|
||||||
"""Serializer for creating a provider and an application in one transaction"""
|
|
||||||
|
|
||||||
app = ApplicationSerializer()
|
|
||||||
provider_model = ChoiceField(choices=list(get_provider_serializer_mapping().keys()))
|
|
||||||
provider = TransactionProviderField()
|
|
||||||
|
|
||||||
_provider_model: type[Provider] = None
|
|
||||||
|
|
||||||
def validate_provider_model(self, fq_model_name: str) -> str:
|
|
||||||
"""Validate that the model exists and is a provider"""
|
|
||||||
if "." not in fq_model_name:
|
|
||||||
raise ValidationError("Invalid provider model")
|
|
||||||
try:
|
|
||||||
app, _, model_name = fq_model_name.partition(".")
|
|
||||||
model = apps.get_model(app, model_name)
|
|
||||||
if not issubclass(model, Provider):
|
|
||||||
raise ValidationError("Invalid provider model")
|
|
||||||
self._provider_model = model
|
|
||||||
except LookupError:
|
|
||||||
raise ValidationError("Invalid provider model")
|
|
||||||
return fq_model_name
|
|
||||||
|
|
||||||
def validate(self, attrs: dict) -> dict:
|
|
||||||
blueprint = Blueprint()
|
|
||||||
blueprint.entries.append(
|
|
||||||
BlueprintEntry(
|
|
||||||
model=attrs["provider_model"],
|
|
||||||
state=BlueprintEntryDesiredState.MUST_CREATED,
|
|
||||||
identifiers={
|
|
||||||
"name": attrs["provider"]["name"],
|
|
||||||
},
|
|
||||||
# Must match the name of the field on `self`
|
|
||||||
id="provider",
|
|
||||||
attrs=attrs["provider"],
|
|
||||||
)
|
|
||||||
)
|
|
||||||
app_data = attrs["app"]
|
|
||||||
app_data["provider"] = KeyOf(None, ScalarNode(tag="", value="provider"))
|
|
||||||
blueprint.entries.append(
|
|
||||||
BlueprintEntry(
|
|
||||||
model="authentik_core.application",
|
|
||||||
state=BlueprintEntryDesiredState.MUST_CREATED,
|
|
||||||
identifiers={
|
|
||||||
"slug": attrs["app"]["slug"],
|
|
||||||
},
|
|
||||||
attrs=app_data,
|
|
||||||
# Must match the name of the field on `self`
|
|
||||||
id="app",
|
|
||||||
)
|
|
||||||
)
|
|
||||||
importer = Importer(blueprint, {})
|
|
||||||
try:
|
|
||||||
valid, _ = importer.validate(raise_validation_errors=True)
|
|
||||||
if not valid:
|
|
||||||
raise ValidationError("Invalid blueprint")
|
|
||||||
except EntryInvalidError as exc:
|
|
||||||
raise ValidationError(
|
|
||||||
{
|
|
||||||
exc.entry_id: exc.validation_error.detail,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return blueprint
|
|
||||||
|
|
||||||
|
|
||||||
class TransactionApplicationResponseSerializer(PassiveSerializer):
|
|
||||||
"""Transactional creation response"""
|
|
||||||
|
|
||||||
applied = BooleanField()
|
|
||||||
logs = ListField(child=CharField())
|
|
||||||
|
|
||||||
|
|
||||||
class TransactionalApplicationView(APIView):
|
|
||||||
"""Create provider and application and attach them in a single transaction"""
|
|
||||||
|
|
||||||
# TODO: Migrate to a more specific permission
|
|
||||||
permission_classes = [IsAdminUser]
|
|
||||||
|
|
||||||
@extend_schema(
|
|
||||||
request=TransactionApplicationSerializer(),
|
|
||||||
responses={
|
|
||||||
200: TransactionApplicationResponseSerializer(),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
def put(self, request: Request) -> Response:
|
|
||||||
"""Convert data into a blueprint, validate it and apply it"""
|
|
||||||
data = TransactionApplicationSerializer(data=request.data)
|
|
||||||
data.is_valid(raise_exception=True)
|
|
||||||
|
|
||||||
importer = Importer(data.validated_data, {})
|
|
||||||
applied = importer.apply()
|
|
||||||
response = {"applied": False, "logs": []}
|
|
||||||
response["applied"] = applied
|
|
||||||
return Response(response, status=200)
|
|
|
@@ -73,11 +73,6 @@ class UsedByMixin:
         # but so we only apply them once, have a simple flag for the first object
         first_object = True
 
-        # TODO: This will only return the used-by references that the user can see
-        # Either we have to leak model information here to not make the list
-        # useless if the user doesn't have all permissions, or we need to double
-        # query and check if there is a difference between modes the user can see
-        # and can't see and add a warning
         for obj in get_objects_for_user(
             request.user, f"{app}.view_{model_name}", manager
         ).all():
@ -7,6 +7,7 @@ from django.contrib.auth import update_session_auth_hash
|
||||||
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
from django.contrib.sessions.backends.cache import KEY_PREFIX
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.db.models.functions import ExtractHour
|
from django.db.models.functions import ExtractHour
|
||||||
|
from django.db.models.query import QuerySet
|
||||||
from django.db.transaction import atomic
|
from django.db.transaction import atomic
|
||||||
from django.db.utils import IntegrityError
|
from django.db.utils import IntegrityError
|
||||||
from django.urls import reverse_lazy
|
from django.urls import reverse_lazy
|
||||||
|
@ -14,13 +15,7 @@ from django.utils.http import urlencode
|
||||||
from django.utils.text import slugify
|
from django.utils.text import slugify
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
from django.utils.translation import gettext as _
|
from django.utils.translation import gettext as _
|
||||||
from django_filters.filters import (
|
from django_filters.filters import BooleanFilter, CharFilter, ModelMultipleChoiceFilter, UUIDFilter
|
||||||
BooleanFilter,
|
|
||||||
CharFilter,
|
|
||||||
ModelMultipleChoiceFilter,
|
|
||||||
MultipleChoiceFilter,
|
|
||||||
UUIDFilter,
|
|
||||||
)
|
|
||||||
from django_filters.filterset import FilterSet
|
from django_filters.filterset import FilterSet
|
||||||
from drf_spectacular.types import OpenApiTypes
|
from drf_spectacular.types import OpenApiTypes
|
||||||
from drf_spectacular.utils import (
|
from drf_spectacular.utils import (
|
||||||
|
@ -51,6 +46,7 @@ from rest_framework.serializers import (
|
||||||
)
|
)
|
||||||
from rest_framework.validators import UniqueValidator
|
from rest_framework.validators import UniqueValidator
|
||||||
from rest_framework.viewsets import ModelViewSet
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
from rest_framework_guardian.filters import ObjectPermissionsFilter
|
||||||
from structlog.stdlib import get_logger
|
from structlog.stdlib import get_logger
|
||||||
|
|
||||||
from authentik.admin.api.metrics import CoordinateSerializer
|
from authentik.admin.api.metrics import CoordinateSerializer
|
||||||
|
@@ -121,35 +117,27 @@ class UserSerializer(ModelSerializer):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
-            self.fields["password"] = CharField(required=False, allow_null=True)
+            self.fields["password"] = CharField(required=False)
 
     def create(self, validated_data: dict) -> User:
         """If this serializer is used in the blueprint context, we allow for
         directly setting a password. However should be done via the `set_password`
         method instead of directly setting it like rest_framework."""
-        password = validated_data.pop("password", None)
         instance: User = super().create(validated_data)
-        self._set_password(instance, password)
+        if SERIALIZER_CONTEXT_BLUEPRINT in self.context and "password" in validated_data:
+            instance.set_password(validated_data["password"])
+            instance.save()
         return instance
 
     def update(self, instance: User, validated_data: dict) -> User:
         """Same as `create` above, set the password directly if we're in a blueprint
         context"""
-        password = validated_data.pop("password", None)
         instance = super().update(instance, validated_data)
-        self._set_password(instance, password)
+        if SERIALIZER_CONTEXT_BLUEPRINT in self.context and "password" in validated_data:
+            instance.set_password(validated_data["password"])
+            instance.save()
         return instance
 
-    def _set_password(self, instance: User, password: Optional[str]):
-        """Set password of user if we're in a blueprint context, and if it's an empty
-        string then use an unusable password"""
-        if SERIALIZER_CONTEXT_BLUEPRINT in self.context and password:
-            instance.set_password(password)
-            instance.save()
-        if len(instance.password) == 0:
-            instance.set_unusable_password()
-            instance.save()
-
     def validate_path(self, path: str) -> str:
         """Validate path"""
         if path[:1] == "/" or path[-1] == "/":
@ -171,11 +159,6 @@ class UserSerializer(ModelSerializer):
|
||||||
raise ValidationError("Setting a user to internal service account is not allowed.")
|
raise ValidationError("Setting a user to internal service account is not allowed.")
|
||||||
return user_type
|
return user_type
|
||||||
|
|
||||||
def validate(self, attrs: dict) -> dict:
|
|
||||||
if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
|
|
||||||
raise ValidationError("Can't modify internal service account users")
|
|
||||||
return super().validate(attrs)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = User
|
model = User
|
||||||
fields = [
|
fields = [
|
||||||
|
@ -193,7 +176,6 @@ class UserSerializer(ModelSerializer):
|
||||||
"uid",
|
"uid",
|
||||||
"path",
|
"path",
|
||||||
"type",
|
"type",
|
||||||
"uuid",
|
|
||||||
]
|
]
|
||||||
extra_kwargs = {
|
extra_kwargs = {
|
||||||
"name": {"allow_blank": True},
|
"name": {"allow_blank": True},
|
||||||
|
@ -208,7 +190,6 @@ class UserSelfSerializer(ModelSerializer):
|
||||||
groups = SerializerMethodField()
|
groups = SerializerMethodField()
|
||||||
uid = CharField(read_only=True)
|
uid = CharField(read_only=True)
|
||||||
settings = SerializerMethodField()
|
settings = SerializerMethodField()
|
||||||
system_permissions = SerializerMethodField()
|
|
||||||
|
|
||||||
@extend_schema_field(
|
@extend_schema_field(
|
||||||
ListSerializer(
|
ListSerializer(
|
||||||
|
@ -220,7 +201,7 @@ class UserSelfSerializer(ModelSerializer):
|
||||||
)
|
)
|
||||||
def get_groups(self, _: User):
|
def get_groups(self, _: User):
|
||||||
"""Return only the group names a user is member of"""
|
"""Return only the group names a user is member of"""
|
||||||
for group in self.instance.all_groups().order_by("name"):
|
for group in self.instance.ak_groups.all():
|
||||||
yield {
|
yield {
|
||||||
"name": group.name,
|
"name": group.name,
|
||||||
"pk": group.pk,
|
"pk": group.pk,
|
||||||
|
@ -230,14 +211,6 @@ class UserSelfSerializer(ModelSerializer):
|
||||||
"""Get user settings with tenant and group settings applied"""
|
"""Get user settings with tenant and group settings applied"""
|
||||||
return user.group_attributes(self._context["request"]).get("settings", {})
|
return user.group_attributes(self._context["request"]).get("settings", {})
|
||||||
|
|
||||||
def get_system_permissions(self, user: User) -> list[str]:
|
|
||||||
"""Get all system permissions assigned to the user"""
|
|
||||||
return list(
|
|
||||||
user.user_permissions.filter(
|
|
||||||
content_type__app_label="authentik_rbac", content_type__model="systempermission"
|
|
||||||
).values_list("codename", flat=True)
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = User
|
model = User
|
||||||
fields = [
|
fields = [
|
||||||
|
@ -252,7 +225,6 @@ class UserSelfSerializer(ModelSerializer):
|
||||||
"uid",
|
"uid",
|
||||||
"settings",
|
"settings",
|
||||||
"type",
|
"type",
|
||||||
"system_permissions",
|
|
||||||
]
|
]
|
||||||
extra_kwargs = {
|
extra_kwargs = {
|
||||||
"is_active": {"read_only": True},
|
"is_active": {"read_only": True},
|
||||||
|
@@ -328,11 +300,11 @@ class UsersFilter(FilterSet):
     is_superuser = BooleanFilter(field_name="ak_groups", lookup_expr="is_superuser")
     uuid = UUIDFilter(field_name="uuid")
 
-    path = CharFilter(field_name="path")
+    path = CharFilter(
+        field_name="path",
+    )
     path_startswith = CharFilter(field_name="path", lookup_expr="startswith")
 
-    type = MultipleChoiceFilter(choices=UserTypes.choices, field_name="type")
-
     groups_by_name = ModelMultipleChoiceFilter(
         field_name="ak_groups__name",
         to_field_name="name",
@@ -630,10 +602,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         if not request.user.has_perm("impersonate"):
             LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
             return Response(status=401)
 
         user_to_be = self.get_object()
-        if user_to_be.pk == self.request.user.pk:
-            LOGGER.debug("User attempted to impersonate themselves", user=request.user)
-            return Response(status=401)
-
         request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
         request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be
@@ -667,6 +637,19 @@ class UserViewSet(UsedByMixin, ModelViewSet):
 
         return Response(status=204)
 
+    def _filter_queryset_for_list(self, queryset: QuerySet) -> QuerySet:
+        """Custom filter_queryset method which ignores guardian, but still supports sorting"""
+        for backend in list(self.filter_backends):
+            if backend == ObjectPermissionsFilter:
+                continue
+            queryset = backend().filter_queryset(self.request, queryset, self)
+        return queryset
+
+    def filter_queryset(self, queryset):
+        if self.request.user.has_perm("authentik_core.view_user"):
+            return self._filter_queryset_for_list(queryset)
+        return super().filter_queryset(queryset)
+
     @extend_schema(
         responses={
             200: inline_serializer(
@@ -44,7 +44,6 @@ class PropertyMappingEvaluator(BaseEvaluator):
         if request:
             req.http_request = request
         self._context["request"] = req
-        req.context.update(**kwargs)
         self._context.update(**kwargs)
         self.dry_run = dry_run
 

@@ -1,21 +0,0 @@
-"""Build source docs"""
-from pathlib import Path
-
-from django.core.management.base import BaseCommand
-from pdoc import pdoc
-from pdoc.render import configure
-
-
-class Command(BaseCommand):
-    """Build source docs"""
-
-    def handle(self, **options):
-        configure(
-            docformat="markdown",
-            mermaid=True,
-            logo="https://goauthentik.io/img/icon_top_brand_colour.svg",
-        )
-        pdoc(
-            "authentik",
-            output_directory=Path("./source_docs"),
-        )

@@ -1,9 +0,0 @@
-"""custom runserver command"""
-from daphne.management.commands.runserver import Command as RunServer
-
-
-class Command(RunServer):
-    """custom runserver command, which doesn't show the misleading django startup message"""
-
-    def on_bind(self, server_port):
-        pass

@@ -16,16 +16,7 @@ LOGGER = get_logger()
 class Command(BaseCommand):
     """Run worker"""
 
-    def add_arguments(self, parser):
-        parser.add_argument(
-            "-b",
-            "--beat",
-            action="store_false",
-            help="When set, this worker will _not_ run Beat (scheduled) tasks",
-        )
-
     def handle(self, **options):
-        LOGGER.debug("Celery options", **options)
         close_old_connections()
         if CONFIG.get_bool("remote_debug"):
             import debugpy
@@ -35,9 +26,10 @@ class Command(BaseCommand):
             no_color=False,
             quiet=True,
             optimization="fair",
-            autoscale=(CONFIG.get_int("worker.concurrency"), 1),
+            max_tasks_per_child=1,
+            autoscale=(3, 1),
             task_events=True,
-            beat=options.get("beat", True),
+            beat=True,
             schedule_filename=f"{tempdir}/celerybeat-schedule",
             queues=["authentik", "authentik_scheduled", "authentik_events"],
         )
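As an aside, the hard-coded worker options in the hunk above map onto standard Celery CLI flags. The sketch below is illustrative only: it assumes the app object lives at authentik.root.celery.CELERY_APP (a module path not shown in this diff) and simply restates the same options as a programmatic invocation.

    # Illustrative only: start a worker with roughly the same options as the
    # hard-coded kwargs above, using Celery's standard CLI flags.
    from authentik.root.celery import CELERY_APP  # assumed module path

    CELERY_APP.worker_main(
        argv=[
            "worker",
            "-B",  # beat=True: run the scheduler inside the worker process
            "--autoscale=3,1",  # autoscale=(3, 1)
            "--max-tasks-per-child=1",
            "-O",
            "fair",  # optimization="fair"
            "-Q",
            "authentik,authentik_scheduled,authentik_events",
        ]
    )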
@ -1,11 +1,55 @@
|
||||||
# Generated by Django 3.2.8 on 2021-10-10 16:16
|
# Generated by Django 3.2.8 on 2021-10-10 16:16
|
||||||
|
|
||||||
|
from os import environ
|
||||||
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
|
from django.apps.registry import Apps
|
||||||
|
from django.conf import settings
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||||
|
|
||||||
import authentik.core.models
|
import authentik.core.models
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_user(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
|
|
||||||
|
User = apps.get_model("authentik_core", "User")
|
||||||
|
db_alias = schema_editor.connection.alias
|
||||||
|
|
||||||
|
akadmin, _ = User.objects.using(db_alias).get_or_create(
|
||||||
|
username="akadmin",
|
||||||
|
email=environ.get("AUTHENTIK_BOOTSTRAP_EMAIL", "root@localhost"),
|
||||||
|
name="authentik Default Admin",
|
||||||
|
)
|
||||||
|
password = None
|
||||||
|
if "TF_BUILD" in environ or settings.TEST:
|
||||||
|
password = "akadmin" # noqa # nosec
|
||||||
|
if "AUTHENTIK_BOOTSTRAP_PASSWORD" in environ:
|
||||||
|
password = environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]
|
||||||
|
if password:
|
||||||
|
akadmin.password = make_password(password)
|
||||||
|
else:
|
||||||
|
akadmin.password = make_password(None)
|
||||||
|
akadmin.save()
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_admin_group(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
db_alias = schema_editor.connection.alias
|
||||||
|
Group = apps.get_model("authentik_core", "Group")
|
||||||
|
User = apps.get_model("authentik_core", "User")
|
||||||
|
|
||||||
|
# Creates a default admin group
|
||||||
|
group, _ = Group.objects.using(db_alias).get_or_create(
|
||||||
|
is_superuser=True,
|
||||||
|
defaults={
|
||||||
|
"name": "authentik Admins",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
group.users.set(User.objects.filter(username="akadmin"))
|
||||||
|
group.save()
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
replaces = [
|
replaces = [
|
||||||
("authentik_core", "0002_auto_20200523_1133"),
|
("authentik_core", "0002_auto_20200523_1133"),
|
||||||
|
@ -75,6 +119,9 @@ class Migration(migrations.Migration):
|
||||||
model_name="user",
|
model_name="user",
|
||||||
name="is_staff",
|
name="is_staff",
|
||||||
),
|
),
|
||||||
|
migrations.RunPython(
|
||||||
|
code=create_default_user,
|
||||||
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name="user",
|
model_name="user",
|
||||||
name="is_superuser",
|
name="is_superuser",
|
||||||
|
@ -154,6 +201,9 @@ class Migration(migrations.Migration):
|
||||||
default=False, help_text="Users added to this group will be superusers."
|
default=False, help_text="Users added to this group will be superusers."
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
|
migrations.RunPython(
|
||||||
|
code=create_default_admin_group,
|
||||||
|
),
|
||||||
migrations.AlterModelManagers(
|
migrations.AlterModelManagers(
|
||||||
name="user",
|
name="user",
|
||||||
managers=[
|
managers=[
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
# Generated by Django 3.2.8 on 2021-10-10 16:12
|
# Generated by Django 3.2.8 on 2021-10-10 16:12
|
||||||
|
|
||||||
import uuid
|
import uuid
|
||||||
|
from os import environ
|
||||||
|
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
from django.apps.registry import Apps
|
from django.apps.registry import Apps
|
||||||
|
@ -34,6 +35,29 @@ def fix_duplicates(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
Token.objects.using(db_alias).filter(identifier=ident["identifier"]).delete()
|
Token.objects.using(db_alias).filter(identifier=ident["identifier"]).delete()
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_user_token(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||||
|
from authentik.core.models import TokenIntents
|
||||||
|
|
||||||
|
User = apps.get_model("authentik_core", "User")
|
||||||
|
Token = apps.get_model("authentik_core", "Token")
|
||||||
|
|
||||||
|
db_alias = schema_editor.connection.alias
|
||||||
|
|
||||||
|
akadmin = User.objects.using(db_alias).filter(username="akadmin")
|
||||||
|
if not akadmin.exists():
|
||||||
|
return
|
||||||
|
if "AUTHENTIK_BOOTSTRAP_TOKEN" not in environ:
|
||||||
|
return
|
||||||
|
key = environ["AUTHENTIK_BOOTSTRAP_TOKEN"]
|
||||||
|
Token.objects.using(db_alias).create(
|
||||||
|
identifier="authentik-bootstrap-token",
|
||||||
|
user=akadmin.first(),
|
||||||
|
intent=TokenIntents.INTENT_API,
|
||||||
|
expiring=False,
|
||||||
|
key=key,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
replaces = [
|
replaces = [
|
||||||
("authentik_core", "0018_auto_20210330_1345"),
|
("authentik_core", "0018_auto_20210330_1345"),
|
||||||
|
@ -190,6 +214,9 @@ class Migration(migrations.Migration):
|
||||||
"verbose_name_plural": "Authenticated Sessions",
|
"verbose_name_plural": "Authenticated Sessions",
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
|
migrations.RunPython(
|
||||||
|
code=create_default_user_token,
|
||||||
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="token",
|
model_name="token",
|
||||||
name="intent",
|
name="intent",
|
||||||
|
|
|
@ -1,45 +0,0 @@
|
||||||
# Generated by Django 4.2.6 on 2023-10-11 13:37
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
dependencies = [
|
|
||||||
("authentik_core", "0031_alter_user_type"),
|
|
||||||
("authentik_rbac", "0001_initial"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterModelOptions(
|
|
||||||
name="group",
|
|
||||||
options={"verbose_name": "Group", "verbose_name_plural": "Groups"},
|
|
||||||
),
|
|
||||||
migrations.AlterModelOptions(
|
|
||||||
name="token",
|
|
||||||
options={
|
|
||||||
"permissions": [("view_token_key", "View token's key")],
|
|
||||||
"verbose_name": "Token",
|
|
||||||
"verbose_name_plural": "Tokens",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.AlterModelOptions(
|
|
||||||
name="user",
|
|
||||||
options={
|
|
||||||
"permissions": [
|
|
||||||
("reset_user_password", "Reset Password"),
|
|
||||||
("impersonate", "Can impersonate other users"),
|
|
||||||
("assign_user_permissions", "Can assign permissions to users"),
|
|
||||||
("unassign_user_permissions", "Can unassign permissions from users"),
|
|
||||||
],
|
|
||||||
"verbose_name": "User",
|
|
||||||
"verbose_name_plural": "Users",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name="group",
|
|
||||||
name="roles",
|
|
||||||
field=models.ManyToManyField(
|
|
||||||
blank=True, related_name="ak_groups", to="authentik_rbac.role"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
|
@@ -1,7 +1,7 @@
 """authentik core models"""
 from datetime import timedelta
 from hashlib import sha256
-from typing import Any, Optional, Self
+from typing import Any, Optional
 from uuid import uuid4
 
 from deepmerge import always_merger
@@ -60,7 +60,7 @@ def default_token_key():
     """Default token key"""
     # We use generate_id since the chars in the key should be easy
     # to use in Emails (for verification) and URLs (for recovery)
-    return generate_id(CONFIG.get_int("default_token_length"))
+    return generate_id(int(CONFIG.get("default_token_length")))
 
 
 class UserTypes(models.TextChoices):
@@ -79,7 +79,7 @@ class UserTypes(models.TextChoices):
 
 
 class Group(SerializerModel):
-    """Group model which supports a basic hierarchy and has attributes"""
+    """Custom Group model which supports a basic hierarchy"""
 
     group_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
 
@@ -88,8 +88,6 @@ class Group(SerializerModel):
         default=False, help_text=_("Users added to this group will be superusers.")
     )
 
-    roles = models.ManyToManyField("authentik_rbac.Role", related_name="ak_groups", blank=True)
-
     parent = models.ForeignKey(
         "Group",
         blank=True,
@@ -115,39 +113,27 @@ class Group(SerializerModel):
 
     def is_member(self, user: "User") -> bool:
         """Recursively check if `user` is member of us, or any parent."""
-        return user.all_groups().filter(group_uuid=self.group_uuid).exists()
-
-    def children_recursive(self: Self | QuerySet["Group"]) -> QuerySet["Group"]:
-        """Recursively get all groups that have this as parent or are indirectly related"""
-        direct_groups = []
-        if isinstance(self, QuerySet):
-            direct_groups = list(x for x in self.all().values_list("pk", flat=True).iterator())
-        else:
-            direct_groups = [self.pk]
-        if len(direct_groups) < 1:
-            return Group.objects.none()
         query = """
         WITH RECURSIVE parents AS (
             SELECT authentik_core_group.*, 0 AS relative_depth
             FROM authentik_core_group
-            WHERE authentik_core_group.group_uuid = ANY(%s)
+            WHERE authentik_core_group.group_uuid = %s
 
             UNION ALL
 
-            SELECT authentik_core_group.*, parents.relative_depth + 1
-            FROM authentik_core_group, parents
+            SELECT authentik_core_group.*, parents.relative_depth - 1
+            FROM authentik_core_group,parents
             WHERE (
-                authentik_core_group.group_uuid = parents.parent_id and
-                parents.relative_depth < 20
+                authentik_core_group.parent_id = parents.group_uuid and
+                parents.relative_depth > -20
             )
         )
         SELECT group_uuid
         FROM parents
-        GROUP BY group_uuid, name
-        ORDER BY name;
+        GROUP BY group_uuid;
         """
-        group_pks = [group.pk for group in Group.objects.raw(query, [direct_groups]).iterator()]
-        return Group.objects.filter(pk__in=group_pks)
+        groups = Group.objects.raw(query, [self.group_uuid])
+        return user.ak_groups.filter(pk__in=[group.pk for group in groups]).exists()
 
     def __str__(self):
         return f"Group {self.name}"
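A note on the hunk above: the replacement is_member() seeds the recursive CTE with this group's UUID and then follows parent_id = parents.group_uuid, so it collects the group itself plus all of its descendants (depth-limited to 20) and checks whether the user directly belongs to any of them, in a single raw query. A rough pure-ORM sketch of the same check (illustrative only, not code from the diff; assumes the usual Group.parent foreign key and User.ak_groups relation):

    # Sketch of what the recursive CTE computes, done with plain ORM queries:
    # gather this group and all of its descendants (bounded depth), then check
    # whether the user is a direct member of any of them. Unlike the raw SQL,
    # this issues one query per level of nesting.
    def is_member_sketch(group, user, max_depth: int = 20) -> bool:
        seen = {group.pk}
        frontier = [group.pk]
        for _ in range(max_depth):
            children = list(
                type(group).objects.filter(parent__in=frontier).values_list("pk", flat=True)
            )
            frontier = [pk for pk in children if pk not in seen]
            if not frontier:
                break
            seen.update(frontier)
        return user.ak_groups.filter(pk__in=seen).exists()

The single-query CTE keeps the membership check cheap even for deep hierarchies, which is why the model does it in raw SQL rather than looping as above.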
@@ -159,20 +145,18 @@ class Group(SerializerModel):
                 "parent",
             ),
         )
-        verbose_name = _("Group")
-        verbose_name_plural = _("Groups")
 
 
 class UserManager(DjangoUserManager):
-    """User manager that doesn't assign is_superuser and is_staff"""
+    """Custom user manager that doesn't assign is_superuser and is_staff"""
 
     def create_user(self, username, email=None, password=None, **extra_fields):
-        """User manager that doesn't assign is_superuser and is_staff"""
+        """Custom user manager that doesn't assign is_superuser and is_staff"""
         return self._create_user(username, email, password, **extra_fields)
 
 
 class User(SerializerModel, GuardianUserMixin, AbstractUser):
-    """authentik User model, based on django's contrib auth user model."""
+    """Custom User model to allow easier adding of user-based settings"""
 
     uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
     name = models.TextField(help_text=_("User's display name."))
@@ -192,19 +176,13 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
         """Get the default user path"""
         return User._meta.get_field("path").default
 
-    def all_groups(self) -> QuerySet[Group]:
-        """Recursively get all groups this user is a member of.
-        At least one query is done to get the direct groups of the user, with groups
-        there are at most 3 queries done"""
-        return Group.children_recursive(self.ak_groups.all())
-
     def group_attributes(self, request: Optional[HttpRequest] = None) -> dict[str, Any]:
         """Get a dictionary containing the attributes from all groups the user belongs to,
         including the users attributes"""
         final_attributes = {}
         if request and hasattr(request, "tenant"):
             always_merger.merge(final_attributes, request.tenant.attributes)
-        for group in self.all_groups().order_by("name"):
+        for group in self.ak_groups.all().order_by("name"):
             always_merger.merge(final_attributes, group.attributes)
         always_merger.merge(final_attributes, self.attributes)
         return final_attributes
@@ -218,7 +196,7 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
     @cached_property
     def is_superuser(self) -> bool:
         """Get supseruser status based on membership in a group with superuser status"""
-        return self.all_groups().filter(is_superuser=True).exists()
+        return self.ak_groups.filter(is_superuser=True).exists()
 
     @property
     def is_staff(self) -> bool:
@@ -271,14 +249,12 @@ class User(SerializerModel, GuardianUserMixin, AbstractUser):
         return get_avatar(self)
 
     class Meta:
+        permissions = (
+            ("reset_user_password", "Reset Password"),
+            ("impersonate", "Can impersonate other users"),
+        )
         verbose_name = _("User")
         verbose_name_plural = _("Users")
-        permissions = [
-            ("reset_user_password", _("Reset Password")),
-            ("impersonate", _("Can impersonate other users")),
-            ("assign_user_permissions", _("Can assign permissions to users")),
-            ("unassign_user_permissions", _("Can unassign permissions from users")),
-        ]
 
 
 class Provider(SerializerModel):
@@ -687,7 +663,7 @@ class Token(SerializerModel, ManagedModel, ExpiringModel):
             models.Index(fields=["identifier"]),
             models.Index(fields=["key"]),
         ]
-        permissions = [("view_token_key", _("View token's key"))]
+        permissions = (("view_token_key", "View token's key"),)
 
 
 class PropertyMapping(SerializerModel, ManagedModel):
@@ -7,7 +7,6 @@ from django.db.models import Model
 from django.db.models.signals import post_save, pre_delete, pre_save
 from django.dispatch import receiver
 from django.http.request import HttpRequest
-from structlog.stdlib import get_logger
 
 from authentik.core.models import Application, AuthenticatedSession, BackchannelProvider, User
 
@@ -16,8 +15,6 @@ password_changed = Signal()
 # Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
 login_failed = Signal()
 
-LOGGER = get_logger()
-
 
 @receiver(post_save, sender=Application)
 def post_save_application(sender: type[Model], instance, created: bool, **_):

@@ -48,7 +48,7 @@ class Action(Enum):
 class MessageStage(StageView):
     """Show a pre-configured message after the flow is done"""
 
-    def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
+    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """Show a pre-configured message after the flow is done"""
         message = getattr(self.executor.current_stage, "message", "")
         level = getattr(self.executor.current_stage, "level", messages.SUCCESS)
@@ -59,6 +59,10 @@
         )
         return self.executor.stage_ok()
 
+    def post(self, request: HttpRequest) -> HttpResponse:
+        """Wrapper for post requests"""
+        return self.get(request)
+
 
 class SourceFlowManager:
     """Help sources decide what they should do after authorization. Based on source settings and
@@ -97,7 +101,6 @@
         if self.request.user.is_authenticated:
             new_connection.user = self.request.user
         new_connection = self.update_connection(new_connection, **kwargs)
-        # pylint: disable=no-member
         new_connection.save()
         return Action.LINK, new_connection
 
@@ -13,7 +13,7 @@ class PostUserEnrollmentStage(StageView):
     """Dynamically injected stage which saves the Connection after
     the user has been enrolled."""
 
-    def dispatch(self, request: HttpRequest) -> HttpResponse:
+    def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         """Stage used after the user has been enrolled"""
         connection: UserSourceConnection = self.executor.plan.context[
             PLAN_CONTEXT_SOURCES_CONNECTION
@@ -27,3 +27,7 @@ class PostUserEnrollmentStage(StageView):
             source=connection.source,
         ).from_http(self.request)
         return self.executor.stage_ok()
+
+    def post(self, request: HttpRequest) -> HttpResponse:
+        """Wrapper for post requests"""
+        return self.get(request)

@@ -13,6 +13,7 @@
 {% block head_before %}
 {% endblock %}
 <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">
+<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
 <link rel="stylesheet" type="text/css" href="{% static 'dist/custom.css' %}" data-inject>
 <script src="{% static 'dist/poly.js' %}?version={{ version }}" type="module"></script>
 <script src="{% static 'dist/standalone/loading/index.js' %}?version={{ version }}" type="module"></script>

@@ -16,8 +16,8 @@ You've logged out of {{ application }}.
 {% block card %}
 <form method="POST" class="pf-c-form">
     <p>
-        {% blocktrans with application=application.name branding_title=tenant.branding_title %}
-        You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your {{ branding_title }} account.
+        {% blocktrans with application=application.name %}
+        You've logged out of {{ application }}. You can go back to the overview to launch another application, or log out of your authentik account.
         {% endblocktrans %}
     </p>
 

@@ -6,7 +6,6 @@
 {% block head_before %}
 <link rel="prefetch" href="/static/dist/assets/images/flow_background.jpg" />
 <link rel="stylesheet" type="text/css" href="{% static 'dist/patternfly.min.css' %}">
-<link rel="stylesheet" type="text/css" href="{% static 'dist/theme-dark.css' %}" media="(prefers-color-scheme: dark)">
 {% include "base/header_js.html" %}
 {% endblock %}
 
@@ -79,6 +78,7 @@
 </main>
 {% endblock %}
 <footer class="pf-c-login__footer">
+    <p></p>
     <ul class="pf-c-list pf-m-inline">
         {% for link in footer_links %}
         <li>
@@ -13,9 +13,7 @@ class TestGroups(TestCase):
         user = User.objects.create(username=generate_id())
         user2 = User.objects.create(username=generate_id())
         group = Group.objects.create(name=generate_id())
-        other_group = Group.objects.create(name=generate_id())
         group.users.add(user)
-        other_group.users.add(user)
         self.assertTrue(group.is_member(user))
         self.assertFalse(group.is_member(user2))
 
@@ -23,26 +21,22 @@ class TestGroups(TestCase):
         """Test parent membership"""
         user = User.objects.create(username=generate_id())
         user2 = User.objects.create(username=generate_id())
-        parent = Group.objects.create(name=generate_id())
-        child = Group.objects.create(name=generate_id(), parent=parent)
-        child.users.add(user)
-        self.assertTrue(child.is_member(user))
-        self.assertTrue(parent.is_member(user))
-        self.assertFalse(child.is_member(user2))
-        self.assertFalse(parent.is_member(user2))
+        first = Group.objects.create(name=generate_id())
+        second = Group.objects.create(name=generate_id(), parent=first)
+        second.users.add(user)
+        self.assertTrue(first.is_member(user))
+        self.assertFalse(first.is_member(user2))
 
     def test_group_membership_parent_extra(self):
         """Test parent membership"""
         user = User.objects.create(username=generate_id())
         user2 = User.objects.create(username=generate_id())
-        parent = Group.objects.create(name=generate_id())
-        second = Group.objects.create(name=generate_id(), parent=parent)
+        first = Group.objects.create(name=generate_id())
+        second = Group.objects.create(name=generate_id(), parent=first)
         third = Group.objects.create(name=generate_id(), parent=second)
         second.users.add(user)
-        self.assertTrue(parent.is_member(user))
-        self.assertFalse(parent.is_member(user2))
-        self.assertTrue(second.is_member(user))
-        self.assertFalse(second.is_member(user2))
+        self.assertTrue(first.is_member(user))
+        self.assertFalse(first.is_member(user2))
         self.assertFalse(third.is_member(user))
         self.assertFalse(third.is_member(user2))
 
@ -6,7 +6,6 @@ from rest_framework.test import APITestCase
|
||||||
|
|
||||||
from authentik.core.models import User
|
from authentik.core.models import User
|
||||||
from authentik.core.tests.utils import create_test_admin_user
|
from authentik.core.tests.utils import create_test_admin_user
|
||||||
from authentik.lib.config import CONFIG
|
|
||||||
|
|
||||||
|
|
||||||
class TestImpersonation(APITestCase):
|
class TestImpersonation(APITestCase):
|
||||||
|
@ -47,42 +46,12 @@ class TestImpersonation(APITestCase):
|
||||||
"""test impersonation without permissions"""
|
"""test impersonation without permissions"""
|
||||||
self.client.force_login(self.other_user)
|
self.client.force_login(self.other_user)
|
||||||
|
|
||||||
response = self.client.post(
|
self.client.get(reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}))
|
||||||
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 403)
|
|
||||||
|
|
||||||
response = self.client.get(reverse("authentik_api:user-me"))
|
response = self.client.get(reverse("authentik_api:user-me"))
|
||||||
response_body = loads(response.content.decode())
|
response_body = loads(response.content.decode())
|
||||||
self.assertEqual(response_body["user"]["username"], self.other_user.username)
|
self.assertEqual(response_body["user"]["username"], self.other_user.username)
|
||||||
|
|
||||||
@CONFIG.patch("impersonation", False)
|
|
||||||
def test_impersonate_disabled(self):
|
|
||||||
"""test impersonation that is disabled"""
|
|
||||||
self.client.force_login(self.user)
|
|
||||||
|
|
||||||
response = self.client.post(
|
|
||||||
reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk})
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 401)
|
|
||||||
|
|
||||||
response = self.client.get(reverse("authentik_api:user-me"))
|
|
||||||
response_body = loads(response.content.decode())
|
|
||||||
self.assertEqual(response_body["user"]["username"], self.user.username)
|
|
||||||
|
|
||||||
def test_impersonate_self(self):
|
|
||||||
"""test impersonation that user can't impersonate themselves"""
|
|
||||||
self.client.force_login(self.user)
|
|
||||||
|
|
||||||
response = self.client.post(
|
|
||||||
reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk})
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 401)
|
|
||||||
|
|
||||||
response = self.client.get(reverse("authentik_api:user-me"))
|
|
||||||
response_body = loads(response.content.decode())
|
|
||||||
self.assertEqual(response_body["user"]["username"], self.user.username)
|
|
||||||
|
|
||||||
def test_un_impersonate_empty(self):
|
def test_un_impersonate_empty(self):
|
||||||
"""test un-impersonation without impersonating first"""
|
"""test un-impersonation without impersonating first"""
|
||||||
self.client.force_login(self.other_user)
|
self.client.force_login(self.other_user)
|
||||||
|
|
|
@@ -1,64 +0,0 @@
-"""Test Transactional API"""
-from django.urls import reverse
-from rest_framework.test import APITestCase
-
-from authentik.core.models import Application
-from authentik.core.tests.utils import create_test_admin_user, create_test_flow
-from authentik.lib.generators import generate_id
-from authentik.providers.oauth2.models import OAuth2Provider
-
-
-class TestTransactionalApplicationsAPI(APITestCase):
-    """Test Transactional API"""
-
-    def setUp(self) -> None:
-        self.user = create_test_admin_user()
-
-    def test_create_transactional(self):
-        """Test transactional Application + provider creation"""
-        self.client.force_login(self.user)
-        uid = generate_id()
-        authorization_flow = create_test_flow()
-        response = self.client.put(
-            reverse("authentik_api:core-transactional-application"),
-            data={
-                "app": {
-                    "name": uid,
-                    "slug": uid,
-                },
-                "provider_model": "authentik_providers_oauth2.oauth2provider",
-                "provider": {
-                    "name": uid,
-                    "authorization_flow": str(authorization_flow.pk),
-                },
-            },
-        )
-        self.assertJSONEqual(response.content.decode(), {"applied": True, "logs": []})
-        provider = OAuth2Provider.objects.filter(name=uid).first()
-        self.assertIsNotNone(provider)
-        app = Application.objects.filter(slug=uid).first()
-        self.assertIsNotNone(app)
-        self.assertEqual(app.provider.pk, provider.pk)
-
-    def test_create_transactional_invalid(self):
-        """Test transactional Application + provider creation"""
-        self.client.force_login(self.user)
-        uid = generate_id()
-        response = self.client.put(
-            reverse("authentik_api:core-transactional-application"),
-            data={
-                "app": {
-                    "name": uid,
-                    "slug": uid,
-                },
-                "provider_model": "authentik_providers_oauth2.oauth2provider",
-                "provider": {
-                    "name": uid,
-                    "authorization_flow": "",
-                },
-            },
-        )
-        self.assertJSONEqual(
-            response.content.decode(),
-            {"provider": {"authorization_flow": ["This field may not be null."]}},
-        )
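For orientation, the deleted test module above drove the transactional endpoint with a single PUT carrying both the application and the provider definition. A minimal sketch of the same call, assuming an already-authenticated DRF test client and the URL name from the removed module; the concrete names and the flow pk are placeholders:

    from django.urls import reverse
    from rest_framework.test import APIClient

    client = APIClient()  # assumed to already be authenticated as an admin, e.g. via force_authenticate()
    payload = {
        "app": {"name": "demo-app", "slug": "demo-app"},  # hypothetical application
        "provider_model": "authentik_providers_oauth2.oauth2provider",
        "provider": {"name": "demo-app", "authorization_flow": "<flow-pk>"},  # placeholder pk
    }
    response = client.put(
        reverse("authentik_api:core-transactional-application"),
        data=payload,
        format="json",
    )
    # the removed test asserted {"applied": True, "logs": []} on success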
@@ -28,19 +28,6 @@ class TestUsersAPI(APITestCase):
         self.admin = create_test_admin_user()
         self.user = User.objects.create(username="test-user")

-    def test_filter_type(self):
-        """Test API filtering by type"""
-        self.client.force_login(self.admin)
-        user = create_test_admin_user(type=UserTypes.EXTERNAL)
-        response = self.client.get(
-            reverse("authentik_api:user-list"),
-            data={
-                "type": UserTypes.EXTERNAL,
-                "username": user.username,
-            },
-        )
-        self.assertEqual(response.status_code, 200)
-
     def test_metrics(self):
         """Test user's metrics"""
         self.client.force_login(self.admin)
@@ -21,34 +21,27 @@ def create_test_flow(
     )


-def create_test_user(name: Optional[str] = None, **kwargs) -> User:
-    """Generate a test user"""
+def create_test_admin_user(name: Optional[str] = None) -> User:
+    """Generate a test-admin user"""
     uid = generate_id(20) if not name else name
-    kwargs.setdefault("email", f"{uid}@goauthentik.io")
-    kwargs.setdefault("username", uid)
+    group = Group.objects.create(name=uid, is_superuser=True)
     user: User = User.objects.create(
+        username=uid,
         name=uid,
-        **kwargs,
+        email=f"{uid}@goauthentik.io",
     )
     user.set_password(uid)
     user.save()
-    return user
-
-
-def create_test_admin_user(name: Optional[str] = None, **kwargs) -> User:
-    """Generate a test-admin user"""
-    user = create_test_user(name, **kwargs)
-    group = Group.objects.create(name=user.name or name, is_superuser=True)
     group.users.add(user)
     return user


-def create_test_tenant(**kwargs) -> Tenant:
+def create_test_tenant() -> Tenant:
     """Generate a test tenant, removing all other tenants to make sure this one
     matches."""
     uid = generate_id(20)
     Tenant.objects.all().delete()
-    return Tenant.objects.create(domain=uid, default=True, **kwargs)
+    return Tenant.objects.create(domain=uid, default=True)


 def create_test_cert(use_ec_private_key=False) -> CertificateKeyPair:
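One practical consequence of the left-hand refactor: because both helpers forward **kwargs to User.objects.create, test code can override any user field at the call site, which is what the removed test_filter_type relied on. A small sketch under that assumption:

    from authentik.core.models import UserTypes
    from authentik.core.tests.utils import create_test_admin_user, create_test_user

    # override the generated e-mail; the remaining fields keep their generated defaults
    user = create_test_user(email="someone@test.goauthentik.io")  # hypothetical address
    # create an admin that is typed as an external user, as the removed filter test did
    external_admin = create_test_admin_user(type=UserTypes.EXTERNAL)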
@@ -15,7 +15,6 @@ from authentik.core.api.propertymappings import PropertyMappingViewSet
 from authentik.core.api.providers import ProviderViewSet
 from authentik.core.api.sources import SourceViewSet, UserSourceConnectionViewSet
 from authentik.core.api.tokens import TokenViewSet
-from authentik.core.api.transactional_applications import TransactionalApplicationView
 from authentik.core.api.users import UserViewSet
 from authentik.core.views import apps
 from authentik.core.views.debug import AccessDeniedView
@@ -71,11 +70,6 @@ urlpatterns = [
 api_urlpatterns = [
     ("core/authenticated_sessions", AuthenticatedSessionViewSet),
     ("core/applications", ApplicationViewSet),
-    path(
-        "core/transactional/applications/",
-        TransactionalApplicationView.as_view(),
-        name="core-transactional-application",
-    ),
     ("core/groups", GroupViewSet),
     ("core/users", UserViewSet),
     ("core/tokens", TokenViewSet),
@@ -189,8 +189,6 @@ class CertificateKeyPairFilter(FilterSet):

     def filter_has_key(self, queryset, name, value):  # pragma: no cover
         """Only return certificate-key pairs with keys"""
-        if not value:
-            return queryset
         return queryset.exclude(key_data__exact="")

     class Meta:
@@ -1,10 +1,13 @@
 """authentik crypto app config"""
 from datetime import datetime
-from typing import Optional
+from typing import TYPE_CHECKING, Optional

 from authentik.blueprints.apps import ManagedAppConfig
 from authentik.lib.generators import generate_id

+if TYPE_CHECKING:
+    from authentik.crypto.models import CertificateKeyPair
+
 MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"


@@ -20,37 +23,33 @@ class AuthentikCryptoConfig(ManagedAppConfig):
         """Load crypto tasks"""
         self.import_module("authentik.crypto.tasks")

-    def _create_update_cert(self):
+    def _create_update_cert(self, cert: Optional["CertificateKeyPair"] = None):
         from authentik.crypto.builder import CertificateBuilder
         from authentik.crypto.models import CertificateKeyPair

-        common_name = "authentik Internal JWT Certificate"
-        builder = CertificateBuilder(common_name)
+        builder = CertificateBuilder("authentik Internal JWT Certificate")
         builder.build(
             subject_alt_names=["goauthentik.io"],
             validity_days=360,
         )
-        CertificateKeyPair.objects.update_or_create(
-            managed=MANAGED_KEY,
-            defaults={
-                "name": common_name,
-                "certificate_data": builder.certificate,
-                "key_data": builder.private_key,
-            },
-        )
+        if not cert:
+            cert = CertificateKeyPair()
+        builder.cert = cert
+        builder.cert.managed = MANAGED_KEY
+        builder.save()

     def reconcile_managed_jwt_cert(self):
         """Ensure managed JWT certificate"""
         from authentik.crypto.models import CertificateKeyPair

-        cert: Optional[CertificateKeyPair] = CertificateKeyPair.objects.filter(
-            managed=MANAGED_KEY
-        ).first()
-        now = datetime.now()
-        if not cert or (
-            now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after
-        ):
+        certs = CertificateKeyPair.objects.filter(managed=MANAGED_KEY)
+        if not certs.exists():
             self._create_update_cert()
+            return
+        cert: CertificateKeyPair = certs.first()
+        now = datetime.now()
+        if now < cert.certificate.not_valid_before or now > cert.certificate.not_valid_after:
+            self._create_update_cert(cert)

     def reconcile_self_signed(self):
         """Create self-signed keypair"""
@@ -62,10 +61,4 @@ class AuthentikCryptoConfig(ManagedAppConfig):
             return
         builder = CertificateBuilder(name)
         builder.build(subject_alt_names=[f"{generate_id()}.self-signed.goauthentik.io"])
-        CertificateKeyPair.objects.get_or_create(
-            name=name,
-            defaults={
-                "certificate_data": builder.certificate,
-                "key_data": builder.private_key,
-            },
-        )
+        builder.save()
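For readers less familiar with the call the left-hand side uses: Django's QuerySet.update_or_create looks a row up by the keyword filters, applies the defaults mapping to it if it exists, and creates it otherwise, returning the object together with a created flag. Roughly, under the same names as in the hunk above:

    # generic Django ORM pattern, not authentik-specific code
    keypair, created = CertificateKeyPair.objects.update_or_create(
        managed=MANAGED_KEY,  # lookup: at most one managed JWT keypair should exist
        defaults={            # fields written on both the create and the update path
            "name": "authentik Internal JWT Certificate",
            "certificate_data": builder.certificate,
            "key_data": builder.private_key,
        },
    )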
@@ -128,26 +128,8 @@ class TestCrypto(APITestCase):
         response = self.client.get(
             reverse(
                 "authentik_api:certificatekeypair-list",
-            ),
-            data={"name": cert.name},
-        )
-        self.assertEqual(200, response.status_code)
-        body = loads(response.content.decode())
-        api_cert = [x for x in body["results"] if x["name"] == cert.name][0]
-        self.assertEqual(api_cert["fingerprint_sha1"], cert.fingerprint_sha1)
-        self.assertEqual(api_cert["fingerprint_sha256"], cert.fingerprint_sha256)
-
-    def test_list_has_key_false(self):
-        """Test API List with has_key set to false"""
-        cert = create_test_cert()
-        cert.key_data = ""
-        cert.save()
-        self.client.force_login(create_test_admin_user())
-        response = self.client.get(
-            reverse(
-                "authentik_api:certificatekeypair-list",
-            ),
-            data={"name": cert.name, "has_key": False},
-        )
+            )
+            + f"?name={cert.name}"
+        )
         self.assertEqual(200, response.status_code)
         body = loads(response.content.decode())
@@ -162,8 +144,8 @@ class TestCrypto(APITestCase):
         response = self.client.get(
             reverse(
                 "authentik_api:certificatekeypair-list",
-            ),
-            data={"name": cert.name, "include_details": False},
+            )
+            + f"?name={cert.name}&include_details=false"
         )
         self.assertEqual(200, response.status_code)
         body = loads(response.content.decode())
@@ -186,8 +168,8 @@ class TestCrypto(APITestCase):
             reverse(
                 "authentik_api:certificatekeypair-view-certificate",
                 kwargs={"pk": keypair.pk},
-            ),
-            data={"download": True},
+            )
+            + "?download",
         )
         self.assertEqual(200, response.status_code)
         self.assertIn("Content-Disposition", response)
@@ -207,8 +189,8 @@ class TestCrypto(APITestCase):
             reverse(
                 "authentik_api:certificatekeypair-view-private-key",
                 kwargs={"pk": keypair.pk},
-            ),
-            data={"download": True},
+            )
+            + "?download",
         )
         self.assertEqual(200, response.status_code)
         self.assertIn("Content-Disposition", response)
@@ -218,7 +200,7 @@ class TestCrypto(APITestCase):
         self.client.force_login(create_test_admin_user())
         keypair = create_test_cert()
         provider = OAuth2Provider.objects.create(
-            name=generate_id(),
+            name="test",
             client_id="test",
             client_secret=generate_key(),
             authorization_flow=create_test_flow(),
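The crypto test hunks above mostly swap Django's test-client data= argument for hand-built query strings; for GET requests the two spellings are equivalent, because the test client URL-encodes data into the query string. A small sketch, assuming it runs inside one of the test methods shown:

    url = reverse("authentik_api:certificatekeypair-list")
    # both requests resolve to .../certificatekeypairs/?name=<cert.name>&include_details=False
    self.client.get(url, data={"name": cert.name, "include_details": False})
    self.client.get(url + f"?name={cert.name}&include_details=False")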
@@ -6,7 +6,7 @@ from drf_spectacular.types import OpenApiTypes
 from drf_spectacular.utils import extend_schema, inline_serializer
 from rest_framework.decorators import action
 from rest_framework.fields import BooleanField, CharField, DateTimeField, IntegerField
-from rest_framework.permissions import IsAuthenticated
+from rest_framework.permissions import IsAdminUser, IsAuthenticated
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework.serializers import ModelSerializer
@@ -35,13 +35,13 @@ class LicenseSerializer(ModelSerializer):
             "name",
             "key",
             "expiry",
-            "internal_users",
+            "users",
             "external_users",
         ]
         extra_kwargs = {
             "name": {"read_only": True},
             "expiry": {"read_only": True},
-            "internal_users": {"read_only": True},
+            "users": {"read_only": True},
             "external_users": {"read_only": True},
         }
@@ -49,7 +49,7 @@ class LicenseSerializer(ModelSerializer):
 class LicenseSummary(PassiveSerializer):
     """Serializer for license status"""

-    internal_users = IntegerField(required=True)
+    users = IntegerField(required=True)
     external_users = IntegerField(required=True)
     valid = BooleanField()
     show_admin_warning = BooleanField()
@@ -62,9 +62,9 @@ class LicenseSummary(PassiveSerializer):
 class LicenseForecastSerializer(PassiveSerializer):
     """Serializer for license forecast"""

-    internal_users = IntegerField(required=True)
+    users = IntegerField(required=True)
     external_users = IntegerField(required=True)
-    forecasted_internal_users = IntegerField(required=True)
+    forecasted_users = IntegerField(required=True)
     forecasted_external_users = IntegerField(required=True)


@@ -84,7 +84,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
             200: inline_serializer("InstallIDSerializer", {"install_id": CharField(required=True)}),
         },
     )
-    @action(detail=False, methods=["GET"])
+    @action(detail=False, methods=["GET"], permission_classes=[IsAdminUser])
     def get_install_id(self, request: Request) -> Response:
         """Get install_id"""
         return Response(
@@ -111,7 +111,7 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
         latest_valid = datetime.fromtimestamp(total.exp)
         response = LicenseSummary(
             data={
-                "internal_users": total.internal_users,
+                "users": total.users,
                 "external_users": total.external_users,
                 "valid": total.is_valid(),
                 "show_admin_warning": show_admin_warning,
@@ -135,8 +135,8 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
     def forecast(self, request: Request) -> Response:
         """Forecast how many users will be required in a year"""
         last_month = now() - timedelta(days=30)
-        # Forecast for internal users
-        internal_in_last_month = User.objects.filter(
+        # Forecast for default users
+        users_in_last_month = User.objects.filter(
             type=UserTypes.INTERNAL, date_joined__gte=last_month
         ).count()
         # Forecast for external users
@@ -144,9 +144,9 @@ class LicenseViewSet(UsedByMixin, ModelViewSet):
         forecast_for_months = 12
         response = LicenseForecastSerializer(
             data={
-                "internal_users": LicenseKey.get_default_user_count(),
+                "users": LicenseKey.get_default_user_count(),
                 "external_users": LicenseKey.get_external_user_count(),
-                "forecasted_internal_users": (internal_in_last_month * forecast_for_months),
+                "forecasted_users": (users_in_last_month * forecast_for_months),
                 "forecasted_external_users": (external_in_last_month * forecast_for_months),
             }
         )
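The forecast endpoint's arithmetic is a straight linear extrapolation: whatever joined in the trailing 30 days is multiplied by 12. For example, under the left-hand field names and a hypothetical join count:

    forecast_for_months = 12
    internal_in_last_month = 5   # hypothetical: 5 internal users joined in the last 30 days
    forecasted_internal_users = internal_in_last_month * forecast_for_months  # -> 60 per year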
@@ -1,40 +0,0 @@
-# Generated by Django 4.2.4 on 2023-08-23 10:06
-
-import django.contrib.postgres.indexes
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-    dependencies = [
-        ("authentik_enterprise", "0001_initial"),
-    ]
-
-    operations = [
-        migrations.RenameField(
-            model_name="license",
-            old_name="users",
-            new_name="internal_users",
-        ),
-        migrations.AlterField(
-            model_name="license",
-            name="key",
-            field=models.TextField(),
-        ),
-        migrations.AddIndex(
-            model_name="license",
-            index=django.contrib.postgres.indexes.HashIndex(
-                fields=["key"], name="authentik_e_key_523e13_hash"
-            ),
-        ),
-        migrations.AlterModelOptions(
-            name="licenseusage",
-            options={
-                "verbose_name": "License Usage",
-                "verbose_name_plural": "License Usage Records",
-            },
-        ),
-        migrations.AlterModelOptions(
-            name="license",
-            options={"verbose_name": "License", "verbose_name_plural": "Licenses"},
-        ),
-    ]
@@ -11,18 +11,14 @@
 from cryptography.exceptions import InvalidSignature
 from cryptography.x509 import Certificate, load_der_x509_certificate, load_pem_x509_certificate
 from dacite import from_dict
-from django.contrib.postgres.indexes import HashIndex
 from django.db import models
 from django.db.models.query import QuerySet
 from django.utils.timezone import now
-from django.utils.translation import gettext as _
 from guardian.shortcuts import get_anonymous_user
 from jwt import PyJWTError, decode, get_unverified_header
 from rest_framework.exceptions import ValidationError
-from rest_framework.serializers import BaseSerializer

 from authentik.core.models import ExpiringModel, User, UserTypes
-from authentik.lib.models import SerializerModel
 from authentik.root.install_id import get_install_id


@@ -50,8 +46,8 @@ class LicenseKey:
     exp: int

     name: str
-    internal_users: int = 0
-    external_users: int = 0
+    users: int
+    external_users: int
     flags: list[LicenseFlags] = field(default_factory=list)

     @staticmethod
@@ -91,7 +87,7 @@ class LicenseKey:
         active_licenses = License.objects.filter(expiry__gte=now())
         total = LicenseKey(get_license_aud(), 0, "Summarized license", 0, 0)
         for lic in active_licenses:
-            total.internal_users += lic.internal_users
+            total.users += lic.users
             total.external_users += lic.external_users
             exp_ts = int(mktime(lic.expiry.timetuple()))
             if total.exp == 0:
@@ -127,7 +123,7 @@ class LicenseKey:

         Only checks the current count, no historical data is checked"""
         default_users = self.get_default_user_count()
-        if default_users > self.internal_users:
+        if default_users > self.users:
             return False
         active_users = self.get_external_user_count()
         if active_users > self.external_users:
@@ -136,9 +132,6 @@ class LicenseKey:

     def record_usage(self):
         """Capture the current validity status and metrics and save them"""
-        threshold = now() - timedelta(hours=8)
-        if LicenseUsage.objects.filter(record_date__gte=threshold).exists():
-            return
         LicenseUsage.objects.create(
             user_count=self.get_default_user_count(),
             external_user_count=self.get_external_user_count(),
@@ -156,33 +149,22 @@ class LicenseKey:
         return usage.record_date


-class License(SerializerModel):
+class License(models.Model):
     """An authentik enterprise license"""

     license_uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
-    key = models.TextField()
+    key = models.TextField(unique=True)

     name = models.TextField()
     expiry = models.DateTimeField()
-    internal_users = models.BigIntegerField()
+    users = models.BigIntegerField()
     external_users = models.BigIntegerField()

-    @property
-    def serializer(self) -> type[BaseSerializer]:
-        from authentik.enterprise.api import LicenseSerializer
-
-        return LicenseSerializer
-
     @property
     def status(self) -> LicenseKey:
         """Get parsed license status"""
         return LicenseKey.validate(self.key)

-    class Meta:
-        indexes = (HashIndex(fields=("key",)),)
-        verbose_name = _("License")
-        verbose_name_plural = _("Licenses")
-

 def usage_expiry():
     """Keep license usage records for 3 months"""
@@ -201,7 +183,3 @@ class LicenseUsage(ExpiringModel):
     within_limits = models.BooleanField()

     record_date = models.DateTimeField(auto_now_add=True)
-
-    class Meta:
-        verbose_name = _("License Usage")
-        verbose_name_plural = _("License Usage Records")
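Worth noting when reading record_usage: the left-hand version throttles itself so that at most one LicenseUsage row is written per 8-hour window, while on the right the pacing comes purely from the Celery schedule further down (hour="*/8"). The removed guard boils down to:

    # sketch of the guard removed on the right-hand side
    threshold = now() - timedelta(hours=8)
    if LicenseUsage.objects.filter(record_date__gte=threshold).exists():
        return  # a usage record was already captured in this window
    LicenseUsage.objects.create(
        user_count=self.get_default_user_count(),
        external_user_count=self.get_external_user_count(),
        # remaining fields as in the hunk above
    )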
@@ -1,30 +1,44 @@
 """Enterprise license policies"""
 from typing import Optional

+from rest_framework.serializers import BaseSerializer
+
 from authentik.core.models import User, UserTypes
 from authentik.enterprise.models import LicenseKey
+from authentik.policies.models import Policy
 from authentik.policies.types import PolicyRequest, PolicyResult
 from authentik.policies.views import PolicyAccessView


+class EnterprisePolicy(Policy):
+    """Check that a user is correctly licensed for the request"""
+
+    @property
+    def component(self) -> str:
+        return ""
+
+    @property
+    def serializer(self) -> type[BaseSerializer]:
+        raise NotImplementedError
+
+    def passes(self, request: PolicyRequest) -> PolicyResult:
+        if not LicenseKey.get_total().is_valid():
+            return PolicyResult(False)
+        if request.user.type != UserTypes.INTERNAL:
+            return PolicyResult(False)
+        return PolicyResult(True)
+
+
 class EnterprisePolicyAccessView(PolicyAccessView):
     """PolicyAccessView which also checks enterprise licensing"""

-    def check_license(self):
-        """Check license"""
-        if not LicenseKey.get_total().is_valid():
-            return False
-        if self.request.user.type != UserTypes.INTERNAL:
-            return False
-        return True
-
     def user_has_access(self, user: Optional[User] = None) -> PolicyResult:
         user = user or self.request.user
         request = PolicyRequest(user)
         request.http_request = self.request
         result = super().user_has_access(user)
-        enterprise_result = self.check_license()
-        if not enterprise_result:
+        enterprise_result = EnterprisePolicy().passes(request)
+        if not enterprise_result.passing:
             return enterprise_result
         return result
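The right-hand refactor turns the inline license check into a reusable EnterprisePolicy, so callers work with a PolicyResult rather than a bare boolean. A minimal usage sketch, mirroring user_has_access above:

    request = PolicyRequest(user)
    request.http_request = self.request
    result = EnterprisePolicy().passes(request)
    if not result.passing:
        # denied: either no valid license, or the user is not an internal user
        return result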
@@ -6,7 +6,7 @@ from authentik.lib.utils.time import fqdn_rand
 CELERY_BEAT_SCHEDULE = {
     "enterprise_calculate_license": {
         "task": "authentik.enterprise.tasks.calculate_license",
-        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/2"),
+        "schedule": crontab(minute=fqdn_rand("calculate_license"), hour="*/8"),
         "options": {"queue": "authentik_scheduled"},
     }
 }
@@ -13,6 +13,6 @@ def pre_save_license(sender: type[License], instance: License, **_):
     """Extract data from license jwt and save it into model"""
     status = instance.status
     instance.name = status.name
-    instance.internal_users = status.internal_users
+    instance.users = status.users
     instance.external_users = status.external_users
     instance.expiry = datetime.fromtimestamp(status.exp, tz=get_current_timezone())
@@ -6,4 +6,5 @@ from authentik.root.celery import CELERY_APP
 @CELERY_APP.task()
 def calculate_license():
     """Calculate licensing status"""
-    LicenseKey.get_total().record_usage()
+    total = LicenseKey.get_total()
+    total.record_usage()
@@ -23,7 +23,7 @@ class TestEnterpriseLicense(TestCase):
             aud="",
             exp=_exp,
             name=generate_id(),
-            internal_users=100,
+            users=100,
             external_users=100,
         )
     ),
@@ -32,7 +32,7 @@ class TestEnterpriseLicense(TestCase):
         """Check license verification"""
         lic = License.objects.create(key=generate_id())
         self.assertTrue(lic.status.is_valid())
-        self.assertEqual(lic.internal_users, 100)
+        self.assertEqual(lic.users, 100)

     def test_invalid(self):
         """Test invalid license"""
@@ -46,7 +46,7 @@ class TestEnterpriseLicense(TestCase):
             aud="",
             exp=_exp,
             name=generate_id(),
-            internal_users=100,
+            users=100,
             external_users=100,
         )
     ),
@@ -58,7 +58,7 @@ class TestEnterpriseLicense(TestCase):
         lic2 = License.objects.create(key=generate_id())
         self.assertTrue(lic2.status.is_valid())
         total = LicenseKey.get_total()
-        self.assertEqual(total.internal_users, 200)
+        self.assertEqual(total.users, 200)
         self.assertEqual(total.external_users, 200)
         self.assertEqual(total.exp, _exp)
         self.assertTrue(total.is_valid())
Some files were not shown because too many files have changed in this diff.