Merge branch 'master' into 37-guardian

Commit 30acf0660b
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.1.27-beta
+current_version = 0.1.30-beta
 tag = True
 commit = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)
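Note: as a sanity check, the `parse` pattern above does accept the new version string. A quick illustrative snippet (not part of the commit):

    import re

    PARSE = re.compile(r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\-(?P<release>.*)')
    print(PARSE.match('0.1.30-beta').groupdict())
    # {'major': '0', 'minor': '1', 'patch': '30', 'release': 'beta'}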
@@ -40,6 +40,7 @@ pylint:
   stage: test
 coverage:
   script:
+    - python manage.py collectstatic --no-input
     - coverage run manage.py test
     - coverage report
   stage: test

@@ -55,7 +56,7 @@ package-docker:
   before_script:
     - echo "{\"auths\":{\"docker.$NEXUS_URL\":{\"auth\":\"$NEXUS_AUTH\"}}}" > /kaniko/.docker/config.json
   script:
-    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.pkg.beryju.org/passbook:latest --destination docker.pkg.beryju.org/passbook:0.1.27-beta
+    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination docker.pkg.beryju.org/passbook:latest --destination docker.pkg.beryju.org/passbook:0.1.30-beta
   stage: build
   only:
     - tags
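Note: the added collectstatic step presumably supports the whitenoise switch further down in this commit; once static files are served through whitenoise, they need to be collected before the coverage run executes `manage.py test`.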
@@ -6,7 +6,7 @@ COPY ./requirements.txt /app/
 
 WORKDIR /app/
 
-RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev -y && \
+RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev libpq-dev -y && \
     mkdir /app/static/ && \
     pip install -r requirements.txt && \
    pip install psycopg2 && \

@@ -23,7 +23,7 @@ COPY --from=build /app/static /app/static/
 
 WORKDIR /app/
 
-RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev -y && \
+RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev libpq-dev -y && \
    pip install -r requirements.txt && \
    pip install psycopg2 && \
    adduser --system --home /app/ passbook && \
@@ -3,7 +3,7 @@ from setuptools import setup
 
 setup(
     name='django-allauth-passbook',
-    version='0.1.27-beta',
+    version='0.1.30-beta',
     description='passbook support for django-allauth',
     # long_description='\n'.join(read_simple('docs/index.md')[2:]),
     long_description_content_type='text/markdown',
@@ -18,7 +18,7 @@ tests_require = [
 
 setup(
     name='sentry-auth-passbook',
-    version='0.1.27-beta',
+    version='0.1.30-beta',
     author='BeryJu.org',
     author_email='support@beryju.org',
     url='https://passbook.beryju.org',
debian/changelog (vendored)
@@ -1,3 +1,37 @@
+passbook (0.1.30) stable; urgency=medium
+
+  * bump version: 0.1.28-beta -> 0.1.29-beta
+  * don't use context manager in web command
+
+ -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 11 Apr 2019 12:21:58 +0000
+
+passbook (0.1.29) stable; urgency=medium
+
+  * bump version: 0.1.27-beta -> 0.1.28-beta
+  * Add libpq-dev dependency so psycopg2 build works
+  * switch to whitenoise for static files
+  * replace cherrypy with daphne
+  * Run collectstatic before coverage, use autoreload on celery worker
+
+ -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 11 Apr 2019 12:00:27 +0000
+
+passbook (0.1.28) stable; urgency=medium
+
+  * bump version: 0.1.26-beta -> 0.1.27-beta
+  * fix allauth client's formatting
+  * switch from raven to sentry_sdk
+  * add ability to have non-expiring nonces, clean up expired nonces
+  * fully remove raven and switch WSGI and logging to sentry_sdk
+  * fix failing CI
+  * trigger autoreload from config files
+  * Choose upstream more cleverly
+  * Move code from django-revproxy to app_gw to fix cookie bug
+  * Implement websocket proxy
+  * switch kubernetes deployment to daphne server
+  * set default log level to warn, fix clean_nonces not working
+
+ -- Jens Langhammer <jens.langhammer@beryju.org>  Thu, 11 Apr 2019 08:46:44 +0000
+
 passbook (0.1.27) stable; urgency=medium
 
   * bump version: 0.1.25-beta -> 0.1.26-beta
debian/control (vendored)
@@ -3,7 +3,7 @@ Section: admin
 Priority: optional
 Maintainer: BeryJu.org <support@beryju.org>
 Uploaders: Jens Langhammer <jens@beryju.org>, BeryJu.org <support@beryju.org>
-Build-Depends: debhelper (>= 10), dh-systemd (>= 1.5), dh-exec, wget, dh-exec, python3 (>= 3.5) | python3.6 | python3.7
+Build-Depends: debhelper (>= 10), dh-systemd (>= 1.5), dh-exec, wget, dh-exec, python3 (>= 3.5) | python3.6 | python3.7, libpq-dev
 Standards-Version: 3.9.6
 
 Package: passbook
@@ -1,6 +1,6 @@
 apiVersion: v1
-appVersion: "0.1.27-beta"
+appVersion: "0.1.30-beta"
 description: A Helm chart for passbook.
 name: passbook
-version: "0.1.27-beta"
+version: "0.1.30-beta"
 icon: https://passbook.beryju.org/images/logo.png
@@ -15,8 +15,8 @@ data:
     port: ''
   log:
     level:
-      console: DEBUG
-      file: DEBUG
+      console: WARNING
+      file: WARNING
     file: /dev/null
     syslog:
       host: 127.0.0.1
@@ -5,7 +5,7 @@
 replicaCount: 1
 
 image:
-  tag: 0.1.27-beta
+  tag: 0.1.30-beta
 
 nameOverride: ""
@@ -1,2 +1,2 @@
 """passbook"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook admin"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook api"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook Application Security Gateway Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'
@@ -1,228 +1,33 @@
 """passbook app_gw middleware"""
-import mimetypes
-from logging import getLogger
-from urllib.parse import urlparse
-
-import certifi
-import urllib3
-from django.core.cache import cache
-from django.utils.http import urlencode
 from django.views.generic import RedirectView
-from revproxy.exceptions import InvalidUpstream
-from revproxy.response import get_django_response
-from revproxy.utils import encode_items, normalize_request_headers
 
-from passbook.app_gw.models import ApplicationGatewayProvider
-from passbook.app_gw.rewrite import Rewriter
-from passbook.core.models import Application
-from passbook.core.policies import PolicyEngine
+from passbook.app_gw.proxy.handler import RequestHandler
 from passbook.lib.config import CONFIG
 
-IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
-LOGGER = getLogger(__name__)
-QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
-ERRORS_MESSAGES = {
-    'upstream-no-scheme': ("Upstream URL scheme must be either "
-                           "'http' or 'https' (%s).")
-}
-
-# pylint: disable=too-many-instance-attributes
 class ApplicationGatewayMiddleware:
     """Check if request should be proxied or handeled normally"""
 
-    ignored_hosts = []
-    request = None
-    app_gw = None
-    http = None
-    http_no_verify = None
-    host_header = ''
-
-    _parsed_url = None
-    _request_headers = None
+    _app_gw_cache = {}
 
     def __init__(self, get_response):
         self.get_response = get_response
-        self.ignored_hosts = cache.get(IGNORED_HOSTNAMES_KEY, [])
-        self.http_no_verify = urllib3.PoolManager()
-        self.http = urllib3.PoolManager(
-            cert_reqs='CERT_REQUIRED',
-            ca_certs=certifi.where())
-
-    def precheck(self, request):
-        """Check if a request should be proxied or forwarded to passbook"""
-        # Check if hostname is in cached list of ignored hostnames
-        # This saves us having to query the database on each request
-        self.host_header = request.META.get('HTTP_HOST')
-        if self.host_header in self.ignored_hosts:
-            LOGGER.debug("%s is ignored", self.host_header)
-            return True, None
-        # Look through all ApplicationGatewayProviders and check hostnames
-        matches = ApplicationGatewayProvider.objects.filter(
-            server_name__contains=[self.host_header],
-            enabled=True)
-        if not matches.exists():
-            # Mo matching Providers found, add host header to ignored list
-            self.ignored_hosts.append(self.host_header)
-            cache.set(IGNORED_HOSTNAMES_KEY, self.ignored_hosts)
-            LOGGER.debug("Ignoring %s", self.host_header)
-            return True, None
-        # At this point we're certain there's a matching ApplicationGateway
-        if len(matches) > 1:
-            # TODO This should never happen
-            raise ValueError
-        app_gw = matches.first()
-        try:
-            # Check if ApplicationGateway is associcaited with application
-            getattr(app_gw, 'application')
-            return False, app_gw
-        except Application.DoesNotExist:
-            LOGGER.debug("ApplicationGateway not associated with Application")
-            return True, None
-        return True, None
 
     def __call__(self, request):
-        forward, self.app_gw = self.precheck(request)
-        if forward:
-            return self.get_response(request)
-        self.request = request
-        return self.dispatch(request)
+        # Rudimentary cache
+        host_header = request.META.get('HTTP_HOST')
+        if host_header not in self._app_gw_cache:
+            self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request)
+        if self._app_gw_cache[host_header]:
+            return self.dispatch(request, self._app_gw_cache[host_header])
+        return self.get_response(request)
 
-    def get_upstream(self):
-        """Get upstream as parsed url"""
-        # TODO: How to choose upstream?
-        upstream = self.app_gw.upstream[0]
-
-        self._parsed_url = urlparse(upstream)
-
-        if self._parsed_url.scheme not in ('http', 'https'):
-            raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
-                                  upstream)
-
-        return upstream
-
-    def _format_path_to_redirect(self, request):
-        LOGGER.debug("Path before: %s", request.get_full_path())
-        rewriter = Rewriter(self.app_gw, request)
-        after = rewriter.build()
-        LOGGER.debug("Path after: %s", after)
-        return after
-
-    def get_proxy_request_headers(self, request):
-        """Get normalized headers for the upstream
-        Gets all headers from the original request and normalizes them.
-        Normalization occurs by removing the prefix ``HTTP_`` and
-        replacing and ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING``
-        becames ``Accept-Encoding``.
-        .. versionadded:: 0.9.1
-        :param request: The original HTTPRequest instance
-        :returns: Normalized headers for the upstream
-        """
-        return normalize_request_headers(request)
-
-    def get_request_headers(self):
-        """Return request headers that will be sent to upstream.
-        The header REMOTE_USER is set to the current user
-        if AuthenticationMiddleware is enabled and
-        the view's add_remote_user property is True.
-        .. versionadded:: 0.9.8
-        """
-        request_headers = self.get_proxy_request_headers(self.request)
-        request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
-        LOGGER.info("%s set", self.app_gw.authentication_header)
-
-        return request_headers
-
-    def check_permission(self):
-        """Check if user is authenticated and has permission to access app"""
-        if not hasattr(self.request, 'user'):
-            return False
-        if not self.request.user.is_authenticated:
-            return False
-        policy_engine = PolicyEngine(self.app_gw.application.policies.all())
-        policy_engine.for_user(self.request.user).with_request(self.request).build()
-        passing, _messages = policy_engine.result
-
-        return passing
-
-    def get_encoded_query_params(self):
-        """Return encoded query params to be used in proxied request"""
-        get_data = encode_items(self.request.GET.lists())
-        return urlencode(get_data)
-
-    def _created_proxy_response(self, request, path):
-        request_payload = request.body
-
-        LOGGER.debug("Request headers: %s", self._request_headers)
-
-        request_url = self.get_upstream() + path
-        LOGGER.debug("Request URL: %s", request_url)
-
-        if request.GET:
-            request_url += '?' + self.get_encoded_query_params()
-            LOGGER.debug("Request URL: %s", request_url)
-
-        http = self.http
-        if not self.app_gw.upstream_ssl_verification:
-            http = self.http_no_verify
-
-        try:
-            proxy_response = http.urlopen(request.method,
-                                          request_url,
-                                          redirect=False,
-                                          retries=None,
-                                          headers=self._request_headers,
-                                          body=request_payload,
-                                          decode_content=False,
-                                          preload_content=False)
-            LOGGER.debug("Proxy response header: %s",
-                         proxy_response.getheaders())
-        except urllib3.exceptions.HTTPError as error:
-            LOGGER.exception(error)
-            raise
-
-        return proxy_response
-
-    def _replace_host_on_redirect_location(self, request, proxy_response):
-        location = proxy_response.headers.get('Location')
-        if location:
-            if request.is_secure():
-                scheme = 'https://'
-            else:
-                scheme = 'http://'
-            request_host = scheme + self.host_header
-
-            upstream_host_http = 'http://' + self._parsed_url.netloc
-            upstream_host_https = 'https://' + self._parsed_url.netloc
-
-            location = location.replace(upstream_host_http, request_host)
-            location = location.replace(upstream_host_https, request_host)
-            proxy_response.headers['Location'] = location
-            LOGGER.debug("Proxy response LOCATION: %s",
-                         proxy_response.headers['Location'])
-
-    def _set_content_type(self, request, proxy_response):
-        content_type = proxy_response.headers.get('Content-Type')
-        if not content_type:
-            content_type = (mimetypes.guess_type(request.path)[0] or
-                            self.app_gw.default_content_type)
-            proxy_response.headers['Content-Type'] = content_type
-            LOGGER.debug("Proxy response CONTENT-TYPE: %s",
-                         proxy_response.headers['Content-Type'])
-
-    def dispatch(self, request):
+    def dispatch(self, request, app_gw):
         """Build proxied request and pass to upstream"""
-        if not self.check_permission():
+        handler = RequestHandler(app_gw, request)
+
+        if not handler.check_permission():
             to_url = 'https://%s/?next=%s' % (CONFIG.get('domains')[0], request.get_full_path())
             return RedirectView.as_view(url=to_url)(request)
 
-        self._request_headers = self.get_request_headers()
-
-        path = self._format_path_to_redirect(request)
-        proxy_response = self._created_proxy_response(request, path)
-
-        self._replace_host_on_redirect_location(request, proxy_response)
-        self._set_content_type(request, proxy_response)
-        response = get_django_response(proxy_response, strict_cookies=False)
-
-        LOGGER.debug("RESPONSE RETURNED: %s", response)
-        return response
+        return handler.get_response()
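Note: the rewritten middleware memoizes the gateway lookup per Host header in a class-level dict, so the database is queried at most once per host per process, negative results included. A minimal illustrative sketch of that pattern (not part of the commit):

    _app_gw_cache = {}

    def resolve(host, lookup):
        # lookup() may return a provider object or a falsy "not proxied"
        # marker; either way the result is cached and reused afterwards
        if host not in _app_gw_cache:
            _app_gw_cache[host] = lookup(host)
        return _app_gw_cache[host]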
passbook/app_gw/migrations/0003_auto_20190411_1314.py (new file)
@@ -0,0 +1,18 @@
+# Generated by Django 2.2 on 2019-04-11 13:14
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('passbook_app_gw', '0002_auto_20190321_1521'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='applicationgatewayprovider',
+            name='authentication_header',
+            field=models.TextField(blank=True, default='X-Remote-User'),
+        ),
+    ]
@@ -15,7 +15,7 @@ class ApplicationGatewayProvider(Provider):
     upstream = ArrayField(models.TextField())
     enabled = models.BooleanField(default=True)
 
-    authentication_header = models.TextField(default='X-Remote-User')
+    authentication_header = models.TextField(default='X-Remote-User', blank=True)
     default_content_type = models.TextField(default='application/octet-stream')
     upstream_ssl_verification = models.BooleanField(default=True)
passbook/app_gw/proxy/__init__.py (new, empty file)
passbook/app_gw/proxy/exceptions.py (new file)
@@ -0,0 +1,8 @@
+"""Exception classes"""
+
+class ReverseProxyException(Exception):
+    """Base for revproxy exception"""
+
+
+class InvalidUpstream(ReverseProxyException):
+    """Invalid upstream set"""
passbook/app_gw/proxy/handler.py (new file)
@@ -0,0 +1,225 @@
+"""passbook app_gw request handler"""
+import mimetypes
+from logging import getLogger
+from random import SystemRandom
+from urllib.parse import urlparse
+
+import certifi
+import urllib3
+from django.core.cache import cache
+from django.utils.http import urlencode
+
+from passbook.app_gw.models import ApplicationGatewayProvider
+from passbook.app_gw.proxy.exceptions import InvalidUpstream
+from passbook.app_gw.proxy.response import get_django_response
+from passbook.app_gw.proxy.rewrite import Rewriter
+from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
+from passbook.core.models import Application
+from passbook.core.policies import PolicyEngine
+
+SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
+IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored'
+LOGGER = getLogger(__name__)
+QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`'
+ERRORS_MESSAGES = {
+    'upstream-no-scheme': ("Upstream URL scheme must be either "
+                           "'http' or 'https' (%s).")
+}
+HTTP_NO_VERIFY = urllib3.PoolManager()
+HTTP = urllib3.PoolManager(
+    cert_reqs='CERT_REQUIRED',
+    ca_certs=certifi.where())
+IGNORED_HOSTS = cache.get(IGNORED_HOSTNAMES_KEY, [])
+POLICY_CACHE = {}
+
+class RequestHandler:
+    """Forward requests"""
+
+    _parsed_url = None
+    _request_headers = None
+
+    def __init__(self, app_gw, request):
+        self.app_gw = app_gw
+        self.request = request
+        if self.app_gw.pk not in POLICY_CACHE:
+            POLICY_CACHE[self.app_gw.pk] = self.app_gw.application.policies.all()
+
+    @staticmethod
+    def find_app_gw_for_request(request):
+        """Check if a request should be proxied or forwarded to passbook"""
+        # Check if hostname is in cached list of ignored hostnames
+        # This saves us having to query the database on each request
+        host_header = request.META.get('HTTP_HOST')
+        if host_header in IGNORED_HOSTS:
+            # LOGGER.debug("%s is ignored", host_header)
+            return False
+        # Look through all ApplicationGatewayProviders and check hostnames
+        matches = ApplicationGatewayProvider.objects.filter(
+            server_name__contains=[host_header],
+            enabled=True)
+        if not matches.exists():
+            # Mo matching Providers found, add host header to ignored list
+            IGNORED_HOSTS.append(host_header)
+            cache.set(IGNORED_HOSTNAMES_KEY, IGNORED_HOSTS)
+            # LOGGER.debug("Ignoring %s", host_header)
+            return False
+        # At this point we're certain there's a matching ApplicationGateway
+        if len(matches) > 1:
+            # This should never happen
+            raise ValueError
+        app_gw = matches.first()
+        try:
+            # Check if ApplicationGateway is associated with application
+            getattr(app_gw, 'application')
+            if app_gw:
+                return app_gw
+        except Application.DoesNotExist:
+            pass
+            # LOGGER.debug("ApplicationGateway not associated with Application")
+        return True
+
+    def _get_upstream(self):
+        """Choose random upstream and save in session"""
+        if SESSION_UPSTREAM_KEY not in self.request.session:
+            self.request.session[SESSION_UPSTREAM_KEY] = {}
+        if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]:
+            upstream_index = int(SystemRandom().random() * len(self.app_gw.upstream))
+            self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index
+        return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]]
+
+    def get_upstream(self):
+        """Get upstream as parsed url"""
+        upstream = self._get_upstream()
+
+        self._parsed_url = urlparse(upstream)
+
+        if self._parsed_url.scheme not in ('http', 'https'):
+            raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] %
+                                  upstream)
+
+        return upstream
+
+    def _format_path_to_redirect(self):
+        # LOGGER.debug("Path before: %s", self.request.get_full_path())
+        rewriter = Rewriter(self.app_gw, self.request)
+        after = rewriter.build()
+        # LOGGER.debug("Path after: %s", after)
+        return after
+
+    def get_proxy_request_headers(self):
+        """Get normalized headers for the upstream
+        Gets all headers from the original request and normalizes them.
+        Normalization occurs by removing the prefix ``HTTP_`` and
+        replacing and ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING``
+        becames ``Accept-Encoding``.
+        .. versionadded:: 0.9.1
+        :param request: The original HTTPRequest instance
+        :returns: Normalized headers for the upstream
+        """
+        return normalize_request_headers(self.request)
+
+    def get_request_headers(self):
+        """Return request headers that will be sent to upstream.
+        The header REMOTE_USER is set to the current user
+        if AuthenticationMiddleware is enabled and
+        the view's add_remote_user property is True.
+        .. versionadded:: 0.9.8
+        """
+        request_headers = self.get_proxy_request_headers()
+        if not self.app_gw.authentication_header:
+            return request_headers
+        request_headers[self.app_gw.authentication_header] = self.request.user.get_username()
+        # LOGGER.debug("%s set", self.app_gw.authentication_header)
+
+        return request_headers
+
+    def check_permission(self):
+        """Check if user is authenticated and has permission to access app"""
+        if not hasattr(self.request, 'user'):
+            return False
+        if not self.request.user.is_authenticated:
+            return False
+        policy_engine = PolicyEngine(POLICY_CACHE[self.app_gw.pk])
+        policy_engine.for_user(self.request.user).with_request(self.request).build()
+        passing, _messages = policy_engine.result
+
+        return passing
+
+    def get_encoded_query_params(self):
+        """Return encoded query params to be used in proxied request"""
+        get_data = encode_items(self.request.GET.lists())
+        return urlencode(get_data)
+
+    def _created_proxy_response(self, path):
+        request_payload = self.request.body
+
+        # LOGGER.debug("Request headers: %s", self._request_headers)
+
+        request_url = self.get_upstream() + path
+        # LOGGER.debug("Request URL: %s", request_url)
+
+        if self.request.GET:
+            request_url += '?' + self.get_encoded_query_params()
+            # LOGGER.debug("Request URL: %s", request_url)
+
+        http = HTTP
+        if not self.app_gw.upstream_ssl_verification:
+            http = HTTP_NO_VERIFY
+
+        try:
+            proxy_response = http.urlopen(self.request.method,
+                                          request_url,
+                                          redirect=False,
+                                          retries=None,
+                                          headers=self._request_headers,
+                                          body=request_payload,
+                                          decode_content=False,
+                                          preload_content=False)
+            # LOGGER.debug("Proxy response header: %s",
+            #              proxy_response.getheaders())
+        except urllib3.exceptions.HTTPError as error:
+            LOGGER.exception(error)
+            raise
+
+        return proxy_response
+
+    def _replace_host_on_redirect_location(self, proxy_response):
+        location = proxy_response.headers.get('Location')
+        if location:
+            if self.request.is_secure():
+                scheme = 'https://'
+            else:
+                scheme = 'http://'
+            request_host = scheme + self.request.META.get('HTTP_HOST')
+
+            upstream_host_http = 'http://' + self._parsed_url.netloc
+            upstream_host_https = 'https://' + self._parsed_url.netloc
+
+            location = location.replace(upstream_host_http, request_host)
+            location = location.replace(upstream_host_https, request_host)
+            proxy_response.headers['Location'] = location
+            # LOGGER.debug("Proxy response LOCATION: %s",
+            #              proxy_response.headers['Location'])
+
+    def _set_content_type(self, proxy_response):
+        content_type = proxy_response.headers.get('Content-Type')
+        if not content_type:
+            content_type = (mimetypes.guess_type(self.request.path)[0] or
+                            self.app_gw.default_content_type)
+            proxy_response.headers['Content-Type'] = content_type
+            # LOGGER.debug("Proxy response CONTENT-TYPE: %s",
+            #              proxy_response.headers['Content-Type'])
+
+    def get_response(self):
+        """Pass request to upstream and return response"""
+        self._request_headers = self.get_request_headers()
+
+        path = self._format_path_to_redirect()
+        proxy_response = self._created_proxy_response(path)
+
+        self._replace_host_on_redirect_location(proxy_response)
+        self._set_content_type(proxy_response)
+        response = get_django_response(proxy_response, strict_cookies=False)
+
+        # LOGGER.debug("RESPONSE RETURNED: %s", response)
+        return response
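Note: `_get_upstream()` above is the "Choose upstream more cleverly" changelog entry in action: the upstream index is drawn once with `SystemRandom` and pinned in the session, so a client keeps hitting the same backend. A self-contained sketch of that behaviour (hypothetical upstream URLs, plain dict standing in for `request.session`):

    from random import SystemRandom

    SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream'
    upstreams = ['http://backend-1:8000', 'http://backend-2:8000']
    session = {}

    def pick(session, upstreams):
        if SESSION_UPSTREAM_KEY not in session:
            # uniform index in [0, len(upstreams) - 1]
            session[SESSION_UPSTREAM_KEY] = int(SystemRandom().random() * len(upstreams))
        return upstreams[session[SESSION_UPSTREAM_KEY]]

    assert pick(session, upstreams) == pick(session, upstreams)  # sticky across calls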
passbook/app_gw/proxy/response.py (new file)
@@ -0,0 +1,63 @@
+"""response functions from django-revproxy"""
+import logging
+
+from django.http import HttpResponse, StreamingHttpResponse
+
+from passbook.app_gw.proxy.utils import (cookie_from_string,
+                                         set_response_headers, should_stream)
+
+#: Default number of bytes that are going to be read in a file lecture
+DEFAULT_AMT = 2 ** 16
+
+logger = logging.getLogger('revproxy.response')
+
+
+def get_django_response(proxy_response, strict_cookies=False):
+    """This method is used to create an appropriate response based on the
+    Content-Length of the proxy_response. If the content is bigger than
+    MIN_STREAMING_LENGTH, which is found on utils.py,
+    than django.http.StreamingHttpResponse will be created,
+    else a django.http.HTTPResponse will be created instead
+
+    :param proxy_response: An Instance of urllib3.response.HTTPResponse that
+                           will create an appropriate response
+    :param strict_cookies: Whether to only accept RFC-compliant cookies
+    :returns: Returns an appropriate response based on the proxy_response
+              content-length
+    """
+    status = proxy_response.status
+    headers = proxy_response.headers
+
+    logger.debug('Proxy response headers: %s', headers)
+
+    content_type = headers.get('Content-Type')
+
+    logger.debug('Content-Type: %s', content_type)
+
+    if should_stream(proxy_response):
+        logger.info('Content-Length is bigger than %s', DEFAULT_AMT)
+        response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT),
+                                         status=status,
+                                         content_type=content_type)
+    else:
+        content = proxy_response.data or b''
+        response = HttpResponse(content, status=status,
+                                content_type=content_type)
+
+    logger.info('Normalizing response headers')
+    set_response_headers(response, headers)
+
+    logger.debug('Response headers: %s', getattr(response, '_headers'))
+
+    cookies = proxy_response.headers.getlist('set-cookie')
+    logger.info('Checking for invalid cookies')
+    for cookie_string in cookies:
+        cookie_dict = cookie_from_string(cookie_string,
+                                         strict_cookies=strict_cookies)
+        # if cookie is invalid cookie_dict will be None
+        if cookie_dict:
+            response.set_cookie(**cookie_dict)
+
+    logger.debug('Response cookies: %s', response.cookies)
+
+    return response
@@ -2,6 +2,7 @@
 
 from passbook.app_gw.models import RewriteRule
 
+RULE_CACHE = {}
 
 class Context:
     """Empty class which we dynamically add attributes to"""

@@ -15,6 +16,9 @@ class Rewriter:
     def __init__(self, application, request):
         self.__application = application
         self.__request = request
+        if self.__application.pk not in RULE_CACHE:
+            RULE_CACHE[self.__application.pk] = RewriteRule.objects.filter(
+                provider__in=[self.__application])
 
     def __build_context(self, matches):
         """Build object with .0, .1, etc as groups and give access to request"""

@@ -27,7 +31,7 @@ class Rewriter:
     def build(self):
         """Run all rules over path and return final path"""
         path = self.__request.get_full_path()
-        for rule in RewriteRule.objects.filter(provider__in=[self.__application]):
+        for rule in RULE_CACHE[self.__application.pk]:
             matches = rule.compiled_matcher.search(path)
             if not matches:
                 continue
passbook/app_gw/proxy/utils.py (new file)
@@ -0,0 +1,227 @@
+"""Utils from django-revproxy, slightly adjusted"""
+import logging
+import re
+from wsgiref.util import is_hop_by_hop
+
+try:
+    from http.cookies import SimpleCookie
+    COOKIE_PREFIX = ''
+except ImportError:
+    from Cookie import SimpleCookie
+    COOKIE_PREFIX = 'Set-Cookie: '
+
+
+#: List containing string constant that are used to represent headers that can
+#: be ignored in the required_header function
+IGNORE_HEADERS = (
+    'HTTP_ACCEPT_ENCODING',  # We want content to be uncompressed so
+                             # we remove the Accept-Encoding from
+                             # original request
+    'HTTP_HOST',
+    'HTTP_REMOTE_USER',
+)
+
+
+# Default from HTTP RFC 2616
+# See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
+#: Variable that represent the default charset used
+DEFAULT_CHARSET = 'latin-1'
+
+#: List containing string constants that represents possible html content type
+HTML_CONTENT_TYPES = (
+    'text/html',
+    'application/xhtml+xml'
+)
+
+#: Variable used to represent a minimal content size required for response
+#: to be turned into stream
+MIN_STREAMING_LENGTH = 4 * 1024  # 4KB
+
+#: Regex used to find charset in a html content type
+_get_charset_re = re.compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I)
+
+
+def is_html_content_type(content_type):
+    """Function used to verify if the parameter is a proper html content type
+
+    :param content_type: String variable that represent a content-type
+    :returns: A boolean value stating if the content_type is a valid html
+              content type
+    """
+    for html_content_type in HTML_CONTENT_TYPES:
+        if content_type.startswith(html_content_type):
+            return True
+
+    return False
+
+
+def should_stream(proxy_response):
+    """Function to verify if the proxy_response must be converted into
+    a stream.This will be done by checking the proxy_response content-length
+    and verify if its length is bigger than one stipulated
+    by MIN_STREAMING_LENGTH.
+
+    :param proxy_response: An Instance of urllib3.response.HTTPResponse
+    :returns: A boolean stating if the proxy_response should
+              be treated as a stream
+    """
+    content_type = proxy_response.headers.get('Content-Type')
+
+    if is_html_content_type(content_type):
+        return False
+
+    try:
+        content_length = int(proxy_response.headers.get('Content-Length', 0))
+    except ValueError:
+        content_length = 0
+
+    if not content_length or content_length > MIN_STREAMING_LENGTH:
+        return True
+
+    return False
+
+
+def get_charset(content_type):
+    """Function used to retrieve the charset from a content-type.If there is no
+    charset in the content type then the charset defined on DEFAULT_CHARSET
+    will be returned
+
+    :param content_type: A string containing a Content-Type header
+    :returns: A string containing the charset
+    """
+    if not content_type:
+        return DEFAULT_CHARSET
+
+    matched = _get_charset_re.search(content_type)
+    if matched:
+        # Extract the charset and strip its double quotes
+        return matched.group('charset').replace('"', '')
+    return DEFAULT_CHARSET
+
+
+def required_header(header):
+    """Function that verify if the header parameter is a essential header
+
+    :param header: A string represented a header
+    :returns: A boolean value that represent if the header is required
+    """
+    if header in IGNORE_HEADERS:
+        return False
+
+    if header.startswith('HTTP_') or header == 'CONTENT_TYPE':
+        return True
+
+    return False
+
+
+def set_response_headers(response, response_headers):
+    """Set response's header"""
+    for header, value in response_headers.items():
+        if is_hop_by_hop(header) or header.lower() == 'set-cookie':
+            continue
+
+        response[header.title()] = value
+
+    logger.debug('Response headers: %s', getattr(response, '_headers'))
+
+
+def normalize_request_headers(request):
+    """Function used to transform header, replacing 'HTTP\\_' to ''
+    and replace '_' to '-'
+
+    :param request: A HttpRequest that will be transformed
+    :returns: A dictionary with the normalized headers
+    """
+    norm_headers = {}
+    for header, value in request.META.items():
+        if required_header(header):
+            norm_header = header.replace('HTTP_', '').title().replace('_', '-')
+            norm_headers[norm_header] = value
+
+    return norm_headers
+
+
+def encode_items(items):
+    """Function that encode all elements in the list of items passed as
+    a parameter
+
+    :param items: A list of tuple
+    :returns: A list of tuple with all items encoded in 'utf-8'
+    """
+    encoded = []
+    for key, values in items:
+        for value in values:
+            encoded.append((key.encode('utf-8'), value.encode('utf-8')))
+    return encoded
+
+
+logger = logging.getLogger('revproxy.cookies')
+
+
+def cookie_from_string(cookie_string, strict_cookies=False):
+    """Parser for HTTP header set-cookie
+    The return from this function will be used as parameters for
+    django's response.set_cookie method. Because set_cookie doesn't
+    have parameter comment, this cookie attribute will be ignored.
+
+    :param cookie_string: A string representing a valid cookie
+    :param strict_cookies: Whether to only accept RFC-compliant cookies
+    :returns: A dictionary containing the cookie_string attributes
+    """
+
+    if strict_cookies:
+
+        cookies = SimpleCookie(COOKIE_PREFIX + cookie_string)
+        if not cookies.keys():
+            return None
+        cookie_name, = cookies.keys()
+        cookie_dict = {k: v for k, v in cookies[cookie_name].items()
+                       if v and k != 'comment'}
+        cookie_dict['key'] = cookie_name
+        cookie_dict['value'] = cookies[cookie_name].value
+        return cookie_dict
+
+    valid_attrs = ('path', 'domain', 'comment', 'expires',
+                   'max_age', 'httponly', 'secure')
+
+    cookie_dict = {}
+
+    cookie_parts = cookie_string.split(';')
+    try:
+        cookie_dict['key'], cookie_dict['value'] = \
+            cookie_parts[0].split('=', 1)
+        cookie_dict['value'] = cookie_dict['value'].replace('"', '')
+        # print('aaaaaaaaaaaaaaaaaaaaaaaaaaaa')
+        # print(cookie_parts[0].split('=', 1))
+    except ValueError:
+        logger.warning('Invalid cookie: `%s`', cookie_string)
+        return None
+
+    if cookie_dict['value'].startswith('='):
+        logger.warning('Invalid cookie: `%s`', cookie_string)
+        return None
+
+    for part in cookie_parts[1:]:
+        if '=' in part:
+            attr, value = part.split('=', 1)
+            value = value.strip()
+        else:
+            attr = part
+            value = ''
+
+        attr = attr.strip().lower()
+        if not attr:
+            continue
+
+        if attr in valid_attrs:
+            if attr in ('httponly', 'secure'):
+                cookie_dict[attr] = True
+            elif attr in 'comment':
+                # ignoring comment attr as explained in the
+                # function docstring
+                continue
+            else:
+                cookie_dict[attr] = value
+        else:
+            logger.warning('Unknown cookie attribute %s', attr)
+
+    return cookie_dict
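Note: in the lenient mode, `cookie_from_string()` above returns a dict shaped for Django's `response.set_cookie()`. An illustrative call with hypothetical values (not part of the commit):

    cookie = cookie_from_string('sessionid=abc123; Path=/; HttpOnly')
    # -> {'key': 'sessionid', 'value': 'abc123', 'path': '/', 'httponly': True}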
@@ -1,2 +1,7 @@
 django-revproxy
 urllib3[secure]
+channels
+service_identity
+websocket-client
+daphne<2.3.0
+asgiref~=2.3
@@ -1,5 +1,5 @@
 """Application Security Gateway settings"""
-# INSTALLED_APPS = [
-# 'revproxy'
-# ]
+INSTALLED_APPS = [
+    'channels'
+]
+ASGI_APPLICATION = "passbook.app_gw.websocket.routing.application"
@@ -6,8 +6,8 @@ from django.core.cache import cache
 from django.db.models.signals import post_save
 from django.dispatch import receiver
 
-from passbook.app_gw.middleware import IGNORED_HOSTNAMES_KEY
 from passbook.app_gw.models import ApplicationGatewayProvider
+from passbook.app_gw.proxy.handler import IGNORED_HOSTNAMES_KEY
 
 LOGGER = getLogger(__name__)
passbook/app_gw/websocket/__init__.py (new, empty file)
passbook/app_gw/websocket/consumer.py (new file)
@@ -0,0 +1,83 @@
+"""websocket proxy consumer"""
+import threading
+from logging import getLogger
+from ssl import CERT_NONE
+
+import websocket
+from channels.generic.websocket import WebsocketConsumer
+
+from passbook.app_gw.models import ApplicationGatewayProvider
+
+LOGGER = getLogger(__name__)
+
+class ProxyConsumer(WebsocketConsumer):
+    """Proxy websocket connection to upstream"""
+
+    _headers_dict = {}
+    _app_gw = None
+    _client = None
+    _thread = None
+
+    def _fix_headers(self, input_dict):
+        """Fix headers from bytestrings to normal strings"""
+        return {
+            key.decode('utf-8'): value.decode('utf-8')
+            for key, value in dict(input_dict).items()
+        }
+
+    def connect(self):
+        """Extract host header, lookup in database and proxy connection"""
+        self._headers_dict = self._fix_headers(dict(self.scope.get('headers')))
+        host = self._headers_dict.pop('host')
+        query_string = self.scope.get('query_string').decode('utf-8')
+        matches = ApplicationGatewayProvider.objects.filter(
+            server_name__contains=[host],
+            enabled=True)
+        if matches.exists():
+            self._app_gw = matches.first()
+            # TODO: Get upstream that starts with wss or
+            upstream = self._app_gw.upstream[0].replace('http', 'ws') + self.scope.get('path')
+            if query_string:
+                upstream += '?' + query_string
+            sslopt = {}
+            if not self._app_gw.upstream_ssl_verification:
+                sslopt = {"cert_reqs": CERT_NONE}
+            self._client = websocket.WebSocketApp(
+                url=upstream,
+                subprotocols=self.scope.get('subprotocols'),
+                header=self._headers_dict,
+                on_message=self._client_on_message_handler(),
+                on_error=self._client_on_error_handler(),
+                on_close=self._client_on_close_handler(),
+                on_open=self._client_on_open_handler())
+            LOGGER.debug("Accepting connection for %s", host)
+            self._thread = threading.Thread(target=lambda: self._client.run_forever(sslopt=sslopt))
+            self._thread.start()
+
+    def _client_on_open_handler(self):
+        return lambda ws: self.accept(self._client.sock.handshake_response.subprotocol)
+
+    def _client_on_message_handler(self):
+        # pylint: disable=unused-argument,invalid-name
+        def message_handler(ws, message):
+            if isinstance(message, str):
+                self.send(text_data=message)
+            else:
+                self.send(bytes_data=message)
+        return message_handler
+
+    def _client_on_error_handler(self):
+        return lambda ws, error: print(error)
+
+    def _client_on_close_handler(self):
+        return lambda ws: self.disconnect(0)
+
+    def disconnect(self, code):
+        self._client.close()
+
+    def receive(self, text_data=None, bytes_data=None):
+        if text_data:
+            opcode = websocket.ABNF.OPCODE_TEXT
+        if bytes_data:
+            opcode = websocket.ABNF.OPCODE_BINARY
+        self._client.send(text_data or bytes_data, opcode)
passbook/app_gw/websocket/routing.py (new file)
@@ -0,0 +1,17 @@
+"""app_gw websocket proxy"""
+from channels.auth import AuthMiddlewareStack
+from channels.routing import ProtocolTypeRouter, URLRouter
+from django.conf.urls import url
+
+from passbook.app_gw.websocket.consumer import ProxyConsumer
+
+websocket_urlpatterns = [
+    url(r'^(.*)$', ProxyConsumer),
+]
+
+application = ProtocolTypeRouter({
+    # (http->django views is added by default)
+    'websocket': AuthMiddlewareStack(
+        URLRouter(websocket_urlpatterns)
+    ),
+})
@@ -1,2 +1,2 @@
 """passbook audit Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook captcha_factor Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook core"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'
passbook/core/asgi.py (new file)
@@ -0,0 +1,13 @@
+"""
+ASGI entrypoint. Configures Django and then runs the application
+defined in the ASGI_APPLICATION setting.
+"""
+
+import os
+
+import django
+from channels.routing import get_default_application
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.core.settings")
+django.setup()
+application = get_default_application()
@@ -2,11 +2,11 @@
 
 from logging import getLogger
 
-import cherrypy
-from django.conf import settings
+from daphne.cli import CommandLineInterface
 from django.core.management.base import BaseCommand
+from django.utils import autoreload
 
-from passbook.core.wsgi import application
+from passbook.lib.config import CONFIG
 
 LOGGER = getLogger(__name__)

@@ -15,20 +15,15 @@ class Command(BaseCommand):
     """Run CherryPy webserver"""
 
     def handle(self, *args, **options):
-        """passbook cherrypy server"""
-        config = settings.CHERRYPY_SERVER
-        config.update(**options)
-        cherrypy.config.update(config)
-        cherrypy.tree.graft(application, '/')
-        # Mount NullObject to serve static files
-        cherrypy.tree.mount(None, '/static', config={
-            '/': {
-                'tools.staticdir.on': True,
-                'tools.staticdir.dir': settings.STATIC_ROOT,
-                'tools.expires.on': True,
-                'tools.expires.secs': 86400,
-                'tools.gzip.on': True,
-            }
-        })
-        cherrypy.engine.start()
-        cherrypy.engine.block()
+        """passbook daphne server"""
+        autoreload.run_with_reloader(self.daphne_server)
+
+    def daphne_server(self):
+        """Run daphne server within autoreload"""
+        autoreload.raise_last_exception()
+        CommandLineInterface().run([
+            '-p', str(CONFIG.y('web.port', 8000)),
+            '-b', CONFIG.y('web.listen', '0.0.0.0'),  # nosec
+            '--access-log', '/dev/null',
+            'passbook.core.asgi:application'
+        ])
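Note: outside the autoreloader, the new handle() is roughly equivalent to invoking daphne directly, e.g. `daphne -p 8000 -b 0.0.0.0 --access-log /dev/null passbook.core.asgi:application` (assuming the `web.port`/`web.listen` defaults shown above).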
@@ -3,6 +3,7 @@
 from logging import getLogger
 
 from django.core.management.base import BaseCommand
+from django.utils import autoreload
 
 from passbook.core.celery import CELERY_APP
 

@@ -14,4 +15,9 @@ class Command(BaseCommand):
 
     def handle(self, *args, **options):
         """celery worker"""
+        autoreload.run_with_reloader(self.celery_worker)
+
+    def celery_worker(self):
+        """Run celery worker within autoreload"""
+        autoreload.raise_last_exception()
         CELERY_APP.worker_main(['worker', '--autoscale=10,3', '-E', '-B'])
@@ -1,6 +1,5 @@
 """passbook core policy engine"""
-from logging import getLogger
+# from logging import getLogger

 from amqp.exceptions import UnexpectedFrame
 from celery import group
 from celery.exceptions import TimeoutError as CeleryTimeoutError
@@ -10,7 +9,7 @@ from ipware import get_client_ip
 from passbook.core.celery import CELERY_APP
 from passbook.core.models import Policy, User

-LOGGER = getLogger(__name__)
+# LOGGER = getLogger(__name__)

 def _cache_key(policy, user):
     return "%s#%s" % (policy.uuid, user.pk)
@@ -24,8 +23,8 @@ def _policy_engine_task(user_pk, policy_pk, **kwargs):
     user_obj = User.objects.get(pk=user_pk)
     for key, value in kwargs.items():
         setattr(user_obj, key, value)
-    LOGGER.debug("Running policy `%s`#%s for user %s...", policy_obj.name,
-                 policy_obj.pk.hex, user_obj)
+    # LOGGER.debug("Running policy `%s`#%s for user %s...", policy_obj.name,
+    #              policy_obj.pk.hex, user_obj)
     policy_result = policy_obj.passes(user_obj)
     # Handle policy result correctly if result, message or just result
     message = None
@@ -34,10 +33,10 @@ def _policy_engine_task(user_pk, policy_pk, **kwargs):
     # Invert result if policy.negate is set
     if policy_obj.negate:
         policy_result = not policy_result
-    LOGGER.debug("Policy %r#%s got %s", policy_obj.name, policy_obj.pk.hex, policy_result)
+    # LOGGER.debug("Policy %r#%s got %s", policy_obj.name, policy_obj.pk.hex, policy_result)
     cache_key = _cache_key(policy_obj, user_obj)
     cache.set(cache_key, (policy_obj.action, policy_result, message))
-    LOGGER.debug("Cached entry as %s", cache_key)
+    # LOGGER.debug("Cached entry as %s", cache_key)
     return policy_obj.action, policy_result, message

 class PolicyEngine:
@@ -82,16 +81,16 @@ class PolicyEngine:
         for policy in self.policies:
             cached_policy = cache.get(_cache_key(policy, self.__user), None)
             if cached_policy:
-                LOGGER.debug("Taking result from cache for %s", policy.pk.hex)
+                # LOGGER.debug("Taking result from cache for %s", policy.pk.hex)
                 cached_policies.append(cached_policy)
             else:
-                LOGGER.debug("Evaluating policy %s", policy.pk.hex)
+                # LOGGER.debug("Evaluating policy %s", policy.pk.hex)
                 signatures.append(_policy_engine_task.signature(
                     args=(self.__user.pk, policy.pk.hex),
                     kwargs=kwargs,
                     time_limit=policy.timeout))
                 self.__get_timeout += policy.timeout
-        LOGGER.debug("Set total policy timeout to %r", self.__get_timeout)
+        # LOGGER.debug("Set total policy timeout to %r", self.__get_timeout)
         # If all policies are cached, we have an empty list here.
         if signatures:
             self.__group = group(signatures)()
@@ -120,7 +119,7 @@ class PolicyEngine:
         for policy_action, policy_result, policy_message in result:
             passing = (policy_action == Policy.ACTION_ALLOW and policy_result) or \
                 (policy_action == Policy.ACTION_DENY and not policy_result)
-            LOGGER.debug('Action=%s, Result=%r => %r', policy_action, policy_result, passing)
+            # LOGGER.debug('Action=%s, Result=%r => %r', policy_action, policy_result, passing)
             if policy_message:
                 messages.append(policy_message)
             if not passing:
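The `passing` computation near the end of this hunk encodes the engine's decision rule: an ALLOW policy passes when its check returns True, a DENY policy passes when its check returns False, and `negate` flips the raw result before that rule is applied. A minimal, self-contained sketch of the truth table (the `ACTION_*` strings are stand-ins for the `Policy` model constants, which this diff does not show):

ACTION_ALLOW, ACTION_DENY = 'allow', 'deny'  # stand-ins for Policy.ACTION_*

def passes(action, raw_result, negate=False):
    """Mirror of the engine's decision rule from the hunk above."""
    result = not raw_result if negate else raw_result
    return (action == ACTION_ALLOW and result) or \
           (action == ACTION_DENY and not result)

assert passes(ACTION_ALLOW, True)                 # allow + passing check -> pass
assert not passes(ACTION_ALLOW, False)            # allow + failing check -> block
assert passes(ACTION_DENY, False)                 # deny  + failing check -> pass
assert passes(ACTION_ALLOW, False, negate=True)   # negate flips the raw result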
@@ -1,5 +1,4 @@
 celery
-cherrypy
 colorlog
 django-guardian
 django-ipware
@@ -13,3 +12,4 @@ psycopg2
 PyYAML
 sentry-sdk
 pip
+whitenoise
@@ -124,6 +124,7 @@ CACHES = {

 MIDDLEWARE = [
     'django.contrib.sessions.middleware.SessionMiddleware',
+    'whitenoise.middleware.WhiteNoiseMiddleware',
     'django.contrib.auth.middleware.AuthenticationMiddleware',
     'passbook.app_gw.middleware.ApplicationGatewayMiddleware',
     'django.middleware.security.SecurityMiddleware',
@@ -219,38 +220,27 @@ CELERY_BEAT_SCHEDULE = {
     }
 }

-sentry_init(
-    dsn=("https://55b5dd780bc14f4c96bba69b7a9abbcc:449af483bd0745"
-         "0d83be640d834e5458@sentry.services.beryju.org/8"),
-    integrations=[
-        DjangoIntegration(),
-        CeleryIntegration(),
-        LoggingIntegration(
-            level=logging.INFO,
-            event_level=logging.ERROR
-        )
-    ],
-    send_default_pii=True
-)
-
-# CherryPY settings
-with CONFIG.cd('web'):
-    CHERRYPY_SERVER = {
-        'server.socket_host': CONFIG.get('listen', '0.0.0.0'),  # nosec
-        'server.socket_port': CONFIG.get('port', 8000),
-        'server.thread_pool': CONFIG.get('threads', 30),
-        'log.screen': False,
-        'log.access_file': '',
-        'log.error_file': '',
-    }
+if not DEBUG:
+    sentry_init(
+        dsn=("https://55b5dd780bc14f4c96bba69b7a9abbcc:449af483bd0745"
+             "0d83be640d834e5458@sentry.services.beryju.org/8"),
+        integrations=[
+            DjangoIntegration(),
+            CeleryIntegration(),
+            LoggingIntegration(
+                level=logging.INFO,
+                event_level=logging.ERROR
+            )
+        ],
+        send_default_pii=True,
+    )

 # Static files (CSS, JavaScript, Images)
 # https://docs.djangoproject.com/en/2.1/howto/static-files/

 STATIC_URL = '/static/'
-LOG_HANDLERS = ['console', 'syslog', 'file']
+STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

 with CONFIG.cd('log'):
     LOGGING = {
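This hunk pairs with the `whitenoise` requirement and the `WhiteNoiseMiddleware` entry added earlier: together they replace the removed CherryPy server's static-file handling with WhiteNoise serving compressed, cache-busted files straight from the Django process. A minimal sketch of how the pieces fit together (the `STATIC_ROOT` path is illustrative, not taken from this diff):

# settings.py (sketch)
MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'whitenoise.middleware.WhiteNoiseMiddleware',  # serves the collected files
    # ...
]
STATIC_URL = '/static/'
STATIC_ROOT = '/app/static/'  # illustrative; wherever collectstatic writes to
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

Note that the manifest storage backend requires `manage.py collectstatic` to have run; Django raises a ValueError at render time for any static file missing from the manifest.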
@@ -294,38 +284,52 @@ with CONFIG.cd('log'):
                 'formatter': 'verbose',
                 'filename': CONFIG.get('file'),
             },
+            'queue': {
+                'level': CONFIG.get('level').get('console'),
+                'class': 'passbook.lib.log.QueueListenerHandler',
+                'handlers': [
+                    'cfg://handlers.console',
+                    # 'cfg://handlers.syslog',
+                    'cfg://handlers.file',
+                ],
+            }
         },
         'loggers': {
             'passbook': {
-                'handlers': LOG_HANDLERS,
+                'handlers': ['queue'],
                 'level': 'DEBUG',
                 'propagate': True,
             },
             'django': {
-                'handlers': LOG_HANDLERS,
+                'handlers': ['queue'],
                 'level': 'INFO',
                 'propagate': True,
             },
             'tasks': {
-                'handlers': LOG_HANDLERS,
+                'handlers': ['queue'],
                 'level': 'DEBUG',
                 'propagate': True,
            },
             'cherrypy': {
-                'handlers': LOG_HANDLERS,
+                'handlers': ['queue'],
                 'level': 'DEBUG',
                 'propagate': True,
             },
             'oauthlib': {
-                'handlers': LOG_HANDLERS,
+                'handlers': ['queue'],
                 'level': 'DEBUG',
                 'propagate': True,
             },
             'oauth2_provider': {
-                'handlers': LOG_HANDLERS,
+                'handlers': ['queue'],
                 'level': 'DEBUG',
                 'propagate': True,
             },
+            'daphne': {
+                'handlers': ['queue'],
+                'level': 'INFO',
+                'propagate': True,
+            }
         }
     }
@@ -24,5 +24,5 @@ def send_email(to_address, subject, template, context):
 @CELERY_APP.task()
 def clean_nonces():
     """Remove expired nonces"""
-    amount = Nonce.objects.filter(expires__lt=datetime.now(), expiring=True).delete()
+    amount, _ = Nonce.objects.filter(expires__lt=datetime.now(), expiring=True).delete()
     LOGGER.debug("Deleted expired %d nonces", amount)
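The one-line change above fixes a real bug: Django's `QuerySet.delete()` returns not a count but a 2-tuple of `(total_rows_deleted, {model_label: count})`, so the old code handed a tuple to the `%d` placeholder, which fails at format time. A quick illustration (the model label shown is illustrative):

# QuerySet.delete() return value (sketch)
deleted = Nonce.objects.filter(expires__lt=datetime.now(), expiring=True).delete()
# deleted == (3, {'passbook_core.Nonce': 3})  -- for example

amount, _ = deleted  # unpack the total, ignore the per-model breakdown
LOGGER.debug("Deleted expired %d nonces", amount)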
@@ -1,2 +1,2 @@
 """passbook hibp_policy"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """Passbook ldap app Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook lib"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'
@@ -8,6 +8,7 @@ from typing import Any

 import yaml
 from django.conf import ImproperlyConfigured
+from django.utils.autoreload import autoreload_started

 SEARCH_PATHS = [
     'passbook/lib/default.yml',
@@ -21,6 +22,8 @@ ENVIRONMENT = os.getenv('PASSBOOK_ENV', 'local')
 class ConfigLoader:
     """Search through SEARCH_PATHS and load configuration"""

+    loaded_file = []
+
     __config = {}
     __context_default = None
     __sub_dicts = []
@@ -69,6 +72,8 @@ class ConfigLoader:
         with open(path) as file:
             try:
                 self.update(self.__config, yaml.safe_load(file))
+                LOGGER.debug("Loaded %s", path)
+                self.loaded_file.append(path)
             except yaml.YAMLError as exc:
                 raise ImproperlyConfigured from exc
         except PermissionError as exc:
@@ -126,3 +131,10 @@

 CONFIG = ConfigLoader()

+# pylint: disable=unused-argument
+def signal_handler(sender, **kwargs):
+    """Add all loaded config files to autoreload watcher"""
+    for path in CONFIG.loaded_file:
+        sender.watch_file(path)
+
+autoreload_started.connect(signal_handler)
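The new `signal_handler` hooks into Django's `autoreload_started` signal (available as of Django 2.2): the sender is the active reloader, and `watch_file()` adds a path to its watch list, so editing a loaded YAML config restarts the dev server just like editing a .py file. A minimal standalone sketch of the same pattern (the watched path is hypothetical):

from django.utils.autoreload import autoreload_started

def watch_extra_files(sender, **kwargs):
    """Ask the dev-server reloader to watch non-Python files too."""
    sender.watch_file('/etc/passbook/config.yml')  # hypothetical path

autoreload_started.connect(watch_extra_files)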
@@ -35,6 +35,8 @@ redis: localhost/0
 error_report_enabled: true
 secret_key: 9$@r!d^1^jrn#fk#1#@ks#9&i$^s#1)_13%$rwjrhd=e8jfi_s

+domains:
+  - passbook.local
 primary_domain: 'localhost'

 passbook:
passbook/lib/log.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+"""QueueListener that can be configured from logging.dictConfig"""
+from atexit import register
+from logging.config import ConvertingList
+from logging.handlers import QueueHandler, QueueListener
+from queue import Queue
+
+
+def _resolve_handlers(_list):
+    """Evaluates ConvertingList by iterating over it"""
+    if not isinstance(_list, ConvertingList):
+        return _list
+
+    # Indexing the list performs the evaluation.
+    return [_list[i] for i in range(len(_list))]
+
+
+class QueueListenerHandler(QueueHandler):
+    """QueueListener that can be configured from logging.dictConfig"""
+
+    def __init__(self, handlers, auto_run=True, queue=Queue(-1)):
+        super().__init__(queue)
+        handlers = _resolve_handlers(handlers)
+        self._listener = QueueListener(
+            self.queue,
+            *handlers,
+            respect_handler_level=True)
+        if auto_run:
+            self.start()
+            register(self.stop)
+
+    def start(self):
+        """start background thread"""
+        self._listener.start()
+
+    def stop(self):
+        """stop background thread"""
+        self._listener.stop()
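The reason `_resolve_handlers()` exists: when `logging.config.dictConfig` encounters `cfg://handlers.*` references (as in the `'queue'` handler added to LOGGING above), it hands them to the handler class as a lazily-evaluated ConvertingList, which QueueListener cannot consume directly; indexing each element forces the conversion. A minimal sketch of wiring the handler up via dictConfig (the logger name and handler choices are illustrative):

import logging
from logging.config import dictConfig

dictConfig({
    'version': 1,
    'handlers': {
        'console': {'class': 'logging.StreamHandler'},
        'queue': {
            'class': 'passbook.lib.log.QueueListenerHandler',
            'handlers': ['cfg://handlers.console'],  # resolved lazily by dictConfig
        },
    },
    'loggers': {
        'passbook': {'handlers': ['queue'], 'level': 'DEBUG'},
    },
})

logging.getLogger('passbook').debug("emitted via the listener's background thread")

Records are enqueued by the QueueHandler on the calling thread and drained by the QueueListener on a background thread, so slow handlers (file, syslog) no longer block request handling; `atexit.register(self.stop)` flushes the queue at interpreter shutdown.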
@@ -1,2 +1,2 @@
 """passbook oauth_client Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook oauth_provider Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook otp Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook password_expiry"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook saml_idp Header"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'

@@ -1,2 +1,2 @@
 """passbook suspicious_policy"""
-__version__ = '0.1.27-beta'
+__version__ = '0.1.30-beta'