diff --git a/Pipfile b/Pipfile index 098ff0ddb..d4099f7f0 100644 --- a/Pipfile +++ b/Pipfile @@ -4,13 +4,9 @@ url = "https://pypi.org/simple" verify_ssl = true [packages] -asgiref = "*" -beautifulsoup4 = "*" celery = "*" -channels = "*" cherrypy = "*" colorlog = "*" -daphne = "*" defusedxml = "*" django = "*" django-cors-middleware = "*" @@ -23,7 +19,6 @@ django-otp = "*" django-recaptcha = "*" django-redis = "*" django-rest-framework = "*" -django-revproxy = "*" djangorestframework = "==3.9.4" drf-yasg = "*" ldap3 = "*" @@ -40,7 +35,6 @@ sentry-sdk = "*" service_identity = "*" signxml = "*" urllib3 = {extras = ["secure"],version = "*"} -websocket_client = "*" structlog = "*" uwsgi = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 0f83897db..7eae812b2 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b7dff8588b702e20c77b5e52a82e5c5c596cc25790b8906dc9eabe5b1b836893" + "sha256": "ed6099cb01ff4d6dd62131fa60476f0ce3071dfa5ebd2475b95c2d782d1c7727" }, "pipfile-spec": 6, "requires": { @@ -23,14 +23,6 @@ ], "version": "==2.5.1" }, - "asgiref": { - "hashes": [ - "sha256:a4ce726e6ef49cca13642ff49588530ebabcc47c669c7a95af37ea5a74b9b823", - "sha256:f62b1c88ebf5fe95db202a372982970edcf375c1513d7e70717df0750f5c2b98" - ], - "index": "pypi", - "version": "==3.2.2" - }, "asn1crypto": { "hashes": [ "sha256:d02bf8ea1b964a5ff04ac7891fe3a39150045d1e5e4fe99273ba677d11b92a04", @@ -45,29 +37,6 @@ ], "version": "==19.2.0" }, - "autobahn": { - "hashes": [ - "sha256:734385b00547448b3f30a752cbfd2900d15924d77dc4a1699b8bce1ea8899f39", - "sha256:7ab1e51a9c9bf0aa6ccbe765635b79b9a659019d38904fa3c2072670f097a25d" - ], - "version": "==19.10.1" - }, - "automat": { - "hashes": [ - "sha256:cbd78b83fa2d81fe2a4d23d258e1661dd7493c9a50ee2f1a5b2cac61c1793b0e", - "sha256:fdccab66b68498af9ecfa1fa43693abe546014dd25cf28543cbe9d1334916a58" - ], - "version": "==0.7.0" - }, - "beautifulsoup4": { - "hashes": [ - 
"sha256:05668158c7b85b791c5abde53e50265e16f98ad601c402ba44d70f96c4159612", - "sha256:25288c9e176f354bf277c0a10aa96c782a6a18a17122dba2e8cec4a97e03343b", - "sha256:f040590be10520f2ea4c2ae8c3dae441c7cfff5308ec9d58a0ec0c1b8f81d469" - ], - "index": "pypi", - "version": "==4.8.0" - }, "billiard": { "hashes": [ "sha256:01afcb4e7c4fd6480940cfbd4d9edc19d7a7509d6ada533984d0d0f49901ec82", @@ -123,14 +92,6 @@ ], "version": "==1.12.3" }, - "channels": { - "hashes": [ - "sha256:5759b4b89fc354101299e5f24b49e83421c12c653c913161858be4c24364a26d", - "sha256:d0289e4a3aa6f1df34693b14d5c1d147832a16622c13e1f1eff5b22ff2f2c748" - ], - "index": "pypi", - "version": "==2.3.0" - }, "chardet": { "hashes": [ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", @@ -161,13 +122,6 @@ "index": "pypi", "version": "==4.0.2" }, - "constantly": { - "hashes": [ - "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35", - "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d" - ], - "version": "==15.1.0" - }, "coreapi": { "hashes": [ "sha256:46145fcc1f7017c076a2ef684969b641d18a2991051fddec9458ad3f78ffc1cb", @@ -203,14 +157,6 @@ ], "version": "==2.7" }, - "daphne": { - "hashes": [ - "sha256:2329b7a74b5559f7ea012879c10ba945c3a53df7d8d2b5932a904e3b4c9abcc2", - "sha256:3cae286a995ae5b127d7de84916f0480cb5be19f81125b6a150b8326250dadd5" - ], - "index": "pypi", - "version": "==2.3.0" - }, "defusedxml": { "hashes": [ "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93", @@ -302,14 +248,6 @@ "index": "pypi", "version": "==0.1.0" }, - "django-revproxy": { - "hashes": [ - "sha256:0b539736e438aad3cd8b34563125783678f65bcb847970c95d8e9820e6dc88b3", - "sha256:b2c6244aaf53fbbecb79084bf507761754b36895c0f6d01349066e9a355e8455" - ], - "index": "pypi", - "version": "==0.9.15" - }, "djangorestframework": { "hashes": [ "sha256:376f4b50340a46c15ae15ddd0c853085f4e66058f97e4dbe7d43ed62f5e60651", @@ -339,13 +277,6 @@ ], "version": "==0.16.0" }, - 
"hyperlink": { - "hashes": [ - "sha256:4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654", - "sha256:ab4a308feb039b04f855a020a6eda3b18ca5a68e6d8f8c899cbe9e653721d04f" - ], - "version": "==19.0.0" - }, "idna": { "hashes": [ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", @@ -360,13 +291,6 @@ ], "version": "==0.23" }, - "incremental": { - "hashes": [ - "sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f", - "sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3" - ], - "version": "==17.5.0" - }, "inflection": { "hashes": [ "sha256:18ea7fb7a7d152853386523def08736aa8c32636b047ade55f7578c4edeb16ca" @@ -628,13 +552,6 @@ ], "version": "==3.9.0" }, - "pyhamcrest": { - "hashes": [ - "sha256:6b672c02fdf7470df9674ab82263841ce8333fb143f32f021f6cb26f0e512420", - "sha256:8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd" - ], - "version": "==1.9.0" - }, "pyjwkest": { "hashes": [ "sha256:5560fd5ba08655f29ff6ad1df1e15dc05abc9d976fcbcec8d2b5167f49b70222" @@ -773,13 +690,6 @@ ], "version": "==1.12.0" }, - "soupsieve": { - "hashes": [ - "sha256:605f89ad5fdbfefe30cdc293303665eff2d188865d4dbe4eb510bba1edfbfce3", - "sha256:b91d676b330a0ebd5b21719cb6e9b57c57d433671f65b9c28dd3461d9a1ed0b6" - ], - "version": "==1.9.4" - }, "sqlparse": { "hashes": [ "sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177", @@ -802,37 +712,6 @@ ], "version": "==1.14.1" }, - "twisted": { - "hashes": [ - "sha256:02214ef6f125804969aedd55daccea57060b98dae6a2aa0a4cb60c4d0acb8a2c", - "sha256:15b51047ab116ee61d791cf9fe6f037f35e909a6d344ccb437d1691627c4d8a1", - "sha256:17704d98d58c9c52d97e88570732e4c094a93fe5df937d01b759bab593345eec", - "sha256:222e0cfd60b0c867dd303bce6355a3ffac46574079dff11ae7a1775235ad12c8", - "sha256:23090c9fcec01ce4e102912a39eb4645b2bf916abe459804f87853d977ced6e3", - "sha256:5102fc2bf0d870c1e217aa09ed7a48b633cc579950a31ecae9cecc556ebffdf2", - 
"sha256:6bc71d5a2320576a3ac7f2dac7802c290fcf9f1972c59f9ef5c5b85b8bac1e1e", - "sha256:6c7703b62de08fd5873d60e6ed30478cdb39e3a37b1ead3a5d2fed10deb6e112", - "sha256:6ca398abd58730070e9bc34e8a01d1198438b2ff130e95492090a2fec5fb683b", - "sha256:98840f28c44894f44dc597747b4cddc740197dc6f6f18ba4dd810422094e35cb", - "sha256:998e3baf509c7cf7973b8174c1050ac10f6a8bc1aaf0178ad6a7c422c75a0c68", - "sha256:a5f2de00c6630c8f5ad32fca64fc4c853536c21e9ea8d0d2ae54804ef5836b9c", - "sha256:aad65a24b27253eb94f2749131a872487b093c599c5873c03d90a65cc9b8a2fc", - "sha256:ab788465701f553f764f4442d22b850f39a6a6abd4861e70c05b4c27119c9b50", - "sha256:c7244e24fcb72f838be57d3e117ad7df135ff5af4c9d4c565417d671cd1e68c9", - "sha256:d5db93026568f60cacdc0615fcd21d46f694a6bfad0ef3ff53cde2b4bb85a39d", - "sha256:da92426002703b02d8fccff3acfea2d8baf76a9052e8c55ea76d0407eeaa06ce", - "sha256:f4f0af14d288140ecb00861a3bd1e0b94ffdc63057cc1abe8b9dc84f6b6dcf18", - "sha256:f985f31e3244d18610816b55becf8fbf445c8e30fe0731500cadaf19f296baf0" - ], - "version": "==19.7.0" - }, - "txaio": { - "hashes": [ - "sha256:67e360ac73b12c52058219bb5f8b3ed4105d2636707a36a7cdafb56fe06db7fe", - "sha256:b6b235d432cc58ffe111b43e337db71a5caa5d3eaa88f0eacf60b431c7626ef5" - ], - "version": "==18.8.1" - }, "uritemplate": { "hashes": [ "sha256:01c69f4fe8ed503b2951bef85d996a9d22434d2431584b5b107b2981ff416fbd", @@ -866,14 +745,6 @@ ], "version": "==1.3.0" }, - "websocket-client": { - "hashes": [ - "sha256:1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9", - "sha256:1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a" - ], - "index": "pypi", - "version": "==0.56.0" - }, "zc.lockfile": { "hashes": [ "sha256:307ad78227e48be260e64896ec8886edc7eae22d8ec53e4d528ab5537a83203b", @@ -887,40 +758,6 @@ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" ], "version": "==0.6.0" - }, - "zope.interface": { - "hashes": [ - "sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c", - 
"sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b", - "sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02", - "sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f", - "sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5", - "sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375", - "sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487", - "sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2", - "sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0", - "sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b", - "sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63", - "sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39", - "sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745", - "sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc", - "sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2", - "sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa", - "sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1", - "sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc", - "sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98", - "sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97", - "sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab", - "sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127", - "sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d", - "sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe", - "sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891", - "sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1", - "sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b", 
- "sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966", - "sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317" - ], - "version": "==4.6.0" } }, "develop": { diff --git a/passbook/admin/templates/administration/overview.html b/passbook/admin/templates/administration/overview.html index 98ae36cde..72b06ea81 100644 --- a/passbook/admin/templates/administration/overview.html +++ b/passbook/admin/templates/administration/overview.html @@ -179,8 +179,8 @@ {% if worker_count < 1%} - {{ worker_count }} + {{ worker_count }} {% else %} {{ worker_count }} {% endif %} diff --git a/passbook/app_gw/.DS_Store b/passbook/app_gw/.DS_Store index badfb0a20..45dfba929 100644 Binary files a/passbook/app_gw/.DS_Store and b/passbook/app_gw/.DS_Store differ diff --git a/passbook/app_gw/apps.py b/passbook/app_gw/apps.py index 9c696ae3e..b3c8cec2b 100644 --- a/passbook/app_gw/apps.py +++ b/passbook/app_gw/apps.py @@ -1,6 +1,4 @@ """passbook Application Security Gateway app""" -from importlib import import_module - from django.apps import AppConfig @@ -10,7 +8,4 @@ class PassbookApplicationApplicationGatewayConfig(AppConfig): name = 'passbook.app_gw' label = 'passbook_app_gw' verbose_name = 'passbook Application Security Gateway' - mountpoint = 'app_gw/' - - def ready(self): - import_module('passbook.app_gw.signals') + # mountpoint = 'app_gw/' diff --git a/passbook/app_gw/asgi.py b/passbook/app_gw/asgi.py deleted file mode 100644 index 38bf72a05..000000000 --- a/passbook/app_gw/asgi.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -ASGI entrypoint. Configures Django and then runs the application -defined in the ASGI_APPLICATION setting. 
-""" - -import os - -import django -from channels.routing import get_default_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.root.settings") -django.setup() -application = get_default_application() diff --git a/passbook/app_gw/management/__init__.py b/passbook/app_gw/management/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/passbook/app_gw/management/commands/__init__.py b/passbook/app_gw/management/commands/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/passbook/app_gw/management/commands/app_gw_web.py b/passbook/app_gw/management/commands/app_gw_web.py deleted file mode 100644 index 7ce281304..000000000 --- a/passbook/app_gw/management/commands/app_gw_web.py +++ /dev/null @@ -1,29 +0,0 @@ -"""passbook app_gw webserver management command""" - -from daphne.cli import CommandLineInterface -from django.core.management.base import BaseCommand -from django.utils import autoreload -from structlog import get_logger - -from passbook.lib.config import CONFIG - -LOGGER = get_logger(__name__) - - -class Command(BaseCommand): - """Run Daphne Webserver for app_gw""" - - def handle(self, *args, **options): - """passbook daphne server""" - autoreload.run_with_reloader(self.daphne_server) - - def daphne_server(self): - """Run daphne server within autoreload""" - autoreload.raise_last_exception() - CommandLineInterface().run([ - '-p', str(CONFIG.y('app_gw.port', 8000)), - '-b', CONFIG.y('app_gw.listen', '0.0.0.0'), # nosec - '--access-log', '/dev/null', - '--application-close-timeout', '500', - 'passbook.app_gw.asgi:application' - ]) diff --git a/passbook/app_gw/middleware.py b/passbook/app_gw/middleware.py deleted file mode 100644 index 6cd2ac729..000000000 --- a/passbook/app_gw/middleware.py +++ /dev/null @@ -1,33 +0,0 @@ -"""passbook app_gw middleware""" -from django.views.generic import RedirectView - -from passbook.app_gw.proxy.handler import RequestHandler -from passbook.lib.config import 
CONFIG - - -class ApplicationGatewayMiddleware: - """Check if request should be proxied or handeled normally""" - - _app_gw_cache = {} - - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - # Rudimentary cache - host_header = request.META.get('HTTP_HOST') - if host_header not in self._app_gw_cache: - self._app_gw_cache[host_header] = RequestHandler.find_app_gw_for_request(request) - if self._app_gw_cache[host_header]: - return self.dispatch(request, self._app_gw_cache[host_header]) - return self.get_response(request) - - def dispatch(self, request, app_gw): - """Build proxied request and pass to upstream""" - handler = RequestHandler(app_gw, request) - - if not handler.check_permission(): - to_url = 'https://%s/?next=%s' % (CONFIG.y('domains')[0], request.get_full_path()) - return RedirectView.as_view(url=to_url)(request) - - return handler.get_response() diff --git a/passbook/app_gw/proxy/__init__.py b/passbook/app_gw/proxy/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/passbook/app_gw/proxy/exceptions.py b/passbook/app_gw/proxy/exceptions.py deleted file mode 100644 index 9d2b0dc8a..000000000 --- a/passbook/app_gw/proxy/exceptions.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Exception classes""" - -class ReverseProxyException(Exception): - """Base for revproxy exception""" - - -class InvalidUpstream(ReverseProxyException): - """Invalid upstream set""" diff --git a/passbook/app_gw/proxy/handler.py b/passbook/app_gw/proxy/handler.py deleted file mode 100644 index debb1981e..000000000 --- a/passbook/app_gw/proxy/handler.py +++ /dev/null @@ -1,233 +0,0 @@ -"""passbook app_gw request handler""" -import mimetypes -from random import SystemRandom -from urllib.parse import urlparse - -import certifi -import urllib3 -from django.core.cache import cache -from django.utils.http import urlencode -from structlog import get_logger - -from passbook.app_gw.models import ApplicationGatewayProvider -from 
passbook.app_gw.proxy.exceptions import InvalidUpstream -from passbook.app_gw.proxy.response import get_django_response -from passbook.app_gw.proxy.rewrite import Rewriter -from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers -from passbook.core.models import Application -from passbook.policy.engine import PolicyEngine - -SESSION_UPSTREAM_KEY = 'passbook_app_gw_upstream' -IGNORED_HOSTNAMES_KEY = 'passbook_app_gw_ignored' -LOGGER = get_logger(__name__) -QUOTE_SAFE = r'<.;>\(}*+|~=-$/_:^@)[{]&\'!,"`' -ERRORS_MESSAGES = { - 'upstream-no-scheme': ("Upstream URL scheme must be either " - "'http' or 'https' (%s).") -} -HTTP_NO_VERIFY = urllib3.PoolManager() -HTTP = urllib3.PoolManager( - cert_reqs='CERT_REQUIRED', - ca_certs=certifi.where()) -IGNORED_HOSTS = cache.get(IGNORED_HOSTNAMES_KEY, []) -POLICY_CACHE = {} - -class RequestHandler: - """Forward requests""" - - _parsed_url = None - _request_headers = None - - def __init__(self, app_gw, request): - self.app_gw = app_gw - self.request = request - if self.app_gw.pk not in POLICY_CACHE: - POLICY_CACHE[self.app_gw.pk] = self.app_gw.application.policies.all() - - @staticmethod - def find_app_gw_for_request(request): - """Check if a request should be proxied or forwarded to passbook""" - # Check if hostname is in cached list of ignored hostnames - # This saves us having to query the database on each request - host_header = request.META.get('HTTP_HOST') - if host_header in IGNORED_HOSTS: - # LOGGER.debug("%s is ignored", host_header) - return False - # Look through all ApplicationGatewayProviders and check hostnames - matches = ApplicationGatewayProvider.objects.filter( - server_name__contains=[host_header], - enabled=True) - if not matches.exists(): - # Mo matching Providers found, add host header to ignored list - IGNORED_HOSTS.append(host_header) - cache.set(IGNORED_HOSTNAMES_KEY, IGNORED_HOSTS) - # LOGGER.debug("Ignoring %s", host_header) - return False - # At this point we're certain there's 
a matching ApplicationGateway - if len(matches) > 1: - # This should never happen - raise ValueError - app_gw = matches.first() - try: - # Check if ApplicationGateway is associated with application - getattr(app_gw, 'application') - if app_gw: - return app_gw - except Application.DoesNotExist: - pass - # LOGGER.debug("ApplicationGateway not associated with Application") - return True - - def _get_upstream(self): - """Choose random upstream and save in session""" - if SESSION_UPSTREAM_KEY not in self.request.session: - self.request.session[SESSION_UPSTREAM_KEY] = {} - if self.app_gw.pk not in self.request.session[SESSION_UPSTREAM_KEY]: - upstream_index = int(SystemRandom().random() * len(self.app_gw.upstream)) - self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk] = upstream_index - return self.app_gw.upstream[self.request.session[SESSION_UPSTREAM_KEY][self.app_gw.pk]] - - def get_upstream(self): - """Get upstream as parsed url""" - upstream = self._get_upstream() - - self._parsed_url = urlparse(upstream) - - if self._parsed_url.scheme not in ('http', 'https'): - raise InvalidUpstream(ERRORS_MESSAGES['upstream-no-scheme'] % - upstream) - - return upstream - - def _format_path_to_redirect(self): - # LOGGER.debug("Path before: %s", self.request.get_full_path()) - rewriter = Rewriter(self.app_gw, self.request) - after = rewriter.build() - # LOGGER.debug("Path after: %s", after) - return after - - def get_proxy_request_headers(self): - """Get normalized headers for the upstream - Gets all headers from the original request and normalizes them. - Normalization occurs by removing the prefix ``HTTP_`` and - replacing and ``_`` by ``-``. Example: ``HTTP_ACCEPT_ENCODING`` - becames ``Accept-Encoding``. - .. 
versionadded:: 0.9.1 - :param request: The original HTTPRequest instance - :returns: Normalized headers for the upstream - """ - return normalize_request_headers(self.request) - - def get_request_headers(self): - """Return request headers that will be sent to upstream. - The header REMOTE_USER is set to the current user - if AuthenticationMiddleware is enabled and - the view's add_remote_user property is True. - .. versionadded:: 0.9.8 - """ - request_headers = self.get_proxy_request_headers() - if not self.app_gw.authentication_header: - return request_headers - request_headers[self.app_gw.authentication_header] = self.request.user.get_username() - # LOGGER.debug("%s set", self.app_gw.authentication_header) - - return request_headers - - def check_permission(self): - """Check if user is authenticated and has permission to access app""" - if not hasattr(self.request, 'user'): - return False - if not self.request.user.is_authenticated: - return False - policy_engine = PolicyEngine(POLICY_CACHE[self.app_gw.pk]) - policy_engine.for_user(self.request.user).with_request(self.request).build() - passing, _messages = policy_engine.result - - return passing - - def get_encoded_query_params(self): - """Return encoded query params to be used in proxied request""" - get_data = encode_items(self.request.GET.lists()) - return urlencode(get_data) - - def _created_proxy_response(self, path): - request_payload = self.request.body - - # LOGGER.debug("Request headers: %s", self._request_headers) - - request_url = self.get_upstream() + path - # LOGGER.debug("Request URL: %s", request_url) - - if self.request.GET: - request_url += '?' 
+ self.get_encoded_query_params() - # LOGGER.debug("Request URL: %s", request_url) - - http = HTTP - if not self.app_gw.upstream_ssl_verification: - http = HTTP_NO_VERIFY - - try: - proxy_response = http.urlopen(self.request.method, - request_url, - redirect=False, - retries=None, - headers=self._request_headers, - body=request_payload, - decode_content=False, - preload_content=False) - # LOGGER.debug("Proxy response header: %s", - # proxy_response.getheaders()) - except urllib3.exceptions.HTTPError as error: - LOGGER.exception(error) - raise - - return proxy_response - - def _replace_host_on_redirect_location(self, proxy_response): - location = proxy_response.headers.get('Location') - if location: - if self.request.is_secure(): - scheme = 'https://' - else: - scheme = 'http://' - request_host = scheme + self.request.META.get('HTTP_HOST') - - upstream_host_http = 'http://' + self._parsed_url.netloc - upstream_host_https = 'https://' + self._parsed_url.netloc - - location = location.replace(upstream_host_http, request_host) - location = location.replace(upstream_host_https, request_host) - proxy_response.headers['Location'] = location - # LOGGER.debug("Proxy response LOCATION: %s", - # proxy_response.headers['Location']) - - def _set_content_type(self, proxy_response): - content_type = proxy_response.headers.get('Content-Type') - if not content_type: - content_type = (mimetypes.guess_type(self.request.path) - [0] or self.app_gw.default_content_type) - proxy_response.headers['Content-Type'] = content_type - # LOGGER.debug("Proxy response CONTENT-TYPE: %s", - # proxy_response.headers['Content-Type']) - - def get_response(self): - """Pass request to upstream and return response""" - self._request_headers = self.get_request_headers() - - path = self._format_path_to_redirect() - proxy_response = self._created_proxy_response(path) - - self._replace_host_on_redirect_location(proxy_response) - self._set_content_type(proxy_response) - response = 
get_django_response(proxy_response, strict_cookies=False) - - # If response has a 'Location' header, we rewrite that location as well - if 'Location' in response: - LOGGER.debug("Rewriting Location header") - for server_name in self.app_gw.server_name: - response['Location'] = response['Location'].replace( - self._parsed_url.hostname, server_name) - LOGGER.debug(response['Location']) - - # LOGGER.debug("RESPONSE RETURNED: %s", response) - return response diff --git a/passbook/app_gw/proxy/response.py b/passbook/app_gw/proxy/response.py deleted file mode 100644 index 6088c52d3..000000000 --- a/passbook/app_gw/proxy/response.py +++ /dev/null @@ -1,62 +0,0 @@ -"""response functions from django-revproxy""" -from django.http import HttpResponse, StreamingHttpResponse -from structlog import get_logger - -from passbook.app_gw.proxy.utils import (cookie_from_string, - set_response_headers, should_stream) - -#: Default number of bytes that are going to be read in a file lecture -DEFAULT_AMT = 2 ** 16 - -logger = get_logger(__name__) - - -def get_django_response(proxy_response, strict_cookies=False): - """This method is used to create an appropriate response based on the - Content-Length of the proxy_response. 
If the content is bigger than - MIN_STREAMING_LENGTH, which is found on utils.py, - than django.http.StreamingHttpResponse will be created, - else a django.http.HTTPResponse will be created instead - - :param proxy_response: An Instance of urllib3.response.HTTPResponse that - will create an appropriate response - :param strict_cookies: Whether to only accept RFC-compliant cookies - :returns: Returns an appropriate response based on the proxy_response - content-length - """ - status = proxy_response.status - headers = proxy_response.headers - - logger.debug('Proxy response headers: %s', headers) - - content_type = headers.get('Content-Type') - - logger.debug('Content-Type: %s', content_type) - - if should_stream(proxy_response): - logger.info('Content-Length is bigger than %s', DEFAULT_AMT) - response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT), - status=status, - content_type=content_type) - else: - content = proxy_response.data or b'' - response = HttpResponse(content, status=status, - content_type=content_type) - - logger.info('Normalizing response headers') - set_response_headers(response, headers) - - logger.debug('Response headers: %s', getattr(response, '_headers')) - - cookies = proxy_response.headers.getlist('set-cookie') - logger.info('Checking for invalid cookies') - for cookie_string in cookies: - cookie_dict = cookie_from_string(cookie_string, - strict_cookies=strict_cookies) - # if cookie is invalid cookie_dict will be None - if cookie_dict: - response.set_cookie(**cookie_dict) - - logger.debug('Response cookies: %s', response.cookies) - - return response diff --git a/passbook/app_gw/proxy/rewrite.py b/passbook/app_gw/proxy/rewrite.py deleted file mode 100644 index 20eac9a9a..000000000 --- a/passbook/app_gw/proxy/rewrite.py +++ /dev/null @@ -1,42 +0,0 @@ -"""passbook app_gw rewriter""" - -from passbook.app_gw.models import RewriteRule - -RULE_CACHE = {} - -class Context: - """Empty class which we dynamically add attributes to""" - -class 
Rewriter: - """Apply rewrites""" - - __application = None - __request = None - - def __init__(self, application, request): - self.__application = application - self.__request = request - if self.__application.pk not in RULE_CACHE: - RULE_CACHE[self.__application.pk] = RewriteRule.objects.filter( - provider__in=[self.__application]) - - def __build_context(self, matches): - """Build object with .0, .1, etc as groups and give access to request""" - context = Context() - for index, group_match in enumerate(matches.groups()): - setattr(context, "g%d" % (index + 1), group_match) - setattr(context, 'request', self.__request) - return context - - def build(self): - """Run all rules over path and return final path""" - path = self.__request.get_full_path() - for rule in RULE_CACHE[self.__application.pk]: - matches = rule.compiled_matcher.search(path) - if not matches: - continue - replace_context = self.__build_context(matches) - path = rule.replacement.format(context=replace_context) - if rule.halt: - return path - return path diff --git a/passbook/app_gw/proxy/utils.py b/passbook/app_gw/proxy/utils.py deleted file mode 100644 index 4ec691bd0..000000000 --- a/passbook/app_gw/proxy/utils.py +++ /dev/null @@ -1,226 +0,0 @@ -"""Utils from django-revproxy, slightly adjusted""" -import re -from wsgiref.util import is_hop_by_hop - -from structlog import get_logger - -try: - from http.cookies import SimpleCookie - COOKIE_PREFIX = '' -except ImportError: - from Cookie import SimpleCookie - COOKIE_PREFIX = 'Set-Cookie: ' - - -#: List containing string constant that are used to represent headers that can -#: be ignored in the required_header function -IGNORE_HEADERS = ( - 'HTTP_ACCEPT_ENCODING', # We want content to be uncompressed so - # we remove the Accept-Encoding from - # original request - 'HTTP_HOST', - 'HTTP_REMOTE_USER', -) - - -# Default from HTTP RFC 2616 -# See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 -#: Variable that represent the default 
charset used -DEFAULT_CHARSET = 'latin-1' - -#: List containing string constants that represents possible html content type -HTML_CONTENT_TYPES = ( - 'text/html', - 'application/xhtml+xml' -) - -#: Variable used to represent a minimal content size required for response -#: to be turned into stream -MIN_STREAMING_LENGTH = 4 * 1024 # 4KB - -#: Regex used to find charset in a html content type -_get_charset_re = re.compile(r';\s*charset=(?P[^\s;]+)', re.I) - - -def is_html_content_type(content_type): - """Function used to verify if the parameter is a proper html content type - - :param content_type: String variable that represent a content-type - :returns: A boolean value stating if the content_type is a valid html - content type - """ - for html_content_type in HTML_CONTENT_TYPES: - if content_type.startswith(html_content_type): - return True - - return False - - -def should_stream(proxy_response): - """Function to verify if the proxy_response must be converted into - a stream.This will be done by checking the proxy_response content-length - and verify if its length is bigger than one stipulated - by MIN_STREAMING_LENGTH. 
- - :param proxy_response: An Instance of urllib3.response.HTTPResponse - :returns: A boolean stating if the proxy_response should - be treated as a stream - """ - content_type = proxy_response.headers.get('Content-Type') - - if is_html_content_type(content_type): - return False - - try: - content_length = int(proxy_response.headers.get('Content-Length', 0)) - except ValueError: - content_length = 0 - - if not content_length or content_length > MIN_STREAMING_LENGTH: - return True - - return False - - -def get_charset(content_type): - """Function used to retrieve the charset from a content-type.If there is no - charset in the content type then the charset defined on DEFAULT_CHARSET - will be returned - - :param content_type: A string containing a Content-Type header - :returns: A string containing the charset - """ - if not content_type: - return DEFAULT_CHARSET - - matched = _get_charset_re.search(content_type) - if matched: - # Extract the charset and strip its double quotes - return matched.group('charset').replace('"', '') - return DEFAULT_CHARSET - - -def required_header(header): - """Function that verify if the header parameter is a essential header - - :param header: A string represented a header - :returns: A boolean value that represent if the header is required - """ - if header in IGNORE_HEADERS: - return False - - if header.startswith('HTTP_') or header == 'CONTENT_TYPE': - return True - - return False - - -def set_response_headers(response, response_headers): - """Set response's header""" - for header, value in response_headers.items(): - if is_hop_by_hop(header) or header.lower() == 'set-cookie': - continue - - response[header.title()] = value - - logger.debug('Response headers: %s', getattr(response, '_headers')) - - -def normalize_request_headers(request): - """Function used to transform header, replacing 'HTTP\\_' to '' - and replace '_' to '-' - - :param request: A HttpRequest that will be transformed - :returns: A dictionary with the normalized 
headers - """ - norm_headers = {} - for header, value in request.META.items(): - if required_header(header): - norm_header = header.replace('HTTP_', '').title().replace('_', '-') - norm_headers[norm_header] = value - - return norm_headers - - -def encode_items(items): - """Function that encode all elements in the list of items passed as - a parameter - - :param items: A list of tuple - :returns: A list of tuple with all items encoded in 'utf-8' - """ - encoded = [] - for key, values in items: - for value in values: - encoded.append((key.encode('utf-8'), value.encode('utf-8'))) - return encoded - - -logger = get_logger() - - -def cookie_from_string(cookie_string, strict_cookies=False): - """Parser for HTTP header set-cookie - The return from this function will be used as parameters for - django's response.set_cookie method. Because set_cookie doesn't - have parameter comment, this cookie attribute will be ignored. - - :param cookie_string: A string representing a valid cookie - :param strict_cookies: Whether to only accept RFC-compliant cookies - :returns: A dictionary containing the cookie_string attributes - """ - - if strict_cookies: - - cookies = SimpleCookie(COOKIE_PREFIX + cookie_string) - if not cookies.keys(): - return None - cookie_name, = cookies.keys() - cookie_dict = {k: v for k, v in cookies[cookie_name].items() - if v and k != 'comment'} - cookie_dict['key'] = cookie_name - cookie_dict['value'] = cookies[cookie_name].value - return cookie_dict - valid_attrs = ('path', 'domain', 'comment', 'expires', - 'max_age', 'httponly', 'secure') - - cookie_dict = {} - - cookie_parts = cookie_string.split(';') - try: - cookie_dict['key'], cookie_dict['value'] = \ - cookie_parts[0].split('=', 1) - cookie_dict['value'] = cookie_dict['value'].replace('"', '') - except ValueError: - logger.warning('Invalid cookie: `%s`', cookie_string) - return None - - if cookie_dict['value'].startswith('='): - logger.warning('Invalid cookie: `%s`', cookie_string) - return None - - 
for part in cookie_parts[1:]: - if '=' in part: - attr, value = part.split('=', 1) - value = value.strip() - else: - attr = part - value = '' - - attr = attr.strip().lower() - if not attr: - continue - - if attr in valid_attrs: - if attr in ('httponly', 'secure'): - cookie_dict[attr] = True - elif attr in 'comment': - # ignoring comment attr as explained in the - # function docstring - continue - else: - cookie_dict[attr] = value - else: - logger.warning('Unknown cookie attribute %s', attr) - - return cookie_dict diff --git a/passbook/app_gw/settings.py b/passbook/app_gw/settings.py deleted file mode 100644 index 2fabd10ef..000000000 --- a/passbook/app_gw/settings.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Application Security Gateway settings""" -INSTALLED_APPS = [ - 'channels' -] -ASGI_APPLICATION = "passbook.app_gw.websocket.routing.application" diff --git a/passbook/app_gw/signals.py b/passbook/app_gw/signals.py deleted file mode 100644 index 19a7600f8..000000000 --- a/passbook/app_gw/signals.py +++ /dev/null @@ -1,19 +0,0 @@ -"""passbook app_gw cache clean signals""" - -from django.core.cache import cache -from django.db.models.signals import post_save -from django.dispatch import receiver -from structlog import get_logger - -from passbook.app_gw.models import ApplicationGatewayProvider -from passbook.app_gw.proxy.handler import IGNORED_HOSTNAMES_KEY - -LOGGER = get_logger(__name__) - -@receiver(post_save) -# pylint: disable=unused-argument -def invalidate_app_gw_cache(sender, instance, **kwargs): - """Invalidate Policy cache when app_gw is updated""" - if isinstance(instance, ApplicationGatewayProvider): - LOGGER.debug("Invalidating cache for ignored hostnames") - cache.delete(IGNORED_HOSTNAMES_KEY) diff --git a/passbook/app_gw/urls.py b/passbook/app_gw/urls.py deleted file mode 100644 index b9798bcb4..000000000 --- a/passbook/app_gw/urls.py +++ /dev/null @@ -1,2 +0,0 @@ -"""passbook app_gw urls""" -urlpatterns = [] diff --git 
a/passbook/app_gw/websocket/__init__.py b/passbook/app_gw/websocket/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/passbook/app_gw/websocket/consumer.py b/passbook/app_gw/websocket/consumer.py deleted file mode 100644 index 303aa93d0..000000000 --- a/passbook/app_gw/websocket/consumer.py +++ /dev/null @@ -1,83 +0,0 @@ -"""websocket proxy consumer""" -import threading -from ssl import CERT_NONE - -import websocket -from channels.generic.websocket import WebsocketConsumer -from structlog import get_logger - -from passbook.app_gw.models import ApplicationGatewayProvider - -LOGGER = get_logger(__name__) - -class ProxyConsumer(WebsocketConsumer): - """Proxy websocket connection to upstream""" - - _headers_dict = {} - _app_gw = None - _client = None - _thread = None - - def _fix_headers(self, input_dict): - """Fix headers from bytestrings to normal strings""" - return { - key.decode('utf-8'): value.decode('utf-8') - for key, value in dict(input_dict).items() - } - - def connect(self): - """Extract host header, lookup in database and proxy connection""" - self._headers_dict = self._fix_headers(dict(self.scope.get('headers'))) - host = self._headers_dict.pop('host') - query_string = self.scope.get('query_string').decode('utf-8') - matches = ApplicationGatewayProvider.objects.filter( - server_name__contains=[host], - enabled=True) - if matches.exists(): - self._app_gw = matches.first() - # TODO: Get upstream that starts with wss or - upstream = self._app_gw.upstream[0].replace('http', 'ws') + self.scope.get('path') - if query_string: - upstream += '?' 
+ query_string - sslopt = {} - if not self._app_gw.upstream_ssl_verification: - sslopt = {"cert_reqs": CERT_NONE} - self._client = websocket.WebSocketApp( - url=upstream, - subprotocols=self.scope.get('subprotocols'), - header=self._headers_dict, - on_message=self._client_on_message_handler(), - on_error=self._client_on_error_handler(), - on_close=self._client_on_close_handler(), - on_open=self._client_on_open_handler()) - LOGGER.debug("Accepting connection for %s", host) - self._thread = threading.Thread(target=lambda: self._client.run_forever(sslopt=sslopt)) - self._thread.start() - - def _client_on_open_handler(self): - return lambda ws: self.accept(self._client.sock.handshake_response.subprotocol) - - def _client_on_message_handler(self): - # pylint: disable=unused-argument,invalid-name - def message_handler(ws, message): - if isinstance(message, str): - self.send(text_data=message) - else: - self.send(bytes_data=message) - return message_handler - - def _client_on_error_handler(self): - return lambda ws, error: print(error) - - def _client_on_close_handler(self): - return lambda ws: self.disconnect(0) - - def disconnect(self, code): - self._client.close() - - def receive(self, text_data=None, bytes_data=None): - if text_data: - opcode = websocket.ABNF.OPCODE_TEXT - if bytes_data: - opcode = websocket.ABNF.OPCODE_BINARY - self._client.send(text_data or bytes_data, opcode) diff --git a/passbook/app_gw/websocket/routing.py b/passbook/app_gw/websocket/routing.py deleted file mode 100644 index bbf7b7a83..000000000 --- a/passbook/app_gw/websocket/routing.py +++ /dev/null @@ -1,17 +0,0 @@ -"""app_gw websocket proxy""" -from channels.auth import AuthMiddlewareStack -from channels.routing import ProtocolTypeRouter, URLRouter -from django.conf.urls import url - -from passbook.app_gw.websocket.consumer import ProxyConsumer - -websocket_urlpatterns = [ - url(r'^(.*)$', ProxyConsumer), -] - -application = ProtocolTypeRouter({ - # (http->django views is added by default) - 
'websocket': AuthMiddlewareStack( - URLRouter(websocket_urlpatterns) - ), -}) diff --git a/passbook/policy/engine.py b/passbook/policy/engine.py index 85fff1925..57f2e8daa 100644 --- a/passbook/policy/engine.py +++ b/passbook/policy/engine.py @@ -1,7 +1,7 @@ """passbook policy engine""" from multiprocessing import Pipe from multiprocessing.connection import Connection -from typing import List, Tuple +from typing import List, Tuple from django.core.cache import cache from django.http import HttpRequest @@ -14,14 +14,11 @@ from passbook.policy.struct import PolicyResult, PolicyRequest LOGGER = get_logger() def _cache_key(policy, user): - return "policy_%s#%s" % (policy.uuid, user.pk) + return f"policy_{policy.pk}#{user.pk}" class PolicyEngine: """Orchestrate policy checking, launch tasks and return result""" - # __group = None - # __cached = None - policies: List[Policy] = [] __request: HttpRequest __user: User @@ -53,19 +50,19 @@ class PolicyEngine: for policy in self.policies: cached_policy = cache.get(_cache_key(policy, self.__user), None) if cached_policy: - LOGGER.debug("Taking result from cache for %s", policy.pk.hex) + LOGGER.debug("Taking result from cache", policy=policy.pk.hex) cached_policies.append(cached_policy) else: LOGGER.debug("Looking up real class of policy...") # TODO: Rewrite this to lookup all policies at once policy = Policy.objects.get_subclass(pk=policy.pk) - LOGGER.debug("Evaluating policy %s", policy.pk.hex) + LOGGER.debug("Evaluating policy", policy=policy.pk.hex) our_end, task_end = Pipe(False) task = PolicyTask() task.ret = task_end task.request = request task.policy = policy - LOGGER.debug("Starting Process %s", task.__class__.__name__) + LOGGER.debug("Starting Process", class_name=task.__class__.__name__) task.start() self.__proc_list.append((our_end, task)) # If all policies are cached, we have an empty list here. 
@@ -75,13 +72,11 @@ class PolicyEngine: return self @property - def result(self): + def result(self) -> Tuple[bool, List[str]]: """Get policy-checking result""" - results: List[PolicyResult] = [] messages: List[str] = [] for our_end, _ in self.__proc_list: - results.append(our_end.recv()) - for policy_result in results: + policy_result = our_end.recv() # passing = (policy_action == Policy.ACTION_ALLOW and policy_result) or \ # (policy_action == Policy.ACTION_DENY and not policy_result) LOGGER.debug('Result=%r => %r', policy_result, policy_result.passing) @@ -92,6 +87,6 @@ class PolicyEngine: return True, messages @property - def passing(self): + def passing(self) -> bool: """Only get true/false if user passes""" return self.result[0] diff --git a/passbook/root/settings.py b/passbook/root/settings.py index f3d24def0..4a13d5488 100644 --- a/passbook/root/settings.py +++ b/passbook/root/settings.py @@ -34,10 +34,8 @@ STATIC_ROOT = BASE_DIR + '/static' SECRET_KEY = CONFIG.y('secret_key', "9$@r!d^1^jrn#fk#1#@ks#9&i$^s#1)_13%$rwjrhd=e8jfi_s") # noqa Debug -# SECURITY WARNING: don't run with debug turned on in production! DEBUG = CONFIG.y_bool('debug') INTERNAL_IPS = ['127.0.0.1'] -# ALLOWED_HOSTS = CONFIG.y('domains', []) + [CONFIG.y('primary_domain')] ALLOWED_HOSTS = ['*'] SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') @@ -49,7 +47,6 @@ AUTH_USER_MODEL = 'passbook_core.User' CSRF_COOKIE_NAME = 'passbook_csrf' SESSION_COOKIE_NAME = 'passbook_session' -SESSION_COOKIE_DOMAIN = CONFIG.y('primary_domain') SESSION_ENGINE = "django.contrib.sessions.backends.cache" SESSION_CACHE_ALIAS = "default" LANGUAGE_COOKIE_NAME = 'passbook_language'