Merge branch '38-websocket-proxying' into 'master'

Resolve "Websocket Proxying"

Closes #38

See merge request BeryJu.org/passbook!24

commit 9b8f390e31
@@ -6,7 +6,7 @@ COPY ./requirements.txt /app/
 
 WORKDIR /app/
 
-RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev -y && \
+RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev libpq-dev -y && \
     mkdir /app/static/ && \
     pip install -r requirements.txt && \
     pip install psycopg2 && \
@@ -23,7 +23,7 @@ COPY --from=build /app/static /app/static/
 
 WORKDIR /app/
 
-RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev -y && \
+RUN apt-get update && apt-get install build-essential libssl-dev libffi-dev libpq-dev -y && \
     pip install -r requirements.txt && \
     pip install psycopg2 && \
     adduser --system --home /app/ passbook && \
@@ -29,7 +29,7 @@ spec:
         image: "docker.pkg.beryju.org/passbook:{{ .Values.image.tag }}"
         imagePullPolicy: IfNotPresent
         command: ["/bin/sh","-c"]
-        args: ["./manage.py migrate && ./manage.py web"]
+        args: ["./manage.py migrate && daphne -p 8000 passbook.core.asgi:application"]
         ports:
           - name: http
             containerPort: 8000
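
Aside, not part of this commit: the new args hand serving over from the Django-only runner to daphne, channels' ASGI server, which is what makes the websocket routing added below reachable. The "module:attribute" argument is resolved like any ASGI application path; roughly:

    # Sketch only: how "passbook.core.asgi:application" is resolved.
    from importlib import import_module

    module_path, attr = 'passbook.core.asgi:application'.split(':')
    application = getattr(import_module(module_path), attr)  # the ASGI callable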
@@ -10,10 +10,10 @@ from django.core.cache import cache
 from django.utils.http import urlencode
 from django.views.generic import RedirectView
 from revproxy.exceptions import InvalidUpstream
-from revproxy.response import get_django_response
-from revproxy.utils import encode_items, normalize_request_headers
 
 from passbook.app_gw.models import ApplicationGatewayProvider
+from passbook.app_gw.proxy.response import get_django_response
+from passbook.app_gw.proxy.utils import encode_items, normalize_request_headers
 from passbook.app_gw.rewrite import Rewriter
 from passbook.core.models import Application
 from passbook.core.policies import PolicyEngine
passbook/app_gw/proxy/__init__.py (new file, 0 lines)

passbook/app_gw/proxy/exceptions.py (new file, 8 lines)
@@ -0,0 +1,8 @@
"""Exception classes"""

class ReverseProxyException(Exception):
    """Base for revproxy exception"""


class InvalidUpstream(ReverseProxyException):
    """Invalid upstream set"""
passbook/app_gw/proxy/response.py (new file, 63 lines)
@@ -0,0 +1,63 @@
"""response functions from django-revproxy"""
import logging

from django.http import HttpResponse, StreamingHttpResponse

from passbook.app_gw.proxy.utils import (cookie_from_string,
                                         set_response_headers, should_stream)

#: Default number of bytes read per chunk when streaming a response body
DEFAULT_AMT = 2 ** 16

logger = logging.getLogger('revproxy.response')


def get_django_response(proxy_response, strict_cookies=False):
    """Create an appropriate response based on the Content-Length of
    the proxy_response. If the content is bigger than
    MIN_STREAMING_LENGTH (defined in utils.py), a
    django.http.StreamingHttpResponse is created; otherwise a
    django.http.HttpResponse is created.

    :param proxy_response: An instance of urllib3.response.HTTPResponse
                           from which the response is created
    :param strict_cookies: Whether to only accept RFC-compliant cookies
    :returns: An appropriate response based on the proxy_response
              content-length
    """
    status = proxy_response.status
    headers = proxy_response.headers

    logger.debug('Proxy response headers: %s', headers)

    content_type = headers.get('Content-Type')

    logger.debug('Content-Type: %s', content_type)

    if should_stream(proxy_response):
        logger.info('Content-Length is bigger than %s', DEFAULT_AMT)
        response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT),
                                         status=status,
                                         content_type=content_type)
    else:
        content = proxy_response.data or b''
        response = HttpResponse(content, status=status,
                                content_type=content_type)

    logger.info('Normalizing response headers')
    set_response_headers(response, headers)

    logger.debug('Response headers: %s', getattr(response, '_headers'))

    cookies = proxy_response.headers.getlist('set-cookie')
    logger.info('Checking for invalid cookies')
    for cookie_string in cookies:
        cookie_dict = cookie_from_string(cookie_string,
                                         strict_cookies=strict_cookies)
        # if the cookie is invalid, cookie_dict will be None
        if cookie_dict:
            response.set_cookie(**cookie_dict)

    logger.debug('Response cookies: %s', response.cookies)

    return response
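
For orientation, a sketch of how get_django_response is fed, not part of this commit: the upstream is fetched with urllib3 and preload_content=False, so the body stays unread and stream() remains possible; the URL is a placeholder.

    # Sketch only; the upstream URL is a placeholder.
    import urllib3

    from passbook.app_gw.proxy.response import get_django_response

    http = urllib3.PoolManager()
    upstream = http.request('GET', 'http://upstream.example.com/',
                            preload_content=False)
    django_response = get_django_response(upstream, strict_cookies=False)
    # StreamingHttpResponse for large non-HTML bodies, HttpResponse otherwise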
passbook/app_gw/proxy/utils.py (new file, 227 lines)
@@ -0,0 +1,227 @@
"""Utils from django-revproxy, slightly adjusted"""
import logging
import re
from wsgiref.util import is_hop_by_hop

try:
    from http.cookies import SimpleCookie
    COOKIE_PREFIX = ''
except ImportError:
    from Cookie import SimpleCookie
    COOKIE_PREFIX = 'Set-Cookie: '


#: List of string constants representing headers that are ignored in
#: the required_header function
IGNORE_HEADERS = (
    'HTTP_ACCEPT_ENCODING',  # We want content to be uncompressed, so
                             # we remove the Accept-Encoding from the
                             # original request
    'HTTP_HOST',
    'HTTP_REMOTE_USER',
)


# Default from HTTP RFC 2616
# See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
#: The default charset used
DEFAULT_CHARSET = 'latin-1'

#: List of string constants representing possible html content types
HTML_CONTENT_TYPES = (
    'text/html',
    'application/xhtml+xml'
)

#: Minimal content size required for a response to be turned into a stream
MIN_STREAMING_LENGTH = 4 * 1024  # 4KB

#: Regex used to find the charset in an html content type
_get_charset_re = re.compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I)


def is_html_content_type(content_type):
    """Check whether the parameter is a proper html content type

    :param content_type: String variable that represents a content-type
    :returns: A boolean stating whether content_type is a valid html
              content type
    """
    for html_content_type in HTML_CONTENT_TYPES:
        if content_type.startswith(html_content_type):
            return True

    return False


def should_stream(proxy_response):
    """Check whether the proxy_response must be converted into a stream.
    This is done by checking the proxy_response Content-Length and
    verifying whether it is bigger than MIN_STREAMING_LENGTH.

    :param proxy_response: An instance of urllib3.response.HTTPResponse
    :returns: A boolean stating whether the proxy_response should be
              treated as a stream
    """
    content_type = proxy_response.headers.get('Content-Type')

    if is_html_content_type(content_type):
        return False

    try:
        content_length = int(proxy_response.headers.get('Content-Length', 0))
    except ValueError:
        content_length = 0

    if not content_length or content_length > MIN_STREAMING_LENGTH:
        return True

    return False


def get_charset(content_type):
    """Retrieve the charset from a content-type. If there is no charset
    in the content type, the charset defined in DEFAULT_CHARSET is
    returned.

    :param content_type: A string containing a Content-Type header
    :returns: A string containing the charset
    """
    if not content_type:
        return DEFAULT_CHARSET

    matched = _get_charset_re.search(content_type)
    if matched:
        # Extract the charset and strip its double quotes
        return matched.group('charset').replace('"', '')
    return DEFAULT_CHARSET


def required_header(header):
    """Check whether the header parameter is an essential header

    :param header: A string representing a header
    :returns: A boolean stating whether the header is required
    """
    if header in IGNORE_HEADERS:
        return False

    if header.startswith('HTTP_') or header == 'CONTENT_TYPE':
        return True

    return False


def set_response_headers(response, response_headers):
    """Set the response's headers, skipping hop-by-hop headers and cookies"""
    for header, value in response_headers.items():
        if is_hop_by_hop(header) or header.lower() == 'set-cookie':
            continue

        response[header.title()] = value

    logger.debug('Response headers: %s', getattr(response, '_headers'))


def normalize_request_headers(request):
    """Transform headers, replacing 'HTTP_' with '' and '_' with '-'

    :param request: An HttpRequest that will be transformed
    :returns: A dictionary with the normalized headers
    """
    norm_headers = {}
    for header, value in request.META.items():
        if required_header(header):
            norm_header = header.replace('HTTP_', '').title().replace('_', '-')
            norm_headers[norm_header] = value

    return norm_headers


def encode_items(items):
    """Encode all elements in the list of items passed as a parameter

    :param items: A list of tuples
    :returns: A list of tuples with all items encoded in 'utf-8'
    """
    encoded = []
    for key, values in items:
        for value in values:
            encoded.append((key.encode('utf-8'), value.encode('utf-8')))
    return encoded


logger = logging.getLogger('revproxy.cookies')


def cookie_from_string(cookie_string, strict_cookies=False):
    """Parser for the HTTP Set-Cookie header

    The return value of this function is used as parameters for
    Django's response.set_cookie method. Because set_cookie doesn't
    have a comment parameter, this cookie attribute is ignored.

    :param cookie_string: A string representing a valid cookie
    :param strict_cookies: Whether to only accept RFC-compliant cookies
    :returns: A dictionary containing the cookie_string attributes
    """
    if strict_cookies:
        cookies = SimpleCookie(COOKIE_PREFIX + cookie_string)
        if not cookies.keys():
            return None
        cookie_name, = cookies.keys()
        cookie_dict = {k: v for k, v in cookies[cookie_name].items()
                       if v and k != 'comment'}
        cookie_dict['key'] = cookie_name
        cookie_dict['value'] = cookies[cookie_name].value
        return cookie_dict

    valid_attrs = ('path', 'domain', 'comment', 'expires',
                   'max_age', 'httponly', 'secure')

    cookie_dict = {}

    cookie_parts = cookie_string.split(';')
    try:
        cookie_dict['key'], cookie_dict['value'] = \
            cookie_parts[0].split('=', 1)
        cookie_dict['value'] = cookie_dict['value'].replace('"', '')
    except ValueError:
        logger.warning('Invalid cookie: `%s`', cookie_string)
        return None

    if cookie_dict['value'].startswith('='):
        logger.warning('Invalid cookie: `%s`', cookie_string)
        return None

    for part in cookie_parts[1:]:
        if '=' in part:
            attr, value = part.split('=', 1)
            value = value.strip()
        else:
            attr = part
            value = ''

        attr = attr.strip().lower()
        if not attr:
            continue

        if attr in valid_attrs:
            if attr in ('httponly', 'secure'):
                cookie_dict[attr] = True
            elif attr == 'comment':
                # ignore the comment attr, as explained in the
                # function docstring
                continue
            else:
                cookie_dict[attr] = value
        else:
            logger.warning('Unknown cookie attribute %s', attr)

    return cookie_dict
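
A quick look at what the non-strict parser above produces, not part of this commit (the cookie values are made up):

    # Sketch only; cookie values are made up.
    from passbook.app_gw.proxy.utils import cookie_from_string

    cookie = cookie_from_string('sessionid=abc123; Path=/; Secure; HttpOnly')
    # -> {'key': 'sessionid', 'value': 'abc123', 'path': '/',
    #     'secure': True, 'httponly': True}
    # response.set_cookie(**cookie) then replays the upstream cookie.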
@@ -1,2 +1,7 @@
 django-revproxy
 urllib3[secure]
+channels
+service_identity
+websocket-client
+daphne<2.3.0
+asgiref~=2.3
@@ -1,5 +1,5 @@
 """Application Security Gateway settings"""
-# INSTALLED_APPS = [
-#     'revproxy'
-# ]
-
+INSTALLED_APPS = [
+    'channels'
+]
+ASGI_APPLICATION = "passbook.app_gw.websocket.routing.application"
passbook/app_gw/websocket/__init__.py (new file, 0 lines)

passbook/app_gw/websocket/consumer.py (new file, 83 lines)
@@ -0,0 +1,83 @@
"""websocket proxy consumer"""
import threading
from logging import getLogger
from ssl import CERT_NONE

import websocket
from channels.generic.websocket import WebsocketConsumer

from passbook.app_gw.models import ApplicationGatewayProvider

LOGGER = getLogger(__name__)

class ProxyConsumer(WebsocketConsumer):
    """Proxy websocket connection to upstream"""

    _headers_dict = {}
    _app_gw = None
    _client = None
    _thread = None

    def _fix_headers(self, input_dict):
        """Fix headers from bytestrings to normal strings"""
        return {
            key.decode('utf-8'): value.decode('utf-8')
            for key, value in dict(input_dict).items()
        }

    def connect(self):
        """Extract host header, lookup in database and proxy connection"""
        self._headers_dict = self._fix_headers(dict(self.scope.get('headers')))
        host = self._headers_dict.pop('host')
        query_string = self.scope.get('query_string').decode('utf-8')
        matches = ApplicationGatewayProvider.objects.filter(
            server_name__contains=[host],
            enabled=True)
        if matches.exists():
            self._app_gw = matches.first()
            # TODO: Get upstream that starts with wss or
            upstream = self._app_gw.upstream[0].replace('http', 'ws') + self.scope.get('path')
            if query_string:
                upstream += '?' + query_string
            sslopt = {}
            if not self._app_gw.upstream_ssl_verification:
                sslopt = {"cert_reqs": CERT_NONE}
            self._client = websocket.WebSocketApp(
                url=upstream,
                subprotocols=self.scope.get('subprotocols'),
                header=self._headers_dict,
                on_message=self._client_on_message_handler(),
                on_error=self._client_on_error_handler(),
                on_close=self._client_on_close_handler(),
                on_open=self._client_on_open_handler())
            LOGGER.debug("Accepting connection for %s", host)
            self._thread = threading.Thread(target=lambda: self._client.run_forever(sslopt=sslopt))
            self._thread.start()

    def _client_on_open_handler(self):
        return lambda ws: self.accept(self._client.sock.handshake_response.subprotocol)

    def _client_on_message_handler(self):
        # pylint: disable=unused-argument,invalid-name
        def message_handler(ws, message):
            if isinstance(message, str):
                self.send(text_data=message)
            else:
                self.send(bytes_data=message)
        return message_handler

    def _client_on_error_handler(self):
        return lambda ws, error: print(error)

    def _client_on_close_handler(self):
        return lambda ws: self.disconnect(0)

    def disconnect(self, code):
        self._client.close()

    def receive(self, text_data=None, bytes_data=None):
        if text_data:
            opcode = websocket.ABNF.OPCODE_TEXT
        if bytes_data:
            opcode = websocket.ABNF.OPCODE_BINARY
        self._client.send(text_data or bytes_data, opcode)
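
The consumer bridges two worlds: channels delivers browser frames synchronously through receive(), while the upstream connection is held open by websocket-client and pumped on a background thread via run_forever(), with the handler closures forwarding upstream frames back to the browser. A smoke-test sketch, not part of this commit, assuming a provider whose server_name contains gateway.example.com and a daphne instance on port 8000:

    # Sketch only; hostname and path are placeholders.
    import websocket

    ws = websocket.create_connection('ws://gateway.example.com:8000/echo')
    ws.send('ping')
    print(ws.recv())  # whatever the proxied upstream replies
    ws.close()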
passbook/app_gw/websocket/routing.py (new file, 17 lines)
@@ -0,0 +1,17 @@
"""app_gw websocket proxy"""
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.conf.urls import url

from passbook.app_gw.websocket.consumer import ProxyConsumer

websocket_urlpatterns = [
    url(r'^(.*)$', ProxyConsumer),
]

application = ProtocolTypeRouter({
    # (http->django views is added by default)
    'websocket': AuthMiddlewareStack(
        URLRouter(websocket_urlpatterns)
    ),
})
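
The catch-all regex means every websocket connection, whatever its path, lands in ProxyConsumer. Should a more specific consumer ever be needed, it would have to be listed first, since url() patterns match in order; a sketch, not part of this commit, with a hypothetical HealthConsumer:

    # Sketch only; HealthConsumer is hypothetical.
    websocket_urlpatterns = [
        url(r'^ws/health/$', HealthConsumer),
        url(r'^(.*)$', ProxyConsumer),
    ]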
passbook/core/asgi.py (new file, 13 lines)
@@ -0,0 +1,13 @@
"""
ASGI entrypoint. Configures Django and then runs the application
defined in the ASGI_APPLICATION setting.
"""

import os

import django
from channels.routing import get_default_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "passbook.core.settings")
django.setup()
application = get_default_application()