import os
import re
import textwrap

from django.template import Template, Context
from django.utils.translation import ugettext_lazy as _

from orchestra.contrib.orchestration import ServiceController
from orchestra.contrib.resources import ServiceMonitor

from .. import settings
from ..utils import normurlpath


class Apache2Backend(ServiceController):
    """
    Apache ≥2.4 backend with support for the following directives:
        <tt>static</tt>, <tt>location</tt>, <tt>fpm</tt>, <tt>fcgid</tt>, <tt>uwsgi</tt>, \
        <tt>ssl</tt>, <tt>security</tt>, <tt>redirects</tt>, <tt>proxies</tt>, <tt>saas</tt>
    """
    HTTP_PORT = 80
    HTTPS_PORT = 443

    model = 'websites.Website'
    related_models = (
        ('websites.Content', 'website'),
        ('websites.WebsiteDirective', 'website'),
        ('webapps.WebApp', 'website_set'),
    )
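    # Changes on the related objects above also trigger this backend, so the
    # generated virtual host stays in sync with contents, directives and webapps.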
    verbose_name = _("Apache 2")
    doc_settings = (settings, (
        'WEBSITES_VHOST_EXTRA_DIRECTIVES',
        'WEBSITES_DEFAULT_SSL_CERT',
        'WEBSITES_DEFAULT_SSL_KEY',
        'WEBSITES_DEFAULT_SSL_CA',
        'WEBSITES_BASE_APACHE_CONF',
        'WEBSITES_DEFAULT_IPS',
        'WEBSITES_SAAS_DIRECTIVES',
    ))

    def render_virtual_host(self, site, context, ssl=False):
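        # Build the <VirtualHost> body from the per-content directives plus the
        # ssl/security/redirect/proxy/saas snippets, sorted longest-location
        # first so the most specific paths take precedence.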
        context['port'] = self.HTTPS_PORT if ssl else self.HTTP_PORT
        context['vhost_wrapper_dirs'] = []
        extra_conf = self.get_content_directives(site, context)
        directives = site.get_directives()
        if ssl:
            extra_conf += self.get_ssl(directives)
        extra_conf += self.get_security(directives)
        extra_conf += self.get_redirects(directives)
        extra_conf += self.get_proxies(directives)
        extra_conf += self.get_saas(directives)
        settings_context = site.get_settings_context()
        for location, directive in settings.WEBSITES_VHOST_EXTRA_DIRECTIVES:
            extra_conf.append((location, directive % settings_context))
        # Order extra conf directives by location length (longest first)
        extra_conf = sorted(extra_conf, key=lambda a: len(a[0]), reverse=True)
        context['extra_conf'] = '\n'.join([conf for location, conf in extra_conf])
        return Template(textwrap.dedent("""\
            <VirtualHost{% for ip in ips %} {{ ip }}:{{ port }}{% endfor %}>
                IncludeOptional /etc/apache2/site[s]-override/{{ site_unique_name }}.con[f]
                ServerName {{ server_name }}\
            {% if server_alias %}
                ServerAlias {{ server_alias|join:' ' }}{% endif %}\
            {% if access_log %}
                CustomLog {{ access_log }} common{% endif %}\
            {% if error_log %}
                ErrorLog {{ error_log }}{% endif %}
                SuexecUserGroup {{ user }} {{ group }}\
            {% for line in extra_conf.splitlines %}
                {{ line | safe }}{% endfor %}
            </VirtualHost>
            """)
        ).render(Context(context))

    def render_redirect_https(self, context):
        context['port'] = self.HTTP_PORT
        return Template(textwrap.dedent("""
            <VirtualHost{% for ip in ips %} {{ ip }}:{{ port }}{% endfor %}>
                ServerName {{ server_name }}\
            {% if server_alias %}
                ServerAlias {{ server_alias|join:' ' }}{% endif %}\
            {% if access_log %}
                CustomLog {{ access_log }} common{% endif %}\
            {% if error_log %}
                ErrorLog {{ error_log }}{% endif %}
                RewriteEngine On
                RewriteCond %{HTTPS} off
                RewriteRule (.*) https://%{HTTP_HOST}%{REQUEST_URI}
            </VirtualHost>
            """)
        ).render(Context(context))

    def save(self, site):
        context = self.get_context(site)
        if context['server_name']:
            apache_conf = '# %(banner)s\n' % context
            if site.protocol in (site.HTTP, site.HTTP_AND_HTTPS):
                apache_conf += self.render_virtual_host(site, context, ssl=False)
            if site.protocol in (site.HTTP_AND_HTTPS, site.HTTPS_ONLY, site.HTTPS):
                apache_conf += self.render_virtual_host(site, context, ssl=True)
            if site.protocol == site.HTTPS_ONLY:
                apache_conf += self.render_redirect_https(context)
            context['apache_conf'] = apache_conf.strip()
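            # Only rewrite the file (and flag UPDATED_APACHE) when the rendered
            # config differs from what is already on disk, ignoring comment lines.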
            self.append(textwrap.dedent("""
                # Generate Apache site config for %(site_name)s
                read -r -d '' apache_conf << 'EOF' || true
                %(apache_conf)s
                EOF
                {
                    echo -e "${apache_conf}" | diff -N -I'^\s*#' %(sites_available)s -
                } || {
                    echo -e "${apache_conf}" > %(sites_available)s
                    UPDATED_APACHE=1
                }""") % context
            )
        if context['server_name'] and site.active:
            self.append(textwrap.dedent("""
                # Enable site %(site_name)s
                if [[ ! -f %(sites_enabled)s ]]; then
                    a2ensite %(site_unique_name)s.conf
                    UPDATED_APACHE=1
                fi""") % context
            )
        else:
            self.append(textwrap.dedent("""
                # Disable site %(site_name)s
                if [[ -f %(sites_enabled)s ]]; then
                    a2dissite %(site_unique_name)s.conf;
                    UPDATED_APACHE=1
                fi""") % context
            )

    def delete(self, site):
        context = self.get_context(site)
        self.append(textwrap.dedent("""
            # Remove site configuration for %(site_name)s
            a2dissite %(site_unique_name)s.conf && UPDATED_APACHE=1
            rm -f %(sites_available)s\
            """) % context
        )

    def prepare(self):
        super(Apache2Backend, self).prepare()
        # Coordinate the Apache restart with the PHP backend so the service is not reloaded more than once
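        # Each backend run registers its name in /dev/shm/restart.apache2;
        # commit() later reads this file to decide which backend performs the reload.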
        self.append(textwrap.dedent("""
            backend="Apache2Backend"
            echo "$backend" >> /dev/shm/restart.apache2""")
        )

    def commit(self):
        """ reload Apache2 if necessary """
        self.append(textwrap.dedent("""
            # Coordinate Apache restart with other concurrent backends (e.g. PHPBackend)
            is_last=0
            mv /dev/shm/restart.apache2 /dev/shm/restart.apache2.locked || {
                sleep 0.2
                mv /dev/shm/restart.apache2 /dev/shm/restart.apache2.locked
            }
            state="$(grep -v "$backend" /dev/shm/restart.apache2.locked)" || is_last=1
            [[ $is_last -eq 0 ]] && {
                echo "$state" | grep -v ' RESTART$' || is_last=1
            }
            if [[ $is_last -eq 1 ]]; then
                if [[ $UPDATED_APACHE -eq 1 || "$state" =~ .*RESTART$ ]]; then
                    if [[ $(service apache2 status) ]]; then
                        service apache2 reload
                    else
                        service apache2 start
                    fi
                fi
                rm /dev/shm/restart.apache2.locked
            else
                echo -n "$state" > /dev/shm/restart.apache2.locked
                if [[ $UPDATED_APACHE -eq 1 ]]; then
                    echo -e "Apache will be restarted by another backend:\\n${state}"
                    echo "$backend RESTART" >> /dev/shm/restart.apache2.locked
                fi
                mv /dev/shm/restart.apache2.locked /dev/shm/restart.apache2
            fi
            # End of coordination
            """)
        )
        super(Apache2Backend, self).commit()

    def get_directives(self, directive, context):
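        # A directive is a tuple whose first element names the handler
        # (e.g. static, fpm, fcgid, uwsgi); dispatch to get_<name>_directives().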
        method, args = directive[0], directive[1:]
        try:
            method = getattr(self, 'get_%s_directives' % method)
        except AttributeError:
            context = (self.__class__.__name__, method)
            raise AttributeError("%s does not support the '%s' directive." % context)
        return method(context, *args)

    def get_content_directives(self, site, context):
        directives = []
        for content in site.content_set.all():
            directive = content.webapp.get_directive()
            self.set_content_context(content, context)
            directives += self.get_directives(directive, context)
        return directives

    def get_static_directives(self, context, app_path):
        context['app_path'] = os.path.normpath(app_path % context)
        directive = self.get_location_filesystem_map(context)
        return [
            (context['location'], directive),
        ]

    def get_location_filesystem_map(self, context):
        if not context['location']:
            return 'DocumentRoot %(app_path)s' % context
        return 'Alias %(location)s %(app_path)s' % context

    def get_fpm_directives(self, context, socket, app_path):
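        # Hand *.php requests to the PHP-FPM pool listening on the given TCP or
        # UNIX socket (mod_proxy_fcgi); the rest of the location is mapped to
        # the filesystem.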
        if ':' in socket:
            # TCP socket
            target = 'fcgi://%(socket)s%(app_path)s/$1'
        else:
            # UNIX socket
            target = 'unix:%(socket)s|fcgi://127.0.0.1%(app_path)s/'
            if context['location']:
                target = 'unix:%(socket)s|fcgi://127.0.0.1%(app_path)s/$1'
        context.update({
            'app_path': os.path.normpath(app_path),
            'socket': socket,
        })
        directives = "ProxyPassMatch ^%(location)s/(.*\.php(/.*)?)$ {target}\n".format(target=target) % context
        directives += self.get_location_filesystem_map(context)
        return [
            (context['location'], directives),
        ]

    def get_fcgid_directives(self, context, app_path, wrapper_path):
        context.update({
            'app_path': os.path.normpath(app_path),
            'wrapper_name': os.path.basename(wrapper_path),
        })
        directives = ''
        # This Action trick is used instead of FcgidWrapper because we don't want to define
        # a new fcgid process class each time an app is mounted (num proc limits enforcement).
        context['wrapper_dir'] = os.path.dirname(wrapper_path)
        if context['wrapper_dir'] not in context['vhost_wrapper_dirs']:
            # fcgi-bin only needs to be defined once per vhost;
            # we assume that all account wrapper paths share the same dir
            directives = textwrap.dedent("""\
                Alias /fcgi-bin/ %(wrapper_dir)s/
                <Location /fcgi-bin/>
                    SetHandler fcgid-script
                    Options +ExecCGI
                </Location>
                """) % context
            context['vhost_wrapper_dirs'].append(context['wrapper_dir'])
        directives += self.get_location_filesystem_map(context)
        directives += textwrap.dedent("""
            ProxyPass %(location)s/ !
            <Directory %(app_path)s/>
                AddHandler php-fcgi .php
                Action php-fcgi /fcgi-bin/%(wrapper_name)s
            </Directory>""") % context
        return [
            (context['location'], directives),
        ]

    def get_uwsgi_directives(self, context, socket):
        # requires apache2 mod_proxy_uwsgi
        context['socket'] = socket
        directives = "ProxyPass / unix:%(socket)s|uwsgi://" % context
        directives += self.get_location_filesystem_map(context)
        return [
            (context['location'], directives),
        ]

    def get_ssl(self, directives):
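        # Prefer the per-site ssl-cert/ssl-key/ssl-ca directives; fall back to
        # the platform-wide defaults defined in settings when they are missing.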
        cert = directives.get('ssl-cert')
        key = directives.get('ssl-key')
        ca = directives.get('ssl-ca')
        if not (cert and key):
            cert = [settings.WEBSITES_DEFAULT_SSL_CERT]
            key = [settings.WEBSITES_DEFAULT_SSL_KEY]
            ca = [settings.WEBSITES_DEFAULT_SSL_CA]
            if not (cert and key):
                return []
        ssl_config = [
            "SSLEngine on",
            "SSLCertificateFile %s" % cert[0],
            "SSLCertificateKeyFile %s" % key[0],
        ]
        if ca:
            ssl_config.append("SSLCACertificateFile %s" % ca[0])
        return [
            ('', '\n'.join(ssl_config)),
        ]

    def get_security(self, directives):
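        # sec-rule-remove disables individual mod_security rules by id;
        # sec-engine switches the rule engine off for a whole location.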
        remove_rules = []
        for values in directives.get('sec-rule-remove', []):
            for rule in values.split():
                sec_rule = "    SecRuleRemoveById %i" % int(rule)
                remove_rules.append(sec_rule)
        security = []
        if remove_rules:
            remove_rules.insert(0, '<IfModule mod_security2.c>')
            remove_rules.append('</IfModule>')
            security.append(('', '\n'.join(remove_rules)))
        for location in directives.get('sec-engine', []):
            sec_rule = textwrap.dedent("""\
                <IfModule mod_security2.c>
                    <Location %s>
                        SecRuleEngine Off
                    </Location>
                </IfModule>""") % location
            security.append((location, sec_rule))
        return security

    def get_redirects(self, directives):
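        # Emit RedirectMatch when the pattern contains regex metacharacters,
        # a plain Redirect otherwise.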
        redirects = []
        for redirect in directives.get('redirect', []):
            location, target = redirect.split()
            if re.match(r'^.*[\^\*\$\?\)]+.*$', redirect):
                redirect = "RedirectMatch %s %s" % (location, target)
            else:
                redirect = "Redirect %s %s" % (location, target)
            redirects.append(
                (location, redirect)
            )
        return redirects

    def get_proxies(self, directives):
        proxies = []
        for proxy in directives.get('proxy', []):
            proxy = proxy.split()
            location = proxy[0]
            target = proxy[1]
            options = ' '.join(proxy[2:])
            location = normurlpath(location)
            proxy = textwrap.dedent("""\
                ProxyPass {location}/ {target} {options}
                ProxyPassReverse {location}/ {target}""".format(
                    location=location, target=target, options=options)
            )
            proxies.append(
                (location, proxy)
            )
        return proxies

    def get_saas(self, directives):
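        # Directives named '<service>-saas' are expanded with the directive
        # template configured for that service in WEBSITES_SAAS_DIRECTIVES.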
        saas = []
        for name, values in directives.items():
            if name.endswith('-saas'):
                for value in values:
                    context = {
                        'location': normurlpath(value),
                    }
                    directive = settings.WEBSITES_SAAS_DIRECTIVES[name]
                    saas += self.get_directives(directive, context)
        return saas

    def get_username(self, site):
        option = site.get_directives().get('user_group')
        if option:
            return option[0]
        return site.get_username()

    def get_groupname(self, site):
        option = site.get_directives().get('user_group')
        if option and ' ' in option[0]:
            user, group = option[0].split()
            return group
        return site.get_groupname()

    def get_server_names(self, site):
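        # The first non-wildcard domain (alphabetically) becomes the ServerName;
        # every other domain is listed as a ServerAlias.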
        server_name = None
        server_alias = []
        for domain in site.domains.all().order_by('name'):
            if not server_name and not domain.name.startswith('*'):
                server_name = domain.name
            else:
                server_alias.append(domain.name)
        return server_name, server_alias

    def get_context(self, site):
        base_apache_conf = settings.WEBSITES_BASE_APACHE_CONF
        sites_available = os.path.join(base_apache_conf, 'sites-available')
        sites_enabled = os.path.join(base_apache_conf, 'sites-enabled')
        server_name, server_alias = self.get_server_names(site)
        context = {
            'site': site,
            'site_name': site.name,
            'ips': settings.WEBSITES_DEFAULT_IPS,
            'site_unique_name': site.unique_name,
            'user': self.get_username(site),
            'group': self.get_groupname(site),
            'server_name': server_name,
            'server_alias': server_alias,
            'sites_enabled': "%s.conf" % os.path.join(sites_enabled, site.unique_name),
            'sites_available': "%s.conf" % os.path.join(sites_available, site.unique_name),
            'access_log': site.get_www_access_log_path(),
            'error_log': site.get_www_error_log_path(),
            'banner': self.get_banner(),
        }
        if not context['ips']:
            raise ValueError("WEBSITES_DEFAULT_IPS is empty.")
        return context

    def set_content_context(self, content, context):
        content_context = {
            'type': content.webapp.type,
            'location': normurlpath(content.path),
            'app_name': content.webapp.name,
            'app_path': content.webapp.get_path(),
        }
        context.update(content_context)


class Apache2Traffic(ServiceMonitor):
    """
    Parses Apache access logs, taking the size of each request from the last
    field of each log line.
    """
    model = 'websites.Website'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Apache 2 Traffic")
    doc_settings = (settings,
        ('WEBSITES_TRAFFIC_IGNORE_HOSTS',)
    )

    def prepare(self):
        super(Apache2Traffic, self).prepare()
        ignore_hosts = '\\|'.join(settings.WEBSITES_TRAFFIC_IGNORE_HOSTS)
        context = {
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
            'ignore_hosts': '-v "%s"' % ignore_hosts if ignore_hosts else '',
        }
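        # The shell monitor() function sums the response size (last field of
        # each log line) for entries dated between the last monitored date and
        # now, skipping requests from WEBSITES_TRAFFIC_IGNORE_HOSTS.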
        self.append(textwrap.dedent("""\
            function monitor () {
                OBJECT_ID=$1
                INI_DATE=$(date "+%%Y%%m%%d%%H%%M%%S" -d "$2")
                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%(current_date)s')
                LOG_FILE="$3"
                {
                    { grep %(ignore_hosts)s ${LOG_FILE} || echo -e '\\r'; } \\
                        | awk -v ini="${INI_DATE}" -v end="${END_DATE}" '
                            BEGIN {
                                sum = 0
                                months["Jan"] = "01"
                                months["Feb"] = "02"
                                months["Mar"] = "03"
                                months["Apr"] = "04"
                                months["May"] = "05"
                                months["Jun"] = "06"
                                months["Jul"] = "07"
                                months["Aug"] = "08"
                                months["Sep"] = "09"
                                months["Oct"] = "10"
                                months["Nov"] = "11"
                                months["Dec"] = "12"
                            } {
                                # date = [11/Jul/2014:13:50:41
                                date = substr($4, 2)
                                year = substr(date, 8, 4)
                                month = months[substr(date, 4, 3)];
                                day = substr(date, 1, 2)
                                hour = substr(date, 13, 2)
                                minute = substr(date, 16, 2)
                                second = substr(date, 19, 2)
                                line_date = year month day hour minute second
                                if ( line_date > ini && line_date < end)
                                    sum += $NF
                            } END {
                                print sum
                            }' || [[ $? == 1 ]] && true
                } | xargs echo ${OBJECT_ID}
            }""") % context)

    def monitor(self, site):
        context = self.get_context(site)
        self.append('monitor {object_id} "{last_date}" {log_file}'.format(**context))

    def get_context(self, site):
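        # log_file uses brace expansion to also read the first rotated log
        # (e.g. access.log.1), so traffic around rotation time is not lost.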
        return {
            'log_file': '%s{,.1}' % site.get_www_access_log_path(),
            'last_date': self.get_last_date(site.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
            'object_id': site.pk,
        }