Fixed mailman monitoring backend
This commit is contained in:
parent d601773bf3
commit c60b2008bf

TODO.md | 2

@@ -187,4 +187,4 @@ Multi-tenant WebApps
 * username maximum as group user in UNIX
-* forms autocomplete="off"
+* forms autocomplete="off", doesn't work in chrome

@@ -150,35 +150,61 @@ class MailmanTraffic(ServiceMonitor):
     def prepare(self):
         super(MailmanTraffic, self).prepare()
-        current_date = timezone.localtime(self.current_date)
-        current_date = current_date.strftime("%b %d %H:%M:%S")
+        current_date = self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z")
         self.append(textwrap.dedent("""\
             function monitor () {
                 OBJECT_ID=$1
-                LAST_DATE=$2
+                # Dates convertions are done server-side because of timezone discrepancies
+                INI_DATE=$(date "+%%Y%%m%%d%%H%%M%%S" -d "$2")
+                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%s')
                 LIST_NAME="$3"
                 MAILMAN_LOG="$4"
 
                 SUBSCRIBERS=$(list_members ${LIST_NAME} | wc -l)
-                SIZE=$(grep " post to ${LIST_NAME} " "${MAILMAN_LOG}" \\
-                    | awk '"$LAST_DATE"<=$0 && $0<="%s"' \\
-                    | sed 's/.*size=\([0-9]*\).*/\\1/' \\
-                    | tr '\\n' '+' \\
-                    | xargs -i echo {}0 )
-                echo ${OBJECT_ID} $(( ${SIZE}*${SUBSCRIBERS} ))
+                {
+                    { grep " post to ${LIST_NAME} " ${MAILMAN_LOG} || echo '\\r'; } \\
+                        | awk -v ini="${INI_DATE}" -v end="${END_DATE}" -v subs="${SUBSCRIBERS}" '
+                            BEGIN {
+                                sum = 0
+                                months["Jan"] = "01"
+                                months["Feb"] = "02"
+                                months["Mar"] = "03"
+                                months["Apr"] = "04"
+                                months["May"] = "05"
+                                months["Jun"] = "06"
+                                months["Jul"] = "07"
+                                months["Aug"] = "08"
+                                months["Sep"] = "09"
+                                months["Oct"] = "10"
+                                months["Nov"] = "11"
+                                months["Dec"] = "12"
+                            } {
+                                # Mar 01 08:29:02 2015
+                                month = months[$1]
+                                day = $2
+                                year = $4
+                                split($3, time, ":")
+                                line_date = year month day time[1] time[2] time[3]
+                                if ( line_date > ini && line_date < end)
+                                    sum += substr($11, 6, length($11)-6)
+                            } END {
+                                print sum * subs
+                            }' || [[ $? == 1 ]] && true
+                } | xargs echo ${OBJECT_ID}
             }""") % current_date)
 
     def monitor(self, mail_list):
         context = self.get_context(mail_list)
         self.append(
-            'monitor %(object_id)i "%(last_date)s" "%(list_name)s" %(mailman_log)s{,.1}' % context)
+            'monitor %(object_id)i "%(last_date)s" "%(list_name)s" %(mailman_log)s{,.1}' % context
+        )
 
     def get_context(self, mail_list):
         return {
             'mailman_log': settings.LISTS_MAILMAN_POST_LOG_PATH,
             'list_name': mail_list.name,
             'object_id': mail_list.pk,
-            'last_date': self.get_last_date(mail_list.pk).strftime("%b %d %H:%M:%S"),
+            'last_date': self.get_last_date(mail_list.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
         }

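The rewritten monitor converts the window boundaries and every log line into zero-padded YYYYmmddHHMMSS keys before comparing them in awk. A minimal sketch (illustrative only, not part of the commit) of why plain string comparison is safe for that key format:

# Illustrative check of the %Y%m%d%H%M%S comparison trick used above.
from datetime import datetime, timedelta

def to_key(dt):
    # Same shape the shell side builds with: date "+%Y%m%d%H%M%S"
    return dt.strftime('%Y%m%d%H%M%S')

ini = datetime(2015, 3, 1, 8, 0, 0)
end = ini + timedelta(hours=2)
line = datetime(2015, 3, 1, 8, 29, 2)   # e.g. "Mar 01 08:29:02 2015" in the mailman post log

# Zero-padded keys sort lexicographically in chronological order, which is
# exactly what the awk filter `line_date > ini && line_date < end` relies on.
assert to_key(ini) < to_key(line) < to_key(end)
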
@@ -117,7 +117,6 @@ class PasswdVirtualUserBackend(ServiceController):
 class PostfixAddressBackend(ServiceController):
     verbose_name = _("Postfix address")
     model = 'mailboxes.Address'
-    # TODO
     related_models = (
         ('mailboxes.Mailbox', 'addresses'),
     )

@@ -49,13 +49,13 @@ def message_user(request, logs):
     url = reverse('admin:orchestration_backendlog_change', args=ids)
     if errors:
         msg = ungettext(
-            _('{errors} out of {total} <a href="{url}">banckends</a> has fail to execute.'),
-            _('{errors} out of {total} <a href="{url}">banckends</a> have fail to execute.'),
+            _('{errors} out of {total} <a href="{url}">backends</a> has fail to execute.'),
+            _('{errors} out of {total} <a href="{url}">backends</a> have fail to execute.'),
             errors)
         messages.error(request, mark_safe(msg.format(errors=errors, total=total, url=url)))
     else:
         msg = ungettext(
-            _('{total} <a href="{url}">banckend</a> has been executed.'),
-            _('{total} <a href="{url}">banckends</a> have been executed.'),
+            _('{total} <a href="{url}">backend</a> has been executed.'),
+            _('{total} <a href="{url}">backends</a> have been executed.'),
             total)
         messages.success(request, mark_safe(msg.format(total=total, url=url)))

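These strings go through Django's ungettext, which picks the singular or plural variant based on the count. A rough sketch of the same selection pattern, shown with the stdlib gettext module so it runs without a configured Django project:

# Sketch only: stdlib equivalent of the ungettext call above.
from gettext import ngettext

def executed_message(total):
    msg = ngettext(
        '{total} backend has been executed.',
        '{total} backends have been executed.',
        total)
    return msg.format(total=total)

print(executed_message(1))   # 1 backend has been executed.
print(executed_message(3))   # 3 backends have been executed.
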
@@ -18,20 +18,19 @@ def post_save_collector(sender, *args, **kwargs):
     if sender not in [BackendLog, Operation]:
         OperationsMiddleware.collect(Operation.SAVE, **kwargs)
 
 
 @receiver(pre_delete, dispatch_uid='orchestration.pre_delete_collector')
 def pre_delete_collector(sender, *args, **kwargs):
     if sender not in [BackendLog, Operation]:
         OperationsMiddleware.collect(Operation.DELETE, **kwargs)
 
 
 @receiver(m2m_changed, dispatch_uid='orchestration.m2m_collector')
 def m2m_collector(sender, *args, **kwargs):
     # m2m relations without intermediary models are shit. Model.post_save is not sent and
     # by the time related.post_save is sent rel objects are not accessible via RelatedManager.all()
     # We have to use this inefficient technique of collecting the instances via m2m_changed.post_add
     if kwargs.pop('action') == 'post_add' and kwargs['pk_set']:
-        for instance in kwargs['model'].objects.filter(pk__in=kwargs['pk_set']):
-            kwargs['instance'] = instance
-            OperationsMiddleware.collect(Operation.SAVE, **kwargs)
+        OperationsMiddleware.collect(Operation.SAVE, **kwargs)
 
 
 class OperationsMiddleware(object):

@@ -68,23 +67,26 @@ class OperationsMiddleware(object):
                 candidate = backend.get_related(kwargs['instance'])
                 if candidate:
                     if candidate.__class__.__name__ == 'ManyRelatedManager':
-                        candidates = candidate.all()
+                        if 'pk_set' in kwargs:
+                            # m2m_changed signal
+                            candidates = kwargs['model'].objects.filter(pk__in=kwargs['pk_set'])
+                        else:
+                            candidates = candidate.all()
                     else:
                         candidates = [candidate]
                     for candidate in candidates:
                         # Check if a delete for candidate is in pending_operations
-                        delete = Operation.create(backend, candidate, Operation.DELETE)
-                        if delete not in pending_operations:
+                        delete_mock = Operation.create(backend, candidate, Operation.DELETE)
+                        if delete_mock not in pending_operations:
                             # related objects with backend.model trigger save()
-                            action = Operation.SAVE
-                            instances.append((candidate, action))
-        for instance, action in instances:
+                            instances.append((candidate, Operation.SAVE))
+        for instance, iaction in instances:
             # Maintain consistent state of pending_operations based on save/delete behaviour
             # Prevent creating a deleted instance by deleting existing saves
-            if action == Operation.DELETE:
-                save = Operation.create(backend, instance, Operation.SAVE)
+            if iaction == Operation.DELETE:
+                save_mock = Operation.create(backend, instance, Operation.SAVE)
                 try:
-                    pending_operations.remove(save)
+                    pending_operations.remove(save_mock)
                 except KeyError:
                     pass
             else:

@@ -100,8 +102,8 @@ class OperationsMiddleware(object):
                     break
             if not execute:
                 continue
-            operation = Operation.create(backend, instance, action)
-            if action != Operation.DELETE:
+            operation = Operation.create(backend, instance, iaction)
+            if iaction != Operation.DELETE:
                 # usually we expect to be using last object state,
                 # except when we are deleting it
                 pending_operations.discard(operation)

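The *_mock objects above exist only to be compared against pending_operations: Operation instances that compare equal can be found and discarded in the set, so a stale SAVE can be dropped before a DELETE is queued. A toy illustration of that pattern with a stand-in type (not the project's real Operation class):

# Toy stand-in: value-equal objects are interchangeable as set members.
from collections import namedtuple

Operation = namedtuple('Operation', 'backend instance action')

pending_operations = {Operation('Apache2Backend', 'site-1', 'SAVE')}

# Build a throwaway "mock" just to test membership and remove the stale entry.
save_mock = Operation('Apache2Backend', 'site-1', 'SAVE')
if save_mock in pending_operations:
    pending_operations.remove(save_mock)

pending_operations.add(Operation('Apache2Backend', 'site-1', 'DELETE'))
print(pending_operations)
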
@@ -87,7 +87,7 @@ class SystemUserDisk(ServiceMonitor):
         if user.is_main or os.path.normpath(user.home) == user.get_base_home():
             self.append("echo %(object_id)s $(monitor %(home)s)" % context)
         else:
-            # Home appears to be included in other user home
+            # Home is already included in other user home
             self.append("echo %(object_id)s 0" % context)
 
     def get_context(self, user):

@@ -109,11 +109,11 @@ class FTPTraffic(ServiceMonitor):
             function monitor () {
                 OBJECT_ID=$1
                 INI_DATE=$(date "+%%Y%%m%%d%%H%%M%%S" -d "$2")
-                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%(current_date)s')
+                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%s')
                 USERNAME="$3"
                 LOG_FILE="$4"
                 {
-                    grep "UPLOAD\|DOWNLOAD" "${LOG_FILE}" \\
+                    grep "UPLOAD\|DOWNLOAD" ${LOG_FILE} \\
                         | grep " \\[${USERNAME}\\] " \\
                         | awk -v ini="${INI_DATE}" -v end="${END_DATE}" '
                             BEGIN {

@@ -131,14 +131,12 @@ class FTPTraffic(ServiceMonitor):
                                 months["Nov"] = "11"
                                 months["Dec"] = "12"
                             } {
-                                # log: Fri Jul 11 13:23:17 2014
-                                split($4, t, ":")
+                                # Fri Jul 11 13:23:17 2014
+                                split($4, time, ":")
                                 # line_date = year month day hour minute second
-                                line_date = $5 months[$2] $3 t[1] t[2] t[3]
+                                line_date = $5 months[$2] $3 time[1] time[2] time[3]
                                 if ( line_date > ini && line_date < end) {
-                                    split($0, l, "\\", ")
-                                    split(l[3], b, " ")
-                                    sum += b[1]
+                                    sum += $(NF-2)
                                 }
                             } END {
                                 print sum

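The old awk code recovered the byte count by splitting on the quoted filename; the new code reads it as the third-from-last whitespace field, $(NF-2). A quick sketch against an assumed vsftpd-style transfer line (the exact log format here is an assumption, not taken from the commit):

# Assumed vsftpd.log-style line; only illustrates the field position $(NF-2) addresses.
line = ('Fri Jul 11 13:23:17 2014 [pid 12345] [exampleuser] OK DOWNLOAD: '
        'Client "10.0.0.1", "/home/exampleuser/file.bin", 52428800 bytes, 1024.00Kbyte/sec')

fields = line.split()
size = int(fields[-3])   # same field awk addresses as $(NF-2)
print(size)              # 52428800
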
@@ -105,9 +105,9 @@ class SystemUser(models.Model):
             shell=data.get('shell') or self.shell,
         )
         if 'home' in data and data['home']:
-            home = data['home'].rstrip('/')
-            user_home = user.get_base_home().rstrip('/')
-            account_home = account.main_systemuser.get_home().rstrip('/')
+            home = os.path.normpath(data['home'])
+            user_home = user.get_base_home()
+            account_home = account.main_systemuser.get_home()
             if user.has_shell:
                 if home != user_home:
                     raise ValidationError({

@@ -128,7 +128,7 @@ class SystemUser(models.Model):
         return os.path.normpath(settings.SYSTEMUSERS_HOME % context)
 
     def get_home(self):
-        return os.path.join(self.home, self.directory)
+        return os.path.normpath(os.path.join(self.home, self.directory))
 
 
 services.register(SystemUser)

@@ -38,7 +38,7 @@ class WebAppOptionInline(admin.TabularInline):
 
 
 class WebAppAdmin(AccountAdminMixin, ExtendedModelAdmin):
-    list_display = ('display_name', 'type', 'display_websites', 'account_link')
+    list_display = ('name', 'type', 'display_websites', 'account_link')
     list_filter = ('type',)
     add_fields = ('account', 'name', 'type')
     fields = ('account_link', 'name', 'type')

@@ -52,11 +52,6 @@ class WebAppAdmin(AccountAdminMixin, ExtendedModelAdmin):
         for k, v in settings.WEBAPPS_TYPES.iteritems()
     }
 
-    def display_name(self, webapp):
-        return webapp.get_name()
-    display_name.short_description = _("Name")
-    display_name.admin_order_field = 'name'
-
     def display_websites(self, webapp):
         websites = []
         for content in webapp.content_set.all():

@@ -13,8 +13,7 @@ from . import settings
 
 class WebApp(models.Model):
     """ Represents a web application """
-    name = models.CharField(_("name"), max_length=128, validators=[validators.validate_name],
-            blank=settings.WEBAPPS_ALLOW_BLANK_NAME)
+    name = models.CharField(_("name"), max_length=128, validators=[validators.validate_name])
     type = models.CharField(_("type"), max_length=32,
             choices=dict_setting_to_choices(settings.WEBAPPS_TYPES),
             default=settings.WEBAPPS_DEFAULT_TYPE)

@@ -27,7 +26,7 @@ class WebApp(models.Model):
         verbose_name_plural = _("Web Apps")
 
     def __unicode__(self):
-        return self.get_name()
+        return self.name
 
     def get_description(self):
         return self.get_type_display()

@@ -52,9 +51,6 @@ class WebApp(models.Model):
     def app_type(self):
         return settings.WEBAPPS_TYPES[self.type]
 
-    def get_name(self):
-        return self.name or settings.WEBAPPS_BLANK_NAME
-
     def get_fpm_port(self):
         return settings.WEBAPPS_FPM_START_PORT + self.account_id
 

@@ -66,9 +62,10 @@ class WebApp(models.Model):
     def get_path(self):
         context = {
             'home': self.get_user().get_home(),
-            'app_name': self.get_name(),
+            'app_name': self.name,
         }
-        return settings.WEBAPPS_BASE_ROOT % context
+        path = settings.WEBAPPS_BASE_ROOT % context
+        return path.replace('//', '/')
 
     def get_user(self):
         return self.account.main_systemuser

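get_path() now collapses any double slash left over after %-formatting the base-root template, e.g. when a context value already carries a trailing slash. A small sketch with a hypothetical WEBAPPS_BASE_ROOT value (the real default lives in the webapps settings module):

# Hypothetical template value, for illustration only.
WEBAPPS_BASE_ROOT = '%(home)s/webapps/%(app_name)s'

context = {'home': '/home/exampleuser/', 'app_name': 'blog'}
path = WEBAPPS_BASE_ROOT % context
print(path)                      # /home/exampleuser//webapps/blog
print(path.replace('//', '/'))   # /home/exampleuser/webapps/blog  (same cleanup as get_path)
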
@@ -10,12 +10,6 @@ WEBAPPS_FPM_LISTEN = getattr(settings, 'WEBAPPS_FPM_LISTEN',
         '127.0.0.1:%(fpm_port)s')
 
 
-WEBAPPS_ALLOW_BLANK_NAME = getattr(settings, 'WEBAPPS_ALLOW_BLANK_NAME', False)
-
-# Default name when blank
-WEBAPPS_BLANK_NAME = getattr(settings, 'WEBAPPS_BLANK_NAME', 'webapp')
-
-
 WEBAPPS_FPM_START_PORT = getattr(settings, 'WEBAPPS_FPM_START_PORT', 10000)
 
 

@@ -32,12 +32,15 @@ class Apache2Backend(ServiceController):
         <VirtualHost {{ ip }}:{{ site.port }}>
             ServerName {{ site.domains.all|first }}\
             {% if site.domains.all|slice:"1:" %}
-            ServerAlias {{ site.domains.all|slice:"1:"|join:' ' }}{% endif %}
-            CustomLog {{ logs }} common
+            ServerAlias {{ site.domains.all|slice:"1:"|join:' ' }}{% endif %}\
+            {% if access_log %}
+            CustomLog {{ access_log }} common{% endif %}\
+            {% if error_log %}
+            ErrorLog {{ error_log }}{% endif %}
             SuexecUserGroup {{ user }} {{ group }}\
             {% for line in extra_conf.splitlines %}
             {{ line | safe }}{% endfor %}
-            IncludeOptional /etc/apache2/extra-vhos[t]/{{ site_unique_name }}.con[f]
+            #IncludeOptional /etc/apache2/extra-vhos[t]/{{ site_unique_name }}.con[f]
         </VirtualHost>"""
     ))
     apache_conf = apache_conf.render(Context(context))

@@ -61,7 +64,7 @@ class Apache2Backend(ServiceController):
 
     def commit(self):
         """ reload Apache2 if necessary """
-        self.append('[[ $UPDATED == 1 ]] && service apache2 reload')
+        self.append('[[ $UPDATED == 1 ]] && service apache2 reload || true')
 
     def get_content_directives(self, site):
         directives = ''

@@ -159,15 +162,22 @@ class Apache2Backend(ServiceController):
 
     def enable_or_disable(self, site):
         context = self.get_context(site)
-        self.append("ls -l %(sites_enabled)s > /dev/null; DISABLED=$?" % context)
         if site.is_active:
-            self.append(
-                "if [[ $DISABLED ]]; then a2ensite %(site_unique_name)s.conf;\n"
-                "else UPDATED=0; fi" % context)
+            self.append(textwrap.dedent("""\
+                if [[ ! -f %(sites_enabled)s ]]; then
+                    a2ensite %(site_unique_name)s.conf
+                else
+                    UPDATED=0
+                fi""" % context
+            ))
         else:
-            self.append(
-                "if [[ ! $DISABLED ]]; then a2dissite %(site_unique_name)s.conf;\n"
-                "else UPDATED=0; fi" % context)
+            self.append(textwrap.dedent("""\
+                if [[ -f %(sites_enabled)s ]]; then
+                    a2dissite %(site_unique_name)s.conf;
+                else
+                    UPDATED=0
+                fi""" % context
+            ))
 
     def get_username(self, site):
         option = site.options.filter(name='user_group').first()

@@ -193,9 +203,10 @@ class Apache2Backend(ServiceController):
             'site_unique_name': site.unique_name,
             'user': self.get_username(site),
             'group': self.get_groupname(site),
-            'sites_enabled': sites_enabled,
+            'sites_enabled': "%s.conf" % os.path.join(sites_enabled, site.unique_name),
             'sites_available': "%s.conf" % os.path.join(sites_available, site.unique_name),
-            'logs': site.get_www_log_path(),
+            'access_log': site.get_www_access_log_path(),
+            'error_log': site.get_www_error_log_path(),
             'banner': self.get_banner(),
         }
         return context

@@ -231,22 +242,22 @@ class Apache2Traffic(ServiceMonitor):
                 END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%(current_date)s')
                 LOG_FILE="$3"
                 {
-                    { grep %(ignore_hosts)s "${LOG_FILE}" || echo '\\n'; } \\
+                    { grep %(ignore_hosts)s ${LOG_FILE} || echo -e '\\r'; } \\
                         | awk -v ini="${INI_DATE}" -v end="${END_DATE}" '
                             BEGIN {
                                 sum = 0
-                                months["Jan"] = "01";
-                                months["Feb"] = "02";
-                                months["Mar"] = "03";
-                                months["Apr"] = "04";
-                                months["May"] = "05";
-                                months["Jun"] = "06";
-                                months["Jul"] = "07";
-                                months["Aug"] = "08";
-                                months["Sep"] = "09";
-                                months["Oct"] = "10";
-                                months["Nov"] = "11";
-                                months["Dec"] = "12";
+                                months["Jan"] = "01"
+                                months["Feb"] = "02"
+                                months["Mar"] = "03"
+                                months["Apr"] = "04"
+                                months["May"] = "05"
+                                months["Jun"] = "06"
+                                months["Jul"] = "07"
+                                months["Aug"] = "08"
+                                months["Sep"] = "09"
+                                months["Oct"] = "10"
+                                months["Nov"] = "11"
+                                months["Dec"] = "12"
                             } {
                                 # date = [11/Jul/2014:13:50:41
                                 date = substr($4, 2)

@@ -1,4 +1,5 @@
 import os
+import textwrap
 
 from django.utils.translation import ugettext_lazy as _
 

@@ -13,71 +14,76 @@ class WebalizerBackend(ServiceController):
 
     def save(self, content):
         context = self.get_context(content)
-        self.append("mkdir -p %(webalizer_path)s" % context)
-        self.append("[[ ! -e %(webalizer_path)s/index.html ]] && "
-                    "echo 'Webstats are coming soon' > %(webalizer_path)s/index.html" % context)
-        self.append("echo '%(webalizer_conf)s' > %(webalizer_conf_path)s" % context)
-        self.append("chown %(user)s:www-data %(webalizer_path)s" % context)
+        self.append(textwrap.dedent("""\
+            mkdir -p %(webalizer_path)s
+            if [[ ! -e %(webalizer_path)s/index.html ]]; then
+                echo 'Webstats are coming soon' > %(webalizer_path)s/index.html
+            fi
+            echo '%(webalizer_conf)s' > %(webalizer_conf_path)s
+            chown %(user)s:www-data %(webalizer_path)s""" % context
+        ))
 
     def delete(self, content):
-        context = self.get_context(content)
-        self.append("rm -fr %(webalizer_path)s" % context)
-        self.append("rm %(webalizer_conf_path)s" % context)
+        pass
+        # TODO delete has to be done on webapp deleteion, not content deletion
+        # context = self.get_context(content)
+        # self.append("rm -fr %(webalizer_path)s" % context)
+        # self.append("rm %(webalizer_conf_path)s" % context)
 
     def get_context(self, content):
-        conf_file = "%s.conf" % content.website.name
+        conf_file = "%s.conf" % content.website.unique_name
        context = {
-            'site_logs': os.path.join(settings.WEBSITES_BASE_APACHE_LOGS, content.website.unique_name),
+            'site_logs': content.website.get_www_access_log_path(),
             'site_name': content.website.name,
             'webalizer_path': os.path.join(content.webapp.get_path(), content.website.name),
             'webalizer_conf_path': os.path.join(settings.WEBSITES_WEBALIZER_PATH, conf_file),
-            'user': content.webapp.account.user,
+            'user': content.webapp.account.username,
             'banner': self.get_banner(),
         }
-        context['webalizer_conf'] = (
-            "# %(banner)s\n"
-            "LogFile %(site_logs)s\n"
-            "LogType clf\n"
-            "OutputDir %(webalizer_path)s\n"
-            "HistoryName webalizer.hist\n"
-            "Incremental yes\n"
-            "IncrementalName webalizer.current\n"
-            "ReportTitle Stats of\n"
-            "HostName %(site_name)s\n"
-            "\n"
-            "PageType htm*\n"
-            "PageType php*\n"
-            "PageType shtml\n"
-            "PageType cgi\n"
-            "PageType pl\n"
-            "\n"
-            "DNSCache /var/lib/dns_cache.db\n"
-            "DNSChildren 15\n"
-            "\n"
-            "HideURL *.gif\n"
-            "HideURL *.GIF\n"
-            "HideURL *.jpg\n"
-            "HideURL *.JPG\n"
-            "HideURL *.png\n"
-            "HideURL *.PNG\n"
-            "HideURL *.ra\n"
-            "\n"
-            "IncludeURL *\n"
-            "\n"
-            "SearchEngine yahoo.com p=\n"
-            "SearchEngine altavista.com q=\n"
-            "SearchEngine google.com q=\n"
-            "SearchEngine eureka.com q=\n"
-            "SearchEngine lycos.com query=\n"
-            "SearchEngine hotbot.com MT=\n"
-            "SearchEngine msn.com MT=\n"
-            "SearchEngine infoseek.com qt=\n"
-            "SearchEngine webcrawler searchText=\n"
-            "SearchEngine excite search=\n"
-            "SearchEngine netscape.com search=\n"
-            "SearchEngine mamma.com query=\n"
-            "SearchEngine alltheweb.com query=\n"
-            "\n"
-            "DumpSites yes\n"
-        ) % context
+        context['webalizer_conf'] = textwrap.dedent("""\
+            # %(banner)s
+            LogFile %(site_logs)s
+            LogType clf
+            OutputDir %(webalizer_path)s
+            HistoryName webalizer.hist
+            Incremental yes
+            IncrementalName webalizer.current
+            ReportTitle Stats of
+            HostName %(site_name)s
+
+            PageType htm*
+            PageType php*
+            PageType shtml
+            PageType cgi
+            PageType pl
+
+            DNSCache /var/lib/dns_cache.db
+            DNSChildren 15
+
+            HideURL *.gif
+            HideURL *.GIF
+            HideURL *.jpg
+            HideURL *.JPG
+            HideURL *.png
+            HideURL *.PNG
+            HideURL *.ra
+
+            IncludeURL *
+
+            SearchEngine yahoo.com p=
+            SearchEngine altavista.com q=
+            SearchEngine google.com q=
+            SearchEngine eureka.com q=
+            SearchEngine lycos.com query=
+            SearchEngine hotbot.com MT=
+            SearchEngine msn.com MT=
+            SearchEngine infoseek.com qt=
+            SearchEngine webcrawler searchText=
+            SearchEngine excite search=
+            SearchEngine netscape.com search=
+            SearchEngine mamma.com query=
+            SearchEngine alltheweb.com query=
+
+            DumpSites yes""" % context
+        )
         return context

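Both rewritten methods lean on textwrap.dedent plus a backslash-continued first line to keep multi-line shell and config snippets readable inside the Python source. A standalone sketch of that pattern (the path value is made up):

# Sketch of the dedent pattern used above; the path is a made-up example.
import textwrap

context = {'webalizer_path': '/home/httpd/webalizer/example.org'}

script = textwrap.dedent("""\
    mkdir -p %(webalizer_path)s
    if [[ ! -e %(webalizer_path)s/index.html ]]; then
        echo 'Webstats are coming soon' > %(webalizer_path)s/index.html
    fi""" % context
)
print(script)
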
@@ -60,15 +60,23 @@ class Website(models.Model):
         if domain:
             return '%s://%s' % (self.protocol, domain)
 
-    def get_www_log_path(self):
-        context = {
+    def get_www_log_context(self):
+        return {
             'home': self.account.main_systemuser.get_home(),
             'account': self.account.username,
             'name': self.name,
             'unique_name': self.unique_name
         }
-        return settings.WEBSITES_WEBSITE_WWW_LOG_PATH % context
 
+    def get_www_access_log_path(self):
+        context = self.get_www_log_context()
+        path = settings.WEBSITES_WEBSITE_WWW_ACCESS_LOG_PATH % context
+        return path.replace('//', '/')
+
+    def get_www_error_log_path(self):
+        context = self.get_www_log_context()
+        path = settings.WEBSITES_WEBSITE_WWW_ERROR_LOG_PATH % context
+        return path.replace('//', '/')
 
 class WebsiteOption(models.Model):
     website = models.ForeignKey(Website, verbose_name=_("web site"),

@@ -90,9 +90,13 @@ WEBSITES_WEBALIZER_PATH = getattr(settings, 'WEBSITES_WEBALIZER_PATH',
         '/home/httpd/webalizer/')
 
 
-WEBSITES_WEBSITE_WWW_LOG_PATH = getattr(settings, 'WEBSITES_WEBSITE_WWW_LOG_PATH',
+WEBSITES_WEBSITE_WWW_ACCESS_LOG_PATH = getattr(settings, 'WEBSITES_WEBSITE_WWW_ACCESS_LOG_PATH',
         '/var/log/apache2/virtual/%(unique_name)s.log')
 
+
+WEBSITES_WEBSITE_WWW_ERROR_LOG_PATH = getattr(settings, 'WEBSITES_WEBSITE_WWW_ERROR_LOG_PATH',
+        '')
+
 
 WEBSITES_TRAFFIC_IGNORE_HOSTS = getattr(settings, 'WEBSITES_TRAFFIC_IGNORE_HOSTS',
         ('127.0.0.1',))