Added resource old monitored data aggregation for space efficiency
This commit is contained in:
parent a8f4b17149
commit d8c529f936

27  TODO.md
@@ -429,7 +429,30 @@ Case

# Serie1

# Pangea post-create: lorena, you have not added the webalizer
# cleanup monitor data
# cleanup monitor data: helpers.functions into ServiceMonitor methods

# Add SPF record type

# OVZ TRAFFIC ACCOUNTING!!

# PHPlist cron, bounces and traffic (maybe specific mail script with sitename)
    'crontab': settings.SAAS_PHPLIST_CRONTAB.replace('$', '$$')
}
*/10 * * * * PHPLIST=%(php_list_path)s; export SITE="%(site)s"; php $PHPLIST/admin/index.php -c $PHPLIST/config/config.php -p processqueue > /dev/null
*/40 * * * * PHPLIST=%(php_list_path)s; export SITE="%(site)s"; php $PHPLIST/admin/index.php -c $PHPLIST/config/config.php -p processbounces > /dev/null

if settings.SAAS_PHPLIST_CRONTAB:
    self.append(textwrap.dedent("""
        # Configuring phpList crontabs
        if [[ ! $(crontab -l | grep "^%(site)s") ]]; then
            cat << EOF | crontab
            $(crontab -l)
            # %(banner)s
            %(crontab)s
            EOF
        fi

# use "su $user --shell /bin/bash" on backends for security : MKDIR -p...


# model.field.flatchoices
@@ -120,6 +120,7 @@ class MysqlDisk(ServiceMonitor):
    """
    model = 'databases.Database'
    verbose_name = _("MySQL disk")
    delete_old_equal_values = True

    def exceeded(self, db):
        if db.type != db.MYSQL:
@@ -259,6 +259,7 @@ class Record(models.Model):
    AAAA = 'AAAA'
    SRV = 'SRV'
    TXT = 'TXT'
    SPF = 'SPF'
    SOA = 'SOA'

    TYPE_CHOICES = (
@@ -269,6 +270,7 @@ class Record(models.Model):
        (AAAA, _("AAAA (IPv6 address)")),
        (SRV, "SRV"),
        (TXT, "TXT"),
        (SPF, "SPF"),
        (SOA, "SOA"),
    )

@@ -279,6 +281,7 @@ class Record(models.Model):
        AAAA: (validate_ipv6_address,),
        CNAME: (validators.validate_zone_label,),
        TXT: (validate_ascii, validators.validate_quoted_record),
        SPF: (validate_ascii, validators.validate_quoted_record),
        SRV: (validators.validate_srv_record,),
        SOA: (validators.validate_soa_record,),
    }
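For reference, the new SPF type reuses the TXT validators (ASCII, quoted value), so creating one should look like any other record. A minimal sketch, assuming the usual Orchestra domains app layout; the field names `domain`, `type` and `value` are assumptions, not taken from this diff:

# Hypothetical example: publish an SPF policy using the new record type.
# Import path and field names are assumptions based on typical Orchestra models.
from orchestra.contrib.domains.models import Domain, Record

domain = Domain.objects.get(name='example.org')
Record.objects.create(domain=domain, type=Record.SPF, value='"v=spf1 mx a -all"')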
@@ -228,6 +228,7 @@ class MailmanTraffic(ServiceMonitor):
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Mailman traffic")
    script_executable = '/usr/bin/python'
    monthly_sum_old_values = True
    doc_settings = (settings,
        ('LISTS_MAILMAN_POST_LOG_PATH',)
    )
@@ -332,6 +333,7 @@ class MailmanSubscribers(ServiceMonitor):
    """
    model = 'lists.List'
    verbose_name = _("Mailman subscribers")
    delete_old_equal_values = True

    def monitor(self, mail_list):
        context = self.get_context(mail_list)
@@ -24,10 +24,12 @@ class SieveFilteringMixin(object):
            context['box'] = box
            self.append(textwrap.dedent("""
                # Create %(box)s mailbox
                mkdir -p %(maildir)s/.%(box)s
                chown %(user)s:%(group)s %(maildir)s/.%(box)s
                su $user --shell /bin/bash << 'EOF'
                    mkdir -p "%(maildir)s/.%(box)s"
                EOF
                if [[ ! $(grep '%(box)s' %(maildir)s/subscriptions) ]]; then
                    echo '%(box)s' >> %(maildir)s/subscriptions
                    chown $user:$user %(maildir)s/subscriptions
                fi
                """) % context
            )
@@ -37,17 +39,18 @@ class SieveFilteringMixin(object):
            context['filtering'] = ('# %(banner)s\n' + content) % context
            self.append(textwrap.dedent("""\
                # Create and compile orchestra sieve filtering
                mkdir -p $(dirname '%(filtering_path)s')
                cat << 'EOF' > %(filtering_path)s
                %(filtering)s
                su $user --shell /bin/bash << 'EOF'
                    mkdir -p $(dirname "%(filtering_path)s")
                    cat << ' EOF' > %(filtering_path)s
                    %(filtering)s
                    EOF
                    sievec %(filtering_path)s
                EOF
                sievec %(filtering_path)s
                chown %(user)s:%(group)s %(filtering_path)s
                chown %(user)s:%(group)s %(filtering_cpath)s
                """) % context
            )
        else:
            self.append("echo '' > %(filtering_path)s" % context)
            self.append('chown $user:$group %(filtering_path)s' % context)


class UNIXUserMaildirBackend(SieveFilteringMixin, ServiceController):
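The recurring change in these mailbox backends is wrapping `mkdir`/`chown` style commands in a `su $user --shell /bin/bash` heredoc; the TODO above notes this is done for security, presumably so the directory is created with the mailbox owner's own privileges instead of root's. A simplified sketch of how such a fragment is assembled; the context values are made up and this is not the actual backend code:

import textwrap

# Hypothetical context; real backends build it from the mailbox/user objects.
context = {'maildir': '/home/alice/Maildir', 'box': 'Spam'}
script = textwrap.dedent("""
    su $user --shell /bin/bash << 'EOF'
        mkdir -p "%(maildir)s/.%(box)s"
    EOF
    """) % context
print(script)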
@@ -94,8 +97,9 @@ class UNIXUserMaildirBackend(SieveFilteringMixin, ServiceController):
        #unit_to_bytes(mailbox.resources.disk.unit)
        self.append(textwrap.dedent("""
            # Set Maildir quota for %(user)s
            mkdir -p %(maildir)s
            chown %(user)s:%(group)s %(maildir)s
            su $user --shell /bin/bash << 'EOF'
                mkdir -p %(maildir)s
            EOF
            if [[ ! -f %(maildir)s/maildirsize ]]; then
                echo "%(quota)iS" > %(maildir)s/maildirsize
                chown %(user)s:%(group)s %(maildir)s/maildirsize
@@ -110,7 +114,7 @@ class UNIXUserMaildirBackend(SieveFilteringMixin, ServiceController):
        self.append(textwrap.dedent("""
            nohup bash -c '{ sleep 2 && killall -u %(user)s -s KILL; }' &> /dev/null &
            killall -u %(user)s || true
            # Fucking postfix SASL caches credentials
            # Restart because of Postfix SASL caches credentials
            userdel %(user)s || true && RESTART_POSTFIX=1
            groupdel %(user)s || true""") % context
        )
@@ -141,7 +145,6 @@ class DovecotPostfixPasswdVirtualUserBackend(SieveFilteringMixin, ServiceController):

    verbose_name = _("Dovecot-Postfix virtualuser")
    model = 'mailboxes.Mailbox'
    # TODO related_models = ('resources__content_type') ?? needed for updating disk usage from resource.data

    def set_user(self, context):
        self.append(textwrap.dedent("""
@@ -394,6 +397,7 @@ class DovecotMaildirDisk(ServiceMonitor):
    model = 'mailboxes.Mailbox'
    resource = ServiceMonitor.DISK
    verbose_name = _("Dovecot Maildir size")
    delete_old_equal_values = True
    doc_settings = (settings,
        ('MAILBOXES_MAILDIRSIZE_PATH',)
    )
@@ -428,6 +432,7 @@ class PostfixMailscannerTraffic(ServiceMonitor):
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Postfix-Mailscanner traffic")
    script_executable = '/usr/bin/python'
    monthly_sum_old_values = True
    doc_settings = (settings,
        ('MAILBOXES_MAIL_LOG_PATH',)
    )
@@ -7,6 +7,8 @@ from django.utils.translation import ugettext_lazy as _

from orchestra.contrib.orchestration import ServiceBackend

from . import helpers


class ServiceMonitor(ServiceBackend):
    TRAFFIC = 'traffic'
@@ -16,6 +18,8 @@ class ServiceMonitor(ServiceBackend):
    # TODO UNITS
    actions = ('monitor', 'exceeded', 'recovery')
    abstract = True
    delete_old_equal_values = False
    monthly_sum_old_values = False

    @classmethod
    def get_plugins(cls):
@@ -81,3 +85,10 @@ class ServiceMonitor(ServiceBackend):
        log = super(ServiceMonitor, self).execute(*args, **kwargs)
        self.store(log)
        return log

    @classmethod
    def aggregate(cls, dataset):
        if cls.delete_old_equal_values:
            return helpers.delete_old_equal_values(dataset)
        elif cls.monthly_sum_old_values:
            return helpers.monthly_sum_old_values(dataset)
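These two new class flags are what the cleanup task (further down) relies on: `aggregate()` picks the matching helper per monitor and implicitly returns None for monitors that declare neither flag. A small sketch for inspecting the policy of every registered monitor, assuming a configured Orchestra/Django environment:

# Requires a configured Django settings module with Orchestra installed.
from orchestra.contrib.resources.backends import ServiceMonitor

for monitor in ServiceMonitor.get_plugins():
    if monitor.delete_old_equal_values:
        policy = 'drop middle values of (nearly) equal runs'
    elif monitor.monthly_sum_old_values:
        policy = 'collapse each past month into one summed value'
    else:
        policy = 'no aggregation of old data'
    print(monitor.get_name(), '->', policy)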
@@ -1,3 +1,5 @@
import decimal

from django.template.defaultfilters import date as date_format


@@ -79,3 +81,54 @@ def get_history_data(queryset):
    else:
        result = [resource[0] for resource in resources.values()]
    return result


def delete_old_equal_values(dataset):
    """ only the first and last values of an equal series (+-error) are kept """
    prev_value = None
    prev_key = None
    delete_count = 0
    error = decimal.Decimal('0.005')
    third = False
    for mdata in dataset.order_by('content_type_id', 'object_id', 'created_at'):
        key = (mdata.content_type_id, mdata.object_id)
        if prev_key == key:
            if prev_value is not None and mdata.value*(1-error) < prev_value < mdata.value*(1+error):
                if third:
                    prev.delete()
                    delete_count += 1
                else:
                    third = True
            else:
                third = False
                prev_value = mdata.value
                prev_key = key
        else:
            prev_value = None
            prev_key = key
        prev = mdata
    return delete_count


def monthly_sum_old_values(dataset):
    aggregated = 0
    prev_key = None
    prev = None
    to_delete = []
    delete_count = 0
    for mdata in dataset.order_by('content_type_id', 'object_id', 'created_at'):
        key = (mdata.content_type_id, mdata.object_id, mdata.created_at.year, mdata.created_at.month)
        if prev_key is not None and prev_key != key:
            if prev.value != aggregated:
                prev.value = aggregated
                prev.save(update_fields=('value',))
            for obj in to_delete[:-1]:
                obj.delete()
                delete_count += 1
            aggregated = 0
            to_delete = []
        prev = mdata
        prev_key = key
        aggregated += mdata.value
        to_delete.append(mdata)
    return delete_count
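To make the two policies concrete: for a flat disk-usage series, delete_old_equal_values keeps only the endpoints of each run of (nearly) equal readings, while monthly_sum_old_values leaves one row per past month holding that month's total. A plain-Python sketch of the monthly policy on made-up numbers; it mirrors the intent of the helper above rather than calling it, so no ORM is needed:

from collections import OrderedDict

# Hypothetical old traffic readings for one monitored object: (year, month, value).
samples = [
    (2015, 3, 10), (2015, 3, 25), (2015, 3, 5),   # March: three readings
    (2015, 4, 40), (2015, 4, 2),                  # April: two readings
]

# One surviving row per month carries the month's sum; the rest would be deleted.
per_month = OrderedDict()
for year, month, value in samples:
    per_month[(year, month)] = per_month.get((year, month), 0) + value

print(dict(per_month))                                  # {(2015, 3): 40, (2015, 4): 42}
print(len(samples) - len(per_month), 'rows deleted')    # 3 rows deleted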
6  orchestra/contrib/resources/settings.py  (new file)
@@ -0,0 +1,6 @@
from orchestra.contrib.settings import Setting


RESOURCES_OLD_MONITOR_DATA_DAYS = Setting('RESOURCES_OLD_MONITOR_DATA_DAYS',
    40,
)
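RESOURCES_OLD_MONITOR_DATA_DAYS follows the usual Orchestra Setting pattern, so the 40-day retention window can presumably be overridden from the deployment's Django settings module; the 90-day value below is only an example:

# Hypothetical override in the project's settings.py:
# keep raw monitor data for 90 days before it becomes eligible for aggregation.
RESOURCES_OLD_MONITOR_DATA_DAYS = 90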
@@ -1,8 +1,15 @@
import datetime

from celery.task.schedules import crontab
from django.db import transaction
from django.utils import timezone

from orchestra.contrib.orchestration import Operation
from orchestra.contrib.tasks import task
from orchestra.contrib.tasks import task, periodic_task
from orchestra.models.utils import get_model_field_path
from orchestra.utils.sys import LockFile

from . import settings
from .backends import ServiceMonitor


@@ -49,3 +56,22 @@ def monitor(resource_id, ids=None):
        triggers.append(op)
    Operation.execute(triggers)
    return logs


@periodic_task(run_every=crontab(hour=2, minute=30), name='resources.cleanup_old_monitors')
@transaction.atomic
def cleanup_old_monitors(queryset=None):
    if queryset is None:
        from .models import MonitorData
        queryset = MonitorData.objects.filter()
    delta = datetime.timedelta(days=settings.RESOURCES_OLD_MONITOR_DATA_DAYS)
    threshold = timezone.now() - delta
    queryset = queryset.filter(created_at__lt=threshold)
    delete_counts = []
    for monitor in ServiceMonitor.get_plugins():
        dataset = queryset.filter(monitor=monitor)
        delete_count = monitor.aggregate(dataset)
        delete_counts.append(
            (monitor.get_name(), delete_count)
        )
    return delete_counts
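Besides the nightly 02:30 schedule, the task accepts an optional queryset, so it can presumably also be run by hand against a subset of the data. A sketch from a Django shell; the filter value assumes MonitorData.monitor stores the backend name, which is not shown in this diff:

# Hypothetical manual run from `manage.py shell`; only Apache traffic rows are considered.
from orchestra.contrib.resources.models import MonitorData
from orchestra.contrib.resources.tasks import cleanup_old_monitors

qs = MonitorData.objects.filter(monitor='Apache2Traffic')
result = cleanup_old_monitors(queryset=qs)
print(result)   # a list of (monitor_name, deleted_count) pairs, one per registered monitor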
@@ -29,6 +29,9 @@
        resource['aggregation'] +
        (div.indexOf('aggregate') > 0 ? ' (aggregated)': '')
    },
    xAxis: {
        ordinal: false
    },
    yAxis: {
        labels: {
            formatter: function () {
@@ -101,12 +101,11 @@ class UNIXUserBackend(ServiceController):
        context = self.get_context(user)
        context.update({
            'perm_action': user.set_perm_action,
            'perm_home': user.set_perm_base_home,
            'perm_to': os.path.join(user.set_perm_base_home, user.set_perm_home_extension),
        })
        exclude_acl = []
        for exclude in settings.SYSTEMUSERS_FORBIDDEN_PATHS:
            context['exclude_acl'] = os.path.join(context['perm_home'], exclude)
            context['exclude_acl'] = os.path.join(user.set_perm_base_home, exclude)
            exclude_acl.append('-not -path "%(exclude_acl)s"' % context)
        context['exclude_acl'] = ' \\\n -a '.join(exclude_acl) if exclude_acl else ''
        if user.set_perm_perms == 'rw':
@@ -115,10 +114,32 @@ class UNIXUserBackend(ServiceController):
            context['perm_perms'] = 'r-x' if user.set_perm_action == 'grant' else '-wx'
        elif user.set_perm_perms == 'w':
            context['perm_perms'] = '-wx' if user.set_perm_action == 'grant' else 'r-x'
        # Access paths
        head = user.set_perm_base_home
        relative = ''
        access_paths = ["'%s'" % head]
        import fnmatch
        for tail in user.set_perm_home_extension.split(os.sep)[:-1]:
            relative = os.path.join(relative, tail)
            for exclude in settings.SYSTEMUSERS_FORBIDDEN_PATHS:
                if fnmatch.fnmatch(relative, exclude):
                    break
            else:
                # No match
                head = os.path.join(head, tail)
                access_paths.append("'%s'" % head)
        context['access_paths'] = ' '.join(access_paths)
        if user.set_perm_action == 'grant':
            self.append(textwrap.dedent("""\
                # Home access
                setfacl -m u:%(user)s:--x '%(perm_home)s'
                # Grant execution permissions to every parent directory
                for access_path in %(access_paths)s; do
                    # Preserve existing ACLs
                    acl=$(getfacl -a "$access_path" | grep '^user:%(user)s:') && {
                        perms=$(echo "$acl" | cut -d':' -f3)
                        perms=$(echo "$perms" | cut -c 1,2)x
                        setfacl -m u:%(user)s:$perms "$access_path"
                    } || setfacl -m u:%(user)s:--x "$access_path"
                done
                # Grant perms to existing and future files
                find '%(perm_to)s' %(exclude_acl)s \\
                    -exec setfacl -m u:%(user)s:%(perm_perms)s {} \\;
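The new access-path computation walks the parents of the granted directory and stops at the first component matching a forbidden glob, so execution permission is never granted across a forbidden path. A standalone sketch of that walk with made-up values; the real backend reads them from the user object and SYSTEMUSERS_FORBIDDEN_PATHS:

import fnmatch
import os

# Hypothetical inputs mirroring the backend's context.
base_home = '/home/alice'                      # user.set_perm_base_home
home_extension = 'webapps/blog/media'          # user.set_perm_home_extension
forbidden = ('webapps/*/private', 'logs*')     # settings.SYSTEMUSERS_FORBIDDEN_PATHS

head = base_home
relative = ''
access_paths = ["'%s'" % head]
for tail in home_extension.split(os.sep)[:-1]:
    relative = os.path.join(relative, tail)
    if any(fnmatch.fnmatch(relative, pattern) for pattern in forbidden):
        break
    head = os.path.join(head, tail)
    access_paths.append("'%s'" % head)

# Passed to the script as %(access_paths)s:
print(' '.join(access_paths))   # '/home/alice' '/home/alice/webapps' '/home/alice/webapps/blog'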
@@ -182,6 +203,7 @@ class UNIXUserDisk(ServiceMonitor):
    model = 'systemusers.SystemUser'
    resource = ServiceMonitor.DISK
    verbose_name = _('UNIX user disk')
    delete_old_equal_values = True

    def prepare(self):
        super(UNIXUserDisk, self).prepare()
@@ -211,6 +233,7 @@ class Exim4Traffic(ServiceMonitor):
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Exim4 traffic")
    script_executable = '/usr/bin/python'
    monthly_sum_old_values = True
    doc_settings = (settings,
        ('SYSTEMUSERS_MAIL_LOG_PATH',)
    )
@@ -296,6 +319,7 @@ class VsFTPdTraffic(ServiceMonitor):
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _('VsFTPd traffic')
    script_executable = '/usr/bin/python'
    monthly_sum_old_values = True
    doc_settings = (settings,
        ('SYSTEMUSERS_FTP_LOG_PATH',)
    )
@@ -8,6 +8,7 @@ class OpenVZTraffic(ServiceMonitor):
    """
    model = 'vps.VPS'
    resource = ServiceMonitor.TRAFFIC
    monthly_sum_old_values = True

    def process(self, line):
        """ diff with last stored value """
@@ -420,9 +420,11 @@ class Apache2Traffic(ServiceMonitor):
    model = 'websites.Website'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Apache 2 Traffic")
    monthly_sum_old_values = True
    doc_settings = (settings,
        ('WEBSITES_TRAFFIC_IGNORE_HOSTS',)
    )

    def prepare(self):
        super(Apache2Traffic, self).prepare()
        ignore_hosts = '\\|'.join(settings.WEBSITES_TRAFFIC_IGNORE_HOSTS)