2015-07-16 13:07:15 +00:00
|
|
|
import copy
|
2015-03-31 12:39:08 +00:00
|
|
|
import datetime
|
2015-04-01 15:49:21 +00:00
|
|
|
import decimal
|
2015-03-31 12:39:08 +00:00
|
|
|
|
2015-07-16 13:07:15 +00:00
|
|
|
from dateutil.relativedelta import relativedelta
|
2015-03-31 12:39:08 +00:00
|
|
|
from django.utils import timezone
|
|
|
|
from django.utils.translation import ugettext_lazy as _
|
|
|
|
|
|
|
|
from orchestra import plugins
|
|
|
|
|
|
|
|
|
2015-04-08 14:41:09 +00:00
|
|
|
class Aggregation(plugins.Plugin, metaclass=plugins.PluginMount):
    """
    Base plugin that filters a monitored dataset and computes its usage.

    Subclasses define a concrete aggregation strategy (last value,
    monthly sum, monthly average, ...) by implementing the three
    hooks below.
    """

    def filter(self, dataset):
        """Return the subset of *dataset* relevant for the current period."""
        raise NotImplementedError

    def historic_filter(self, dataset):
        """Yield ``(date, dataset)`` tuples for resource data history reporting."""
        raise NotImplementedError

    def compute_usage(self, dataset):
        """Reduce *dataset* to a single usage value (avg, sum, ...)."""
        raise NotImplementedError
|
|
|
|
|
|
|
|
|
2015-04-08 14:41:09 +00:00
|
|
|
class Last(Aggregation):
    """ Sum of the last value of all monitors """
    name = 'last'
    verbose_name = _("Last value")

    def filter(self, dataset, date=None):
        """
        Keep only the most recent data point of each monitor.

        When *date* is given, only data created at or before *date* is
        considered, so the "last" value is relative to that moment.
        """
        dataset = dataset.order_by('object_id', '-id').distinct('monitor')
        if date is not None:
            dataset = dataset.filter(created_at__lte=date)
        return dataset

    def historic_filter(self, dataset):
        """
        Yield ``(date, dataset)`` pairs: the current state first, then one
        pair per month (at the first of the month), walking backwards until
        a month yields no data.
        """
        now = timezone.now()
        yield (now, self.filter(dataset))
        date = datetime.datetime(
            year=now.year,
            month=now.month,
            day=1,
            tzinfo=timezone.utc,
        )
        while True:
            dataset_copy = copy.copy(dataset)
            dataset_copy = self.filter(dataset_copy, date=date)
            try:
                # Cheap emptiness probe on the queryset
                dataset_copy[0]
            except IndexError:
                # PEP 479: raising StopIteration inside a generator becomes a
                # RuntimeError on Python 3.7+; a plain return ends iteration.
                return
            yield (date, dataset_copy)
            date -= relativedelta(months=1)

    def compute_usage(self, dataset):
        """Sum the dataset values; return None when the dataset is empty."""
        values = dataset.values_list('value', flat=True)
        if values:
            return sum(values)
        return None
|
|
|
|
|
|
|
|
|
|
|
|
class MonthlySum(Last):
    """ Monthly sum the values of all monitors """
    name = 'monthly-sum'
    verbose_name = _("Monthly Sum")

    def filter(self, dataset, date=None):
        """
        Keep only data points belonging to *date*'s month
        (the current month when *date* is None).
        """
        if date is None:
            date = timezone.now()
        return dataset.filter(
            created_at__year=date.year,
            created_at__month=date.month,
        )

    def historic_filter(self, dataset):
        """
        Yield ``(month-start, dataset)`` pairs, newest month first, walking
        backwards until a month yields no data.
        """
        now = timezone.now()
        date = datetime.datetime(
            year=now.year,
            month=now.month,
            day=1,
            tzinfo=timezone.utc,
        )
        while True:
            dataset_copy = copy.copy(dataset)
            dataset_copy = self.filter(dataset_copy, date=date)
            try:
                # Cheap emptiness probe on the queryset
                dataset_copy[0]
            except IndexError:
                # PEP 479: raising StopIteration inside a generator becomes a
                # RuntimeError on Python 3.7+; a plain return ends iteration.
                return
            yield (date, dataset_copy)
            date -= relativedelta(months=1)
|
2015-03-31 12:39:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
class MonthlyAvg(MonthlySum):
    """ sum of the monthly averages of each monitor """
    name = 'monthly-avg'
    verbose_name = _("Monthly AVG")

    def filter(self, dataset, date=None):
        """Month's data in chronological order (compute_usage relies on it)."""
        qs = super(MonthlyAvg, self).filter(dataset, date=date)
        return qs.order_by('created_at')

    def get_epoch(self, date=None):
        """Return the start of *date*'s month (current month by default)."""
        if date is None:
            date = timezone.now()
        return datetime.datetime(
            year=date.year,
            month=date.month,
            day=1,
            tzinfo=timezone.utc,
        )

    def compute_usage(self, dataset):
        """
        Sum of the per-monitor time-weighted averages.

        Each data point is weighted by the time elapsed since the previous
        point (starting at the epoch), relative to the total span between
        the epoch and the monitor's last point. Returns None when no
        monitor contributes any data.
        """
        result = 0
        has_result = False
        # Iterate per-monitor series; avoid shadowing the ``dataset`` argument.
        for monitor, datas in dataset.group_by('monitor').items():
            try:
                last = datas[-1]
            except IndexError:
                # Monitor without data points: nothing to average.
                continue
            epoch = self.get_epoch(date=last.created_at)
            total = (last.created_at-epoch).total_seconds()
            if not total:
                # Single sample taken exactly at the epoch: zero-length span,
                # skip instead of dividing by zero.
                continue
            ini = epoch
            for data in datas:
                has_result = True
                slot = (data.created_at-ini).total_seconds()
                # Decimal(str(...)) avoids float->Decimal precision artifacts
                result += data.value * decimal.Decimal(str(slot/total))
                ini = data.created_at
        if has_result:
            return result
        return None
|
2015-03-31 12:39:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Last10DaysAvg(MonthlyAvg):
    """ sum of the last 10 days averages of each monitor """
    name = 'last-10-days-avg'
    verbose_name = _("Last 10 days AVG")
    days = 10

    def get_epoch(self, date=None):
        """Sliding-window epoch: ``days`` days before *date* (now by default)."""
        reference = timezone.now() if date is None else date
        return reference - datetime.timedelta(days=self.days)

    def filter(self, dataset, date=None):
        """
        Chronologically ordered data inside the ``days``-long window that
        ends at *date* (or at the current time when *date* is None).
        """
        window_start = self.get_epoch(date=date)
        window = dataset.filter(created_at__gt=window_start).order_by('created_at')
        if date is not None:
            window = window.filter(created_at__lte=date)
        return window

    def historic_filter(self, dataset):
        """Yield the current window first, then the inherited monthly history."""
        yield (timezone.now(), self.filter(dataset))
        yield from super(Last10DaysAvg, self).historic_filter(dataset)
|