Implemented queryset.group_by

This commit is contained in:
Marc 2014-07-25 13:27:31 +00:00
parent b88689864f
commit 800ee58d2d
5 changed files with 80 additions and 33 deletions

View File

@@ -3,7 +3,7 @@ import inspect
from orchestra.apps.accounts.models import Account
def search_for_related(origin, max_depth=2):
def get_related_objects(origin, max_depth=2):
"""
Introspects the origin object and returns the first related service object

View File

@@ -11,11 +11,11 @@ from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from orchestra.core import caches, services
from orchestra.models import queryset
from orchestra.utils.apps import autodiscover
from . import settings
from . import settings, helpers
from .handlers import ServiceHandler
from .helpers import search_for_related
autodiscover('handlers')
@@ -175,7 +175,7 @@ class Service(models.Model):
return services
# FIXME sometimes caching is nasty, do we really have to? Make get_plugin more efficient?
@cached_property
# @cached_property
def handler(self):
""" Accessor of this service handler instance """
if self.handler_type:
@@ -214,6 +214,19 @@ class Service(models.Model):
class OrderQuerySet(models.QuerySet):
    group_by = queryset.group_by

    def bill(self, **options):
        for account, services in self.group_by('account_id', 'service_id'):
            bill_lines = []
            for service, orders in services:
                lines = helpers.create_bill_lines(service, orders, **options)
                bill_lines.extend(lines)
            helpers.create_bills(account, bill_lines)

    def get_related(self):
        pass

    def by_object(self, obj, **kwargs):
        ct = ContentType.objects.get_for_model(obj)
        return self.filter(object_id=obj.pk, content_type=ct, **kwargs)
@@ -322,6 +335,6 @@ def update_orders(sender, **kwargs):
if instance.pk:
# post_save
Order.update_orders(instance)
related = search_for_related(instance)
related = helpers.get_related_objects(instance)
if related:
Order.update_orders(related)
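For reference, the bill() method added above walks a two-level grouping: the outer key is the raw account_id value and the inner key is the raw service_id value, since group_by() resolves each field with getattr() on the order. A rough sketch of the structure it iterates, with made-up ids:

Order.objects.group_by('account_id', 'service_id')
# -> (1, [(10, [<Order: 1>, <Order: 2>]),
#         (20, [<Order: 3>])])
# -> (2, [(10, [<Order: 4>])])
# bill() collects helpers.create_bill_lines() output per service group and
# passes the accumulated lines to helpers.create_bills() once per account.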

View File

@@ -129,7 +129,7 @@ def resource_inline_factory(resources):
if not running_syncdb():
# not run during syncdb
for resources in Resource.group_by_content_type():
for ct, resources in Resource.objects.group_by('content_type'):
inline = resource_inline_factory(resources)
model = resources[0].content_type.model_class()
model = ct.model_class()
insertattr(model, 'inlines', inline)
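The admin registration loop above relies on the same helper: Resource.objects.group_by('content_type') yields one (ContentType, [Resource, ...]) pair per content type, so ct.model_class() can be called directly on the group key. A rough sketch with assumed resource names:

Resource.objects.group_by('content_type')
# -> (<ContentType: database>, [<Resource: disk>, <Resource: traffic>])
# -> (<ContentType: system user>, [<Resource: disk>])
# Each group gets its own generated inline, attached to ct.model_class().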

View File

@@ -5,13 +5,17 @@ from django.db import models
from django.utils.translation import ugettext_lazy as _
from djcelery.models import PeriodicTask, CrontabSchedule
from orchestra.models.fields import MultiSelectField
from orchestra.models import queryset, fields
from orchestra.utils.functional import cached
from . import helpers
from .backends import ServiceMonitor
class ResourceQuerySet(models.QuerySet):
    group_by = queryset.group_by


class Resource(models.Model):
    """
    Defines a resource, a resource is basically an interpretation of data
@@ -58,11 +62,13 @@ class Resource(models.Model):
null=True, blank=True,
help_text=_("Crontab for periodic execution. "
"Leave it empty to disable periodic monitoring"))
monitors = MultiSelectField(_("monitors"), max_length=256, blank=True,
monitors = fields.MultiSelectField(_("monitors"), max_length=256, blank=True,
choices=ServiceMonitor.get_plugin_choices(),
help_text=_("Monitor backends used for monitoring this resource."))
is_active = models.BooleanField(_("is active"), default=True)
objects = ResourceQuerySet.as_manager()
class Meta:
unique_together = (
('name', 'content_type'),
@@ -80,8 +86,12 @@ class Resource(models.Model):
task = PeriodicTask.objects.get(name=name)
except PeriodicTask.DoesNotExist:
if self.is_active:
PeriodicTask.objects.create(name=name, task='resources.Monitor',
args=[self.pk], crontab=self.crontab)
PeriodicTask.objects.create(
name=name,
task='resources.Monitor',
args=[self.pk],
crontab=self.crontab
)
else:
if not self.is_active:
task.delete()
@@ -92,25 +102,11 @@ class Resource(models.Model):
def delete(self, *args, **kwargs):
super(Resource, self).delete(*args, **kwargs)
name = 'monitor.%s' % str(self)
PeriodicTask.objects.filter(name=name, task='resources.Monitor',
args=[self.pk]).delete()
@classmethod
def group_by_content_type(cls):
prev = None
group = []
resources = cls.objects.filter(is_active=True).order_by('content_type')
for resource in resources:
ct = resource.content_type
if prev != ct:
if group:
yield group
group = [resource]
else:
group.append(resource)
prev = ct
if group:
yield group
PeriodicTask.objects.filter(
name=name,
task='resources.Monitor',
args=[self.pk]
).delete()
class ResourceData(models.Model):
@@ -181,7 +177,7 @@ def create_resource_relation():
return self
relation = GenericRelation('resources.ResourceData')
for resources in Resource.group_by_content_type():
model = resources[0].content_type.model_class()
for ct, resources in Resource.objects.group_by('content_type'):
model = ct.model_class()
model.add_to_class('resource_set', relation)
model.resources = ResourceHandler()

View File

@@ -0,0 +1,38 @@
def group_by(qset, *fields):
    """ group_by iterator with support for multiple nested fields """
    def nest(objects, ix):
        """ Group an already sorted list of objects by fields[ix] (and deeper) """
        objs = []
        result = []
        first = True
        for obj in objects:
            current = getattr(obj, fields[ix])
            if first or current == previous:
                objs.append(obj)
            else:
                # key changed: close the previous group, nesting deeper if needed
                if ix < len(fields)-1:
                    objs = nest(list(objs), ix+1)
                result.append((previous, objs))
                objs = [obj]
            previous = current
            first = False
        # flush the last group
        if ix < len(fields)-1:
            objs = nest(list(objs), ix+1)
        result.append((current, objs))
        return result

    objs = []
    first = True
    for obj in qset.order_by(*fields):
        current = getattr(obj, fields[0])
        if first or current == previous:
            objs.append(obj)
        else:
            # key changed: yield the group collected for the previous value
            if len(fields) > 1:
                objs = nest(objs, 1)
            yield previous, objs
            objs = [obj]
        previous = current
        first = False
    # flush the last group; skip when the queryset was empty
    if not first:
        if len(fields) > 1:
            objs = nest(objs, 1)
        yield current, objs
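
A standalone way to see the shape this iterator produces, using a stand-in object with an order_by() method rather than a real Django queryset (assuming group_by is imported from this module; all names and values below are illustrative):

from operator import attrgetter
from types import SimpleNamespace

class FakeQuerySet(list):
    """ Just enough of the queryset API (order_by) for group_by() """
    def order_by(self, *fields):
        return sorted(self, key=attrgetter(*fields))

orders = FakeQuerySet([
    SimpleNamespace(pk=1, account_id=1, service_id=10),
    SimpleNamespace(pk=2, account_id=1, service_id=10),
    SimpleNamespace(pk=3, account_id=1, service_id=20),
    SimpleNamespace(pk=4, account_id=2, service_id=10),
])

for account_id, services in group_by(orders, 'account_id', 'service_id'):
    for service_id, objs in services:
        print(account_id, service_id, [o.pk for o in objs])
# 1 10 [1, 2]
# 1 20 [3]
# 2 10 [4]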