fucked up tests

Marc 2014-10-04 09:29:18 +00:00
parent 6a3e3f637c
commit 55f4b6e88c
11 changed files with 97 additions and 70 deletions

View file

@@ -43,10 +43,11 @@ class Bind9MasterDomainBackend(ServiceController):
self.delete_conf(context)
def delete_conf(self, context):
self.append('awk -v s=%(name)s \'BEGIN {'
' RS=""; s="zone \\""s"\\""'
'} $0!~s{ print $0"\\n" }\' %(conf_path)s > %(conf_path)s.tmp'
% context)
self.append(textwrap.dedent("""
awk -v s=%(name)s 'BEGIN {
RS=""; s="zone \""s"\""
} $0!~s{ print $0"\n" }' %(conf_path)s > %(conf_path)s.tmp""" % context
))
self.append('diff -I"^\s*//" %(conf_path)s.tmp %(conf_path)s || UPDATED=1' % context)
self.append('mv %(conf_path)s.tmp %(conf_path)s' % context)
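Note: the awk call above runs in paragraph mode (RS="") and drops the record whose header matches zone "<name>", writing everything else to a .tmp file. A rough Python sketch of the same filtering, for illustration only (paths and zone name are placeholders, not part of this commit):

    # Rough equivalent of the awk filter: read blank-line separated records
    # and rewrite the file without the record declaring the given zone.
    def remove_zone(conf_path, name):
        with open(conf_path) as conf:
            records = conf.read().split('\n\n')
        needle = 'zone "%s"' % name
        kept = [record for record in records if needle not in record]
        with open(conf_path + '.tmp', 'w') as tmp:
            tmp.write('\n\n'.join(kept) + '\n')

    remove_zone('/etc/bind/named.conf.local', 'example.com')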
@@ -62,13 +63,16 @@ class Bind9MasterDomainBackend(ServiceController):
servers.append(server.get_ip())
return servers
def get_slaves(self, domain):
return self.get_servers(domain, Bind9SlaveDomainBackend)
def get_context(self, domain):
context = {
'name': domain.name,
'zone_path': settings.DOMAINS_ZONE_PATH % {'name': domain.name},
'subdomains': domain.subdomains.all(),
'banner': self.get_banner(),
'slaves': '; '.join(self.get_servers(domain, Bind9SlaveDomainBackend)),
'slaves': '; '.join(self.get_slaves(domain)) or 'none',
}
context.update({
'conf_path': settings.DOMAINS_MASTERS_PATH,
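Note: the or 'none' fallback matters because joining an empty server list yields an empty string, which would leave the rendered value blank. A quick illustration (addresses are made up):

    # '; '.join() over an empty list returns '', so fall back to 'none'
    print('; '.join([]) or 'none')                        # none
    print('; '.join(['10.0.0.2', '10.0.0.3']) or 'none')  # 10.0.0.2; 10.0.0.3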
@@ -96,17 +100,20 @@ class Bind9SlaveDomainBackend(Bind9MasterDomainBackend):
def delete(self, domain):
context = self.get_context(domain)
self.delete_conf(context)
def commit(self):
""" ideally slave should be restarted after master """
self.append('[[ $UPDATED == 1 ]] && { sleep 1 && service bind9 reload; } &')
def get_masters(self, domain):
return self.get_servers(domain, Bind9MasterDomainBackend)
def get_context(self, domain):
context = {
'name': domain.name,
'banner': self.get_banner(),
'subdomains': domain.subdomains.all(),
'masters': '; '.join(self.get_servers(domain, Bind9MasterDomainBackend)),
'masters': '; '.join(self.get_masters(domain)) or 'none',
}
context.update({
'conf_path': settings.DOMAINS_SLAVES_PATH,

View file

@@ -1,33 +1,38 @@
import copy
from functools import partial
from .models import Domain, Record
def domain_for_validation(instance, records):
""" Create a fake zone in order to generate the whole zone file and check it """
"""
Since the new data is not yet in the database, we update it on the fly,
so when validation calls render_zone() it will use the newly provided data
"""
domain = copy.copy(instance)
if not domain.pk:
domain.top = domain.get_top()
def get_records():
for data in records:
yield Record(type=data['type'], value=data['value'])
domain.get_records = get_records
def get_top_subdomains(exclude=None):
subdomains = []
for subdomain in Domain.objects.filter(name__endswith='.%s' % domain.origin.name):
if exclude != subdomain.pk:
subdomain.top = domain
def get_subdomains(replace=None, make_top=False):
for subdomain in Domain.objects.filter(name__endswith='.%s' % domain.name):
if replace == subdomain.pk:
# domain is a subdomain, yield our copy
yield domain
else:
if make_top:
subdomain.top = domain
yield subdomain
domain.get_top_subdomains = get_top_subdomains
if not domain.pk:
# top domain lookup for new domains
domain.top = domain.get_top()
if domain.top:
subdomains = domain.get_top_subdomains(exclude=instance.pk)
domain.top.get_subdomains = lambda: list(subdomains) + [domain]
# is a subdomain
domain.top.get_subdomains = partial(get_subdomains, replace=domain.pk)
elif not domain.pk:
subdomains = []
for subdomain in Domain.objects.filter(name__endswith=domain.name):
subdomain.top = domain
subdomains.append(subdomain)
domain.get_subdomains = get_top_subdomains
# is top domain
domain.get_subdomains = partial(get_subdomains, make_top=True)
return domain
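Note: functools.partial here pre-binds the keyword arguments, so the resulting callable takes no arguments, matching the signature of the real get_subdomains(). A minimal standalone sketch (names and values are illustrative):

    from functools import partial

    def get_subdomains(replace=None, make_top=False):
        # stand-in for the generator defined in domain_for_validation
        return 'replace=%s make_top=%s' % (replace, make_top)

    as_top = partial(get_subdomains, make_top=True)   # top-domain case
    as_sub = partial(get_subdomains, replace=42)      # subdomain case
    print(as_top())  # replace=None make_top=True
    print(as_sub())  # replace=42 make_top=False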

View file

@@ -24,30 +24,35 @@ class Domain(models.Model):
@property
def origin(self):
# Do not cache
return self.top or self
@property
def is_top(self):
# Do not cache
# don't cache, don't replace by top_id
return not bool(self.top)
def get_records(self):
""" proxy method, needed for input validation, see helpers.domain_for_validation """
return self.records.all()
def get_top_subdomains(self):
def get_subdomains(self):
""" proxy method, needed for input validation, see helpers.domain_for_validation """
return self.origin.subdomains.all()
def get_subdomains(self):
""" proxy method, needed for input validation, see helpers.domain_for_validation """
return self.get_top_subdomains().filter(name__endswith=r'.%s' % self.name)
def get_top(self):
split = self.name.split('.')
top = None
for i in range(1, len(split)-1):
name = '.'.join(split[i:])
domain = Domain.objects.filter(name=name)
if domain:
top = domain.get()
return top
def render_zone(self):
origin = self.origin
zone = origin.render_records()
for subdomain in origin.get_top_subdomains():
for subdomain in origin.get_subdomains():
zone += subdomain.render_records()
return zone
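Note: get_top() checks every right-hand suffix of the name except the bare TLD (hence range(1, len(split)-1)) and keeps overwriting top, so the last existing candidate, i.e. the shortest registered ancestor, wins. A small sketch of the candidates it generates, with the ORM lookup left out:

    # Candidate ancestors checked for 'www.blog.example.com':
    name = 'www.blog.example.com'
    split = name.split('.')
    candidates = ['.'.join(split[i:]) for i in range(1, len(split) - 1)]
    print(candidates)  # ['blog.example.com', 'example.com']
    # the loop keeps the last match, so if both exist, top is 'example.com'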
@@ -134,16 +139,6 @@ class Domain(models.Model):
domain.top = self
domain.save(update_fields=['top'])
self.subdomains.update(account_id=self.account_id)
def get_top(self):
split = self.name.split('.')
top = None
for i in range(1, len(split)-1):
name = '.'.join(split[i:])
domain = Domain.objects.filter(name=name)
if domain:
top = domain.get()
return top
class Record(models.Model):

View file

@@ -71,7 +71,7 @@ class ServiceBackend(plugins.Plugin):
def get_banner(self):
time = timezone.now().strftime("%h %d, %Y %I:%M:%S")
return "Generated by Orchestra %s" % time
return "Generated by Orchestra at %s" % time
def execute(self, server):
from .models import BackendLog
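Note: %h is the abbreviated month name (an alias for %b on platforms whose C strftime supports it; it is not in Python's documented directive set), so the banner looks roughly like this (example timestamp):

    from datetime import datetime

    # illustrative only; the real code uses django.utils.timezone.now()
    time = datetime(2014, 10, 4, 9, 29, 18).strftime("%h %d, %Y %I:%M:%S")
    print("Generated by Orchestra at %s" % time)
    # Generated by Orchestra at Oct 04, 2014 09:29:18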

View file

@@ -37,9 +37,9 @@ def close_connection(execute):
def execute(operations):
""" generates and executes the operations on the servers """
router = import_class(settings.ORCHESTRATION_ROUTER)
# Generate scripts per server+backend
scripts = {}
cache = {}
# Generate scripts per server+backend
for operation in operations:
logger.debug("Queued %s" % str(operation))
servers = router.get_servers(operation, cache=cache)
@@ -50,6 +50,7 @@ def execute(operations):
scripts[key][0].prepare()
else:
scripts[key][1].append(operation)
# Get and call backend action method
method = getattr(scripts[key][0], operation.action)
method(operation.instance)
# Execute scripts on each server
@@ -67,12 +68,15 @@ def execute(operations):
executions.append((execute, operations))
[ thread.join() for thread in threads ]
logs = []
# collect results
for execution, operations in executions:
for operation in operations:
logger.info("Executed %s" % str(operation))
operation.log = execution.log
operation.save()
logger.debug(execution.log.stdout)
logger.debug(execution.log.stderr)
stdout = execution.log.stdout.strip()
stdout and logger.debug('STDOUT %s', stdout)
stderr = execution.log.stderr.strip()
stderr and logger.debug('STDERR %s', stderr)
logs.append(execution.log)
return logs
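Note: execute() batches operations by (server, backend) so each backend builds one script per server before anything is run. A simplified sketch of that bookkeeping, with the router and backend classes replaced by plain strings:

    # Simplified grouping: the real code keys on the server object and the
    # operation's backend class, and stores (backend_instance, operations).
    operations = [
        ('ns1', 'Bind9MasterDomainBackend', 'save', 'example.com'),
        ('ns1', 'Bind9MasterDomainBackend', 'save', 'example.org'),
        ('ns2', 'Bind9SlaveDomainBackend',  'save', 'example.com'),
    ]
    scripts = {}
    for server, backend, action, instance in operations:
        scripts.setdefault((server, backend), []).append((action, instance))
    for key, ops in scripts.items():
        print(key, ops)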

View file

@@ -1,5 +1,6 @@
import hashlib
import json
import logging
import os
import socket
import sys
@@ -11,13 +12,16 @@ from celery.datastructures import ExceptionInfo
from . import settings
logger = logging.getLogger(__name__)
def BashSSH(backend, log, server, cmds):
from .models import BackendLog
script = '\n'.join(['set -e', 'set -o pipefail'] + cmds + ['exit 0'])
script = script.replace('\r', '')
log.script = script
log.save(update_fields=['script'])
logger.debug('%s is going to be executed on %s' % (backend, server))
try:
# Avoid "Argument list too long" on large scripts by genereting a file
# and scping it to the remote server
@@ -30,15 +34,16 @@ def BashSSH(backend, log, server, cmds):
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
addr = server.get_address()
try:
ssh.connect(addr, username='root',
key_filename=settings.ORCHESTRATION_SSH_KEY_PATH)
ssh.connect(addr, username='root', key_filename=settings.ORCHESTRATION_SSH_KEY_PATH)
except socket.error:
logger.error('%s timed out on %s' % (backend, server))
log.state = BackendLog.TIMEOUT
log.save(update_fields=['state'])
return
transport = ssh.get_transport()
sftp = paramiko.SFTPClient.from_transport(transport)
sftp.put(path, "%s.remote" % path)
logger.debug('%s copied on %s' % (backend, server))
sftp.close()
os.remove(path)
@@ -55,6 +60,7 @@ def BashSSH(backend, log, server, cmds):
channel = transport.open_session()
channel.exec_command(cmd)
logger.debug('%s running on %s' % (backend, server))
if True: # TODO if not async
log.stdout += channel.makefile('rb', -1).read().decode('utf-8')
log.stderr += channel.makefile_stderr('rb', -1).read().decode('utf-8')
@@ -71,10 +77,12 @@ def BashSSH(backend, log, server, cmds):
break
log.exit_code = exit_code = channel.recv_exit_status()
log.state = BackendLog.SUCCESS if exit_code == 0 else BackendLog.FAILURE
logger.debug('%s execution state on %s is %s' % (backend, server, log.state))
channel.close()
ssh.close()
log.save()
except:
logger.error('Exception while executing %s on %s' % (backend, server))
log.state = BackendLog.ERROR
log.traceback = ExceptionInfo(sys.exc_info()).traceback
log.save()
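Note on the 'Argument list too long' comment: the script is written to a local temp file and copied over SFTP rather than passed inline on the command line. A minimal paramiko sketch of that idea (host, user and key path are placeholders, not values from this commit):

    import os
    import tempfile

    import paramiko

    script = '\n'.join(['set -e', 'set -o pipefail', 'echo hello', 'exit 0'])
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as handle:
        handle.write(script)

    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect('10.0.0.1', username='root', key_filename='/root/.ssh/id_rsa')
    sftp = ssh.open_sftp()
    sftp.put(path, '%s.remote' % path)
    sftp.close()
    os.remove(path)
    stdin, stdout, stderr = ssh.exec_command('bash %s.remote' % path)
    print(stdout.read().decode(), stderr.read().decode())
    ssh.close()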

View file

@@ -176,7 +176,10 @@ class Route(models.Model):
for route in cls.objects.filter(is_active=True, backend=backend.get_name()):
for action in backend.get_actions():
_key = (route.backend, action)
cache[_key] = [route]
try:
cache[_key].append(route)
except KeyError:
cache[_key] = [route]
routes = cache[key]
for route in routes:
if route.matches(operation.instance):
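Note: the try/except KeyError bookkeeping above could equally be written with dict.setdefault; a tiny standalone illustration:

    # Same grouping with dict.setdefault instead of try/except KeyError
    cache = {}
    for _key, route in [(('Bind9MasterDomainBackend', 'save'), 'route-1'),
                        (('Bind9MasterDomainBackend', 'save'), 'route-2')]:
        cache.setdefault(_key, []).append(route)
    print(cache)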
@@ -185,7 +188,9 @@
def matches(self, instance):
safe_locals = {
'instance': instance
'instance': instance,
'obj': instance,
instance._meta.model_name: instance,
}
return eval(self.match, safe_locals)
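Note: matches() exposes the instance under three names ('instance', 'obj' and its model name) before eval()ing the stored expression, which is what the updated test match strings rely on. A standalone sketch with a stub in place of a real model instance:

    # FakeRoute stands in for a model instance; 'route' mirrors what
    # instance._meta.model_name would resolve to for a Route object.
    class FakeRoute(object):
        backend = 'TestBackend'

    instance = FakeRoute()
    safe_locals = {
        'instance': instance,
        'obj': instance,
        'route': instance,
    }
    print(eval('route.backend == "TestBackend"', safe_locals))   # True
    print(eval('obj.backend == "something else"', safe_locals))  # False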

View file

@@ -1,8 +1,7 @@
from orchestra.utils.tests import BaseTestCase
from .. import operations, backends
from ..models import Route, Server
from ..utils import get_backend_choices
from .. import backends
from ..models import Route, Server, BackendOperation as Operation
class RouterTests(BaseTestCase):
@@ -18,25 +17,25 @@ class RouterTests(BaseTestCase):
def test_get_instances(self):
class TestBackend(backends.ServiceBackend):
class TestBackend(backends.ServiceController):
verbose_name = 'Route'
models = ['routes.Route',]
models = ['routes.Route']
def save(self, instance):
pass
choices = get_backend_choices(backends.ServiceBackend.get_backends())
choices = backends.ServiceBackend.get_plugin_choices()
Route._meta.get_field_by_name('backend')[0]._choices = choices
backend = TestBackend.get_name()
route = Route.objects.create(backend=backend, host=self.host,
match='True')
operation = operations.Operation(TestBackend, route, 'commit')
route = Route.objects.create(backend=backend, host=self.host, match='True')
operation = Operation(backend=TestBackend, instance=route, action='save')
self.assertEqual(1, len(Route.get_servers(operation)))
route = Route.objects.create(backend=backend, host=self.host1,
match='instance.backend == "TestBackend"')
operation = operations.Operation(TestBackend, route, 'commit')
match='route.backend == "%s"' % TestBackend.get_name())
self.assertEqual(2, len(Route.get_servers(operation)))
route = Route.objects.create(backend=backend, host=self.host2,
match='instance.backend == "something else"')
operation = operations.Operation(TestBackend, route, 'commit')
match='route.backend == "something else"')
self.assertEqual(2, len(Route.get_servers(operation)))

View file

@@ -46,7 +46,9 @@ class ServiceHandler(plugins.Plugin):
def matches(self, instance):
safe_locals = {
instance._meta.model_name: instance
'instance': instance,
'obj': instance,
instance._meta.model_name: instance,
}
return eval(self.match, safe_locals)

View file

@@ -90,6 +90,7 @@ class BaseLiveServerTestCase(AppDependencyMixin, LiveServerTestCase):
def setUp(self):
super(BaseLiveServerTestCase, self).setUp()
self.rest = Api(self.live_server_url + '/api/')
self.rest.enable_logging()
self.account = self.create_account(superuser=True)
def admin_login(self):

View file

@@ -1,5 +1,3 @@
apt-get install postfix
# http://www.postfix.org/VIRTUAL_README.html#virtual_mailbox
# https://help.ubuntu.com/community/PostfixVirtualMailBoxClamSmtpHowto
@@ -9,15 +7,17 @@ apt-get install postfix
apt-get install dovecot-core dovecot-imapd dovecot-pop3d dovecot-lmtpd dovecot-sieve
sed -i "s#^mail_location = mbox.*#mail_location = maildir:~/Maildir#" /etc/dovecot/conf.d/10-mail.conf
echo 'auth_username_format = %n' >> /etc/dovecot/conf.d/10-auth.conf
echo 'service lmtp {
echo 'mail_location = maildir:~/Maildir
mail_plugins = quota
auth_username_format = %n
service lmtp {
unix_listener /var/spool/postfix/private/dovecot-lmtp {
group = postfix
mode = 0600
user = postfix
}
}' >> /etc/dovecot/conf.d/10-master.conf
}' > /etc/dovecot/local.conf
cat > /etc/apt/sources.list.d/mailscanner.list << 'EOF'
@@ -38,6 +38,7 @@ echo 'mailbox_transport = lmtp:unix:private/dovecot-lmtp' >> /etc/postfix/main.c
/etc/init.d/dovecot restart
/etc/init.d/postfix restart