Rename async --> run_async

In Python 3.5, async became a reserved keyword.
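
For context, a minimal sketch of the breakage this rename avoids (hypothetical run() helper for illustration, not the project's real implementation; async is fully reserved from Python 3.7 on and already deprecated as an identifier in 3.6):

    # Old spelling: a keyword argument literally named `async`.
    # Once `async` is a reserved keyword this is a SyntaxError:
    #
    #     def run(command, async=False): ...
    #     run('uptime', async=True)

    # New spelling used throughout this commit:
    def run(command, run_async=False):
        # stand-in body; the real helper dispatches the command
        print('would execute %r (run_async=%s)' % (command, run_async))

    run('uptime', run_async=True)
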
parent 350d93f820
commit 9d2d0befc4

@@ -178,7 +178,7 @@ def fire_pending_tasks(manage, db):
         if is_due(now, minute, hour, day_of_week, day_of_month, month_of_year):
             command = 'python3 -W ignore::DeprecationWarning {manage} runtask {task_id}'.format(
                 manage=manage, task_id=task_id)
-            proc = run(command, async=True)
+            proc = run(command, run_async=True)
             yield proc

@@ -201,7 +201,7 @@ def fire_pending_messages(settings, db):
 
     if has_pending_messages(settings, db):
         command = 'python3 -W ignore::DeprecationWarning {manage} sendpendingmessages'.format(manage=manage)
-        proc = run(command, async=True)
+        proc = run(command, run_async=True)
         yield proc

@@ -39,10 +39,10 @@ class Operation():
         self.routes = routes
 
     @classmethod
-    def execute(cls, operations, serialize=False, async=None):
+    def execute(cls, operations, serialize=False, run_async=None):
         from . import manager
         scripts, backend_serialize = manager.generate(operations)
-        return manager.execute(scripts, serialize=(serialize or backend_serialize), async=async)
+        return manager.execute(scripts, serialize=(serialize or backend_serialize), run_async=run_async)
 
     @classmethod
     def create_for_action(cls, instances, action):

@@ -30,14 +30,14 @@ STATE_COLORS = {
 
 class RouteAdmin(ExtendedModelAdmin):
     list_display = (
-        'display_backend', 'host', 'match', 'display_model', 'display_actions', 'async',
+        'display_backend', 'host', 'match', 'display_model', 'display_actions', 'run_async',
         'is_active'
     )
-    list_editable = ('host', 'match', 'async', 'is_active')
-    list_filter = ('host', 'is_active', 'async', 'backend')
+    list_editable = ('host', 'match', 'run_async', 'is_active')
+    list_filter = ('host', 'is_active', 'run_async', 'backend')
     list_prefetch_related = ('host',)
     ordering = ('backend',)
-    add_fields = ('backend', 'host', 'match', 'async', 'is_active')
+    add_fields = ('backend', 'host', 'match', 'run_async', 'is_active')
     change_form = RouteForm
     actions = (orchestrate,)
     change_view_actions = actions

@@ -182,7 +182,7 @@ class ServiceBackend(plugins.Plugin, metaclass=ServiceMount):
         log = manager.create(backend=self.get_name(), state=state, server=server)
         return log
 
-    def execute(self, server, async=False, log=None):
+    def execute(self, server, run_async=False, log=None):
         from .models import BackendLog
         if log is None:
             log = self.create_log(server)

@@ -190,7 +190,7 @@ class ServiceBackend(plugins.Plugin, metaclass=ServiceMount):
         if run:
             scripts = self.scripts
             for method, commands in scripts:
-                method(log, server, commands, async)
+                method(log, server, commands, run_async)
                 if log.state != BackendLog.SUCCESS:
                     break
         return log

@@ -105,7 +105,7 @@ def get_backend_url(ids):
 
 def get_messages(logs):
     messages = []
-    total, successes, async = 0, 0, 0
+    total, successes, run_async = 0, 0, 0
     ids = []
     async_ids = []
     for log in logs:

@@ -118,17 +118,17 @@ def get_messages(logs):
         if log.is_success:
             successes += 1
         elif not log.has_finished:
-            async += 1
+            run_async += 1
             async_ids.append(log.id)
-    errors = total-successes-async
+    errors = total-successes-run_async
     url = get_backend_url(ids)
     async_url = get_backend_url(async_ids)
     async_msg = ''
-    if async:
+    if run_async:
         async_msg = ungettext(
             _('<a href="{async_url}">{name}</a> is running on the background'),
-            _('<a href="{async_url}">{async} backends</a> are running on the background'),
-            async)
+            _('<a href="{async_url}">{run_async} backends</a> are running on the background'),
+            run_async)
     if errors:
         if total == 1:
             msg = _('<a href="{url}">{name}</a> has fail to execute')

@@ -139,7 +139,7 @@ def get_messages(logs):
             errors)
         if async_msg:
             msg += ', ' + str(async_msg)
-        msg = msg.format(errors=errors, async=async, async_url=async_url, total=total, url=url,
+        msg = msg.format(errors=errors, run_async=run_async, async_url=async_url, total=total, url=url,
             name=log.backend)
         messages.append(('error', msg + '.'))
     elif successes:

@@ -158,12 +158,12 @@ def get_messages(logs):
             _('<a href="{url}">{total} backends</a> have been executed'),
             total)
         msg = msg.format(
-            total=total, url=url, async_url=async_url, async=async, successes=successes,
+            total=total, url=url, async_url=async_url, run_async=run_async, successes=successes,
             name=log.backend
         )
         messages.append(('success', msg + '.'))
     else:
-        msg = async_msg.format(url=url, async_url=async_url, async=async, name=log.backend)
+        msg = async_msg.format(url=url, async_url=async_url, run_async=run_async, name=log.backend)
         messages.append(('success', msg + '.'))
     return messages

@@ -116,7 +116,7 @@ class Command(BaseCommand):
         if not confirm("\n\nAre your sure to execute the previous scripts on %(servers)s (yes/no)? " % context):
             return
         if not dry:
-            logs = manager.execute(scripts, serialize=serialize, async=True)
+            logs = manager.execute(scripts, serialize=serialize, run_async=True)
             running = list(logs)
             stdout = 0
             stderr = 0

@@ -97,12 +97,12 @@ def generate(operations):
     return scripts, serialize
 
 
-def execute(scripts, serialize=False, async=None):
+def execute(scripts, serialize=False, run_async=None):
     """
     executes the operations on the servers
 
     serialize: execute one backend at a time
-    async: do not join threads (overrides route.async)
+    run_async: do not join threads (overrides route.run_async)
     """
     if settings.ORCHESTRATION_DISABLE_EXECUTION:
         logger.info('Orchestration execution is dissabled by ORCHESTRATION_DISABLE_EXECUTION.')

@@ -115,12 +115,12 @@ def execute(scripts, serialize=False, async=None):
         route, __, async_action = key
         backend, operations = value
         args = (route.host,)
-        if async is None:
-            is_async = not serialize and (route.async or async_action)
+        if run_async is None:
+            is_async = not serialize and (route.run_async or async_action)
         else:
-            is_async = not serialize and (async or async_action)
+            is_async = not serialize and (run_async or async_action)
         kwargs = {
-            'async': is_async,
+            'run_async': is_async,
         }
         # we clone the connection just in case we are isolated inside a transaction
         with db.clone(model=BackendLog) as handle:

@@ -17,7 +17,7 @@ from . import settings
 logger = logging.getLogger(__name__)
 
 
-def Paramiko(backend, log, server, cmds, async=False, paramiko_connections={}):
+def Paramiko(backend, log, server, cmds, run_async=False, paramiko_connections={}):
     """
     Executes cmds to remote server using Pramaiko
     """

@@ -55,7 +55,7 @@ def Paramiko(backend, log, server, cmds, async=False, paramiko_connections={}):
     channel.shutdown_write()
     # Log results
     logger.debug('%s running on %s' % (backend, server))
-    if async:
+    if run_async:
         second = False
         while True:
             # Non-blocking is the secret ingridient in the async sauce

@@ -97,7 +97,7 @@ def Paramiko(backend, log, server, cmds, async=False, paramiko_connections={}):
     channel.close()
 
 
-def OpenSSH(backend, log, server, cmds, async=False):
+def OpenSSH(backend, log, server, cmds, run_async=False):
     """
     Executes cmds to remote server using SSH with connection resuse for maximum performance
     """

@@ -110,9 +110,9 @@ def OpenSSH(backend, log, server, cmds, async=False):
         return
     try:
         ssh = sshrun(server.get_address(), script, executable=backend.script_executable,
-            persist=True, async=async, silent=True)
+            persist=True, run_async=run_async, silent=True)
         logger.debug('%s running on %s' % (backend, server))
-        if async:
+        if run_async:
             for state in ssh:
                 log.stdout += state.stdout.decode('utf8')
                 log.stderr += state.stderr.decode('utf8')

@@ -148,7 +148,7 @@ def SSH(*args, **kwargs):
     return method(*args, **kwargs)
 
 
-def Python(backend, log, server, cmds, async=False):
+def Python(backend, log, server, cmds, run_async=False):
     script = ''
     functions = set()
     for cmd in cmds:

@@ -170,7 +170,7 @@ def Python(backend, log, server, cmds, async=False):
             log.stdout += line + '\n'
         if result:
             log.stdout += '# Result: %s\n' % result
-        if async:
+        if run_async:
             log.save(update_fields=('stdout', 'updated_at'))
     except:
         log.exit_code = 1

@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.5 on 2021-03-30 10:49
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orchestration', '0008_auto_20190805_1134'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='route',
+            old_name='async',
+            new_name='run_async',
+        ),
+        migrations.AlterField(
+            model_name='route',
+            name='backend',
+            field=models.CharField(choices=[('Apache2Traffic', '[M] Apache 2 Traffic'), ('ApacheTrafficByName', '[M] ApacheTrafficByName'), ('DokuWikiMuTraffic', '[M] DokuWiki MU Traffic'), ('DovecotMaildirDisk', '[M] Dovecot Maildir size'), ('Exim4Traffic', '[M] Exim4 traffic'), ('MailmanSubscribers', '[M] Mailman subscribers'), ('MailmanTraffic', '[M] Mailman traffic'), ('MysqlDisk', '[M] MySQL disk'), ('PostfixMailscannerTraffic', '[M] Postfix-Mailscanner traffic'), ('ProxmoxOpenVZTraffic', '[M] ProxmoxOpenVZTraffic'), ('UNIXUserDisk', '[M] UNIX user disk'), ('VsFTPdTraffic', '[M] VsFTPd traffic'), ('WordpressMuTraffic', '[M] Wordpress MU Traffic'), ('NextCloudDiskQuota', '[M] nextCloud SaaS Disk Quota'), ('NextcloudTraffic', '[M] nextCloud SaaS Traffic'), ('OwnCloudDiskQuota', '[M] ownCloud SaaS Disk Quota'), ('OwncloudTraffic', '[M] ownCloud SaaS Traffic'), ('PhpListTraffic', '[M] phpList SaaS Traffic'), ('Apache2Controller', '[S] Apache 2'), ('BSCWController', '[S] BSCW SaaS'), ('Bind9MasterDomainController', '[S] Bind9 master domain'), ('Bind9SlaveDomainController', '[S] Bind9 slave domain'), ('DokuWikiMuController', '[S] DokuWiki multisite'), ('DrupalMuController', '[S] Drupal multisite'), ('GitLabSaaSController', '[S] GitLab SaaS'), ('LetsEncryptController', "[S] Let's encrypt!"), ('LxcController', '[S] LxcController'), ('AutoresponseController', '[S] Mail autoresponse'), ('MailmanController', '[S] Mailman'), ('MailmanVirtualDomainController', '[S] Mailman virtdomain-only'), ('MoodleController', '[S] Moodle'), ('MoodleWWWRootController', '[S] Moodle WWWRoot (required)'), ('MoodleMuController', '[S] Moodle multisite'), ('MySQLController', '[S] MySQL database'), ('MySQLUserController', '[S] MySQL user'), ('PHPController', '[S] PHP FPM/FCGID'), ('PostfixAddressController', '[S] Postfix address'), ('PostfixAddressVirtualDomainController', '[S] Postfix address virtdomain-only'), ('ProxmoxOVZ', '[S] ProxmoxOVZ'), ('uWSGIPythonController', '[S] Python uWSGI'), ('RoundcubeIdentityController', '[S] Roundcube Identity Controller'), ('StaticController', '[S] Static'), ('SymbolicLinkController', '[S] Symbolic link webapp'), ('UNIXUserMaildirController', '[S] UNIX maildir user'), ('UNIXUserController', '[S] UNIX user'), ('WebalizerAppController', '[S] Webalizer App'), ('WebalizerController', '[S] Webalizer Content'), ('WordPressForceSSLController', '[S] WordPress Force SSL'), ('WordPressURLController', '[S] WordPress URL'), ('WordPressController', '[S] Wordpress'), ('WordpressMuController', '[S] Wordpress multisite'), ('NextCloudController', '[S] nextCloud SaaS'), ('OwnCloudController', '[S] ownCloud SaaS'), ('PhpListSaaSController', '[S] phpList SaaS')], max_length=256, verbose_name='backend'),
+        ),
+    ]

@@ -203,7 +203,7 @@ class Route(models.Model):
     match = models.CharField(_("match"), max_length=256, blank=True, default='True',
         help_text=_("Python expression used for selecting the targe host, "
                     "<em>instance</em> referes to the current object."))
-    async = models.BooleanField(default=False,
+    run_async = models.BooleanField(default=False,
         help_text=_("Whether or not block the request/response cycle waitting this backend to "
                     "finish its execution. Usually you want slave servers to run asynchronously."))
     async_actions = MultiSelectField(max_length=256, blank=True,

@@ -6,9 +6,9 @@ def retrieve_state(servers):
     pings = []
     for server in servers:
        address = server.get_address()
-        ping = run('ping -c 1 -w 1 %s' % address, async=True)
+        ping = run('ping -c 1 -w 1 %s' % address, run_async=True)
         pings.append(ping)
-        uptime = sshrun(address, 'uptime', persist=True, async=True, options={'ConnectTimeout': 1})
+        uptime = sshrun(address, 'uptime', persist=True, run_async=True, options={'ConnectTimeout': 1})
         uptimes.append(uptime)
 
     state = {}

@@ -7,14 +7,14 @@ from django.utils.translation import ungettext, ugettext_lazy as _
 def run_monitor(modeladmin, request, queryset):
     """ Resource and ResourceData run monitors """
     referer = request.META.get('HTTP_REFERER')
-    async = modeladmin.model.monitor.__defaults__[0]
+    run_async = modeladmin.model.monitor.__defaults__[0]
     logs = set()
     for resource in queryset:
         rlogs = resource.monitor()
-        if not async:
+        if not run_async:
             logs = logs.union(set([str(log.pk) for log in rlogs]))
         modeladmin.log_change(request, resource, _("Run monitors"))
-    if async:
+    if run_async:
         num = len(queryset)
         # TODO listfilter by uuid: task.request.id + ?task_id__in=ids
         link = reverse('admin:djcelery_taskstate_changelist')

@@ -153,8 +153,8 @@ class Resource(models.Model):
     def get_verbose_name(self):
         return self.verbose_name or self.name
 
-    def monitor(self, async=True):
-        if async:
+    def monitor(self, run_async=True):
+        if run_async:
             return tasks.monitor.apply_async(self.pk)
         return tasks.monitor(self.pk)

@@ -229,9 +229,9 @@ class ResourceData(models.Model):
         self.content_object_repr = str(self.content_object)
         self.save(update_fields=('used', 'updated_at', 'content_object_repr'))
 
-    def monitor(self, async=False):
+    def monitor(self, run_async=False):
         ids = (self.object_id,)
-        if async:
+        if run_async:
             return tasks.monitor.delay(self.resource_id, ids=ids)
         return tasks.monitor(self.resource_id, ids=ids)

@@ -36,7 +36,7 @@ def monitor(resource_id, ids=None):
         for obj in model.objects.filter(**kwargs):
             op = Operation(backend, obj, Operation.MONITOR)
             monitorings.append(op)
-    logs += Operation.execute(monitorings, async=False)
+    logs += Operation.execute(monitorings, run_async=False)
 
     kwargs = {'id__in': ids} if ids else {}
     # Update used resources and trigger resource exceeded and revovery

@@ -23,11 +23,11 @@ def is_due(task, time=None):
     )
 
 
-def run_task(task, thread=True, process=False, async=False):
+def run_task(task, thread=True, process=False, run_async=False):
     args = json.loads(task.args)
     kwargs = json.loads(task.kwargs)
     task_fn = current_app.tasks.get(task.task)
-    if async:
+    if run_async:
         method = 'process' if process else 'thread'
         return apply_async(task_fn, method=method).apply_async(*args, **kwargs)
     return task_fn(*args, **kwargs)

@@ -38,6 +38,6 @@ def run():
     procs = []
     for task in PeriodicTask.objects.enabled().select_related('crontab'):
         if is_due(task, now):
-            proc = run_task(task, process=True, async=True)
+            proc = run_task(task, process=True, run_async=True)
             procs.append(proc)
     [proc.join() for proc in procs]

@@ -13,7 +13,7 @@ def get_name(fn):
 
 
 def run(method, *args, **kwargs):
-    async = kwargs.pop('async', True)
+    run_async = kwargs.pop('run_async', True)
     thread = threading.Thread(target=close_connection(method), args=args, kwargs=kwargs)
     thread = Process(target=close_connection(counter))
     thread.start()

@@ -151,10 +151,10 @@ def joinall(iterators, **kwargs):
     return results
 
 
-def run(command, display=False, valid_codes=(0,), silent=False, stdin=b'', async=False):
+def run(command, display=False, valid_codes=(0,), silent=False, stdin=b'', run_async=False):
     iterator = runiterator(command, display, stdin)
     next(iterator)
-    if async:
+    if run_async:
         return iterator
     return join(iterator, display=display, silent=silent, valid_codes=valid_codes)

@@ -240,4 +240,4 @@ class LockFile(object):
 
 def touch_wsgi(delay=0):
     from . import paths
-    run('{ sleep %i && touch %s/wsgi.py; } &' % (delay, paths.get_project_dir()), async=True)
+    run('{ sleep %i && touch %s/wsgi.py; } &' % (delay, paths.get_project_dir()), run_async=True)