Reimplemented traffic monitors, order of magnitude faster
parent 7711988a63
commit 2d3e925c36
TODO.md | 20
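The idea behind the speedup: the old monitors appended one bash `monitor` invocation per object, so every list, mailbox, and system user triggered its own pass over the full log with grep/awk; the new monitors are Python scripts that register all objects first and then read each log file exactly once. A minimal sketch of that shared pattern (all names illustrative; `parse()` stands in for the per-service line parsing):

    # Sketch of the single-pass pattern the new monitors share (names hypothetical).
    users = {}  # username -> [ini_date, object_id, accumulated_size]

    def prepare(object_id, username, ini_date):
        # One call per monitored object, appended by monitor(self, obj).
        users[username] = [ini_date, object_id, 0]

    def parse(line):
        # Hypothetical stand-in for the per-service line parsing.
        username, date, size = line.split()
        return username, int(date), int(size)

    def monitor(users, end_date, lines):
        # The log is read exactly once, whatever the number of objects.
        for line in lines:
            username, date, size = parse(line)
            user = users.get(username)
            if user and user[0] < date < end_date:
                user[2] += size
        for ini_date, object_id, size in users.values():
            print(object_id, size)

    prepare(42, 'alice', 20140501000000)
    monitor(users, 20140601000000, ['alice 20140519171322 2048'])  # prints: 42 2048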
@@ -215,3 +215,23 @@ Php binaries should have this format: /usr/bin/php5.2-cgi
* show detail data on webapp changelist
* lock resource monitoring
* Optimize backends like the mail backend (single read of log files); single "/var/log/vsftpd.log{,.1}" read for FTP traffic
* -ExecCGI in common CMS upload locations /wp-upload/upload/uploads
* CGI user / prevent shell access
* merge php wrapper configuration to optimize process classes
* prevent stderr when a user already exists on the backend, i.e. mysql user create
* disable anonymized list options (mailman)
* webapps directory protection and disable ExecCGI
* php-fpm disable ExecCGI
* SuexecUserGroup needs to be per app, otherwise the wrapper/fpm user can't be correct
@@ -155,22 +155,25 @@ class MailmanBackend(ServiceController):
        return context


class MailmanTraffic(ServiceMonitor):
class MailmanTrafficBash(ServiceMonitor):
    model = 'lists.List'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Mailman traffic")
    verbose_name = _("Mailman traffic (Bash)")
    
    def prepare(self):
        super(MailmanTraffic, self).prepare()
        current_date = self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z")
        context = {
            'mailman_log': '%s{,.1}' % settings.LISTS_MAILMAN_POST_LOG_PATH,
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
        }
        self.append(textwrap.dedent("""\
            function monitor () {
                OBJECT_ID=$1
                # Date conversions are done server-side because of timezone discrepancies
                INI_DATE=$(date "+%%Y%%m%%d%%H%%M%%S" -d "$2")
                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%s')
                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%(current_date)s')
                LIST_NAME="$3"
                MAILMAN_LOG="$4"
                MAILMAN_LOG=%(mailman_log)s
                
                SUBSCRIBERS=$(list_members ${LIST_NAME} | wc -l)
                {
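A detail both the bash and the Python implementations rely on: timestamps are compared as zero-padded `YYYYMMDDHHMMSS` integers (INI_DATE and END_DATE above), which order exactly like the datetimes they encode:

    from datetime import datetime

    # 'YYYYMMDDHHMMSS' integers sort the same way as the timestamps themselves.
    ini = int(datetime(2014, 5, 1).strftime('%Y%m%d%H%M%S'))
    end = int(datetime(2014, 6, 1).strftime('%Y%m%d%H%M%S'))
    print(ini < 20140519171322 < end)  # True: 2014-05-19 17:13:22 falls within May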
@@ -203,17 +206,115 @@ class MailmanTraffic(ServiceMonitor):
                    print sum * subs
                }' || [[ $? == 1 ]] && true
            } | xargs echo ${OBJECT_ID}
            }""") % current_date)
            }""") % context)
    
    def monitor(self, mail_list):
        context = self.get_context(mail_list)
        self.append(
            'monitor %(object_id)i "%(last_date)s" "%(list_name)s" %(mailman_log)s{,.1}' % context
            'monitor %(object_id)i "%(last_date)s" "%(list_name)s"' % context
        )
    
    def get_context(self, mail_list):
        return {
            'mailman_log': settings.LISTS_MAILMAN_POST_LOG_PATH,
            'list_name': mail_list.name,
            'object_id': mail_list.pk,
            'last_date': self.get_last_date(mail_list.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
        }


class MailmanTraffic(ServiceMonitor):
    model = 'lists.List'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Mailman traffic")
    script_executable = '/usr/bin/python'
    
    def prepare(self):
        postlog = settings.LISTS_MAILMAN_POST_LOG_PATH
        context = {
            'postlogs': str((postlog, postlog+'.1')),
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
        }
        self.append(textwrap.dedent("""\
            import re
            import subprocess
            import sys
            from datetime import datetime
            from dateutil import tz
            
            def to_local_timezone(date, tzlocal=tz.tzlocal()):
                date = datetime.strptime(date, '%Y-%m-%d %H:%M:%S %Z')
                date = date.replace(tzinfo=tz.tzutc())
                date = date.astimezone(tzlocal)
                return date
            
            postlogs = {postlogs}
            # Use local timezone
            end_date = to_local_timezone('{current_date}')
            end_date = int(end_date.strftime('%Y%m%d%H%M%S'))
            lists = {{}}
            months = {{
                'Jan': '01',
                'Feb': '02',
                'Mar': '03',
                'Apr': '04',
                'May': '05',
                'Jun': '06',
                'Jul': '07',
                'Aug': '08',
                'Sep': '09',
                'Oct': '10',
                'Nov': '11',
                'Dec': '12',
            }}
            
            def prepare(object_id, list_name, ini_date):
                global lists
                ini_date = to_local_timezone(ini_date)
                ini_date = int(ini_date.strftime('%Y%m%d%H%M%S'))
                lists[list_name] = [ini_date, object_id, 0]
            
            def monitor(lists, end_date, months, postlogs):
                for postlog in postlogs:
                    try:
                        with open(postlog, 'r') as postlog:
                            for line in postlog.readlines():
                                month, day, time, year, __, __, __, list_name, __, __, size = line.split()[:11]
                                try:
                                    list = lists[list_name]
                                except KeyError:
                                    continue
                                else:
                                    date = year + months[month] + day + time.replace(':', '')
                                    if list[0] < int(date) < end_date:
                                        size = size[5:-1]
                                        try:
                                            list[2] += int(size)
                                        except ValueError:
                                            # anonymized post
                                            pass
                    except IOError as e:
                        sys.stderr.write(e)
                
                for list_name, opts in lists.iteritems():
                    __, object_id, size = opts
                    if size:
                        cmd = ' '.join(('list_members', list_name, '| wc -l'))
                        ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                        subscribers = ps.communicate()[0].strip()
                        size *= int(subscribers)
                    print object_id, size
            """).format(**context)
        )
    
    def monitor(self, user):
        context = self.get_context(user)
        self.append("prepare(%(object_id)s, '%(list_name)s', '%(last_date)s')" % context)
    
    def commit(self):
        self.append('monitor(lists, end_date, months, postlogs)')
    
    def get_context(self, mail_list):
        return {
            'list_name': mail_list.name,
            'object_id': mail_list.pk,
            'last_date': self.get_last_date(mail_list.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
        }
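The new parser attributes each post's size to its list and finally multiplies by the subscriber count (`list_members | wc -l`), since every post is delivered to every subscriber. The `split()[:11]` implies a post-log layout like the sample below; the line is an assumption inferred from the field positions the code uses, not taken from Mailman documentation:

    # Assumed Mailman post log line (field positions inferred from split()[:11]):
    line = ('Apr 01 12:30:45 2014 (1234) post to mylist from bob@example.com, '
            'size=2048, message-id=<abc@example.com>, success')
    month, day, time, year, _, _, _, list_name, _, _, size = line.split()[:11]
    print(list_name, int(size[5:-1]))  # mylist 2048 -- size[5:-1] strips 'size=' and ','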
@@ -9,7 +9,7 @@ from django.utils.translation import ugettext_lazy as _
from orchestra.apps.orchestration import ServiceController
from orchestra.apps.systemusers.backends import SystemUserBackend
from orchestra.apps.resources import ServiceMonitor
from orchestra.utils.humanize import unit_to_bytes
#from orchestra.utils.humanize import unit_to_bytes

from . import settings
from .models import Address
@@ -42,7 +42,8 @@ class MailSystemUserBackend(ServiceController):
        self.set_quota(mailbox, context)
    
    def set_quota(self, mailbox, context):
        context['quota'] = mailbox.resources.disk.allocated * unit_to_bytes(mailbox.resources.disk.unit)
        context['quota'] = mailbox.resources.disk.allocated * mailbox.resources.disk.resource.get_scale()
        #unit_to_bytes(mailbox.resources.disk.unit)
        self.append(textwrap.dedent("""
            mkdir -p %(home)s/Maildir
            chown %(user)s:%(group)s %(home)s/Maildir
@@ -294,3 +295,166 @@ class MaildirDisk(ServiceMonitor):
        }
        context['maildir_path'] = settings.MAILBOXES_MAILDIRSIZE_PATH % context
        return context


class PostfixTraffic(ServiceMonitor):
    """
    A high-performance log parser
    Reads the mail.log file only once, for all users
    """
    model = 'mailboxes.Mailbox'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Postfix traffic usage")
    script_executable = '/usr/bin/python'
    
    def prepare(self):
        mail_log = '/var/log/mail.log'
        context = {
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
            'mail_logs': str((mail_log, mail_log+'.1')),
        }
        self.append(textwrap.dedent("""\
            import re
            import sys
            from datetime import datetime
            from dateutil import tz
            
            def to_local_timezone(date, tzlocal=tz.tzlocal()):
                # Converts orchestra's UTC dates to local timezone
                date = datetime.strptime(date, '%Y-%m-%d %H:%M:%S %Z')
                date = date.replace(tzinfo=tz.tzutc())
                date = date.astimezone(tzlocal)
                return date
            
            maillogs = {mail_logs}
            end_datetime = to_local_timezone('{current_date}')
            end_date = int(end_datetime.strftime('%Y%m%d%H%M%S'))
            months = {{
                "Jan": "01",
                "Feb": "02",
                "Mar": "03",
                "Apr": "04",
                "May": "05",
                "Jun": "06",
                "Jul": "07",
                "Aug": "08",
                "Sep": "09",
                "Oct": "10",
                "Nov": "11",
                "Dec": "12",
            }}
            
            def inside_period(month, day, time, ini_date):
                global months
                global end_datetime
                # Mar 19 17:13:22
                month = months[month]
                year = end_datetime.year
                if month == '12' and end_datetime.month == 1:
                    year = year+1
                date = str(year) + month + day
                date += time.replace(':', '')
                return ini_date < int(date) < end_date
            
            users = {{}}
            delivers = {{}}
            reverse = {{}}
            
            def prepare(object_id, mailbox, ini_date):
                global users
                global delivers
                global reverse
                ini_date = to_local_timezone(ini_date)
                ini_date = int(ini_date.strftime('%Y%m%d%H%M%S'))
                users[mailbox] = (ini_date, object_id)
                delivers[mailbox] = set()
                reverse[mailbox] = set()
            
            def monitor(users, delivers, reverse, maillogs):
                targets = {{}}
                counter = {{}}
                user_regex = re.compile(r'\(Authenticated sender: ([^ ]+)\)')
                for maillog in maillogs:
                    try:
                        with open(maillog, 'r') as maillog:
                            for line in maillog.readlines():
                                # Only consider authenticated sends
                                if '(Authenticated sender: ' in line:
                                    username = user_regex.search(line).groups()[0]
                                    try:
                                        sender = users[username]
                                    except KeyError:
                                        continue
                                    else:
                                        month, day, time, __, proc, id = line.split()[:6]
                                        if inside_period(month, day, time, sender[0]):
                                            # Add new email
                                            delivers[id[:-1]] = username
                                # Look for a MailScanner requeue ID
                                elif ' Requeue: ' in line:
                                    id, __, req_id = line.split()[6:9]
                                    id = id.split('.')[0]
                                    try:
                                        username = delivers[id]
                                    except KeyError:
                                        pass
                                    else:
                                        targets[req_id] = (username, None)
                                        reverse[username].add(req_id)
                                # Look for the mail size and count the number of recipients of each email
                                else:
                                    try:
                                        month, day, time, __, proc, req_id, __, msize = line.split()[:8]
                                    except ValueError:
                                        # not interested in this line
                                        continue
                                    if proc.startswith('postfix/'):
                                        req_id = req_id[:-1]
                                        if msize.startswith('size='):
                                            try:
                                                target = targets[req_id]
                                            except KeyError:
                                                pass
                                            else:
                                                targets[req_id] = (target[0], int(msize[5:-1]))
                                        elif proc.startswith('postfix/smtp'):
                                            try:
                                                target = targets[req_id]
                                            except KeyError:
                                                pass
                                            else:
                                                if inside_period(month, day, time, users[target[0]][0]):
                                                    try:
                                                        counter[req_id] += 1
                                                    except KeyError:
                                                        counter[req_id] = 1
                    except IOError as e:
                        sys.stderr.write(e)
                
                for username, opts in users.iteritems():
                    size = 0
                    for req_id in reverse[username]:
                        size += targets[req_id][1] * counter.get(req_id, 0)
                    print opts[1], size
            """).format(**context)
        )
    
    def commit(self):
        self.append('monitor(users, delivers, reverse, maillogs)')
    
    def monitor(self, mailbox):
        context = self.get_context(mailbox)
        self.append("prepare(%(object_id)s, '%(mailbox)s', '%(last_date)s')" % context)
    
    def get_context(self, mailbox):
        return {
            # 'mainlog': settings.LISTS_MAILMAN_POST_LOG_PATH,
            'mailbox': mailbox.name,
            'object_id': mailbox.pk,
            'last_date': self.get_last_date(mailbox.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
        }
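PostfixTraffic correlates three kinds of mail.log lines per message: the smtpd line carrying `(Authenticated sender: user)` and a queue id, an optional MailScanner `Requeue:` line mapping that id to a new one, and the qmgr/smtp lines carrying `size=` and the per-recipient deliveries; a user's traffic is each message's size times its delivered copies. A toy walk-through of the bookkeeping (ids and sizes invented):

    delivers = {}  # original queue id -> authenticated username
    targets = {}   # requeued id -> (username, size)
    counter = {}   # requeued id -> delivered copies

    delivers['ABC123'] = 'alice'                       # smtpd: Authenticated sender
    targets['DEF456'] = (delivers['ABC123'], None)     # MailScanner: ABC123 Requeue: DEF456
    targets['DEF456'] = (targets['DEF456'][0], 2048)   # qmgr: DEF456: ... size=2048 ...
    counter['DEF456'] = counter.get('DEF456', 0) + 2   # postfix/smtp: two deliveries

    username, size = targets['DEF456']
    print(username, size * counter['DEF456'])  # alice 4096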
@@ -88,9 +88,15 @@ def SSH(backend, log, server, cmds, async=False):
        # Non-blocking is the secret ingredient in the async sauce
        select.select([channel], [], [])
        if channel.recv_ready():
            log.stdout += channel.recv(1024)
            part = channel.recv(1024)
            while part:
                log.stdout += part
                part = channel.recv(1024)
        if channel.recv_stderr_ready():
            log.stderr += channel.recv_stderr(1024)
            part = channel.recv_stderr(1024)
            while part:
                log.stderr += part
                part = channel.recv_stderr(1024)
        log.save(update_fields=['stdout', 'stderr'])
        if channel.exit_status_ready():
            break
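The old branches read at most 1024 bytes per `select()` wake-up, so a chatty monitor script could outrun the reader; the new loops drain the channel buffer completely each time. The same pattern as a stand-alone helper (assuming, as the diff does, that a drained non-blocking `recv` returns an empty string):

    def drain(recv, chunk=1024):
        """Call recv until it returns an empty chunk, concatenating the parts."""
        parts = []
        part = recv(chunk)
        while part:
            parts.append(part)
            part = recv(chunk)
        return b''.join(parts)

    # usage against a paramiko-style channel:
    #   if channel.recv_ready():
    #       log.stdout += drain(channel.recv)
    #   if channel.recv_stderr_ready():
    #       log.stderr += drain(channel.recv_stderr)
    _fake = iter([b'ab', b'cd', b''])
    print(drain(lambda n: next(_fake), chunk=2))  # b'abcd'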
@@ -16,7 +16,7 @@ def run_monitor(modeladmin, request, queryset):
        if not async:
            for result in results:
                if hasattr(result, 'log'):
                    logs.add(result.log.pk)
                    logs.add(str(result.log.pk))
        modeladmin.log_change(request, resource, _("Run monitors"))
    if async:
        num = len(queryset)
@@ -28,8 +28,8 @@ def run_monitor(modeladmin, request, queryset):
    else:
        num = len(logs)
        if num == 1:
            log = logs.pop()
            link = reverse('admin:orchestration_backendlog_change', args=(log,))
            log_pk = int(logs.pop())
            link = reverse('admin:orchestration_backendlog_change', args=(log_pk,))
            msg = _("One related monitor has <a href='%s'>been executed</a>.") % link
        elif num >= 1:
            link = reverse('admin:orchestration_backendlog_changelist')
@@ -181,23 +181,35 @@ def resource_inline_factory(resources):
        def total_form_count(self, resources=resources):
            return len(resources)
        
        def get_queryset(self):
            queryset = super(ResourceInlineFormSet, self).get_queryset()
            return queryset.order_by('-id').filter(resource__is_active=True)
        
        @cached_property
        def forms(self, resources=resources):
            forms = []
            resources_copy = list(resources)
            queryset = self.queryset
            # Remove disabled objects from the queryset
            queryset = [data for data in self.queryset if data.resource in resources]
            if self.instance.pk:
                # Create missing resource data
                queryset = list(queryset)
                queryset_resources = [data.resource for data in queryset]
                for resource in resources:
                    if resource not in queryset_resources:
                        data = resource.dataset.create(content_object=self.instance)
                        kwargs = {
                            'content_object': self.instance,
                        }
                        if resource.default_allocation:
                            kwargs['allocated'] = resource.default_allocation
                        data = resource.dataset.create(**kwargs)
                        queryset.append(data)
            # Existing dataset
            for i, data in enumerate(queryset):
                forms.append(self._construct_form(i, resource=data.resource))
                resources_copy.remove(data.resource)
                try:
                    resources_copy.remove(data.resource)
                except ValueError:
                    pass
            # Missing dataset
            for i, resource in enumerate(resources_copy, len(queryset)):
                forms.append(self._construct_form(i, resource=resource))
@@ -246,8 +258,8 @@ def insert_resource_inlines():
        for inline in getattr(modeladmin_class, 'inlines', []):
            if inline.__name__ == 'ResourceInline':
                modeladmin_class.inlines.remove(inline)
    
    for ct, resources in Resource.objects.group_by('content_type').iteritems():
    resources = Resource.objects.filter(is_active=True)
    for ct, resources in resources.group_by('content_type').iteritems():
        inline = resource_inline_factory(resources)
        model = ct.model_class()
        insertattr(model, 'inlines', inline)
@@ -64,7 +64,11 @@ class ServiceMonitor(ServiceBackend):
        ct = ContentType.objects.get_by_natural_key(app_label, model_name.lower())
        for line in log.stdout.splitlines():
            line = line.strip()
            object_id, value = self.process(line)
            try:
                object_id, value = self.process(line)
            except ValueError:
                cls_name = self.__class__.__name__
                raise ValueError("%s expected '<id> <value>' got '%s'" % (cls_name, line))
            MonitorData.objects.create(monitor=name, object_id=object_id,
                    content_type=ct, value=value, created_at=self.current_date)
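Context for this change: each monitor script prints one `<object_id> <value>` pair per stdout line, and `process()` turns a line into that pair; the try/except replaces an anonymous unpacking error with one that names the offending monitor and line. A minimal stand-in for the expected parsing (assuming the default `process()` simply splits the line):

    def process(line):
        # Assumed behaviour: "<object_id> <value>" -> (object_id, value);
        # anything else raises ValueError, now reported with monitor class and line.
        object_id, value = line.strip().split()
        return object_id, value

    print(process('42 1048576'))  # ('42', '1048576')
    # process('garbage') raises ValueError -> "<Monitor> expected '<id> <value>' got 'garbage'"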
@@ -25,7 +25,6 @@ class ResourceForm(forms.ModelForm):
        else:
            self.fields['allocated'].required = True
            self.fields['allocated'].initial = self.resource.default_allocation
    
#    def has_changed(self):
#        """ Make sure resourcedata objects are created for all resources """
#        if not self.instance.pk:
@@ -86,6 +86,10 @@ class Resource(models.Model):
    
    def clean(self):
        self.verbose_name = self.verbose_name.strip()
        if self.on_demand and self.default_allocation:
            raise validators.ValidationError({
                'default_allocation': _("Default allocation can not be set for 'on demand' services")
            })
        # Validate that a model path exists between ct and each monitor.model
        monitor_errors = []
        for monitor in self.monitors:
@@ -172,6 +176,9 @@ class ResourceData(models.Model):
        unique_together = ('resource', 'content_type', 'object_id')
        verbose_name_plural = _("resource data")
    
    def __unicode__(self):
        return "%s: %s" % (str(self.resource), str(self.content_object))
    
    @classmethod
    def get_or_create(cls, obj, resource):
        ct = ContentType.objects.get_for_model(type(obj))
@@ -99,21 +99,24 @@ class SystemUserDisk(ServiceMonitor):
        }


class FTPTraffic(ServiceMonitor):
class FTPTrafficBash(ServiceMonitor):
    model = 'systemusers.SystemUser'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _('Systemuser FTP traffic')
    verbose_name = _('Systemuser FTP traffic (Bash)')
    
    def prepare(self):
        super(FTPTraffic, self).prepare()
        current_date = self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z")
        super(FTPTrafficBash, self).prepare()
        context = {
            'log_file': '%s{,.1}' % settings.SYSTEMUSERS_FTP_LOG_PATH,
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
        }
        self.append(textwrap.dedent("""\
            function monitor () {
                OBJECT_ID=$1
                INI_DATE=$(date "+%%Y%%m%%d%%H%%M%%S" -d "$2")
                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%s')
                END_DATE=$(date '+%%Y%%m%%d%%H%%M%%S' -d '%(current_date)s')
                USERNAME="$3"
                LOG_FILE="$4"
                LOG_FILE=%(log_file)s
                {
                    grep " bytes, " ${LOG_FILE} \\
                        | grep " \\[${USERNAME}\\] " \\
@@ -145,18 +148,191 @@ class FTPTraffic(ServiceMonitor):
                        print sum
                }' || [[ $? == 1 ]] && true
            } | xargs echo ${OBJECT_ID}
            }""") % current_date)
            }""") % context)
    
    def monitor(self, user):
        context = self.get_context(user)
        self.append(
            'monitor {object_id} "{last_date}" "{username}" {log_file}'.format(**context)
            'monitor {object_id} "{last_date}" "{username}"'.format(**context)
        )
    
    def get_context(self, user):
        return {
            'log_file': '%s{,.1}' % settings.SYSTEMUSERS_FTP_LOG_PATH,
            'last_date': self.get_last_date(user.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
            'object_id': user.pk,
            'username': user.username,
        }


class Exim4Traffic(ServiceMonitor):
    model = 'systemusers.SystemUser'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _("Exim4 traffic usage")
    script_executable = '/usr/bin/python'
    
    def prepare(self):
        mainlog = '/var/log/exim4/mainlog'
        context = {
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
            'mainlogs': str((mainlog, mainlog+'.1')),
        }
        self.append(textwrap.dedent("""\
            import re
            import sys
            from datetime import datetime
            from dateutil import tz
            
            def to_local_timezone(date, tzlocal=tz.tzlocal()):
                date = datetime.strptime(date, '%Y-%m-%d %H:%M:%S %Z')
                date = date.replace(tzinfo=tz.tzutc())
                date = date.astimezone(tzlocal)
                return date
            
            mainlogs = {mainlogs}
            # Use local timezone
            end_date = to_local_timezone('{current_date}')
            end_date = int(end_date.strftime('%Y%m%d%H%M%S'))
            users = {{}}
            
            def prepare(object_id, username, ini_date):
                global users
                ini_date = to_local_timezone(ini_date)
                ini_date = int(ini_date.strftime('%Y%m%d%H%M%S'))
                users[username] = [ini_date, object_id, 0]
            
            def monitor(users, end_date, mainlogs):
                user_regex = re.compile(r' U=([^ ]+) ')
                for mainlog in mainlogs:
                    try:
                        with open(mainlog, 'r') as mainlog:
                            for line in mainlog.readlines():
                                if ' <= ' in line and 'P=local' in line:
                                    username = user_regex.search(line).groups()[0]
                                    try:
                                        sender = users[username]
                                    except KeyError:
                                        continue
                                    else:
                                        date, time, id, __, __, user, protocol, size = line.split()[:8]
                                        date = date.replace('-', '')
                                        date += time.replace(':', '')
                                        if sender[0] < int(date) < end_date:
                                            sender[2] += int(size[2:])
                    except IOError as e:
                        sys.stderr.write(e)
                
                for username, opts in users.iteritems():
                    __, object_id, size = opts
                    print object_id, size
            """).format(**context)
        )
    
    def commit(self):
        self.append('monitor(users, end_date, mainlogs)')
    
    def monitor(self, user):
        context = self.get_context(user)
        self.append("prepare(%(object_id)s, '%(username)s', '%(last_date)s')" % context)
    
    def get_context(self, user):
        return {
            # 'mainlog': settings.LISTS_MAILMAN_POST_LOG_PATH,
            'username': user.username,
            'object_id': user.pk,
            'last_date': self.get_last_date(user.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
        }
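The Exim4 parser keys on `<=` arrival lines with `P=local`; the layout below is inferred from the `split()[:8]` unpacking and the `U=` regex above, not taken from Exim documentation:

    import re
    # Assumed exim4 mainlog arrival line (layout inferred from the parser above):
    line = '2014-05-20 10:11:12 1WmZRq-0001Qv-5x <= alice@example.com U=alice P=local S=2048'
    date, time, msg_id, _, _, user, protocol, size = line.split()[:8]
    username = re.search(r' U=([^ ]+) ', line).groups()[0]
    print(username, int(size[2:]))  # alice 2048 -- size[2:] strips the 'S=' prefix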


class FTPTraffic(ServiceMonitor):
    model = 'systemusers.SystemUser'
    resource = ServiceMonitor.TRAFFIC
    verbose_name = _('Systemuser FTP traffic')
    script_executable = '/usr/bin/python'
    
    def prepare(self):
        vsftplog = settings.SYSTEMUSERS_FTP_LOG_PATH
        context = {
            'current_date': self.current_date.strftime("%Y-%m-%d %H:%M:%S %Z"),
            'vsftplogs': str((vsftplog, vsftplog+'.1')),
        }
        self.append(textwrap.dedent("""\
            import re
            import sys
            from datetime import datetime
            from dateutil import tz
            
            def to_local_timezone(date, tzlocal=tz.tzlocal()):
                date = datetime.strptime(date, '%Y-%m-%d %H:%M:%S %Z')
                date = date.replace(tzinfo=tz.tzutc())
                date = date.astimezone(tzlocal)
                return date
            
            vsftplogs = {vsftplogs}
            # Use local timezone
            end_date = to_local_timezone('{current_date}')
            end_date = int(end_date.strftime('%Y%m%d%H%M%S'))
            users = {{}}
            months = {{
                'Jan': '01',
                'Feb': '02',
                'Mar': '03',
                'Apr': '04',
                'May': '05',
                'Jun': '06',
                'Jul': '07',
                'Aug': '08',
                'Sep': '09',
                'Oct': '10',
                'Nov': '11',
                'Dec': '12',
            }}
            
            def prepare(object_id, username, ini_date):
                global users
                ini_date = to_local_timezone(ini_date)
                ini_date = int(ini_date.strftime('%Y%m%d%H%M%S'))
                users[username] = [ini_date, object_id, 0]
            
            def monitor(users, end_date, months, vsftplogs):
                user_regex = re.compile(r'\] \[([^ ]+)\] OK ')
                bytes_regex = re.compile(r', ([0-9]+) bytes, ')
                for vsftplog in vsftplogs:
                    try:
                        with open(vsftplog, 'r') as vsftplog:
                            for line in vsftplog.readlines():
                                if ' bytes, ' in line:
                                    username = user_regex.search(line).groups()[0]
                                    try:
                                        user = users[username]
                                    except KeyError:
                                        continue
                                    else:
                                        __, month, day, time, year = line.split()[:5]
                                        date = year + months[month] + day + time.replace(':', '')
                                        if user[0] < int(date) < end_date:
                                            bytes = bytes_regex.search(line).groups()[0]
                                            user[2] += int(bytes)
                    except IOError as e:
                        sys.stderr.write(e)
                
                for username, opts in users.iteritems():
                    __, object_id, size = opts
                    print object_id, size
            """).format(**context)
        )
    
    def monitor(self, user):
        context = self.get_context(user)
        self.append("prepare(%(object_id)s, '%(username)s', '%(last_date)s')" % context)
    
    def commit(self):
        self.append('monitor(users, end_date, months, vsftplogs)')
    
    def get_context(self, user):
        return {
            'last_date': self.get_last_date(user.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
            'object_id': user.pk,
            'username': user.username,
        }
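Likewise, the vsftpd parser expects transfer-complete lines like this sample (format assumed from the two regexes and the `split()[:5]` above, not from vsftpd documentation):

    import re
    # Assumed vsftpd.log transfer line (layout inferred from the parser above):
    line = ('Mon Apr 14 12:00:01 2014 [pid 4321] [alice] OK DOWNLOAD: Client "10.0.0.2", '
            '"/home/alice/backup.tar.gz", 2048 bytes, 312.00Kbyte/sec')
    username = re.search(r'\] \[([^ ]+)\] OK ', line).groups()[0]
    nbytes = re.search(r', ([0-9]+) bytes, ', line).groups()[0]
    _, month, day, time, year = line.split()[:5]
    print(username, int(nbytes), year, month, day, time)  # alice 2048 2014 Apr 14 12:00:01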
@@ -46,7 +46,7 @@ class WebAppOptionInline(admin.TabularInline):


class WebAppAdmin(SelectPluginAdminMixin, AccountAdminMixin, ExtendedModelAdmin):
    list_display = ('name', 'type', 'display_websites', 'account_link')
    list_display = ('name', 'type', 'display_detail', 'display_websites', 'account_link')
    list_filter = ('type',)
#    add_fields = ('account', 'name', 'type')
#    fields = ('account_link', 'name', 'type')
@@ -80,6 +80,10 @@ class WebAppAdmin(SelectPluginAdminMixin, AccountAdminMixin, ExtendedModelAdmin)
    display_websites.short_description = _("web sites")
    display_websites.allow_tags = True
    
    def display_detail(self, webapp):
        return webapp.type_instance.get_detail()
    display_detail.short_description = _("detail")
    
#    def formfield_for_dbfield(self, db_field, **kwargs):
#        """ Make value input widget bigger """
#        if db_field.name == 'type':
@@ -28,6 +28,12 @@ WEBAPPS_PHP_ERROR_LOG_PATH = getattr(settings, 'WEBAPPS_PHP_ERROR_LOG_PATH',
    '')


WEBAPPS_MERGE_PHP_WEBAPPS = getattr(settings, 'WEBAPPS_MERGE_PHP_WEBAPPS',
    # Combine all fcgid-wrappers/fpm-pools into one per account-php_version
    # to better control num processes per account and save memory
    False)


WEBAPPS_TYPES = getattr(settings, 'WEBAPPS_TYPES', (
    'orchestra.apps.webapps.types.php.PHPApp',
    'orchestra.apps.webapps.types.misc.StaticApp',
@@ -100,6 +100,9 @@ class AppType(plugins.Plugin):
        else:
            yield (group, [(op.name, op.verbose_name) for op in options])
    
    def get_detail(self):
        return ''
    
    def save(self):
        pass
@@ -9,7 +9,7 @@ from orchestra.plugins.forms import PluginDataForm
from ..options import AppOption

from . import AppType
from .php import PHPApp
from .php import PHPApp, PHPAppForm, PHPAppSerializer


class StaticApp(AppType):
@@ -39,12 +39,12 @@ class WebalizerApp(AppType):
    return ('static', webalizer_path)


class SymbolicLinkForm(PluginDataForm):
class SymbolicLinkForm(PHPAppForm):
    path = forms.CharField(label=_("Path"), widget=forms.TextInput(attrs={'size':'100'}),
            help_text=_("Path for the origin of the symbolic link."))


class SymbolicLinkSerializer(serializers.Serializer):
class SymbolicLinkSerializer(PHPAppSerializer):
    path = serializers.CharField(label=_("Path"))
@@ -54,6 +54,9 @@ class PHPApp(AppType):
    def is_fcgid(self):
        return self.get_php_version().endswith('-cgi')
    
    def get_detail(self):
        return self.instance.data.get('php_version', '')
    
    def get_context(self):
        """ context used to format settings """
        return {
@@ -343,7 +343,7 @@ class Apache2Traffic(ServiceMonitor):
    
    def get_context(self, site):
        return {
            'log_file': '%s{,.1}' % site.get_www_log_path(),
            'log_file': '%s{,.1}' % site.get_www_access_log_path(),
            'last_date': self.get_last_date(site.pk).strftime("%Y-%m-%d %H:%M:%S %Z"),
            'object_id': site.pk,
        }