diff --git a/.flake8 b/.flake8
index f8728f47..d0affc8e 100644
--- a/.flake8
+++ b/.flake8
@@ -1,5 +1,5 @@
[flake8]
max-line-length = 120
-ignore = F401,F405,E402,F403,W504,W605
+ignore = F401,F405,E402,F403,W504,W605,E203
max-complexity = 126
exclude = venv/*,*/migrations/*,wip_plugins/*,datatableview/*, sal/settings.py
diff --git a/sal/version.plist b/sal/version.plist
index 262fadfb..88a087fc 100644
--- a/sal/version.plist
+++ b/sal/version.plist
@@ -3,6 +3,6 @@
version
- 4.1.6.2167
+ 4.1.6.2169
diff --git a/server/non_ui_views.py b/server/non_ui_views.py
index 72e170f3..ca39e7cd 100644
--- a/server/non_ui_views.py
+++ b/server/non_ui_views.py
@@ -11,70 +11,83 @@
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.db.models import Q
-from django.http import (
- HttpResponse, JsonResponse, Http404, HttpResponseBadRequest)
+from django.http import HttpResponse, JsonResponse, Http404, HttpResponseBadRequest
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
+from django.utils.html import escape
import server.utils
import utils.csv
from sal.decorators import key_auth_required
from sal.plugin import Widget, ReportPlugin, PluginManager
-from server.models import (Machine, Fact, HistoricalFact, MachineGroup, Message, Plugin, Report,
- ManagedItem, MachineDetailPlugin, ManagementSource, ManagedItemHistory)
+from server.models import (
+ Machine,
+ Fact,
+ HistoricalFact,
+ MachineGroup,
+ Message,
+ Plugin,
+ Report,
+ ManagedItem,
+ MachineDetailPlugin,
+ ManagementSource,
+ ManagedItemHistory,
+)
# The database probably isn't going to change while this is loaded.
IS_POSTGRES = server.utils.is_postgres()
-HISTORICAL_FACTS = set(server.utils.get_django_setting('HISTORICAL_FACTS', []))
-if server.utils.get_django_setting('IGNORE_FACTS'):
+HISTORICAL_FACTS = set(server.utils.get_django_setting("HISTORICAL_FACTS", []))
+if server.utils.get_django_setting("IGNORE_FACTS"):
IGNORE_PREFIXES = re.compile(
- '|'.join(server.utils.get_django_setting('IGNORE_FACTS')))
+ "|".join(server.utils.get_django_setting("IGNORE_FACTS"))
+ )
else:
IGNORE_PREFIXES = None
# Build a translation table for serial numbers, to remove garbage
# VMware puts in.
-SERIAL_TRANSLATE = {ord(c): None for c in '+/'}
+SERIAL_TRANSLATE = {ord(c): None for c in "+/"}
logger = logging.getLogger(__name__)
@login_required
-def tableajax(request, plugin_name, data, group_type='all', group_id=None):
+def tableajax(request, plugin_name, data, group_type="all", group_id=None):
"""Table ajax for dataTables"""
# Pull our variables out of the GET request
- get_data = request.GET['args']
+ get_data = request.GET["args"]
get_data = json.loads(get_data)
- draw = get_data.get('draw', 0)
- start = int(get_data.get('start', 0))
- length = int(get_data.get('length', 0))
- search_value = ''
- if 'search' in get_data:
- if 'value' in get_data['search']:
- search_value = get_data['search']['value']
+ draw = get_data.get("draw", 0)
+ start = int(get_data.get("start", 0))
+ length = int(get_data.get("length", 0))
+ search_value = ""
+ if "search" in get_data:
+ if "value" in get_data["search"]:
+ search_value = get_data["search"]["value"]
# default ordering
order_column = 2
- order_direction = 'desc'
- order_name = ''
- if 'order' in get_data:
- order_column = get_data['order'][0]['column']
- order_direction = get_data['order'][0]['dir']
- for column in get_data.get('columns', None):
- if column['data'] == order_column:
- order_name = column['name']
+ order_direction = "desc"
+ order_name = ""
+ if "order" in get_data:
+ order_column = get_data["order"][0]["column"]
+ order_direction = get_data["order"][0]["dir"]
+ for column in get_data.get("columns", None):
+ if column["data"] == order_column:
+ order_name = column["name"]
break
plugin_object = process_plugin(plugin_name, group_type, group_id)
queryset = plugin_object.get_queryset(
- request, group_type=group_type, group_id=group_id)
+ request, group_type=group_type, group_id=group_id
+ )
machines, title = plugin_object.filter_machines(queryset, data)
- machines = machines.values('id', 'hostname', 'console_user', 'last_checkin')
+ machines = machines.values("id", "hostname", "console_user", "last_checkin")
if len(order_name) != 0:
- if order_direction == 'desc':
+ if order_direction == "desc":
order_string = "-%s" % order_name
else:
order_string = "%s" % order_name
@@ -83,19 +96,21 @@ def tableajax(request, plugin_name, data, group_type='all', group_id=None):
hostname_q = Q(hostname__icontains=search_value)
user_q = Q(console_user__icontains=search_value)
checkin_q = Q(last_checkin__icontains=search_value)
- searched_machines = machines.filter(hostname_q | user_q | checkin_q).order_by(order_string)
+ searched_machines = machines.filter(hostname_q | user_q | checkin_q).order_by(
+ order_string
+ )
else:
searched_machines = machines.order_by(order_string)
- limited_machines = searched_machines[start:(start + length)]
+ limited_machines = searched_machines[start : (start + length)]
return_data = {}
- return_data['title'] = title
- return_data['draw'] = int(draw)
- return_data['recordsTotal'] = machines.count()
- return_data['recordsFiltered'] = return_data['recordsTotal']
+ return_data["title"] = title
+ return_data["draw"] = int(draw)
+ return_data["recordsTotal"] = machines.count()
+ return_data["recordsFiltered"] = return_data["recordsTotal"]
- return_data['data'] = []
+ return_data["data"] = []
settings_time_zone = None
try:
settings_time_zone = pytz.timezone(settings.TIME_ZONE)
@@ -103,32 +118,38 @@ def tableajax(request, plugin_name, data, group_type='all', group_id=None):
pass
for machine in limited_machines:
- if machine['last_checkin']:
+ if machine["last_checkin"]:
# formatted_date = pytz.utc.localize(machine.last_checkin)
if settings_time_zone:
- formatted_date = machine['last_checkin'].astimezone(
- settings_time_zone).strftime("%Y-%m-%d %H:%M %Z")
+ formatted_date = (
+ machine["last_checkin"]
+ .astimezone(settings_time_zone)
+ .strftime("%Y-%m-%d %H:%M %Z")
+ )
else:
- formatted_date = machine['last_checkin'].strftime("%Y-%m-%d %H:%M")
+ formatted_date = machine["last_checkin"].strftime("%Y-%m-%d %H:%M")
else:
formatted_date = ""
- hostname_link = "%s" % (
- reverse('machine_detail', args=[machine['id']]), machine['hostname'])
+ hostname_link = '%s' % (
+ reverse("machine_detail", args=[machine["id"]]),
+ escape(machine["hostname"]),
+ )
- list_data = [hostname_link, machine['console_user'], formatted_date]
- return_data['data'].append(list_data)
+ list_data = [hostname_link, escape(machine["console_user"]), formatted_date]
+ return_data["data"].append(list_data)
return JsonResponse(return_data)
@login_required
-def plugin_load(request, plugin_name, group_type='all', group_id=None):
+def plugin_load(request, plugin_name, group_type="all", group_id=None):
plugin_object = process_plugin(plugin_name, group_type, group_id)
return HttpResponse(
- plugin_object.widget_content(request, group_type=group_type, group_id=group_id))
+ plugin_object.widget_content(request, group_type=group_type, group_id=group_id)
+ )
-def process_plugin(plugin_name, group_type='all', group_id=None):
+def process_plugin(plugin_name, group_type="all", group_id=None):
plugin = PluginManager.get_plugin_by_name(plugin_name)
# Ensure that a plugin was instantiated before proceeding.
@@ -148,10 +169,11 @@ def process_plugin(plugin_name, group_type='all', group_id=None):
@login_required
-def export_csv(request, plugin_name, data, group_type='all', group_id=None):
+def export_csv(request, plugin_name, data, group_type="all", group_id=None):
plugin_object = process_plugin(plugin_name, group_type, group_id)
queryset = plugin_object.get_queryset(
- request, group_type=group_type, group_id=group_id)
+ request, group_type=group_type, group_id=group_id
+ )
machines, title = plugin_object.filter_machines(queryset, data)
return utils.csv.get_csv_response(machines, utils.csv.machine_fields(), title)
@@ -165,12 +187,14 @@ def preflight_v2(request):
server.utils.load_default_plugins()
output = []
# Old Sal scripts just do a GET; just send everything in that case.
- os_family = None if request.method != 'POST' else request.POST.get('os_family')
+ os_family = None if request.method != "POST" else request.POST.get("os_family")
enabled_reports = Report.objects.all()
enabled_plugins = Plugin.objects.all()
enabled_detail_plugins = MachineDetailPlugin.objects.all()
- for enabled_plugin in itertools.chain(enabled_reports, enabled_plugins, enabled_detail_plugins):
+ for enabled_plugin in itertools.chain(
+ enabled_reports, enabled_plugins, enabled_detail_plugins
+ ):
plugin = PluginManager.get_plugin_by_name(enabled_plugin.name)
if not plugin:
continue
@@ -203,24 +227,26 @@ def report_broken_client(request):
# Take out some of the weird junk VMware puts in. Keep an eye out in case
# Apple actually uses these:
- serial = data.get('serial', '').upper().translate(SERIAL_TRANSLATE)
+ serial = data.get("serial", "").upper().translate(SERIAL_TRANSLATE)
# Are we using Sal for some sort of inventory (like, I don't know, Puppet?)
machine = get_object_or_404(Machine, serial=serial)
- machine_group_key = data.get('key')
+ machine_group_key = data.get("key")
machine.machine_group = get_object_or_404(MachineGroup, key=machine_group_key)
machine.last_checkin = django.utils.timezone.now()
- machine.hostname = data.get('name', '')
- machine.sal_version = data.get('sal_version')
+ machine.hostname = data.get("name", "")
+ machine.sal_version = data.get("sal_version")
- if server.utils.get_django_setting('DEPLOYED_ON_CHECKIN', False):
+ if server.utils.get_django_setting("DEPLOYED_ON_CHECKIN", False):
machine.deployed = True
- if bool(data.get('broken_client', False)):
+ if bool(data.get("broken_client", False)):
machine.broken_client = True
machine.save()
- return HttpResponse("Broken Client report submmitted for %s" % data.get('serial'))
+ return HttpResponse(
+            "Broken Client report submitted for %s" % data.get("serial")
+ )
return HttpResponseBadRequest()
@@ -228,48 +254,58 @@ def report_broken_client(request):
@require_POST
@key_auth_required
def checkin(request):
- if request.content_type != 'application/json':
- return HttpResponseBadRequest('Checkin must be content-type "application/json"!')
+ if request.content_type != "application/json":
+ return HttpResponseBadRequest(
+ 'Checkin must be content-type "application/json"!'
+ )
# Ensure we have the bare minimum data before continuing.
try:
submission = json.loads(request.body.decode())
except json.JSONDecodeError:
- return HttpResponseBadRequest('Checkin has invalid JSON!')
- if not isinstance(submission, dict) or 'Machine' not in submission:
+ return HttpResponseBadRequest("Checkin has invalid JSON!")
+ if not isinstance(submission, dict) or "Machine" not in submission:
return HttpResponseBadRequest('Checkin JSON is missing required key "Machine"!')
# Process machine submission information.
try:
- serial = submission['Machine']['extra_data'].get('serial')
+ serial = submission["Machine"]["extra_data"].get("serial")
except KeyError:
- return HttpResponseBadRequest('Checkin JSON is missing required "Machine" key "serial"!')
+ return HttpResponseBadRequest(
+ 'Checkin JSON is missing required "Machine" key "serial"!'
+ )
if not serial:
- return HttpResponseBadRequest('Checkin JSON is missing required "Machine" key "serial"!')
+ return HttpResponseBadRequest(
+ 'Checkin JSON is missing required "Machine" key "serial"!'
+ )
machine = process_checkin_serial(serial)
- machine_group = get_object_or_404(MachineGroup, key=submission['Sal']['extra_data'].get('key'))
+ machine_group = get_object_or_404(
+ MachineGroup, key=submission["Sal"]["extra_data"].get("key")
+ )
machine.machine_group = machine_group
machine.broken_client = False
machine.save()
clean_related(machine)
object_queue = {
- 'facts': [],
- 'historical_facts': [],
- 'managed_items': [],
- 'managed_item_histories': [],
- 'messages': []
+ "facts": [],
+ "historical_facts": [],
+ "managed_items": [],
+ "managed_item_histories": [],
+ "messages": [],
}
# Pop off the plugin_results, because they are a list instead of
# a dict.
- plugin_results = submission.pop('plugin_results', {})
+ plugin_results = submission.pop("plugin_results", {})
for management_source_name, management_data in submission.items():
management_source, _ = ManagementSource.objects.get_or_create(
- name=management_source_name)
+ name=management_source_name
+ )
object_queue = process_management_submission(
- management_source, management_data, machine, object_queue)
+ management_source, management_data, machine, object_queue
+ )
object_queue = process_managed_item_histories(object_queue, machine)
@@ -278,7 +314,7 @@ def checkin(request):
server.utils.process_plugin_script(plugin_results, machine)
server.utils.run_plugin_processing(machine, submission)
- if server.utils.get_setting('send_data') in (None, True):
+ if server.utils.get_setting("send_data") in (None, True):
# If setting is None, it hasn't been configured yet; assume True
try:
# If the report server is down, don't halt all submissions
@@ -297,7 +333,7 @@ def process_checkin_serial(serial):
serial = serial.upper().translate(SERIAL_TRANSLATE)
# Are we using Sal for some sort of inventory (like, I don't know, Puppet?)
- if server.utils.get_django_setting('ADD_NEW_MACHINES', True):
+ if server.utils.get_django_setting("ADD_NEW_MACHINES", True):
try:
machine = Machine.objects.get(serial=serial)
except Machine.DoesNotExist:
@@ -339,9 +375,10 @@ def process_management_submission(source, management_data, machine, object_queue
# The func's signature must be
# f(management_data: dict, machine: Machine, object_queue: dict)
processing_funcs = {
- 'Machine': process_machine_submission,
- 'Sal': process_sal_submission,
- 'Munki': process_munki_extra_keys}
+ "Machine": process_machine_submission,
+ "Sal": process_sal_submission,
+ "Munki": process_munki_extra_keys,
+ }
processing_func = processing_funcs.get(source.name)
if processing_func:
@@ -355,33 +392,33 @@ def process_management_submission(source, management_data, machine, object_queue
def process_machine_submission(machine_submission, machine, object_queue):
- extra_data = machine_submission.get('extra_data', {})
- machine.hostname = extra_data.get('hostname', '')
+ extra_data = machine_submission.get("extra_data", {})
+ machine.hostname = extra_data.get("hostname", "")
# Drop the setup assistant user if encountered.
- console_user = extra_data.get('console_user')
- console_user = console_user if console_user != '_mbsetupuser' else None
+ console_user = extra_data.get("console_user")
+ console_user = console_user if console_user != "_mbsetupuser" else None
machine.console_user = console_user
- machine.os_family = extra_data.get('os_family', 'Darwin')
- machine.operating_system = extra_data.get('operating_system')
- machine.hd_space = extra_data.get('hd_space')
- machine.hd_total = extra_data.get('hd_total')
- machine.hd_percent = extra_data.get('hd_percent')
- machine.machine_model = extra_data.get('machine_model')
- machine.machine_model_friendly = extra_data.get('machine_model_friendly')
- machine.cpu_type = extra_data.get('cpu_type')
- machine.cpu_speed = extra_data.get('cpu_speed')
- machine.memory = extra_data.get('memory')
- machine.memory_kb = extra_data.get('memory_kb', 0)
+ machine.os_family = extra_data.get("os_family", "Darwin")
+ machine.operating_system = extra_data.get("operating_system")
+ machine.hd_space = extra_data.get("hd_space")
+ machine.hd_total = extra_data.get("hd_total")
+ machine.hd_percent = extra_data.get("hd_percent")
+ machine.machine_model = extra_data.get("machine_model")
+ machine.machine_model_friendly = extra_data.get("machine_model_friendly")
+ machine.cpu_type = extra_data.get("cpu_type")
+ machine.cpu_speed = extra_data.get("cpu_speed")
+ machine.memory = extra_data.get("memory")
+ machine.memory_kb = extra_data.get("memory_kb", 0)
machine.save()
return object_queue
def process_sal_submission(sal_submission, machine, object_queue):
- extras = sal_submission.get('extra_data', {})
- machine.sal_version = extras.get('sal_version')
+ extras = sal_submission.get("extra_data", {})
+ machine.sal_version = extras.get("sal_version")
machine.last_checkin = django.utils.timezone.now()
- if server.utils.get_django_setting('DEPLOYED_ON_CHECKIN', True):
+ if server.utils.get_django_setting("DEPLOYED_ON_CHECKIN", True):
machine.deployed = True
machine.save()
@@ -389,70 +426,88 @@ def process_sal_submission(sal_submission, machine, object_queue):
def process_munki_extra_keys(management_data, machine, object_queue):
- extra_data = management_data.get('extra_data', {})
- machine.munki_version = extra_data.get('munki_version')
- machine.manifest = extra_data.get('manifest')
+ extra_data = management_data.get("extra_data", {})
+ machine.munki_version = extra_data.get("munki_version")
+ machine.manifest = extra_data.get("manifest")
machine.save()
return object_queue
def process_facts(management_source, management_data, machine, object_queue):
now = django.utils.timezone.now()
- for fact_name, fact_data in management_data.get('facts', {}).items():
+ for fact_name, fact_data in management_data.get("facts", {}).items():
if IGNORE_PREFIXES and IGNORE_PREFIXES.match(fact_name):
continue
- object_queue['facts'].append(
+ object_queue["facts"].append(
Fact(
- machine=machine, fact_data=fact_data, fact_name=fact_name,
- management_source=management_source))
+ machine=machine,
+ fact_data=fact_data,
+ fact_name=fact_name,
+ management_source=management_source,
+ )
+ )
if fact_name in HISTORICAL_FACTS:
- object_queue['historical_facts'].append(
+ object_queue["historical_facts"].append(
HistoricalFact(
- machine=machine, fact_data=fact_data, fact_name=fact_name,
- management_source=management_source, fact_recorded=now))
+ machine=machine,
+ fact_data=fact_data,
+ fact_name=fact_name,
+ management_source=management_source,
+ fact_recorded=now,
+ )
+ )
return object_queue
def process_managed_items(management_source, management_data, machine, object_queue):
now = django.utils.timezone.now()
- for name, managed_item in management_data.get('managed_items', {}).items():
- object_queue['managed_items'].append(
- _process_managed_item(name, managed_item, machine, management_source, now))
+ for name, managed_item in management_data.get("managed_items", {}).items():
+ object_queue["managed_items"].append(
+ _process_managed_item(name, managed_item, machine, management_source, now)
+ )
return object_queue
def _process_managed_item(name, managed_item, machine, management_source, now):
- submitted_date = managed_item.get('date_managed')
+ submitted_date = managed_item.get("date_managed")
try:
# Make sure there isn't somerthing stupid in the date format
date_managed = dateutil.parser.parse(submitted_date) if submitted_date else now
except Exception:
date_managed = now
- status = managed_item.get('status', 'UNKNOWN')
- data = managed_item.get('data')
+ status = managed_item.get("status", "UNKNOWN")
+ data = managed_item.get("data")
dumped_data = json.dumps(data) if data else None
return ManagedItem(
- name=name, machine=machine, management_source=management_source,
- date_managed=date_managed, status=status, data=dumped_data)
+ name=name,
+ machine=machine,
+ management_source=management_source,
+ date_managed=date_managed,
+ status=status,
+ data=dumped_data,
+ )
def process_managed_item_histories(object_queue, machine):
histories = machine.manageditemhistory_set.all()
- for managed_item in object_queue['managed_items']:
+ for managed_item in object_queue["managed_items"]:
item_histories = histories.filter(
- name=managed_item.name, management_source=managed_item.management_source)
+ name=managed_item.name, management_source=managed_item.management_source
+ )
if _history_creation_needed(managed_item, item_histories.first()):
- object_queue['managed_item_histories'].append(
+ object_queue["managed_item_histories"].append(
ManagedItemHistory(
name=managed_item.name,
status=managed_item.status,
management_source=managed_item.management_source,
machine=machine,
- recorded=managed_item.date_managed))
+ recorded=managed_item.date_managed,
+ )
+ )
return object_queue
@@ -466,22 +521,29 @@ def _history_creation_needed(managed_item, last_history):
def process_messages(management_source, management_data, machine, object_queue):
now = django.utils.timezone.now()
- for message_item in management_data.get('messages', []):
- object_queue['messages'].append(
+ for message_item in management_data.get("messages", []):
+ object_queue["messages"].append(
Message(
machine=machine,
management_source=management_source,
- text=message_item.get('text'),
- message_type=message_item.get('message_type'),
- date=message_item.get('date', now)))
+ text=message_item.get("text"),
+ message_type=message_item.get("message_type"),
+ date=message_item.get("date", now),
+ )
+ )
return object_queue
def create_objects(object_queue):
"""Bulk create Fact, HistoricalFact, Message, and ManagedItem objects."""
- models = {'facts': Fact, 'historical_facts': HistoricalFact, 'managed_items': ManagedItem,
- 'messages': Message, 'managed_item_histories': ManagedItemHistory}
+ models = {
+ "facts": Fact,
+ "historical_facts": HistoricalFact,
+ "managed_items": ManagedItem,
+ "messages": Message,
+ "managed_item_histories": ManagedItemHistory,
+ }
for name, objects in object_queue.items():
_bulk_create_or_iterate_save(objects, models[name])