2020-09-21 19:35:50 +00:00
|
|
|
"""Sync LDAP Users and groups into passbook"""
|
|
|
|
from typing import Any, Dict
|
2018-11-11 12:41:48 +00:00
|
|
|
|
|
|
|
import ldap3
|
|
|
|
import ldap3.core.exceptions
|
2020-02-19 15:20:33 +00:00
|
|
|
from django.db.utils import IntegrityError
|
2020-02-20 16:23:05 +00:00
|
|
|
from structlog import get_logger
|
2018-11-11 12:41:48 +00:00
|
|
|
|
2020-02-18 09:13:05 +00:00
|
|
|
from passbook.core.exceptions import PropertyMappingExpressionException
|
2019-10-11 10:53:48 +00:00
|
|
|
from passbook.core.models import Group, User
|
2020-02-18 09:13:05 +00:00
|
|
|
from passbook.sources.ldap.models import LDAPPropertyMapping, LDAPSource
|
2018-11-11 12:41:48 +00:00
|
|
|
|
2019-10-04 08:08:53 +00:00
|
|
|
# Module-level structlog logger for this sync module.
LOGGER = get_logger()
|
2018-11-11 12:41:48 +00:00
|
|
|
|
2020-09-21 09:04:26 +00:00
|
|
|
|
2020-09-21 19:35:50 +00:00
|
|
|
class LDAPSynchronizer:
    """Sync LDAP Users and groups into passbook.

    Wraps an :class:`LDAPSource` and pulls groups, users and group
    membership from the configured LDAP server into passbook's
    ``Group``/``User`` models.
    """

    _source: LDAPSource

    def __init__(self, source: LDAPSource):
        self._source = source

    @property
    def base_dn_users(self) -> str:
        """Shortcut to get full base_dn for user lookups"""
        if self._source.additional_user_dn:
            return f"{self._source.additional_user_dn},{self._source.base_dn}"
        return self._source.base_dn

    @property
    def base_dn_groups(self) -> str:
        """Shortcut to get full base_dn for group lookups"""
        if self._source.additional_group_dn:
            return f"{self._source.additional_group_dn},{self._source.base_dn}"
        return self._source.base_dn

    def sync_groups(self) -> int:
        """Iterate over all LDAP Groups and create passbook_core.Group instances.

        Returns:
            Number of groups synced, or -1 when group syncing is disabled
            on this source.
        """
        if not self._source.sync_groups:
            LOGGER.warning("Group syncing is disabled for this Source")
            return -1
        groups = self._source.connection.extend.standard.paged_search(
            search_base=self.base_dn_groups,
            search_filter=self._source.group_object_filter,
            search_scope=ldap3.SUBTREE,
            attributes=ldap3.ALL_ATTRIBUTES,
        )
        group_count = 0
        for group in groups:
            attributes = group.get("attributes", {})
            if self._source.object_uniqueness_field not in attributes:
                # Fixed log key: this is the group-sync path, and we log the
                # attribute keys we actually inspected (was `user=`).
                LOGGER.warning(
                    "Cannot find uniqueness Field in attributes",
                    group=attributes.keys(),
                )
                continue
            uniq = attributes[self._source.object_uniqueness_field]
            # Match on the stored uniqueness value so renames don't duplicate.
            _, created = Group.objects.update_or_create(
                attributes__ldap_uniq=uniq,
                parent=self._source.sync_parent_group,
                defaults={
                    "name": attributes.get("name", ""),
                    "attributes": {
                        "ldap_uniq": uniq,
                        "distinguishedName": attributes.get("distinguishedName"),
                    },
                },
            )
            LOGGER.debug(
                "Synced group", group=attributes.get("name", ""), created=created
            )
            group_count += 1
        return group_count

    def sync_users(self) -> int:
        """Iterate over all LDAP Users and create passbook_core.User instances.

        Returns:
            Number of users successfully synced, or -1 when user syncing is
            disabled on this source.
        """
        if not self._source.sync_users:
            LOGGER.warning("User syncing is disabled for this Source")
            return -1
        users = self._source.connection.extend.standard.paged_search(
            search_base=self.base_dn_users,
            search_filter=self._source.user_object_filter,
            search_scope=ldap3.SUBTREE,
            attributes=ldap3.ALL_ATTRIBUTES,
        )
        user_count = 0
        for user in users:
            attributes = user.get("attributes", {})
            if self._source.object_uniqueness_field not in attributes:
                # Fixed: log the attribute keys (as sync_groups does), not the
                # keys of the raw paged_search entry (`dn`, `attributes`, ...).
                LOGGER.warning(
                    "Cannot find uniqueness Field in attributes",
                    user=attributes.keys(),
                )
                continue
            uniq = attributes[self._source.object_uniqueness_field]
            try:
                defaults = self._build_object_properties(attributes)
                user, created = User.objects.update_or_create(
                    attributes__ldap_uniq=uniq,
                    defaults=defaults,
                )
            except IntegrityError as exc:
                # Most likely a username collision with a non-LDAP user; tell
                # the operator how to link the accounts manually.
                LOGGER.warning("Failed to create user", exc=exc)
                LOGGER.warning(
                    (
                        "To merge new User with existing user, set the User's "
                        f"Attribute 'ldap_uniq' to '{uniq}'"
                    )
                )
            else:
                if created:
                    # LDAP-backed users authenticate via the source, never
                    # with a local password.
                    user.set_unusable_password()
                    user.save()
                LOGGER.debug(
                    "Synced User", user=attributes.get("name", ""), created=created
                )
                user_count += 1
        return user_count

    def sync_membership(self):
        """Iterate over all Users and assign Groups using memberOf Field"""
        users = self._source.connection.extend.standard.paged_search(
            search_base=self.base_dn_users,
            search_filter=self._source.user_object_filter,
            search_scope=ldap3.SUBTREE,
            # Only the two fields we need; avoids pulling full entries.
            attributes=[
                self._source.user_group_membership_field,
                self._source.object_uniqueness_field,
            ],
        )
        group_cache: Dict[str, Group] = {}
        for user in users:
            member_of = user.get("attributes", {}).get(
                self._source.user_group_membership_field, []
            )
            uniq = user.get("attributes", {}).get(
                self._source.object_uniqueness_field, []
            )
            for group_dn in member_of:
                # Check if group_dn is within our base_dn_groups, and skip if not
                if not group_dn.endswith(self.base_dn_groups):
                    continue
                # Check if we fetched the group already, and if not cache it for later
                if group_dn not in group_cache:
                    groups = Group.objects.filter(
                        attributes__distinguishedName=group_dn
                    )
                    if not groups.exists():
                        # NOTE(review): aborts the entire membership sync on the
                        # first unknown group (matches original behavior).
                        LOGGER.warning(
                            "Group does not exist in our DB yet, run sync_groups first.",
                            group=group_dn,
                        )
                        return
                    group_cache[group_dn] = groups.first()
                group = group_cache[group_dn]
                # Renamed from `users` to avoid shadowing the paged_search
                # iterator being consumed by the outer loop.
                matching_users = User.objects.filter(attributes__ldap_uniq=uniq)
                group.users.add(*list(matching_users))
        # Now that all users are added, lets write everything
        for group in group_cache.values():
            group.save()
        LOGGER.debug("Successfully updated group membership")

    def _build_object_properties(
        self, attributes: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Evaluate this source's property mappings against raw LDAP attributes.

        Args:
            attributes: attribute dict of a single LDAP entry.

        Returns:
            ``defaults`` dict for ``update_or_create``: top-level model fields
            plus a nested ``"attributes"`` dict (hence ``Dict[str, Any]`` —
            the previous ``Dict[str, Dict[Any, Any]]`` annotation was wrong
            for the non-dict top-level values).
        """
        properties = {"attributes": {}}
        for mapping in self._source.property_mappings.all().select_subclasses():
            # Sources can carry mappings of other types; only LDAP ones apply.
            if not isinstance(mapping, LDAPPropertyMapping):
                continue
            try:
                value = mapping.evaluate(user=None, request=None, ldap=attributes)
                if value is None:
                    continue
                object_field = mapping.object_field
                if object_field.startswith("attributes."):
                    # "attributes.foo" targets the nested attributes dict.
                    properties["attributes"][
                        object_field.replace("attributes.", "")
                    ] = value
                else:
                    properties[object_field] = value
            except PropertyMappingExpressionException as exc:
                # A broken mapping skips just that field, not the whole user.
                LOGGER.warning("Mapping failed to evaluate", exc=exc, mapping=mapping)
                continue
        if self._source.object_uniqueness_field in attributes:
            properties["attributes"]["ldap_uniq"] = attributes.get(
                self._source.object_uniqueness_field
            )
        properties["attributes"]["distinguishedName"] = attributes.get(
            "distinguishedName"
        )
        return properties
|