You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
1015 lines
43 KiB
import os |
|
import re |
|
import requests |
|
from datetime import datetime, timedelta |
|
from slugify import slugify |
|
from dotenv import dotenv_values |
|
from ldap3.utils.conv import escape_filter_chars |
|
from ldap3 import LEVEL, MODIFY_REPLACE, Server, Connection, ALL, MODIFY_ADD, MODIFY_DELETE |
|
import logging |
|
import logging.handlers |
|
from logging.config import dictConfig, fileConfig |
|
import sentry_sdk |
|
from sentry_sdk.integrations.logging import LoggingIntegration |
|
from pathlib import Path |
|
import time |
|
import glob |
|
import codecs |
|
import zipfile |
|
import copy |
|
|
|
|
|
class TimedCompressedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
    """
    Extended version of TimedRotatingFileHandler that compresses logs on rollover.

    After the normal time-based rotation the rotated file is packed into a
    ``<name>.<timestamp>.zip`` archive and the uncompressed copy is removed.
    """

    def doRollover(self):
        """
        Do a rollover; in this case, a date/time stamp is appended to the filename
        when the rollover happens. However, you want the file to be named for the
        start of the interval, not the current time. If there is a backup count,
        then we have to get a list of matching filenames, sort them and remove
        the one with the oldest suffix. Finally the rotated file is zipped.
        """
        self.stream.close()
        # Get the time that this sequence started at and make it a TimeTuple.
        t = self.rolloverAt - self.interval
        timeTuple = time.localtime(t)
        dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
        if os.path.exists(dfn):
            os.remove(dfn)
        os.rename(self.baseFilename, dfn)
        if self.backupCount > 0:
            # Find the oldest log file and delete it. The ".20*" glob matches the
            # year-2000+ date suffix appended above (plain and .zip variants).
            s = glob.glob(self.baseFilename + ".20*")
            if len(s) > self.backupCount:
                s.sort()
                os.remove(s[0])
        # Reopen the live log file for the next interval.
        if self.encoding:
            self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
        else:
            self.stream = open(self.baseFilename, 'w')
        self.rolloverAt = self.rolloverAt + self.interval
        # Compress the rotated file. BUGFIX: the archive handle previously
        # shadowed the builtin 'file' and was not guaranteed to be closed on a
        # write error; a context manager makes the zip valid in all cases.
        if os.path.exists(dfn + ".zip"):
            os.remove(dfn + ".zip")
        with zipfile.ZipFile(dfn + ".zip", "w") as archive:
            archive.write(dfn, arcname=os.path.basename(dfn),
                          compress_type=zipfile.ZIP_DEFLATED)
        os.remove(dfn)
|
|
|
|
|
# Configure logging at import time: an on-disk 'logging.conf' (fileConfig
# format) lets deployments override logging without code changes.
if Path('logging.conf').exists():
    # Custom logging config
    fileConfig('logging.conf')
else:
    # Default logging config
    logging_config = dict(
        version=1,
        formatters={
            'f': {'format': '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'}
        },
        handlers={
            'console': {'class': 'logging.StreamHandler',
                        'formatter': 'f',
                        'level': logging.WARNING},

            # NOTE(review): 'agent.TimedCompressedRotatingFileHandler' assumes
            # this module is importable as 'agent', and 'log/agent.log' assumes
            # ./log/ exists relative to the working directory — confirm both.
            'file': {
                'class': 'agent.TimedCompressedRotatingFileHandler',
                'formatter': 'f',
                'filename': 'log/agent.log',
                'when': 'midnight',
                'interval': 1,
                'backupCount': 3,
                'level': logging.INFO}
        },
        root={
            'handlers': ['console', 'file'],
            'level': logging.DEBUG,
        },
    )

    dictConfig(logging_config)

# Module-level logger for this file.
logger = logging.getLogger(__name__)

# https://ldap3.readthedocs.io/en/latest/

# Absolute directory containing this file; used to locate the .env config and
# the .running lock file in the __main__ section.
WORKINGDIR, _ = os.path.split(os.path.abspath(__file__))
|
|
|
|
|
class LDAPError(Exception):
    """General LDAP error exception.

    Raised for unspecific failures reported by the LDAP server.
    The redundant ``__init__`` that only forwarded to ``super()`` was removed;
    ``Exception`` already stores the message.

    Args:
        message (string): Error message
    """
|
|
|
|
|
class DuplicateResearchGroup(Exception):
    """Duplicate researchgroup exception.

    The researchgroup that is being added already exists in the LDAP server.
    The redundant ``__init__`` that only forwarded to ``super()`` was removed.

    Args:
        message (string): Error message
    """
|
|
|
|
|
class ResearchGroupDoesNotExists(Exception):
    """Researchgroup does not exist exception.

    The requested researchgroup does not exist in the LDAP server.
    The redundant ``__init__`` that only forwarded to ``super()`` was removed.

    Args:
        message (string): Error message
    """
|
|
|
|
|
class DuplicateResearcher(Exception):
    """Duplicate researcher exception.

    The researcher that is being added already exists in the LDAP server.
    The redundant ``__init__`` that only forwarded to ``super()`` was removed.

    Args:
        message (string): Error message
    """
|
|
|
|
|
class ResearcherDoesNotExists(Exception):
    """Researcher does not exist exception.

    The requested researcher does not exist in the LDAP server.
    The redundant ``__init__`` that only forwarded to ``super()`` was removed.

    Args:
        message (string): Error message
    """
|
|
|
|
|
class VRWLDAP():
    """This is the VRW LDAP client. With this client you can make changes in the VRW LDAP server. This includes adding, updating and deleting of researchgroups and researchers.

    Args:
        host (string): The hostname or IP of the LDAP server
        port (string): The portnumber of the LDAP server
        login (string): The login name for the LDAP server
        password (string): The password for the LDAP server
        ssl (bool, optional): Use SSL for the connection. Defaults to False.
        production (bool, optional): When True, skip the manual 'groupMembership'
            bookkeeping that the production LDAP server performs automatically.
            Defaults to False.

    Returns:
        VRWLDAP: LDAP client
    """

    # LDAP search bases: researchgroup OUs and researcher accounts respectively.
    __RESOURCE_GROUPS_BASE = 'ou=researchgroups,ou=dfh,o=co'
    __RESOURCE_MEMBERS_BASE = 'ou=users,ou=dfh,o=co'
|
|
|
def __init__(self, host, port, login, password, ssl=False, production=False):
    """Create a new VRW LDAP client

    Args:
        host (string): The hostname or IP of the LDAP server
        port (string): The portnumber of the LDAP server
        login (string): The login name for the LDAP server
        password (string): The password for the LDAP server
        ssl (bool, optional): Use SSL for the connection. Defaults to False.
        production (bool, optional): When True, rely on the LDAP server to
            maintain 'groupMembership' back-references itself. Defaults to False.
    """
    # The connection is created but not bound here; call check_connection()
    # (or any ldap3 operation) to actually bind.
    self.server = Server(host, port=port, use_ssl=ssl, get_info=ALL)
    self.connection = Connection(self.server, login, password)
    self.production = production
|
|
|
def __find_next_group_id(self): |
|
"""Helper function. This will return a new group ID number that can be used for creating researchgroups |
|
|
|
It will search for the current max value of field 'hsrGroupGID' in all the existing researchgroups and add 1 to that value. |
|
|
|
Returns: |
|
int: The new group ID that can be used |
|
""" |
|
_field = 'hsrGroupGID' |
|
group_id = 0 |
|
self.connection.search(self.__RESOURCE_GROUPS_BASE, f'({_field}=*)', attributes=[_field]) |
|
for data in self.connection.entries: |
|
if data.entry_attributes_as_dict[_field][0] > group_id: |
|
group_id = data.entry_attributes_as_dict[_field][0] |
|
|
|
return group_id + 1 |
|
|
|
def __find_next_researcher_id(self): |
|
"""Helper function. This will return a new researcher ID number that can be used for creating researchers |
|
|
|
It will search for the current max value of field 'hsrUserUID' in all the existing researchers and add 1 to that value. |
|
|
|
Returns: |
|
int: The new researcher ID that can be used |
|
""" |
|
_field = 'hsrUserUID' |
|
researcher_id = 0 |
|
self.connection.search(self.__RESOURCE_MEMBERS_BASE, f'({_field}=*)', attributes=[_field]) |
|
for data in self.connection.entries: |
|
if data.entry_attributes_as_dict[_field][0] > researcher_id: |
|
researcher_id = data.entry_attributes_as_dict[_field][0] |
|
|
|
return researcher_id + 1 |
|
|
|
def __safe_dn_name(self, value, slug=True):
    """Return *value* made safe for use as a DN component / search filter.

    '+' is replaced because it causes problems in DN values; the value is
    optionally slugified, and LDAP filter metacharacters are escaped last.
    """
    cleaned = value.replace("+", "-")
    if slug:
        cleaned = slugify(cleaned)
    return escape_filter_chars(cleaned)
|
|
|
def check_connection(self): |
|
"""Check if the LDAP connection is successfull |
|
|
|
Returns: |
|
bool: True when the connection is successfull, else False |
|
""" |
|
return self.connection.bind() |
|
|
|
def search_research_group(self, name, id=None):
    """Find the LDAP DN path for a researchgroup.

    It can search both on name and existing ID. When the 'id' parameter is set (not None), it will do first a LDAP DN key lookup. If that fails, if will fallback on the 'name' parameter.
    If the 'id' parameter is None, there will be only searched on the name.

    Args:
        name (string): The researchgroup name
        id (string, optional): Find the researchgroup based on ID — a full LDAP
            DN string. (NOTE: shadows the builtin ``id``.) Defaults to None.

    Returns:
        string, bool: Return the LDAP DN name when found or False when not found
    """
    search_base = self.__RESOURCE_GROUPS_BASE
    search_filter = f'(ou={self.__safe_dn_name(name)})'

    if id is not None:
        # Split the DN at its first comma: the leading RDN becomes the search
        # filter, the remainder the search base, e.g. 'ou=x,ou=y,o=co' ->
        # filter '(ou=x)', base 'ou=y,o=co'. Raises ValueError on a DN
        # without a comma — assumed never to happen here.
        split_index = id.index(',')
        search_base = id[split_index + 1:]
        search_filter = f'({id[:split_index]})'

    logger.debug(f'Searching for research group "({search_filter[1:-1]})" in DN "{search_base}"')

    if self.connection.search(search_base, search_filter) and len(self.connection.entries) == 1:
        logger.debug(f'Found existing research group at "{self.connection.entries[0].entry_dn}"')
        return self.connection.entries[0].entry_dn

    if id is not None and '' != name:
        # If we have searched on ID but could not find it, try once more with only the group name
        return self.search_research_group(name)

    logger.debug(f'Research group {name} does not exist.')
    return False
|
|
|
def create_research_group(self, name, subgroups=False):
    """Create a new research group in the LDAP server based on the name.

    The name is cleaned of special characters and spaces (slugified) before it
    is used as DN value.

    It will return the newly created researchgroup DN key.

    Raises:
        LDAPError: When there is a general LDAP error
        DuplicateResearchGroup: When the researchgroup already exists in the LDAP server

    Args:
        name (string): The researchgroup name
        subgroups (bool, optional): Create the needed subgroups in LDAP ourselves
            instead of waiting for the server to create them. Defaults to False.

    Returns:
        string: The new researchgroup LDAP DN key
    """

    def __create_sub_groups(group_name, parent_dn, start_id):
        # Fixed set of role/workspace subgroups every researchgroup gets.
        sub_groups = [f'{group_name}:Members',
                      f'{group_name}:Role:Controller',
                      f'{group_name}:Role:Datamanager',
                      f'{group_name}:Role:Researcher',
                      f'{group_name}:Role:ResearcherPlus',
                      f'{group_name}:Role:Reviewer',
                      f'{group_name}:WS:Dedicated:Large',
                      f'{group_name}:WS:Dedicated:Medium',
                      f'{group_name}:WS:Dedicated:Small',
                      f'{group_name}:WS:Shared']

        for sub_group in sub_groups:
            self.connection.add(f'cn={sub_group},{parent_dn}', ['groupOfNames', 'hsrGroup', 'Top'], {'description': f'{sub_group}'})

        # NOTE(review): start_id ('hsrGroupGID') is accepted but never written
        # to the created entries — confirm whether the GID should be stored.
        return True

    def __check_if_subgroups_exists(group_name, parent_dn):
        return self.connection.search(parent_dn, f'(cn={self.__safe_dn_name(group_name)})', LEVEL)

    research_group = self.search_research_group(name)
    if not research_group:
        logger.debug(f'Creating a new research group with the name "{name}"')

        dn_name = self.__safe_dn_name(name)
        dn = f'ou={dn_name},{self.__RESOURCE_GROUPS_BASE}'
        add_ok = self.connection.add(dn,
                                     ['hsrGroup', 'ndsContainerLoginProperties',
                                      'ndsLoginProperties', 'organizationalUnit', 'Top'],
                                     {'description': f'{name} Researchgroup',
                                      'hsrGroupOrganisation': 'RUG',
                                      'hsrGroupProjectCode': 'Support'})

        if not add_ok:
            raise LDAPError(f'Could not create the researchgroup {name}! Got some errors...... LDAP Error: {self.connection.last_error}')

        if subgroups:
            __create_sub_groups(dn_name, dn, self.__find_next_group_id())
        else:
            # Wait (max ~10 seconds) until the LDAP server has created the
            # subgroups itself. BUGFIX: this loop previously polled 10 times
            # without sleeping, so it never actually waited.
            subgroups_ok = False
            for counter in range(10):
                if __check_if_subgroups_exists(dn_name, dn):
                    subgroups_ok = True
                    break
                time.sleep(1)

            if not subgroups_ok:
                raise LDAPError('LDAP Server did not create the needed subgroups in time. Therefore we have to abort!')

        logger.info(f'Created new researchgroup {name} with DN value: {dn}')
        return dn

    raise DuplicateResearchGroup(f'Researchgroup {name} already exists!')
|
|
|
def update_research_group(self, group_dn, name):
    """Update an existing researchgroup.

    This will change the name of the researchgroup, and make sure that all the references in the LDAP to this researchgroup are updated.

    Args:
        group_dn (string): The researchgroup LDAP DN key
        name (string): The new name

    Raises:
        LDAPError: When there is a general LDAP error
        ResearchGroupDoesNotExists: When the researchgroup does not exists in the LDAP server

    Returns:
        string: The new researchgroup LDAP DN key
    """
    # Find the existing research group based on DN and then its name
    research_group = self.search_research_group(name, group_dn)
    if research_group is not False:
        # Change the DN key in the LDAP based on new research name
        update_ok = self.connection.modify_dn(research_group, f'ou={self.__safe_dn_name(name)}')
        if update_ok:
            # Get the new DN key based on the new name
            research_group = self.search_research_group(name)

            # Update the description of the LDAP object
            update_ok = update_ok and self.connection.modify(research_group, {'description': [(MODIFY_REPLACE, [f'{name} Researchgroup'])]})

            # Rename the subgroups also: replace the old group-name prefix of
            # each subgroup CN with the new (slugified) name.
            self.connection.search(research_group, '(cn=*)', LEVEL)
            regex = r"^cn=([^:,]+):"
            for entry in self.connection.entries:
                new_name = re.sub(regex, f'cn={self.__safe_dn_name(name)}:', entry.entry_dn, 1)
                # NOTE(review): ldap3's modify_dn expects a relative RDN as
                # second argument, but new_name is a full DN here — confirm
                # this behaves as intended against the target server.
                self.connection.modify_dn(entry.entry_dn, new_name)

            return research_group

        else:
            raise LDAPError(f'Could not update the researchgroup {name}! Got some errors...... LDAP Error: {self.connection.last_error}')

    raise ResearchGroupDoesNotExists(
        f'Invalid research group DN: {group_dn}')
|
|
|
def delete_research_group(self, group_dn):
    """Delete an existing researchgroup from the LDAP server based on DN key

    This will also update the LDAP references of the researchers if they where member of this researchgroup.

    Args:
        group_dn (string): The LDAP DN key to be deleted

    Raises:
        ResearchGroupDoesNotExists: When the researchgroup does not exists in the LDAP server
    """
    logger.debug(f'Delete research group "{group_dn}"')
    if not self.search_research_group('', group_dn):
        raise ResearchGroupDoesNotExists(f'Invalid research group DN: {group_dn}')

    # First detach every researcher so no stale 'groupMembership' references
    # remain on the researcher entries.
    group_members = self.research_group_members(group_dn)
    for member in group_members:
        self.remove_researcher_from_group(group_dn, member)

    # Then delete all subgroup entries below the group...
    if self.connection.search(f'{group_dn}', '(cn=*)'):
        for entry in self.connection.entries:
            self.connection.delete(entry.entry_dn)

    # ... and finally the group entry itself.
    self.connection.delete(group_dn)
|
|
|
def research_group_members(self, group_dn):
    """Get all the researchers from a researchgroup

    Args:
        group_dn (string): The researchgroup LDAP DN key

    Raises:
        ResearchGroupDoesNotExists: When the researchgroup does not exists in the LDAP server

    Returns:
        list (string): Returns a list with all the member DNs
    """
    logger.debug(f'Getting all researchers for group "{group_dn}"')

    if not self.search_research_group('', group_dn):
        raise ResearchGroupDoesNotExists(f'Invalid research group DN: {group_dn}')

    # Collect the 'member' values of every ':Members' subgroup below the group.
    members = []
    if self.connection.search(f'{group_dn}', '(cn=*:Members)', attributes=['member']):
        for entry in self.connection.entries:
            members.extend(entry.member)

    return members
|
|
|
def search_researcher(self, email):
    """Find a reseacher based on its email address.

    Args:
        email (string): The email address of the researcher to find

    Returns:
        string, bool: Return the LDAP DN key when found or False when not found
    """
    logger.debug(f'Searching for research member "{email}" in DN "{self.__RESOURCE_MEMBERS_BASE}"')
    found = self.connection.search(self.__RESOURCE_MEMBERS_BASE, f'(uid={self.__safe_dn_name(email, False)})')
    # Only a single, unambiguous hit counts as found.
    if found and len(self.connection.entries) == 1:
        logger.debug(f'Found research member "{email}" at "{self.connection.entries[0].entry_dn}"')
        return self.connection.entries[0].entry_dn

    logger.debug(f'Research member {email} does not exist.')
    return False
|
|
|
def create_researcher(self, firstname, lastname, email, mobile, pnumber):
    """Create a new researcher.

    Args:
        firstname (string): Firstname of the researcher
        lastname (string): Middle and lastname of the researcher
        email (string): Email address of the researcher (used as the LDAP uid)
        mobile (string): Mobile number used for MFA.
        pnumber (string): Researcher ID number at the university

    Raises:
        LDAPError: When there is a general LDAP error
        DuplicateResearcher: When the researcher does already exists

    Returns:
        string: The new researcher LDAP DN key
    """
    researcher = self.search_researcher(email)
    if not researcher:
        logger.debug(f'Creating a new researcher ({firstname} {lastname}) with the email address "{email}"')

        # DN name cannot contain the chars: +
        dn = f'uid={self.__safe_dn_name(email, False)},{self.__RESOURCE_MEMBERS_BASE}'
        user_id = self.__find_next_researcher_id()
        add_ok = self.connection.add(dn,
                                     ['hsrUser', 'inetOrgPerson', 'ldapPublicKey',
                                      'ndsLoginProperties', 'organizationalPerson', 'Person', 'Top'],
                                     {'cn': f'{firstname} {lastname}',
                                      'mail': email,
                                      'sn': lastname,
                                      'displayName': f'{firstname} {lastname}',
                                      'givenName': firstname,
                                      'loginDisabled': 'FALSE',
                                      'hsrUserUID': user_id,
                                      'employeeNumber': pnumber,
                                      'description': f'Researcher {firstname} {lastname}',
                                      'mobile': mobile,
                                      'o': 'RUG'})

        if add_ok:
            logger.info(f'Created new researcher "{firstname} {lastname}" with the email address "{email}". DN value: {dn}')
            return dn

        raise LDAPError(f'Could not create the researcher {firstname} {lastname} - {email}! LDAP Error: {self.connection.last_error}')

    raise DuplicateResearcher(f'Researcher {firstname} {lastname} - {email} already exists!')
|
|
|
def update_researcher(self, firstname, lastname, email, mobile, pnumber):
    """Update an existing researcher with new data. Email address is used as unique key

    Note that the email address (the LDAP uid) itself is not changed here.

    Args:
        firstname (string): Firstname of the researcher
        lastname (string): Middle and lastname of the researcher
        email (string): Email address of the researcher
        mobile (string): Mobile number used for MFA.
        pnumber (string): Researcher ID number at the university

    Raises:
        LDAPError: When there is a general LDAP error (also raised when the
            researcher could not be found)

    Returns:
        string: The researcher LDAP DN key
    """
    researcher = self.search_researcher(email)
    if researcher is not False:
        update_ok = self.connection.modify(researcher, {'cn': [(MODIFY_REPLACE, [f'{firstname} {lastname}'])],
                                                        'sn': [(MODIFY_REPLACE, [lastname])],
                                                        'displayName': [(MODIFY_REPLACE, [f'{firstname} {lastname}'])],
                                                        'givenName': [(MODIFY_REPLACE, [firstname])],
                                                        'employeeNumber': [(MODIFY_REPLACE, [pnumber])],
                                                        'description': [(MODIFY_REPLACE, [f'Researcher {firstname} {lastname}'])],
                                                        'mobile': [(MODIFY_REPLACE, [mobile])]})

        if update_ok:
            logger.info(f'Updated researcher ({firstname} {lastname}) with the email address "{email}"')
            return researcher

    raise LDAPError(f'Could not update the researcher {firstname} {lastname} - {email}! LDAP Error: {self.connection.last_error}')
|
|
|
def researcher_to_group(self, group_dn, researcher_dn, role=None, workstation=None):
    """Add a researcher to a researchgroup. And add roles and a workstation for this researcher if needed.

    Args:
        group_dn (string): The researchgroup LDAP DN key
        researcher_dn (string): The researcher LDAP DN key
        role (string, optional): The role for this researcher in this group
            ('researcher', 'manager' or 'admin'). Defaults to None.
        workstation (string, optional): The workstation for this researcher in
            this group ('premium', 'standaard' or 'basic'). Defaults to None.

    Returns:
        tuple, None: (members_dn, workstation_dn); workstation_dn is None when no
        (recognised) workstation was requested. None when group_dn is malformed.
    """
    regex = r"^ou=(?P<group_name>[^,]+)"
    matches = re.search(regex, group_dn)
    if not matches:
        # Previously an implicit None return; made explicit.
        return None

    group_name = matches.group("group_name")

    # Make the researcher member of the group
    members_dn = f'cn={group_name}:Members,{group_dn}'
    self.connection.modify(members_dn, {'member': [(MODIFY_ADD, [researcher_dn])]})

    # We do not have to make the 'groupMembership' attribute at the researcher. This is automatically done by the LDAP server.
    if not self.production:
        self.connection.modify(researcher_dn, {'groupMembership': [(MODIFY_ADD, [members_dn])]})

    # Add the correct researcher rights
    if role in ['researcher']:
        members_dn = f'cn={group_name}:Role:Researcher,{group_dn}'
    elif role in ['manager', 'admin']:
        members_dn = f'cn={group_name}:Role:Datamanager,{group_dn}'
    # NOTE(review): when 'role' matches neither list, this re-adds the
    # researcher to the ':Members' group — confirm that is intended.
    self.connection.modify(members_dn, {'member': [(MODIFY_ADD, [researcher_dn])]})

    # We do not have to make the 'groupMembership' attribute at the researcher. This is automatically done by the LDAP server.
    if not self.production:
        self.connection.modify(researcher_dn, {'groupMembership': [(MODIFY_ADD, [members_dn])]})

    # Add correct machine
    # The naming is very confusing. But from the 'old days' it was Shared for Basic, and Premium is Small :(
    # BUGFIX: workstation_dn used to be unbound (NameError on the modify below)
    # when 'workstation' matched neither option; it now defaults to None and
    # the workstation modifies are skipped in that case.
    workstation_dn = None
    if workstation in ['premium']:
        workstation_dn = f'cn={group_name}:WS:Dedicated:Small,{group_dn}'
    if workstation in ['standaard', 'basic']:
        # NOTE(review): create_research_group() creates ':WS:Shared', not
        # ':WS:Dedicated:Shared' — confirm which DN actually exists.
        workstation_dn = f'cn={group_name}:WS:Dedicated:Shared,{group_dn}'

    if workstation_dn is not None:
        self.connection.modify(workstation_dn, {'member': [(MODIFY_ADD, [researcher_dn])]})

        # We do not have to make the 'groupMembership' attribute at the researcher. This is automatically done by the LDAP server.
        if not self.production:
            self.connection.modify(researcher_dn, {'groupMembership': [(MODIFY_ADD, [workstation_dn])]})

    logger.info(f'Added researcher {researcher_dn} to group {group_dn} with rights: {members_dn} and workstation: {workstation_dn}')

    return (members_dn, workstation_dn)
|
|
|
def remove_researcher_from_group(self, group_dn, researcher_dn):
    """Remove a researcher from a researchgroup. This will also cleanup all LDAP references between researcher and researchgroup

    Args:
        group_dn (string): The LDAP DN key of the researchgroup
        researcher_dn (string): The LDAP DN key of the researcher
    """
    logger.debug(f'Removing researcher {researcher_dn} from group {group_dn}')
    # Look up the researcher and walk its group memberships.
    if self.connection.search(researcher_dn, '(objectclass=person)', attributes=['groupMembership']) and len(self.connection.entries) == 1:
        logger.debug(f'Found researcher {researcher_dn} with {len(self.connection.entries[0]["groupMembership"])} memberships.')
        for group in self.connection.entries[0]['groupMembership']:
            # Only touch memberships that belong to the requested group.
            if group.endswith(group_dn):
                self.connection.modify(f'{group}', {'member': [(MODIFY_DELETE, [researcher_dn])]})
                # BUGFIX: this log line previously printed the researcher DN
                # twice instead of the group DN.
                logger.info(f'Removed researcher {researcher_dn} from research group {group_dn}')

                # We do not have to delete the 'groupMembership' attribute at the researcher. This is automatically done by the LDAP server.
                if not self.production:
                    # It looks like this is taken care by the LDAP server
                    self.connection.modify(researcher_dn, {'groupMembership': [(MODIFY_DELETE, [group])]})
|
|
|
def export_studies(self):
    """This will export all the existing studies that are created in the LDAP server. The starting point is `self.__RESOURCE_GROUPS_BASE` setting

    Returns:
        dict: A python dictionary with all the studies with the study ID as key
    """
    studies = {}
    logger.debug(f'Exporting all studies in DN "{self.__RESOURCE_GROUPS_BASE}"')
    # We are missing Faculty information here....
    if self.connection.search(self.__RESOURCE_GROUPS_BASE, f'(ou=*)', attributes=['ou', 'description', ]):
        # Deep-copy because self.connection.entries is reused by later searches.
        entries = copy.deepcopy(self.connection.entries)
        for entry in entries:
            # Skip starting point... not sure why it is found in the list...
            if entry.entry_dn == self.__RESOURCE_GROUPS_BASE:
                continue

            dn = entry.ou.value.strip()
            # But when using the description field, we get a better study name (I think)
            # NOTE(review): this line assumes entry.description is always set,
            # while the 'description' key below guards against it being empty —
            # confirm a missing description cannot occur here.
            study_name = entry.description.value.strip().replace('Researchgroup', '').strip()
            studies[dn] = {
                'dn': dn,
                'name': study_name,
                'description': None if not entry.description else entry.description.value.strip(),
                'contributors': {}
            }

    return studies
|
|
|
def export_researchers(self, studies):
    """Export all the existing researchers. And will update the studies with the correct researchers with their role and workspace

    Args:
        studies (dict): A dict of all the studies generated by `export_studies`

    Returns:
        tuple: Returns a tuple with two values. First is a list with all the researchers, the second is a list with all the studies
    """
    researchers = []
    logger.debug(f'Exporting all researchers in DN "{self.__RESOURCE_MEMBERS_BASE}"')
    # We are missing Faculty information here....
    if self.connection.search(self.__RESOURCE_MEMBERS_BASE, f'(uid=*)', attributes=['sn', 'givenName', 'uid', 'mobile', 'employeeNumber', 'groupMembership']):
        for entry in self.connection.entries:
            uid = entry.uid.value.strip()
            researchers.append({
                'uid': uid,
                'first_name': None if not entry.givenName else entry.givenName.value.strip(),
                'last_name': None if not entry.sn else entry.sn.value.strip(),
                'email_address': None if not entry.uid else entry.uid.value.strip(),
                'mobile': None if not entry.mobile else entry.mobile.value.strip(),
                'pnumber': None if not entry.employeeNumber else entry.employeeNumber.value.strip()
            })

            # Loop over group member ships to construct the contributors data
            for group in entry.groupMembership:
                # 'cn=<group>:<kind>:...,<base dn>' -> ['<group>', '<kind>', ...]
                group = group.replace('cn=', '').split(',')[0].split(':')
                if len(group) < 2:
                    continue

                group_id = group[0]
                # NOTE(review): assumes every membership group is a key in
                # 'studies'; a membership outside the exported groups would
                # raise KeyError here — confirm.
                if uid not in studies[group_id]['contributors']:
                    studies[group_id]['contributors'][uid] = {
                        'uid': uid,
                        'role': 'Contributor',
                        'workspace': ''
                    }

                # 'WS' memberships carry the workspace size in the 4th segment.
                if group[1].lower() == 'ws' and len(group) == 4:
                    studies[group_id]['contributors'][uid]['workspace'] = 'Premium' if 'large' == group[3].lower() else 'Basic'

                # 'Role' memberships: datamanager maps to Administrator.
                elif group[1].lower() == 'role' and len(group) == 3:
                    studies[group_id]['contributors'][uid]['role'] = 'Administrator' if 'datamanager' == group[2].lower() else 'Contributor'

    # convert study dict to list. Is less data and easier for the import later on.
    studies_list = []
    for study_data in studies.values():
        studies_list.append(study_data)

    return (researchers, studies_list)
|
|
|
def export(self):
    """Export studies and researchers in one call.

    Returns:
        tuple: (researchers, studies) as produced by export_researchers().
    """
    studies = self.export_studies()
    return self.export_researchers(studies)
|
|
|
|
|
class VRE_API_CLient():
    """This is the VRE API client. With this client you can easily get the latest VRW actions from the VRE API.

    Authentication uses JWT tokens, fetched lazily on first use and refreshed
    once when a request fails.

    Args:
        base_url (string): The full url to the base API. Including protocol and optionally port number
        api_prefix (string): The current API version
        username (string): The login name for the VRE API server
        password (string): The password for the VRE API server

    Returns:
        VRE_API_CLient: VRE API client
    """
|
|
|
def __init__(self, base_url, api_prefix, username, password): |
|
"""This is the VRE API client. With this client you can easily get the latest VRW actions from the VRE API. |
|
|
|
Args: |
|
base_url (string): The full url to the base API. Including protocol and optionally port number |
|
api_prefix (string): The current API version |
|
username (string): The login name for the VRE API server |
|
password (string): The password for the VRE API server |
|
|
|
Returns: |
|
VRE_API_CLient: VRE API client |
|
""" |
|
self.base_url = base_url.strip('/') |
|
self.username = username |
|
self.password = password |
|
self.api_prefix = api_prefix |
|
|
|
self.__authorization_header = None |
|
|
|
def __get_full_url(self, part): |
|
"""Helperfunction: This will construct a full url to the API endpoint. |
|
|
|
Args: |
|
part (string): The endpoint location |
|
|
|
Returns: |
|
string: The full url including protocol and complete path. |
|
""" |
|
return f'{self.base_url}/{self.api_prefix.strip("/")}/{part.lstrip("/")}'.replace(f'/{self.api_prefix.strip("/")}/auth/', '/auth/') |
|
|
|
def __get_JWT_token(self):
    """Helperfunction: This will get a new JWT token from the VRE API server based on the user login.
    """
    # Reset first so a failed login cannot leave a stale token in place.
    self.__authorization_header = None
    login = requests.post(self.__get_full_url('/auth/jwt/create/'), json={
        'username': self.username,
        'password': self.password
    })

    # NOTE(review): the response status is never checked — a failed login
    # silently produces an 'Authorization: JWT None' header here; confirm
    # this is acceptable (requests then fail and retry at a higher level).
    self.__authorization_header = {'Authorization': f'JWT {login.json().get("access")}'}
|
|
|
def __process_workspace(self, workspace): |
|
workspace_id = workspace['id'] |
|
workspace = workspace['workspace'] |
|
|
|
return { |
|
'workspace_id': workspace_id, |
|
'workspace_type': None if 'type' not in workspace else workspace['type'], |
|
'workspace_dn': workspace['cloud_id'], |
|
|
|
'study_id': workspace['study']['id'], |
|
'study_name': workspace['study']['name'], |
|
# https://stackoverflow.com/a/12572399 |
|
'study_dn': None if workspace['cloud_id'] is None else workspace['cloud_id'][workspace['cloud_id'].index(',') + 1:], |
|
|
|
'researcher_first_name': workspace['researcher']['first_name'], |
|
'researcher_last_name': workspace['researcher']['last_name'], |
|
'researcher_email': workspace['researcher']['email_address'], |
|
'researcher_mobile_phone': workspace['researcher']['mobilephone'], |
|
'researcher_Pnumber': 'N/A', # Is currently missing in the VRE-VRW API |
|
'researcher_role': None if 'role' not in workspace or workspace['role'] is None else workspace['role'].lower(), |
|
} |
|
|
|
def _get_data(self, url, retry=True):
    """GET the endpoint *url* as JSON; refresh the JWT token and retry once.

    Returns:
        dict, None: Decoded JSON response, or None when the request failed twice.
    """
    response = requests.get(self.__get_full_url(url), headers=self.__authorization_header)

    if response.status_code == 200:
        return response.json()

    if not retry:
        return None

    # The token may be missing/expired: fetch a fresh one and try exactly once more.
    self.__get_JWT_token()
    return self._get_data(url, False)
|
|
|
def _post_data(self, url, data, retry=True):
    """POST *data* (JSON) to the API endpoint *url*.

    BUGFIX/consistency: the ``retry`` parameter was accepted but never used;
    like _get_data, a failed request now refreshes the JWT token and retries
    exactly once, and failure returns an explicit None.

    Args:
        url (string): The endpoint location
        data (dict): JSON-serializable request body
        retry (bool, optional): Retry once after refreshing the token. Defaults to True.

    Returns:
        dict, None: Decoded JSON response, or None when the request failed.
    """
    online_data = requests.post(self.__get_full_url(url), headers=self.__authorization_header, json=data)

    if online_data.status_code in [200, 201]:
        return online_data.json()

    if retry:
        self.__get_JWT_token()
        return self._post_data(url, data, False)

    return None
|
|
|
def _put_data(self, url, data, retry=True):
    """PUT *data* (JSON) to the API endpoint *url*.

    BUGFIX/consistency: the ``retry`` parameter was accepted but never used;
    like _get_data, a failed request now refreshes the JWT token and retries
    exactly once, and failure returns an explicit None.

    Args:
        url (string): The endpoint location
        data (dict): JSON-serializable request body
        retry (bool, optional): Retry once after refreshing the token. Defaults to True.

    Returns:
        dict, None: Decoded JSON response, or None when the request failed.
    """
    online_data = requests.put(self.__get_full_url(url), headers=self.__authorization_header, json=data)

    if online_data.status_code in [200, 201]:
        return online_data.json()

    if retry:
        self.__get_JWT_token()
        return self._put_data(url, data, False)

    return None
|
|
|
def check_connection(self):
    """Check if the VRE API connection is successfull

    Returns:
        bool: True when the connection is successfull, else False
    """
    # Fetch a token on first use; afterwards the connection is assumed good
    # (real verification is disabled, see the note below).
    if self.__authorization_header is None:
        self.__get_JWT_token()

    return True
    # Code below can only work with version 5.0.0 of https://pypi.org/project/djangorestframework-simplejwt/ which is not compatible with Djoser

    # data = self._post_data('/auth/jwt/verify/', data={'token': self.__authorization_header.get('Authorization')[4:]})

    # # We should get back an empty dict.... Else there was an error...
    # return data == {}
|
|
|
def get_new_workspaces(self): |
|
"""Get a list of new workstations to make |
|
|
|
Returns: |
|
list(dict): A list of dicts with new workstations to make |
|
""" |
|
data = self._get_data('/vrw/list/new/') |
|
|
|
workspaces = [] |
|
for workspace in data['results']: |
|
workspaces.append(self.__process_workspace(workspace)) |
|
|
|
return workspaces |
|
|
|
def get_changing_workspaces(self): |
|
"""Get a list of workstations to update |
|
|
|
Returns: |
|
list(dict): A list of dicts with workstations to be updated |
|
""" |
|
data = self._get_data('/vrw/list/change/') |
|
|
|
workspaces = [] |
|
for workspace in data['results']: |
|
workspaces.append(self.__process_workspace(workspace)) |
|
|
|
return workspaces |
|
|
|
def get_deleted_workspaces(self): |
|
"""Get a list of workstations to delete |
|
|
|
Returns: |
|
list(dict): A list of dicts with workstations to be deleted |
|
""" |
|
data = self._get_data('/vrw/list/delete/') |
|
workspaces = [] |
|
for workspace in data['results']: |
|
workspaces.append(self.__process_workspace(workspace)) |
|
|
|
return workspaces |
|
|
|
def workspace_done(self, id, cloud_id): |
|
"""Update back to the VRE API that a workstation has been created. Add a cloud id for reference. |
|
|
|
Args: |
|
id (int): The VRE API workstation ID |
|
cloud_id (string): The cloud id that is created with this workstation |
|
""" |
|
data = self._put_data(f'vrw/{id}/status/', data={'status': 'DONE', 'cloud_id': cloud_id}) |
|
|
|
def workspace_deleted(self, id, cloud_id): |
|
"""Update back to the VRE API that a workstation has been deleted. |
|
|
|
Args: |
|
id (int): The VRE API workstation ID |
|
cloud_id (string): The cloud id that is created with this workstation |
|
""" |
|
data = self._put_data(f'vrw/{id}/status/', data={'status': 'TERMINATED', 'cloud_id': cloud_id}) |
|
|
|
|
|
if __name__ == "__main__":
    # Load config settings in a dict.
    config = dotenv_values(f'{WORKINGDIR}/.env')
    if '' != config.get('SENTRY_DSN', ''):
        # Load Sentry logging

        # All of this is already happening by default!
        sentry_logging = LoggingIntegration(
            level=logging.INFO,        # Capture info and above as breadcrumbs
            event_level=logging.ERROR  # Send errors as events
        )
        sentry_sdk.init(
            dsn=config['SENTRY_DSN'],
            integrations=[sentry_logging]
        )

    # Simple PID-file lock so only one instance of this sync runs at a time.
    lockfile = Path(f'{WORKINGDIR}/.running')

    if lockfile.exists():
        process_id = int(lockfile.read_text())
        now = datetime.now()
        started = datetime.fromtimestamp(lockfile.stat().st_ctime)
        if Path(f'/proc/{process_id}/stat').exists():
            logger.error(f'There is already a process running with PID {lockfile.read_text()}, started at: {started}, {now - started} ago')
            quit()
        else:
            logger.warning(f'Removing old stale PID({process_id}), started at: {started}, {now - started} ago')
            lockfile.unlink()

    lockfile.write_text(str(os.getpid()))

    # Bug fix: all work after the lockfile is written runs inside try/finally
    # so the lockfile is always released. Previously an early quit() (failed
    # API or LDAP login) or any crash left a stale lockfile behind that
    # blocked the next run until it was cleaned up by hand.
    try:
        # Load API client.
        vre_api = VRE_API_CLient(config['VRE_API_HOST'], config['VRE_API_PREFIX'], config['VRE_API_USER'], config['VRE_API_PASS'])
        if not vre_api.check_connection():
            logger.error('Could not login to the VRE API server. Check connection credentials.')
            quit()

        # Load LDAP client for making LDAP changes
        ldap_client = VRWLDAP(config['LDAP_HOST'], int(config['LDAP_PORT']), config['LDAP_USER'], config['LDAP_PASS'], config['LDAP_SSL'], not config['LDAP_MANUAL_GROUPS'])
        if not ldap_client.check_connection():
            logger.error('Could not login to the LDAP server. Check connection credentials.')
            quit()

        # Creating workspaces
        workspaces = vre_api.get_new_workspaces()
        logger.info(f'We have {len(workspaces)} new workstations to create.')

        for workspace in workspaces:
            study_dn = ldap_client.search_research_group(
                workspace['study_name'], workspace['study_dn'])
            if not study_dn:
                try:
                    study_dn = ldap_client.create_research_group(workspace['study_name'], config['LDAP_MANUAL_GROUPS'])
                except DuplicateResearchGroup as ex:
                    # Bug fix: log through the module logger (so Sentry sees
                    # it) and skip this workspace; previously the error was
                    # print()-ed and processing continued with a falsy
                    # study_dn, corrupting the follow-up LDAP calls.
                    logger.error(ex)
                    continue

            user_dn = ldap_client.search_researcher(workspace['researcher_email'])
            if not user_dn:
                try:
                    user_dn = ldap_client.create_researcher(workspace['researcher_first_name'],
                                                            workspace['researcher_last_name'],
                                                            workspace['researcher_email'],
                                                            workspace['researcher_mobile_phone'],
                                                            workspace['researcher_Pnumber'])
                except DuplicateResearchGroup as ex:
                    # Bug fix: same as above -- log and skip instead of
                    # continuing with a falsy user_dn.
                    logger.error(ex)
                    continue
            else:
                logger.info(f'Using existing Member DN: {user_dn}')

            # Remove old member data if exists
            ldap_client.remove_researcher_from_group(study_dn, user_dn)

            # Add new member data for groups
            _, workstation_dn = ldap_client.researcher_to_group(study_dn, user_dn, workspace['researcher_role'], workspace['workspace_type'])

            # Update the VRE API that this workspace is done....
            vre_api.workspace_done(workspace['workspace_id'], workstation_dn)

        # Done processing new Workspaces..

        # Update workspaces
        workspaces = vre_api.get_changing_workspaces()
        logger.info(f'We have {len(workspaces)} workstations to update.')
        for workspace in workspaces:
            # Find the existing research DN
            study_dn = ldap_client.search_research_group(workspace['study_name'], workspace['study_dn'])
            # Update the existing research with a new name
            study_dn = ldap_client.update_research_group(study_dn, workspace['study_name'])

            # Update the user role and workstation
            user_dn = ldap_client.search_researcher(workspace['researcher_email'])
            if user_dn:
                # Update user details
                _ = ldap_client.update_researcher(workspace['researcher_first_name'],
                                                  workspace['researcher_last_name'],
                                                  workspace['researcher_email'],
                                                  workspace['researcher_mobile_phone'],
                                                  workspace['researcher_Pnumber'])

                # Remove existing role and workstation of the research from the research group
                ldap_client.remove_researcher_from_group(workspace['study_dn'], user_dn)
                # Add new member data for groups (which is the update)
                _, workstation_dn = ldap_client.researcher_to_group(study_dn, user_dn, workspace['researcher_role'], workspace['workspace_type'])
                # Update the VRE API that this workspace is done....
                vre_api.workspace_done(workspace['workspace_id'], workstation_dn)

        # Done processing updated Workspaces..

        # Delete existing workspaces
        workspaces = vre_api.get_deleted_workspaces()
        logger.info(f'We have {len(workspaces)} new workstations to delete.')

        for workspace in workspaces:
            user_dn = ldap_client.search_researcher(workspace['researcher_email'])

            if user_dn:
                ldap_client.remove_researcher_from_group(workspace['study_dn'], user_dn)
            else:
                logger.error(f'User {workspace["researcher_email"]} is not member of research group {workspace["study_name"]}')

            left_over_members = ldap_client.research_group_members(workspace['study_dn'])
            logger.info(f'There are {len(left_over_members)} member(s) left in research group: {workspace["study_name"]}')

            # Do we want to clean up research groups that does not have any members anymore? Does this delete the storage as well??
            # For now, we do not clean up empty research groups!!!!
            # if len(left_over_members) == 0:
            #     # No members left, so delete the research group
            #     ldap_client.delete_research_group(workspace['study_dn'])

            # Update the VRE API that this workspace is deleted....
            vre_api.workspace_deleted(workspace['workspace_id'], workspace['workspace_dn'])
    finally:
        # Release the lockfile
        lockfile.unlink()
|
|
|