@ -539,39 +542,43 @@ class dHealthImport():
@@ -539,39 +542,43 @@ class dHealthImport():
# Collect the account IDs from Netivity data which holds 3 fields: AccountId, LegacyAccountId and DacadooId. This allow us to join all the data sets
self.__logmessage('Inserting Netivity account data to new rug ids ...')
sql=f'INSERT INTO {new_user_table}(menzis_id,netivity_legacy_id,dacadoo_id) SELECT AccountId, LegacyAccountId, DacadooId FROM netivity_account'
sql=f'INSERT INTO {new_user_table}(menzis_id,netivity_legacy_id,dacadoo_id,email_address) SELECT AccountId, LegacyAccountId, DacadooId, EmailAddress FROM netivity_account'
self.__logmessage(sql)
conn.execute(sql)
self.__logmessage('Inserting Netivity account data to new rug ids is done!')
self.__logmessage('Get the legacy account ids based on Dacadoo Ids ...')
sql=f'UPDATE {new_user_table} SET netivity_legacy_id = (SELECT AccountId FROM netivity_legacyaccountoud WHERE netivity_legacyaccountoud.DacadooId = {new_user_table}.dacadoo_id LIMIT 1) WHERE netivity_legacy_id = \'\''
self.__logmessage(sql)
conn.execute(sql)
count=conn.execute(sql)
self.__logmessage(f'Updated {count.rowcount} Netivity legacy accounts based on the Dacadoo IDs.')
# Collect old IDS from legacy tables which holds 2 fields: AccountId, DacadooId. But now we only want NEW records in table rug_userid_conversion. So only request records which are not al ready seen.
self.__logmessage('Inserting Netivity LEGACY account data to new rug ids ...')
sql=f'INSERT INTO {new_user_table}(netivity_legacy_id,dacadoo_id) SELECT AccountId, DacadooId FROM netivity_legacyaccountoud WHERE AccountId NOT IN (SELECT netivity_legacy_id FROM {new_user_table}) AND DacadooId NOT IN (SELECT dacadoo_id FROM {new_user_table})'
self.__logmessage(sql)
conn.execute(sql)
self.__logmessage('Inserting Netivity LEGACY account data to new rug ids is done!')
# This happens due to the fact, that Netivity users are sharing their Dacadoo account. We cannot handle that.
print(account)
continue
sql=f'UPDATE {table_name} SET rug_id = \'{account[3]}\' WHERE {source_id} = {source_value}'
self.__logmessage(sql)
sql=f'UPDATE {table_name} SET rug_id = \'{account[4]}\' WHERE accountid = {source_value}'
#self.__logmessage(sql)
updates=conn.execute(sql)
self.__logmessage(f'[{counter} off {total_accounts}({(counter/total_accounts)*100:.2f}%)]: Updated {updates.rowcount} records for account id {source_value} in table {table_name}')
@ -640,99 +652,99 @@ class dHealthImport():
@@ -640,99 +652,99 @@ class dHealthImport():
# self.__logmessage(sql)
# conn.execute(sql)
tables_to_process=[]
withself.db.connect()asconn,conn.begin():
self.__init_mysql_connection(conn)
# Get all the Dacadoo tables which holds ids in field name userId
sql=f'UPDATE {table_name} SET rug_id = (SELECT rug_id FROM {new_user_table} WHERE {new_user_table}.dacadoo_id = {table_name}.id LIMIT 1)'
self.__logmessage(sql)
conn.execute(sql)
id_field_name='userId'
self.__drop_fields.append(id_field_name)
sql=f'SELECT table_name FROM information_schema.COLUMNS WHERE COLUMN_NAME = \'{id_field_name}\' AND TABLE_SCHEMA = \'{self.__temp_db_name}\' ORDER BY table_name'
result=conn.execute(sql)
forrowinresult:
tables_to_process.append(row[0])
# Here we loop over the tables that needs to be changed. We open a new DB connection for every table update.
# This will hopefully reduce the undo log and commit earlier the changes
fortable_nameintables_to_process:
withself.db.connect()asconn,conn.begin():
self.__init_mysql_connection(conn)
try:
sql=f'ALTER TABLE {table_name} DROP COLUMN rug_id'
sql=f'UPDATE {table_name} SET rug_id = (SELECT rug_id FROM {new_user_table} WHERE {new_user_table}.dacadoo_id = {table_name}.{id_field_name} LIMIT 1) WHERE rug_id IS NULL LIMIT {batch_size}'
sql=f'SELECT table_name FROM information_schema.COLUMNS WHERE COLUMN_NAME = \'{id_field_name}\' AND TABLE_SCHEMA = \'{self.__temp_db_name}\' ORDER BY table_name'
result=conn.execute(sql)
forrowinresult:
table_name=row[0]
sql=f'DELETE FROM {table_name} WHERE {id_field_name} IS NULL'
self.__logmessage(sql)
conn.execute(sql)
# Special case. These are the original Dacadoo ids. Only in the user table they should be deleted.
withself.db.connect()asconn,conn.begin():
self.__init_mysql_connection(conn)
drop_field='id'
table_name='dacadoo_user'
self.__logmessage(f'Dropping field {drop_field} from table {table_name}')
sql=f'ALTER TABLE {table_name} DROP {drop_field}'
result=conn.execute(sql)
self.__logmessage(f'Dropped field {drop_field} from table {table_name}')
## tables_to_process = []
## with self.db.connect() as conn, conn.begin():
## self.__init_mysql_connection(conn)
## # Get all the Dacadoo tables which holds ids in field name userId
## sql = f'UPDATE {table_name} SET rug_id = (SELECT rug_id FROM {new_user_table} WHERE {new_user_table}.dacadoo_id = {table_name}.id LIMIT 1)'
## self.__logmessage(sql)
## conn.execute(sql)
##
## id_field_name = 'userId'
## self.__drop_fields.append(id_field_name)
## sql = f'SELECT table_name FROM information_schema.COLUMNS WHERE COLUMN_NAME = \'{id_field_name}\' AND TABLE_SCHEMA = \'{self.__temp_db_name}\' ORDER BY table_name'
## result = conn.execute(sql)
## for row in result:
##tables_to_process.append(row[0])
##
## # Here we loop over the tables that needs to be changed. We open a new DB connection for every table update.
## # This will hopefully reduce the undo log and commit earlier the changes
## for table_name in tables_to_process:
## with self.db.connect() as conn, conn.begin():
##self.__init_mysql_connection(conn)
##try:
## sql = f'ALTER TABLE {table_name} DROP COLUMN rug_id'
## self.__logmessage(sql)
## conn.execute(sql)
##except Exception:
## pass
##
##try:
## sql = f'ALTER TABLE {table_name} DROP INDEX rug_id'
## sql = f'UPDATE {table_name} SET rug_id = (SELECT rug_id FROM {new_user_table} WHERE {new_user_table}.dacadoo_id = {table_name}.{id_field_name} LIMIT 1) WHERE rug_id IS NULL LIMIT {batch_size}'
## sql = f'SELECT table_name FROM information_schema.COLUMNS WHERE COLUMN_NAME = \'{id_field_name}\' AND TABLE_SCHEMA = \'{self.__temp_db_name}\' ORDER BY table_name'
## result = conn.execute(sql)
## for row in result:
##table_name = row[0]
##sql = f'DELETE FROM {table_name} WHERE {id_field_name} IS NULL'
##self.__logmessage(sql)
## conn.execute(sql)
##
## # Special case. These are the original Dacadoo ids. Only in the user table they should be deleted.
## with self.db.connect() as conn, conn.begin():
## self.__init_mysql_connection(conn)
## drop_field = 'id'
## table_name = 'dacadoo_user'
## self.__logmessage(f'Dropping field {drop_field} from table {table_name}')
## sql = f'ALTER TABLE {table_name} DROP {drop_field}'
## result = conn.execute(sql)
## self.__logmessage(f'Dropped field {drop_field} from table {table_name}')
##
defdrop_fields(self,drop=True):
@ -776,7 +788,7 @@ class dHealthImport():
@@ -776,7 +788,7 @@ class dHealthImport():
forrowinresult:
table_name=row[0]
self.__logmessage(f'Updateing postal code field {postal_code_field} for table {table_name}')
sql=f'UPDATE {table_name} SET {postal_code_field} = SUBSTRING({postal_code_field},0,4)'
sql=f'UPDATE {table_name} SET {postal_code_field} = SUBSTRING({postal_code_field},1,4)'
result=conn.execute(sql)
self.__logmessage(f'Updated postal code field {postal_code_field} for table {table_name}')
@ -816,36 +828,46 @@ class dHealthImport():
@@ -816,36 +828,46 @@ class dHealthImport():
print('')
def addDacadooData(self, location):
    """Register one or more Dacadoo source folders and reload the file list.

    ``location`` is a comma-separated string of directory paths. Every path
    that exists and is a directory is appended to the Dacadoo entry of the
    source-folder registry; invalid paths are reported and skipped (best
    effort, no exception), so one bad path does not block the rest.

    NOTE(review): assumes ``self.__source_folders['dacadoo']`` is a list —
    the sibling methods use the same append pattern; confirm initialisation.
    """
    for raw_path in location.split(','):
        folder = Path(raw_path)
        if folder.exists() and folder.is_dir():
            self.__source_folders['dacadoo'].append(folder)
        else:
            # Report and continue with the remaining comma-separated paths.
            print(f'Location {folder} is not a valid Dacadoo source location')
    # Rescan after all folders were registered, not once per folder.
    self.__load_files()
def addMenzisData(self, location):
    """Register one or more Menzis source folders and reload the file list.

    ``location`` is a comma-separated string of directory paths. Every path
    that exists and is a directory is appended to the Menzis entry of the
    source-folder registry; invalid paths are reported and skipped (best
    effort, no exception), so one bad path does not block the rest.

    NOTE(review): assumes ``self.__source_folders['menzis']`` is a list —
    the sibling methods use the same append pattern; confirm initialisation.
    """
    for raw_path in location.split(','):
        folder = Path(raw_path)
        if folder.exists() and folder.is_dir():
            self.__source_folders['menzis'].append(folder)
        else:
            # Report and continue with the remaining comma-separated paths.
            print(f'Location {folder} is not a valid Menzis source location')
    # Rescan after all folders were registered, not once per folder.
    self.__load_files()
def addNetivityData(self, location):
    """Register one or more Netivity source folders and reload the file list.

    Brought in line with ``addDacadooData`` / ``addMenzisData``, which the
    same change set converts to accept a comma-separated string of directory
    paths. Every path that exists and is a directory is appended to the
    Netivity entry of the source-folder registry; invalid paths are reported
    and skipped instead of raising, so one bad path does not block the rest.

    NOTE(review): assumes ``self.__source_folders['netivity']`` is a list —
    the sibling methods use the same append pattern; confirm initialisation.
    """
    for raw_path in location.split(','):
        folder = Path(raw_path)
        if folder.exists() and folder.is_dir():
            self.__source_folders['netivity'].append(folder)
        else:
            # Report and continue with the remaining comma-separated paths.
            print(f'Location {folder} is not a valid Netivity source location')
    # Rescan after all folders were registered, not once per folder.
    self.__load_files()