diff --git a/base_external_dbsource/base_external_dbsource.py b/base_external_dbsource/base_external_dbsource.py
index 92ff9b3de..fb04a679c 100644
--- a/base_external_dbsource/base_external_dbsource.py
+++ b/base_external_dbsource/base_external_dbsource.py
@@ -105,8 +105,8 @@ Sample connection strings:
 
         return conn
 
-    def execute_and_inspect(self, cr, uid, ids, sqlquery, sqlparams=None, context=None):
-        """Executes SQL and returns a dict with the rows and the list of columns.
+    def execute(self, cr, uid, ids, sqlquery, sqlparams=None, metadata=False, context=None):
+        """Executes SQL and returns a list of rows.
 
         "sqlparams" can be a dict of values, that can be referenced in the SQL statement
         using "%(key)s" or, in the case of Oracle, ":key".
@@ -114,31 +114,32 @@ Sample connection strings:
             sqlquery = "select * from mytable where city = %(city)s and date > %(dt)s"
             params = {'city': 'Lisbon', 'dt': datetime.datetime(2000, 12, 31)}
 
-        Sample result: { 'colnames'; ['col_a', 'col_b', ...]
-                       , 'rows': [ (a0, b0, ...), (a1, b1, ...), ...] }
+        If metadata=True, it will instead return a dict containing the rows list and the columns list,
+        in the format:
+            { 'cols': [ 'col_a', 'col_b', ...]
+            , 'rows': [ (a0, b0, ...), (a1, b1, ...), ...] }
         """
         data = self.browse(cr, uid, ids)
-        nams = dict()
-        rows = list()
+        rows, cols = list(), list()
         for obj in data:
             conn = self.conn_open(cr, uid, obj.id)
             if obj.connector in ["sqlite","mysql","mssql"]:
                 #using sqlalchemy
                 cur = conn.execute(sqlquery, sqlparams)
-                nams = cur.keys()
+                if metadata: cols = cur.keys()
                 rows = [r for r in cur]
             else:
                 #using other db connectors
                 cur = conn.cursor()
                 cur.execute(sqlquery, sqlparams)
-                nams = [d[0] for d in cur.description]
+                if metadata: cols = [d[0] for d in cur.description]
                 rows = cur.fetchall()
             conn.close()
-        return {'colnames': nams, 'rows': rows}
+        if metadata:
+            return {'cols': cols, 'rows': rows}
+        else:
+            return rows
 
-    def execute(self, cr, uid, ids, sqlquery, sqlparams=None, context=None):
-        return self.execute_and_inspect(self, cr, uid, ids, sqlquery, sqlparams)['rows']
-
     def connection_test(self, cr, uid, ids, context=None):
         for obj in self.browse(cr, uid, ids, context):
             conn = False
diff --git a/import_odbc/import_odbc.py b/import_odbc/import_odbc.py
index 8e6698453..8b14675d7 100644
--- a/import_odbc/import_odbc.py
+++ b/import_odbc/import_odbc.py
@@ -19,13 +19,13 @@
 #
 ##############################################################################
 
-import os
 import sys
 from datetime import datetime
 from osv import fields, osv
 import logging
 _logger = logging.getLogger(__name__)
 _loglvl = _logger.getEffectiveLevel()
+SEP = '|'
 
 class import_odbc_dbtable(osv.osv):
     _name="import.odbc.dbtable"
@@ -50,7 +50,7 @@ class import_odbc_dbtable(osv.osv):
             help = "On error try to reimport rows ignoring relationships."),
         'raise_import_errors': fields.boolean('Raise import errors',
             help = "Import errors not handled, intended for debugging purposes."
-                 + "\nAlso forces debug messages to be written to the serevr log."),
+                 + "\nAlso forces debug messages to be written to the server log."),
         }
     _defaults = {
         'enabled': True,
@@ -79,25 +79,24 @@ class import_odbc_dbtable(osv.osv):
             _logger.debug( data )
 
-        cols = list(flds) #copy to avoid side effects
-        err = list()
-        msg = str()
+        cols = list(flds)  #copy to avoid side effects
+        errmsg = str()
         if table_obj.raise_import_errors:
             model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
         else:
             try:
                 model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
             except:
-                msg = str(sys.exc_info()[1])
+                errmsg = str(sys.exc_info()[1])
 
-        if err and not table_obj.ignore_rel_errors:
+        if errmsg and not table_obj.ignore_rel_errors:
             #Fail
-            append_to_log(log, 'ERROR', data, msg )
+            append_to_log(log, 'ERROR', data, errmsg )
             log['last_error_count'] += 1
             return False
 
-        if err and table_obj.ignore_rel_errors:
+        if errmsg and table_obj.ignore_rel_errors:
             #Warn and retry ignoring many2one fields...
-            append_to_log(log, 'WARN', data, msg )
+            append_to_log(log, 'WARN', data, errmsg )
             log['last_warn_count'] += 1
             #Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
             i = find_m2o(cols)
@@ -147,11 +146,11 @@ class import_odbc_dbtable(osv.osv):
             if obj.last_sync: sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
             else: sync = datetime.datetime(1900, 1, 1, 0, 0, 0)
             params = {'sync': sync}
-            res = db_model.execute_and_inspect(cr, uid, [obj.dbsource_id.id], obj.sql_source, params)
+            res = db_model.execute(cr, uid, [obj.dbsource_id.id], obj.sql_source, params, metadata=True)
 
             #Exclude columns titled "None"; add (xml_)"id" column
-            cidx = [i for i, x in enumerate(res['colnames']) if x.upper() != 'NONE']
-            cols = [x for i, x in enumerate(res['colnames']) if x.upper() != 'NONE'] + ['id']
+            cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
+            cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id']
 
             #Import each row:
             for row in res['rows']:
@@ -181,9 +180,9 @@ class import_odbc_dbtable(osv.osv):
                 model_name, log['last_record_count'], log['last_error_count'] ,
                 log['last_warn_count'] ) )
             #Write run log, either if the table import is active or inactive
-                if log['last_log']:
-                    log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
-                log.update( {'last_log': '\n'.join(log['last_log'])} )
+            if log['last_log']:
+                log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
+            log.update( {'last_log': '\n'.join(log['last_log'])} )
             log.update({ 'last_run': datetime.now().replace(microsecond=0) }) #second=0,
             self.write(cr, uid, [obj.id], log)
             #Finished