[FIX] PEP8 compliance and review comments
parent 7e09d76422
commit 0042904f45

base_external_dbsource/__init__.py
@@ -19,6 +19,6 @@
 #
 ##############################################################################
 
-import base_external_dbsource
+from . import base_external_dbsource
 
 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

base_external_dbsource/__openerp__.py
@@ -27,7 +27,7 @@
 This module allows you to define connections to foreign databases using ODBC,
 Oracle Client or SQLAlchemy.
 
-Databases sources can be configured in Settings > Configuration -> Data sources.
+Database sources can be configured in Settings > Configuration -> Data sources.
 
 Depending on the database, you need:
 * to install unixodbc and python-pyodbc packages to use ODBC connections.
@@ -44,7 +44,6 @@ Depending on the database, you need:
     'depends': [
         'base',
     ],
-    'init': [],
     'data': [
         'base_external_dbsource_view.xml',
         'security/ir.model.access.csv',
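
For reviewers who want to try the module out: a minimal sketch of creating a data source through the ORM, written against the _columns defined in base_external_dbsource.py further down this page. The method name and all connection values are invented, and the %s left inside conn_string is, as far as the samples suggest, substituted with the password field when the connection is opened.

    # Hypothetical usage sketch (7.0-style API, inside some model's method).
    def _create_demo_dbsource(self, cr, uid, context=None):
        dbsource_obj = self.pool.get('base.external.dbsource')
        return dbsource_obj.create(cr, uid, {
            'name': 'Legacy mirror',            # example datasource name
            'connector': 'postgresql',
            'conn_string': "dbname='legacy' user='reader' host='localhost' "
                           "port='5432' password=%s",
            'password': 'secret',               # kept out of the display string
        }, context=context)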

base_external_dbsource/base_external_dbsource.py
@@ -20,10 +20,10 @@
 ##############################################################################
 
 import os
-from osv import fields, osv
+import logging
+from openerp.osv import orm, fields
 from openerp.tools.translate import _
 import openerp.tools as tools
-import logging
 _logger = logging.getLogger(__name__)
 
 CONNECTORS = []
@@ -31,56 +31,67 @@ CONNECTORS = []
 try:
     import sqlalchemy
     import pymssql
-    CONNECTORS.append( ('mssql', 'Microsoft SQL Server') )
+    CONNECTORS.append(('mssql', 'Microsoft SQL Server'))
 except:
-    _logger.info('MS SQL Server not available. Please install "slqalchemy" and "pymssql" python package.')
+    _logger.info('MS SQL Server not available. Please install "slqalchemy"\
+                 and "pymssql" python package.')
 
 try:
     import sqlalchemy
     import MySQLdb
-    CONNECTORS.append( ('mysql', 'MySQL') )
+    CONNECTORS.append(('mysql', 'MySQL'))
 except:
-    _logger.info('MySQL not available. Please install "slqalchemy" and "mysqldb" python package.')
+    _logger.info('MySQL not available. Please install "slqalchemy" and\
+                 "mysqldb" python package.')
 
 try:
     import pyodbc
-    CONNECTORS.append( ('pyodbc', 'ODBC') )
+    CONNECTORS.append(('pyodbc', 'ODBC'))
 except:
-    _logger.info('ODBC libraries not available. Please install "unixodbc" and "python-pyodbc" packages.')
+    _logger.info('ODBC libraries not available. Please install "unixodbc"\
+                 and "python-pyodbc" packages.')
 
 try:
     import cx_Oracle
-    CONNECTORS.append( ('cx_Oracle', 'Oracle') )
+    CONNECTORS.append(('cx_Oracle', 'Oracle'))
 except:
-    _logger.info('Oracle libraries not available. Please install "cx_Oracle" python package.')
+    _logger.info('Oracle libraries not available. Please install "cx_Oracle"\
+                 python package.')
 
 import psycopg2
-CONNECTORS.append( ('postgresql', 'PostgreSQL') )
+CONNECTORS.append(('postgresql', 'PostgreSQL'))
 
 try:
     import sqlalchemy
-    CONNECTORS.append( ('sqlite', 'SQLite') )
+    CONNECTORS.append(('sqlite', 'SQLite'))
 except:
-    _logger.info('SQLAlchemy not available. Please install "slqalchemy" python package.')
+    _logger.info('SQLAlchemy not available. Please install "slqalchemy" python\
+                 package.')
 
-class base_external_dbsource(osv.osv):
+
+class base_external_dbsource(orm.Model):
     _name = "base.external.dbsource"
     _description = 'External Database Sources'
     _columns = {
         'name': fields.char('Datasource name', required=True, size=64),
-        'conn_string': fields.text('Connection string', help="""\
+        'conn_string': fields.text('Connection string', help="""
 Sample connection strings:
-- Microsoft SQL Server: mssql+pymssql://username:%s@server:port/dbname?charset=utf8
+- Microsoft SQL Server:
+  mssql+pymssql://username:%s@server:port/dbname?charset=utf8
 - MySQL: mysql://user:%s@server:port/dbname
 - ODBC: DRIVER={FreeTDS};SERVER=server.address;Database=mydb;UID=sa
 - ORACLE: username/%s@//server.address:port/instance
-- PostgreSQL: dbname='template1' user='dbuser' host='localhost' port='5432' password=%s
+- PostgreSQL:
+  dbname='template1' user='dbuser' host='localhost' port='5432' password=%s
 - SQLite: sqlite:///test.db
 """),
-        'password': fields.char('Password' , size=40),
-        'connector': fields.selection(CONNECTORS, 'Connector', required=True,
-            help = "If a connector is missing from the list, check the " \
-                + "server log to confirm that the required componentes were detected."),
+        'password': fields.char('Password', size=40),
+        'connector': fields.selection(CONNECTORS, 'Connector',
+                                      required=True,
+                                      help="If a connector is missing from the\
+                                           list, check the server log to confirm\
+                                           that the required components were\
+                                           detected."),
     }
 
     def conn_open(self, cr, uid, id1):
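
The try/except blocks above are a capability probe: each optional driver is imported once at module load, and only the ones that import cleanly become choices in the 'connector' selection field (a narrower 'except ImportError:' would be the usual spelling; the commit keeps the bare 'except:'). A standalone sketch of the same pattern, with one real stdlib probe and one deliberately hypothetical package name:

    import logging

    _logger = logging.getLogger(__name__)
    AVAILABLE = []  # (value, label) pairs, like CONNECTORS above

    try:
        import sqlite3  # stdlib, so this probe succeeds everywhere
        AVAILABLE.append(('sqlite3', 'SQLite (stdlib)'))
    except ImportError:
        _logger.info('sqlite3 not available.')

    try:
        import some_optional_driver  # hypothetical; expected to be missing
        AVAILABLE.append(('some_optional_driver', 'Example driver'))
    except ImportError:
        _logger.info('some_optional_driver not available; feature disabled.')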
@@ -98,24 +109,28 @@ Sample connection strings:
             conn = cx_Oracle.connect(connStr)
         elif data.connector == 'pyodbc':
             conn = pyodbc.connect(connStr)
-        elif data.connector in ('sqlite','mysql','mssql'):
+        elif data.connector in ('sqlite', 'mysql', 'mssql'):
             conn = sqlalchemy.create_engine(connStr).connect()
         elif data.connector == 'postgresql':
             conn = psycopg2.connect(connStr)
 
         return conn
 
-    def execute(self, cr, uid, ids, sqlquery, sqlparams=None, metadata=False, context=None):
+    def execute(self, cr, uid, ids, sqlquery, sqlparams=None, metadata=False,
+                context=None):
         """Executes SQL and returns a list of rows.
 
-        "sqlparams" can be a dict of values, that can be referenced in the SQL statement
-        using "%(key)s" or, in the case of Oracle, ":key".
+        "sqlparams" can be a dict of values, that can be referenced in
+        the SQL statement using "%(key)s" or, in the case of Oracle,
+        ":key".
         Example:
-            sqlquery = "select * from mytable where city = %(city)s and date > %(dt)s"
-            params = {'city': 'Lisbon', 'dt': datetime.datetime(2000, 12, 31)}
+            sqlquery = "select * from mytable where city = %(city)s and
+                        date > %(dt)s"
+            params = {'city': 'Lisbon',
+                      'dt': datetime.datetime(2000, 12, 31)}
 
-        If metadata=True, it will instead return a dict containing the rows list and the columns list,
-        in the format:
+        If metadata=True, it will instead return a dict containing the
+        rows list and the columns list, in the format:
             { 'cols': [ 'col_a', 'col_b', ...]
             , 'rows': [ (a0, b0, ...), (a1, b1, ...), ...] }
         """
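
To make the reflowed docstring concrete, here is what a call site could look like from inside some other model's method; the record id, table and query are invented, but the argument shapes and the metadata=True result format come straight from the docstring:

    import datetime

    sqlquery = ("select * from mytable "
                "where city = %(city)s and date > %(dt)s")
    params = {'city': 'Lisbon', 'dt': datetime.datetime(2000, 12, 31)}

    dbsource_obj = self.pool.get('base.external.dbsource')
    res = dbsource_obj.execute(cr, uid, [dbsource_id], sqlquery, params,
                               metadata=True)
    # res['cols'] -> ['col_a', 'col_b', ...]
    # res['rows'] -> [(a0, b0, ...), (a1, b1, ...), ...]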
@@ -123,16 +138,18 @@ Sample connection strings:
         rows, cols = list(), list()
         for obj in data:
             conn = self.conn_open(cr, uid, obj.id)
-            if obj.connector in ["sqlite","mysql","mssql"]:
+            if obj.connector in ["sqlite", "mysql", "mssql"]:
                 #using sqlalchemy
                 cur = conn.execute(sqlquery, sqlparams)
-                if metadata: cols = cur.keys()
+                if metadata:
+                    cols = cur.keys()
                 rows = [r for r in cur]
             else:
                 #using other db connectors
                 cur = conn.cursor()
                 cur.execute(sqlquery, sqlparams)
-                if metadata: cols = [d[0] for d in cur.description]
+                if metadata:
+                    cols = [d[0] for d in cur.description]
                 rows = cur.fetchall()
             conn.close()
         if metadata:
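
The two branches in execute() exist because SQLAlchemy connections return a result object straight from connection.execute(), while the DB-API drivers (psycopg2, pyodbc, cx_Oracle) go through an explicit cursor, so column metadata lives in different places. A minimal side-by-side, runnable with the stdlib plus a pre-1.4 SQLAlchemy (the era this code targeted, when plain SQL strings were still accepted):

    import sqlite3
    import sqlalchemy

    # DB-API style: column names live in cursor.description
    conn = sqlite3.connect(':memory:')
    cur = conn.cursor()
    cur.execute('select 1 as a, 2 as b')
    cols = [d[0] for d in cur.description]    # ['a', 'b']
    rows = cur.fetchall()                     # [(1, 2)]
    conn.close()

    # SQLAlchemy style: the result object itself knows its keys
    conn = sqlalchemy.create_engine('sqlite://').connect()
    cur = conn.execute('select 1 as a, 2 as b')
    cols = cur.keys()                         # ['a', 'b']
    rows = [r for r in cur]                   # [(1, 2)]
    conn.close()

One thing the PEP8 pass did not catch, in the hunk that follows: connection_test still raises osv.except_osv, but this commit's import change dropped the osv name from base_external_dbsource.py, so unless osv is imported somewhere outside the hunks shown, both raise statements would now fail with NameError (orm.except_orm is the 7.0 spelling).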
@@ -146,14 +163,18 @@ Sample connection strings:
         try:
             conn = self.conn_open(cr, uid, obj.id)
         except Exception, e:
-            raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
+            raise osv.except_osv(_("Connection test failed!"),
+                                 _("Here is what we got instead:\n %s")
+                                 % tools.ustr(e))
         finally:
             try:
-                if conn: conn.close()
+                if conn:
+                    conn.close()
             except Exception:
                 # ignored, just a consequence of the previous exception
                 pass
         #TODO: if OK a (wizard) message box should be displayed
-        raise osv.except_osv(_("Connection test succeeded!"), _("Everything seems properly set up!"))
+        raise osv.except_osv(_("Connection test succeeded!"),
+                             _("Everything seems properly set up!"))
 
-base_external_dbsource()
+#EOF

import_odbc/__init__.py
@@ -19,6 +19,6 @@
 #
 ##############################################################################
 
-import import_odbc
+from . import import_odbc
 
 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

import_odbc/__openerp__.py
@@ -68,7 +68,6 @@ Improvements ideas waiting for a contributor:
         'base',
         'base_external_dbsource',
     ],
-    'init': [],
     'data': [
         'import_odbc_view.xml',
         'security/ir.model.access.csv',

import_odbc/import_odbc.py
@@ -21,14 +21,15 @@
 
 import sys
 from datetime import datetime
-from osv import fields, osv
+from openerp.osv import orm, fields
 import logging
 _logger = logging.getLogger(__name__)
 _loglvl = _logger.getEffectiveLevel()
 SEP = '|'
 
-class import_odbc_dbtable(osv.osv):
-    _name="import.odbc.dbtable"
+
+class import_odbc_dbtable(orm.Model):
+    _name = "import.odbc.dbtable"
     _description = 'Import Table Data'
     _order = 'exec_order'
     _columns = {
@@ -36,7 +37,7 @@ class import_odbc_dbtable(osv.osv):
         'enabled': fields.boolean('Execution enabled'),
         'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
         'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
-        'model_target': fields.many2one('ir.model','Target object'),
+        'model_target': fields.many2one('ir.model', 'Target object'),
         'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
         'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
         'last_sync': fields.datetime('Last sync date', help="Datetime for the last succesfull sync. Later changes on the source may not be replicated on the destination"),
@@ -47,10 +48,10 @@ class import_odbc_dbtable(osv.osv):
         'last_warn_count': fields.integer('Last warning count', readonly=True),
         'last_log': fields.text('Last run log', readonly=True),
         'ignore_rel_errors': fields.boolean('Ignore relationship errors',
-            help = "On error try to reimport rows ignoring relationships."),
+            help="On error try to reimport rows ignoring relationships."),
         'raise_import_errors': fields.boolean('Raise import errors',
-            help = "Import errors not handled, intended for debugging purposes."
-                + "\nAlso forces debug messages to be written to the server log."),
+            help="Import errors not handled, intended for debugging purposes."
+                 "\nAlso forces debug messages to be written to the server log."),
         }
     _defaults = {
         'enabled': True,
@@ -63,11 +64,11 @@ class import_odbc_dbtable(osv.osv):
         def find_m2o(field_list):
             """"Find index of first column with a one2many field"""
             for i, x in enumerate(field_list):
-                if len(x)>3 and x[-3:] == ':id' or x[-3:] == '/id':
+                if len(x) > 3 and x[-3:] == ':id' or x[-3:] == '/id':
                     return i
             return -1
 
-        def append_to_log(log, level, obj_id = '', msg = '', rel_id = ''):
+        def append_to_log(log, level, obj_id='', msg='', rel_id=''):
             if '_id_' in obj_id:
                 obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
             if ': .' in msg and not rel_id:
@@ -76,10 +77,8 @@ class import_odbc_dbtable(osv.osv):
                     rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
                     msg = msg[:msg.find(': .')]
             log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))
 
-        _logger.debug( data )
-
-        cols = list(flds) #copy to avoid side effects
-
+        _logger.debug(data)
+        cols = list(flds)  # copy to avoid side effects
         errmsg = str()
         if table_obj.raise_import_errors:
             model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
@@ -88,15 +87,14 @@ class import_odbc_dbtable(osv.osv):
                 model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
             except:
                 errmsg = str(sys.exc_info()[1])
 
         if errmsg and not table_obj.ignore_rel_errors:
             #Fail
-            append_to_log(log, 'ERROR', data, errmsg )
+            append_to_log(log, 'ERROR', data, errmsg)
             log['last_error_count'] += 1
             return False
         if errmsg and table_obj.ignore_rel_errors:
             #Warn and retry ignoring many2one fields...
-            append_to_log(log, 'WARN', data, errmsg )
+            append_to_log(log, 'WARN', data, errmsg)
             log['last_warn_count'] += 1
             #Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
             i = find_m2o(cols)
@@ -107,30 +105,28 @@ class import_odbc_dbtable(osv.osv):
                 self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
             else:
                 #Fail
-                append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.' )
+                append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
                 log['last_error_count'] += 1
                 return False
 
         return True
 
 
     def import_run(self, cr, uid, ids=None, context=None):
 
         db_model = self.pool.get('base.external.dbsource')
         actions = self.read(cr, uid, ids, ['id', 'exec_order'])
-        actions.sort(key = lambda x:(x['exec_order'], x['id']))
+        actions.sort(key=lambda x: (x['exec_order'], x['id']))
 
         #Consider each dbtable:
         for action_ref in actions:
 
             obj = self.browse(cr, uid, action_ref['id'])
-            if not obj.enabled: continue #skip
+            if not obj.enabled:
+                continue  # skip
 
             _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
             _logger.debug('Importing %s...' % obj.name)
 
             #now() microseconds are stripped to avoid problem with SQL smalldate
-            #TODO: convert UTC Now to local timezone (http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime)
+            #TODO: convert UTC Now to local timezone
+            #http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
             model_name = obj.model_target.model
             model_obj = self.pool.get(model_name)
             xml_prefix = model_name.replace('.', '_') + "_id_"
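
The fallback path above ('Warn and retry ignoring many2one fields') is easiest to see in isolation: drop one relationship column per attempt until the row imports or nothing is left to drop, which is all find_m2o's ':id'/'/id' suffix test is for. A self-contained sketch of that loop (the import_row callable and its error-string contract are assumptions, not the module's API):

    def import_with_m2o_fallback(import_row, cols, data):
        """import_row(cols, data) returns an error string, or None on success."""
        cols, data = list(cols), list(data)   # work on copies
        while True:
            err = import_row(cols, data)
            if not err:
                return True
            # first column holding an external id (':id' or '/id' suffix)
            idx = next((i for i, c in enumerate(cols)
                        if c.endswith(':id') or c.endswith('/id')), -1)
            if idx < 0:
                return False                  # no relationships left to drop
            del cols[idx], data[idx]          # retry without that column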
@@ -143,10 +139,13 @@ class import_odbc_dbtable(osv.osv):
             self.write(cr, uid, [obj.id], log)
 
             #Prepare SQL sentence; replace "%s" with the last_sync date
-            if obj.last_sync: sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
-            else: sync = datetime.datetime(1900, 1, 1, 0, 0, 0)
+            if obj.last_sync:
+                sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
+            else:
+                sync = datetime.datetime(1900, 1, 1, 0, 0, 0)
             params = {'sync': sync}
-            res = db_model.execute(cr, uid, [obj.dbsource_id.id], obj.sql_source, params, metadata=True)
+            res = db_model.execute(cr, uid, [obj.dbsource_id.id],
+                                   obj.sql_source, params, metadata=True)
 
             #Exclude columns titled "None"; add (xml_)"id" column
             cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
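
Two notes on this hunk. First, the 'replace "%s" with the last_sync date' comment means the SQL stored on the table can reference the sync date as a named parameter, which execute() passes through to the driver; an illustrative sql_source (table and columns invented):

    from datetime import datetime

    sql_source = """
        select id, name, write_date
        from legacy_partner
        where write_date > %(sync)s
    """
    params = {'sync': datetime(1900, 1, 1)}   # first run: take everything

Second, the else branch calls datetime.datetime(1900, 1, 1, 0, 0, 0) although the file only does 'from datetime import datetime', so the very first run of a table with no last_sync would raise AttributeError; datetime(1900, 1, 1) is presumably what was meant.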
@@ -159,37 +158,40 @@
             for i in cidx:
                 #TODO: Handle imported datetimes properly - convert from localtime to UTC!
                 v = row[i]
-                if isinstance(v, str): v = v.strip()
+                if isinstance(v, str):
+                    v = v.strip()
                 data.append(v)
-            data.append( xml_prefix + str(row[0]).strip() )
+            data.append(xml_prefix + str(row[0]).strip())
 
             #Import the row; on error, write line to the log
             log['last_record_count'] += 1
             self._import_data(cr, uid, cols, data, model_obj, obj, log)
             if log['last_record_count'] % 500 == 0:
-                _logger.info('...%s rows processed...' % (log['last_record_count']) )
+                _logger.info('...%s rows processed...' % (log['last_record_count']))
 
         #Finished importing all rows
         #If no errors, write new sync date
         if not (log['last_error_count'] or log['last_warn_count']):
             log['last_sync'] = log['start_run']
         level = logging.DEBUG
-        if log['last_warn_count']: level = logging.WARN
-        if log['last_error_count']: level = logging.ERROR
+        if log['last_warn_count']:
+            level = logging.WARN
+        if log['last_error_count']:
+            level = logging.ERROR
         _logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % (
-            model_name, log['last_record_count'], log['last_error_count'] ,
-            log['last_warn_count'] ) )
+            model_name, log['last_record_count'], log['last_error_count'],
+            log['last_warn_count']))
         #Write run log, either if the table import is active or inactive
         if log['last_log']:
             log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
-        log.update( {'last_log': '\n'.join(log['last_log'])} )
-        log.update({ 'last_run': datetime.now().replace(microsecond=0) }) #second=0,
+        log.update({'last_log': '\n'.join(log['last_log'])})
+        log.update({'last_run': datetime.now().replace(microsecond=0)})
         self.write(cr, uid, [obj.id], log)
 
         #Finished
         _logger.debug('Import job FINISHED.')
         return True
 
 
     def import_schedule(self, cr, uid, ids, context=None):
         cron_obj = self.pool.get('ir.cron')
         new_create_id = cron_obj.create(cr, uid, {
@@ -211,4 +213,4 @@ class import_odbc_dbtable(osv.osv):
             'type': 'ir.actions.act_window',
         }
 
-import_odbc_dbtable()
+#EOF
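
The final change in each model file, replacing the trailing base_external_dbsource() / import_odbc_dbtable() calls with an #EOF marker, works because orm.Model subclasses register themselves through their metaclass when the class statement executes, so the old instantiate-to-register call is dead weight in 7.0. A toy illustration of the mechanism, simplified and not the real OpenERP code, in Python 2 syntax to match the diff:

    REGISTRY = {}

    class MetaModel(type):
        def __init__(cls, name, bases, attrs):
            super(MetaModel, cls).__init__(name, bases, attrs)
            if attrs.get('_name'):
                REGISTRY[attrs['_name']] = cls  # registered at class definition

    class Model(object):
        __metaclass__ = MetaModel
        _name = None

    class import_odbc_dbtable(Model):
        _name = 'import.odbc.dbtable'

    # no trailing import_odbc_dbtable() call needed:
    assert REGISTRY['import.odbc.dbtable'] is import_odbc_dbtable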