diff --git a/auditlog/__manifest__.py b/auditlog/__manifest__.py
index 8a452d5c3..c9c3e5abb 100644
--- a/auditlog/__manifest__.py
+++ b/auditlog/__manifest__.py
@@ -2,22 +2,20 @@
 # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
 {
-    'name': "Audit Log",
-    'version': "13.0.1.0.0",
-    'author': "ABF OSIELL,Odoo Community Association (OCA)",
-    'license': "AGPL-3",
-    'website': "https://github.com/OCA/server-tools/",
-    'category': "Tools",
-    'depends': [
-        'base',
+    "name": "Audit Log",
+    "version": "13.0.1.0.0",
+    "author": "ABF OSIELL,Odoo Community Association (OCA)",
+    "license": "AGPL-3",
+    "website": "https://github.com/OCA/server-tools/",
+    "category": "Tools",
+    "depends": ["base"],
+    "data": [
+        "security/ir.model.access.csv",
+        "data/ir_cron.xml",
+        "views/auditlog_view.xml",
+        "views/http_session_view.xml",
+        "views/http_request_view.xml",
     ],
-    'data': [
-        'security/ir.model.access.csv',
-        'data/ir_cron.xml',
-        'views/auditlog_view.xml',
-        'views/http_session_view.xml',
-        'views/http_request_view.xml',
-    ],
-    'application': True,
-    'installable': True,
+    "application": True,
+    "installable": True,
 }
diff --git a/auditlog/data/ir_cron.xml b/auditlog/data/ir_cron.xml
index 87c03d0d4..ce365e19c 100644
--- a/auditlog/data/ir_cron.xml
+++ b/auditlog/data/ir_cron.xml
@@ -1,16 +1,14 @@
[hunk body unrecoverable: the XML markup of data/ir_cron.xml was stripped during extraction; only the text values of the "Auto-vacuum audit logs" ir.cron record survive (interval 1 days, numbercall -1, code model.autovacuum(180), state code), identical on both sides, so the hunk is a pure re-indentation.]
diff --git a/auditlog/models/autovacuum.py b/auditlog/models/autovacuum.py
index b3770ed63..4803a1d6e 100644
--- a/auditlog/models/autovacuum.py
+++ b/auditlog/models/autovacuum.py
@@ -3,14 +3,13 @@
 import logging
 from datetime import datetime, timedelta
 
-from odoo import models, fields, api
-
+from odoo import api, fields, models
 
 _logger = logging.getLogger(__name__)
 
 
 class AuditlogAutovacuum(models.TransientModel):
-    _name = 'auditlog.autovacuum'
+    _name = "auditlog.autovacuum"
     _description = "Auditlog - Delete old logs"
 
     @api.model
@@ -24,17 +23,12 @@ class AuditlogAutovacuum(models.TransientModel):
         """
         days = (days > 0) and int(days) or 0
         deadline = datetime.now() - timedelta(days=days)
-        data_models = (
-            'auditlog.log',
-            'auditlog.http.request',
-            'auditlog.http.session',
-        )
+        data_models = ("auditlog.log", "auditlog.http.request", "auditlog.http.session")
         for data_model in data_models:
             records = self.env[data_model].search(
-                [('create_date', '<=', fields.Datetime.to_string(deadline))])
+                [("create_date", "<=", fields.Datetime.to_string(deadline))]
+            )
             nb_records = len(records)
             records.unlink()
-            _logger.info(
-                "AUTOVACUUM - %s '%s' records deleted",
-                nb_records, data_model)
+            _logger.info("AUTOVACUUM - %s '%s' records deleted", nb_records, data_model)
         return True
diff --git a/auditlog/models/http_request.py b/auditlog/models/http_request.py
index 094554ea7..624b3ba61 100644
--- a/auditlog/models/http_request.py
+++ b/auditlog/models/http_request.py
@@ -3,36 +3,31 @@
 from psycopg2.extensions import AsIs
 
-from odoo import models, fields, api
+from odoo import api, fields, models
 from odoo.http import request
 
 
 class AuditlogHTTPRequest(models.Model):
-    _name = 'auditlog.http.request'
+    _name = "auditlog.http.request"
     _description = "Auditlog - HTTP request log"
     _order = "create_date DESC"
 
-    display_name = fields.Char(
-        "Name", compute="_compute_display_name", store=True)
+    display_name = fields.Char("Name", compute="_compute_display_name", store=True)
     name = fields.Char("Path")
     root_url = fields.Char("Root URL")
-    user_id = fields.Many2one(
-        'res.users', string="User")
-    http_session_id = fields.Many2one(
-        'auditlog.http.session', string="Session")
+    user_id = fields.Many2one("res.users", string="User")
+    http_session_id = fields.Many2one("auditlog.http.session", string="Session")
     user_context = fields.Char("Context")
-    log_ids = fields.One2many(
-        'auditlog.log', 'http_request_id', string="Logs")
+    log_ids = fields.One2many("auditlog.log", "http_request_id", string="Logs")
 
-    @api.depends('create_date', 'name')
+    @api.depends("create_date", "name")
     def _compute_display_name(self):
         for httprequest in self:
             create_date = fields.Datetime.from_string(httprequest.create_date)
-            tz_create_date = fields.Datetime.context_timestamp(
-                httprequest, create_date)
-            httprequest.display_name = "%s (%s)" % (
-                httprequest.name or '?',
-                fields.Datetime.to_string(tz_create_date))
+            tz_create_date = fields.Datetime.context_timestamp(httprequest, create_date)
+            httprequest.display_name = "{} ({})".format(
+                httprequest.name or "?", fields.Datetime.to_string(tz_create_date)
+            )
 
     def name_get(self):
         return [(request.id, request.display_name) for request in self]
@@ -47,24 +42,24 @@ class AuditlogHTTPRequest(models.Model):
         """
         if not request:
             return False
-        http_session_model = self.env['auditlog.http.session']
+        http_session_model = self.env["auditlog.http.session"]
         httprequest = request.httprequest
         if httprequest:
-            if hasattr(httprequest, 'auditlog_http_request_id'):
+            if hasattr(httprequest, "auditlog_http_request_id"):
                 # Verify existence. Could have been rolled back after a
                 # concurrency error
                 self.env.cr.execute(
-                    "SELECT id FROM %s WHERE id = %s", (
-                        AsIs(self._table),
-                        httprequest.auditlog_http_request_id))
+                    "SELECT id FROM %s WHERE id = %s",
+                    (AsIs(self._table), httprequest.auditlog_http_request_id),
+                )
                 if self.env.cr.fetchone():
                     return httprequest.auditlog_http_request_id
             vals = {
-                'name': httprequest.path,
-                'root_url': httprequest.url_root,
-                'user_id': request.uid,
-                'http_session_id': http_session_model.current_http_session(),
-                'user_context': request.context,
+                "name": httprequest.path,
+                "root_url": httprequest.url_root,
+                "user_id": request.uid,
+                "http_session_id": http_session_model.current_http_session(),
+                "user_context": request.context,
             }
             httprequest.auditlog_http_request_id = self.create(vals).id
         return httprequest.auditlog_http_request_id
diff --git a/auditlog/models/http_session.py b/auditlog/models/http_session.py
index 2e636a8a3..e64d29171 100644
--- a/auditlog/models/http_session.py
+++ b/auditlog/models/http_session.py
@@ -1,32 +1,31 @@
 # Copyright 2015 ABF OSIELL
 # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
 
-from odoo import models, fields, api
+from odoo import api, fields, models
 from odoo.http import request
 
 
 class AuditlogtHTTPSession(models.Model):
-    _name = 'auditlog.http.session'
+    _name = "auditlog.http.session"
     _description = "Auditlog - HTTP User session log"
     _order = "create_date DESC"
 
-    display_name = fields.Char(
-        "Name", compute="_compute_display_name", store=True)
+    display_name = fields.Char("Name", compute="_compute_display_name", store=True)
     name = fields.Char("Session ID", index=True)
-    user_id = fields.Many2one(
-        'res.users', string="User", index=True)
+    user_id = fields.Many2one("res.users", string="User", index=True)
     http_request_ids = fields.One2many(
-        'auditlog.http.request', 'http_session_id', string="HTTP Requests")
+        "auditlog.http.request", "http_session_id", string="HTTP Requests"
+    )
 
-    @api.depends('create_date', 'user_id')
+    @api.depends("create_date", "user_id")
     def _compute_display_name(self):
         for httpsession in self:
             create_date = fields.Datetime.from_string(httpsession.create_date)
-            tz_create_date = fields.Datetime.context_timestamp(
-                httpsession, create_date)
-            httpsession.display_name = "%s (%s)" % (
-                httpsession.user_id and httpsession.user_id.name or '?',
-                fields.Datetime.to_string(tz_create_date))
+            tz_create_date = fields.Datetime.context_timestamp(httpsession, create_date)
+            httpsession.display_name = "{} ({})".format(
+                httpsession.user_id and httpsession.user_id.name or "?",
+                fields.Datetime.to_string(tz_create_date),
+            )
 
     def name_get(self):
         return [(session.id, session.display_name) for session in self]
@@ -44,15 +43,11 @@ class AuditlogtHTTPSession(models.Model):
         httpsession = request.session
         if httpsession:
             existing_session = self.search(
-                [('name', '=', httpsession.sid),
-                 ('user_id', '=', request.uid)],
-                limit=1)
+                [("name", "=", httpsession.sid), ("user_id", "=", request.uid)], limit=1
+            )
             if existing_session:
                 return existing_session.id
-            vals = {
-                'name': httpsession.sid,
-                'user_id': request.uid,
-            }
+            vals = {"name": httpsession.sid, "user_id": request.uid}
             httpsession.auditlog_http_session_id = self.create(vals).id
             return httpsession.auditlog_http_session_id
         return False
diff --git a/auditlog/models/log.py b/auditlog/models/log.py
index cf53832d4..b54426c8f 100644
--- a/auditlog/models/log.py
+++ b/auditlog/models/log.py
@@ -1,45 +1,39 @@
 # Copyright 2015 ABF OSIELL
 # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
 
-from odoo import models, fields
+from odoo import fields, models
 
 
 class AuditlogLog(models.Model):
-    _name = 'auditlog.log'
+    _name = "auditlog.log"
     _description = "Auditlog - Log"
     _order = "create_date desc"
 
     name = fields.Char("Resource Name", size=64)
-    model_id = fields.Many2one(
-        'ir.model', string="Model")
+    model_id = fields.Many2one("ir.model", string="Model")
     res_id = fields.Integer("Resource ID")
-    user_id = fields.Many2one(
-        'res.users', string="User")
+    user_id = fields.Many2one("res.users", string="User")
     method = fields.Char(size=64)
-    line_ids = fields.One2many(
-        'auditlog.log.line', 'log_id', string="Fields updated")
-    http_session_id = fields.Many2one(
-        'auditlog.http.session', string="Session")
-    http_request_id = fields.Many2one(
-        'auditlog.http.request', string="HTTP Request")
+    line_ids = fields.One2many("auditlog.log.line", "log_id", string="Fields updated")
+    http_session_id = fields.Many2one("auditlog.http.session", string="Session")
+    http_request_id = fields.Many2one("auditlog.http.request", string="HTTP Request")
     log_type = fields.Selection(
-        [('full', "Full log"),
-         ('fast', "Fast log"),
-         ],
-        string="Type")
+        [("full", "Full log"), ("fast", "Fast log")], string="Type"
+    )
 
 
 class AuditlogLogLine(models.Model):
-    _name = 'auditlog.log.line'
+    _name = "auditlog.log.line"
     _description = "Auditlog - Log details (fields updated)"
 
     field_id = fields.Many2one(
-        'ir.model.fields', ondelete='cascade', string="Field", required=True)
+        "ir.model.fields", ondelete="cascade", string="Field", required=True
+    )
     log_id = fields.Many2one(
-        'auditlog.log', string="Log", ondelete='cascade', index=True)
+        "auditlog.log", string="Log", ondelete="cascade", index=True
+    )
     old_value = fields.Text()
     new_value = fields.Text()
     old_value_text = fields.Text("Old value Text")
     new_value_text = fields.Text("New value Text")
-    field_name = fields.Char("Technical name", related='field_id.name')
-    field_description = fields.Char(
-        "Description", related='field_id.field_description')
+    field_name = fields.Char("Technical name", related="field_id.name")
+    field_description = fields.Char("Description", related="field_id.field_description")
diff --git a/auditlog/models/rule.py b/auditlog/models/rule.py
index 86fd0b01b..f3d7eef26 100644
--- a/auditlog/models/rule.py
+++ b/auditlog/models/rule.py
@@ -1,11 +1,16 @@
 # Copyright 2015 ABF OSIELL
 # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
 
-from odoo import models, fields, api, modules, _
+from odoo import _, api, fields, models, modules
 
 FIELDS_BLACKLIST = [
-    'id', 'create_uid', 'create_date', 'write_uid', 'write_date',
-    'display_name', '__last_update',
+    "id",
+    "create_uid",
+    "create_date",
+    "write_uid",
+    "write_date",
+    "display_name",
+    "__last_update",
 ]
 # Used for performance, to avoid a dictionary instanciation when we need an
 # empty dict to simplify algorithms
@@ -19,6 +24,7 @@ class DictDiffer(object):
     (3) keys same in both but changed values
     (4) keys same in both and unchanged values
     """
+
     def __init__(self, current_dict, past_dict):
         self.current_dict, self.past_dict = current_dict, past_dict
         self.set_current = set(current_dict)
@@ -32,62 +38,82 @@ class DictDiffer(object):
         return self.set_past - self.intersect
 
     def changed(self):
-        return set(o for o in self.intersect
-                   if self.past_dict[o] != self.current_dict[o])
+        return {o for o in self.intersect if self.past_dict[o] != self.current_dict[o]}
 
     def unchanged(self):
-        return set(o for o in self.intersect
-                   if self.past_dict[o] == self.current_dict[o])
+        return {o for o in self.intersect if self.past_dict[o] == self.current_dict[o]}
 
 
 class AuditlogRule(models.Model):
-    _name = 'auditlog.rule'
+    _name = "auditlog.rule"
     _description = "Auditlog - Rule"
 
-    name = fields.Char(
-        required=True, states={'subscribed': [('readonly', True)]})
+    name = fields.Char(required=True, states={"subscribed": [("readonly", True)]})
     model_id = fields.Many2one(
-        'ir.model', "Model", required=True,
+        "ir.model",
+        "Model",
+        required=True,
         help="Select model for which you want to generate log.",
-        states={'subscribed': [('readonly', True)]})
+        states={"subscribed": [("readonly", True)]},
+    )
     user_ids = fields.Many2many(
-        'res.users',
-        'audittail_rules_users',
-        'user_id', 'rule_id',
+        "res.users",
+        "audittail_rules_users",
+        "user_id",
+        "rule_id",
         string="Users",
         help="if User is not added then it will applicable for all users",
-        states={'subscribed': [('readonly', True)]})
+        states={"subscribed": [("readonly", True)]},
+    )
     log_read = fields.Boolean(
         "Log Reads",
-        help=("Select this if you want to keep track of read/open on any "
-              "record of the model of this rule"),
-        states={'subscribed': [('readonly', True)]})
+        help=(
+            "Select this if you want to keep track of read/open on any "
+            "record of the model of this rule"
+        ),
+        states={"subscribed": [("readonly", True)]},
+    )
     log_write = fields.Boolean(
-        "Log Writes", default=True,
-        help=("Select this if you want to keep track of modification on any "
-              "record of the model of this rule"),
-        states={'subscribed': [('readonly', True)]})
+        "Log Writes",
+        default=True,
+        help=(
+            "Select this if you want to keep track of modification on any "
+            "record of the model of this rule"
+        ),
+        states={"subscribed": [("readonly", True)]},
+    )
     log_unlink = fields.Boolean(
-        "Log Deletes", default=True,
-        help=("Select this if you want to keep track of deletion on any "
-              "record of the model of this rule"),
-        states={'subscribed': [('readonly', True)]})
+        "Log Deletes",
+        default=True,
+        help=(
+            "Select this if you want to keep track of deletion on any "
+            "record of the model of this rule"
+        ),
+        states={"subscribed": [("readonly", True)]},
+    )
     log_create = fields.Boolean(
-        "Log Creates", default=True,
-        help=("Select this if you want to keep track of creation on any "
-              "record of the model of this rule"),
-        states={'subscribed': [('readonly', True)]})
+        "Log Creates",
+        default=True,
+        help=(
+            "Select this if you want to keep track of creation on any "
+            "record of the model of this rule"
+        ),
+        states={"subscribed": [("readonly", True)]},
+    )
     log_type = fields.Selection(
-        [('full', "Full log"),
-         ('fast', "Fast log"),
-         ],
-        string="Type", required=True, default='full',
-        help=("Full log: make a diff between the data before and after "
-              "the operation (log more info like computed fields which were "
-              "updated, but it is slower)\n"
-              "Fast log: only log the changes made through the create and "
-              "write operations (less information, but it is faster)"),
-        states={'subscribed': [('readonly', True)]})
+        [("full", "Full log"), ("fast", "Fast log")],
+        string="Type",
+        required=True,
+        default="full",
+        help=(
+            "Full log: make a diff between the data before and after "
+            "the operation (log more info like computed fields which were "
+            "updated, but it is slower)\n"
+            "Fast log: only log the changes made through the create and "
+            "write operations (less information, but it is faster)"
+        ),
+        states={"subscribed": [("readonly", True)]},
+    )
     # log_action = fields.Boolean(
     #     "Log Action",
     #     help=("Select this if you want to keep track of actions on the "
@@ -97,27 +123,36 @@
     #     help=("Select this if you want to keep track of workflow on any "
     #           "record of the model of this rule"))
     state = fields.Selection(
-        [('draft', "Draft"), ('subscribed', "Subscribed")],
-        required=True, default='draft')
+        [("draft", "Draft"), ("subscribed", "Subscribed")],
+        required=True,
+        default="draft",
+    )
     action_id = fields.Many2one(
-        'ir.actions.act_window', string="Action",
-        states={'subscribed': [('readonly', True)]})
+        "ir.actions.act_window",
+        string="Action",
+        states={"subscribed": [("readonly", True)]},
+    )
 
     _sql_constraints = [
-        ('model_uniq', 'unique(model_id)',
-         ("There is already a rule defined on this model\n"
-          "You cannot define another: please edit the existing one."))
+        (
+            "model_uniq",
+            "unique(model_id)",
+            (
+                "There is already a rule defined on this model\n"
+                "You cannot define another: please edit the existing one."
+            ),
+        )
     ]
 
     def _register_hook(self):
         """Get all rules and apply them to log method calls."""
         super(AuditlogRule, self)._register_hook()
-        if not hasattr(self.pool, '_auditlog_field_cache'):
+        if not hasattr(self.pool, "_auditlog_field_cache"):
             self.pool._auditlog_field_cache = {}
-        if not hasattr(self.pool, '_auditlog_model_cache'):
+        if not hasattr(self.pool, "_auditlog_model_cache"):
             self.pool._auditlog_model_cache = {}
         if not self:
-            self = self.search([('state', '=', 'subscribed')])
+            self = self.search([("state", "=", "subscribed")])
         return self._patch_methods()
 
     def _patch_methods(self):
@@ -125,7 +160,7 @@ class AuditlogRule(models.Model):
         updated = False
         model_cache = self.pool._auditlog_model_cache
         for rule in self:
-            if rule.state != 'subscribed':
+            if rule.state != "subscribed":
                 continue
             if not self.pool.get(rule.model_id.model):
                 # ignore rules for models not loadable currently
@@ -134,31 +169,27 @@ class AuditlogRule(models.Model):
             model_model = self.env[rule.model_id.model]
             # CRUD
             #   -> create
-            check_attr = 'auditlog_ruled_create'
-            if getattr(rule, 'log_create') \
-                    and not hasattr(model_model, check_attr):
-                model_model._patch_method('create', rule._make_create())
+            check_attr = "auditlog_ruled_create"
+            if rule.log_create and not hasattr(model_model, check_attr):
+                model_model._patch_method("create", rule._make_create())
                 setattr(type(model_model), check_attr, True)
                 updated = True
             #   -> read
-            check_attr = 'auditlog_ruled_read'
-            if getattr(rule, 'log_read') \
-                    and not hasattr(model_model, check_attr):
-                model_model._patch_method('read', rule._make_read())
+            check_attr = "auditlog_ruled_read"
+            if rule.log_read and not hasattr(model_model, check_attr):
+                model_model._patch_method("read", rule._make_read())
                 setattr(type(model_model), check_attr, True)
                 updated = True
             #   -> write
-            check_attr = 'auditlog_ruled_write'
-            if getattr(rule, 'log_write') \
-                    and not hasattr(model_model, check_attr):
-                model_model._patch_method('write', rule._make_write())
+            check_attr = "auditlog_ruled_write"
+            if rule.log_write and not hasattr(model_model, check_attr):
+                model_model._patch_method("write", rule._make_write())
                 setattr(type(model_model), check_attr, True)
                 updated = True
             #   -> unlink
-            check_attr = 'auditlog_ruled_unlink'
-            if getattr(rule, 'log_unlink') \
-                    and not hasattr(model_model, check_attr):
-                model_model._patch_method('unlink', rule._make_unlink())
+            check_attr = "auditlog_ruled_unlink"
+            if rule.log_unlink and not hasattr(model_model, check_attr):
+                model_model._patch_method("unlink", rule._make_unlink())
                 setattr(type(model_model), check_attr, True)
                 updated = True
         return updated
@@ -168,11 +199,12 @@ class AuditlogRule(models.Model):
         updated = False
         for rule in self:
             model_model = self.env[rule.model_id.model]
-            for method in ['create', 'read', 'write', 'unlink']:
-                if getattr(rule, 'log_%s' % method) and hasattr(
-                        getattr(model_model, method), 'origin'):
+            for method in ["create", "read", "write", "unlink"]:
+                if getattr(rule, "log_%s" % method) and hasattr(
+                    getattr(model_model, method), "origin"
+                ):
                     model_model._revert_method(method)
-                    delattr(type(model_model), 'auditlog_ruled_%s' % method)
+                    delattr(type(model_model), "auditlog_ruled_%s" % method)
                     updated = True
         if updated:
             modules.registry.Registry(self.env.cr.dbname).signal_changes()
@@ -203,61 +235,83 @@ class AuditlogRule(models.Model):
         log_type = self.log_type
 
         @api.model
-        @api.returns('self', lambda value: value.id)
+        @api.returns("self", lambda value: value.id)
         def create_full(self, vals, **kwargs):
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
+            rule_model = self.env["auditlog.rule"]
             new_record = create_full.origin(self, vals, **kwargs)
-            new_values = dict(
-                (d['id'], d) for d in new_record.sudo()
-                .with_context(prefetch_fields=False).read(list(self._fields)))
+            new_values = {
+                d["id"]: d
+                for d in new_record.sudo()
+                .with_context(prefetch_fields=False)
+                .read(list(self._fields))
+            }
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, new_record.ids,
-                'create', None, new_values, {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                new_record.ids,
+                "create",
+                None,
+                new_values,
+                {"log_type": log_type},
+            )
             return new_record
 
         @api.model
-        @api.returns('self', lambda value: value.id)
+        @api.returns("self", lambda value: value.id)
         def create_fast(self, vals, **kwargs):
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
+            rule_model = self.env["auditlog.rule"]
             vals2 = dict(vals)
             new_record = create_fast.origin(self, vals, **kwargs)
             new_values = {new_record.id: vals2}
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, new_record.ids,
-                'create', None, new_values, {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                new_record.ids,
+                "create",
+                None,
+                new_values,
+                {"log_type": log_type},
+            )
             return new_record
 
-        return create_full if self.log_type == 'full' else create_fast
+        return create_full if self.log_type == "full" else create_fast
 
     def _make_read(self):
         """Instanciate a read method that log its calls."""
         self.ensure_one()
         log_type = self.log_type
 
-        def read(self, fields=None, load='_classic_read', **kwargs):
+        def read(self, fields=None, load="_classic_read", **kwargs):
             result = read.origin(self, fields, load, **kwargs)
             # Sometimes the result is not a list but a dictionary
             # Also, we can not modify the current result as it will break calls
             result2 = result
             if not isinstance(result2, list):
                 result2 = [result]
-            read_values = dict((d['id'], d) for d in result2)
+            read_values = {d["id"]: d for d in result2}
             # Old API
             # If the call came from auditlog itself, skip logging:
             # avoid logs on `read` produced by auditlog during internal
             # processing: read data of relevant records, 'ir.model',
             # 'ir.model.fields'... (no interest in logging such operations)
-            if self.env.context.get('auditlog_disabled'):
+            if self.env.context.get("auditlog_disabled"):
                 return result
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
+            rule_model = self.env["auditlog.rule"]
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, self.ids,
-                'read', read_values, None, {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                self.ids,
+                "read",
+                read_values,
+                None,
+                {"log_type": log_type},
+            )
             return result
+
         return read
 
     def _make_write(self):
@@ -267,36 +321,54 @@
 
         def write_full(self, vals, **kwargs):
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
-            old_values = dict(
-                (d['id'], d) for d in self.sudo()
-                .with_context(prefetch_fields=False).read(list(self._fields)))
+            rule_model = self.env["auditlog.rule"]
+            old_values = {
+                d["id"]: d
+                for d in self.sudo()
+                .with_context(prefetch_fields=False)
+                .read(list(self._fields))
+            }
             result = write_full.origin(self, vals, **kwargs)
-            new_values = dict(
-                (d['id'], d) for d in self.sudo()
-                .with_context(prefetch_fields=False).read(list(self._fields)))
+            new_values = {
+                d["id"]: d
+                for d in self.sudo()
+                .with_context(prefetch_fields=False)
+                .read(list(self._fields))
+            }
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, self.ids,
-                'write', old_values, new_values, {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                self.ids,
+                "write",
+                old_values,
+                new_values,
+                {"log_type": log_type},
+            )
             return result
 
         def write_fast(self, vals, **kwargs):
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
+            rule_model = self.env["auditlog.rule"]
             # Log the user input only, no matter if the `vals` is updated
             # afterwards as it could not represent the real state
             # of the data in the database
             vals2 = dict(vals)
             old_vals2 = dict.fromkeys(list(vals2.keys()), False)
-            old_values = dict((id_, old_vals2) for id_ in self.ids)
-            new_values = dict((id_, vals2) for id_ in self.ids)
+            old_values = {id_: old_vals2 for id_ in self.ids}
+            new_values = {id_: vals2 for id_ in self.ids}
             result = write_fast.origin(self, vals, **kwargs)
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, self.ids,
-                'write', old_values, new_values, {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                self.ids,
+                "write",
+                old_values,
+                new_values,
+                {"log_type": log_type},
+            )
             return result
 
-        return write_full if self.log_type == 'full' else write_fast
+        return write_full if self.log_type == "full" else write_fast
 
     def _make_unlink(self):
         """Instanciate an unlink method that log its calls."""
@@ -305,28 +377,50 @@
 
         def unlink_full(self, **kwargs):
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
-            old_values = dict(
-                (d['id'], d) for d in self.sudo()
-                .with_context(prefetch_fields=False).read(list(self._fields)))
+            rule_model = self.env["auditlog.rule"]
+            old_values = {
+                d["id"]: d
+                for d in self.sudo()
+                .with_context(prefetch_fields=False)
+                .read(list(self._fields))
+            }
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, self.ids, 'unlink', old_values, None,
-                {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                self.ids,
+                "unlink",
+                old_values,
+                None,
+                {"log_type": log_type},
+            )
             return unlink_full.origin(self, **kwargs)
 
         def unlink_fast(self, **kwargs):
             self = self.with_context(auditlog_disabled=True)
-            rule_model = self.env['auditlog.rule']
+            rule_model = self.env["auditlog.rule"]
             rule_model.sudo().create_logs(
-                self.env.uid, self._name, self.ids, 'unlink', None, None,
-                {'log_type': log_type})
+                self.env.uid,
+                self._name,
+                self.ids,
+                "unlink",
+                None,
+                None,
+                {"log_type": log_type},
+            )
             return unlink_fast.origin(self, **kwargs)
 
-        return unlink_full if self.log_type == 'full' else unlink_fast
+        return unlink_full if self.log_type == "full" else unlink_fast
 
-    def create_logs(self, uid, res_model, res_ids, method,
-                    old_values=None, new_values=None,
-                    additional_log_values=None):
+    def create_logs(
+        self,
+        uid,
+        res_model,
+        res_ids,
+        method,
+        old_values=None,
+        new_values=None,
+        additional_log_values=None,
+    ):
         """Create logs. `old_values` and `new_values` are dictionaries, e.g:
             {RES_ID: {'FIELD': VALUE, ...}}
         """
@@ -334,37 +428,37 @@ class AuditlogRule(models.Model):
             old_values = EMPTY_DICT
         if new_values is None:
             new_values = EMPTY_DICT
-        log_model = self.env['auditlog.log']
-        http_request_model = self.env['auditlog.http.request']
-        http_session_model = self.env['auditlog.http.session']
+        log_model = self.env["auditlog.log"]
+        http_request_model = self.env["auditlog.http.request"]
+        http_session_model = self.env["auditlog.http.session"]
         for res_id in res_ids:
             model_model = self.env[res_model]
             name = model_model.browse(res_id).name_get()
             res_name = name and name[0] and name[0][1]
             vals = {
-                'name': res_name,
-                'model_id': self.pool._auditlog_model_cache[res_model],
-                'res_id': res_id,
-                'method': method,
-                'user_id': uid,
-                'http_request_id': http_request_model.current_http_request(),
-                'http_session_id': http_session_model.current_http_session(),
+                "name": res_name,
+                "model_id": self.pool._auditlog_model_cache[res_model],
+                "res_id": res_id,
+                "method": method,
+                "user_id": uid,
+                "http_request_id": http_request_model.current_http_request(),
+                "http_session_id": http_session_model.current_http_session(),
             }
             vals.update(additional_log_values or {})
             log = log_model.create(vals)
             diff = DictDiffer(
-                new_values.get(res_id, EMPTY_DICT),
-                old_values.get(res_id, EMPTY_DICT))
-            if method == 'create':
+                new_values.get(res_id, EMPTY_DICT), old_values.get(res_id, EMPTY_DICT)
+            )
+            if method == "create":
                 self._create_log_line_on_create(log, diff.added(), new_values)
-            elif method == 'read':
+            elif method == "read":
                 self._create_log_line_on_read(
-                    log,
-                    list(old_values.get(res_id, EMPTY_DICT).keys()), old_values
+                    log, list(old_values.get(res_id, EMPTY_DICT).keys()), old_values
                 )
-            elif method == 'write':
+            elif method == "write":
                 self._create_log_line_on_write(
-                    log, diff.changed(), old_values, new_values)
+                    log, diff.changed(), old_values, new_values
+                )
 
     def _get_field(self, model, field_name):
         cache = self.pool._auditlog_field_cache
@@ -373,32 +467,31 @@ class AuditlogRule(models.Model):
             # - we use 'search()' then 'read()' instead of the 'search_read()'
             #   to take advantage of the 'classic_write' loading
             # - search the field in the current model and those it inherits
-            field_model = self.env['ir.model.fields']
+            field_model = self.env["ir.model.fields"]
             all_model_ids = [model.id]
            all_model_ids.extend(model.inherited_model_ids.ids)
             field = field_model.search(
-                [('model_id', 'in', all_model_ids), ('name', '=', field_name)])
+                [("model_id", "in", all_model_ids), ("name", "=", field_name)]
+            )
             # The field can be a dummy one, like 'in_group_X' on 'res.users'
             # As such we can't log it (field_id is required to create a log)
             if not field:
                 cache[model.model][field_name] = False
             else:
-                field_data = field.read(load='_classic_write')[0]
+                field_data = field.read(load="_classic_write")[0]
                 cache[model.model][field_name] = field_data
         return cache[model.model][field_name]
 
-    def _create_log_line_on_read(
-            self, log, fields_list, read_values):
+    def _create_log_line_on_read(self, log, fields_list, read_values):
         """Log field filled on a 'read' operation."""
-        log_line_model = self.env['auditlog.log.line']
+        log_line_model = self.env["auditlog.log.line"]
         for field_name in fields_list:
             if field_name in FIELDS_BLACKLIST:
                 continue
             field = self._get_field(log.model_id, field_name)
             # not all fields have an ir.models.field entry (ie. related fields)
             if field:
-                log_vals = self._prepare_log_line_vals_on_read(
-                    log, field, read_values)
+                log_vals = self._prepare_log_line_vals_on_read(log, field, read_values)
                 log_line_model.create(log_vals)
 
     def _prepare_log_line_vals_on_read(self, log, field, read_values):
@@ -406,23 +499,23 @@ class AuditlogRule(models.Model):
         'read' operation.
         """
         vals = {
-            'field_id': field['id'],
-            'log_id': log.id,
-            'old_value': read_values[log.res_id][field['name']],
-            'old_value_text': read_values[log.res_id][field['name']],
-            'new_value': False,
-            'new_value_text': False,
+            "field_id": field["id"],
+            "log_id": log.id,
+            "old_value": read_values[log.res_id][field["name"]],
+            "old_value_text": read_values[log.res_id][field["name"]],
+            "new_value": False,
+            "new_value_text": False,
         }
-        if field['relation'] and '2many' in field['ttype']:
-            old_value_text = self.env[field['relation']].browse(
-                vals['old_value']).name_get()
-            vals['old_value_text'] = old_value_text
+        if field["relation"] and "2many" in field["ttype"]:
+            old_value_text = (
+                self.env[field["relation"]].browse(vals["old_value"]).name_get()
+            )
+            vals["old_value_text"] = old_value_text
         return vals
 
-    def _create_log_line_on_write(
-            self, log, fields_list, old_values, new_values):
+    def _create_log_line_on_write(self, log, fields_list, old_values, new_values):
         """Log field updated on a 'write' operation."""
-        log_line_model = self.env['auditlog.log.line']
+        log_line_model = self.env["auditlog.log.line"]
         for field_name in fields_list:
             if field_name in FIELDS_BLACKLIST:
                 continue
@@ -430,55 +523,55 @@ class AuditlogRule(models.Model):
             # not all fields have an ir.models.field entry (ie. related fields)
             if field:
                 log_vals = self._prepare_log_line_vals_on_write(
-                    log, field, old_values, new_values)
+                    log, field, old_values, new_values
+                )
                 log_line_model.create(log_vals)
 
-    def _prepare_log_line_vals_on_write(
-            self, log, field, old_values, new_values):
+    def _prepare_log_line_vals_on_write(self, log, field, old_values, new_values):
         """Prepare the dictionary of values used to create a log line on a
         'write' operation.
         """
         vals = {
-            'field_id': field['id'],
-            'log_id': log.id,
-            'old_value': old_values[log.res_id][field['name']],
-            'old_value_text': old_values[log.res_id][field['name']],
-            'new_value': new_values[log.res_id][field['name']],
-            'new_value_text': new_values[log.res_id][field['name']],
+            "field_id": field["id"],
+            "log_id": log.id,
+            "old_value": old_values[log.res_id][field["name"]],
+            "old_value_text": old_values[log.res_id][field["name"]],
+            "new_value": new_values[log.res_id][field["name"]],
+            "new_value_text": new_values[log.res_id][field["name"]],
         }
         # for *2many fields, log the name_get
-        if log.log_type == 'full' and field['relation'] \
-                and '2many' in field['ttype']:
+        if log.log_type == "full" and field["relation"] and "2many" in field["ttype"]:
             # Filter IDs to prevent a 'name_get()' call on deleted resources
-            existing_ids = self.env[field['relation']]._search(
-                [('id', 'in', vals['old_value'])])
+            existing_ids = self.env[field["relation"]]._search(
+                [("id", "in", vals["old_value"])]
+            )
             old_value_text = []
             if existing_ids:
-                existing_values = self.env[field['relation']].browse(
-                    existing_ids).name_get()
+                existing_values = (
+                    self.env[field["relation"]].browse(existing_ids).name_get()
+                )
                 old_value_text.extend(existing_values)
             # Deleted resources will have a 'DELETED' text representation
-            deleted_ids = set(vals['old_value']) - set(existing_ids)
+            deleted_ids = set(vals["old_value"]) - set(existing_ids)
             for deleted_id in deleted_ids:
-                old_value_text.append((deleted_id, 'DELETED'))
-            vals['old_value_text'] = old_value_text
-            new_value_text = self.env[field['relation']].browse(
-                vals['new_value']).name_get()
-            vals['new_value_text'] = new_value_text
+                old_value_text.append((deleted_id, "DELETED"))
+            vals["old_value_text"] = old_value_text
+            new_value_text = (
+                self.env[field["relation"]].browse(vals["new_value"]).name_get()
+            )
+            vals["new_value_text"] = new_value_text
         return vals
 
-    def _create_log_line_on_create(
-            self, log, fields_list, new_values):
+    def _create_log_line_on_create(self, log, fields_list, new_values):
         """Log field filled on a 'create' operation."""
-        log_line_model = self.env['auditlog.log.line']
+        log_line_model = self.env["auditlog.log.line"]
         for field_name in fields_list:
             if field_name in FIELDS_BLACKLIST:
                 continue
             field = self._get_field(log.model_id, field_name)
             # not all fields have an ir.models.field entry (ie. related fields)
             if field:
-                log_vals = self._prepare_log_line_vals_on_create(
-                    log, field, new_values)
+                log_vals = self._prepare_log_line_vals_on_create(log, field, new_values)
                 log_line_model.create(log_vals)
 
     def _prepare_log_line_vals_on_create(self, log, field, new_values):
@@ -486,37 +579,38 @@ class AuditlogRule(models.Model):
         'create' operation.
         """
         vals = {
-            'field_id': field['id'],
-            'log_id': log.id,
-            'old_value': False,
-            'old_value_text': False,
-            'new_value': new_values[log.res_id][field['name']],
-            'new_value_text': new_values[log.res_id][field['name']],
+            "field_id": field["id"],
+            "log_id": log.id,
+            "old_value": False,
+            "old_value_text": False,
+            "new_value": new_values[log.res_id][field["name"]],
+            "new_value_text": new_values[log.res_id][field["name"]],
         }
-        if log.log_type == 'full' and field['relation'] \
-                and '2many' in field['ttype']:
-            new_value_text = self.env[field['relation']].browse(
-                vals['new_value']).name_get()
-            vals['new_value_text'] = new_value_text
+        if log.log_type == "full" and field["relation"] and "2many" in field["ttype"]:
+            new_value_text = (
+                self.env[field["relation"]].browse(vals["new_value"]).name_get()
+            )
+            vals["new_value_text"] = new_value_text
         return vals
 
     def subscribe(self):
         """Subscribe Rule for auditing changes on model and apply shortcut
         to view logs on that model.
         """
-        act_window_model = self.env['ir.actions.act_window']
+        act_window_model = self.env["ir.actions.act_window"]
         for rule in self:
             # Create a shortcut to view logs
             domain = "[('model_id', '=', %s), ('res_id', '=', active_id)]" % (
-                rule.model_id.id)
+                rule.model_id.id
+            )
             vals = {
-                'name': _("View logs"),
-                'res_model': 'auditlog.log',
-                'binding_model_id': rule.model_id.id,
-                'domain': domain,
+                "name": _("View logs"),
+                "res_model": "auditlog.log",
+                "binding_model_id": rule.model_id.id,
+                "domain": domain,
             }
             act_window = act_window_model.sudo().create(vals)
-            rule.write({'state': 'subscribed', 'action_id': act_window.id})
+            rule.write({"state": "subscribed", "action_id": act_window.id})
         return True
 
     def unsubscribe(self):
@@ -528,5 +622,5 @@ class AuditlogRule(models.Model):
             act_window = rule.action_id
             if act_window:
                 act_window.unlink()
-        self.write({'state': 'draft'})
+        self.write({"state": "draft"})
         return True
diff --git a/auditlog/tests/test_auditlog.py b/auditlog/tests/test_auditlog.py
index 996cf5fa1..ed0b83456 100644
--- a/auditlog/tests/test_auditlog.py
+++ b/auditlog/tests/test_auditlog.py
@@ -4,48 +4,59 @@
 from odoo.tests.common import TransactionCase
 
 
 class AuditlogCommon(object):
-
     def test_LogCreation(self):
         """First test, caching some data."""
         self.groups_rule.subscribe()
 
-        auditlog_log = self.env['auditlog.log']
-        group = self.env['res.groups'].create({
-            'name': 'testgroup1',
-        })
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'create'),
-            ('res_id', '=', group.id),
-        ]).ensure_one())
-        group.write({'name': 'Testgroup1'})
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'write'),
-            ('res_id', '=', group.id),
-        ]).ensure_one())
+        auditlog_log = self.env["auditlog.log"]
+        group = self.env["res.groups"].create({"name": "testgroup1"})
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "create"),
+                    ("res_id", "=", group.id),
+                ]
+            ).ensure_one()
+        )
+        group.write({"name": "Testgroup1"})
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "write"),
+                    ("res_id", "=", group.id),
+                ]
+            ).ensure_one()
+        )
         group.unlink()
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'unlink'),
-            ('res_id', '=', group.id),
-        ]).ensure_one())
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "unlink"),
+                    ("res_id", "=", group.id),
+                ]
+            ).ensure_one()
+        )
 
     def test_LogCreation2(self):
         """Second test, using cached data of the first one."""
         self.groups_rule.subscribe()
 
-        auditlog_log = self.env['auditlog.log']
-        testgroup2 = self.env['res.groups'].create({
-            'name': 'testgroup2',
-        })
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'create'),
-            ('res_id', '=', testgroup2.id),
-        ]).ensure_one())
+        auditlog_log = self.env["auditlog.log"]
+        testgroup2 = self.env["res.groups"].create({"name": "testgroup2"})
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "create"),
+                    ("res_id", "=", testgroup2.id),
+                ]
+            ).ensure_one()
+        )
 
     def test_LogCreation3(self):
         """Third test, two groups, the latter being the parent of the former.
@@ -55,46 +66,56 @@ class AuditlogCommon(object):
         """
         self.groups_rule.subscribe()
 
-        auditlog_log = self.env['auditlog.log']
-        testgroup3 = testgroup3 = self.env['res.groups'].create({
-            'name': 'testgroup3',
-        })
-        testgroup4 = self.env['res.groups'].create({
-            'name': 'testgroup4',
-            'implied_ids': [(4, testgroup3.id)],
-        })
-        testgroup4.write({'implied_ids': [(2, testgroup3.id)]})
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'create'),
-            ('res_id', '=', testgroup3.id),
-        ]).ensure_one())
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'create'),
-            ('res_id', '=', testgroup4.id),
-        ]).ensure_one())
-        self.assertTrue(auditlog_log.search([
-            ('model_id', '=', self.groups_model_id),
-            ('method', '=', 'write'),
-            ('res_id', '=', testgroup4.id),
-        ]).ensure_one())
+        auditlog_log = self.env["auditlog.log"]
+        testgroup3 = testgroup3 = self.env["res.groups"].create({"name": "testgroup3"})
+        testgroup4 = self.env["res.groups"].create(
+            {"name": "testgroup4", "implied_ids": [(4, testgroup3.id)]}
+        )
+        testgroup4.write({"implied_ids": [(2, testgroup3.id)]})
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "create"),
+                    ("res_id", "=", testgroup3.id),
+                ]
+            ).ensure_one()
+        )
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "create"),
+                    ("res_id", "=", testgroup4.id),
+                ]
+            ).ensure_one()
+        )
+        self.assertTrue(
+            auditlog_log.search(
+                [
+                    ("model_id", "=", self.groups_model_id),
+                    ("method", "=", "write"),
+                    ("res_id", "=", testgroup4.id),
+                ]
+            ).ensure_one()
+        )
 
 
 class TestAuditlogFull(TransactionCase, AuditlogCommon):
-
     def setUp(self):
         super(TestAuditlogFull, self).setUp()
-        self.groups_model_id = self.env.ref('base.model_res_groups').id
-        self.groups_rule = self.env['auditlog.rule'].create({
-            'name': 'testrule for groups',
-            'model_id': self.groups_model_id,
-            'log_read': True,
-            'log_create': True,
-            'log_write': True,
-            'log_unlink': True,
-            'log_type': 'full',
-        })
+        self.groups_model_id = self.env.ref("base.model_res_groups").id
+        self.groups_rule = self.env["auditlog.rule"].create(
+            {
+                "name": "testrule for groups",
+                "model_id": self.groups_model_id,
+                "log_read": True,
+                "log_create": True,
+                "log_write": True,
+                "log_unlink": True,
+                "log_type": "full",
+            }
+        )
 
     def tearDown(self):
         self.groups_rule.unlink()
@@ -102,19 +123,20 @@ class TestAuditlogFull(TransactionCase, AuditlogCommon):
 
 
 class TestAuditlogFast(TransactionCase, AuditlogCommon):
-
     def setUp(self):
         super(TestAuditlogFast, self).setUp()
-        self.groups_model_id = self.env.ref('base.model_res_groups').id
-        self.groups_rule = self.env['auditlog.rule'].create({
-            'name': 'testrule for groups',
-            'model_id': self.groups_model_id,
-            'log_read': True,
-            'log_create': True,
-            'log_write': True,
-            'log_unlink': True,
-            'log_type': 'fast',
-        })
+        self.groups_model_id = self.env.ref("base.model_res_groups").id
+        self.groups_rule = self.env["auditlog.rule"].create(
+            {
+                "name": "testrule for groups",
+                "model_id": self.groups_model_id,
+                "log_read": True,
+                "log_create": True,
+                "log_write": True,
+                "log_unlink": True,
+                "log_type": "fast",
+            }
+        )
 
     def tearDown(self):
         self.groups_rule.unlink()
diff --git a/auditlog/tests/test_autovacuum.py b/auditlog/tests/test_autovacuum.py
index 1b6fae60d..2b4de24dd 100644
--- a/auditlog/tests/test_autovacuum.py
+++ b/auditlog/tests/test_autovacuum.py
@@ -6,42 +6,39 @@
 from odoo.tests.common import TransactionCase
 
 
 class TestAuditlogAutovacuum(TransactionCase):
-
     def setUp(self):
         super(TestAuditlogAutovacuum, self).setUp()
-        self.groups_model_id = self.env.ref('base.model_res_groups').id
-        self.groups_rule = self.env['auditlog.rule'].create({
-            'name': 'testrule for groups',
-            'model_id': self.groups_model_id,
-            'log_read': True,
-            'log_create': True,
-            'log_write': True,
-            'log_unlink': True,
-            'state': 'subscribed',
-            'log_type': 'full',
-        })
+        self.groups_model_id = self.env.ref("base.model_res_groups").id
+        self.groups_rule = self.env["auditlog.rule"].create(
+            {
+                "name": "testrule for groups",
+                "model_id": self.groups_model_id,
+                "log_read": True,
+                "log_create": True,
+                "log_write": True,
+                "log_unlink": True,
+                "state": "subscribed",
+                "log_type": "full",
+            }
+        )
 
     def tearDown(self):
         self.groups_rule.unlink()
         super(TestAuditlogAutovacuum, self).tearDown()
 
     def test_autovacuum(self):
-        log_model = self.env['auditlog.log']
-        autovacuum_model = self.env['auditlog.autovacuum']
-        group = self.env['res.groups'].create({
-            'name': 'testgroup1',
-        })
-        nb_logs = log_model.search_count([
-            ('model_id', '=', self.groups_model_id),
-            ('res_id', '=', group.id),
-        ])
+        log_model = self.env["auditlog.log"]
+        autovacuum_model = self.env["auditlog.autovacuum"]
+        group = self.env["res.groups"].create({"name": "testgroup1"})
+        nb_logs = log_model.search_count(
+            [("model_id", "=", self.groups_model_id), ("res_id", "=", group.id)]
+        )
         self.assertGreater(nb_logs, 0)
         # Milliseconds are ignored by autovacuum, waiting 1s ensure that
         # the logs generated will be processed by the vacuum
         time.sleep(1)
         autovacuum_model.autovacuum(days=0)
-        nb_logs = log_model.search_count([
-            ('model_id', '=', self.groups_model_id),
-            ('res_id', '=', group.id),
-        ])
+        nb_logs = log_model.search_count(
+            [("model_id", "=", self.groups_model_id), ("res_id", "=", group.id)]
+        )
         self.assertEqual(nb_logs, 0)
diff --git a/auditlog/views/auditlog_view.xml b/auditlog/views/auditlog_view.xml
index c2f9cf17c..98b0d9bfe 100644
--- a/auditlog/views/auditlog_view.xml
+++ b/auditlog/views/auditlog_view.xml
[hunk bodies unrecoverable: the XML markup of views/auditlog_view.xml was stripped during extraction. The surviving fragments show a reformatting (quoting and indentation only) of the auditlog.rule form, tree and search views, the "Rules" window action (tree,form), the auditlog.log form, tree and search views with a group-by filter on http_request_id, and the "Logs" window action and menu items.]
diff --git a/auditlog/views/http_request_view.xml b/auditlog/views/http_request_view.xml
index 22c697b8f..7682d7342 100644
--- a/auditlog/views/http_request_view.xml
+++ b/auditlog/views/http_request_view.xml
[hunk bodies unrecoverable: the XML markup of views/http_request_view.xml was stripped during extraction. The surviving fragments show a reformatting of the auditlog.http.request form, tree and search views (including a group-by filter on http_session_id), the "HTTP Requests" window action and the menu item bound to action_auditlog_http_request_tree.]
diff --git a/auditlog/views/http_session_view.xml b/auditlog/views/http_session_view.xml
index b31b5c26f..de8f2bc7d 100644
--- a/auditlog/views/http_session_view.xml
+++ b/auditlog/views/http_session_view.xml
[hunk bodies unrecoverable: the XML markup of views/http_session_view.xml was stripped during extraction. The surviving fragments show a reformatting of the auditlog.http.session form, tree and search views (including a group-by filter on create_date), the "User sessions" window action and the menu item bound to action_auditlog_http_session_tree.]