[IMP] attachment synchronize : when importing files from a remote server :

If one task fails, keep processing the others
Commit after each imported file to be sure not to lose any data, for example when the file is deleted on the remote after import
pull/2923/head
Florian da Costa 2024-04-24 16:30:52 +02:00
parent 38296f3e65
commit e6a8b97d5c
4 changed files with 97 additions and 27 deletions

View File

@ -4,10 +4,46 @@
<field name="name">TEST Import</field>
<field name="backend_id" ref="fs_storage.default_fs_storage" />
<field name="method_type">import</field>
<field name="filepath">test_import</field>
<field name="avoid_duplicated_files" eval="True" />
</record>
<record id="import_from_filestore_delete" model="attachment.synchronize.task">
<field name="name">TEST Import then delete</field>
<field name="backend_id" ref="fs_storage.default_fs_storage" />
<field name="method_type">import</field>
<field name="after_import">delete</field>
<field name="filepath">test_import</field>
</record>
<record id="import_from_filestore_rename" model="attachment.synchronize.task">
<field name="name">TEST Import then rename</field>
<field name="backend_id" ref="fs_storage.default_fs_storage" />
<field name="method_type">import</field>
<field name="after_import">rename</field>
<field name="filepath">test_import</field>
<field name="new_name">test-${obj.name}</field>
</record>
<record id="import_from_filestore_move" model="attachment.synchronize.task">
<field name="name">TEST Import then move</field>
<field name="backend_id" ref="fs_storage.default_fs_storage" />
<field name="method_type">import</field>
<field name="after_import">move</field>
<field name="filepath">test_import</field>
<field name="move_path">test_archived</field>
</record>
<record id="import_from_filestore_move_rename" model="attachment.synchronize.task">
<field name="name">TEST Import then move and rename</field>
<field name="backend_id" ref="fs_storage.default_fs_storage" />
<field name="method_type">import</field>
<field name="after_import">move_rename</field>
<field name="filepath">test_import</field>
<field name="move_path">test_archived</field>
<field name="new_name">foo.txt</field>
</record>
<record id="export_to_filestore" model="attachment.synchronize.task">
<field name="name">TEST Export</field>
<field name="backend_id" ref="fs_storage.default_fs_storage" />

View File

@ -143,7 +143,15 @@ class AttachmentSynchronizeTask(models.Model):
domain = []
domain = expression.AND([domain, [("method_type", "=", "import")]])
for task in self.search(domain):
task.run_import()
try:
task.run_import()
except Exception as e:
self.env.cr.rollback()
# log the exception and continue, in order not to block the tasks of all
# remote servers when one of them is unavailable
_logger.warning(
"Task import %s failed with error %s" % (task.name, str(e))
)
def run(self):
for record in self:
@ -197,16 +205,24 @@ class AttachmentSynchronizeTask(models.Model):
for file_path in file_path_names:
if fs.isdir(file_path):
continue
with self.env.cr.savepoint():
# we need to commit after each file because it may be renamed, deleted or
# moved on the remote, and if a later file fails we could lose the file
# forever.
with self.env.registry.cursor() as new_cr:
new_env = api.Environment(new_cr, self.env.uid, self.env.context)
file_name = file_path.split(fs.sep)[-1]
# avoid use of cat_file because it may not be implemented in async
# implementations like sshfs
with fs.open(file_path, "rb") as fs_file:
data = fs_file.read()
data = base64.b64encode(data)
attach_vals = self._prepare_attachment_vals(data, file_name)
attachment = attach_obj.create(attach_vals)
self._manage_file_after_import(file_name, file_path, attachment)
attach_vals = self.with_env(new_env)._prepare_attachment_vals(
data, file_name
)
attachment = attach_obj.with_env(new_env).create(attach_vals)
self.with_env(new_env)._manage_file_after_import(
file_name, file_path, attachment
)
total_import += 1
_logger.info("Run import complete! Imported {} files".format(total_import))

View File

@ -36,6 +36,18 @@ class SyncCommon(TransactionCase):
self._clean_testing_directory()
self._create_test_file()
self.task = self.env.ref("attachment_synchronize.import_from_filestore")
self.task_delete = self.env.ref(
"attachment_synchronize.import_from_filestore_delete"
)
self.task_move = self.env.ref(
"attachment_synchronize.import_from_filestore_move"
)
self.task_rename = self.env.ref(
"attachment_synchronize.import_from_filestore_rename"
)
self.task_move_rename = self.env.ref(
"attachment_synchronize.import_from_filestore_move_rename"
)
def tearDown(self):
self._clean_testing_directory()

View File

@ -2,10 +2,23 @@
# @author Sébastien BEAU <sebastien.beau@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api
from .common import SyncCommon
class TestImport(SyncCommon):
def tearDown(self):
self.registry.leave_test_mode()
super().tearDown()
def setUp(self):
super().setUp()
self.registry.enter_test_mode(self.env.cr)
self.env = api.Environment(
self.registry.test_cr, self.env.uid, self.env.context
)
@property
def archived_files(self):
return self.backend.fs.ls(self.directory_archived, detail=False)
@ -15,54 +28,45 @@ class TestImport(SyncCommon):
return self.backend.fs.ls(self.directory_input, detail=False)
def _check_attachment_created(self, count=1):
attachment = self.env["attachment.queue"].search([("name", "=", "bar.txt")])
self.assertEqual(len(attachment), count)
with self.env.registry.cursor() as new_cr:
attachment = self.env(cr=new_cr)["attachment.queue"].search(
[("name", "=", "bar.txt")]
)
self.assertEqual(len(attachment), count)
def test_import_with_rename(self):
self.task.write({"after_import": "rename", "new_name": "test-${obj.name}"})
self.task.run_import()
self.task_rename.run_import()
self._check_attachment_created()
self.assertEqual(self.input_files, ["test_import/test-bar.txt"])
self.assertEqual(self.archived_files, [])
def test_import_with_move(self):
self.task.write({"after_import": "move", "move_path": self.directory_archived})
self.task.run_import()
self.task_move.run_import()
self._check_attachment_created()
self.assertEqual(self.input_files, [])
self.assertEqual(self.archived_files, ["test_archived/bar.txt"])
def test_import_with_move_and_rename(self):
self.task.write(
{
"after_import": "move_rename",
"new_name": "foo.txt",
"move_path": self.directory_archived,
}
)
self.task.run_import()
self.task_move_rename.run_import()
self._check_attachment_created()
self.assertEqual(self.input_files, [])
self.assertEqual(self.archived_files, ["test_archived/foo.txt"])
def test_import_with_delete(self):
self.task.write({"after_import": "delete"})
self.task.run_import()
self.task_delete.run_import()
self._check_attachment_created()
self.assertEqual(self.input_files, [])
self.assertEqual(self.archived_files, [])
def test_import_twice(self):
self.task.write({"after_import": "delete"})
self.task.run_import()
self.task_delete.run_import()
self._check_attachment_created(count=1)
self._create_test_file()
self.task.run_import()
self.task_delete.run_import()
self._check_attachment_created(count=2)
def test_import_twice_no_duplicate(self):
self.task.write({"after_import": "delete", "avoid_duplicated_files": True})
self.task.run_import()
self._check_attachment_created(count=1)
@ -71,11 +75,13 @@ class TestImport(SyncCommon):
self._check_attachment_created(count=1)
def test_running_cron(self):
self.task.write({"after_import": "delete"})
self.env["attachment.synchronize.task"].search(
[("id", "!=", self.task.id)]
).write({"active": False})
self.env["attachment.synchronize.task"].run_task_import_scheduler()
self._check_attachment_created(count=1)
def test_running_cron_disable_task(self):
self.task.write({"after_import": "delete", "active": False})
self.env["attachment.synchronize.task"].search([]).write({"active": False})
self.env["attachment.synchronize.task"].run_task_import_scheduler()
self._check_attachment_created(count=0)