[MIG] attachment_synchronize : Migration to v16
parent a73d811f19
commit deaa0b805f
@@ -5,7 +5,7 @@
 {
     "name": "Attachment Synchronize",
-    "version": "14.0.1.0.2",
+    "version": "16.0.1.0.0",
     "author": "Akretion,Odoo Community Association (OCA)",
     "website": "https://github.com/OCA/server-tools",
     "maintainers": ["florian-dacosta", "sebastienbeau", "GSLabIt", "bealdav"],
@@ -13,7 +13,7 @@
     "category": "Generic Modules",
     "depends": [
         "attachment_queue",
-        "storage_backend",  # https://github.com/OCA/storage
+        "fs_storage",  # https://github.com/OCA/storage
     ],
     "data": [
         "views/attachment_queue_views.xml",
@@ -2,7 +2,7 @@
 <odoo>
     <record id="import_from_filestore" model="attachment.synchronize.task">
         <field name="name">TEST Import</field>
-        <field name="backend_id" ref="storage_backend.default_storage_backend" />
+        <field name="backend_id" ref="fs_storage.default_fs_storage" />
         <field name="method_type">import</field>
         <field name="after_import">delete</field>
         <field name="filepath">test_import</field>
@@ -10,7 +10,7 @@
 
     <record id="export_to_filestore" model="attachment.synchronize.task">
         <field name="name">TEST Export</field>
-        <field name="backend_id" ref="storage_backend.default_storage_backend" />
+        <field name="backend_id" ref="fs_storage.default_fs_storage" />
         <field name="method_type">export</field>
         <field name="filepath">test_export</field>
     </record>
@@ -1,7 +1,7 @@
 # @ 2016 Florian DA COSTA @ Akretion
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
 
-import os
+import base64
 
 from odoo import api, fields, models
 
@@ -11,9 +11,9 @@ class AttachmentQueue(models.Model):
 
     task_id = fields.Many2one("attachment.synchronize.task", string="Task")
     method_type = fields.Selection(related="task_id.method_type")
-    storage_backend_id = fields.Many2one(
-        "storage.backend",
-        string="Storage Backend",
+    fs_storage_id = fields.Many2one(
+        "fs.storage",
+        string="Filestore Storage",
         related="task_id.backend_id",
         store=True,
     )
@@ -22,10 +22,20 @@ class AttachmentQueue(models.Model):
     )
 
     def _run(self):
-        super()._run()
+        res = super()._run()
         if self.file_type == "export":
-            path = os.path.join(self.task_id.filepath, self.name)
-            self.storage_backend_id._add_b64_data(path, self.datas)
+            fs = self.fs_storage_id.fs
+            folder_path = self.task_id.filepath
+            full_path = (
+                folder_path and fs.sep.join([folder_path, self.name]) or self.name
+            )
+            # create missing folders if necessary :
+            if folder_path and not fs.exists(folder_path):
+                fs.makedirs(folder_path)
+            data = base64.b64decode(self.datas)
+            with fs.open(full_path, "wb") as f:
+                f.write(data)
+        return res
 
     def _get_failure_emails(self):
         res = super()._get_failure_emails()
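The export branch of the new `_run()` above relies entirely on the fsspec filesystem object exposed by `fs.storage` as `.fs`. Below is a minimal standalone sketch of that write pattern, not part of the commit, assuming fsspec's in-memory backend and a hypothetical file name:

import base64

import fsspec

# Assumption: any fsspec-backed filesystem behaves like fs_storage's `.fs`;
# the in-memory backend keeps the sketch self-contained.
fs = fsspec.filesystem("memory")

folder_path = "test_export"                  # the task's filepath
datas = base64.b64encode(b"file content")    # attachment.queue stores base64 payloads

full_path = folder_path and fs.sep.join([folder_path, "foo.txt"]) or "foo.txt"
if folder_path and not fs.exists(folder_path):
    fs.makedirs(folder_path)                 # create missing folders first
with fs.open(full_path, "wb") as f:
    f.write(base64.b64decode(datas))         # write the decoded payload

print(fs.ls(folder_path, detail=False))      # e.g. ['/test_export/foo.txt']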
@@ -1,10 +1,9 @@
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
 
 import base64
 import datetime
 import logging
-import os
 
 import odoo
 from odoo import api, fields, models, tools
 from odoo.osv import expression
@@ -62,13 +61,10 @@ class AttachmentSynchronizeTask(models.Model):
     filepath = fields.Char(
         string="File Path", help="Path to imported/exported files in the Backend"
     )
-    backend_id = fields.Many2one("storage.backend", string="Backend")
+    backend_id = fields.Many2one("fs.storage", string="Backend")
     attachment_ids = fields.One2many("attachment.queue", "task_id", string="Attachment")
-    move_path = fields.Char(
-        string="Move Path", help="Imported File will be moved to this path"
-    )
+    move_path = fields.Char(help="Imported File will be moved to this path")
     new_name = fields.Char(
-        string="New Name",
         help="Imported File will be renamed to this name.\n"
         "New Name can use 'mako' template where 'obj' is the original file's name.\n"
         "For instance : ${obj.name}-${obj.create_date}.csv",
@@ -84,7 +80,6 @@ class AttachmentSynchronizeTask(models.Model):
     )
     file_type = fields.Selection(
         selection=[],
-        string="File Type",
         help="Used to fill the 'File Type' field in the imported 'Attachments Queues'."
         "\nFurther operations will be realized on these Attachments Queues depending "
         "on their 'File Type' value.",
@@ -96,7 +91,6 @@ class AttachmentSynchronizeTask(models.Model):
         "same name already exists.",
     )
     failure_emails = fields.Char(
-        string="Failure Emails",
         help="Used to fill the 'Failure Emails' field in the 'Attachments Queues' "
         "related to this task.\nAn alert will be sent to these emails if any operation "
         "on these Attachment Queue's file type fails.",
@@ -159,61 +153,78 @@ class AttachmentSynchronizeTask(models.Model):
             else:
                 getattr(record, method)()
 
+    def _get_files(self):
+        self.ensure_one()
+        fs = self.backend_id.fs
+        filepath = self.filepath or ""
+        if filepath and not fs.exists(filepath):
+            return []
+        if self.pattern:
+            path = filepath and fs.sep.join([filepath, self.pattern]) or self.pattern
+            file_path_names = fs.glob(path)
+        else:
+            file_path_names = fs.ls(filepath, detail=False)
+        if self.avoid_duplicated_files:
+            file_path_names = self._filter_duplicates(file_path_names)
+        return file_path_names
+
+    def _manage_file_after_import(self, file_name, fullpath, attachment):
+        self.ensure_one()
+        fs = self.backend_id.fs
+        new_full_path = False
+        if self.after_import == "rename":
+            new_name = self._template_render(self.new_name, attachment)
+            new_full_path = fs.sep.join([self.filepath, new_name])
+        elif self.after_import == "move":
+            new_full_path = fs.sep.join([self.move_path, file_name])
+        elif self.after_import == "move_rename":
+            new_name = self._template_render(self.new_name, attachment)
+            new_full_path = fs.sep.join([self.move_path, new_name])
+        if new_full_path:
+            fs.move(fullpath, new_full_path)
+        if self.after_import == "delete":
+            fs.rm(fullpath)
+
     def run_import(self):
         self.ensure_one()
         attach_obj = self.env["attachment.queue"]
-        backend = self.backend_id
-        filepath = self.filepath or ""
-        filenames = backend._list(relative_path=filepath, pattern=self.pattern)
-        if self.avoid_duplicated_files:
-            filenames = self._file_to_import(filenames)
+        file_path_names = self._get_files()
         total_import = 0
-        for file_name in filenames:
-            with api.Environment.manage():
-                with odoo.registry(self.env.cr.dbname).cursor() as new_cr:
-                    new_env = api.Environment(new_cr, self.env.uid, self.env.context)
-                    try:
-                        full_absolute_path = os.path.join(filepath, file_name)
-                        data = backend._get_b64_data(full_absolute_path)
-                        attach_vals = self._prepare_attachment_vals(data, file_name)
-                        attachment = attach_obj.with_env(new_env).create(attach_vals)
-                        new_full_path = False
-                        if self.after_import == "rename":
-                            new_name = self._template_render(self.new_name, attachment)
-                            new_full_path = os.path.join(filepath, new_name)
-                        elif self.after_import == "move":
-                            new_full_path = os.path.join(self.move_path, file_name)
-                        elif self.after_import == "move_rename":
-                            new_name = self._template_render(self.new_name, attachment)
-                            new_full_path = os.path.join(self.move_path, new_name)
-                        if new_full_path:
-                            backend._add_b64_data(new_full_path, data)
-                        if self.after_import in (
-                            "delete",
-                            "rename",
-                            "move",
-                            "move_rename",
-                        ):
-                            backend._delete(full_absolute_path)
-                        total_import += 1
-                    except Exception as e:
-                        new_env.cr.rollback()
-                        raise e
-                    else:
-                        new_env.cr.commit()
+        fs = self.backend_id.fs
+        for file_path in file_path_names:
+            if fs.isdir(file_path):
+                continue
+            with self.env.cr.savepoint():
+                file_name = file_path.split(fs.sep)[-1]
+                data = fs.read_bytes(file_path)
+                data = base64.b64encode(data)
+                attach_vals = self._prepare_attachment_vals(data, file_name)
+                attachment = attach_obj.create(attach_vals)
+                self._manage_file_after_import(file_name, file_path, attachment)
+                total_import += 1
         _logger.info("Run import complete! Imported {} files".format(total_import))
 
-    def _file_to_import(self, filenames):
+    def _filter_duplicates(self, file_path_names):
+        fs = self.backend_id.fs
         self.ensure_one()
+        if self.filepath:
+            filenames = [x.split(fs.sep)[-1] for x in file_path_names]
+        else:
+            filenames = file_path_names
         imported = (
             self.env["attachment.queue"]
             .search([("name", "in", filenames)])
             .mapped("name")
        )
-        return list(set(filenames) - set(imported))
+        file_path_names_no_duplicate = [
+            x for x in file_path_names if x.split(fs.sep)[-1] not in imported
+        ]
+        return file_path_names_no_duplicate
 
     def run_export(self):
         for task in self:
-            task.attachment_ids.filtered(lambda a: a.state == "pending").run()
+            for att in task.attachment_ids.filtered(lambda a: a.state == "pending"):
+                att.run_as_job()
 
     def button_duplicate_record(self):
         # due to orm limitation method call from ui should not have params
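The rewritten `run_import()` above replaces the `storage_backend` helpers (`_list`, `_get_b64_data`, `_delete`) with plain fsspec calls and a savepoint per file. A minimal sketch of the listing and after-import handling, not part of the commit, against fsspec's in-memory backend with hypothetical folder names:

import fsspec

fs = fsspec.filesystem("memory")
fs.makedirs("test_import", exist_ok=True)
with fs.open("test_import/bar.txt", "wb") as f:
    f.write(b"payload")

filepath, pattern = "test_import", "*.txt"
# _get_files(): glob when a pattern is set, plain ls otherwise
if pattern:
    file_path_names = fs.glob(fs.sep.join([filepath, pattern]))
else:
    file_path_names = fs.ls(filepath, detail=False)

fs.makedirs("test_archived", exist_ok=True)
for file_path in file_path_names:
    if fs.isdir(file_path):
        continue
    file_name = file_path.split(fs.sep)[-1]   # bare name stored on attachment.queue
    data = fs.read_bytes(file_path)           # raw bytes, b64-encoded before create()
    # after_import == "move": archive the source file
    fs.move(file_path, fs.sep.join(["test_archived", file_name]))

Processing each file inside `self.env.cr.savepoint()` scopes that file's writes, replacing the dedicated cursor and per-file commit handling of the 14.0 implementation.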
@@ -3,8 +3,8 @@
 from odoo import fields, models
 
 
-class StorageBackend(models.Model):
-    _inherit = "storage.backend"
+class FsStorage(models.Model):
+    _inherit = "fs.storage"
 
     synchronize_task_ids = fields.One2many(
         "attachment.synchronize.task", "backend_id", string="Tasks"
@@ -1,5 +1,5 @@
 This module allows to **import/export files** from/to backend servers.
 
-A backend server is defined by the basic `storage_backend <https://github.com/OCA/storage/tree/12.0/storage_backend>`_ OCA module, while it can be configured (amazon S3, sftp,...) with additional modules from the `storage <https://github.com/oca/storage>`_ repository.
+A backend server is defined by the basic `fs_storage <https://github.com/OCA/storage/tree/16.0/fs_storage>`_ OCA module, while it can be configured (amazon S3, sftp,...) with additional Python filesystem libraries.
 
-The imported files (and the files to be exported) are stored in Odoo as ``attachment.queue`` objects, defined by the `attachment_queue <https://github.com/OCA/server-tools/tree/12.0/attachment_queue>`_ module while the importation itself (resp. exportation) is realized by **"Attachments Import Tasks"** (resp. "Attachments Export Tasks") defined by this current module.
+The imported files (and the files to be exported) are stored in Odoo as ``attachment.queue`` objects, defined by the `attachment_queue <https://github.com/OCA/server-tools/tree/16.0/attachment_queue>`_ module, while the import itself (resp. export) is handled by **"Attachments Import Tasks"** (resp. "Attachments Export Tasks") defined by this module.
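For orientation, the pieces fit together as in the following sketch from an Odoo shell. It is not part of the commit: the task name is hypothetical, the field names are taken from the model above, and the referenced backend is the demo record shipped by fs_storage and used in this module's demo data.

# Assumes an Odoo 16 shell where `env` is available and both
# attachment_synchronize and fs_storage are installed.
backend = env.ref("fs_storage.default_fs_storage")
task = env["attachment.synchronize.task"].create(
    {
        "name": "Import vendor files",   # hypothetical label
        "backend_id": backend.id,
        "method_type": "import",
        "filepath": "test_import",
        "after_import": "delete",
    }
)
task.run_import()
# every imported file is now an attachment.queue record linked to the task
env["attachment.queue"].search([("task_id", "=", task.id)])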
@@ -2,34 +2,36 @@
 # @author Sébastien BEAU <sebastien.beau@akretion.com>
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
 
-import os
 import base64
 
-import mock
-
-from odoo.addons.storage_backend.tests.common import CommonCase
+from odoo.tests.common import TransactionCase
 
 
-class SyncCommon(CommonCase):
+class SyncCommon(TransactionCase):
     def _clean_testing_directory(self):
         for test_dir in [
             self.directory_input,
             self.directory_output,
             self.directory_archived,
         ]:
-            for filename in self.backend.list_files(test_dir):
-                self.backend.delete(os.path.join(test_dir, filename))
+            fs = self.backend.fs
+            if not fs.exists(test_dir):
+                fs.makedirs(test_dir)
+            for filename in fs.ls(test_dir, detail=False):
+                fs.rm(filename)
 
     def _create_test_file(self):
-        self.backend._add_b64_data(
-            os.path.join(self.directory_input, "bar.txt"),
-            self.filedata,
-            mimetype="text/plain",
-        )
+        fs = self.backend.fs
+        path = fs.sep.join([self.directory_input, "bar.txt"])
+        with fs.open(path, "wb") as f:
+            f.write(self.filedata)
 
     def setUp(self):
         super().setUp()
-        self.env.cr.commit = mock.Mock()
-        self.registry.enter_test_mode(self.env.cr)
+        # self.env.cr.commit = mock.Mock()
+        # self.registry.enter_test_mode(self.env.cr)
+        self.backend = self.env.ref("fs_storage.default_fs_storage")
         self.filedata = base64.b64encode(b"This is a simple file")
         self.directory_input = "test_import"
         self.directory_output = "test_export"
         self.directory_archived = "test_archived"
@@ -38,6 +40,5 @@ class SyncCommon(CommonCase):
         self.task = self.env.ref("attachment_synchronize.import_from_filestore")
 
     def tearDown(self):
-        self.registry.leave_test_mode()
         self._clean_testing_directory()
         super().tearDown()
@@ -2,7 +2,7 @@
 # @author Sébastien BEAU <sebastien.beau@akretion.com>
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
 
-import mock
+from unittest import mock
 
 from odoo.tools import mute_logger
 
@@ -28,16 +28,16 @@ class TestExport(SyncCommon):
 
     def test_export(self):
         self.attachment.run()
-        result = self.backend._list("test_export")
-        self.assertEqual(result, ["foo.txt"])
+        result = self.backend.fs.ls("test_export", detail=False)
+        self.assertEqual(result, ["test_export/foo.txt"])
 
     @mute_logger("odoo.addons.attachment_queue.models.attachment_queue")
     def test_failing_export(self):
         with mock.patch.object(
-            type(self.backend),
-            "_add_b64_data",
+            type(self.backend.fs),
+            "open",
             side_effect=raising_side_effect,
         ):
-            self.attachment.run()
+            self.attachment.with_context(queue_job__no_delay=True).run_as_job()
         self.assertEqual(self.attachment.state, "failed")
         self.assertEqual(self.attachment.state_message, "Boom")
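The failing-export test now patches `open` on the filesystem class rather than `_add_b64_data` on the backend. A standalone sketch of that patching pattern, not part of the commit, using plain fsspec outside of Odoo:

from unittest import mock

import fsspec


def raising_side_effect(*args, **kwargs):
    raise Exception("Boom")


fs = fsspec.filesystem("memory")
# Patching the class mirrors mock.patch.object(type(self.backend.fs), "open", ...)
with mock.patch.object(type(fs), "open", side_effect=raising_side_effect):
    try:
        fs.open("test_export/foo.txt", "wb")
    except Exception as exc:
        print(exc)  # "Boom" -> recorded on attachment.state_message by the queue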
@@ -8,21 +8,21 @@ from .common import SyncCommon
 class TestImport(SyncCommon):
     @property
     def archived_files(self):
-        return self.backend._list(self.directory_archived)
+        return self.backend.fs.ls(self.directory_archived, detail=False)
 
     @property
     def input_files(self):
-        return self.backend._list(self.directory_input)
+        return self.backend.fs.ls(self.directory_input, detail=False)
 
     def _check_attachment_created(self, count=1):
         attachment = self.env["attachment.queue"].search([("name", "=", "bar.txt")])
         self.assertEqual(len(attachment), count)
 
     def test_import_with_rename(self):
-        self.task.write({"after_import": "rename", "new_name": "foo.txt"})
+        self.task.write({"after_import": "rename", "new_name": "test-${obj.name}"})
         self.task.run_import()
         self._check_attachment_created()
-        self.assertEqual(self.input_files, ["foo.txt"])
+        self.assertEqual(self.input_files, ["test_import/test-bar.txt"])
         self.assertEqual(self.archived_files, [])
 
     def test_import_with_move(self):
@@ -30,7 +30,7 @@ class TestImport(SyncCommon):
         self.task.run_import()
         self._check_attachment_created()
         self.assertEqual(self.input_files, [])
-        self.assertEqual(self.archived_files, ["bar.txt"])
+        self.assertEqual(self.archived_files, ["test_archived/bar.txt"])
 
     def test_import_with_move_and_rename(self):
         self.task.write(
@@ -43,7 +43,7 @@ class TestImport(SyncCommon):
         self.task.run_import()
         self._check_attachment_created()
         self.assertEqual(self.input_files, [])
-        self.assertEqual(self.archived_files, ["foo.txt"])
+        self.assertEqual(self.archived_files, ["test_archived/foo.txt"])
 
     def test_import_with_delete(self):
         self.task.write({"after_import": "delete"})
@@ -13,7 +13,7 @@
                     domain="[('method_type', '!=', 'import')]"
                     attrs="{'required': [('file_type', '=', 'export')], 'readonly': [('method_type', '=', 'import')]}"
                 />
-                <field name="storage_backend_id" />
+                <field name="fs_storage_id" />
             </xpath>
         </field>
     </record>
@@ -27,7 +27,7 @@
             </xpath>
             <xpath expr="//field[@name='file_type']" position="after">
                 <field name="task_id" />
-                <field name="storage_backend_id" />
+                <field name="fs_storage_id" />
             </xpath>
         </field>
     </record>
@@ -28,6 +28,7 @@
                         type="action"
                         class="oe_stat_button"
                         icon="fa-thumbs-o-down"
+                        title="Failed attachments"
                         context="{'search_default_failed': 1}"
                     >
                         <div class="o_field_widget o_stat_info">
@@ -46,6 +47,7 @@
                         type="action"
                         class="oe_stat_button"
                         icon="fa-spinner"
+                        title="Pending attachments"
                         context="{'search_default_pending': 1}"
                     >
                         <div class="o_field_widget o_stat_info">
@@ -64,6 +66,7 @@
                         type="action"
                         class="oe_stat_button"
                         icon="fa-thumbs-o-up"
+                        title="Done attachments"
                         context="{'search_default_done': 1}"
                     >
                         <div class="o_field_widget o_stat_info">
@@ -109,13 +112,11 @@
                             <field name="after_import" />
                             <field
                                 name="move_path"
                                 colspan="4"
-                                attrs="{'invisible':[('after_import','!=','move'), ('after_import','!=','move_rename')]}"
+                                attrs="{'invisible':[('after_import','not in',('move', 'move_rename'))], 'required': [('after_import', 'in', ('move', 'move_rename'))]}"
                             />
                             <field
                                 name="new_name"
                                 colspan="4"
-                                attrs="{'invisible': [('after_import','!=','rename'), ('after_import','!=','move_rename')]}"
+                                attrs="{'invisible': [('after_import','not in',('rename', 'move_rename'))], 'required': [('after_import', 'in', ('rename', 'move_rename'))]}"
                             />
                             <field name="file_type" />
                         </group>
@@ -130,7 +131,7 @@
     <record id="view_attachment_task_tree" model="ir.ui.view">
         <field name="model">attachment.synchronize.task</field>
         <field name="arch" type="xml">
-            <tree string="Tasks" decoration-muted="active == False">
+            <tree decoration-muted="active == False">
                 <field name="name" select="1" />
                 <field name="backend_id" />
                 <field name="filepath" />
@@ -140,6 +141,7 @@
                     name="%(action_attachment_queue_related)d"
                     type="action"
                     icon="fa-thumbs-o-down"
+                    title="Failed attachments"
                     context="{'search_default_failed': 1}"
                 />
                 <field name="count_attachment_pending" string=" " />
@@ -147,6 +149,7 @@
                     name="%(action_attachment_queue_related)d"
                     type="action"
                     icon="fa-spinner"
+                    title="Pending attachments"
                     context="{'search_default_pending': 1}"
                 />
                 <field name="count_attachment_done" string=" " />
@@ -154,6 +157,7 @@
                     name="%(action_attachment_queue_related)d"
                     type="action"
                     icon="fa-thumbs-o-up"
+                    title="Done attachments"
                     context="{'search_default_done': 1}"
                 />
                 <field name="active" widget="boolean_toggle" />
@@ -161,7 +165,12 @@
                     show it if it's embeded in an other view
                     But it's seem that invisible do not work on button
                 -->
-                <button name="button_duplicate_record" type="object" icon="fa-clone" />
+                <button
+                    name="button_duplicate_record"
+                    type="object"
+                    icon="fa-clone"
+                    title="Duplicate"
+                />
             </tree>
         </field>
     </record>
@@ -202,7 +211,7 @@
 
     <menuitem
         id="menu_attachment_import_task"
-        parent="base.next_id_9"
+        parent="fs_storage.menu_storage"
         sequence="21"
         action="action_attachment_import_task"
     />
@@ -223,7 +232,7 @@
 
     <menuitem
         id="menu_attachment_export_task"
-        parent="base.next_id_9"
+        parent="fs_storage.menu_storage"
         sequence="22"
         action="action_attachment_export_task"
     />
@@ -1,9 +1,9 @@
 <?xml version="1.0" encoding="UTF-8" ?>
 <odoo>
 
-    <record id="view_storage_backend_form" model="ir.ui.view">
-        <field name="model">storage.backend</field>
-        <field name="inherit_id" ref="storage_backend.storage_backend_view_form" />
+    <record id="view_fs_storage_form" model="ir.ui.view">
+        <field name="model">fs.storage</field>
+        <field name="inherit_id" ref="fs_storage.fs_storage_form_view" />
         <field name="priority" eval="250" />
         <field name="arch" type="xml">
             <xpath expr="//div[hasclass('oe_title')]" position="before">