initial commit
This commit is contained in:
commit
2db2b9d3eb
5
__init__.py
Normal file
5
__init__.py
Normal file
|
@ -0,0 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from . import controllers
|
||||
from . import models
|
288
__manifest__.py
Normal file
288
__manifest__.py
Normal file
|
@ -0,0 +1,288 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
# Odoo addon manifest for the `web_editor` module: metadata, data files and
# the asset-bundle definitions consumed by Odoo's asset pipeline.
{
    'name': 'Web Editor',
    'category': 'Hidden',
    'description': """
Odoo Web Editor widget.
==========================

""",
    'depends': ['bus', 'web'],
    'data': [
        'security/ir.model.access.csv',
        'data/editor_assets.xml',
        'views/editor.xml',
        'views/snippets.xml',
    ],
    'assets': {

        #----------------------------------------------------------------------
        # MAIN BUNDLES
        #----------------------------------------------------------------------

        'web_editor.assets_legacy_wysiwyg': [
            'web_editor/static/src/js/editor/snippets.editor.js',
            'web_editor/static/src/js/editor/snippets.options.js',
        ],
        'web_editor.wysiwyg_iframe_editor_assets': [
            ('include', 'web._assets_helpers'),

            'web/static/src/scss/pre_variables.scss',
            'web/static/lib/bootstrap/scss/_variables.scss',

            'web/static/src/libs/fontawesome/css/font-awesome.css',
            'web/static/lib/odoo_ui_icons/*',
            'web/static/lib/select2/select2.css',
            'web/static/lib/select2-bootstrap-css/select2-bootstrap.css',
            'web/static/src/webclient/navbar/navbar.scss',
            'web/static/src/scss/animation.scss',
            'web/static/src/core/colorpicker/colorpicker.scss',
            'web/static/src/scss/mimetypes.scss',
            'web/static/src/scss/ui.scss',
            'web/static/src/legacy/scss/ui.scss',
            'web/static/src/legacy/scss/modal.scss',
            'web/static/src/views/fields/translation_dialog.scss',
            'web/static/src/scss/fontawesome_overridden.scss',

            'web/static/src/module_loader.js',
            'web/static/src/session.js',
            'web/static/lib/owl/owl.js',
            'web/static/lib/owl/odoo_module.js',
            'web/static/lib/jquery/jquery.js',
            'web/static/lib/popper/popper.js',
            'web/static/lib/bootstrap/js/dist/dom/data.js',
            'web/static/lib/bootstrap/js/dist/dom/event-handler.js',
            'web/static/lib/bootstrap/js/dist/dom/manipulator.js',
            'web/static/lib/bootstrap/js/dist/dom/selector-engine.js',
            'web/static/lib/bootstrap/js/dist/base-component.js',
            'web/static/lib/bootstrap/js/dist/alert.js',
            'web/static/lib/bootstrap/js/dist/button.js',
            'web/static/lib/bootstrap/js/dist/carousel.js',
            'web/static/lib/bootstrap/js/dist/collapse.js',
            'web/static/lib/bootstrap/js/dist/dropdown.js',
            'web/static/lib/bootstrap/js/dist/modal.js',
            'web/static/lib/bootstrap/js/dist/offcanvas.js',
            'web/static/lib/bootstrap/js/dist/tooltip.js',
            'web/static/lib/bootstrap/js/dist/popover.js',
            'web/static/lib/bootstrap/js/dist/scrollspy.js',
            'web/static/lib/bootstrap/js/dist/tab.js',
            'web/static/lib/bootstrap/js/dist/toast.js',
            'web/static/lib/select2/select2.js',
            'web/static/src/legacy/js/libs/bootstrap.js',
            'web/static/src/legacy/js/libs/jquery.js',
            'web/static/src/core/registry.js',

            # odoo-editor
            'web_editor/static/src/js/editor/odoo-editor/src/utils/utils.js',
            'web_editor/static/src/js/editor/odoo-editor/src/utils/constants.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/align.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/commands.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/deleteBackward.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/deleteForward.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/enter.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/shiftEnter.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/shiftTab.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/tab.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/toggleList.js',

            # odoo utils
            'web_editor/static/src/scss/bootstrap_overridden.scss',
            'web/static/src/scss/pre_variables.scss',
            'web/static/lib/bootstrap/scss/_variables.scss',
            'web_editor/static/src/js/editor/odoo-editor/src/style.scss',

            # integration
            'web_editor/static/src/scss/wysiwyg.scss',
            'web_editor/static/src/scss/wysiwyg_iframe.scss',
            'web_editor/static/src/scss/wysiwyg_snippets.scss',

            'web_editor/static/src/xml/editor.xml',
            'web_editor/static/src/xml/commands.xml',
            'web_editor/static/src/xml/grid_layout.xml',
            'web_editor/static/src/xml/snippets.xml',
            'web_editor/static/src/xml/wysiwyg.xml',
            'web_editor/static/src/xml/wysiwyg_colorpicker.xml',
        ],
        'web_editor.assets_media_dialog': [
            'web_editor/static/src/components/**/*',
        ],
        'web_editor.assets_tests_styles': [
            ('include', 'web._assets_helpers'),
            'web_editor/static/src/js/editor/odoo-editor/src/base_style.scss',
            'web_editor/static/src/js/editor/odoo-editor/src/checklist.scss',
        ],
        'web_editor.assets_wysiwyg': [
            # legacy stuff that are no longer in assets_backend
            'web/static/src/legacy/js/core/class.js',
            'web/static/src/legacy/js/core/dialog.js',
            'web/static/src/legacy/xml/dialog.xml',
            'web/static/src/legacy/js/core/minimal_dom.js',
            'web/static/src/legacy/js/core/dom.js',
            'web/static/src/legacy/js/core/mixins.js',
            'web/static/src/legacy/js/core/service_mixins.js',
            'web/static/src/legacy/js/core/widget.js',
            'web/static/src/legacy/utils.js',

            # lib
            'web_editor/static/lib/cropperjs/cropper.css',
            'web_editor/static/lib/cropperjs/cropper.js',
            'web_editor/static/lib/jquery-cropper/jquery-cropper.js',
            'web_editor/static/lib/jQuery.transfo.js',
            'web_editor/static/lib/webgl-image-filter/webgl-image-filter.js',
            'web_editor/static/lib/DOMPurify.js',

            # odoo-editor
            'web_editor/static/src/js/editor/odoo-editor/src/OdooEditor.js',
            'web_editor/static/src/js/editor/odoo-editor/src/utils/constants.js',
            'web_editor/static/src/js/editor/odoo-editor/src/utils/sanitize.js',
            'web_editor/static/src/js/editor/odoo-editor/src/utils/serialize.js',
            'web_editor/static/src/js/editor/odoo-editor/src/tablepicker/TablePicker.js',
            'web_editor/static/src/js/editor/odoo-editor/src/powerbox/patienceDiff.js',
            'web_editor/static/src/js/editor/odoo-editor/src/powerbox/Powerbox.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/align.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/commands.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/deleteBackward.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/deleteForward.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/enter.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/shiftEnter.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/shiftTab.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/tab.js',
            'web_editor/static/src/js/editor/odoo-editor/src/commands/toggleList.js',

            # utils
            'web_editor/static/src/js/editor/drag_and_drop.js',
            'web_editor/static/src/js/wysiwyg/linkDialogCommand.js',
            'web_editor/static/src/js/wysiwyg/MoveNodePlugin.js',
            'web_editor/static/src/js/wysiwyg/PeerToPeer.js',
            'web_editor/static/src/js/wysiwyg/conflict_dialog.js',
            'web_editor/static/src/js/wysiwyg/conflict_dialog.xml',
            'web_editor/static/src/js/wysiwyg/get_color_picker_template_service.js',

            # odoo utils
            ('include', 'web._assets_helpers'),

            'web_editor/static/src/scss/bootstrap_overridden.scss',
            'web/static/src/scss/pre_variables.scss',
            'web/static/lib/bootstrap/scss/_variables.scss',
            'web_editor/static/src/js/editor/odoo-editor/src/style.scss',

            # integration
            'web_editor/static/src/scss/wysiwyg.scss',
            'web_editor/static/src/scss/wysiwyg_iframe.scss',
            'web_editor/static/src/scss/wysiwyg_snippets.scss',

            'web_editor/static/src/js/editor/perspective_utils.js',
            'web_editor/static/src/js/editor/image_processing.js',
            'web_editor/static/src/js/editor/custom_colors.js',

            # widgets & plugins
            'web_editor/static/src/js/wysiwyg/widgets/**/*',
            'web_editor/static/src/js/editor/toolbar.js',

            # Launcher
            'web_editor/static/src/js/wysiwyg/wysiwyg_jquery_extention.js',
            'web_editor/static/src/js/wysiwyg/wysiwyg.js',
            'web_editor/static/src/js/wysiwyg/wysiwyg_iframe.js',

            'web_editor/static/src/xml/editor.xml',
            'web_editor/static/src/xml/commands.xml',
            'web_editor/static/src/xml/grid_layout.xml',
            'web_editor/static/src/xml/snippets.xml',
            'web_editor/static/src/xml/wysiwyg.xml',
            'web_editor/static/src/xml/wysiwyg_colorpicker.xml',
        ],
        'web_editor.backend_assets_wysiwyg': [
            ('include', 'web_editor.assets_wysiwyg'),
        ],
        'web.assets_backend': [
            'web_editor/static/src/js/editor/odoo-editor/src/base_style.scss',
            'web_editor/static/lib/vkbeautify/**/*',
            'web_editor/static/src/js/common/**/*',
            'web_editor/static/src/js/editor/odoo-editor/src/utils/utils.js',
            'web_editor/static/src/js/wysiwyg/fonts.js',

            ('include', 'web_editor.assets_media_dialog'),

            'web_editor/static/src/scss/web_editor.common.scss',
            'web_editor/static/src/scss/web_editor.backend.scss',

            'web_editor/static/src/js/backend/**/*',
            'web_editor/static/src/xml/backend.xml',
            'web_editor/static/src/components/history_dialog/**/*',
        ],
        "web.assets_web_dark": [
            'web_editor/static/src/scss/odoo-editor/powerbox.dark.scss',
            'web_editor/static/src/scss/odoo-editor/tablepicker.dark.scss',
            'web_editor/static/src/scss/odoo-editor/tableui.dark.scss',
            'web_editor/static/src/scss/wysiwyg.dark.scss',
            'web_editor/static/src/scss/web_editor.common.dark.scss',
        ],
        'web.assets_frontend_minimal': [
            'web_editor/static/src/js/frontend/loader_loading.js',
        ],
        'web.assets_frontend': [
            # legacy stuff that are no longer in assets_backend
            'web/static/src/legacy/utils.js',

            ('include', 'web_editor.assets_media_dialog'),

            'web_editor/static/src/js/editor/odoo-editor/src/base_style.scss',
            'web_editor/static/src/js/common/**/*',
            'web_editor/static/src/js/editor/odoo-editor/src/utils/utils.js',
            'web_editor/static/src/js/wysiwyg/fonts.js',

            'web_editor/static/src/scss/web_editor.common.scss',
            'web_editor/static/src/scss/web_editor.frontend.scss',

            'web_editor/static/src/js/frontend/loadWysiwygFromTextarea.js',
        ],
        'web.report_assets_common': [
            'web_editor/static/src/scss/bootstrap_overridden.scss',
            'web_editor/static/src/scss/web_editor.common.scss',
        ],

        #----------------------------------------------------------------------
        # SUB BUNDLES
        #----------------------------------------------------------------------

        'web._assets_primary_variables': [
            'web_editor/static/src/scss/web_editor.variables.scss',
            'web_editor/static/src/scss/wysiwyg.variables.scss',
        ],
        'web._assets_secondary_variables': [
            'web_editor/static/src/scss/secondary_variables.scss',
        ],
        'web._assets_backend_helpers': [
            'web_editor/static/src/scss/bootstrap_overridden_backend.scss',
            'web_editor/static/src/scss/bootstrap_overridden.scss',
        ],
        'web._assets_frontend_helpers': [
            ('prepend', 'web_editor/static/src/scss/bootstrap_overridden.scss'),
        ],

        # ----------------------------------------------------------------------
        # TESTS BUNDLES
        # ----------------------------------------------------------------------

        'web.qunit_suite_tests': [
            ('include', 'web_editor.assets_legacy_wysiwyg'),
            ('include', 'web_editor.backend_assets_wysiwyg'),

            'web_editor/static/tests/**/*',
            'web_editor/static/src/js/editor/odoo-editor/test/utils.js'
        ],
        'web_editor.mocha_tests': [
            'web/static/src/module_loader.js',
            # insert module dependencies here
            'web/static/src/core/utils/concurrency.js',

            'web_editor/static/src/js/editor/odoo-editor/src/**/*js',
            'web_editor/static/src/js/editor/odoo-editor/test/spec/*js',
            'web_editor/static/src/js/editor/odoo-editor/test/*js',
        ],
    },
    # Installed automatically as soon as its dependencies are installed.
    'auto_install': True,
    'license': 'LGPL-3',
}
|
BIN
__pycache__/__init__.cpython-311.pyc
Normal file
BIN
__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
__pycache__/tools.cpython-311.pyc
Normal file
BIN
__pycache__/tools.cpython-311.pyc
Normal file
Binary file not shown.
4
controllers/__init__.py
Normal file
4
controllers/__init__.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from . import main
|
BIN
controllers/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
controllers/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
controllers/__pycache__/main.cpython-311.pyc
Normal file
BIN
controllers/__pycache__/main.cpython-311.pyc
Normal file
Binary file not shown.
820
controllers/main.py
Normal file
820
controllers/main.py
Normal file
|
@ -0,0 +1,820 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
import contextlib
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
import requests
|
||||
import uuid
|
||||
import werkzeug.exceptions
|
||||
import werkzeug.urls
|
||||
from PIL import Image, ImageFont, ImageDraw
|
||||
from lxml import etree
|
||||
from base64 import b64decode, b64encode
|
||||
from datetime import datetime
|
||||
from math import floor
|
||||
from os.path import join as opj
|
||||
|
||||
from odoo.http import request, Response
|
||||
from odoo import http, tools, _, SUPERUSER_ID, release
|
||||
from odoo.addons.http_routing.models.ir_http import slug, unslug
|
||||
from odoo.addons.web_editor.tools import get_video_url_data
|
||||
from odoo.exceptions import UserError, MissingError, AccessError
|
||||
from odoo.tools.misc import file_open
|
||||
from odoo.tools.mimetypes import guess_mimetype
|
||||
from odoo.tools.image import image_data_uri, binary_to_image
|
||||
from odoo.addons.iap.tools import iap_tools
|
||||
from odoo.addons.base.models.assetsbundle import AssetsBundle
|
||||
|
||||
from ..models.ir_attachment import SUPPORTED_IMAGE_MIMETYPES
|
||||
|
||||
# Module-level logger for this controllers module.
logger = logging.getLogger(__name__)

# Default endpoints for external Odoo services — presumably overridable via
# configuration elsewhere in the addon; not visible from this file (TODO confirm).
DEFAULT_LIBRARY_ENDPOINT = 'https://media-api.odoo.com'
DEFAULT_OLG_ENDPOINT = 'https://olg.api.odoo.com'
|
||||
|
||||
|
||||
def get_existing_attachment(IrAttachment, vals):
    """Look up an attachment that already matches ``vals``.

    Builds a search domain from the creation values and returns the first
    matching ``ir.attachment`` record, or ``None`` when no duplicate exists
    (or when ``vals`` cannot identify one).
    """
    lookup = dict(vals)
    # Falsy res_id defaults to 0 on attachment creation.
    lookup['res_id'] = lookup.get('res_id') or 0
    raw = lookup.pop('raw', None)
    datas = lookup.pop('datas', None)
    domain = [(name, '=', value) for name, value in lookup.items()]

    if lookup.get('type') == 'url':
        # URL attachments are matched on their url and have no checksum.
        if 'url' not in lookup:
            return None
        domain.append(('checksum', '=', False))
    else:
        # Binary attachments are matched on the checksum of their content.
        if not raw and not datas:
            return None
        content = raw or b64decode(datas)
        domain.append(('checksum', '=', IrAttachment._compute_checksum(content)))

    return IrAttachment.search(domain, limit=1) or None
|
||||
|
||||
class Web_Editor(http.Controller):
|
||||
#------------------------------------------------------
|
||||
# convert font into picture
|
||||
#------------------------------------------------------
|
||||
    @http.route([
        '/web_editor/font_to_img/<icon>',
        '/web_editor/font_to_img/<icon>/<color>',
        '/web_editor/font_to_img/<icon>/<color>/<int:size>',
        '/web_editor/font_to_img/<icon>/<color>/<int:width>x<int:height>',
        '/web_editor/font_to_img/<icon>/<color>/<int:size>/<int:alpha>',
        '/web_editor/font_to_img/<icon>/<color>/<int:width>x<int:height>/<int:alpha>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>/<int:size>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>/<int:width>x<int:height>',
        '/web_editor/font_to_img/<icon>/<color>/<bg>/<int:width>x<int:height>/<int:alpha>',
    ], type='http', auth="none")
    def export_icon_to_png(self, icon, color='#000', bg=None, size=100, alpha=255, font='/web/static/src/libs/fontawesome/fonts/fontawesome-webfont.ttf', width=None, height=None):
        """ This method converts an unicode character to an image (using Font
            Awesome font by default) and is used only for mass mailing because
            custom fonts are not supported in mail.
            :param icon : decimal encoding of unicode character
            :param color : RGB code of the color
            :param bg : RGB code of the background color
            :param size : Pixels in integer
            :param alpha : transparency of the image from 0 to 255
            :param font : font path
            :param width : Pixels in integer
            :param height : Pixels in integer

            :returns PNG image converted from given font
        """
        # For custom icons, use the corresponding custom font
        if icon.isdigit():
            if int(icon) == 57467:
                font = "/web/static/fonts/tiktok_only.woff"

        # Dimensions: explicit width wins over `size`; both axes default to
        # the other when only one is given, then both are clamped to [1, 512].
        size = max(width, height, 1) if width else size
        width = width or size
        height = height or size
        # Make sure we have at least size=1
        width = max(1, min(width, 512))
        height = max(1, min(height, 512))
        # Initialize font — file_open resolves the path relative to the addons,
        # so the leading '/' must be stripped first.
        if font.startswith('/'):
            font = font[1:]
        font_obj = ImageFont.truetype(file_open(font, 'rb'), height)

        # if received character is not a number, keep old behaviour (icon is character)
        icon = chr(int(icon)) if icon.isdigit() else icon

        # Background standardization: PIL color strings take 'rgb(...)', so an
        # 'rgba(...)' background has its alpha component dropped.
        if bg is not None and bg.startswith('rgba'):
            bg = bg.replace('rgba', 'rgb')
            bg = ','.join(bg.split(',')[:-1])+')'

        # Convert the opacity value compatible with PIL Image color (0 to 255)
        # when color specifier is 'rgba'
        if color is not None and color.startswith('rgba'):
            *rgb, a = color.strip(')').split(',')
            opacity = str(floor(float(a) * 255))
            color = ','.join([*rgb, opacity]) + ')'

        # Determine the dimensions of the icon
        image = Image.new("RGBA", (width, height), color)
        draw = ImageDraw.Draw(image)

        # textbbox gives the glyph's bounding box; the draw.text + getbbox pass
        # finds where the glyph actually lands on the canvas.
        box = draw.textbbox((0, 0), icon, font=font_obj)
        boxw = box[2] - box[0]
        boxh = box[3] - box[1]
        draw.text((0, 0), icon, font=font_obj)
        left, top, right, bottom = image.getbbox()

        # Create an alpha mask
        imagemask = Image.new("L", (boxw, boxh), 0)
        drawmask = ImageDraw.Draw(imagemask)
        drawmask.text((-left, -top), icon, font=font_obj, fill=255)

        # Create a solid color image and apply the mask
        if color.startswith('rgba'):
            color = color.replace('rgba', 'rgb')
            color = ','.join(color.split(',')[:-1])+')'
        iconimage = Image.new("RGBA", (boxw, boxh), color)
        iconimage.putalpha(imagemask)

        # Create output image
        outimage = Image.new("RGBA", (boxw, height), bg or (0, 0, 0, 0))
        outimage.paste(iconimage, (left, top), iconimage)

        # output image
        output = io.BytesIO()
        outimage.save(output, format="PNG")
        response = Response()
        response.mimetype = 'image/png'
        response.data = output.getvalue()
        response.headers['Cache-Control'] = 'public, max-age=604800'
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
        response.headers['Connection'] = 'close'
        response.headers['Date'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime())
        # NOTE(review): Expires is set 604800*60 s (~420 days) ahead while
        # Cache-Control allows only 604800 s (7 days) — confirm the factor 60
        # is intentional.
        response.headers['Expires'] = time.strftime("%a, %d-%b-%Y %T GMT", time.gmtime(time.time()+604800*60))

        return response
|
||||
|
||||
#------------------------------------------------------
|
||||
# Update a checklist in the editor on check/uncheck
|
||||
#------------------------------------------------------
|
||||
@http.route('/web_editor/checklist', type='json', auth='user')
|
||||
def update_checklist(self, res_model, res_id, filename, checklistId, checked, **kwargs):
|
||||
record = request.env[res_model].browse(res_id)
|
||||
value = filename in record._fields and record[filename]
|
||||
htmlelem = etree.fromstring("<div>%s</div>" % value, etree.HTMLParser())
|
||||
checked = bool(checked)
|
||||
|
||||
li = htmlelem.find(".//li[@id='checkId-%s']" % checklistId)
|
||||
|
||||
if li is None:
|
||||
return value
|
||||
|
||||
classname = li.get('class', '')
|
||||
if ('o_checked' in classname) != checked:
|
||||
if checked:
|
||||
classname = '%s o_checked' % classname
|
||||
else:
|
||||
classname = re.sub(r"\s?o_checked\s?", '', classname)
|
||||
li.set('class', classname)
|
||||
else:
|
||||
return value
|
||||
|
||||
value = etree.tostring(htmlelem[0][0], encoding='utf-8', method='html')[5:-6].decode("utf-8")
|
||||
record.write({filename: value})
|
||||
|
||||
return value
|
||||
|
||||
#------------------------------------------------------
|
||||
# Update a stars rating in the editor on check/uncheck
|
||||
#------------------------------------------------------
|
||||
@http.route('/web_editor/stars', type='json', auth='user')
|
||||
def update_stars(self, res_model, res_id, filename, starsId, rating):
|
||||
record = request.env[res_model].browse(res_id)
|
||||
value = filename in record._fields and record[filename]
|
||||
htmlelem = etree.fromstring("<div>%s</div>" % value, etree.HTMLParser())
|
||||
|
||||
stars_widget = htmlelem.find(".//span[@id='checkId-%s']" % starsId)
|
||||
|
||||
if stars_widget is None:
|
||||
return value
|
||||
|
||||
# Check the `rating` first stars and uncheck the others if any.
|
||||
stars = []
|
||||
for star in stars_widget.getchildren():
|
||||
if 'fa-star' in star.get('class', ''):
|
||||
stars.append(star)
|
||||
star_index = 0
|
||||
for star in stars:
|
||||
classname = star.get('class', '')
|
||||
if star_index < rating and (not 'fa-star' in classname or 'fa-star-o' in classname):
|
||||
classname = re.sub(r"\s?fa-star-o\s?", '', classname)
|
||||
classname = '%s fa-star' % classname
|
||||
star.set('class', classname)
|
||||
elif star_index >= rating and not 'fa-star-o' in classname:
|
||||
classname = re.sub(r"\s?fa-star\s?", '', classname)
|
||||
classname = '%s fa-star-o' % classname
|
||||
star.set('class', classname)
|
||||
star_index += 1
|
||||
|
||||
value = etree.tostring(htmlelem[0][0], encoding='utf-8', method='html')[5:-6]
|
||||
record.write({filename: value})
|
||||
|
||||
return value
|
||||
|
||||
@http.route('/web_editor/video_url/data', type='json', auth='user', website=True)
|
||||
def video_url_data(self, video_url, autoplay=False, loop=False,
|
||||
hide_controls=False, hide_fullscreen=False, hide_yt_logo=False,
|
||||
hide_dm_logo=False, hide_dm_share=False):
|
||||
return get_video_url_data(
|
||||
video_url, autoplay=autoplay, loop=loop,
|
||||
hide_controls=hide_controls, hide_fullscreen=hide_fullscreen,
|
||||
hide_yt_logo=hide_yt_logo, hide_dm_logo=hide_dm_logo,
|
||||
hide_dm_share=hide_dm_share
|
||||
)
|
||||
|
||||
    @http.route('/web_editor/attachment/add_data', type='json', auth='user', methods=['POST'], website=True)
    def add_data(self, name, data, is_image, quality=0, width=0, height=0, res_id=False, res_model='ir.ui.view', **kwargs):
        """Create an attachment from base64-encoded ``data`` and return its
        media info.

        :param name: attachment filename; when falsy and ``is_image`` is set,
            a timestamp/uuid-based name is generated
        :param data: base64-encoded file content
        :param is_image: whether the upload must be processed and validated
            as an image
        :param quality: image quality forwarded to ``tools.image_process``
        :param width: target width forwarded to ``tools.image_process``
        :param height: target height forwarded to ``tools.image_process``
        :param res_id: id of the record the attachment is linked to
        :param res_model: model of the record the attachment is linked to
        :return: ``attachment._get_media_info()``, or ``{'error': …}`` when
            image validation fails
        """
        data = b64decode(data)
        if is_image:
            format_error_msg = _("Uploaded image's format is not supported. Try with: %s", ', '.join(SUPPORTED_IMAGE_MIMETYPES.values()))
            try:
                # Resize/recompress, then reject unsupported mimetypes.
                data = tools.image_process(data, size=(width, height), quality=quality, verify_resolution=True)
                mimetype = guess_mimetype(data)
                if mimetype not in SUPPORTED_IMAGE_MIMETYPES:
                    return {'error': format_error_msg}
                if not name:
                    # Generate a unique name with the proper extension.
                    name = '%s-%s%s' % (
                        datetime.now().strftime('%Y%m%d%H%M%S'),
                        str(uuid.uuid4())[:6],
                        SUPPORTED_IMAGE_MIMETYPES[mimetype],
                    )
            except UserError:
                # considered as an image by the browser file input, but not
                # recognized as such by PIL, eg .webp
                return {'error': format_error_msg}
            except ValueError as e:
                return {'error': e.args[0]}

        self._clean_context()
        attachment = self._attachment_create(name=name, data=data, res_id=res_id, res_model=res_model)
        return attachment._get_media_info()
|
||||
|
||||
@http.route('/web_editor/attachment/add_url', type='json', auth='user', methods=['POST'], website=True)
|
||||
def add_url(self, url, res_id=False, res_model='ir.ui.view', **kwargs):
|
||||
self._clean_context()
|
||||
attachment = self._attachment_create(url=url, res_id=res_id, res_model=res_model)
|
||||
return attachment._get_media_info()
|
||||
|
||||
@http.route('/web_editor/attachment/remove', type='json', auth='user', website=True)
|
||||
def remove(self, ids, **kwargs):
|
||||
""" Removes a web-based image attachment if it is used by no view (template)
|
||||
|
||||
Returns a dict mapping attachments which would not be removed (if any)
|
||||
mapped to the views preventing their removal
|
||||
"""
|
||||
self._clean_context()
|
||||
Attachment = attachments_to_remove = request.env['ir.attachment']
|
||||
Views = request.env['ir.ui.view']
|
||||
|
||||
# views blocking removal of the attachment
|
||||
removal_blocked_by = {}
|
||||
|
||||
for attachment in Attachment.browse(ids):
|
||||
# in-document URLs are html-escaped, a straight search will not
|
||||
# find them
|
||||
url = tools.html_escape(attachment.local_url)
|
||||
views = Views.search([
|
||||
"|",
|
||||
('arch_db', 'like', '"%s"' % url),
|
||||
('arch_db', 'like', "'%s'" % url)
|
||||
])
|
||||
|
||||
if views:
|
||||
removal_blocked_by[attachment.id] = views.read(['name'])
|
||||
else:
|
||||
attachments_to_remove += attachment
|
||||
if attachments_to_remove:
|
||||
attachments_to_remove.unlink()
|
||||
return removal_blocked_by
|
||||
|
||||
    @http.route('/web_editor/get_image_info', type='json', auth='user', website=True)
    def get_image_info(self, src=''):
        """This route is used to determine the original of an attachment so that
        it can be used as a base to modify it again (crop/optimization/filters).

        :param src: url of the image whose attachment must be resolved
        :return: dict with keys ``attachment`` (``{'id': …}`` or False) and
            ``original`` (read of the original attachment, or False)
        """
        attachment = None
        # First try to resolve /web/image/... urls through the routing layer.
        if src.startswith('/web/image'):
            with contextlib.suppress(werkzeug.exceptions.NotFound, MissingError):
                _, args = request.env['ir.http']._match(src)
                record = request.env['ir.binary']._find_record(
                    xmlid=args.get('xmlid'),
                    res_model=args.get('model', 'ir.attachment'),
                    res_id=args.get('id'),
                )
                if record._name == 'ir.attachment':
                    attachment = record
        if not attachment:
            # Find attachment by url. There can be multiple matches because of default
            # snippet images referencing the same image in /static/, so we limit to 1
            attachment = request.env['ir.attachment'].search([
                '|', ('url', '=like', src), ('url', '=like', '%s?%%' % src),
                ('mimetype', 'in', list(SUPPORTED_IMAGE_MIMETYPES.keys())),
            ], limit=1)
        if not attachment:
            return {
                'attachment': False,
                'original': False,
            }
        # `original_id` points to the unmodified source image when this
        # attachment is itself a derived (cropped/optimized) copy.
        return {
            'attachment': attachment.read(['id'])[0],
            'original': (attachment.original_id or attachment).read(['id', 'image_src', 'mimetype'])[0],
        }
|
||||
|
||||
    def _attachment_create(self, name='', data=False, url=False, res_id=False, res_model='ir.ui.view'):
        """Create and return a new attachment.

        :param name: attachment filename (derived from ``url`` when empty)
        :param data: raw binary content (exclusive with ``url`` for the
            attachment type, although a url may accompany binary data)
        :param url: url the attachment points to
        :param res_id: id of the linked record (only kept for non-view models)
        :param res_model: model of the linked record
        :raises UserError: when neither ``data`` nor ``url`` is provided
        """
        IrAttachment = request.env['ir.attachment']

        if name.lower().endswith('.bmp'):
            # Avoid mismatch between content type and mimetype, see commit msg
            name = name[:-4]

        if not name and url:
            # Fall back on the last url segment as a name.
            name = url.split("/").pop()

        if res_model != 'ir.ui.view' and res_id:
            res_id = int(res_id)
        else:
            res_id = False

        attachment_data = {
            'name': name,
            # View-linked attachments are publicly accessible.
            'public': res_model == 'ir.ui.view',
            'res_id': res_id,
            'res_model': res_model,
        }

        if data:
            attachment_data['raw'] = data
            if url:
                attachment_data['url'] = url
        elif url:
            attachment_data.update({
                'type': 'url',
                'url': url,
            })
        else:
            raise UserError(_("You need to specify either data or url to create an attachment."))

        # Despite the user having no right to create an attachment, he can still
        # create an image attachment through some flows
        if (
            not request.env.is_admin()
            and IrAttachment._can_bypass_rights_on_media_dialog(**attachment_data)
        ):
            attachment = IrAttachment.sudo().create(attachment_data)
            # When portal users upload an attachment with the wysiwyg widget,
            # the access token is needed to use the image in the editor. If
            # the attachment is not public, the user won't be able to generate
            # the token, so we need to generate it using sudo
            if not attachment_data['public']:
                attachment.sudo().generate_access_token()
        else:
            # Reuse an identical existing attachment when possible.
            attachment = get_existing_attachment(IrAttachment, attachment_data) \
                or IrAttachment.create(attachment_data)

        return attachment
|
||||
|
||||
def _clean_context(self):
|
||||
# avoid allowed_company_ids which may erroneously restrict based on website
|
||||
context = dict(request.context)
|
||||
context.pop('allowed_company_ids', None)
|
||||
request.update_env(context=context)
|
||||
|
||||
@http.route("/web_editor/get_assets_editor_resources", type="json", auth="user", website=True)
|
||||
def get_assets_editor_resources(self, key, get_views=True, get_scss=True, get_js=True, bundles=False, bundles_restriction=[], only_user_custom_files=True):
|
||||
"""
|
||||
Transmit the resources the assets editor needs to work.
|
||||
|
||||
Params:
|
||||
key (str): the key of the view the resources are related to
|
||||
|
||||
get_views (bool, default=True):
|
||||
True if the views must be fetched
|
||||
|
||||
get_scss (bool, default=True):
|
||||
True if the style must be fetched
|
||||
|
||||
get_js (bool, default=True):
|
||||
True if the javascript must be fetched
|
||||
|
||||
bundles (bool, default=False):
|
||||
True if the bundles views must be fetched
|
||||
|
||||
bundles_restriction (list, default=[]):
|
||||
Names of the bundles in which to look for scss files
|
||||
(if empty, search in all of them)
|
||||
|
||||
only_user_custom_files (bool, default=True):
|
||||
True if only user custom files must be fetched
|
||||
|
||||
Returns:
|
||||
dict: views, scss, js
|
||||
"""
|
||||
# Related views must be fetched if the user wants the views and/or the style
|
||||
views = request.env["ir.ui.view"].with_context(no_primary_children=True, __views_get_original_hierarchy=[]).get_related_views(key, bundles=bundles)
|
||||
views = views.read(['name', 'id', 'key', 'xml_id', 'arch', 'active', 'inherit_id'])
|
||||
|
||||
scss_files_data_by_bundle = []
|
||||
js_files_data_by_bundle = []
|
||||
|
||||
if get_scss:
|
||||
scss_files_data_by_bundle = self._load_resources('scss', views, bundles_restriction, only_user_custom_files)
|
||||
if get_js:
|
||||
js_files_data_by_bundle = self._load_resources('js', views, bundles_restriction, only_user_custom_files)
|
||||
|
||||
return {
|
||||
'views': get_views and views or [],
|
||||
'scss': get_scss and scss_files_data_by_bundle or [],
|
||||
'js': get_js and js_files_data_by_bundle or [],
|
||||
}
|
||||
|
||||
def _load_resources(self, file_type, views, bundles_restriction, only_user_custom_files):
    """
    Collect the editable asset files of a given type referenced by views.

    Params:
        file_type (str): 'scss' or 'js', the kind of asset files to collect

        views (list of dict):
            view records (as returned by ``read()``) whose arch is scanned
            for ``t-call-assets`` nodes

        bundles_restriction (list):
            names of the bundles in which to look for files
            (if empty, search in all of them)

        only_user_custom_files (bool):
            True if only user custom files must be collected (all scss
            files are kept anyway when False)

    Returns:
        list: ``[[bundle_name, [file_data, ...]], ...]`` where each
        file_data is a dict with 'url', 'arch' and 'customized' keys
    """
    AssetsUtils = request.env['web_editor.assets']

    files_data_by_bundle = []
    # A t-call-assets node can disable js/css loading through these flags.
    t_call_assets_attribute = 't-js'
    if file_type == 'scss':
        t_call_assets_attribute = 't-css'

    # Compile regex outside of the loop.
    # This will be used to exclude library scss files from the result.
    # Fix: the pattern is now a raw string; the previous plain string
    # contained "\." which is an invalid escape sequence (SyntaxWarning /
    # future SyntaxError on recent Python versions).
    excluded_url_matcher = re.compile(r"^(.+/lib/.+)|(.+import_bootstrap.+\.scss)$")

    # First check the t-call-assets used in the related views
    url_infos = dict()
    for v in views:
        for asset_call_node in etree.fromstring(v["arch"]).xpath("//t[@t-call-assets]"):
            attr = asset_call_node.get(t_call_assets_attribute)
            # Skip the node when the relevant t-js/t-css flag is falsy.
            if attr and not json.loads(attr.lower()):
                continue
            asset_name = asset_call_node.get("t-call-assets")

            # Loop through bundle files to search for file info
            files_data = []
            for file_info in request.env["ir.qweb"]._get_asset_content(asset_name)[0]:
                if file_info["url"].rpartition('.')[2] != file_type:
                    continue
                url = file_info["url"]

                # Exclude library files (see regex above)
                if excluded_url_matcher.match(url):
                    continue

                # Check if the file is customized and get bundle/path info
                file_data = AssetsUtils._get_data_from_url(url)
                if not file_data:
                    continue

                # Save info according to the filter (arch will be fetched later)
                url_infos[url] = file_data

                if '/user_custom_' in url \
                        or file_data['customized'] \
                        or file_type == 'scss' and not only_user_custom_files:
                    files_data.append(url)

            # scss data is returned sorted by bundle, with the bundles
            # names and xmlids
            if len(files_data):
                files_data_by_bundle.append([asset_name, files_data])

    # Filter bundles/files:
    # - A file which appears in multiple bundles only appears in the
    #   first one (the first in the DOM)
    # - Only keep bundles with files which appear in the asked bundles
    #   and only keep those files
    for i in range(0, len(files_data_by_bundle)):
        bundle_1 = files_data_by_bundle[i]
        for j in range(0, len(files_data_by_bundle)):
            bundle_2 = files_data_by_bundle[j]
            # In unwanted bundles, keep only the files which are in wanted bundles too (web._helpers)
            if bundle_1[0] not in bundles_restriction and bundle_2[0] in bundles_restriction:
                bundle_1[1] = [item_1 for item_1 in bundle_1[1] if item_1 in bundle_2[1]]
    for i in range(0, len(files_data_by_bundle)):
        bundle_1 = files_data_by_bundle[i]
        for j in range(i + 1, len(files_data_by_bundle)):
            bundle_2 = files_data_by_bundle[j]
            # In every bundle, keep only the files which were not found
            # in previous bundles
            bundle_2[1] = [item_2 for item_2 in bundle_2[1] if item_2 not in bundle_1[1]]

    # Only keep bundles which still have files and that were requested
    files_data_by_bundle = [
        data for data in files_data_by_bundle
        if (len(data[1]) > 0 and (not bundles_restriction or data[0] in bundles_restriction))
    ]

    # Fetch the arch of each kept file, in each bundle
    urls = []
    for bundle_data in files_data_by_bundle:
        urls += bundle_data[1]
    custom_attachments = AssetsUtils._get_custom_attachment(urls, op='in')

    for bundle_data in files_data_by_bundle:
        for i in range(0, len(bundle_data[1])):
            url = bundle_data[1][i]
            url_info = url_infos[url]

            content = AssetsUtils._get_content_from_url(url, url_info, custom_attachments)

            # Replace the plain URL entry with the full file description.
            bundle_data[1][i] = {
                'url': "/%s/%s" % (url_info["module"], url_info["resource_path"]),
                'arch': content,
                'customized': url_info["customized"],
            }

    return files_data_by_bundle
|
||||
|
||||
@http.route('/web_editor/modify_image/<model("ir.attachment"):attachment>', type="json", auth="user", website=True)
def modify_image(self, attachment, res_model=None, res_id=None, name=None, data=None, original_id=None, mimetype=None, alt_data=None):
    """
    Creates a modified copy of an attachment and returns its image_src to be
    inserted into the DOM.

    :param attachment: the ir.attachment record being modified (resolved
        from the URL converter)
    :param res_model: model the copy is linked to (defaults to 'ir.ui.view')
    :param res_id: record id the copy is linked to (only used when
        res_model is not 'ir.ui.view')
    :param name: file name of the copy (defaults to the original's name)
    :param data: base64 content of the modified image
    :param original_id: NOTE(review): accepted but never read in this body;
        'original_id' is always set from ``attachment.id`` below — confirm
        whether this parameter is kept for API compatibility only
    :param mimetype: mimetype of the copy (defaults to the original's)
    :param alt_data: dict mapping a size to per-mimetype base64 payloads,
        used to create alternate webp/jpeg renditions
    :return: str, the image URL, with an access token appended when the
        attachment is not public
    """
    fields = {
        'original_id': attachment.id,
        'datas': data,
        'type': 'binary',
        'res_model': res_model or 'ir.ui.view',
        'mimetype': mimetype or attachment.mimetype,
        'name': name or attachment.name,
    }
    # Copies attached to views are not linked to a specific record.
    if fields['res_model'] == 'ir.ui.view':
        fields['res_id'] = 0
    elif res_id:
        fields['res_id'] = res_id
    # Keep the extension consistent with the webp mimetype.
    if fields['mimetype'] == 'image/webp':
        fields['name'] = re.sub(r'\.(jpe?g|png)$', '.webp', fields['name'], flags=re.I)
    # Reuse an identical existing copy instead of duplicating it.
    existing_attachment = get_existing_attachment(request.env['ir.attachment'], fields)
    if existing_attachment and not existing_attachment.url:
        attachment = existing_attachment
    else:
        attachment = attachment.copy(fields)
    if alt_data:
        # Create the alternate renditions; the jpeg one is chained to the
        # webp one (when present) through its res_id.
        for size, per_type in alt_data.items():
            reference_id = attachment.id
            if 'image/webp' in per_type:
                resized = attachment.create_unique([{
                    'name': attachment.name,
                    'description': 'resize: %s' % size,
                    'datas': per_type['image/webp'],
                    'res_id': reference_id,
                    'res_model': 'ir.attachment',
                    'mimetype': 'image/webp',
                }])
                reference_id = resized[0]
            if 'image/jpeg' in per_type:
                attachment.create_unique([{
                    'name': re.sub(r'\.webp$', '.jpg', attachment.name, flags=re.I),
                    'description': 'format: jpeg',
                    'datas': per_type['image/jpeg'],
                    'res_id': reference_id,
                    'res_model': 'ir.attachment',
                    'mimetype': 'image/jpeg',
                }])
    if attachment.url:
        # Don't keep url if modifying static attachment because static images
        # are only served from disk and don't fallback to attachments.
        if re.match(r'^/\w+/static/', attachment.url):
            attachment.url = None
        # Uniquify url by adding a path segment with the id before the name.
        # This allows us to keep the unsplash url format so it still reacts
        # to the unsplash beacon.
        else:
            url_fragments = attachment.url.split('/')
            url_fragments.insert(-1, str(attachment.id))
            attachment.url = '/'.join(url_fragments)
    if attachment.public:
        return attachment.image_src
    # Non-public images need a token so the browser can fetch them.
    attachment.generate_access_token()
    return '%s?access_token=%s' % (attachment.image_src, attachment.access_token)
|
||||
|
||||
def _get_shape_svg(self, module, *segments):
    """Return the content of an SVG shipped in a module's static directory.

    :param module: technical name of the module holding the file
    :param segments: path segments below ``<module>/static``
    :return: str, the SVG markup
    :raises werkzeug.exceptions.NotFound: when no such file exists
    """
    svg_path = opj(module, 'static', *segments)
    try:
        # filter_ext ensures only .svg files can be read through this route.
        with file_open(svg_path, 'r', filter_ext=('.svg',)) as svg_file:
            content = svg_file.read()
    except FileNotFoundError:
        raise werkzeug.exceptions.NotFound()
    return content
|
||||
|
||||
def _update_svg_colors(self, options, svg):
    """Recolor an SVG according to user-provided palette options.

    :param options: dict of query parameters; keys 'c1'..'c5' are color
        replacements for the default palette, any other key is returned
        untouched in the second element of the result
    :param svg: str, the SVG markup to recolor
    :return: tuple (recolored svg str, dict of remaining non-color options)
    :raises werkzeug.exceptions.BadRequest: when a color value is neither
        a hex/rgb(a) literal nor a resolvable 'o-color-[1-5]' variable
    """
    user_colors = []
    svg_options = {}
    # Default shape palette: occurrences of these exact colors in the SVG
    # are the substitution targets.
    default_palette = {
        '1': '#3AADAA',
        '2': '#7C6576',
        '3': '#F6F6F6',
        '4': '#FFFFFF',
        '5': '#383E45',
    }
    # Frontend bundle CSS is fetched lazily, only if an 'o-color-N'
    # variable must be resolved to a concrete color.
    bundle_css = None
    regex_hex = r'#[0-9A-F]{6,8}'
    regex_rgba = r'rgba?\(\d{1,3}, ?\d{1,3}, ?\d{1,3}(?:, ?[0-9.]{1,4})?\)'
    for key, value in options.items():
        colorMatch = re.match('^c([1-5])$', key)
        if colorMatch:
            css_color_value = value
            # Check that color is hex or rgb(a) to prevent arbitrary injection
            if not re.match(r'(?i)^%s$|^%s$' % (regex_hex, regex_rgba), css_color_value.replace(' ', '')):
                if re.match('^o-color-([1-5])$', css_color_value):
                    if not bundle_css:
                        bundle = 'web.assets_frontend'
                        asset = request.env["ir.qweb"]._get_asset_bundle(bundle)
                        bundle_css = asset.css().index_content
                    # Resolve the CSS variable to its literal value as
                    # declared in the compiled bundle.
                    color_search = re.search(r'(?i)--%s:\s+(%s|%s)' % (css_color_value, regex_hex, regex_rgba), bundle_css)
                    if not color_search:
                        raise werkzeug.exceptions.BadRequest()
                    css_color_value = color_search.group(1)
                else:
                    raise werkzeug.exceptions.BadRequest()
            user_colors.append([tools.html_escape(css_color_value), colorMatch.group(1)])
        else:
            svg_options[key] = value

    # Map each default palette color to its user replacement.
    color_mapping = {default_palette[palette_number]: color for color, palette_number in user_colors}
    # create a case-insensitive regex to match all the colors to replace, eg: '(?i)(#3AADAA)|(#7C6576)'
    regex = '(?i)%s' % '|'.join('(%s)' % color for color in color_mapping.keys())

    def subber(match):
        # Palette keys are stored uppercase; normalize the match for lookup.
        key = match.group().upper()
        return color_mapping[key] if key in color_mapping else key
    return re.sub(regex, subber, svg), svg_options
|
||||
|
||||
@http.route(['/web_editor/shape/<module>/<path:filename>'], type='http', auth="public", website=True)
def shape(self, module, filename, **kwargs):
    """
    Returns a color-customized svg (background shape or illustration).

    :param module: module holding the shape, or the special value
        'illustration' to serve an ir.attachment instead of a static file
    :param filename: path of the shape below the module's shapes folder,
        or the slug of the illustration attachment
    :param kwargs: color options ('c1'..'c5') and 'flip' ('x'/'y'/'xy')
    :return: an http response with the customized SVG
    """
    svg = None
    if module == 'illustration':
        attachment = request.env['ir.attachment'].sudo().browse(unslug(filename)[1])
        # The attachment must be a public binary whose url matches the
        # requested path, otherwise it cannot be served from here.
        if (not attachment.exists()
                or attachment.type != 'binary'
                or not attachment.public
                or not attachment.url.startswith(request.httprequest.path)):
            # Fallback to URL lookup to allow using shapes that were
            # imported from data files.
            attachment = request.env['ir.attachment'].sudo().search([
                ('type', '=', 'binary'),
                ('public', '=', True),
                ('url', '=', request.httprequest.path),
            ], limit=1)
            if not attachment:
                raise werkzeug.exceptions.NotFound()
        svg = attachment.raw.decode('utf-8')
    else:
        svg = self._get_shape_svg(module, 'shapes', filename)

    svg, options = self._update_svg_colors(kwargs, svg)
    # Flipping is done with a CSS transform injected on the root svg tag.
    flip_value = options.get('flip', False)
    if flip_value == 'x':
        svg = svg.replace('<svg ', '<svg style="transform: scaleX(-1);" ', 1)
    elif flip_value == 'y':
        svg = svg.replace('<svg ', '<svg style="transform: scaleY(-1)" ', 1)
    elif flip_value == 'xy':
        svg = svg.replace('<svg ', '<svg style="transform: scale(-1)" ', 1)

    return request.make_response(svg, [
        ('Content-type', 'image/svg+xml'),
        ('Cache-control', 'max-age=%s' % http.STATIC_CACHE_LONG),
    ])
|
||||
|
||||
@http.route(['/web_editor/image_shape/<string:img_key>/<module>/<path:filename>'], type='http', auth="public", website=True)
def image_shape(self, module, filename, img_key, **kwargs):
    """Serve an image embedded inside a color-customized shape SVG.

    :param module: module holding the shape under static/image_shapes
    :param filename: path of the shape file in that folder
    :param img_key: key identifying the image record/field to embed
    :param kwargs: color options forwarded to _update_svg_colors
    :return: an http response with the combined SVG, or the original image
        response when the image is URL-based
    """
    svg = self._get_shape_svg(module, 'image_shapes', filename)

    record = request.env['ir.binary']._find_record(img_key)
    stream = request.env['ir.binary']._get_image_stream_from(record)
    # URL-type streams cannot be inlined; return them as-is.
    if stream.type == 'url':
        return stream.get_response()

    image = stream.read()
    img = binary_to_image(image)
    # Size the shape like the embedded image.
    width, height = tuple(str(size) for size in img.size)
    root = etree.fromstring(svg)
    root.attrib.update({'width': width, 'height': height})
    # Update default color palette on shape SVG.
    svg, _ = self._update_svg_colors(kwargs, etree.tostring(root, pretty_print=True).decode('utf-8'))
    # Add image in base64 inside the shape.
    uri = image_data_uri(b64encode(image))
    svg = svg.replace('<image xlink:href="', '<image xlink:href="%s' % uri)

    return request.make_response(svg, [
        ('Content-type', 'image/svg+xml'),
        ('Cache-control', 'max-age=%s' % http.STATIC_CACHE_LONG),
    ])
|
||||
|
||||
@http.route(['/web_editor/media_library_search'], type='json', auth="user", website=True)
def media_library_search(self, **params):
    """Proxy a search query to the configured media library service.

    :param params: search parameters forwarded to the remote endpoint;
        the database uuid is added before sending
    :return: the JSON payload of the remote service, or
        ``{'error': <status_code>}`` on failure
    """
    config = request.env['ir.config_parameter'].sudo()
    library_url = config.get_param('web_editor.media_library_endpoint', DEFAULT_LIBRARY_ENDPOINT)
    params['dbuuid'] = config.get_param('database.uuid')
    resp = requests.post('%s/media-library/1/search' % library_url, data=params)
    # Only trust a 200 response that actually carries JSON.
    if resp.status_code == requests.codes.ok and resp.headers['content-type'] == 'application/json':
        return resp.json()
    return {'error': resp.status_code}
|
||||
|
||||
@http.route('/web_editor/save_library_media', type='json', auth='user', methods=['POST'])
def save_library_media(self, media):
    """
    Saves images from the media library as new attachments, making them
    dynamic SVGs if needed.

    media = {
        <media_id>: {
            'query': 'space separated search terms',
            'is_dynamic_svg': True/False,
            'dynamic_colors': maps color names to their color,
        }, ...
    }

    :return: list of media-info dicts for the created attachments
    :raises Exception: when the media library refuses the download request
    """
    attachments = []
    ICP = request.env['ir.config_parameter'].sudo()
    library_endpoint = ICP.get_param('web_editor.media_library_endpoint', DEFAULT_LIBRARY_ENDPOINT)

    # Ask the library for one download URL per requested media id.
    media_ids = ','.join(media.keys())
    params = {
        'dbuuid': ICP.get_param('database.uuid'),
        'media_ids': media_ids,
    }
    response = requests.post('%s/media-library/1/download_urls' % library_endpoint, data=params)
    if response.status_code != requests.codes.ok:
        raise Exception(_("ERROR: couldn't get download urls from media library."))

    for id, url in response.json().items():
        req = requests.get(url)
        name = '_'.join([media[id]['query'], url.split('/')[-1]])
        # Need to bypass security check to write image with mimetype image/svg+xml
        # ok because svgs come from whitelisted origin
        attachment = request.env['ir.attachment'].with_user(SUPERUSER_ID).create({
            'name': name,
            'mimetype': req.headers['content-type'],
            'public': True,
            'raw': req.content,
            'res_model': 'ir.ui.view',
            'res_id': 0,
        })
        if media[id]['is_dynamic_svg']:
            # Dynamic SVGs are served through the shape controller so their
            # colors can be substituted at request time.
            colorParams = werkzeug.urls.url_encode(media[id]['dynamic_colors'])
            attachment['url'] = '/web_editor/shape/illustration/%s?%s' % (slug(attachment), colorParams)
        attachments.append(attachment._get_media_info())

    return attachments
|
||||
|
||||
@http.route("/web_editor/get_ice_servers", type='json', auth="user")
def get_ice_servers(self):
    """Return the ICE servers to use for RTC connections."""
    IceServer = request.env['mail.ice.server']
    return IceServer._get_ice_servers()
|
||||
|
||||
@http.route("/web_editor/bus_broadcast", type="json", auth="user")
def bus_broadcast(self, model_name, field_name, res_id, bus_data):
    """Relay a collaborative-editing message on the document's bus channel.

    :param model_name: model of the edited document
    :param field_name: field being collaboratively edited
    :param res_id: id of the edited record
    :param bus_data: payload to broadcast; enriched with the document
        coordinates before sending
    """
    document = request.env[model_name].browse([res_id])

    # The sender must be allowed to both read and write the edited field
    # (model rights, field rights and record rules, in that order).
    for operation in ('read', 'write'):
        document.check_access_rights(operation)
        document.check_field_access_rights(operation, [field_name])
        document.check_access_rule(operation)

    channel = (request.db, 'editor_collaboration', model_name, field_name, int(res_id))
    bus_data.update({'model_name': model_name, 'field_name': field_name, 'res_id': res_id})
    request.env['bus.bus']._sendone(channel, 'editor_collaboration', bus_data)
|
||||
|
||||
@http.route('/web_editor/tests', type='http', auth="user")
def test_suite(self, mod=None, **kwargs):
    """Serve the web_editor test-suite page."""
    template = 'web_editor.tests'
    return request.render(template)
|
||||
|
||||
@http.route("/web_editor/generate_text", type="json", auth="user")
def generate_text(self, prompt, conversation_history):
    """Generate text through the OLG IAP service.

    :param prompt: str, the user prompt to send to the service
    :param conversation_history: list of previous exchanges, or falsy for
        a fresh conversation
    :return: str, the generated content on success
    :raises UserError: when the prompt is too long, the call quota is
        reached, or the service returns an unknown status
    :raises AccessError: when the IAP endpoint cannot be reached
    """
    try:
        IrConfigParameter = request.env['ir.config_parameter'].sudo()
        olg_api_endpoint = IrConfigParameter.get_param('web_editor.olg_api_endpoint', DEFAULT_OLG_ENDPOINT)
        database_id = IrConfigParameter.get_param('database.uuid')
        response = iap_tools.iap_jsonrpc(olg_api_endpoint + "/api/olg/1/chat", params={
            'prompt': prompt,
            'conversation_history': conversation_history or [],
            'database_id': database_id,
        }, timeout=30)
        # Translate the service statuses into user-facing outcomes.
        if response['status'] == 'success':
            return response['content']
        elif response['status'] == 'error_prompt_too_long':
            raise UserError(_("Sorry, your prompt is too long. Try to say it in fewer words."))
        elif response['status'] == 'limit_call_reached':
            raise UserError(_("You have reached the maximum number of requests for this service. Try again later."))
        else:
            raise UserError(_("Sorry, we could not generate a response. Please try again later."))
    except AccessError:
        # Re-raise with a friendlier message for the end user.
        raise AccessError(_("Oops, it looks like our AI is unreachable!"))
|
11
data/editor_assets.xml
Normal file
11
data/editor_assets.xml
Normal file
|
@ -0,0 +1,11 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<odoo>
|
||||
<data>
|
||||
<record id="web_editor.13_0_color_system_support_primary_variables_scss" model="ir.asset">
|
||||
<field name="name">13 0 color system support primary variables SCSS</field>
|
||||
<field name="bundle">web._assets_primary_variables</field>
|
||||
<field name="path">web_editor/static/src/scss/13_0_color_system_support_primary_variables.scss</field>
|
||||
<field name="active" eval="False"/>
|
||||
</record>
|
||||
</data>
|
||||
</odoo>
|
3390
i18n/af.po
Normal file
3390
i18n/af.po
Normal file
File diff suppressed because it is too large
Load Diff
3755
i18n/ar.po
Normal file
3755
i18n/ar.po
Normal file
File diff suppressed because it is too large
Load Diff
3391
i18n/az.po
Normal file
3391
i18n/az.po
Normal file
File diff suppressed because it is too large
Load Diff
3739
i18n/bg.po
Normal file
3739
i18n/bg.po
Normal file
File diff suppressed because it is too large
Load Diff
3391
i18n/bs.po
Normal file
3391
i18n/bs.po
Normal file
File diff suppressed because it is too large
Load Diff
3774
i18n/ca.po
Normal file
3774
i18n/ca.po
Normal file
File diff suppressed because it is too large
Load Diff
3748
i18n/cs.po
Normal file
3748
i18n/cs.po
Normal file
File diff suppressed because it is too large
Load Diff
3758
i18n/da.po
Normal file
3758
i18n/da.po
Normal file
File diff suppressed because it is too large
Load Diff
3779
i18n/de.po
Normal file
3779
i18n/de.po
Normal file
File diff suppressed because it is too large
Load Diff
3391
i18n/el.po
Normal file
3391
i18n/el.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/en_AU.po
Normal file
3388
i18n/en_AU.po
Normal file
File diff suppressed because it is too large
Load Diff
3389
i18n/en_GB.po
Normal file
3389
i18n/en_GB.po
Normal file
File diff suppressed because it is too large
Load Diff
3779
i18n/es.po
Normal file
3779
i18n/es.po
Normal file
File diff suppressed because it is too large
Load Diff
3771
i18n/es_419.po
Normal file
3771
i18n/es_419.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/es_CL.po
Normal file
3388
i18n/es_CL.po
Normal file
File diff suppressed because it is too large
Load Diff
3390
i18n/es_CO.po
Normal file
3390
i18n/es_CO.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/es_CR.po
Normal file
3388
i18n/es_CR.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/es_DO.po
Normal file
3388
i18n/es_DO.po
Normal file
File diff suppressed because it is too large
Load Diff
3389
i18n/es_EC.po
Normal file
3389
i18n/es_EC.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/es_PE.po
Normal file
3388
i18n/es_PE.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/es_VE.po
Normal file
3388
i18n/es_VE.po
Normal file
File diff suppressed because it is too large
Load Diff
3740
i18n/et.po
Normal file
3740
i18n/et.po
Normal file
File diff suppressed because it is too large
Load Diff
3745
i18n/fa.po
Normal file
3745
i18n/fa.po
Normal file
File diff suppressed because it is too large
Load Diff
3761
i18n/fi.po
Normal file
3761
i18n/fi.po
Normal file
File diff suppressed because it is too large
Load Diff
3772
i18n/fr.po
Normal file
3772
i18n/fr.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/fr_CA.po
Normal file
3388
i18n/fr_CA.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/gl.po
Normal file
3388
i18n/gl.po
Normal file
File diff suppressed because it is too large
Load Diff
3393
i18n/gu.po
Normal file
3393
i18n/gu.po
Normal file
File diff suppressed because it is too large
Load Diff
3747
i18n/he.po
Normal file
3747
i18n/he.po
Normal file
File diff suppressed because it is too large
Load Diff
3402
i18n/hr.po
Normal file
3402
i18n/hr.po
Normal file
File diff suppressed because it is too large
Load Diff
3745
i18n/hu.po
Normal file
3745
i18n/hu.po
Normal file
File diff suppressed because it is too large
Load Diff
3723
i18n/hy.po
Normal file
3723
i18n/hy.po
Normal file
File diff suppressed because it is too large
Load Diff
3759
i18n/id.po
Normal file
3759
i18n/id.po
Normal file
File diff suppressed because it is too large
Load Diff
3728
i18n/is.po
Normal file
3728
i18n/is.po
Normal file
File diff suppressed because it is too large
Load Diff
3776
i18n/it.po
Normal file
3776
i18n/it.po
Normal file
File diff suppressed because it is too large
Load Diff
3736
i18n/ja.po
Normal file
3736
i18n/ja.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/ka.po
Normal file
3388
i18n/ka.po
Normal file
File diff suppressed because it is too large
Load Diff
3388
i18n/kab.po
Normal file
3388
i18n/kab.po
Normal file
File diff suppressed because it is too large
Load Diff
3390
i18n/km.po
Normal file
3390
i18n/km.po
Normal file
File diff suppressed because it is too large
Load Diff
3733
i18n/ko.po
Normal file
3733
i18n/ko.po
Normal file
File diff suppressed because it is too large
Load Diff
3386
i18n/lb.po
Normal file
3386
i18n/lb.po
Normal file
File diff suppressed because it is too large
Load Diff
3737
i18n/lt.po
Normal file
3737
i18n/lt.po
Normal file
File diff suppressed because it is too large
Load Diff
3735
i18n/lv.po
Normal file
3735
i18n/lv.po
Normal file
File diff suppressed because it is too large
Load Diff
3393
i18n/mk.po
Normal file
3393
i18n/mk.po
Normal file
File diff suppressed because it is too large
Load Diff
3399
i18n/mn.po
Normal file
3399
i18n/mn.po
Normal file
File diff suppressed because it is too large
Load Diff
3398
i18n/nb.po
Normal file
3398
i18n/nb.po
Normal file
File diff suppressed because it is too large
Load Diff
3783
i18n/nl.po
Normal file
3783
i18n/nl.po
Normal file
File diff suppressed because it is too large
Load Diff
3739
i18n/pl.po
Normal file
3739
i18n/pl.po
Normal file
File diff suppressed because it is too large
Load Diff
3735
i18n/pt.po
Normal file
3735
i18n/pt.po
Normal file
File diff suppressed because it is too large
Load Diff
3764
i18n/pt_BR.po
Normal file
3764
i18n/pt_BR.po
Normal file
File diff suppressed because it is too large
Load Diff
3397
i18n/ro.po
Normal file
3397
i18n/ro.po
Normal file
File diff suppressed because it is too large
Load Diff
3787
i18n/ru.po
Normal file
3787
i18n/ru.po
Normal file
File diff suppressed because it is too large
Load Diff
3728
i18n/sk.po
Normal file
3728
i18n/sk.po
Normal file
File diff suppressed because it is too large
Load Diff
3753
i18n/sl.po
Normal file
3753
i18n/sl.po
Normal file
File diff suppressed because it is too large
Load Diff
3746
i18n/sr.po
Normal file
3746
i18n/sr.po
Normal file
File diff suppressed because it is too large
Load Diff
3392
i18n/sr@latin.po
Normal file
3392
i18n/sr@latin.po
Normal file
File diff suppressed because it is too large
Load Diff
3774
i18n/sv.po
Normal file
3774
i18n/sv.po
Normal file
File diff suppressed because it is too large
Load Diff
3746
i18n/th.po
Normal file
3746
i18n/th.po
Normal file
File diff suppressed because it is too large
Load Diff
3754
i18n/tr.po
Normal file
3754
i18n/tr.po
Normal file
File diff suppressed because it is too large
Load Diff
3765
i18n/uk.po
Normal file
3765
i18n/uk.po
Normal file
File diff suppressed because it is too large
Load Diff
3757
i18n/vi.po
Normal file
3757
i18n/vi.po
Normal file
File diff suppressed because it is too large
Load Diff
3738
i18n/web_editor.pot
Normal file
3738
i18n/web_editor.pot
Normal file
File diff suppressed because it is too large
Load Diff
3734
i18n/zh_CN.po
Normal file
3734
i18n/zh_CN.po
Normal file
File diff suppressed because it is too large
Load Diff
3732
i18n/zh_TW.po
Normal file
3732
i18n/zh_TW.po
Normal file
File diff suppressed because it is too large
Load Diff
15
models/__init__.py
Normal file
15
models/__init__.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from . import ir_attachment
|
||||
from . import ir_qweb
|
||||
from . import ir_qweb_fields
|
||||
from . import ir_ui_view
|
||||
from . import ir_http
|
||||
from . import ir_websocket
|
||||
from . import models
|
||||
from . import html_field_history_mixin
|
||||
|
||||
from . import assets
|
||||
|
||||
from . import test_models
|
BIN
models/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
models/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/assets.cpython-311.pyc
Normal file
BIN
models/__pycache__/assets.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/diff_utils.cpython-311.pyc
Normal file
BIN
models/__pycache__/diff_utils.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/html_field_history_mixin.cpython-311.pyc
Normal file
BIN
models/__pycache__/html_field_history_mixin.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/ir_attachment.cpython-311.pyc
Normal file
BIN
models/__pycache__/ir_attachment.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/ir_http.cpython-311.pyc
Normal file
BIN
models/__pycache__/ir_http.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/ir_qweb.cpython-311.pyc
Normal file
BIN
models/__pycache__/ir_qweb.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/ir_qweb_fields.cpython-311.pyc
Normal file
BIN
models/__pycache__/ir_qweb_fields.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/ir_ui_view.cpython-311.pyc
Normal file
BIN
models/__pycache__/ir_ui_view.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/ir_websocket.cpython-311.pyc
Normal file
BIN
models/__pycache__/ir_websocket.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/models.cpython-311.pyc
Normal file
BIN
models/__pycache__/models.cpython-311.pyc
Normal file
Binary file not shown.
BIN
models/__pycache__/test_models.cpython-311.pyc
Normal file
BIN
models/__pycache__/test_models.cpython-311.pyc
Normal file
Binary file not shown.
237
models/assets.py
Normal file
237
models/assets.py
Normal file
|
@ -0,0 +1,237 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import base64
|
||||
import re
|
||||
|
||||
from odoo import api, models
|
||||
from odoo.tools import misc
|
||||
from odoo.addons.base.models.assetsbundle import EXTENSIONS
|
||||
|
||||
_match_asset_file_url_regex = re.compile(r"^(/_custom/([^/]+))?/(\w+)/([/\w]+\.\w+)$")
|
||||
|
||||
|
||||
class Assets(models.AbstractModel):
|
||||
_name = 'web_editor.assets'
|
||||
_description = 'Assets Utils'
|
||||
|
||||
@api.model
def reset_asset(self, url, bundle):
    """
    Delete the potential customizations made to a given (original) asset.

    Params:
        url (str): the URL of the original asset (scss / js) file

        bundle (str):
            the name of the bundle in which the customizations to delete
            were made
    """
    customized_url = self._make_custom_asset_url(url, bundle)

    # Removing the attachment holding the modified scss/js content and the
    # ir.asset record linking it is enough to restore the original file.
    customized_attachment = self._get_custom_attachment(customized_url)
    customized_attachment.unlink()
    customized_asset = self._get_custom_asset(customized_url)
    customized_asset.unlink()
|
||||
|
||||
@api.model
def save_asset(self, url, bundle, content, file_type):
    """
    Customize the content of a given asset (scss / js).

    Params:
        url (src):
            the URL of the original asset to customize (whether or not the
            asset was already customized)

        bundle (src):
            the name of the bundle in which the customizations will take
            effect

        content (src): the new content of the asset (scss / js)

        file_type (src):
            either 'scss' or 'js' according to the file being customized
    """
    custom_url = self._make_custom_asset_url(url, bundle)
    # Attachments store their payload base64-encoded; an empty content is
    # saved as a single newline.
    datas = base64.b64encode((content or "\n").encode("utf-8"))

    # Check if the file to save had already been modified
    custom_attachment = self._get_custom_attachment(custom_url)
    if custom_attachment:
        # If it was already modified, simply override the corresponding
        # attachment content
        custom_attachment.write({"datas": datas})
        # Invalidate compiled bundles so the new content is picked up.
        self.env.registry.clear_cache('assets')
    else:
        # If not, create a new attachment to copy the original scss/js file
        # content, with its modifications
        new_attach = {
            'name': url.split("/")[-1],
            'type': "binary",
            'mimetype': (file_type == 'js' and 'text/javascript' or 'text/scss'),
            'datas': datas,
            'url': custom_url,
            **self._save_asset_attachment_hook(),
        }
        self.env["ir.attachment"].create(new_attach)

        # Create an asset with the new attachment
        IrAsset = self.env['ir.asset']
        new_asset = {
            'path': custom_url,
            'target': url,
            'directive': 'replace',
            **self._save_asset_hook(),
        }
        # If the target is itself a customized asset, inherit its bundle
        # placement; otherwise resolve the bundle from the original url.
        target_asset = self._get_custom_asset(url)
        if target_asset:
            new_asset['name'] = target_asset.name + ' override'
            new_asset['bundle'] = target_asset.bundle
            new_asset['sequence'] = target_asset.sequence
        else:
            new_asset['name'] = '%s: replace %s' % (bundle, custom_url.split('/')[-1])
            new_asset['bundle'] = IrAsset._get_related_bundle(url, bundle)
        IrAsset.create(new_asset)
|
||||
|
||||
|
||||
@api.model
def _get_content_from_url(self, url, url_info=None, custom_attachments=None):
    """
    Fetch the content of an asset (scss / js) file. That content is either
    the one of the related file on the disk or the one of the corresponding
    custom ir.attachment record.

    Params:
        url (str): the URL of the asset (scss / js) file/ir.attachment

        url_info (dict, optional):
            the related url info (see _get_data_from_url) (allows to optimize
            some code which already have the info and do not want this
            function to re-get it)

        custom_attachments (ir.attachment(), optional):
            the related custom ir.attachment records the function might need
            to search into (allows to optimize some code which already have
            that info and do not want this function to re-get it)

    Returns:
        utf-8 encoded content of the asset (scss / js), or False when a
        customized asset has no matching attachment
    """
    if url_info is None:
        url_info = self._get_data_from_url(url)

    if url_info["customized"]:
        # If the file is already customized, the content is found in the
        # corresponding attachment
        attachment = None
        if custom_attachments is None:
            attachment = self._get_custom_attachment(url)
        else:
            attachment = custom_attachments.filtered(lambda r: r.url == url)
        return attachment and base64.b64decode(attachment.datas) or False

    # If the file is not yet customized, the content is found by reading
    # the local file
    with misc.file_open(url.strip('/'), 'rb', filter_ext=EXTENSIONS) as f:
        return f.read()
|
||||
|
||||
@api.model
def _get_data_from_url(self, url):
    """
    Return information about an asset (scss / js) file/ir.attachment just by
    looking at its URL.

    Params:
        url (str): the url of the asset (scss / js) file/ir.attachment

    Returns:
        dict:
            module (str): the original asset's related app

            resource_path (str):
                the relative path to the original asset from the related app

            customized (bool): whether the asset is a customized one or not

            bundle (str):
                the name of the bundle the asset customizes (False if this
                is not a customized asset)

        or False when the URL does not match the asset URL pattern.
    """
    match = _match_asset_file_url_regex.match(url)
    if match is None:
        return False
    customized_marker, bundle, module, resource_path = (
        match.group(1), match.group(2), match.group(3), match.group(4)
    )
    return {
        'module': module,
        'resource_path': resource_path,
        'customized': bool(customized_marker),
        'bundle': bundle or False,
    }
|
||||
|
||||
@api.model
def _make_custom_asset_url(self, url, bundle_xmlid):
    """
    Return the customized version of an asset URL, that is the URL the asset
    would have if it was customized.

    Params:
        url (str): the original asset's url
        bundle_xmlid (str): the name of the bundle the asset would customize

    Returns:
        str: the URL the given asset would have if it was customized in the
        given bundle
    """
    # Customized assets live under the /_custom/<bundle> prefix.
    return "".join(("/_custom/", bundle_xmlid, url))
|
||||
|
||||
@api.model
def _get_custom_attachment(self, custom_url, op='='):
    """
    Fetch the ir.attachment record related to the given customized asset.

    Params:
        custom_url (str): the URL of the customized asset
        op (str, default: '='): the operator to use to search the records

    Returns:
        ir.attachment()
    """
    # Only equality and membership lookups make sense on the url field.
    assert op in {'in', '='}, 'Invalid operator'
    domain = [("url", op, custom_url)]
    return self.env["ir.attachment"].search(domain)
|
||||
|
||||
@api.model
def _get_custom_asset(self, custom_url):
    """
    Fetch the ir.asset record related to the given customized asset (the
    inheriting view which replace the original asset by the customized one).

    Params:
        custom_url (str): the URL of the customized asset

    Returns:
        ir.asset()
    """
    # ir.asset paths are stored without a leading path separator.
    if custom_url.startswith(('/', '\\')):
        path = custom_url[1:]
    else:
        path = custom_url
    return self.env['ir.asset'].search([('path', 'like', path)])
|
||||
|
||||
@api.model
def _save_asset_attachment_hook(self):
    """
    Returns the additional values to use to write the DB on customized
    ir.attachment creation.

    Hook point: meant to be overridden by other modules; the base
    implementation adds nothing.

    Returns:
        dict
    """
    return dict()
|
||||
|
||||
@api.model
def _save_asset_hook(self):
    """
    Returns the additional values to use to write the DB on customized
    ir.asset creation.

    Hook point: meant to be overridden by other modules; the base
    implementation adds nothing.

    Returns:
        dict
    """
    return dict()
|
276
models/diff_utils.py
Normal file
276
models/diff_utils.py
Normal file
|
@ -0,0 +1,276 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import re
|
||||
|
||||
from difflib import SequenceMatcher
|
||||
|
||||
|
||||
# ------------------------------------------------------------
|
||||
# Patch and comparison functions
|
||||
# ------------------------------------------------------------
|
||||
|
||||
|
||||
# Separator between two serialized patch operations.
OPERATION_SEPARATOR = "\n"
# The diffed contents are HTML: splitting on "<" yields one "line" per tag,
# which is the unit of diffing used throughout this module.
LINE_SEPARATOR = "<"

# Marker between the operation type and the line index range ("+@4,15:...").
PATCH_OPERATION_LINE_AT = "@"
# Marker between the index range and the payload of an operation.
PATCH_OPERATION_CONTENT = ":"

# Operation type codes used in serialized patch operations.
PATCH_OPERATION_ADD = "+"
PATCH_OPERATION_REMOVE = "-"
PATCH_OPERATION_REPLACE = "R"

# Map from difflib opcode tags to the operation codes above.
PATCH_OPERATIONS = dict(
    insert=PATCH_OPERATION_ADD,
    delete=PATCH_OPERATION_REMOVE,
    replace=PATCH_OPERATION_REPLACE,
)

# Attributes stripped from contents before diffing/patching (they carry
# editor-internal state that should not be versioned).
HTML_ATTRIBUTES_TO_REMOVE = [
    "data-last-history-steps",
]
|
||||
|
||||
|
||||
def apply_patch(initial_content, patch):
    """Apply a patch (multiple operations) on a content.
    Each operation is a string with the following format:
    <operation_type>@<start_index>[,<end_index>][:<patch_text>*]
    patch format example:
        +@4:<p>ab</p><p>cd</p>
        +@4,15:<p>ef</p><p>gh</p>
        -@32
        -@125,129
        R@523:<b>sdf</b>

    :param string initial_content: the initial content to patch
    :param string patch: the patch to apply

    :return: string: the patched content
    """
    if patch == "":
        return initial_content

    # Replace break line in initial content to ensure they don't interfere with
    # operations
    initial_content = initial_content.replace("\n", "")
    initial_content = _remove_html_attribute(
        initial_content, HTML_ATTRIBUTES_TO_REMOVE
    )

    content = initial_content.split(LINE_SEPARATOR)
    patch_operations = patch.split(OPERATION_SEPARATOR)
    # Apply operations in reverse order to preserve the indexes integrity.
    patch_operations.reverse()

    for operation in patch_operations:
        # An operation splits on LINE_SEPARATOR into its metadata
        # ("<type>@<range>[:...]") followed by the payload lines to insert.
        metadata, *patch_content_line = operation.split(LINE_SEPARATOR)

        metadata_split = metadata.split(PATCH_OPERATION_LINE_AT)
        operation_type = metadata_split[0]
        lines_index_range = metadata_split[1] if len(metadata_split) > 1 else ""
        # We need to remove PATCH_OPERATION_CONTENT char from lines_index_range.
        lines_index_range = lines_index_range.split(PATCH_OPERATION_CONTENT)[0]
        indexes = lines_index_range.split(",")
        start_index = int(indexes[0])
        end_index = int(indexes[1]) if len(indexes) > 1 else start_index

        # We need to insert lines from last to the first
        # to preserve the indexes integrity.
        patch_content_line.reverse()

        if end_index > start_index:
            # Multi-line range: drop every line after start_index first.
            for index in range(end_index, start_index, -1):
                if operation_type in [
                    PATCH_OPERATION_REMOVE,
                    PATCH_OPERATION_REPLACE,
                ]:
                    del content[index]

        if operation_type in [PATCH_OPERATION_ADD, PATCH_OPERATION_REPLACE]:
            for line in patch_content_line:
                content.insert(start_index + 1, line)
        if operation_type in [PATCH_OPERATION_REMOVE, PATCH_OPERATION_REPLACE]:
            del content[start_index]

    return LINE_SEPARATOR.join(content)
|
||||
|
||||
|
||||
# Splits an HTML "line" into its opening tag (group 1) and the trailing text
# (group 2).
HTML_TAG_ISOLATION_REGEX = r"^([^>]*>)(.*)$"
# Wrap the text part of a line in <added>...</added>, keeping the tag.
ADDITION_COMPARISON_REGEX = r"\1<added>\2</added>"
# Same, but drops the opening tag: the leading "<" lost when splitting on
# LINE_SEPARATOR is restored on join, yielding "<added>..." without
# duplicating the replaced tag.
ADDITION_1ST_REPLACE_COMPARISON_REGEX = r"added>\2</added>"
# Wrap the text part of a line in <removed>...</removed>, keeping the tag.
DELETION_COMPARISON_REGEX = r"\1<removed>\2</removed>"
# Matches an <added>/<removed> pair with no text between, so empty markers
# can be stripped from the final comparison.
EMPTY_OPERATION_TAG = r"<(added|removed)><\/(added|removed)>"
|
||||
|
||||
|
||||
def generate_comparison(new_content, old_content):
    """Compare a content to an older content
    and generate a comparison html between both content.

    Lines removed relative to ``old_content`` are wrapped in <removed> tags,
    lines added are wrapped in <added> tags.

    :param string new_content: the current content
    :param string old_content: the old content

    :return: string: the comparison content
    """
    new_content = _remove_html_attribute(new_content, HTML_ATTRIBUTES_TO_REMOVE)
    old_content = _remove_html_attribute(old_content, HTML_ATTRIBUTES_TO_REMOVE)

    if new_content == old_content:
        return new_content

    patch = generate_patch(new_content, old_content)
    comparison = new_content.split(LINE_SEPARATOR)
    patch_operations = patch.split(OPERATION_SEPARATOR)
    # We need to apply operation from last to the first
    # to preserve the indexes integrity.
    patch_operations.reverse()

    for operation in patch_operations:
        # An operation splits on LINE_SEPARATOR into its metadata
        # ("<type>@<range>[:...]") followed by the payload lines.
        metadata, *patch_content_line = operation.split(LINE_SEPARATOR)

        metadata_split = metadata.split(PATCH_OPERATION_LINE_AT)
        operation_type = metadata_split[0]
        lines_index_range = metadata_split[1] if len(metadata_split) > 1 else ""
        # We need to remove PATCH_OPERATION_CONTENT char from lines_index_range.
        lines_index_range = lines_index_range.split(PATCH_OPERATION_CONTENT)[0]
        indexes = lines_index_range.split(",")
        start_index = int(indexes[0])
        end_index = int(indexes[1]) if len(indexes) > 1 else start_index

        # We need to insert lines from last to the first
        # to preserve the indexes integrity.
        patch_content_line.reverse()

        if end_index > start_index:
            # Mark every extra line of a multi-line removal/replacement.
            for index in range(end_index, start_index, -1):
                if operation_type in [
                    PATCH_OPERATION_REMOVE,
                    PATCH_OPERATION_REPLACE,
                ]:
                    comparison[index] = re.sub(
                        HTML_TAG_ISOLATION_REGEX,
                        DELETION_COMPARISON_REGEX,
                        comparison[index],
                    )

        if operation_type == PATCH_OPERATION_ADD:
            for line in patch_content_line:
                comparison.insert(
                    start_index + 1,
                    re.sub(
                        HTML_TAG_ISOLATION_REGEX,
                        ADDITION_COMPARISON_REGEX,
                        line,
                    ),
                )

        if operation_type == PATCH_OPERATION_REPLACE:
            for i, line in enumerate(patch_content_line):
                # We need to remove the first tag of a replace operation
                # to avoid having a duplicate opening tag in the middle of a
                # line.
                replace_regex = (
                    ADDITION_1ST_REPLACE_COMPARISON_REGEX
                    if i == len(patch_content_line) - 1
                    else ADDITION_COMPARISON_REGEX
                )
                comparison.insert(
                    start_index + 1,
                    re.sub(HTML_TAG_ISOLATION_REGEX, replace_regex, line),
                )

        if operation_type in [PATCH_OPERATION_REMOVE, PATCH_OPERATION_REPLACE]:
            comparison[start_index] = re.sub(
                HTML_TAG_ISOLATION_REGEX,
                DELETION_COMPARISON_REGEX,
                comparison[start_index],
            )

    # Drop markers that ended up wrapping no text at all.
    comparison = [re.sub(EMPTY_OPERATION_TAG, "", line) for line in comparison]
    return LINE_SEPARATOR.join(comparison)
|
||||
|
||||
|
||||
def _format_line_index(start, end):
    """Format the line index to be used in a patch operation.

    Produces either "@<index>" for a single-line span or
    "@<start>,<end>" for a multi-line span.

    :param start: the start index
    :param end: the end index
    :return: string
    """
    span = end - start
    if span == 0:
        # Empty span: the operation anchors on the previous line.
        start -= 1
    if span <= 1:
        return f"{PATCH_OPERATION_LINE_AT}{start}"
    return f"{PATCH_OPERATION_LINE_AT}{start},{start + span - 1}"
|
||||
|
||||
|
||||
def _patch_generator(new_content, old_content):
    """Generate a patch (multiple operations) between two contents.
    Each operation is a string with the following format:
    <operation_type>@<start_index>[,<end_index>][:<patch_text>*]
    patch format example:
        +@4:<p>ab</p><p>cd</p>
        +@4,15:<p>ef</p><p>gh</p>
        -@32
        -@125,129
        R@523:<b>sdf</b>

    :param string new_content: the new content
    :param string old_content: the old content

    :return: string: the patch containing all the operations to reverse
        the new content to the old content
    """
    # remove break line in contents to ensure they don't interfere with
    # operations
    new_content = new_content.replace("\n", "")
    old_content = old_content.replace("\n", "")

    new_content_lines = new_content.split(LINE_SEPARATOR)
    old_content_lines = old_content.split(LINE_SEPARATOR)

    # get_grouped_opcodes(0) yields groups of difflib opcodes with no context
    # lines around the changes.
    for group in SequenceMatcher(
        None, new_content_lines, old_content_lines, False
    ).get_grouped_opcodes(0):
        patch_content_line = []
        first, last = group[0], group[-1]
        patch_operation = _format_line_index(first[1], last[2])

        # Prefix the index range with the operation code(s) for this group.
        if any(tag in {"replace", "delete"} for tag, _, _, _, _ in group):
            for tag, _, _, _, _ in group:
                if tag not in {"insert", "equal", "replace"}:
                    patch_operation = PATCH_OPERATIONS[tag] + patch_operation

        if any(tag in {"replace", "insert"} for tag, _, _, _, _ in group):
            for tag, _, _, j1, j2 in group:
                if tag not in {"delete", "equal"}:
                    patch_operation = PATCH_OPERATIONS[tag] + patch_operation
                    # Payload: the old lines that must be restored.
                    for line in old_content_lines[j1:j2]:
                        patch_content_line.append(line)

        if patch_content_line:
            patch_content = LINE_SEPARATOR + LINE_SEPARATOR.join(
                patch_content_line
            )
            yield str(patch_operation) + PATCH_OPERATION_CONTENT + patch_content
        else:
            yield str(patch_operation)
|
||||
|
||||
|
||||
def generate_patch(new_content, old_content):
    """Generate a patch able to transform ``new_content`` back into
    ``old_content`` (see ``_patch_generator`` for the operation format).

    :param string new_content: the new content
    :param string old_content: the old content

    :return: string: the newline-separated patch operations
    """
    new_content = _remove_html_attribute(new_content, HTML_ATTRIBUTES_TO_REMOVE)
    old_content = _remove_html_attribute(old_content, HTML_ATTRIBUTES_TO_REMOVE)

    # str.join consumes any iterable: no need to materialize the generator
    # into a list first.
    return OPERATION_SEPARATOR.join(
        _patch_generator(new_content, old_content)
    )
|
||||
|
||||
|
||||
def _remove_html_attribute(html_content, attributes_to_remove):
|
||||
for attribute in attributes_to_remove:
|
||||
html_content = re.sub(
|
||||
r' {}="[^"]*"'.format(attribute), "", html_content
|
||||
)
|
||||
|
||||
return html_content
|
140
models/html_field_history_mixin.py
Normal file
140
models/html_field_history_mixin.py
Normal file
|
@ -0,0 +1,140 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import api, fields, models
|
||||
from odoo.exceptions import ValidationError
|
||||
|
||||
from .diff_utils import apply_patch, generate_comparison, generate_patch
|
||||
|
||||
|
||||
class HtmlFieldHistory(models.AbstractModel):
    _name = "html.field.history.mixin"
    _description = "Field html History"
    # Maximum number of revisions kept per versioned field.
    _html_field_history_size_limit = 300

    # Per-field list of revisions, newest first; each revision holds the
    # reverse patch plus creation metadata (see write()).
    html_field_history = fields.Json("History data", prefetch=False)

    # Same structure as html_field_history but with the "patch" payloads
    # stripped, cheap enough to send to the client.
    html_field_history_metadata = fields.Json(
        "History metadata", compute="_compute_metadata"
    )

    @api.model
    def _get_versioned_fields(self):
        """This method should be overridden

        :return: List[string]: A list of name of the fields to be versioned
        """
        return []

    @api.depends("html_field_history")
    def _compute_metadata(self):
        """Expose the revision metadata (everything except the patch)."""
        for rec in self:
            history_metadata = None
            if rec.html_field_history:
                history_metadata = {}
                for field_name in rec.html_field_history:
                    history_metadata[field_name] = []
                    for revision in rec.html_field_history[field_name]:
                        # Copy so popping "patch" does not mutate the stored
                        # revision.
                        metadata = revision.copy()
                        metadata.pop("patch")
                        history_metadata[field_name].append(metadata)
            rec.html_field_history_metadata = history_metadata

    def write(self, vals):
        """Write, and record a reverse patch for each versioned field that
        actually changed.

        Raises ValidationError when a written versioned field is not
        declared sanitize=True (the diff must run on sanitized data).
        """
        new_revisions = False
        db_contents = None
        versioned_fields = self._get_versioned_fields()
        vals_contain_versioned_fields = set(vals).intersection(versioned_fields)

        if vals_contain_versioned_fields:
            # Versioning is only supported record-by-record.
            self.ensure_one()
            # Snapshot the current contents before they are overwritten.
            db_contents = dict([(f, self[f]) for f in versioned_fields])
            fields_data = self.env[self._name]._fields

            if any(f in vals and not fields_data[f].sanitize for f in versioned_fields):
                raise ValidationError(
                    "Ensure all versioned fields ( %s ) in model %s are declared as sanitize=True"
                    % (str(versioned_fields), self._name)
                )

        # Call super().write before generating the patch to be sure we perform
        # the diff on sanitized data
        write_result = super().write(vals)

        if not vals_contain_versioned_fields:
            return write_result

        history_revs = self.html_field_history or {}

        for field in versioned_fields:
            new_content = self[field] or ""

            if field not in history_revs:
                history_revs[field] = []

            old_content = db_contents[field] or ""
            if new_content != old_content:
                new_revisions = True
                # Patch transforming the new content back into the old one.
                patch = generate_patch(new_content, old_content)
                revision_id = (
                    (history_revs[field][0]["revision_id"] + 1)
                    if history_revs[field]
                    else 1
                )

                # Newest revision first.
                history_revs[field].insert(
                    0,
                    {
                        "patch": patch,
                        "revision_id": revision_id,
                        "create_date": self.env.cr.now().isoformat(),
                        "create_uid": self.env.uid,
                        "create_user_name": self.env.user.name,
                    },
                )
                # Cap the history to the configured size limit.
                limit = self._html_field_history_size_limit
                history_revs[field] = history_revs[field][:limit]
        # Call super().write again to include the new revision
        if new_revisions:
            extra_vals = {"html_field_history": history_revs}
            write_result = super().write(extra_vals) and write_result
        return write_result

    def html_field_history_get_content_at_revision(self, field_name, revision_id):
        """Get the requested field content restored at the revision_id.

        :param str field_name: the name of the field
        :param int revision_id: id of the last revision to restore

        :return: string: the restored content
        """
        self.ensure_one()

        # Patches are reverse diffs: applying them from the newest down to
        # the requested revision walks the content back in time.
        revisions = [
            i
            for i in self.html_field_history[field_name]
            if i["revision_id"] >= revision_id
        ]

        content = self[field_name]
        for revision in revisions:
            content = apply_patch(content, revision["patch"])

        return content

    def html_field_history_get_comparison_at_revision(self, field_name, revision_id):
        """For the requested field,
        Get a comparison between the current content of the field and the
        content restored at the requested revision_id.

        :param str field_name: the name of the field
        :param int revision_id: id of the last revision to compare

        :return: string: the comparison
        """
        self.ensure_one()
        restored_content = self.html_field_history_get_content_at_revision(
            field_name, revision_id
        )

        return generate_comparison(self[field_name], restored_content)
|
86
models/ir_attachment.py
Normal file
86
models/ir_attachment.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from werkzeug.urls import url_quote
|
||||
|
||||
from odoo import api, models, fields, tools
|
||||
|
||||
# Image mimetypes for which an image_src is computed (see
# IrAttachment._compute_image_src), mapped to their file extension.
SUPPORTED_IMAGE_MIMETYPES = {
    'image/gif': '.gif',
    'image/jpe': '.jpe',
    'image/jpeg': '.jpeg',
    'image/jpg': '.jpg',
    'image/png': '.png',
    'image/svg+xml': '.svg',
    'image/webp': '.webp',
}
|
||||
|
||||
|
||||
class IrAttachment(models.Model):

    _inherit = "ir.attachment"

    # URL from which the attachment can be fetched: the stored url if any,
    # otherwise a /web/image route.
    local_url = fields.Char("Attachment URL", compute='_compute_local_url')
    # URL usable as an <img src>; False for non-image attachments.
    image_src = fields.Char(compute='_compute_image_src')
    image_width = fields.Integer(compute='_compute_image_size')
    image_height = fields.Integer(compute='_compute_image_size')
    original_id = fields.Many2one('ir.attachment', string="Original (unoptimized, unresized) attachment")

    def _compute_local_url(self):
        for attachment in self:
            if attachment.url:
                attachment.local_url = attachment.url
            else:
                # "unique" acts as a cache-buster tied to the content.
                attachment.local_url = '/web/image/%s?unique=%s' % (attachment.id, attachment.checksum)

    @api.depends('mimetype', 'url', 'name')
    def _compute_image_src(self):
        for attachment in self:
            # Only add a src for supported images
            if attachment.mimetype not in SUPPORTED_IMAGE_MIMETYPES:
                attachment.image_src = False
                continue

            if attachment.type == 'url':
                if attachment.url.startswith('/'):
                    # Local URL
                    attachment.image_src = attachment.url
                else:
                    name = url_quote(attachment.name)
                    attachment.image_src = '/web/image/%s-redirect/%s' % (attachment.id, name)
            else:
                # Adding unique in URLs for cache-control
                unique = attachment.checksum[:8]
                if attachment.url:
                    # For attachments-by-url, unique is used as a cachebuster. They
                    # currently do not leverage max-age headers.
                    separator = '&' if '?' in attachment.url else '?'
                    attachment.image_src = '%s%sunique=%s' % (attachment.url, separator, unique)
                else:
                    name = url_quote(attachment.name)
                    attachment.image_src = '/web/image/%s-%s/%s' % (attachment.id, unique, name)

    @api.depends('datas')
    def _compute_image_size(self):
        for attachment in self:
            try:
                image = tools.base64_to_image(attachment.datas)
                attachment.image_width = image.width
                attachment.image_height = image.height
            except Exception:
                # Best effort: non-image or unreadable data simply yields a
                # 0x0 size instead of breaking the compute.
                attachment.image_width = 0
                attachment.image_height = 0

    def _get_media_info(self):
        """Return a dict with the values that we need on the media dialog."""
        self.ensure_one()
        return self._read_format(['id', 'name', 'description', 'mimetype', 'checksum', 'url', 'type', 'res_id', 'res_model', 'public', 'access_token', 'image_src', 'image_width', 'image_height', 'original_id'])[0]

    def _can_bypass_rights_on_media_dialog(self, **attachment_data):
        """ This method is meant to be overridden, for instance to allow to
        create image attachment despite the user not allowed to create
        attachment, eg:
        - Portal user uploading an image on the forum (bypass acl)
        - Non admin user uploading an unsplash image (bypass binary/url check)
        """
        return False
|
31
models/ir_http.py
Normal file
31
models/ir_http.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models
|
||||
from odoo.http import request
|
||||
|
||||
|
||||
# Query-string flags the web editor recognizes and copies into the request
# context (see IrHttp._get_web_editor_context).
CONTEXT_KEYS = ['editable', 'edit_translations', 'translatable']
|
||||
|
||||
|
||||
class IrHttp(models.AbstractModel):
    _inherit = 'ir.http'

    @classmethod
    def _get_web_editor_context(cls):
        """Extract the editor flags (?editable and co) from the query-string.

        Only keys listed in CONTEXT_KEYS are considered, and a key already
        present in the environment context is left untouched.
        """
        args = request.httprequest.args
        env_context = request.env.context
        flags = {}
        for key in CONTEXT_KEYS:
            if key in args and key not in env_context:
                flags[key] = True
        return flags

    @classmethod
    def _pre_dispatch(cls, rule, args):
        """Propagate the editor flags into the request context before
        dispatching."""
        super()._pre_dispatch(rule, args)
        request.update_context(**cls._get_web_editor_context())

    @classmethod
    def _get_translation_frontend_modules_name(cls):
        """Expose web_editor translations to the frontend."""
        modules = super()._get_translation_frontend_modules_name()
        return modules + ['web_editor']
|
15
models/ir_qweb.py
Normal file
15
models/ir_qweb.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from odoo import models
|
||||
|
||||
class IrQWeb(models.AbstractModel):
    _inherit = "ir.qweb"

    def _get_bundles_to_pregenarate(self):
        """Also pre-generate the wysiwyg editor asset bundles (js and css)."""
        js_assets, css_assets = super()._get_bundles_to_pregenarate()
        editor_bundles = {
            'web_editor.assets_legacy_wysiwyg',
            'web_editor.backend_assets_wysiwyg',
            'web_editor.assets_wysiwyg',
            'web_editor.wysiwyg_iframe_editor_assets',
        }
        return (js_assets | editor_bundles, css_assets | editor_bundles)
|
676
models/ir_qweb_fields.py
Normal file
676
models/ir_qweb_fields.py
Normal file
|
@ -0,0 +1,676 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
"""
|
||||
Web_editor-context rendering needs to add some metadata to rendered and allow to edit fields,
|
||||
as well as render a few fields differently.
|
||||
|
||||
Also, adds methods to convert values back to Odoo models.
|
||||
"""
|
||||
|
||||
import babel
|
||||
import base64
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import pytz
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from lxml import etree, html
|
||||
from PIL import Image as I
|
||||
from werkzeug import urls
|
||||
|
||||
import odoo.modules
|
||||
|
||||
from odoo import _, api, models, fields
|
||||
from odoo.exceptions import UserError, ValidationError
|
||||
from odoo.tools import ustr, posix_to_ldml, pycompat
|
||||
from odoo.tools import html_escape as escape
|
||||
from odoo.tools.misc import file_open, get_lang, babel_locale_parse
|
||||
|
||||
REMOTE_CONNECTION_TIMEOUT = 2.5
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IrQWeb(models.AbstractModel):
    """ IrQWeb object for rendering editor stuff
    """
    _inherit = 'ir.qweb'

    def _compile_node(self, el, compile_context, indent):
        # Tag the root node of a snippet template with its snippet metadata
        # so the editor can identify it once dropped in a page.
        snippet_key = compile_context.get('snippet-key')
        if snippet_key == compile_context['template'] \
                or compile_context.get('snippet-sub-call-key') == compile_context['template']:
            # Get the path of element to only consider the first node of the
            # snippet template content (ignoring all ancestors t elements which
            # are not t-call ones)
            nb_real_elements_in_hierarchy = 0
            node = el
            while node is not None and nb_real_elements_in_hierarchy < 2:
                if node.tag != 't' or 't-call' in node.attrib:
                    nb_real_elements_in_hierarchy += 1
                node = node.getparent()
            if nb_real_elements_in_hierarchy == 1:
                # The first node might be a call to a sub template
                sub_call = el.get('t-call')
                if sub_call:
                    el.set('t-options', f"{{'snippet-key': '{snippet_key}', 'snippet-sub-call-key': '{sub_call}'}}")
                # If it already has a data-snippet it is a saved or an inherited snippet.
                # Do not override it.
                elif 'data-snippet' not in el.attrib:
                    el.attrib['data-snippet'] = snippet_key.split('.', 1)[-1]

        return super()._compile_node(el, compile_context, indent)

    # compile directives

    def _compile_directive_snippet(self, el, compile_context, indent):
        # Render a <t t-snippet="..."> as a t-call wrapped in a div carrying
        # the snippet metadata (name, thumbnail, view id, keywords).
        key = el.attrib.pop('t-snippet')
        el.set('t-call', key)
        snippet_lang = self._context.get('snippet_lang')
        if snippet_lang:
            el.set('t-lang', f"'{snippet_lang}'")

        el.set('t-options', f"{{'snippet-key': {key!r}}}")
        view = self.env['ir.ui.view']._get(key).sudo()
        name = el.attrib.pop('string', view.name)
        thumbnail = el.attrib.pop('t-thumbnail', "oe-thumbnail")
        # Forbid sanitize contains the specific reason:
        # - "true": always forbid
        # - "form": forbid if forms are sanitized
        forbid_sanitize = el.attrib.pop('t-forbid-sanitize', None)
        div = '<div name="%s" data-oe-type="snippet" data-oe-thumbnail="%s" data-oe-snippet-id="%s" data-oe-keywords="%s" %s>' % (
            escape(pycompat.to_text(name)),
            escape(pycompat.to_text(thumbnail)),
            escape(pycompat.to_text(view.id)),
            escape(pycompat.to_text(el.findtext('keywords'))),
            f'data-oe-forbid-sanitize="{forbid_sanitize}"' if forbid_sanitize else '',
        )
        self._append_text(div, compile_context)
        code = self._compile_node(el, compile_context, indent)
        self._append_text('</div>', compile_context)
        return code

    def _compile_directive_snippet_call(self, el, compile_context, indent):
        # Like t-snippet but without the wrapping metadata div.
        key = el.attrib.pop('t-snippet-call')
        el.set('t-call', key)
        el.set('t-options', f"{{'snippet-key': {key!r}}}")
        return self._compile_node(el, compile_context, indent)

    def _compile_directive_install(self, el, compile_context, indent):
        # Render a snippet placeholder advertising an installable module.
        # Only emitted for system administrators and while the module is not
        # installed yet.
        key = el.attrib.pop('t-install')
        thumbnail = el.attrib.pop('t-thumbnail', 'oe-thumbnail')
        if self.user_has_groups('base.group_system'):
            module = self.env['ir.module.module'].search([('name', '=', key)])
            if not module or module.state == 'installed':
                return []
            name = el.attrib.get('string') or 'Snippet'
            div = '<div name="%s" data-oe-type="snippet" data-module-id="%s" data-oe-thumbnail="%s"><section/></div>' % (
                escape(pycompat.to_text(name)),
                module.id,
                escape(pycompat.to_text(thumbnail))
            )
            self._append_text(div, compile_context)
        return []

    def _compile_directive_placeholder(self, el, compile_context, indent):
        # t-placeholder is just a dynamic alias for the "placeholder"
        # attribute.
        el.set('t-att-placeholder', el.attrib.pop('t-placeholder'))
        return []

    # order and ignore

    def _directives_eval_order(self):
        directives = super()._directives_eval_order()
        # Insert before "att" as those may rely on static attributes like
        # "string" and "att" clears all of those
        index = directives.index('att') - 1
        directives.insert(index, 'placeholder')
        directives.insert(index, 'snippet')
        directives.insert(index, 'snippet-call')
        directives.insert(index, 'install')
        return directives

    def _get_template_cache_keys(self):
        # snippet_lang affects the compiled output (see
        # _compile_directive_snippet), so it must be part of the cache key.
        return super()._get_template_cache_keys() + ['snippet_lang']
|
||||
|
||||
|
||||
#------------------------------------------------------
|
||||
# QWeb fields
|
||||
#------------------------------------------------------
|
||||
|
||||
|
||||
class Field(models.AbstractModel):
    _name = 'ir.qweb.field'
    _description = 'Qweb Field'
    _inherit = 'ir.qweb.field'

    @api.model
    def attributes(self, record, field_name, options, values):
        """Add editor attributes: the field placeholder and, for translatable
        char/text fields, the translation state of the current language."""
        attrs = super(Field, self).attributes(record, field_name, options, values)
        field = record._fields[field_name]

        placeholder = options.get('placeholder') or getattr(field, 'placeholder', None)
        if placeholder:
            attrs['placeholder'] = placeholder

        if options['translate'] and field.type in ('char', 'text'):
            lang = record.env.lang or 'en_US'
            base_lang = record._get_base_lang()
            if lang == base_lang:
                # The base language is considered translated by definition.
                attrs['data-oe-translation-state'] = 'translated'
            else:
                # A value still equal to the base-language value has not been
                # translated yet.
                base_value = record.with_context(lang=base_lang)[field_name]
                value = record[field_name]
                attrs['data-oe-translation-state'] = 'translated' if base_value != value else 'to_translate'

        return attrs

    def value_from_string(self, value):
        # Default conversion from the edited text; overridden by typed
        # subclasses.
        return value

    @api.model
    def from_html(self, model, field, element):
        """Convert the edited HTML element back into a field value."""
        return self.value_from_string(element.text_content().strip())
|
||||
|
||||
|
||||
class Integer(models.AbstractModel):
    _name = 'ir.qweb.field.integer'
    _description = 'Qweb Field Integer'
    _inherit = 'ir.qweb.field.integer'

    @api.model
    def from_html(self, model, field, element):
        """Parse an edited integer back from its localized HTML text."""
        user_lang = self.user_lang()
        raw = element.text_content().strip()
        # Strip the locale's thousands separator before converting.
        return int(raw.replace(user_lang.thousands_sep or '', ''))
|
||||
|
||||
|
||||
class Float(models.AbstractModel):
    _name = 'ir.qweb.field.float'
    _description = 'Qweb Field Float'
    _inherit = 'ir.qweb.field.float'

    @api.model
    def from_html(self, model, field, element):
        """Parse an edited float back from its localized HTML text."""
        user_lang = self.user_lang()
        raw = element.text_content().strip()
        # Strip the thousands separator, then normalize the decimal point.
        normalized = raw.replace(user_lang.thousands_sep or '', '')
        normalized = normalized.replace(user_lang.decimal_point, '.')
        return float(normalized)
|
||||
|
||||
|
||||
class ManyToOne(models.AbstractModel):
    _name = 'ir.qweb.field.many2one'
    _description = 'Qweb Field Many to One'
    _inherit = 'ir.qweb.field.many2one'

    @api.model
    def attributes(self, record, field_name, options, values):
        """Add editor branding attributes (comodel and id) on a rendered
        many2one field."""
        attrs = super(ManyToOne, self).attributes(record, field_name, options, values)
        if options.get('inherit_branding'):
            many2one = record[field_name]
            if many2one:
                attrs['data-oe-many2one-id'] = many2one.id
                attrs['data-oe-many2one-model'] = many2one._name
            if options.get('null_text'):
                attrs['data-oe-many2one-allowreset'] = 1
                if not many2one:
                    attrs['data-oe-many2one-model'] = record._fields[field_name].comodel_name
        return attrs

    @api.model
    def from_html(self, model, field, element):
        """Persist an edited many2one back on its record.

        Writes the selected id (or False when the widget allows resetting
        and no id is set) on the source record identified by the data-oe-*
        attributes. Always returns None: the write is the side effect.
        """
        Model = self.env[element.get('data-oe-model')]
        # Renamed from "id": do not shadow the builtin.
        record_id = int(element.get('data-oe-id'))
        M2O = self.env[field.comodel_name]
        field_name = element.get('data-oe-field')
        many2one_id = int(element.get('data-oe-many2one-id'))

        allow_reset = element.get('data-oe-many2one-allowreset')
        if allow_reset and not many2one_id:
            # Reset the id of the many2one
            Model.browse(record_id).write({field_name: False})
            return None

        record = many2one_id and M2O.browse(many2one_id)
        if record and record.exists():
            # save the new id of the many2one
            Model.browse(record_id).write({field_name: many2one_id})

        return None
|
||||
|
||||
|
||||
class Contact(models.AbstractModel):
    _name = 'ir.qweb.field.contact'
    _description = 'Qweb Field Contact'
    _inherit = 'ir.qweb.field.contact'

    @api.model
    def attributes(self, record, field_name, options, values):
        """Embed the full rendering options as JSON so the editor can
        re-render the contact widget client-side with the same options.
        """
        attrs = super(Contact, self).attributes(record, field_name, options, values)
        if options.get('inherit_branding'):
            attrs['data-oe-contact-options'] = json.dumps(options)
        return attrs

    # helper to call the rendering of contact field
    @api.model
    def get_record_to_html(self, ids, options=None):
        """Render the contact widget for the first partner id in ``ids``."""
        return self.value_to_html(self.env['res.partner'].search([('id', '=', ids[0])]), options=options)
|
||||
|
||||
|
||||
class Date(models.AbstractModel):
    _name = 'ir.qweb.field.date'
    _description = 'Qweb Field Date'
    _inherit = 'ir.qweb.field.date'

    @api.model
    def attributes(self, record, field_name, options, values):
        """Add the raw value and its user-locale formatted rendering as
        branding attributes so the editor can parse edits back.
        """
        attrs = super(Date, self).attributes(record, field_name, options, values)
        if options.get('inherit_branding'):
            attrs['data-oe-original'] = record[field_name]

            # A datetime rendered with the date converter still needs the
            # datetime branding (tz, full format) for a correct round-trip.
            if record._fields[field_name].type == 'datetime':
                attrs = self.env['ir.qweb.field.datetime'].attributes(record, field_name, options, values)
                attrs['data-oe-type'] = 'datetime'
                return attrs

            lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env)
            locale = babel_locale_parse(lg.code)
            # Convert the POSIX strftime pattern to babel's LDML pattern.
            babel_format = value_format = posix_to_ldml(lg.date_format, locale=locale)

            if record[field_name]:
                date = fields.Date.from_string(record[field_name])
                value_format = pycompat.to_text(babel.dates.format_date(date, format=babel_format, locale=locale))

            attrs['data-oe-original-with-format'] = value_format
        return attrs

    @api.model
    def from_html(self, model, field, element):
        """Parse the user-locale formatted date typed in the editor back to
        the server date string; ``False`` clears the field.
        """
        value = element.text_content().strip()
        if not value:
            return False

        lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env)
        date = datetime.strptime(value, lg.date_format)
        return fields.Date.to_string(date)
|
||||
|
||||
|
||||
class DateTime(models.AbstractModel):
    _name = 'ir.qweb.field.datetime'
    _description = 'Qweb Field Datetime'
    _inherit = 'ir.qweb.field.datetime'

    @api.model
    def attributes(self, record, field_name, options, values):
        """Add the UTC value, its user-locale/user-tz formatted rendering and
        the timezone name as branding attributes for round-tripping edits.
        """
        attrs = super(DateTime, self).attributes(record, field_name, options, values)

        if options.get('inherit_branding'):
            value = record[field_name]

            lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env)
            locale = babel_locale_parse(lg.code)
            babel_format = value_format = posix_to_ldml('%s %s' % (lg.date_format, lg.time_format), locale=locale)
            tz = record.env.context.get('tz') or self.env.user.tz

            if isinstance(value, str):
                value = fields.Datetime.from_string(value)

            if value:
                # convert from UTC (server timezone) to user timezone
                value = fields.Datetime.context_timestamp(self.with_context(tz=tz), timestamp=value)
                value_format = pycompat.to_text(babel.dates.format_datetime(value, format=babel_format, locale=locale))
                value = fields.Datetime.to_string(value)

            attrs['data-oe-original'] = value
            attrs['data-oe-original-with-format'] = value_format
            attrs['data-oe-original-tz'] = tz
        return attrs

    @api.model
    def from_html(self, model, field, element):
        """Parse the user-locale datetime typed in the editor back to a UTC
        server datetime string; ``False`` clears the field.

        :raises ValidationError: if the text does not match the user's
            date+time format.
        """
        value = element.text_content().strip()
        if not value:
            return False

        # parse from string to datetime
        lg = self.env['res.lang']._lang_get(self.env.user.lang) or get_lang(self.env)
        try:
            datetime_format = f'{lg.date_format} {lg.time_format}'
            dt = datetime.strptime(value, datetime_format)
        except ValueError:
            raise ValidationError(_("The datetime %s does not match the format %s", value, datetime_format))

        # convert back from user's timezone to UTC
        tz_name = element.attrib.get('data-oe-original-tz') or self.env.context.get('tz') or self.env.user.tz
        if tz_name:
            try:
                user_tz = pytz.timezone(tz_name)
                utc = pytz.utc

                dt = user_tz.localize(dt).astimezone(utc)
            except Exception:
                # Best effort: keep the naive datetime rather than losing the edit.
                logger.warning(
                    "Failed to convert the value for a field of the model"
                    " %s back from the user's timezone (%s) to UTC",
                    model, tz_name,
                    exc_info=True)

        # format back to string
        return fields.Datetime.to_string(dt)
|
||||
|
||||
|
||||
class Text(models.AbstractModel):
    _name = 'ir.qweb.field.text'
    _description = 'Qweb Field Text'
    _inherit = 'ir.qweb.field.text'

    @api.model
    def from_html(self, model, field, element):
        """Convert the edited HTML node back to plain text, mapping block
        elements and <br> to newlines (see :func:`html_to_text`).
        """
        return html_to_text(element)
|
||||
|
||||
|
||||
class Selection(models.AbstractModel):
    _name = 'ir.qweb.field.selection'
    _description = 'Qweb Field Selection'
    _inherit = 'ir.qweb.field.selection'

    @api.model
    def from_html(self, model, field, element):
        """Map the displayed selection label back to its internal key.

        :raises ValueError: if no selection entry carries that label.
        """
        label = element.text_content().strip()
        selection = field.get_description(self.env)['selection']
        for key, display in selection:
            if isinstance(display, str):
                display = ustr(display)
            if label == display:
                return key

        raise ValueError(u"No value found for label %s in selection %s" % (
            label, selection))
|
||||
|
||||
|
||||
class HTML(models.AbstractModel):
    _name = 'ir.qweb.field.html'
    _description = 'Qweb Field HTML'
    _inherit = 'ir.qweb.field.html'

    @api.model
    def attributes(self, record, field_name, options, values=None):
        """Add sanitization branding so the editor knows which edits are
        allowed (or blocked) on this HTML field.
        """
        attrs = super().attributes(record, field_name, options, values)
        if options.get('inherit_branding'):
            field = record._fields[field_name]
            if field.sanitize:
                if field.sanitize_overridable:
                    if record.user_has_groups('base.group_sanitize_override'):
                        # Don't mark the field as 'sanitize' if the sanitize
                        # is defined as overridable and the user has the right
                        # to do so
                        return attrs
                    else:
                        try:
                            field.convert_to_column(record[field_name], record)
                        except UserError:
                            # The field contains element(s) that would be
                            # removed if sanitized. It means that someone who
                            # was part of a group allowing to bypass the
                            # sanitation saved that field previously. Mark the
                            # field as not editable.
                            attrs['data-oe-sanitize-prevent-edition'] = 1
                        return attrs
                # The field edition is not fully prevented and the sanitation cannot be bypassed
                attrs['data-oe-sanitize'] = 'no_block' if field.sanitize_attributes else 1 if field.sanitize_form else 'allow_form'

        return attrs

    @api.model
    def from_html(self, model, field, element):
        """Serialize the edited node's children back to an HTML string."""
        content = []
        if element.text:
            content.append(element.text)
        # tag=etree.Element skips comments and processing instructions.
        content.extend(html.tostring(child, encoding='unicode')
                       for child in element.iterchildren(tag=etree.Element))
        return '\n'.join(content)
|
||||
|
||||
|
||||
class Image(models.AbstractModel):
    """
    Widget options:

    ``class``
        set as attribute on the generated <img> tag
    """
    _name = 'ir.qweb.field.image'
    _description = 'Qweb Field Image'
    _inherit = 'ir.qweb.field.image'

    # NOTE(review): '[^]]' only excludes ']' — a module segment of '[^/]+' was
    # presumably intended; confirm against callers before changing.
    local_url_re = re.compile(r'^/(?P<module>[^]]+)/static/(?P<rest>.+)$')
    redirect_url_re = re.compile(r'\/web\/image\/\d+-redirect\/')

    @api.model
    def from_html(self, model, field, element):
        """Resolve the <img> src inside ``element`` back to binary data
        (base64) from an attachment, a model binary field, a static file
        or a remote URL; ``False`` if there is no image.
        """
        if element.find('img') is None:
            return False
        url = element.find('img').get('src')

        url_object = urls.url_parse(url)
        if url_object.path.startswith('/web/image'):
            fragments = url_object.path.split('/')
            query = url_object.decode_query()
            url_id = fragments[3].split('-')[0]
            # ir.attachment image urls: /web/image/<id>[-<checksum>][/...]
            if url_id.isdigit():
                model = 'ir.attachment'
                oid = url_id
                field = 'datas'
            # url of binary field on model: /web/image/<model>/<id>/<field>[/...]
            else:
                model = query.get('model', fragments[3])
                oid = query.get('id', fragments[4])
                field = query.get('field', fragments[5])
            item = self.env[model].browse(int(oid))
            if self.redirect_url_re.match(url_object.path):
                return self.load_remote_url(item.url)
            return item[field]

        if self.local_url_re.match(url_object.path):
            return self.load_local_url(url)

        return self.load_remote_url(url)

    def load_local_url(self, url):
        """Read a module static file and return it base64-encoded, or
        ``None`` when the file is missing or not a valid image.
        """
        match = self.local_url_re.match(urls.url_parse(url).path)
        rest = match.group('rest')

        path = os.path.join(
            match.group('module'), 'static', rest)

        try:
            with file_open(path, 'rb') as f:
                # force complete image load to ensure it's valid image data
                image = I.open(f)
                image.load()
                f.seek(0)
                return base64.b64encode(f.read())
        except Exception:
            logger.exception("Failed to load local image %r", url)
            return None

    def load_remote_url(self, url):
        """Download ``url``, validate it through PIL, re-encode it and return
        it base64-encoded; ``None`` on any failure.
        """
        try:
            # should probably remove remote URLs entirely:
            # * in fields, downloading them without blowing up the server is a
            #   challenge
            # * in views, may trigger mixed content warnings if HTTPS CMS
            #   linking to HTTP images
            # implement drag & drop image upload to mitigate?

            req = requests.get(url, timeout=REMOTE_CONNECTION_TIMEOUT)
            # PIL needs a seekable file-like image so wrap result in IO buffer
            image = I.open(io.BytesIO(req.content))
            # force a complete load of the image data to validate it
            image.load()
        except Exception:
            logger.warning("Failed to load remote image %r", url, exc_info=True)
            return None

        # don't use original data in case weird stuff was smuggled in, with
        # luck PIL will remove some of it?
        out = io.BytesIO()
        image.save(out, image.format)
        return base64.b64encode(out.getvalue())
|
||||
|
||||
|
||||
class Monetary(models.AbstractModel):
    _name = 'ir.qweb.field.monetary'
    # Added for consistency: every other ir.qweb.field.* converter in this
    # file declares a _description (Odoo also logs a warning without one).
    _description = 'Qweb Field Monetary'
    _inherit = 'ir.qweb.field.monetary'

    @api.model
    def from_html(self, model, field, element):
        """Parse the localized amount rendered inside the inner <span>
        (the monetary widget wraps the number in a span) back to a float.
        """
        lang = self.user_lang()

        value = element.find('span').text_content().strip()

        # Strip thousands separators, normalize the decimal point to '.'.
        return float(value.replace(lang.thousands_sep or '', '')
                          .replace(lang.decimal_point, '.'))
|
||||
|
||||
|
||||
class Duration(models.AbstractModel):
    _name = 'ir.qweb.field.duration'
    _description = 'Qweb Field Duration'
    _inherit = 'ir.qweb.field.duration'

    @api.model
    def attributes(self, record, field_name, options, values):
        """Expose the raw (non-localized) duration value for the editor."""
        attrs = super(Duration, self).attributes(record, field_name, options, values)
        if options.get('inherit_branding'):
            attrs['data-oe-original'] = record[field_name]
        return attrs

    @api.model
    def from_html(self, model, field, element):
        """Parse the edited duration; the editor writes a plain float."""
        value = element.text_content().strip()

        # non-localized value
        return float(value)
|
||||
|
||||
|
||||
class RelativeDatetime(models.AbstractModel):
    """Relative-datetime converter: no override needed, the inherited
    behavior is the intended combination.
    """
    _name = 'ir.qweb.field.relative'
    _description = 'Qweb Field Relative'
    _inherit = 'ir.qweb.field.relative'

    # get formatting from ir.qweb.field.relative but edition/save from datetime
|
||||
|
||||
|
||||
class QwebView(models.AbstractModel):
    """Qweb-view field converter: inherited behavior only, no edition support."""
    _name = 'ir.qweb.field.qweb'
    _description = 'Qweb Field qweb'
    _inherit = 'ir.qweb.field.qweb'
|
||||
|
||||
|
||||
def html_to_text(element):
    """ Converts HTML content with HTML-specified line breaks (br, p, div, ...)
    in roughly equivalent textual content.

    Used to replace and fixup the roundtripping of text and m2o: when using
    libxml 2.8.0 (but not 2.9.1) and parsing HTML with lxml.html.fromstring
    whitespace text nodes (text nodes composed *solely* of whitespace) are
    stripped out with no recourse, and fundamentally relying on newlines
    being in the text (e.g. inserted during user edition) is probably poor form
    anyway.

    -> this utility function collapses whitespace sequences and replaces
       nodes by roughly corresponding linebreaks
       * p are pre-and post-fixed by 2 newlines
       * br are replaced by a single newline
       * block-level elements not already mentioned are pre- and post-fixed by
         a single newline

    ought be somewhat similar (but much less high-tech) to aaronsw's html2text.
    the latter produces full-blown markdown, our text -> html converter only
    replaces newlines by <br> elements at this point so we're reverting that,
    and a few more newline-ish elements in case the user tried to add
    newlines/paragraphs into the text field

    :param element: lxml.html content
    :returns: corresponding pure-text output
    """

    # output is a list of str | int. Integers are padding requests (in minimum
    # number of newlines). When multiple padding requests, fold them into the
    # biggest one
    output = []
    _wrap(element, output)

    # remove any leading or tailing whitespace, replace sequences of
    # (whitespace)\n(whitespace) by a single newline, where (whitespace) is a
    # non-newline whitespace in this case
    return re.sub(
        r'[ \t\r\f]*\n[ \t\r\f]*',
        '\n',
        ''.join(_realize_padding(output)).strip())
|
||||
|
||||
# Tags padded with two newlines on each side by html_to_text.
_PADDED_BLOCK = set('p h1 h2 h3 h4 h5 h6'.split())
# https://developer.mozilla.org/en-US/docs/HTML/Block-level_elements minus p
# Other block-level tags: padded with a single newline on each side.
_MISC_BLOCK = set((
    'address article aside audio blockquote canvas dd dl div figcaption figure'
    ' footer form header hgroup hr ol output pre section tfoot ul video'
).split())
|
||||
|
||||
|
||||
def _collapse_whitespace(text):
|
||||
""" Collapses sequences of whitespace characters in ``text`` to a single
|
||||
space
|
||||
"""
|
||||
return re.sub('\s+', ' ', text)
|
||||
|
||||
|
||||
def _realize_padding(it):
|
||||
""" Fold and convert padding requests: integers in the output sequence are
|
||||
requests for at least n newlines of padding. Runs thereof can be collapsed
|
||||
into the largest requests and converted to newlines.
|
||||
"""
|
||||
padding = 0
|
||||
for item in it:
|
||||
if isinstance(item, int):
|
||||
padding = max(padding, item)
|
||||
continue
|
||||
|
||||
if padding:
|
||||
yield '\n' * padding
|
||||
padding = 0
|
||||
|
||||
yield item
|
||||
# leftover padding irrelevant as the output will be stripped
|
||||
|
||||
|
||||
def _wrap(element, output, wrapper=''):
    """ Recursively extracts text from ``element`` (via _element_to_text), and
    wraps it all in ``wrapper``. Extracted text is added to ``output``

    :type wrapper: basestring | int
    """
    # An int wrapper is a padding request resolved later by _realize_padding;
    # a string wrapper is emitted verbatim on both sides.
    output.append(wrapper)
    if element.text:
        output.append(_collapse_whitespace(element.text))
    for child in element:
        _element_to_text(child, output)
    output.append(wrapper)
|
||||
|
||||
|
||||
def _element_to_text(e, output):
    """Append the textual rendering of node ``e`` (and its tail text) to
    ``output``, padding block-level tags with newline requests.
    """
    if e.tag == 'br':
        output.append('\n')
    elif e.tag in _PADDED_BLOCK:
        # headings/paragraphs: request 2 newlines on each side
        _wrap(e, output, 2)
    elif e.tag in _MISC_BLOCK:
        # other block elements: request 1 newline on each side
        _wrap(e, output, 1)
    else:
        # inline
        _wrap(e, output)

    if e.tail:
        output.append(_collapse_whitespace(e.tail))
|
511
models/ir_ui_view.py
Normal file
511
models/ir_ui_view.py
Normal file
|
@ -0,0 +1,511 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import uuid
|
||||
from lxml import etree, html
|
||||
|
||||
from odoo import api, models, _
|
||||
from odoo.osv import expression
|
||||
from odoo.exceptions import ValidationError
|
||||
from odoo.addons.base.models.ir_ui_view import MOVABLE_BRANDING
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# Attributes that exist only while editing in the HTML editor and must be
# stripped before an arch is saved (see _get_cleaned_non_editing_attributes).
EDITING_ATTRIBUTES = MOVABLE_BRANDING + [
    'data-oe-type',
    'data-oe-expression',
    'data-oe-translation-id',
    'data-note-id'
]
|
||||
|
||||
|
||||
class IrUiView(models.Model):
    """Extend ``ir.ui.view`` with the HTML editor's save pipeline:
    embedded-field write-back, oe_structure extraction and snippet saving.
    """
    _inherit = 'ir.ui.view'
|
||||
|
||||
def _get_cleaned_non_editing_attributes(self, attributes):
    """
    Returns a new mapping of attributes -> value without the parts that are
    not meant to be saved (branding, editing classes, ...). Note that
    classes are meant to be cleaned on the client side before saving as
    mostly linked to the related options (so we are not supposed to know
    which to remove here).

    :param attributes: a mapping of attributes -> value
    :return: a new mapping of attributes -> value
    """
    # NOTE(review): iterated as (key, value) pairs, so callers must pass an
    # item iterable (e.g. el.attrib.items()), not a plain dict — confirm at
    # new call sites.
    attributes = {k: v for k, v in attributes if k not in EDITING_ATTRIBUTES}
    if 'class' in attributes:
        classes = attributes['class'].split()
        attributes['class'] = ' '.join([c for c in classes if c != 'o_editable'])
    if attributes.get('contenteditable') == 'true':
        del attributes['contenteditable']
    return attributes
|
||||
|
||||
#------------------------------------------------------
|
||||
# Save from html
|
||||
#------------------------------------------------------
|
||||
|
||||
@api.model
def extract_embedded_fields(self, arch):
    """Return the nodes of ``arch`` branded with a model other than
    ir.ui.view, i.e. embedded record fields edited inline.
    """
    return arch.xpath('//*[@data-oe-model != "ir.ui.view"]')
|
||||
|
||||
@api.model
def extract_oe_structures(self, arch):
    """Return the ``oe_structure`` nodes of ``arch`` that carry an
    ``oe_structure``-prefixed id (i.e. savable structure drop zones).
    """
    return arch.xpath('//*[hasclass("oe_structure")][contains(@id, "oe_structure")]')
|
||||
|
||||
@api.model
def get_default_lang_code(self):
    """Hook: language code to write embedded fields in when the context has
    none; ``False`` means use the current context language. Meant to be
    overridden (e.g. by website).
    """
    return False
|
||||
|
||||
@api.model
def save_embedded_field(self, el):
    """Write the value edited in branded node ``el`` back to its record.

    The target model/record/field and the converter type are read from the
    node's data-oe-* attributes.

    :raises ValidationError: when the edited text cannot be converted to
        the field's type.
    """
    Model = self.env[el.get('data-oe-model')]
    field = el.get('data-oe-field')

    # Pick the type-specific converter, falling back to the generic one.
    model = 'ir.qweb.field.' + el.get('data-oe-type')
    converter = self.env[model] if model in self.env else self.env['ir.qweb.field']

    try:
        value = converter.from_html(Model, Model._fields[field], el)
        if value is not None:
            # TODO: batch writes?
            record = Model.browse(int(el.get('data-oe-id')))
            if not self.env.context.get('lang') and self.get_default_lang_code():
                record.with_context(lang=self.get_default_lang_code()).write({field: value})
            else:
                record.write({field: value})

            if callable(Model._fields[field].translate):
                self._copy_custom_snippet_translations(record, field)

    except (ValueError, TypeError):
        raise ValidationError(_("Invalid field value for %s: %s", Model._fields[field].string, el.text_content().strip()))
|
||||
|
||||
def save_oe_structure(self, el):
    """Persist an edited ``oe_structure`` node as a new extension view of
    ``self`` that replaces the (empty) structure in the parent arch.

    :param el: lxml node with class ``oe_structure`` and an id
    :return: True when a new inheriting view was created, False when the
        structure already has its own inheriting view
    """
    self.ensure_one()

    if el.get('id') in self.key:
        # Do not inherit if the oe_structure already has its own inheriting view
        return False

    arch = etree.Element('data')
    xpath = etree.Element('xpath', expr="//*[hasclass('oe_structure')][@id='{}']".format(el.get('id')), position="replace")
    arch.append(xpath)
    # strip editor-only attributes before persisting
    attributes = self._get_cleaned_non_editing_attributes(el.attrib.items())
    structure = etree.Element(el.tag, attrib=attributes)
    structure.text = el.text
    xpath.append(structure)
    for child in el.iterchildren(tag=etree.Element):
        structure.append(copy.deepcopy(child))

    vals = {
        'inherit_id': self.id,
        'name': '%s (%s)' % (self.name, el.get('id')),
        'arch': etree.tostring(arch, encoding='unicode'),
        'key': '%s_%s' % (self.key, el.get('id')),
        'type': 'qweb',
        'mode': 'extension',
    }
    vals.update(self._save_oe_structure_hook())
    oe_structure_view = self.env['ir.ui.view'].create(vals)
    self._copy_custom_snippet_translations(oe_structure_view, 'arch_db')

    return True
|
||||
|
||||
@api.model
def _copy_custom_snippet_translations(self, record, html_field):
    """ Given a ``record`` and its HTML ``field``, detect any
    usage of a custom snippet and copy its translations.
    """
    lang_value = record[html_field]
    if not lang_value:
        return

    tree = html.fromstring(lang_value)
    for custom_snippet_el in tree.xpath('//*[hasclass("s_custom_snippet")]'):
        # custom snippets are matched to their saving view by data-name
        custom_snippet_name = custom_snippet_el.get('data-name')
        custom_snippet_view = self.search([('name', '=', custom_snippet_name)], limit=1)
        if custom_snippet_view:
            self._copy_field_terms_translations(custom_snippet_view, 'arch_db', record, html_field)
|
||||
|
||||
@api.model
def _copy_field_terms_translations(self, records_from, name_field_from, record_to, name_field_to):
    """ Copy the terms translation from records/field ``Model1.Field1``
    to a (possibly) completely different record/field ``Model2.Field2``.

    For instance, copy the translations of a
    ``product.template.html_description`` field to a ``ir.ui.view.arch_db``
    field.

    The method takes care of read and write access of both records/fields.

    :raises ValueError: if either field is not term-translated (callable
        ``translate``) or the destination field is not stored.
    """
    record_to.check_access_rights('write')
    record_to.check_access_rule('write')
    record_to.check_field_access_rights('write', [name_field_to])

    field_from = records_from._fields[name_field_from]
    field_to = record_to._fields[name_field_to]
    error_callable_msg = "'translate' property of field %r is not callable"
    if not callable(field_from.translate):
        raise ValueError(error_callable_msg % field_from)
    if not callable(field_to.translate):
        raise ValueError(error_callable_msg % field_to)
    if not field_to.store:
        raise ValueError("Field %r is not stored" % field_to)

    # This will also implicitly check for `read` access rights
    if not record_to[name_field_to] or not any(records_from.mapped(name_field_from)):
        return

    lang_env = self.env.lang or 'en_US'
    langs = set(lang for lang, _ in self.env['res.lang'].get_installed())

    # 1. Get translations
    records_from.flush_model([name_field_from])
    # term -> {lang: translation} for the destination, then overridden by
    # the source records' terms (source wins on conflicts).
    existing_translation_dictionary = field_to.get_translation_dictionary(
        record_to[name_field_to],
        {lang: record_to.with_context(prefetch_langs=True, lang=lang)[name_field_to] for lang in langs if lang != lang_env}
    )
    extra_translation_dictionary = {}
    for record_from in records_from:
        extra_translation_dictionary.update(field_from.get_translation_dictionary(
            record_from[name_field_from],
            {lang: record_from.with_context(prefetch_langs=True, lang=lang)[name_field_from] for lang in langs if lang != lang_env}
        ))
    existing_translation_dictionary.update(extra_translation_dictionary)
    translation_dictionary = existing_translation_dictionary

    # The `en_US` jsonb value should always be set, even if english is not
    # installed. If we don't do this, the custom snippet `arch_db` will only
    # have a `fr_BE` key but no `en_US` key.
    langs.add('en_US')

    # 2. Set translations
    new_value = {
        lang: field_to.translate(lambda term: translation_dictionary.get(term, {}).get(lang), record_to[name_field_to])
        for lang in langs
    }
    # write all language values straight into the cache ...
    record_to.env.cache.update_raw(record_to, field_to, [new_value], dirty=True)
    # Call `write` to trigger compute etc (`modified()`)
    record_to[name_field_to] = new_value[lang_env]
|
||||
|
||||
@api.model
def _save_oe_structure_hook(self):
    """Hook: extra create() values for the view generated by
    :meth:`save_oe_structure`. Meant to be overridden.
    """
    return {}
|
||||
|
||||
@api.model
def _are_archs_equal(self, arch1, arch2):
    """Deep structural equality of two lxml nodes, ignoring attribute order."""
    # Note that comparing the strings would not be ok as attributes order
    # must not be relevant
    same_node = (
        arch1.tag == arch2.tag
        and arch1.text == arch2.text
        and arch1.tail == arch2.tail
        and arch1.attrib == arch2.attrib
        and len(arch1) == len(arch2)
    )
    if not same_node:
        return False
    # recurse on the (equally long) child lists
    return all(self._are_archs_equal(child1, child2)
               for child1, child2 in zip(arch1, arch2))
|
||||
|
||||
@api.model
def _get_allowed_root_attrs(self):
    """Attributes of the edited root node that may be copied onto the arch
    root by :meth:`replace_arch_section`.
    """
    return ['style', 'class']
|
||||
|
||||
def replace_arch_section(self, section_xpath, replacement, replace_tail=False):
    """Return this view's arch with the node at ``section_xpath`` (or the
    root when falsy) replaced by the content of ``replacement``.

    :param section_xpath: xpath matching exactly one node, or falsy for root
    :param replacement: lxml node providing text, allowed attrs and children
    :param replace_tail: also copy the replacement's tail text
    :return: the new arch as an lxml element (the view is NOT written)
    """
    # the root of the arch section shouldn't actually be replaced as it's
    # not really editable itself, only the content truly is editable.
    self.ensure_one()
    arch = etree.fromstring(self.arch.encode('utf-8'))
    # => get the replacement root
    if not section_xpath:
        root = arch
    else:
        # ensure there's only one match
        [root] = arch.xpath(section_xpath)

    root.text = replacement.text

    # We need to replace some attrib for styles changes on the root element
    for attribute in self._get_allowed_root_attrs():
        if attribute in replacement.attrib:
            root.attrib[attribute] = replacement.attrib[attribute]

    # Note: after a standard edition, the tail *must not* be replaced
    if replace_tail:
        root.tail = replacement.tail
    # replace all children
    del root[:]
    for child in replacement:
        root.append(copy.deepcopy(child))

    return arch
|
||||
|
||||
@api.model
def to_field_ref(self, el):
    """Turn an edited embedded-field node back into a ``t-field`` node,
    dropping the editor's data-oe-* meta attributes.
    """
    # filter out meta-information inserted in the document
    attributes = {k: v for k, v in el.attrib.items()
                  if not k.startswith('data-oe-')}
    attributes['t-field'] = el.get('data-oe-expression')

    out = html.html_parser.makeelement(el.tag, attrib=attributes)
    out.tail = el.tail
    return out
|
||||
|
||||
@api.model
def to_empty_oe_structure(self, el):
    """Return a childless copy of ``el`` (same tag/attrs/tail): the content
    now lives in the extension view created by save_oe_structure.
    """
    out = html.html_parser.makeelement(el.tag, attrib=el.attrib)
    out.tail = el.tail
    return out
|
||||
|
||||
@api.model
def _set_noupdate(self):
    """Flag these views' xml data as noupdate so module upgrades do not
    overwrite user edits.
    """
    self.sudo().mapped('model_data_id').write({'noupdate': True})
|
||||
|
||||
def save(self, value, xpath=None):
    """ Update a view section. The view section may embed fields to write

    Note that `self` record might not exist when saving an embed field

    :param str value: edited HTML for the section (or a lone embedded field)
    :param str xpath: valid xpath to the tag to replace
    """
    self.ensure_one()

    arch_section = html.fromstring(
        value, parser=html.HTMLParser(encoding='utf-8'))

    if xpath is None:
        # value is an embedded field on its own, not a view section
        self.save_embedded_field(arch_section)
        return

    # write embedded fields to their own records first
    for el in self.extract_embedded_fields(arch_section):
        self.save_embedded_field(el)

        # transform embedded field back to t-field
        el.getparent().replace(el, self.to_field_ref(el))

    # persist oe_structure contents as extension views
    for el in self.extract_oe_structures(arch_section):
        if self.save_oe_structure(el):
            # empty oe_structure in parent view
            empty = self.to_empty_oe_structure(el)
            if el == arch_section:
                arch_section = empty
            else:
                el.getparent().replace(el, empty)

    new_arch = self.replace_arch_section(xpath, arch_section)
    old_arch = etree.fromstring(self.arch.encode('utf-8'))
    # only write (and lock against module updates) if something changed
    if not self._are_archs_equal(old_arch, new_arch):
        self._set_noupdate()
        self.write({'arch': etree.tostring(new_arch, encoding='unicode')})
        self._copy_custom_snippet_translations(self, 'arch_db')
|
||||
|
||||
@api.model
def _view_get_inherited_children(self, view):
    """Children of ``view`` to traverse in _views_get; primary children are
    skipped under the ``no_primary_children`` context unless they belong to
    the original inheritance hierarchy.
    """
    if self._context.get('no_primary_children', False):
        original_hierarchy = self._context.get('__views_get_original_hierarchy', [])
        return view.inherit_children_ids.filtered(lambda extension: extension.mode != 'primary' or extension.id in original_hierarchy)
    return view.inherit_children_ids
|
||||
|
||||
@api.model
def _view_obj(self, view_id):
    """Resolve ``view_id`` (key string, xmlid string, db id, or already a
    view record) to an ir.ui.view record.

    :raises ValueError: propagated by ``env.ref`` for unknown xmlids
    """
    if isinstance(view_id, str):
        return self.search([('key', '=', view_id)], limit=1) or self.env.ref(view_id)
    elif isinstance(view_id, int):
        return self.browse(view_id)
    # It can already be a view object when called by '_views_get()' that is calling '_view_obj'
    # for it's inherit_children_ids, passing them directly as object record.
    return view_id
|
||||
|
||||
# Returns all views (called and inherited) related to a view
|
||||
# Used by translation mechanism, SEO and optional templates
|
||||
|
||||
@api.model
def _views_get(self, view_id, get_children=True, bundles=False, root=True, visited=None):
    """ For a given view ``view_id``, should return:
    * the view itself (starting from its top most parent)
    * all views inheriting from it, enabled or not
      - but not the optional children of a non-enabled child
    * all views called from it (via t-call)

    :param get_children: also include inheriting views
    :param bundles: also follow t-call-assets
    :param root: walk up to the top-most parent first
    :param visited: ids already collected, to break t-call/inherit cycles
    :returns recordset of ir.ui.view
    """
    try:
        view = self._view_obj(view_id)
    except ValueError:
        _logger.warning("Could not find view object with view_id '%s'", view_id)
        return self.env['ir.ui.view']

    if visited is None:
        visited = []
    original_hierarchy = self._context.get('__views_get_original_hierarchy', [])
    # climb to the root of the inheritance chain, remembering the path
    while root and view.inherit_id:
        original_hierarchy.append(view.id)
        view = view.inherit_id

    views_to_return = view

    # follow t-call (and optionally t-call-assets) references
    node = etree.fromstring(view.arch)
    xpath = "//t[@t-call]"
    if bundles:
        xpath += "| //t[@t-call-assets]"
    for child in node.xpath(xpath):
        try:
            called_view = self._view_obj(child.get('t-call', child.get('t-call-assets')))
        except ValueError:
            continue
        if called_view and called_view not in views_to_return and called_view.id not in visited:
            views_to_return += self._views_get(called_view, get_children=get_children, bundles=bundles, visited=visited + views_to_return.ids)

    if not get_children:
        return views_to_return

    extensions = self._view_get_inherited_children(view)

    # Keep children in a deterministic order regardless of their applicability
    for extension in extensions.sorted(key=lambda v: v.id):
        # only return optional grandchildren if this child is enabled
        if extension.id not in visited:
            for ext_view in self._views_get(extension, get_children=extension.active, root=False, visited=visited + views_to_return.ids):
                if ext_view not in views_to_return:
                    views_to_return += ext_view
    return views_to_return
|
||||
|
||||
@api.model
def get_related_views(self, key, bundles=False):
    """ Get inherit view's informations of the template ``key``.
    returns templates info (which can be active or not)
    ``bundles=True`` returns also the asset bundles
    """
    user_groups = set(self.env.user.groups_id)
    # active_test=False: include disabled (optional) views; drop the lang so
    # archs are fetched untranslated
    new_context = {
        **self._context,
        'active_test': False,
    }
    new_context.pop('lang', None)
    View = self.with_context(new_context)
    views = View._views_get(key, bundles=bundles)
    # keep only the views the current user is allowed to see
    return views.filtered(lambda v: not v.groups_id or len(user_groups.intersection(v.groups_id)))
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# Snippet saving
|
||||
# --------------------------------------------------------------------------
|
||||
|
||||
@api.model
def _get_snippet_addition_view_key(self, template_key, key):
    """Build the key of the view that registers a saved snippet inside the
    snippets template identified by ``template_key``."""
    return f"{template_key}.{key}"
|
||||
|
||||
@api.model
def _snippet_save_view_values_hook(self):
    """Hook returning extra ``ir.ui.view`` values to merge in when saving a
    custom snippet. Meant to be overridden by other modules."""
    return dict()
|
||||
|
||||
def _find_available_name(self, name, used_names):
|
||||
attempt = 1
|
||||
candidate_name = name
|
||||
while candidate_name in used_names:
|
||||
attempt += 1
|
||||
candidate_name = f"{name} ({attempt})"
|
||||
return candidate_name
|
||||
|
||||
@api.model
def save_snippet(self, name, arch, template_key, snippet_key, thumbnail_url):
    """
    Saves a new snippet arch so that it appears with the given name when
    using the given snippets template.

    :param name: the name of the snippet to save
    :param arch: the html structure of the snippet to save
    :param template_key: the key of the view regrouping all snippets in
        which the snippet to save is meant to appear
    :param snippet_key: the key (without module part) to identify
        the snippet from which the snippet to save originates
    :param thumbnail_url: the url of the thumbnail to use when displaying
        the snippet to save
    """
    app_name = template_key.split('.')[0]
    # Suffix a random hex token so saving the same snippet several times
    # always produces unique view keys.
    snippet_key = '%s_%s' % (snippet_key, uuid.uuid4().hex)
    full_snippet_key = '%s.%s' % (app_name, snippet_key)

    # find available name
    # NOTE(review): name uniqueness is scoped to the current website's
    # views — assumes 'website_id' is in context when relevant; confirm.
    current_website = self.env['website'].browse(self._context.get('website_id'))
    website_domain = current_website.website_domain()
    used_names = self.search(expression.AND([
        [('name', '=like', '%s%%' % name)], website_domain
    ])).mapped('name')
    name = self._find_available_name(name, used_names)

    # html to xml to add '/' at the end of self closing tags like br, ...
    arch_tree = html.fromstring(arch)
    # Keep only the cleaned, non-editing attributes on the root node.
    # NOTE(review): deleting from attrib while iterating it relies on lxml
    # materializing the key list up front — confirm with lxml version used.
    attributes = self._get_cleaned_non_editing_attributes(arch_tree.attrib.items())
    for attr in arch_tree.attrib:
        if attr in attributes:
            arch_tree.attrib[attr] = attributes[attr]
        else:
            del arch_tree.attrib[attr]
    xml_arch = etree.tostring(arch_tree, encoding='utf-8')
    new_snippet_view_values = {
        'name': name,
        'key': full_snippet_key,
        'type': 'qweb',
        'arch': xml_arch,
    }
    new_snippet_view_values.update(self._snippet_save_view_values_hook())
    custom_snippet_view = self.create(new_snippet_view_values)
    model = self._context.get('model')
    field = self._context.get('field')
    if field == 'arch':
        # Special case for `arch` which is a kind of related (through a
        # compute) to `arch_db` but which is hosting XML/HTML content while
        # being a char field.. Which is then messing around with the
        # `get_translation_dictionary` call, returning XML instead of
        # strings
        field = 'arch_db'
    res_id = self._context.get('resId')
    if model and field and res_id:
        # Carry the translated terms of the source document's field over
        # to the newly created snippet view.
        self._copy_field_terms_translations(
            self.env[model].browse(int(res_id)),
            field,
            custom_snippet_view,
            'arch_db',
        )

    # Register the new snippet in the "custom" section of the snippets
    # template by inheriting the template view.
    custom_section = self.search([('key', '=', template_key)])
    snippet_addition_view_values = {
        'name': name + ' Block',
        'key': self._get_snippet_addition_view_key(template_key, snippet_key),
        'inherit_id': custom_section.id,
        'type': 'qweb',
        'arch': """
            <data inherit_id="%s">
                <xpath expr="//div[@id='snippet_custom']" position="attributes">
                    <attribute name="class" remove="d-none" separator=" "/>
                </xpath>
                <xpath expr="//div[@id='snippet_custom_body']" position="inside">
                    <t t-snippet="%s" t-thumbnail="%s"/>
                </xpath>
            </data>
        """ % (template_key, full_snippet_key, thumbnail_url),
    }
    snippet_addition_view_values.update(self._snippet_save_view_values_hook())
    self.create(snippet_addition_view_values)
|
||||
|
||||
@api.model
def rename_snippet(self, name, view_id, template_key):
    """Rename the saved snippet view ``view_id`` (and, when present, the
    view registering it in the ``template_key`` snippets template)."""
    snippet_view = self.browse(view_id)
    # Drop the module part of the key to rebuild the registration key.
    unqualified_key = snippet_view.key.split('.')[1]
    addition_key = self._get_snippet_addition_view_key(template_key, unqualified_key)
    addition_view = self.search([('key', '=', addition_key)])
    if addition_view:
        addition_view.name = '%s Block' % name
    snippet_view.name = name
|
||||
|
||||
@api.model
def delete_snippet(self, view_id, template_key):
    """Remove the saved snippet view ``view_id`` together with the view
    registering it in the ``template_key`` snippets template."""
    snippet_view = self.browse(view_id)
    # Drop the module part of the key to rebuild the registration key.
    unqualified_key = snippet_view.key.split('.')[1]
    addition_key = self._get_snippet_addition_view_key(template_key, unqualified_key)
    addition_view = self.search([('key', '=', addition_key)])
    (addition_view | snippet_view).unlink()
|
41
models/ir_websocket.py
Normal file
41
models/ir_websocket.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
import re
|
||||
|
||||
from odoo import models
|
||||
from odoo.exceptions import AccessDenied
|
||||
|
||||
|
||||
class IrWebsocket(models.AbstractModel):
    _inherit = 'ir.websocket'

    def _build_bus_channel_list(self, channels):
        """Translate collaborative-edition channel names into bus channels.

        String channels of the form
        ``editor_collaboration:<model>:<field>:<res_id>`` are turned into
        actual bus channel tuples, after verifying that the current user
        has both read and write access to the targeted record and field.
        """
        if self.env.uid:
            # Work on a copy so the caller's list is never altered.
            channels = list(channels)
            collab_re = re.compile(r'editor_collaboration:(\w+(?:\.\w+)*):(\w+):(\d+)')
            # Iterate over a snapshot: the tuples appended below are not
            # strings and would be skipped anyway.
            for channel in tuple(channels):
                if not isinstance(channel, str):
                    continue
                match = collab_re.match(channel)
                if not match:
                    continue
                model_name = match[1]
                field_name = match[2]
                res_id = int(match[3])

                # Public users may never join an edition channel.
                if self.env.user._is_public():
                    raise AccessDenied()

                record = self.env[model_name].browse([res_id])
                if not record.exists():
                    continue

                # Joining requires full read AND write access to the
                # edited field of the document.
                for operation in ('read', 'write'):
                    record.check_access_rights(operation)
                    record.check_field_access_rights(operation, [field_name])
                    record.check_access_rule(operation)

                channels.append((self.env.registry.db_name, 'editor_collaboration', model_name, field_name, res_id))
        return super()._build_bus_channel_list(channels)
|
28
models/models.py
Normal file
28
models/models.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
from hashlib import sha256
|
||||
|
||||
from odoo import api, models
|
||||
|
||||
|
||||
class Base(models.AbstractModel):
    _inherit = 'base'

    @api.model
    def _get_view_field_attributes(self):
        """Also expose the sanitization attributes of fields to the view
        architecture, so the client-side editor knows how field content
        will be cleaned on write."""
        return super()._get_view_field_attributes() + ['sanitize', 'sanitize_tags']
|
||||
|
||||
|
||||
class BaseModel(models.AbstractModel):
    _inherit = 'base'

    def update_field_translations_sha(self, fname, translations):
        """Update the translations of ``fname``, matching existing source
        terms by their sha256 digest.

        When the field uses a callable ``translate``, every submitted term
        is first normalized through the field's ``term_converter``.
        """
        field = self._fields[fname]
        if callable(field.translate):
            for lang_terms in translations.values():
                for term_sha in lang_terms:
                    lang_terms[term_sha] = field.translate.term_converter(lang_terms[term_sha])

        def _term_digest(old_term):
            return sha256(old_term.encode()).hexdigest()

        return self._update_field_translations(fname, translations, _term_digest)
|
37
models/test_models.py
Normal file
37
models/test_models.py
Normal file
|
@ -0,0 +1,37 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Part of Odoo. See LICENSE file for full copyright and licensing details.
|
||||
|
||||
from odoo import models, fields
|
||||
|
||||
|
||||
class ConverterTest(models.Model):
    """Dummy model exercising one field of every basic type, used by the
    web editor converter tests."""
    _name = 'web_editor.converter.test'
    _description = 'Web Editor Converter Test'

    # disable translation export for those brilliant field labels and values
    _translate = False

    char = fields.Char()
    integer = fields.Integer()
    float = fields.Float()
    # fixed-precision float: 16 digits total, 2 decimal places
    numeric = fields.Float(digits=(16, 2))
    many2one = fields.Many2one('web_editor.converter.test.sub')
    # stored in the database column rather than as an ir.attachment
    binary = fields.Binary(attachment=False)
    date = fields.Date()
    datetime = fields.Datetime()
    selection_str = fields.Selection([
        ('A', "Qu'il n'est pas arrivé à Toronto"),
        ('B', "Qu'il était supposé arriver à Toronto"),
        ('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
        ('D', "La réponse D"),
    ], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
        u"qu'il fait une escale technique à St Claude, on dit:")
    html = fields.Html()
    text = fields.Text()
|
||||
|
||||
|
||||
class ConverterTestSub(models.Model):
    """Comodel targeted by the many2one field of
    ``web_editor.converter.test``."""
    _name = 'web_editor.converter.test.sub'
    _description = 'Web Editor Converter Subtest'

    name = fields.Char()
|
3
security/ir.model.access.csv
Normal file
3
security/ir.model.access.csv
Normal file
|
@ -0,0 +1,3 @@
|
|||
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
|
||||
access_web_editor_converter_test,access_web_editor_converter_test,model_web_editor_converter_test,base.group_system,1,1,1,1
|
||||
access_web_editor_converter_test_sub,access_web_editor_converter_test_sub,model_web_editor_converter_test_sub,base.group_system,1,1,1,1
|
|
13
static/image_shapes/brushed/brush_1.svg
Normal file
13
static/image_shapes/brushed/brush_1.svg
Normal file
|
@ -0,0 +1,13 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="600" height="600">
|
||||
<defs>
|
||||
<clipPath id="clip-path" clipPathUnits="objectBoundingBox">
|
||||
<use xlink:href="#filterPath" fill="none"/>
|
||||
</clipPath>
|
||||
<path id="filterPath" d="M0.4281,0.0681C0.4281,0.0305,0.4603,0,0.5,0C0.5397,0,0.5719,0.0305,0.5719,0.0681V0.1326C0.5779,0.1008,0.6073,0.0766,0.6427,0.0766C0.6824,0.0766,0.7146,0.1071,0.7146,0.1447V0.2432C0.7206,0.2114,0.75,0.1872,0.7854,0.1872C0.8251,0.1872,0.8573,0.2177,0.8573,0.2553V0.3539C0.8633,0.322,0.8927,0.2979,0.9281,0.2979C0.9678,0.2979,1,0.3284,1,0.366V0.5872C1,0.6248,0.9678,0.6553,0.9281,0.6553C0.8927,0.6553,0.8633,0.6312,0.8573,0.5993V0.7489C0.8573,0.7865,0.8251,0.817,0.7854,0.817C0.75,0.817,0.7206,0.7929,0.7146,0.761V0.8596C0.7146,0.8972,0.6824,0.9277,0.6427,0.9277C0.6073,0.9277,0.5779,0.9035,0.5719,0.8716V0.9319C0.5719,0.9695,0.5397,1,0.5,1C0.4603,1,0.4281,0.9695,0.4281,0.9319V0.8716C0.4221,0.9035,0.3927,0.9277,0.3573,0.9277C0.3176,0.9277,0.2854,0.8972,0.2854,0.8596V0.761C0.2794,0.7929,0.25,0.817,0.2146,0.817C0.1749,0.817,0.1427,0.7865,0.1427,0.7489V0.5993C0.1367,0.6312,0.1073,0.6553,0.0719,0.6553C0.0322,0.6553,0,0.6248,0,0.5872V0.366C0,0.3284,0.0322,0.2979,0.0719,0.2979C0.1073,0.2979,0.1367,0.322,0.1427,0.3539V0.2553C0.1427,0.2177,0.1749,0.1872,0.2146,0.1872C0.25,0.1872,0.2794,0.2114,0.2854,0.2432V0.1447C0.2854,0.1071,0.3176,0.0766,0.3573,0.0766C0.3927,0.0766,0.4221,0.1008,0.4281,0.1326V0.0681Z">
|
||||
</path>
|
||||
</defs>
|
||||
<svg viewBox="0 0 1 1" id="preview" preserveAspectRatio="none">
|
||||
<use xlink:href="#filterPath" fill="darkgrey"/>
|
||||
</svg>
|
||||
<image xlink:href="" clip-path="url(#clip-path)"/>
|
||||
</svg>
|
After Width: | Height: | Size: 1.7 KiB |
13
static/image_shapes/brushed/brush_2.svg
Normal file
13
static/image_shapes/brushed/brush_2.svg
Normal file
|
@ -0,0 +1,13 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="600" height="600">
|
||||
<defs>
|
||||
<clipPath id="clip-path" clipPathUnits="objectBoundingBox">
|
||||
<use xlink:href="#filterPath" fill="none"/>
|
||||
</clipPath>
|
||||
<path id="filterPath" d="M0.6299,0.0288C0.6606-0.0096,0.7103-0.0096,0.741,0.0288C0.7686,0.0634,0.7713,0.1173,0.7492,0.1557L0.84,0.042C0.8706,0.0036,0.9203,0.0036,0.951,0.042C0.9817,0.0803,0.9817,0.1426,0.951,0.1809L0.8542,0.3021C0.8849,0.2645,0.9341,0.2648,0.9646,0.3029C0.9952,0.3413,0.9952,0.4035,0.9646,0.4419L0.8935,0.5308C0.9216,0.5174,0.9544,0.5249,0.977,0.5531C1.0077,0.5915,1.0077,0.6537,0.977,0.6921L0.8374,0.8669C0.8067,0.9052,0.757,0.9052,0.7264,0.8669C0.7038,0.8386,0.6979,0.7975,0.7085,0.7624L0.5417,0.9712C0.511,1.0096,0.4613,1.0096,0.4307,0.9712C0.4002,0.9331,0.4,0.8715,0.4301,0.8331L0.3226,0.9675C0.292,1.0059,0.2423,1.0059,0.2116,0.9675C0.181,0.9292,0.181,0.8669,0.2116,0.8286L0.2168,0.8221C0.1861,0.8498,0.1431,0.8464,0.1154,0.8118C0.0852,0.774,0.0848,0.7129,0.1142,0.6745C0.0847,0.6933,0.0477,0.6873,0.023,0.6564C-0.0077,0.618-0.0077,0.5558,0.023,0.5174L0.2785,0.1976C0.3091,0.1592,0.3588,0.1592,0.3895,0.1976C0.4142,0.2285,0.419,0.2748,0.404,0.3117L0.6299,0.0288Z">
|
||||
</path>
|
||||
</defs>
|
||||
<svg viewBox="0 0 1 1" id="preview" preserveAspectRatio="none">
|
||||
<use xlink:href="#filterPath" fill="darkgrey"/>
|
||||
</svg>
|
||||
<image xlink:href="" clip-path="url(#clip-path)"/>
|
||||
</svg>
|
After Width: | Height: | Size: 1.4 KiB |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user