[14.0][IMP] datev_export_xml: Improve handling and download of bigger files #158

Merged 7 commits on Jun 28, 2024
2 changes: 1 addition & 1 deletion datev_export_xml/__init__.py
@@ -7,4 +7,4 @@
# @author Grzegorz Grzelak
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

from . import models
from . import controllers, models
2 changes: 1 addition & 1 deletion datev_export_xml/__manifest__.py
@@ -9,7 +9,7 @@

{
"name": "Datev Export XML",
"version": "14.0.1.0.3",
"version": "14.0.1.1.3",
"category": "Accounting",
"license": "AGPL-3",
"author": "Guenter Selbert, Thorsten Vocks, Maciej Wichowski, Daniela Scarpa, "
4 changes: 4 additions & 0 deletions datev_export_xml/controllers/__init__.py
@@ -0,0 +1,4 @@
# Copyright (C) 2022-2024 initOS GmbH
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from . import main
40 changes: 40 additions & 0 deletions datev_export_xml/controllers/main.py
@@ -0,0 +1,40 @@
# Copyright (C) 2022-2024 initOS GmbH
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

import base64
import logging

from odoo import http
from odoo.http import request, send_file

from odoo.addons.web.controllers.main import Home

_logger = logging.getLogger(__name__)


class DatevHome(Home):
@http.route("/datev/xml/download/<int:line_id>", type="http", auth="user")
def datev_xml_download_attachment(self, line_id):
export = request.env["datev.export.xml.line"].search([("id", "=", line_id)])

if not export.attachment_id:
return request.not_found()

att = export.attachment_id

if att.store_fname:
full_path = att._full_path(att.store_fname)
return send_file(
full_path,
filename=att.name,
mimetype=att.mimetype,
as_attachment=True,
)

return request.make_response(
base64.b64decode(att.datas),
[
("Content-Type", att.mimetype),
("Content-Disposition", f'attachment; filename="{att.name}"'),
],
)
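
The datev.export.xml.line records this route reads from are added by this PR in datev_export_line.py, which is not shown in this excerpt. As a rough illustration only, a minimal sketch of such a line model with a button action pointing the browser at the new download route could look like the following; every field or method name not referenced elsewhere in the diff is an assumption:

# Hypothetical sketch -- the actual datev_export_line.py added by this PR is not shown here.
from odoo import fields, models


class DatevExportLine(models.Model):
    _name = "datev.export.xml.line"
    _description = "DATEV XML Export Line"

    export_id = fields.Many2one("datev.export.xml", required=True, ondelete="cascade")
    attachment_id = fields.Many2one("ir.attachment", readonly=True)
    invoice_ids = fields.Many2many("account.move")
    invalidated = fields.Boolean(default=False)

    def action_download(self):
        # Redirect to the streaming controller instead of serving the binary
        # through the form view, so big ZIP files are not read into memory.
        self.ensure_one()
        return {
            "type": "ir.actions.act_url",
            "url": f"/datev/xml/download/{self.id}",
            "target": "self",
        }
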
20 changes: 10 additions & 10 deletions datev_export_xml/i18n/de.po
@@ -398,13 +398,13 @@ msgstr "Dateigröße"
#: code:addons/datev_export_xml/models/datev_export.py:0
#, python-format
msgid ""
"Filtered Export of {} Documents\n"
"Date Range: {}-{}\n"
"Types: {}"
"Filtered Export of %(count)s Documents\n"
"Date Range: %(start)s-%(stop)s\n"
"Types: %(types)s"
msgstr ""
"Gefilterter Export von {} Dokumente\n"
"Datumsbereich: {}-{}\n"
"Typen: {}"
"Gefilterter Export von %(count)s Dokumente\n"
"Datumsbereich: %(start)s-%(stop)s\n"
"Typen: %(types)s"

#. module: datev_export_xml
#: model:ir.model.fields,field_description:datev_export_xml.field_datev_export_xml__message_follower_ids
@@ -567,11 +567,11 @@ msgstr ""
#: code:addons/datev_export_xml/models/datev_export.py:0
#, python-format
msgid ""
"Manually Doc Export of {} Documents \n"
"Numbers: {}"
"Manual Export of %(count)s Documents\n"
"Numbers: %(names)s"
msgstr ""
"Manueller Export von {} Documenten \n"
"Nummern: {}"
"Manueller Export von %(count)s Documenten \n"
"Nummern: %(names)s"

#. module: datev_export_xml
#: model:ir.model.fields,field_description:datev_export_xml.field_datev_export_xml__manually_document_selection
34 changes: 34 additions & 0 deletions datev_export_xml/migrations/14.0.1.1.3/post-migrate.py
@@ -0,0 +1,34 @@
# © 2023 initOS GmbH
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

import logging

from odoo import SUPERUSER_ID, api

_logger = logging.getLogger(__name__)


def migrate(cr, version):
env = api.Environment(cr, SUPERUSER_ID, {})

query = """
SELECT id, attachment_id FROM datev_export_xml
WHERE attachment_id IS NOT NULL
"""
env.cr.execute(query)

for export_id, attachment_id in env.cr.fetchall():
export = env["datev.export.xml"].browse(export_id)
attachment = env["ir.attachment"].browse(attachment_id)

_logger.info(f"Migrating attachment of {export}")

line = export.line_ids.create(
{
"attachment_id": attachment_id,
"export_id": export_id,
"invoice_ids": [(6, 0, export.invoice_ids.ids)],
}
)

attachment.write({"res_model": line._name, "res_id": line.id})
2 changes: 1 addition & 1 deletion datev_export_xml/models/__init__.py
@@ -17,4 +17,4 @@
res_config_settings,
)

from . import datev_export # isort:skip
from . import datev_export, datev_export_line # isort:skip
176 changes: 99 additions & 77 deletions datev_export_xml/models/datev_export.py
@@ -109,24 +109,22 @@ def name_get(self):
readonly=True,
states={"draft": [("readonly", False)]},
)
attachment_id = fields.Many2one(
comodel_name="ir.attachment", string="Attachment", required=False, readonly=True
)
datev_file = fields.Binary("ZIP file", readonly=True, related="attachment_id.datas")
datev_filename = fields.Char(
"ZIP filename", readonly=True, related="attachment_id.name"
)
datev_filesize = fields.Char(
"Filesize",
compute="_compute_datev_filesize",
line_ids = fields.One2many(
"datev.export.xml.line",
"export_id",
"Lines",
)
line_count = fields.Integer(compute="_compute_line_count")

problematic_invoices_count = fields.Integer(
compute="_compute_problematic_invoices_count"
)
invoice_ids = fields.Many2many(comodel_name="account.move", string="Invoices")
invoices_count = fields.Integer(
string="Invoices Count", compute="_compute_invoices_count", store=True
string="Total Invoices", compute="_compute_invoices_count", store=True
)
invoices_exported_count = fields.Integer(
string="Exported Invoices", compute="_compute_invoices_count", store=True
)

manually_document_selection = fields.Boolean(default=False)
@@ -147,22 +145,23 @@ def name_get(self):
tracking=True,
)

@api.depends("attachment_id", "attachment_id.datas")
def _compute_datev_filesize(self):
for r in self.with_context(bin_size=True):
r.datev_filesize = r.datev_file
@api.depends("line_ids")
def _compute_line_count(self):
for rec in self:
rec.line_count = len(rec.line_ids)

@api.depends("invoice_ids")
def _compute_problematic_invoices_count(self):
for r in self:
r.problematic_invoices_count = len(
r.invoice_ids.filtered("datev_validation")
for rec in self:
rec.problematic_invoices_count = len(
rec.invoice_ids.filtered("datev_validation")
)

@api.depends("invoice_ids")
@api.depends("invoice_ids", "line_ids", "line_ids.invoice_ids")
def _compute_invoices_count(self):
for r in self:
r.invoices_count = len(r.invoice_ids)
for rec in self:
rec.invoices_count = len(rec.invoice_ids)
rec.invoices_exported_count = len(rec.mapped("line_ids.invoice_ids"))

@api.constrains("export_invoice", "export_refund", "export_type")
def validate_types(self):
@@ -215,50 +214,66 @@ def get_invoices(self):
)
return self.env["account.move"].search(search_clause)

def get_zip(self):
self = self.with_context(bin_size=False)
try:
if self.attachment_id:
self.attachment_id.unlink()
def _get_zip(self):
generator = self.generate_zip(
self.invoice_ids - self.mapped("line_ids.invoice_ids"),
self.check_xsd,
)

self.write({"state": "running", "exception_info": None})
with self.env.cr.savepoint():
zip_file = self.generate_zip(
self.invoice_ids,
self.check_xsd,
for index, (zip_file, invoices) in enumerate(generator, 1):
if not self.manually_document_selection:
description = _(
"Filtered Export of %(count)s Documents\n"
"Date Range: %(start)s-%(stop)s\nTypes: %(types)s",
count=len(invoices),
start=self.date_start,
stop=self.date_stop,
types=", ".join(self.get_type_list()),
)
if not self.manually_document_selection:
description = _(
"Filtered Export of {} Documents\nDate Range: {}-{}\nTypes: {}"
).format(
len(self.invoice_ids),
self.date_start,
self.date_stop,
", ".join(self.get_type_list()),
)
else:
description = _(
"Manually Doc Export of {} Documents \nNumbers: {}"
).format(
len(self.invoice_ids),
", ".join(self.invoice_ids.mapped("name")),
)

attachment = self.env["ir.attachment"].create(
{
"name": time.strftime("%Y_%m_%d_%H_%M") + ".zip",
"datas": zip_file,
"res_model": "datev.export.xml",
"res_id": self.id,
"res_field": "attachment_id",
"description": description,
}
else:
description = _(
"Manual Export of %(count)s Documents\n" "Numbers: %(names)s",
count=len(invoices),
names=", ".join(self.invoice_ids.mapped("name")),
)
self.write({"attachment_id": attachment.id, "state": "done"})

attachment = self.env["ir.attachment"].create(
{
"name": time.strftime(f"%Y-%m-%d_%H%M-{index}.zip"),
"datas": zip_file,
"res_model": "datev.export.xml",
"res_id": self.id,
"description": description,
}
)
self.line_ids.sudo().create(
{
"export_id": self.id,
"attachment_id": attachment.id,
"invoice_ids": [(6, 0, invoices.ids)],
}
)

# Huge numbers of invoices can lead to cron timeouts. Commit after
# each package and continue. If the timeout hits, the job is still
# "running", gets reset to "pending" by the cron (hanging job) and
# then continues with the next package.
if self.env.context.get("datev_autocommit"):
# pylint: disable=invalid-commit
self.env.cr.commit()

def get_zip(self):
self.ensure_one()

try:
self.write({"state": "running", "exception_info": None})
self.with_context(bin_size=False)._get_zip()
if self.invoices_count == self.invoices_exported_count:
self.write({"state": "done"})
except Exception as e:
_logger.exception(e)
msg = e.name if hasattr(e, "name") else str(e)
self.write({"exception_info": msg, "state": "failed"})
_logger.exception(e)

self._compute_problematic_invoices_count()

@@ -273,14 +288,25 @@ def cron_run_pending_export(self):
)
hanging_datev_exports.write({"state": "pending"})
datev_export = self.search(
[("state", "=", "pending"), ("manually_document_selection", "=", False)],
[
("state", "in", ("running", "pending")),
("manually_document_selection", "=", False),
],
# Favor hanging jobs
order="state DESC",
limit=1,
)
if datev_export:
datev_export.with_user(datev_export.create_uid.id).get_zip()

if not datev_export:
return

datev_export.with_user(datev_export.create_uid.id).with_context(
datev_autocommit=True
).get_zip()

if datev_export.state == "done":
datev_export._create_activity()
datev_export.invoice_ids.write({"datev_exported": True})
return True

def export_zip(self):
self.ensure_one()
@@ -291,7 +317,6 @@ def export_zip(self):
else:
self.invoice_ids = [(6, 0, self.get_invoices().ids)]
self.action_pending()
return True

@api.model
def export_zip_invoice(self, invoice_ids=None):
@@ -357,6 +382,9 @@ def _create_activity(self):
}
)

def action_invalidate_lines(self):
self.line_ids.write({"invalidated": True})

def action_validate(self):
generator = self.env["datev.xml.generator"]
for invoice in self.invoice_ids:
@@ -381,13 +409,6 @@ def action_pending(self):
r.invoice_ids = [(6, 0, r.get_invoices().ids)]
if r.invoices_count == 0:
raise ValidationError(_("No invoices/refunds for export!"))
if r.invoices_count > 4999 and r.check_xsd:
raise ValidationError(
_(
"The numbers of invoices/refunds is limited to 4999 by DATEV! "
"Please decrease the number of documents or deactivate Check XSD."
)
)
if r.state == "running":
raise ValidationError(
_("It's not allowed to set an already running export to pending!")
@@ -400,12 +421,12 @@ def action_pending(self):
)

def action_draft(self):
for r in self:
if r.state == "running":
raise ValidationError(
_("It's not allowed to set a running export to draft!")
)
r.write({"state": "draft"})
if "running" in self.mapped("state"):
raise ValidationError(
_("It's not allowed to set a running export to draft!")
)

self.write({"state": "draft", "line_ids": [(5,)]})

def action_show_invalid_invoices_view(self):
tree_view = self.env.ref("datev_export_xml.view_move_datev_validation")
@@ -450,6 +471,7 @@ def write(self, vals):
for r in self:
if r.manually_document_selection:
continue

super(DatevExport, r).write(
{"invoice_ids": [(6, 0, r.get_invoices().ids)]}
)
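
For context on the loop in the new _get_zip above: it unpacks (zip_file, invoices) pairs from generate_zip, so that method now has to behave as a generator that splits the export into packages, presumably also keeping each file within the 4999-document DATEV limit that the removed action_pending check used to enforce up front. The updated generate_zip itself is outside this excerpt; a minimal sketch of the expected contract, with the package size and the helper call purely assumed, might look like this:

# Hypothetical sketch of the generator contract consumed by _get_zip;
# the real generate_zip changes are not part of this excerpt.
def generate_zip(self, invoices, check_xsd, package_size=1000):
    """Yield one (zip_file, invoices) pair per package of invoices."""
    for start in range(0, len(invoices), package_size):
        # Odoo recordsets support slicing, so each package is itself a recordset.
        package = invoices[start : start + package_size]
        # _build_zip is an assumed helper that renders and zips one package.
        zip_file = self._build_zip(package, check_xsd)
        yield zip_file, package
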