Merge pull request #38118 from frappe/mergify/bp/version-15-hotfix/pr-38038
refactor: supercharge Bulk actions (backport #38038)
commit d18fd87650
erpnext/bulk_transaction/doctype/bulk_transaction_log/bulk_transaction_log.js

@@ -1,30 +1,21 @@
// Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt

frappe.ui.form.on('Bulk Transaction Log', {

	refresh: function(frm) {
		frm.disable_save();
		frm.add_custom_button(__('Retry Failed Transactions'), ()=>{
			frappe.confirm(__("Retry Failing Transactions ?"), ()=>{
				query(frm, 1);
			}
			);
		});
	}
frappe.ui.form.on("Bulk Transaction Log", {
	refresh(frm) {
		frm.add_custom_button(__('Succeeded Entries'), function() {
			frappe.set_route('List', 'Bulk Transaction Log Detail', {'date': frm.doc.date, 'transaction_status': "Success"});
		}, __("View"));
		frm.add_custom_button(__('Failed Entries'), function() {
			frappe.set_route('List', 'Bulk Transaction Log Detail', {'date': frm.doc.date, 'transaction_status': "Failed"});
		}, __("View"));
		if (frm.doc.failed) {
			frm.add_custom_button(__('Retry Failed Transactions'), function() {
				frappe.call({
					method: "erpnext.utilities.bulk_transaction.retry",
					args: {date: frm.doc.date}
				});
			});
		}
	},
});

function query(frm) {
	frappe.call({
		method: "erpnext.bulk_transaction.doctype.bulk_transaction_log.bulk_transaction_log.retry_failing_transaction",
		args: {
			log_date: frm.doc.log_date
		}
	}).then((r) => {
		if (r.message === "No Failed Records") {
			frappe.show_alert(__(r.message), 5);
		} else {
			frappe.show_alert(__("Retrying Failed Transactions"), 5);
		}
	});
}

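The rewritten form script drops the old query() helper and the disable_save/confirm flow: the log form now gets two filtered "View" buttons into Bulk Transaction Log Detail, plus a "Retry Failed Transactions" button that is shown only when frm.doc.failed is non-zero and calls the whitelisted erpnext.utilities.bulk_transaction.retry method. A minimal server-side sketch of that round trip, assuming a bench console on a site that has failed bulk transactions (the date value is hypothetical):

	from erpnext.utilities.bulk_transaction import retry

	# Same effect as pressing "Retry Failed Transactions" on the form:
	# enqueues retry_failed_transactions if unretried failures exist for that date.
	retry(date="2023-11-10")  # hypothetical date, taken from frm.doc.date on the form
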
erpnext/bulk_transaction/doctype/bulk_transaction_log/bulk_transaction_log.json

@@ -1,31 +1,64 @@
{
 "actions": [],
 "allow_rename": 1,
 "creation": "2021-11-30 13:41:16.343827",
 "allow_copy": 1,
 "creation": "2023-11-09 20:14:45.139593",
 "default_view": "List",
 "doctype": "DocType",
 "editable_grid": 1,
 "engine": "InnoDB",
 "field_order": [
  "log_date",
  "logger_data"
  "date",
  "column_break_bsan",
  "log_entries",
  "section_break_mdmv",
  "succeeded",
  "column_break_qryp",
  "failed"
 ],
 "fields": [
  {
   "fieldname": "log_date",
   "fieldname": "date",
   "fieldtype": "Date",
   "label": "Log Date",
   "in_list_view": 1,
   "in_standard_filter": 1,
   "label": "Date",
   "read_only": 1
  },
  {
   "fieldname": "logger_data",
   "fieldtype": "Table",
   "label": "Logger Data",
   "options": "Bulk Transaction Log Detail"
   "fieldname": "log_entries",
   "fieldtype": "Int",
   "in_list_view": 1,
   "label": "Log Entries",
   "read_only": 1
  },
  {
   "fieldname": "column_break_bsan",
   "fieldtype": "Column Break"
  },
  {
   "fieldname": "section_break_mdmv",
   "fieldtype": "Section Break"
  },
  {
   "fieldname": "succeeded",
   "fieldtype": "Int",
   "label": "Succeeded",
   "read_only": 1
  },
  {
   "fieldname": "column_break_qryp",
   "fieldtype": "Column Break"
  },
  {
   "fieldname": "failed",
   "fieldtype": "Int",
   "label": "Failed",
   "read_only": 1
  }
 ],
 "index_web_pages_for_search": 1,
 "in_create": 1,
 "is_virtual": 1,
 "links": [],
 "modified": "2022-02-03 17:23:02.935325",
 "modified": "2023-11-11 04:52:49.347376",
 "modified_by": "Administrator",
 "module": "Bulk Transaction",
 "name": "Bulk Transaction Log",
@@ -47,5 +80,5 @@
 "sort_field": "modified",
 "sort_order": "DESC",
 "states": [],
 "track_changes": 1
 "title_field": "date"
}

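The reworked DocType definition turns Bulk Transaction Log into a virtual DocType ("is_virtual": 1, "in_create": 1): no table is created for it, and date, log_entries, succeeded and failed are read-only summary fields supplied by the controller below. A small, illustrative sketch for checking the resulting meta from a bench console:

	import frappe

	meta = frappe.get_meta("Bulk Transaction Log")
	# Expect is_virtual == 1 and the summary fields listed in field_order above.
	print(meta.is_virtual, [df.fieldname for df in meta.fields])
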
erpnext/bulk_transaction/doctype/bulk_transaction_log/bulk_transaction_log.py

@@ -1,67 +1,112 @@
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

from datetime import date

import frappe
from frappe import qb
from frappe.model.document import Document

from erpnext.utilities.bulk_transaction import task, update_logger
from frappe.query_builder.functions import Count
from frappe.utils import cint
from pypika import Order


class BulkTransactionLog(Document):
	pass
	def db_insert(self, *args, **kwargs):
		pass

	def load_from_db(self):
		log_detail = qb.DocType("Bulk Transaction Log Detail")

@frappe.whitelist()
def retry_failing_transaction(log_date=None):
	if not log_date:
		log_date = str(date.today())
	btp = frappe.qb.DocType("Bulk Transaction Log Detail")
	data = (
		frappe.qb.from_(btp)
		.select(btp.transaction_name, btp.from_doctype, btp.to_doctype)
		.distinct()
		.where(btp.retried != 1)
		.where(btp.transaction_status == "Failed")
		.where(btp.date == log_date)
	).run(as_dict=True)
		has_records = frappe.db.sql(
			f"select exists (select * from `tabBulk Transaction Log Detail` where date = '{self.name}');"
		)[0][0]
		if not has_records:
			raise frappe.DoesNotExistError

	if data:
		if len(data) > 10:
			frappe.enqueue(job, queue="long", job_name="bulk_retry", data=data, log_date=log_date)
		else:
			job(data, log_date)
	else:
		return "No Failed Records"
		succeeded_logs = (
			qb.from_(log_detail)
			.select(Count(log_detail.date).as_("count"))
			.where((log_detail.date == self.name) & (log_detail.transaction_status == "Success"))
			.run()
		)[0][0] or 0
		failed_logs = (
			qb.from_(log_detail)
			.select(Count(log_detail.date).as_("count"))
			.where((log_detail.date == self.name) & (log_detail.transaction_status == "Failed"))
			.run()
		)[0][0] or 0
		total_logs = succeeded_logs + failed_logs
		transaction_log = frappe._dict(
			{
				"date": self.name,
				"count": total_logs,
				"succeeded": succeeded_logs,
				"failed": failed_logs,
			}
		)
		super(Document, self).__init__(serialize_transaction_log(transaction_log))

	@staticmethod
	def get_list(args):
		filter_date = parse_list_filters(args)
		limit = cint(args.get("page_length")) or 20
		log_detail = qb.DocType("Bulk Transaction Log Detail")

def job(data, log_date):
	for d in data:
		failed = []
		try:
			frappe.db.savepoint("before_creation_of_record")
			task(d.transaction_name, d.from_doctype, d.to_doctype)
		except Exception as e:
			frappe.db.rollback(save_point="before_creation_of_record")
			failed.append(e)
			update_logger(
				d.transaction_name,
				e,
				d.from_doctype,
				d.to_doctype,
				status="Failed",
				log_date=log_date,
				restarted=1,
		dates_query = (
			qb.from_(log_detail)
			.select(log_detail.date)
			.distinct()
			.orderby(log_detail.date, order=Order.desc)
			.limit(limit)
		)
		if filter_date:
			dates_query = dates_query.where(log_detail.date == filter_date)
		dates = dates_query.run()

		transaction_logs = []
		if dates:
			transaction_logs_query = (
				qb.from_(log_detail)
				.select(log_detail.date.as_("date"), Count(log_detail.date).as_("count"))
				.where(log_detail.date.isin(dates))
				.orderby(log_detail.date, order=Order.desc)
				.groupby(log_detail.date)
				.limit(limit)
			)
			transaction_logs = transaction_logs_query.run(as_dict=True)

		if not failed:
			update_logger(
				d.transaction_name,
				None,
				d.from_doctype,
				d.to_doctype,
				status="Success",
				log_date=log_date,
				restarted=1,
			)
		return [serialize_transaction_log(x) for x in transaction_logs]

	@staticmethod
	def get_count(args):
		pass

	@staticmethod
	def get_stats(args):
		pass

	def db_update(self, *args, **kwargs):
		pass

	def delete(self):
		pass


def serialize_transaction_log(data):
	return frappe._dict(
		name=data.date,
		date=data.date,
		log_entries=data.count,
		succeeded=data.succeeded,
		failed=data.failed,
	)


def parse_list_filters(args):
	# parse date filter
	filter_date = None
	for fil in args.get("filters"):
		if isinstance(fil, list):
			for elem in fil:
				if elem == "date":
					filter_date = fil[3]
	return filter_date

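The controller now backs the virtual DocType: load_from_db aggregates per-date Success/Failed counts from Bulk Transaction Log Detail with the query builder, get_list builds the list view from the distinct dates, and the old retry_failing_transaction/job pair moves out of this module. A minimal sketch of how the virtual document is consumed (the date is hypothetical and must match a day that has detail rows):

	import frappe

	# get_doc on a virtual DocType routes through BulkTransactionLog.load_from_db;
	# a log's name is simply the date whose detail rows it summarises.
	log = frappe.get_doc("Bulk Transaction Log", "2023-11-10")
	print(log.log_entries, log.succeeded, log.failed)
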
erpnext/bulk_transaction/doctype/bulk_transaction_log/test_bulk_transaction_log.py

@@ -1,79 +1,9 @@
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt

import unittest
from datetime import date

import frappe

from erpnext.utilities.bulk_transaction import transaction_processing
# import frappe
from frappe.tests.utils import FrappeTestCase


class TestBulkTransactionLog(unittest.TestCase):
	def setUp(self):
		create_company()
		create_customer()
		create_item()

	def test_entry_in_log(self):
		so_name = create_so()
		transaction_processing([{"name": so_name}], "Sales Order", "Sales Invoice")
		doc = frappe.get_doc("Bulk Transaction Log", str(date.today()))
		for d in doc.get("logger_data"):
			if d.transaction_name == so_name:
				self.assertEqual(d.transaction_name, so_name)
				self.assertEqual(d.transaction_status, "Success")
				self.assertEqual(d.from_doctype, "Sales Order")
				self.assertEqual(d.to_doctype, "Sales Invoice")
				self.assertEqual(d.retried, 0)


def create_company():
	if not frappe.db.exists("Company", "_Test Company"):
		frappe.get_doc(
			{
				"doctype": "Company",
				"company_name": "_Test Company",
				"country": "India",
				"default_currency": "INR",
			}
		).insert()


def create_customer():
	if not frappe.db.exists("Customer", "Bulk Customer"):
		frappe.get_doc({"doctype": "Customer", "customer_name": "Bulk Customer"}).insert()


def create_item():
	if not frappe.db.exists("Item", "MK"):
		frappe.get_doc(
			{
				"doctype": "Item",
				"item_code": "MK",
				"item_name": "Milk",
				"description": "Milk",
				"item_group": "Products",
			}
		).insert()


def create_so(intent=None):
	so = frappe.new_doc("Sales Order")
	so.customer = "Bulk Customer"
	so.company = "_Test Company"
	so.transaction_date = date.today()

	so.set_warehouse = "Finished Goods - _TC"
	so.append(
		"items",
		{
			"item_code": "MK",
			"delivery_date": date.today(),
			"qty": 10,
			"rate": 80,
		},
	)
	so.insert()
	so.submit()
	return so.name
class TestBulkTransactionLog(FrappeTestCase):
	pass

erpnext/bulk_transaction/doctype/bulk_transaction_log_detail/bulk_transaction_log_detail.js

@@ -0,0 +1,8 @@
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt

// frappe.ui.form.on("Bulk Transaction Log Detail", {
// 	refresh(frm) {

// 	},
// });

erpnext/bulk_transaction/doctype/bulk_transaction_log_detail/bulk_transaction_log_detail.json

@@ -6,12 +6,12 @@
 "editable_grid": 1,
 "engine": "InnoDB",
 "field_order": [
  "from_doctype",
  "transaction_name",
  "date",
  "time",
  "transaction_status",
  "error_description",
  "from_doctype",
  "to_doctype",
  "retried"
 ],
@@ -20,8 +20,11 @@
   "fieldname": "transaction_name",
   "fieldtype": "Dynamic Link",
   "in_list_view": 1,
   "in_standard_filter": 1,
   "label": "Name",
   "options": "from_doctype"
   "options": "from_doctype",
   "read_only": 1,
   "search_index": 1
  },
  {
   "fieldname": "transaction_status",
@@ -39,9 +42,11 @@
  {
   "fieldname": "from_doctype",
   "fieldtype": "Link",
   "in_standard_filter": 1,
   "label": "From Doctype",
   "options": "DocType",
   "read_only": 1
   "read_only": 1,
   "search_index": 1
  },
  {
   "fieldname": "to_doctype",
@@ -54,8 +59,10 @@
   "fieldname": "date",
   "fieldtype": "Date",
   "in_list_view": 1,
   "in_standard_filter": 1,
   "label": "Date ",
   "read_only": 1
   "read_only": 1,
   "search_index": 1
  },
  {
   "fieldname": "time",
@@ -66,19 +73,33 @@
  {
   "fieldname": "retried",
   "fieldtype": "Int",
   "in_list_view": 1,
   "label": "Retried",
   "read_only": 1
  }
 ],
 "in_create": 1,
 "index_web_pages_for_search": 1,
 "istable": 1,
 "links": [],
 "modified": "2022-02-03 19:57:31.650359",
 "modified": "2023-11-10 11:44:10.758342",
 "modified_by": "Administrator",
 "module": "Bulk Transaction",
 "name": "Bulk Transaction Log Detail",
 "owner": "Administrator",
 "permissions": [],
 "permissions": [
  {
   "create": 1,
   "delete": 1,
   "email": 1,
   "export": 1,
   "print": 1,
   "read": 1,
   "report": 1,
   "role": "System Manager",
   "share": 1,
   "write": 1
  }
 ],
 "sort_field": "modified",
 "sort_order": "DESC",
 "states": [],

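Bulk Transaction Log Detail stops being a child table and becomes a standalone, System Manager-permissioned DocType with search indexes on transaction_name, from_doctype and date, which is what the filtered "View" routes on the log form point at. Roughly what the "Failed Entries" route resolves to on the server (the date value is hypothetical):

	import frappe

	failed_entries = frappe.get_all(
		"Bulk Transaction Log Detail",
		filters={"date": "2023-11-10", "transaction_status": "Failed"},
		fields=["transaction_name", "from_doctype", "to_doctype", "retried"],
	)
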
erpnext/bulk_transaction/doctype/bulk_transaction_log_detail/test_bulk_transaction_log_detail.py

@@ -0,0 +1,9 @@
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt

# import frappe
from frappe.tests.utils import FrappeTestCase


class TestBulkTransactionLogDetail(FrappeTestCase):
	pass

erpnext/utilities/bulk_transaction.py

@@ -3,6 +3,7 @@ from datetime import date, datetime

import frappe
from frappe import _
from frappe.utils import get_link_to_form, today


@frappe.whitelist()
@@ -28,6 +29,48 @@ def transaction_processing(data, from_doctype, to_doctype):
		job(deserialized_data, from_doctype, to_doctype)


@frappe.whitelist()
def retry(date: str | None):
	if date:
		failed_docs = frappe.db.get_all(
			"Bulk Transaction Log Detail",
			filters={"date": date, "transaction_status": "Failed", "retried": 0},
			fields=["name", "transaction_name", "from_doctype", "to_doctype"],
		)
		if not failed_docs:
			frappe.msgprint(_("There are no Failed transactions"))
		else:
			job = frappe.enqueue(
				retry_failed_transactions,
				failed_docs=failed_docs,
			)
			frappe.msgprint(
				_("Job: {0} has been triggered for processing failed transactions").format(
					get_link_to_form("RQ Job", job.id)
				)
			)


def retry_failed_transactions(failed_docs: list | None):
	if failed_docs:
		for log in failed_docs:
			try:
				frappe.db.savepoint("before_creation_state")
				task(log.transaction_name, log.from_doctype, log.to_doctype)
			except Exception as e:
				frappe.db.rollback(save_point="before_creation_state")
				update_log(log.name, "Failed", 1, str(frappe.get_traceback()))
			else:
				update_log(log.name, "Success", 1)


def update_log(log_name, status, retried, err=None):
	frappe.db.set_value("Bulk Transaction Log Detail", log_name, "transaction_status", status)
	frappe.db.set_value("Bulk Transaction Log Detail", log_name, "retried", retried)
	if err:
		frappe.db.set_value("Bulk Transaction Log Detail", log_name, "error_description", err)


def job(deserialized_data, from_doctype, to_doctype):
	fail_count = 0
	for d in deserialized_data:
@@ -38,7 +81,7 @@ def job(deserialized_data, from_doctype, to_doctype):
		except Exception as e:
			frappe.db.rollback(save_point="before_creation_state")
			fail_count += 1
			update_logger(
			create_log(
				doc_name,
				str(frappe.get_traceback()),
				from_doctype,
@@ -47,7 +90,7 @@ def job(deserialized_data, from_doctype, to_doctype):
				log_date=str(date.today()),
			)
		else:
			update_logger(
			create_log(
				doc_name, None, from_doctype, to_doctype, status="Success", log_date=str(date.today())
			)

@@ -108,45 +151,18 @@ def task(doc_name, from_doctype, to_doctype):
	obj.insert(ignore_mandatory=True)


def check_logger_doc_exists(log_date):
	return frappe.db.exists("Bulk Transaction Log", log_date)


def get_logger_doc(log_date):
	return frappe.get_doc("Bulk Transaction Log", log_date)


def create_logger_doc():
	log_doc = frappe.new_doc("Bulk Transaction Log")
	log_doc.set_new_name(set_name=str(date.today()))
	log_doc.log_date = date.today()

	return log_doc


def append_data_to_logger(log_doc, doc_name, error, from_doctype, to_doctype, status, restarted):
	row = log_doc.append("logger_data", {})
	row.transaction_name = doc_name
	row.date = date.today()
def create_log(doc_name, e, from_doctype, to_doctype, status, log_date=None, restarted=0):
	transaction_log = frappe.new_doc("Bulk Transaction Log Detail")
	transaction_log.transaction_name = doc_name
	transaction_log.date = today()
	now = datetime.now()
	row.time = now.strftime("%H:%M:%S")
	row.transaction_status = status
	row.error_description = str(error)
	row.from_doctype = from_doctype
	row.to_doctype = to_doctype
	row.retried = restarted


def update_logger(doc_name, e, from_doctype, to_doctype, status, log_date=None, restarted=0):
	if not check_logger_doc_exists(log_date):
		log_doc = create_logger_doc()
		append_data_to_logger(log_doc, doc_name, e, from_doctype, to_doctype, status, restarted)
		log_doc.insert()
	else:
		log_doc = get_logger_doc(log_date)
		if record_exists(log_doc, doc_name, status):
			append_data_to_logger(log_doc, doc_name, e, from_doctype, to_doctype, status, restarted)
			log_doc.save()
	transaction_log.time = now.strftime("%H:%M:%S")
	transaction_log.transaction_status = status
	transaction_log.error_description = str(e)
	transaction_log.from_doctype = from_doctype
	transaction_log.to_doctype = to_doctype
	transaction_log.retried = restarted
	transaction_log.save()


def show_job_status(fail_count, deserialized_data_count, to_doctype):
@@ -176,23 +192,3 @@ def show_job_status(fail_count, deserialized_data_count, to_doctype):
			title="Failed",
			indicator="red",
		)


def record_exists(log_doc, doc_name, status):
	record = mark_retrired_transaction(log_doc, doc_name)
	if record and status == "Failed":
		return False
	elif record and status == "Success":
		return True
	else:
		return True


def mark_retrired_transaction(log_doc, doc_name):
	record = 0
	for d in log_doc.get("logger_data"):
		if d.transaction_name == doc_name and d.transaction_status == "Failed":
			frappe.db.set_value("Bulk Transaction Log Detail", d.name, "retried", 1)
			record = record + 1

	return record

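On the utilities side, the per-date log-document machinery (check_logger_doc_exists, create_logger_doc, append_data_to_logger, update_logger, record_exists, mark_retrired_transaction) collapses into create_log/update_log, which write Bulk Transaction Log Detail rows directly, while retry() enqueues retry_failed_transactions with a savepoint around each record so one failure cannot roll back the rest of the batch. A rough end-to-end sketch, assuming a submitted Sales Order named "SAL-ORD-0001" exists on the site:

	import frappe
	from erpnext.utilities.bulk_transaction import retry, transaction_processing

	# Bulk-create Sales Invoices from Sales Orders; each failure is logged per record.
	transaction_processing([{"name": "SAL-ORD-0001"}], "Sales Order", "Sales Invoice")

	# Later, retry anything that failed today.
	retry(date=frappe.utils.today())
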