Merge pull request #21298 from gavindsouza/tally-migration-fixes

fix: Tally migration
gavin 2020-04-17 19:12:55 +05:30 committed by GitHub
commit 25932e0613
3 changed files with 218 additions and 129 deletions

erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.js

@@ -2,15 +2,40 @@
// For license information, please see license.txt
frappe.ui.form.on('Tally Migration', {
onload: function(frm) {
onload: function (frm) {
let reload_status = true;
frappe.realtime.on("tally_migration_progress_update", function (data) {
if (reload_status) {
frappe.model.with_doc(frm.doc.doctype, frm.doc.name, () => {
frm.refresh_header();
});
reload_status = false;
}
frm.dashboard.show_progress(data.title, (data.count / data.total) * 100, data.message);
if (data.count == data.total) {
window.setTimeout(title => frm.dashboard.hide_progress(title), 1500, data.title);
let error_occurred = data.count === -1;
if (data.count == data.total || error_occurred) {
window.setTimeout((title) => {
frm.dashboard.hide_progress(title);
frm.reload_doc();
if (error_occurred) {
frappe.msgprint({
message: __("An error has occurred during {0}. Check {1} for more details",
[
repl("<a href='#Form/Tally Migration/%(tally_document)s' class='variant-click'>%(tally_document)s</a>", {
tally_document: frm.docname
}),
"<a href='#List/Error Log' class='variant-click'>Error Log</a>"
]
),
title: __("Tally Migration Error"),
indicator: "red"
});
}
}, 2000, data.title);
}
});
},
refresh: function(frm) {
refresh: function (frm) {
if (frm.doc.master_data && !frm.doc.is_master_data_imported) {
if (frm.doc.is_master_data_processed) {
if (frm.doc.status != "Importing Master Data") {
@@ -34,17 +59,17 @@ frappe.ui.form.on('Tally Migration', {
}
}
},
add_button: function(frm, label, method) {
add_button: function (frm, label, method) {
frm.add_custom_button(
label,
() => frm.call({
doc: frm.doc,
method: method,
freeze: true,
callback: () => {
frm.remove_custom_button(label);
}
})
() => {
frm.call({
doc: frm.doc,
method: method,
freeze: true
});
frm.reload_doc();
}
);
}
});
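
For context, the events this handler consumes are emitted from the Python controller through frappe.publish_realtime (the publish() method further down in this diff). A minimal sketch of the convention, assuming a Frappe bench context; publish_progress is a hypothetical stand-in for TallyMigration.publish, and count == -1 is the new failure signal that triggers the msgprint above:

import frappe

def publish_progress(title, message, count, total):
    # mirrors TallyMigration.publish(): the form treats count == total as
    # completion and count == -1 as failure
    frappe.publish_realtime(
        "tally_migration_progress_update",
        {"title": title, "message": message, "count": count, "total": total},
    )

# publish_progress("Process Master Data", "Reading Uploaded File", 1, 5)  # normal step
# publish_progress("Process Master Data", "Process Failed", -1, 5)        # error path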

erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.json

@@ -1,4 +1,5 @@
{
"actions": [],
"beta": 1,
"creation": "2019-02-01 14:27:09.485238",
"doctype": "DocType",
@@ -14,6 +15,7 @@
"tally_debtors_account",
"company_section",
"tally_company",
"default_uom",
"column_break_8",
"erpnext_company",
"processed_files_section",
@@ -43,6 +45,7 @@
"label": "Status"
},
{
"description": "Data exported from Tally that consists of the Chart of Accounts, Customers, Suppliers, Addresses, Items and UOMs",
"fieldname": "master_data",
"fieldtype": "Attach",
"in_list_view": 1,
@@ -50,6 +53,7 @@
},
{
"default": "Sundry Creditors",
"description": "Creditors Account set in Tally",
"fieldname": "tally_creditors_account",
"fieldtype": "Data",
"label": "Tally Creditors Account",
@@ -61,6 +65,7 @@
},
{
"default": "Sundry Debtors",
"description": "Debtors Account set in Tally",
"fieldname": "tally_debtors_account",
"fieldtype": "Data",
"label": "Tally Debtors Account",
@@ -72,6 +77,7 @@
"fieldtype": "Section Break"
},
{
"description": "Company Name as per Imported Tally Data",
"fieldname": "tally_company",
"fieldtype": "Data",
"label": "Tally Company",
@@ -82,9 +88,11 @@
"fieldtype": "Column Break"
},
{
"description": "Your Company set in ERPNext",
"fieldname": "erpnext_company",
"fieldtype": "Data",
"label": "ERPNext Company"
"label": "ERPNext Company",
"read_only_depends_on": "eval:doc.is_master_data_processed == 1"
},
{
"fieldname": "processed_files_section",
@@ -155,24 +163,28 @@
"options": "Cost Center"
},
{
"default": "0",
"fieldname": "is_master_data_processed",
"fieldtype": "Check",
"label": "Is Master Data Processed",
"read_only": 1
},
{
"default": "0",
"fieldname": "is_day_book_data_processed",
"fieldtype": "Check",
"label": "Is Day Book Data Processed",
"read_only": 1
},
{
"default": "0",
"fieldname": "is_day_book_data_imported",
"fieldtype": "Check",
"label": "Is Day Book Data Imported",
"read_only": 1
},
{
"default": "0",
"fieldname": "is_master_data_imported",
"fieldtype": "Check",
"label": "Is Master Data Imported",
@@ -188,13 +200,23 @@
"fieldtype": "Column Break"
},
{
"description": "Day Book Data exported from Tally that consists of all historic transactions",
"fieldname": "day_book_data",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Day Book Data"
},
{
"default": "Unit",
"description": "UOM in case unspecified in imported data",
"fieldname": "default_uom",
"fieldtype": "Link",
"label": "Default UOM",
"options": "UOM"
}
],
"modified": "2019-04-29 05:46:54.394967",
"links": [],
"modified": "2020-04-16 13:03:28.894919",
"modified_by": "Administrator",
"module": "ERPNext Integrations",
"name": "Tally Migration",

erpnext/erpnext_integrations/doctype/tally_migration/tally_migration.py

@@ -4,20 +4,23 @@
from __future__ import unicode_literals
from decimal import Decimal
import json
import re
import traceback
import zipfile
from decimal import Decimal
from bs4 import BeautifulSoup as bs
import frappe
from erpnext import encode_company_abbr
from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts
from frappe import _
from frappe.custom.doctype.custom_field.custom_field import create_custom_field
from frappe.model.document import Document
from frappe.model.naming import getseries, revert_series_if_last
from frappe.utils.data import format_datetime
from bs4 import BeautifulSoup as bs
from erpnext import encode_company_abbr
from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts
PRIMARY_ACCOUNT = "Primary"
VOUCHER_CHUNK_SIZE = 500
@@ -39,13 +42,15 @@ class TallyMigration(Document):
return string
master_file = frappe.get_doc("File", {"file_url": data_file})
master_file_path = master_file.get_full_path()
with zipfile.ZipFile(master_file.get_full_path()) as zf:
encoded_content = zf.read(zf.namelist()[0])
try:
content = encoded_content.decode("utf-8-sig")
except UnicodeDecodeError:
content = encoded_content.decode("utf-16")
if zipfile.is_zipfile(master_file_path):
with zipfile.ZipFile(master_file_path) as zf:
encoded_content = zf.read(zf.namelist()[0])
try:
content = encoded_content.decode("utf-8-sig")
except UnicodeDecodeError:
content = encoded_content.decode("utf-16")
master = bs(sanitize(emptify(content)), "xml")
collection = master.BODY.IMPORTDATA.REQUESTDATA
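
A side note on why .strip() now appears throughout this file: with BeautifulSoup's xml parser (which needs lxml installed), tag text keeps the surrounding whitespace and newlines from the Tally export, so names and values carry stray spaces unless stripped. A standalone sketch with a hypothetical, heavily trimmed sample:

from bs4 import BeautifulSoup as bs

sample = """<ENVELOPE><BODY><IMPORTDATA><REQUESTDATA>
  <TALLYMESSAGE>
    <REMOTECMPINFO.LIST><REMOTECMPNAME>
      Acme Traders
    </REMOTECMPNAME></REMOTECMPINFO.LIST>
  </TALLYMESSAGE>
</REQUESTDATA></BODY></IMPORTDATA></ENVELOPE>"""

collection = bs(sample, "xml").BODY.IMPORTDATA.REQUESTDATA
name = collection.find_all("REMOTECMPINFO.LIST")[0].REMOTECMPNAME.string
print(repr(name))          # whitespace and newlines are preserved around 'Acme Traders'
print(repr(name.strip()))  # 'Acme Traders'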
@@ -58,13 +63,14 @@ class TallyMigration(Document):
"file_name": key + ".json",
"attached_to_doctype": self.doctype,
"attached_to_name": self.name,
"content": json.dumps(value)
"content": json.dumps(value),
"is_private": True
}).insert()
setattr(self, key, f.file_url)
def _process_master_data(self):
def get_company_name(collection):
return collection.find_all("REMOTECMPINFO.LIST")[0].REMOTECMPNAME.string
return collection.find_all("REMOTECMPINFO.LIST")[0].REMOTECMPNAME.string.strip()
def get_coa_customers_suppliers(collection):
root_type_map = {
@@ -97,17 +103,17 @@ class TallyMigration(Document):
# If Ledger doesn't have PARENT field then don't create Account
# For example "Profit & Loss A/c"
if account.PARENT:
yield account.PARENT.string, account["NAME"], 0
yield account.PARENT.string.strip(), account["NAME"], 0
def get_parent(account):
if account.PARENT:
return account.PARENT.string
return account.PARENT.string.strip()
return {
("Yes", "No"): "Application of Funds (Assets)",
("Yes", "Yes"): "Expenses",
("No", "Yes"): "Income",
("No", "No"): "Source of Funds (Liabilities)",
}[(account.ISDEEMEDPOSITIVE.string, account.ISREVENUE.string)]
}[(account.ISDEEMEDPOSITIVE.string.strip(), account.ISREVENUE.string.strip())]
def get_children_and_parent_dict(accounts):
children, parents = {}, {}
@@ -145,38 +151,38 @@ class TallyMigration(Document):
parties, addresses = [], []
for account in collection.find_all("LEDGER"):
party_type = None
if account.NAME.string in customers:
if account.NAME.string.strip() in customers:
party_type = "Customer"
parties.append({
"doctype": party_type,
"customer_name": account.NAME.string,
"tax_id": account.INCOMETAXNUMBER.string if account.INCOMETAXNUMBER else None,
"customer_name": account.NAME.string.strip(),
"tax_id": account.INCOMETAXNUMBER.string.strip() if account.INCOMETAXNUMBER else None,
"customer_group": "All Customer Groups",
"territory": "All Territories",
"customer_type": "Individual",
})
elif account.NAME.string in suppliers:
elif account.NAME.string.strip() in suppliers:
party_type = "Supplier"
parties.append({
"doctype": party_type,
"supplier_name": account.NAME.string,
"pan": account.INCOMETAXNUMBER.string if account.INCOMETAXNUMBER else None,
"supplier_name": account.NAME.string.strip(),
"pan": account.INCOMETAXNUMBER.string.strip() if account.INCOMETAXNUMBER else None,
"supplier_group": "All Supplier Groups",
"supplier_type": "Individual",
})
if party_type:
address = "\n".join([a.string for a in account.find_all("ADDRESS")])
address = "\n".join([a.string.strip() for a in account.find_all("ADDRESS")])
addresses.append({
"doctype": "Address",
"address_line1": address[:140].strip(),
"address_line2": address[140:].strip(),
"country": account.COUNTRYNAME.string if account.COUNTRYNAME else None,
"state": account.LEDSTATENAME.string if account.LEDSTATENAME else None,
"gst_state": account.LEDSTATENAME.string if account.LEDSTATENAME else None,
"pin_code": account.PINCODE.string if account.PINCODE else None,
"mobile": account.LEDGERPHONE.string if account.LEDGERPHONE else None,
"phone": account.LEDGERPHONE.string if account.LEDGERPHONE else None,
"gstin": account.PARTYGSTIN.string if account.PARTYGSTIN else None,
"country": account.COUNTRYNAME.string.strip() if account.COUNTRYNAME else None,
"state": account.LEDSTATENAME.string.strip() if account.LEDSTATENAME else None,
"gst_state": account.LEDSTATENAME.string.strip() if account.LEDSTATENAME else None,
"pin_code": account.PINCODE.string.strip() if account.PINCODE else None,
"mobile": account.LEDGERPHONE.string.strip() if account.LEDGERPHONE else None,
"phone": account.LEDGERPHONE.string.strip() if account.LEDGERPHONE else None,
"gstin": account.PARTYGSTIN.string.strip() if account.PARTYGSTIN else None,
"links": [{"link_doctype": party_type, "link_name": account["NAME"]}],
})
return parties, addresses
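
One detail worth noting in the block above: all ADDRESS lines are joined with newlines and the result is split at 140 characters (the Data field length) between address_line1 and address_line2. A quick standalone illustration with hypothetical data:

# joined Tally ADDRESS lines (hypothetical data)
address = "\n".join(["221B Baker Street", "Marylebone", "London"])
address_line1 = address[:140].strip()   # the whole address, since it is under 140 characters
address_line2 = address[140:].strip()   # empty; only overflow past 140 characters lands here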
@@ -184,41 +190,50 @@ class TallyMigration(Document):
def get_stock_items_uoms(collection):
uoms = []
for uom in collection.find_all("UNIT"):
uoms.append({"doctype": "UOM", "uom_name": uom.NAME.string})
uoms.append({"doctype": "UOM", "uom_name": uom.NAME.string.strip()})
items = []
for item in collection.find_all("STOCKITEM"):
stock_uom = item.BASEUNITS.string.strip() if item.BASEUNITS else self.default_uom
items.append({
"doctype": "Item",
"item_code" : item.NAME.string,
"stock_uom": item.BASEUNITS.string,
"item_code" : item.NAME.string.strip(),
"stock_uom": stock_uom.strip(),
"is_stock_item": 0,
"item_group": "All Item Groups",
"item_defaults": [{"company": self.erpnext_company}]
})
return items, uoms
try:
self.publish("Process Master Data", _("Reading Uploaded File"), 1, 5)
collection = self.get_collection(self.master_data)
company = get_company_name(collection)
self.tally_company = company
self.erpnext_company = company
self.publish("Process Master Data", _("Reading Uploaded File"), 1, 5)
collection = self.get_collection(self.master_data)
self.publish("Process Master Data", _("Processing Chart of Accounts and Parties"), 2, 5)
chart_of_accounts, customers, suppliers = get_coa_customers_suppliers(collection)
company = get_company_name(collection)
self.tally_company = company
self.erpnext_company = company
self.publish("Process Master Data", _("Processing Party Addresses"), 3, 5)
parties, addresses = get_parties_addresses(collection, customers, suppliers)
self.publish("Process Master Data", _("Processing Chart of Accounts and Parties"), 2, 5)
chart_of_accounts, customers, suppliers = get_coa_customers_suppliers(collection)
self.publish("Process Master Data", _("Processing Party Addresses"), 3, 5)
parties, addresses = get_parties_addresses(collection, customers, suppliers)
self.publish("Process Master Data", _("Processing Items and UOMs"), 4, 5)
items, uoms = get_stock_items_uoms(collection)
data = {"chart_of_accounts": chart_of_accounts, "parties": parties, "addresses": addresses, "items": items, "uoms": uoms}
self.publish("Process Master Data", _("Done"), 5, 5)
self.publish("Process Master Data", _("Processing Items and UOMs"), 4, 5)
items, uoms = get_stock_items_uoms(collection)
data = {"chart_of_accounts": chart_of_accounts, "parties": parties, "addresses": addresses, "items": items, "uoms": uoms}
self.dump_processed_data(data)
self.is_master_data_processed = 1
self.status = ""
self.save()
self.publish("Process Master Data", _("Done"), 5, 5)
self.dump_processed_data(data)
self.is_master_data_processed = 1
except:
self.publish("Process Master Data", _("Process Failed"), -1, 5)
self.log()
finally:
self.set_status()
def publish(self, title, message, count, total):
frappe.publish_realtime("tally_migration_progress_update", {"title": title, "message": message, "count": count, "total": total})
@@ -256,7 +271,6 @@ class TallyMigration(Document):
except:
self.log(address)
def create_items_uoms(items_file_url, uoms_file_url):
uoms_file = frappe.get_doc("File", {"file_url": uoms_file_url})
for uom in json.loads(uoms_file.get_content()):
@@ -273,25 +287,35 @@ class TallyMigration(Document):
except:
self.log(item)
self.publish("Import Master Data", _("Creating Company and Importing Chart of Accounts"), 1, 4)
create_company_and_coa(self.chart_of_accounts)
self.publish("Import Master Data", _("Importing Parties and Addresses"), 2, 4)
create_parties_and_addresses(self.parties, self.addresses)
self.publish("Import Master Data", _("Importing Items and UOMs"), 3, 4)
create_items_uoms(self.items, self.uoms)
self.publish("Import Master Data", _("Done"), 4, 4)
self.status = ""
self.is_master_data_imported = 1
self.save()
try:
self.publish("Import Master Data", _("Creating Company and Importing Chart of Accounts"), 1, 4)
create_company_and_coa(self.chart_of_accounts)
self.publish("Import Master Data", _("Importing Parties and Addresses"), 2, 4)
create_parties_and_addresses(self.parties, self.addresses)
self.publish("Import Master Data", _("Importing Items and UOMs"), 3, 4)
create_items_uoms(self.items, self.uoms)
self.publish("Import Master Data", _("Done"), 4, 4)
self.is_master_data_imported = 1
except:
self.publish("Import Master Data", _("Process Failed"), -1, 5)
self.log()
finally:
self.set_status()
def _process_day_book_data(self):
def get_vouchers(collection):
vouchers = []
for voucher in collection.find_all("VOUCHER"):
if voucher.ISCANCELLED.string == "Yes":
if voucher.ISCANCELLED.string.strip() == "Yes":
continue
inventory_entries = voucher.find_all("INVENTORYENTRIES.LIST") + voucher.find_all("ALLINVENTORYENTRIES.LIST") + voucher.find_all("INVENTORYENTRIESIN.LIST") + voucher.find_all("INVENTORYENTRIESOUT.LIST")
if voucher.VOUCHERTYPENAME.string not in ["Journal", "Receipt", "Payment", "Contra"] and inventory_entries:
if voucher.VOUCHERTYPENAME.string.strip() not in ["Journal", "Receipt", "Payment", "Contra"] and inventory_entries:
function = voucher_to_invoice
else:
function = voucher_to_journal_entry
@@ -307,15 +331,15 @@ class TallyMigration(Document):
accounts = []
ledger_entries = voucher.find_all("ALLLEDGERENTRIES.LIST") + voucher.find_all("LEDGERENTRIES.LIST")
for entry in ledger_entries:
account = {"account": encode_company_abbr(entry.LEDGERNAME.string, self.erpnext_company), "cost_center": self.default_cost_center}
if entry.ISPARTYLEDGER.string == "Yes":
party_details = get_party(entry.LEDGERNAME.string)
account = {"account": encode_company_abbr(entry.LEDGERNAME.string.strip(), self.erpnext_company), "cost_center": self.default_cost_center}
if entry.ISPARTYLEDGER.string.strip() == "Yes":
party_details = get_party(entry.LEDGERNAME.string.strip())
if party_details:
party_type, party_account = party_details
account["party_type"] = party_type
account["account"] = party_account
account["party"] = entry.LEDGERNAME.string
amount = Decimal(entry.AMOUNT.string)
account["party"] = entry.LEDGERNAME.string.strip()
amount = Decimal(entry.AMOUNT.string.strip())
if amount > 0:
account["credit_in_account_currency"] = str(abs(amount))
else:
@@ -324,21 +348,21 @@ class TallyMigration(Document):
journal_entry = {
"doctype": "Journal Entry",
"tally_guid": voucher.GUID.string,
"posting_date": voucher.DATE.string,
"tally_guid": voucher.GUID.string.strip(),
"posting_date": voucher.DATE.string.strip(),
"company": self.erpnext_company,
"accounts": accounts,
}
return journal_entry
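
The ledger amounts above follow Tally's sign convention: a positive AMOUNT is posted as a credit and a negative one as a debit (presumably via the mirrored debit_in_account_currency field, which sits just outside this hunk), with abs() applied either way. A standalone sketch of just that conversion, using hypothetical values:

from decimal import Decimal

def split_amount(raw):
    # raw is the text of a Tally AMOUNT tag, e.g. " -1500.00 " (hypothetical value)
    amount = Decimal(raw.strip())
    if amount > 0:
        return {"credit_in_account_currency": str(abs(amount))}
    return {"debit_in_account_currency": str(abs(amount))}

print(split_amount(" -1500.00 "))  # {'debit_in_account_currency': '1500.00'}
print(split_amount(" 250.00 "))    # {'credit_in_account_currency': '250.00'}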
def voucher_to_invoice(voucher):
if voucher.VOUCHERTYPENAME.string in ["Sales", "Credit Note"]:
if voucher.VOUCHERTYPENAME.string.strip() in ["Sales", "Credit Note"]:
doctype = "Sales Invoice"
party_field = "customer"
account_field = "debit_to"
account_name = encode_company_abbr(self.tally_debtors_account, self.erpnext_company)
price_list_field = "selling_price_list"
elif voucher.VOUCHERTYPENAME.string in ["Purchase", "Debit Note"]:
elif voucher.VOUCHERTYPENAME.string.strip() in ["Purchase", "Debit Note"]:
doctype = "Purchase Invoice"
party_field = "supplier"
account_field = "credit_to"
@@ -351,10 +375,10 @@ class TallyMigration(Document):
invoice = {
"doctype": doctype,
party_field: voucher.PARTYNAME.string,
"tally_guid": voucher.GUID.string,
"posting_date": voucher.DATE.string,
"due_date": voucher.DATE.string,
party_field: voucher.PARTYNAME.string.strip(),
"tally_guid": voucher.GUID.string.strip(),
"posting_date": voucher.DATE.string.strip(),
"due_date": voucher.DATE.string.strip(),
"items": get_voucher_items(voucher, doctype),
"taxes": get_voucher_taxes(voucher),
account_field: account_name,
@@ -375,15 +399,15 @@ class TallyMigration(Document):
for entry in inventory_entries:
qty, uom = entry.ACTUALQTY.string.strip().split()
items.append({
"item_code": entry.STOCKITEMNAME.string,
"description": entry.STOCKITEMNAME.string,
"item_code": entry.STOCKITEMNAME.string.strip(),
"description": entry.STOCKITEMNAME.string.strip(),
"qty": qty.strip(),
"uom": uom.strip(),
"conversion_factor": 1,
"price_list_rate": entry.RATE.string.split("/")[0],
"price_list_rate": entry.RATE.string.strip().split("/")[0],
"cost_center": self.default_cost_center,
"warehouse": self.default_warehouse,
account_field: encode_company_abbr(entry.find_all("ACCOUNTINGALLOCATIONS.LIST")[0].LEDGERNAME.string, self.erpnext_company),
account_field: encode_company_abbr(entry.find_all("ACCOUNTINGALLOCATIONS.LIST")[0].LEDGERNAME.string.strip(), self.erpnext_company),
})
return items
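
Tally encodes quantity and rate as composite strings, which the item loop above pulls apart: ACTUALQTY holds "<qty> <uom>" and RATE holds "<rate>/<uom>". A standalone sketch with hypothetical values:

actual_qty = "  5 Nos  "   # text of a Tally ACTUALQTY tag (hypothetical)
rate = " 100.00/Nos "      # text of a Tally RATE tag (hypothetical)

qty, uom = actual_qty.strip().split()          # -> "5", "Nos"
price_list_rate = rate.strip().split("/")[0]   # -> "100.00"
print(qty, uom, price_list_rate)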
@@ -391,13 +415,13 @@ class TallyMigration(Document):
ledger_entries = voucher.find_all("ALLLEDGERENTRIES.LIST") + voucher.find_all("LEDGERENTRIES.LIST")
taxes = []
for entry in ledger_entries:
if entry.ISPARTYLEDGER.string == "No":
tax_account = encode_company_abbr(entry.LEDGERNAME.string, self.erpnext_company)
if entry.ISPARTYLEDGER.string.strip() == "No":
tax_account = encode_company_abbr(entry.LEDGERNAME.string.strip(), self.erpnext_company)
taxes.append({
"charge_type": "Actual",
"account_head": tax_account,
"description": tax_account,
"tax_amount": entry.AMOUNT.string,
"tax_amount": entry.AMOUNT.string.strip(),
"cost_center": self.default_cost_center,
})
return taxes
@@ -408,15 +432,24 @@ class TallyMigration(Document):
elif frappe.db.exists({"doctype": "Customer", "customer_name": party}):
return "Customer", encode_company_abbr(self.tally_debtors_account, self.erpnext_company)
self.publish("Process Day Book Data", _("Reading Uploaded File"), 1, 3)
collection = self.get_collection(self.day_book_data)
self.publish("Process Day Book Data", _("Processing Vouchers"), 2, 3)
vouchers = get_vouchers(collection)
self.publish("Process Day Book Data", _("Done"), 3, 3)
self.dump_processed_data({"vouchers": vouchers})
self.status = ""
self.is_day_book_data_processed = 1
self.save()
try:
self.publish("Process Day Book Data", _("Reading Uploaded File"), 1, 3)
collection = self.get_collection(self.day_book_data)
self.publish("Process Day Book Data", _("Processing Vouchers"), 2, 3)
vouchers = get_vouchers(collection)
self.publish("Process Day Book Data", _("Done"), 3, 3)
self.dump_processed_data({"vouchers": vouchers})
self.is_day_book_data_processed = 1
except:
self.publish("Process Day Book Data", _("Process Failed"), -1, 5)
self.log()
finally:
self.set_status()
def _import_day_book_data(self):
def create_fiscal_years(vouchers):
@@ -454,23 +487,31 @@ class TallyMigration(Document):
"currency": "INR"
}).insert()
frappe.db.set_value("Account", encode_company_abbr(self.tally_creditors_account, self.erpnext_company), "account_type", "Payable")
frappe.db.set_value("Account", encode_company_abbr(self.tally_debtors_account, self.erpnext_company), "account_type", "Receivable")
frappe.db.set_value("Company", self.erpnext_company, "round_off_account", self.round_off_account)
try:
frappe.db.set_value("Account", encode_company_abbr(self.tally_creditors_account, self.erpnext_company), "account_type", "Payable")
frappe.db.set_value("Account", encode_company_abbr(self.tally_debtors_account, self.erpnext_company), "account_type", "Receivable")
frappe.db.set_value("Company", self.erpnext_company, "round_off_account", self.round_off_account)
vouchers_file = frappe.get_doc("File", {"file_url": self.vouchers})
vouchers = json.loads(vouchers_file.get_content())
vouchers_file = frappe.get_doc("File", {"file_url": self.vouchers})
vouchers = json.loads(vouchers_file.get_content())
create_fiscal_years(vouchers)
create_price_list()
create_custom_fields(["Journal Entry", "Purchase Invoice", "Sales Invoice"])
create_fiscal_years(vouchers)
create_price_list()
create_custom_fields(["Journal Entry", "Purchase Invoice", "Sales Invoice"])
total = len(vouchers)
is_last = False
for index in range(0, total, VOUCHER_CHUNK_SIZE):
if index + VOUCHER_CHUNK_SIZE >= total:
is_last = True
frappe.enqueue_doc(self.doctype, self.name, "_import_vouchers", queue="long", timeout=3600, start=index+1, total=total, is_last=is_last)
total = len(vouchers)
is_last = False
for index in range(0, total, VOUCHER_CHUNK_SIZE):
if index + VOUCHER_CHUNK_SIZE >= total:
is_last = True
frappe.enqueue_doc(self.doctype, self.name, "_import_vouchers", queue="long", timeout=3600, start=index+1, total=total, is_last=is_last)
except:
self.log()
finally:
self.set_status()
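
Vouchers are imported in background chunks of VOUCHER_CHUNK_SIZE (500): for example, 1,234 processed vouchers enqueue three _import_vouchers jobs with start values 1, 501 and 1001, and only the last chunk gets is_last=True. A standalone sketch of the same arithmetic:

VOUCHER_CHUNK_SIZE = 500
total = 1234   # hypothetical number of processed vouchers

for index in range(0, total, VOUCHER_CHUNK_SIZE):
    is_last = index + VOUCHER_CHUNK_SIZE >= total
    print({"start": index + 1, "total": total, "is_last": is_last})
# {'start': 1, 'total': 1234, 'is_last': False}
# {'start': 501, 'total': 1234, 'is_last': False}
# {'start': 1001, 'total': 1234, 'is_last': True}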
def _import_vouchers(self, start, total, is_last=False):
frappe.flags.in_migrate = True
@@ -494,25 +535,26 @@ class TallyMigration(Document):
frappe.flags.in_migrate = False
def process_master_data(self):
self.status = "Processing Master Data"
self.save()
self.set_status("Processing Master Data")
frappe.enqueue_doc(self.doctype, self.name, "_process_master_data", queue="long", timeout=3600)
def import_master_data(self):
self.status = "Importing Master Data"
self.save()
self.set_status("Importing Master Data")
frappe.enqueue_doc(self.doctype, self.name, "_import_master_data", queue="long", timeout=3600)
def process_day_book_data(self):
self.status = "Processing Day Book Data"
self.save()
self.set_status("Processing Day Book Data")
frappe.enqueue_doc(self.doctype, self.name, "_process_day_book_data", queue="long", timeout=3600)
def import_day_book_data(self):
self.status = "Importing Day Book Data"
self.save()
self.set_status("Importing Day Book Data")
frappe.enqueue_doc(self.doctype, self.name, "_import_day_book_data", queue="long", timeout=3600)
def log(self, data=None):
message = "\n".join(["Data", json.dumps(data, default=str, indent=4), "Exception", traceback.format_exc()])
data = data or self.status
message = "\n".join(["Data:", json.dumps(data, default=str, indent=4), "--" * 50, "\nException:", traceback.format_exc()])
return frappe.log_error(title="Tally Migration Error", message=message)
def set_status(self, status=""):
self.status = status
self.save()
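
Taken together, each action button now follows the same pattern: set an explicit status, enqueue the long-running job, publish progress (count == -1 on failure), record failures through log() as an Error Log entry, and reset the status in finally. A rough usage sketch, assuming a Frappe bench/console context and a hypothetical document name:

import frappe

doc = frappe.get_doc("Tally Migration", "TM-0001")   # hypothetical name
doc.process_master_data()   # set_status("Processing Master Data") + frappe.enqueue_doc(...)

# the queued _process_master_data then:
#   - publishes "tally_migration_progress_update" steps 1..5
#   - on exception publishes count=-1 and calls self.log(), creating a "Tally Migration Error" Error Log entry
#   - always resets the status via self.set_status("") in the finally block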