2021-02-23 18:57:52 +00:00
|
|
|
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and contributors
|
|
|
|
# For license information, please see license.txt
|
|
|
|
# Contributed by Case Solved and sponsored by Nulight Studios
|
|
|
|
|
|
|
|
from __future__ import unicode_literals
|
2021-03-14 06:05:02 +00:00
|
|
|
import frappe, json
|
2021-02-23 18:57:52 +00:00
|
|
|
from frappe import _
|
|
|
|
|
2021-03-30 17:03:16 +00:00
|
|
|
# NOTE: Payroll is implemented using Journal Entries which translate directly to GL Entries

# field lists in multiple doctypes will be coalesced
# Keys are tuples of doctype names.  A trailing integer merely disambiguates
# duplicate dict keys for the same doctype (see get_fieldstr, which treats a
# one-element or int-tagged key as a plain field list).  Keys naming two
# doctypes get their fields wrapped in SQL coalesce() across both doctypes.
required_sql_fields = {
    ("GL Entry", 1): ["posting_date"],
    ("Account",): ["account_type"],
    ("GL Entry", 2): ["account", "voucher_type", "voucher_no", "debit", "credit"],
    ("Purchase Invoice Item", "Sales Invoice Item"): ["base_net_amount", "item_tax_rate", "item_tax_template", "item_group", "item_name"],
    ("Purchase Invoice", "Sales Invoice"): ["taxes_and_charges", "tax_category"],
}
|
|
2021-03-14 06:05:02 +00:00
|
|
|
def execute(filters=None):
    """Report entry point.

    Fetches GL Entries joined with invoice/item data, rearranges invoice tax
    rows via modify_report_data, and optionally applies a saved custom report
    configuration via run_report.

    Returns the frappe report tuple:
    (columns, data, message, chart, report_summary).
    """
    if not filters:
        return [], []

    fieldlist = required_sql_fields
    fieldstr = get_fieldstr(fieldlist)

    # Item rows are only joined for Income (sales) / Expense (purchase)
    # accounts so that item_tax_rate can be used to regenerate tax lines.
    gl_entries = frappe.db.sql("""
        select {fieldstr}
        from `tabGL Entry` ge
        inner join `tabAccount` a on
            ge.account=a.name and ge.company=a.company
        left join `tabSales Invoice` si on
            ge.company=si.company and ge.voucher_type='Sales Invoice' and ge.voucher_no=si.name
        left join `tabSales Invoice Item` sii on
            a.root_type='Income' and si.name=sii.parent
        left join `tabPurchase Invoice` pi on
            ge.company=pi.company and ge.voucher_type='Purchase Invoice' and ge.voucher_no=pi.name
        left join `tabPurchase Invoice Item` pii on
            a.root_type='Expense' and pi.name=pii.parent
        where
            ge.company=%(company)s and
            ge.posting_date>=%(from_date)s and
            ge.posting_date<=%(to_date)s
        order by ge.posting_date, ge.voucher_no
        """.format(fieldstr=fieldstr), filters, as_dict=1)

    report_data = modify_report_data(gl_entries)
    summary = None
    # 'mode' and 'report_name' may be absent depending on the JS caller;
    # .get() avoids a KeyError where the original would have raised.
    if filters.get('mode') == 'run' and filters.get('report_name') != 'Tax Detail':
        report_data, summary = run_report(filters['report_name'], report_data)

    # return columns, data, message, chart, report_summary
    return get_columns(fieldlist), report_data, None, None, summary
|
|
|
def run_report(report_name, data):
    """Applies the sections and filters saved in the custom report.

    Each section sums a value field over the rows passing its filters;
    detail rows are included when the report's show_detail flag is set.
    Returns (new_data, summary) — falling back to (data, None) when the
    report config produced nothing.
    """
    report_config = json.loads(frappe.get_doc('Report', report_name).json)
    # Columns indexed from 1 wrt colno
    columns = report_config.get('columns')
    sections = report_config.get('sections', {})
    show_detail = report_config.get('show_detail', 1)
    new_data = []
    summary = []
    for section_name, section in sections.items():
        section_total = 0.0
        for filt_name, filt in section.items():
            value_field = filt['fieldname']
            # indexes of rows to drop; a set keeps the membership test below
            # O(1) (a list made the row rebuild accidentally quadratic)
            rmidxs = set()
            for colno, filter_string in filt['filters'].items():
                filter_field = columns[int(colno) - 1]['fieldname']
                for i, row in enumerate(data):
                    if not filter_match(row[filter_field], filter_string):
                        rmidxs.add(i)
            rows = [row for i, row in enumerate(data) if i not in rmidxs]
            section_total += subtotal(rows, value_field)
            if show_detail: new_data += rows
        # section total row uses the 2nd and 3rd configured columns
        new_data += [ {columns[1]['fieldname']: section_name, columns[2]['fieldname']: section_total} ]
        summary += [ {'label': section_name, 'datatype': 'Currency', 'value': section_total} ]
        if show_detail: new_data += [ {} ]
    return new_data or data, summary or None
2021-03-19 23:05:19 +00:00
|
|
|
|
|
|
|
def filter_match(value, string):
|
|
|
|
"Approximation to datatable filters"
|
|
|
|
import datetime
|
|
|
|
if string == '': return True
|
|
|
|
if value is None: value = -999999999999999
|
|
|
|
elif isinstance(value, datetime.date): return True
|
|
|
|
|
|
|
|
if isinstance(value, str):
|
|
|
|
value = value.lower()
|
|
|
|
string = string.lower()
|
|
|
|
if string[0] == '<': return True if string[1:].strip() else False
|
|
|
|
elif string[0] == '>': return False if string[1:].strip() else True
|
|
|
|
elif string[0] == '=': return string[1:] in value if string[1:] else False
|
|
|
|
elif string[0:2] == '!=': return string[2:] not in value
|
|
|
|
elif len(string.split(':')) == 2:
|
|
|
|
pre, post = string.split(':')
|
|
|
|
return (True if not pre.strip() and post.strip() in value else False)
|
|
|
|
else:
|
|
|
|
return string in value
|
|
|
|
else:
|
|
|
|
if string[0] in ['<', '>', '=']:
|
|
|
|
operator = string[0]
|
|
|
|
if operator == '=': operator = '=='
|
|
|
|
string = string[1:].strip()
|
|
|
|
elif string[0:2] == '!=':
|
|
|
|
operator = '!='
|
|
|
|
string = string[2:].strip()
|
|
|
|
elif len(string.split(':')) == 2:
|
|
|
|
pre, post = string.split(':')
|
|
|
|
try:
|
|
|
|
return (True if float(pre) <= value and float(post) >= value else False)
|
|
|
|
except ValueError:
|
|
|
|
return (False if pre.strip() else True)
|
|
|
|
else:
|
|
|
|
return string in str(value)
|
2021-02-23 18:57:52 +00:00
|
|
|
|
2021-03-19 23:05:19 +00:00
|
|
|
try:
|
|
|
|
num = float(string) if string.strip() else 0
|
|
|
|
return eval(f'{value} {operator} {num}')
|
|
|
|
except ValueError:
|
|
|
|
if operator == '<': return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
def subtotal(data, field):
    """Sum *field* across all rows in *data*, always returning a float."""
    return sum((row[field] for row in data), 0.0)
|
2021-02-23 18:57:52 +00:00
|
|
|
|
|
|
|
def abbrev(dt):
    """Abbreviate a DocType name to its initials plus a dot, e.g. 'GL Entry' -> 'ge.'."""
    return ''.join(word[0].lower() for word in dt.split(' ')) + '.'

def doclist(dt, dfs):
    """Prefix each fieldname in *dfs* with the doctype's abbreviation."""
    return [abbrev(dt) + df for df in dfs]
|
2021-03-27 03:02:30 +00:00
|
|
|
|
|
|
|
def as_split(fields):
    """Yield (fieldname, alias) pairs; the alias defaults to the fieldname
    when no ' as ' clause is present."""
    for field in fields:
        name, *alias = field.split(' as ')
        yield (name, alias[0] if alias else name)
|
|
|
|
|
|
|
|
def coalesce(doctypes, fields):
    """Build 'coalesce(a.f, b.f) as alias' SQL expressions, one per field,
    spanning the abbreviated *doctypes*."""
    expressions = []
    for name, new_name in as_split(fields):
        shared = ', '.join(abbrev(dt) + name for dt in doctypes)
        expressions.append(f'coalesce({shared}) as {new_name}')
    return expressions
|
2021-02-23 18:57:52 +00:00
|
|
|
|
|
|
|
def get_fieldstr(fieldlist):
    """Render the field map into a single SQL select-list string.

    Single-doctype keys (or int-tagged duplicates) become plain abbreviated
    fields; multi-doctype keys are coalesced across the doctypes.
    """
    parts = []
    for doctypes, docfields in fieldlist.items():
        single = len(doctypes) == 1 or isinstance(doctypes[1], int)
        if single:
            parts.extend(doclist(doctypes[0], docfields))
        else:
            parts.extend(coalesce(doctypes, docfields))
    return ', '.join(parts)
|
|
|
|
|
|
|
|
def get_columns(fieldlist):
    """Build report column definitions from each doctype's field metadata."""
    columns = {}
    for doctypes, docfields in fieldlist.items():
        alias_map = dict(as_split(docfields))
        for doctype in doctypes:
            # int entries only disambiguate dict keys — stop at them
            if isinstance(doctype, int):
                break
            meta = frappe.get_meta(doctype)
            # get column field metadata from the db
            fieldmeta = {}
            for docfield in meta.get('fields'):
                alias = alias_map.get(docfield.fieldname)
                if alias is not None:
                    fieldmeta[alias] = {
                        "label": _(docfield.label),
                        "fieldname": alias,
                        "fieldtype": docfield.fieldtype,
                        "options": docfield.options
                    }
            # edit the columns to match the modified data
            for alias in alias_map.values():
                col = modify_report_columns(doctype, alias, fieldmeta[alias])
                if col:
                    columns[col["fieldname"]] = col
    # use of a dict ensures duplicate columns are removed
    return list(columns.values())
|
|
|
|
|
|
|
|
def modify_report_columns(doctype, field, column):
    """Drop or relabel columns whose data is rearranged into other columns.

    Returns None to suppress the column, otherwise the (possibly relabelled)
    column dict.
    """
    item_doctypes = ("Sales Invoice Item", "Purchase Invoice Item")
    # these item fields are folded into the generated tax lines, so hide them
    if doctype in item_doctypes and field in ("item_tax_rate", "base_net_amount"):
        return None

    if field == "taxes_and_charges":
        column["label"] = _("Taxes and Charges Template")
    return column
|
|
|
|
|
|
|
|
def modify_report_data(data):
    """Rearrange GL rows for the report: drop invoice tax GL entries and
    regenerate per-account tax lines from the joined invoice item data.

    Returns a new list.  NOTE(review): invoice rows are mutated in place
    (debit/credit overwritten with the item net amount); rows appear to be
    frappe._dict (attribute access, shallow .copy()) — confirm with caller.
    """
    import json
    new_data = []
    for line in data:
        # Remove Invoice GL Tax Entries and generate Tax entries from the invoice lines
        if "Invoice" in line.voucher_type:
            if line.account_type != "Tax":
                new_data += [line]
                # item_tax_rate is a JSON map of {tax account: rate %}
                if line.item_tax_rate:
                    tax_rates = json.loads(line.item_tax_rate)
                    for account, rate in tax_rates.items():
                        # synthesise one tax line per tax account on the item
                        tax_line = line.copy()
                        tax_line.account_type = "Tax"
                        tax_line.account = account
                        if line.voucher_type == "Sales Invoice":
                            line.credit = line.base_net_amount
                            tax_line.credit = line.base_net_amount * (rate / 100)
                        if line.voucher_type == "Purchase Invoice":
                            line.debit = line.base_net_amount
                            tax_line.debit = line.base_net_amount * (rate / 100)
                        new_data += [tax_line]
        else:
            # non-invoice vouchers (e.g. Journal Entries) pass through untouched
            new_data += [line]
    return new_data
|
2021-02-23 18:57:52 +00:00
|
|
|
|
|
|
|
####### JS client utilities

# Identity/filter values shared by the whitelisted endpoints below: every
# custom report managed here is a Custom Report over GL Entry referencing
# the master 'Tax Detail' report.
custom_report_dict = {
    'ref_doctype': 'GL Entry',
    'report_type': 'Custom Report',
    'reference_report': 'Tax Detail'
}
|
|
|
|
|
|
|
|
@frappe.whitelist()
def get_custom_reports(name=None):
    """Return {report_name: {'json': ...}} for saved custom Tax Detail
    reports, optionally restricted to a single *name*."""
    filters = dict(custom_report_dict)
    if name:
        filters['name'] = name
    reports = frappe.get_list(
        'Report',
        filters=filters,
        fields=['name', 'json'],
        as_list=False,
    )
    reports_dict = {report.pop('name'): report for report in reports}
    # Prevent custom reports with the same name as the master report
    reports_dict['Tax Detail'] = {'json': None}
    return reports_dict
|
|
|
|
|
|
|
|
@frappe.whitelist()
def save_custom_report(reference_report, report_name, data):
    """Create or update the custom Report *report_name* holding *data* as
    its JSON config; the master 'Tax Detail' report cannot be overwritten."""
    if reference_report != 'Tax Detail':
        frappe.throw(_("The wrong report is referenced."))
    if report_name == 'Tax Detail':
        frappe.throw(_("The parent report cannot be overwritten."))

    # custom_report_dict entries last, so they win on any key collision
    doc = {
        'doctype': 'Report',
        'report_name': report_name,
        'is_standard': 'No',
        'module': 'Accounts',
        'json': data,
        **custom_report_dict,
    }

    try:
        frappe.get_doc(doc).insert()
        frappe.msgprint(_("Report created successfully"))
    except frappe.exceptions.DuplicateEntryError:
        # report already exists: update it in place
        existing = frappe.get_doc('Report', report_name)
        existing.update(doc)
        existing.save()
        frappe.msgprint(_("Report updated successfully"))
    return report_name
|