Merge branch 'develop' into shift-type-null-in-employee-checkin

Rucha Mahabal 2022-04-19 13:39:32 +05:30 committed by GitHub
commit 789fa31b82
GPG Key ID: 4AEE18F83AFDEB23
74 changed files with 1955 additions and 2190 deletions

.github/workflows/release.yml (vendored, new file, 25 lines)

@ -0,0 +1,25 @@
name: Generate Semantic Release
on:
push:
branches:
- version-13
jobs:
release:
name: Release
runs-on: ubuntu-latest
steps:
- name: Checkout Entire Repository
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Setup Node.js v14
uses: actions/setup-node@v2
with:
node-version: 14
- name: Setup dependencies
run: |
npm install @semantic-release/git @semantic-release/exec --no-save
- name: Create Release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: npx semantic-release


@ -129,6 +129,9 @@ jobs:
needs: test
runs-on: ubuntu-latest
steps:
- name: Clone
uses: actions/checkout@v2
- name: Download artifacts
uses: actions/download-artifact@v3

.releaserc (new file, 24 lines)

@ -0,0 +1,24 @@
{
"branches": ["version-13"],
"plugins": [
"@semantic-release/commit-analyzer", {
"preset": "angular",
"releaseRules": [
{"breaking": true, "release": false}
]
},
"@semantic-release/release-notes-generator",
[
"@semantic-release/exec", {
"prepareCmd": 'sed -ir "s/[0-9]*\.[0-9]*\.[0-9]*/${nextRelease.version}/" erpnext/__init__.py'
}
],
[
"@semantic-release/git", {
"assets": ["erpnext/__init__.py"],
"message": "chore(release): Bumped to Version ${nextRelease.version}\n\n${nextRelease.notes}"
}
],
"@semantic-release/github"
]
}
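The prepareCmd above rewrites the version string in erpnext/__init__.py before semantic-release commits the bump. A minimal Python sketch of the same substitution, assuming the version string appears once in that file (the workflow itself runs the sed command, not this code):

import re

def bump_version(next_version, path="erpnext/__init__.py"):
    # Same pattern as the sed command in prepareCmd; sed replaces the first
    # match on each line, this sketch replaces the first match in the file.
    with open(path) as f:
        text = f.read()
    text = re.sub(r"[0-9]*\.[0-9]*\.[0-9]*", next_version, text, count=1)
    with open(path, "w") as f:
        f.write(text)

bump_version("13.30.0")  # illustrative version number only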


@ -18,7 +18,6 @@ from erpnext.accounts.doctype.tax_withholding_category.tax_withholding_category
)
from erpnext.accounts.party import get_party_account
from erpnext.accounts.utils import (
check_if_stock_and_account_balance_synced,
get_account_currency,
get_balance_on,
get_stock_accounts,
@ -88,9 +87,6 @@ class JournalEntry(AccountsController):
self.update_inter_company_jv()
self.update_invoice_discounting()
self.update_status_for_full_and_final_statement()
check_if_stock_and_account_balance_synced(
self.posting_date, self.company, self.doctype, self.name
)
def on_cancel(self):
from erpnext.accounts.utils import unlink_ref_doc_from_payment_entries


@ -350,9 +350,13 @@ class PaymentReconciliation(Document):
)
if self.minimum_invoice_amount:
condition += " and `{0}` >= {1}".format(dr_or_cr, flt(self.minimum_invoice_amount))
condition += " and {dr_or_cr} >= {amount}".format(
dr_or_cr=dr_or_cr, amount=flt(self.minimum_invoice_amount)
)
if self.maximum_invoice_amount:
condition += " and `{0}` <= {1}".format(dr_or_cr, flt(self.maximum_invoice_amount))
condition += " and {dr_or_cr} <= {amount}".format(
dr_or_cr=dr_or_cr, amount=flt(self.maximum_invoice_amount)
)
elif get_return_invoices:
condition = " and doc.company = '{0}' ".format(self.company)
@ -367,15 +371,19 @@ class PaymentReconciliation(Document):
else ""
)
dr_or_cr = (
"gl.debit_in_account_currency"
"debit_in_account_currency"
if erpnext.get_party_account_type(self.party_type) == "Receivable"
else "gl.credit_in_account_currency"
else "credit_in_account_currency"
)
if self.minimum_invoice_amount:
condition += " and `{0}` >= {1}".format(dr_or_cr, flt(self.minimum_payment_amount))
condition += " and gl.{dr_or_cr} >= {amount}".format(
dr_or_cr=dr_or_cr, amount=flt(self.minimum_payment_amount)
)
if self.maximum_invoice_amount:
condition += " and `{0}` <= {1}".format(dr_or_cr, flt(self.maximum_payment_amount))
condition += " and gl.{dr_or_cr} <= {amount}".format(
dr_or_cr=dr_or_cr, amount=flt(self.maximum_payment_amount)
)
else:
condition += (


@ -34,8 +34,9 @@ class ProcessStatementOfAccounts(Document):
frappe.throw(_("Customers not selected."))
if self.enable_auto_email:
self.to_date = self.start_date
self.from_date = add_months(self.to_date, -1 * self.filter_duration)
if self.start_date and getdate(self.start_date) >= getdate(today()):
self.to_date = self.start_date
self.from_date = add_months(self.to_date, -1 * self.filter_duration)
def get_report_pdf(doc, consolidated=True):


@ -30,6 +30,9 @@ erpnext.accounts.PurchaseInvoice = class PurchaseInvoice extends erpnext.buying.
onload() {
super.onload();
// Ignore linked advances
this.frm.ignore_doctypes_on_cancel_all = ['Journal Entry', 'Payment Entry'];
if(!this.frm.doc.__islocal) {
// show credit_to in print format
if(!this.frm.doc.supplier && this.frm.doc.credit_to) {


@ -811,7 +811,9 @@ class PurchaseInvoice(BuyingController):
if provisional_accounting_for_non_stock_items:
if item.purchase_receipt:
provisional_account = self.get_company_default("default_provisional_account")
provisional_account = frappe.db.get_value(
"Purchase Receipt Item", item.pr_detail, "provisional_expense_account"
) or self.get_company_default("default_provisional_account")
purchase_receipt_doc = purchase_receipt_doc_map.get(item.purchase_receipt)
if not purchase_receipt_doc:
@ -834,7 +836,7 @@ class PurchaseInvoice(BuyingController):
if expense_booked_in_pr:
# Intentionally passing purchase invoice item to handle partial billing
purchase_receipt_doc.add_provisional_gl_entry(
item, gl_entries, self.posting_date, reverse=1
item, gl_entries, self.posting_date, provisional_account, reverse=1
)
if not self.is_internal_transfer():


@ -1482,7 +1482,8 @@ class TestPurchaseInvoice(unittest.TestCase):
self.assertEqual(payment_entry.taxes[0].allocated_amount, 0)
def test_provisional_accounting_entry(self):
item = create_item("_Test Non Stock Item", is_stock_item=0)
create_item("_Test Non Stock Item", is_stock_item=0)
provisional_account = create_account(
account_name="Provision Account",
parent_account="Current Liabilities - _TC",
@ -1505,6 +1506,8 @@ class TestPurchaseInvoice(unittest.TestCase):
pi.save()
pi.submit()
self.assertEquals(pr.items[0].provisional_expense_account, "Provision Account - _TC")
# Check GLE for Purchase Invoice
expected_gle = [
["Cost of Goods Sold - _TC", 250, 0, add_days(pr.posting_date, -1)],


@ -33,7 +33,9 @@ erpnext.accounts.SalesInvoiceController = class SalesInvoiceController extends e
var me = this;
super.onload();
this.frm.ignore_doctypes_on_cancel_all = ['POS Invoice', 'Timesheet', 'POS Invoice Merge Log', 'POS Closing Entry'];
this.frm.ignore_doctypes_on_cancel_all = ['POS Invoice', 'Timesheet', 'POS Invoice Merge Log',
'POS Closing Entry', 'Journal Entry', 'Payment Entry'];
if(!this.frm.doc.__islocal && !this.frm.doc.customer && this.frm.doc.debit_to) {
// show debit_to in print format
this.frm.set_df_property("debit_to", "print_hide", 0);


@ -3124,6 +3124,62 @@ class TestSalesInvoice(unittest.TestCase):
si.reload()
self.assertTrue(si.items[0].serial_no)
def test_gain_loss_with_advance_entry(self):
from erpnext.accounts.doctype.journal_entry.test_journal_entry import make_journal_entry
unlink_enabled = frappe.db.get_value(
"Accounts Settings", "Accounts Settings", "unlink_payment_on_cancel_of_invoice"
)
frappe.db.set_value(
"Accounts Settings", "Accounts Settings", "unlink_payment_on_cancel_of_invoice", 1
)
jv = make_journal_entry("_Test Receivable USD - _TC", "_Test Bank - _TC", -7000, save=False)
jv.accounts[0].exchange_rate = 70
jv.accounts[0].credit_in_account_currency = 100
jv.accounts[0].party_type = "Customer"
jv.accounts[0].party = "_Test Customer USD"
jv.save()
jv.submit()
si = create_sales_invoice(
customer="_Test Customer USD",
debit_to="_Test Receivable USD - _TC",
currency="USD",
conversion_rate=75,
do_not_save=1,
rate=100,
)
si.append(
"advances",
{
"reference_type": "Journal Entry",
"reference_name": jv.name,
"reference_row": jv.accounts[0].name,
"advance_amount": 100,
"allocated_amount": 100,
"ref_exchange_rate": 70,
},
)
si.save()
si.submit()
expected_gle = [
["_Test Receivable USD - _TC", 7500.0, 500],
["Exchange Gain/Loss - _TC", 500.0, 0.0],
["Sales - _TC", 0.0, 7500.0],
]
check_gl_entries(self, si.name, expected_gle, nowdate())
frappe.db.set_value(
"Accounts Settings", "Accounts Settings", "unlink_payment_on_cancel_of_invoice", unlink_enabled
)
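The expected GL entries follow from the two exchange rates used in this test: the 100 USD advance is booked through the journal entry at 70, while the invoice converts at 75, so the allocated advance differs by 100 * (75 - 70) = 500, which is the amount hitting Exchange Gain/Loss - _TC above. A quick arithmetic check with the values from the test:

advance_usd = 100
jv_rate = 70    # exchange rate on the advance journal entry
si_rate = 75    # conversion rate on the sales invoice

invoice_amount = advance_usd * si_rate         # 7500, debited to _Test Receivable USD - _TC
advance_amount = advance_usd * jv_rate         # 7000, already booked by the journal entry
difference = invoice_amount - advance_amount   # 500, posted against Exchange Gain/Loss - _TC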
def get_sales_invoice_for_e_invoice():
si = make_sales_invoice_for_ewaybill()


@ -18,10 +18,6 @@ from erpnext.stock import get_warehouse_account_map
from erpnext.stock.utils import get_stock_value_on
class StockValueAndAccountBalanceOutOfSync(frappe.ValidationError):
pass
class FiscalYearError(frappe.ValidationError):
pass
@ -1246,47 +1242,6 @@ def compare_existing_and_expected_gle(existing_gle, expected_gle, precision):
return matched
def check_if_stock_and_account_balance_synced(
posting_date, company, voucher_type=None, voucher_no=None
):
if not cint(erpnext.is_perpetual_inventory_enabled(company)):
return
accounts = get_stock_accounts(company, voucher_type, voucher_no)
stock_adjustment_account = frappe.db.get_value("Company", company, "stock_adjustment_account")
for account in accounts:
account_bal, stock_bal, warehouse_list = get_stock_and_account_balance(
account, posting_date, company
)
if abs(account_bal - stock_bal) > 0.1:
precision = get_field_precision(
frappe.get_meta("GL Entry").get_field("debit"),
currency=frappe.get_cached_value("Company", company, "default_currency"),
)
diff = flt(stock_bal - account_bal, precision)
error_reason = _(
"Stock Value ({0}) and Account Balance ({1}) are out of sync for account {2} and it's linked warehouses as on {3}."
).format(stock_bal, account_bal, frappe.bold(account), posting_date)
error_resolution = _("Please create an adjustment Journal Entry for amount {0} on {1}").format(
frappe.bold(diff), frappe.bold(posting_date)
)
frappe.msgprint(
msg="""{0}<br></br>{1}<br></br>""".format(error_reason, error_resolution),
raise_exception=StockValueAndAccountBalanceOutOfSync,
title=_("Values Out Of Sync"),
primary_action={
"label": _("Make Journal Entry"),
"client_action": "erpnext.route_to_adjustment_jv",
"args": get_journal_entry(account, stock_adjustment_account, diff),
},
)
def get_stock_accounts(company, voucher_type=None, voucher_no=None):
stock_accounts = [
d.name


@ -65,7 +65,6 @@ class TestRequestforQuotation(FrappeTestCase):
)
sq.submit()
frappe.form_dict = frappe.local("form_dict")
frappe.form_dict.name = rfq.name
self.assertEqual(check_supplier_has_docname_access(supplier_wt_appos[0].get("supplier")), True)


@ -18,16 +18,16 @@
"tax_id",
"tax_category",
"tax_withholding_category",
"is_transporter",
"is_internal_supplier",
"represents_company",
"image",
"column_break0",
"supplier_group",
"supplier_type",
"allow_purchase_invoice_creation_without_purchase_order",
"allow_purchase_invoice_creation_without_purchase_receipt",
"is_internal_supplier",
"represents_company",
"disabled",
"is_transporter",
"warn_rfqs",
"warn_pos",
"prevent_rfqs",
@ -38,12 +38,6 @@
"default_currency",
"column_break_10",
"default_price_list",
"section_credit_limit",
"payment_terms",
"cb_21",
"on_hold",
"hold_type",
"release_date",
"address_contacts",
"address_html",
"column_break1",
@ -57,6 +51,12 @@
"primary_address",
"default_payable_accounts",
"accounts",
"section_credit_limit",
"payment_terms",
"cb_21",
"on_hold",
"hold_type",
"release_date",
"default_tax_withholding_config",
"column_break2",
"website",
@ -258,7 +258,7 @@
"collapsible": 1,
"fieldname": "section_credit_limit",
"fieldtype": "Section Break",
"label": "Credit Limit"
"label": "Payment Terms"
},
{
"fieldname": "payment_terms",
@ -432,7 +432,7 @@
"link_fieldname": "party"
}
],
"modified": "2021-10-20 22:03:33.147249",
"modified": "2022-04-16 18:02:27.838623",
"modified_by": "Administrator",
"module": "Buying",
"name": "Supplier",
@ -497,6 +497,7 @@
"show_name_in_global_search": 1,
"sort_field": "modified",
"sort_order": "ASC",
"states": [],
"title_field": "supplier_name",
"track_changes": 1
}


@ -1997,12 +1997,13 @@ def get_advance_journal_entries(
reference_condition = " and (" + " or ".join(conditions) + ")" if conditions else ""
# nosemgrep
journal_entries = frappe.db.sql(
"""
select
"Journal Entry" as reference_type, t1.name as reference_name,
t1.remark as remarks, t2.{0} as amount, t2.name as reference_row,
t2.reference_name as against_order
t2.reference_name as against_order, t2.exchange_rate
from
`tabJournal Entry` t1, `tabJournal Entry Account` t2
where


@ -37,11 +37,26 @@ def handle_end_call(**kwargs):
@frappe.whitelist(allow_guest=True)
def handle_missed_call(**kwargs):
update_call_log(kwargs, "Missed")
status = ""
call_type = kwargs.get("CallType")
dial_call_status = kwargs.get("DialCallStatus")
if call_type == "incomplete" and dial_call_status == "no-answer":
status = "No Answer"
elif call_type == "client-hangup" and dial_call_status == "canceled":
status = "Canceled"
elif call_type == "incomplete" and dial_call_status == "failed":
status = "Failed"
update_call_log(kwargs, status)
def update_call_log(call_payload, status="Ringing", call_log=None):
call_log = call_log or get_call_log(call_payload)
# for a new sid, call_log and get_call_log will be empty so create a new log
if not call_log:
call_log = create_call_log(call_payload)
if call_log:
call_log.status = status
call_log.to = call_payload.get("DialWhomNumber")
@ -53,16 +68,9 @@ def update_call_log(call_payload, status="Ringing", call_log=None):
def get_call_log(call_payload):
call_log = frappe.get_all(
"Call Log",
{
"id": call_payload.get("CallSid"),
},
limit=1,
)
if call_log:
return frappe.get_doc("Call Log", call_log[0].name)
call_log_id = call_payload.get("CallSid")
if frappe.db.exists("Call Log", call_log_id):
return frappe.get_doc("Call Log", call_log_id)
def create_call_log(call_payload):
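The handle_missed_call change above maps three CallType / DialCallStatus combinations from the Exotel payload onto call log statuses, falling back to an empty status otherwise. A sketch of the same mapping as a lookup table (an equivalent restatement of the branches above, not an additional API):

# (CallType, DialCallStatus) -> Call Log status, mirroring handle_missed_call
MISSED_CALL_STATUS = {
    ("incomplete", "no-answer"): "No Answer",
    ("client-hangup", "canceled"): "Canceled",
    ("incomplete", "failed"): "Failed",
}

def resolve_missed_call_status(payload):
    # falls back to "" like the code above when the combination is unknown
    return MISSED_CALL_STATUS.get((payload.get("CallType"), payload.get("DialCallStatus")), "")

A dict keeps the combinations in one place if more Exotel statuses need handling later.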


@ -59,6 +59,7 @@ treeviews = [
"Warehouse",
"Item Group",
"Customer Group",
"Supplier Group",
"Sales Person",
"Territory",
"Assessment Group",


@ -4,7 +4,7 @@
"allow_import": 1,
"allow_rename": 1,
"autoname": "naming_series:",
"creation": "2013-03-07 09:04:18",
"creation": "2022-02-21 11:54:09.632218",
"doctype": "DocType",
"document_type": "Setup",
"editable_grid": 1,
@ -813,11 +813,12 @@
"idx": 24,
"image_field": "image",
"links": [],
"modified": "2021-06-17 11:31:37.730760",
"modified": "2022-03-22 13:44:37.088519",
"modified_by": "Administrator",
"module": "HR",
"name": "Employee",
"name_case": "Title Case",
"naming_rule": "By \"Naming Series\" field",
"owner": "Administrator",
"permissions": [
{
@ -857,7 +858,9 @@
],
"search_fields": "employee_name",
"show_name_in_global_search": 1,
"show_title_field_in_link": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "employee_name"
}


@ -262,8 +262,6 @@ erpnext.patches.v13_0.make_non_standard_user_type #13-04-2021 #17-01-2022
erpnext.patches.v13_0.update_shipment_status
erpnext.patches.v13_0.remove_attribute_field_from_item_variant_setting
erpnext.patches.v12_0.add_ewaybill_validity_field
erpnext.patches.v13_0.germany_make_custom_fields
erpnext.patches.v13_0.germany_fill_debtor_creditor_number
erpnext.patches.v13_0.set_pos_closing_as_failed
erpnext.patches.v13_0.rename_stop_to_send_birthday_reminders
execute:frappe.rename_doc("Workspace", "Loan Management", "Loans", force=True)
@ -343,6 +341,7 @@ erpnext.patches.v14_0.delete_shopify_doctypes
erpnext.patches.v14_0.delete_hub_doctypes
erpnext.patches.v14_0.delete_hospitality_doctypes # 20-01-2022
erpnext.patches.v14_0.delete_agriculture_doctypes
erpnext.patches.v14_0.delete_datev_doctypes
erpnext.patches.v14_0.rearrange_company_fields
erpnext.patches.v14_0.update_leave_notification_template
erpnext.patches.v14_0.restore_einvoice_fields
@ -364,4 +363,4 @@ erpnext.patches.v13_0.add_cost_center_in_loans
erpnext.patches.v13_0.set_return_against_in_pos_invoice_references
erpnext.patches.v13_0.remove_unknown_links_to_prod_plan_items # 24-03-2022
erpnext.patches.v13_0.update_expense_claim_status_for_paid_advances
erpnext.patches.v13_0.create_gst_custom_fields_in_quotation
erpnext.patches.v13_0.create_gst_custom_fields_in_quotation


@ -1,36 +0,0 @@
# Copyright (c) 2019, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
def execute():
"""Move account number into the new custom field debtor_creditor_number.
German companies used to use a dedicated payable/receivable account for
every party to mimic party accounts in the external accounting software
"DATEV". This is no longer necessary. The reference ID for DATEV will be
stored in a new custom field "debtor_creditor_number".
"""
company_list = frappe.get_all("Company", filters={"country": "Germany"})
for company in company_list:
party_account_list = frappe.get_all(
"Party Account",
filters={"company": company.name},
fields=["name", "account", "debtor_creditor_number"],
)
for party_account in party_account_list:
if (not party_account.account) or party_account.debtor_creditor_number:
# account empty or debtor_creditor_number already filled
continue
account_number = frappe.db.get_value("Account", party_account.account, "account_number")
if not account_number:
continue
frappe.db.set_value(
"Party Account", party_account.name, "debtor_creditor_number", account_number
)
frappe.db.set_value("Party Account", party_account.name, "account", "")


@ -1,20 +0,0 @@
# Copyright (c) 2019, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
from erpnext.regional.germany.setup import make_custom_fields
def execute():
"""Execute the make_custom_fields method for german companies.
It is usually run once at setup of a new company. Since it's new, run it
once for existing companies as well.
"""
company_list = frappe.get_all("Company", filters={"country": "Germany"})
if not company_list:
return
make_custom_fields()


@ -10,54 +10,58 @@ def execute():
frappe.reload_doc("hr", "doctype", "Leave Encashment")
additional_salaries = frappe.get_all(
"Additional Salary",
fields=["name", "salary_slip", "type", "salary_component"],
filters={"salary_slip": ["!=", ""]},
group_by="salary_slip",
)
leave_encashments = frappe.get_all(
"Leave Encashment",
fields=["name", "additional_salary"],
filters={"additional_salary": ["!=", ""]},
)
employee_incentives = frappe.get_all(
"Employee Incentive",
fields=["name", "additional_salary"],
filters={"additional_salary": ["!=", ""]},
)
for incentive in employee_incentives:
frappe.db.sql(
""" UPDATE `tabAdditional Salary`
SET ref_doctype = 'Employee Incentive', ref_docname = %s
WHERE name = %s
""",
(incentive["name"], incentive["additional_salary"]),
if frappe.db.has_column("Leave Encashment", "additional_salary"):
leave_encashments = frappe.get_all(
"Leave Encashment",
fields=["name", "additional_salary"],
filters={"additional_salary": ["!=", ""]},
)
for leave_encashment in leave_encashments:
frappe.db.sql(
""" UPDATE `tabAdditional Salary`
SET ref_doctype = 'Leave Encashment', ref_docname = %s
WHERE name = %s
""",
(leave_encashment["name"], leave_encashment["additional_salary"]),
)
salary_slips = [sal["salary_slip"] for sal in additional_salaries]
for salary in additional_salaries:
comp_type = "earnings" if salary["type"] == "Earning" else "deductions"
if salary["salary_slip"] and salary_slips.count(salary["salary_slip"]) == 1:
for leave_encashment in leave_encashments:
frappe.db.sql(
"""
UPDATE `tabSalary Detail`
SET additional_salary = %s
WHERE parenttype = 'Salary Slip'
and parentfield = %s
and parent = %s
and salary_component = %s
""" UPDATE `tabAdditional Salary`
SET ref_doctype = 'Leave Encashment', ref_docname = %s
WHERE name = %s
""",
(salary["name"], comp_type, salary["salary_slip"], salary["salary_component"]),
(leave_encashment["name"], leave_encashment["additional_salary"]),
)
if frappe.db.has_column("Employee Incentive", "additional_salary"):
employee_incentives = frappe.get_all(
"Employee Incentive",
fields=["name", "additional_salary"],
filters={"additional_salary": ["!=", ""]},
)
for incentive in employee_incentives:
frappe.db.sql(
""" UPDATE `tabAdditional Salary`
SET ref_doctype = 'Employee Incentive', ref_docname = %s
WHERE name = %s
""",
(incentive["name"], incentive["additional_salary"]),
)
if frappe.db.has_column("Additional Salary", "salary_slip"):
additional_salaries = frappe.get_all(
"Additional Salary",
fields=["name", "salary_slip", "type", "salary_component"],
filters={"salary_slip": ["!=", ""]},
group_by="salary_slip",
)
salary_slips = [sal["salary_slip"] for sal in additional_salaries]
for salary in additional_salaries:
comp_type = "earnings" if salary["type"] == "Earning" else "deductions"
if salary["salary_slip"] and salary_slips.count(salary["salary_slip"]) == 1:
frappe.db.sql(
"""
UPDATE `tabSalary Detail`
SET additional_salary = %s
WHERE parenttype = 'Salary Slip'
and parentfield = %s
and parent = %s
and salary_component = %s
""",
(salary["name"], comp_type, salary["salary_slip"], salary["salary_component"]),
)


@ -0,0 +1,13 @@
import frappe
def execute():
install_apps = frappe.get_installed_apps()
if "erpnext_datev_uo" in install_apps or "erpnext_datev" in install_apps:
return
# doctypes
frappe.delete_doc("DocType", "DATEV Settings", ignore_missing=True, force=True)
# reports
frappe.delete_doc("Report", "DATEV", ignore_missing=True, force=True)


@ -24,7 +24,9 @@ class TestGratuity(unittest.TestCase):
frappe.db.delete("Gratuity")
frappe.db.delete("Additional Salary", {"ref_doctype": "Gratuity"})
make_earning_salary_component(setup=True, test_tax=True, company_list=["_Test Company"])
make_earning_salary_component(
setup=True, test_tax=True, company_list=["_Test Company"], include_flexi_benefits=True
)
make_deduction_salary_component(setup=True, test_tax=True, company_list=["_Test Company"])
def test_get_last_salary_slip_should_return_none_for_new_employee(self):


@ -952,8 +952,12 @@ class SalarySlip(TransactionBase):
)
# Structured tax amount
total_structured_tax_amount = self.calculate_tax_by_tax_slab(
total_taxable_earnings_without_full_tax_addl_components, tax_slab
eval_locals = self.get_data_for_eval()
total_structured_tax_amount = calculate_tax_by_tax_slab(
total_taxable_earnings_without_full_tax_addl_components,
tax_slab,
self.whitelisted_globals,
eval_locals,
)
current_structured_tax_amount = (
total_structured_tax_amount - previous_total_paid_taxes
@ -962,7 +966,9 @@ class SalarySlip(TransactionBase):
# Total taxable earnings with additional earnings with full tax
full_tax_on_additional_earnings = 0.0
if current_additional_earnings_with_full_tax:
total_tax_amount = self.calculate_tax_by_tax_slab(total_taxable_earnings, tax_slab)
total_tax_amount = calculate_tax_by_tax_slab(
total_taxable_earnings, tax_slab, self.whitelisted_globals, eval_locals
)
full_tax_on_additional_earnings = total_tax_amount - total_structured_tax_amount
current_tax_amount = current_structured_tax_amount + full_tax_on_additional_earnings
@ -1278,50 +1284,6 @@ class SalarySlip(TransactionBase):
fields="SUM(amount) as total_amount",
)[0].total_amount
def calculate_tax_by_tax_slab(self, annual_taxable_earning, tax_slab):
data = self.get_data_for_eval()
data.update({"annual_taxable_earning": annual_taxable_earning})
tax_amount = 0
for slab in tax_slab.slabs:
cond = cstr(slab.condition).strip()
if cond and not self.eval_tax_slab_condition(cond, data):
continue
if not slab.to_amount and annual_taxable_earning >= slab.from_amount:
tax_amount += (annual_taxable_earning - slab.from_amount + 1) * slab.percent_deduction * 0.01
continue
if annual_taxable_earning >= slab.from_amount and annual_taxable_earning < slab.to_amount:
tax_amount += (annual_taxable_earning - slab.from_amount + 1) * slab.percent_deduction * 0.01
elif annual_taxable_earning >= slab.from_amount and annual_taxable_earning >= slab.to_amount:
tax_amount += (slab.to_amount - slab.from_amount + 1) * slab.percent_deduction * 0.01
# other taxes and charges on income tax
for d in tax_slab.other_taxes_and_charges:
if flt(d.min_taxable_income) and flt(d.min_taxable_income) > annual_taxable_earning:
continue
if flt(d.max_taxable_income) and flt(d.max_taxable_income) < annual_taxable_earning:
continue
tax_amount += tax_amount * flt(d.percent) / 100
return tax_amount
def eval_tax_slab_condition(self, condition, data):
try:
condition = condition.strip()
if condition:
return frappe.safe_eval(condition, self.whitelisted_globals, data)
except NameError as err:
frappe.throw(
_("{0} <br> This error can be due to missing or deleted field.").format(err),
title=_("Name error"),
)
except SyntaxError as err:
frappe.throw(_("Syntax error in condition: {0}").format(err))
except Exception as e:
frappe.throw(_("Error in formula or condition: {0}").format(e))
raise
def get_component_totals(self, component_type, depends_on_payment_days=0):
joining_date, relieving_date = frappe.get_cached_value(
"Employee", self.employee, ["date_of_joining", "relieving_date"]
@ -1705,3 +1667,60 @@ def get_payroll_payable_account(company, payroll_entry):
)
return payroll_payable_account
def calculate_tax_by_tax_slab(
annual_taxable_earning, tax_slab, eval_globals=None, eval_locals=None
):
eval_locals.update({"annual_taxable_earning": annual_taxable_earning})
tax_amount = 0
for slab in tax_slab.slabs:
cond = cstr(slab.condition).strip()
if cond and not eval_tax_slab_condition(cond, eval_globals, eval_locals):
continue
if not slab.to_amount and annual_taxable_earning >= slab.from_amount:
tax_amount += (annual_taxable_earning - slab.from_amount + 1) * slab.percent_deduction * 0.01
continue
if annual_taxable_earning >= slab.from_amount and annual_taxable_earning < slab.to_amount:
tax_amount += (annual_taxable_earning - slab.from_amount + 1) * slab.percent_deduction * 0.01
elif annual_taxable_earning >= slab.from_amount and annual_taxable_earning >= slab.to_amount:
tax_amount += (slab.to_amount - slab.from_amount + 1) * slab.percent_deduction * 0.01
# other taxes and charges on income tax
for d in tax_slab.other_taxes_and_charges:
if flt(d.min_taxable_income) and flt(d.min_taxable_income) > annual_taxable_earning:
continue
if flt(d.max_taxable_income) and flt(d.max_taxable_income) < annual_taxable_earning:
continue
tax_amount += tax_amount * flt(d.percent) / 100
return tax_amount
def eval_tax_slab_condition(condition, eval_globals=None, eval_locals=None):
if not eval_globals:
eval_globals = {
"int": int,
"float": float,
"long": int,
"round": round,
"date": datetime.date,
"getdate": getdate,
}
try:
condition = condition.strip()
if condition:
return frappe.safe_eval(condition, eval_globals, eval_locals)
except NameError as err:
frappe.throw(
_("{0} <br> This error can be due to missing or deleted field.").format(err),
title=_("Name error"),
)
except SyntaxError as err:
frappe.throw(_("Syntax error in condition: {0} in Income Tax Slab").format(err))
except Exception as e:
frappe.throw(_("Error in formula or condition: {0} in Income Tax Slab").format(e))
raise
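calculate_tax_by_tax_slab above walks the slabs in order, adding the taxed portion of annual_taxable_earning that falls inside each slab, and then applies other_taxes_and_charges (such as cess) as a percentage of the computed tax. A worked example of the slab arithmetic with illustrative slab values (the real slabs come from the Income Tax Slab document, not from this sketch):

# Illustrative slabs: 0-250000 at 0%, 250001-500000 at 5%, 500001 and above at 20%
annual_taxable_earning = 800000

tax = 0.0
tax += (250000 - 0 + 1) * 0 * 0.01          # 0.0 for the 0% slab
tax += (500000 - 250001 + 1) * 5 * 0.01     # 12500.0 for the 5% slab
tax += (800000 - 500001 + 1) * 20 * 0.01    # 60000.0 for the open-ended slab
tax += tax * 4 / 100                        # + 2900.0 if a 4% cess is configured
print(tax)                                  # 75400.0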


@ -772,6 +772,7 @@ class TestSalarySlip(unittest.TestCase):
"Monthly",
other_details={"max_benefits": 100000},
test_tax=True,
include_flexi_benefits=True,
employee=employee,
payroll_period=payroll_period,
)
@ -875,6 +876,7 @@ class TestSalarySlip(unittest.TestCase):
"Monthly",
other_details={"max_benefits": 100000},
test_tax=True,
include_flexi_benefits=True,
employee=employee,
payroll_period=payroll_period,
)
@ -1022,7 +1024,9 @@ def create_account(account_name, company, parent_account, account_type=None):
return account
def make_earning_salary_component(setup=False, test_tax=False, company_list=None):
def make_earning_salary_component(
setup=False, test_tax=False, company_list=None, include_flexi_benefits=False
):
data = [
{
"salary_component": "Basic Salary",
@ -1043,7 +1047,7 @@ def make_earning_salary_component(setup=False, test_tax=False, company_list=None
},
{"salary_component": "Leave Encashment", "abbr": "LE", "type": "Earning"},
]
if test_tax:
if include_flexi_benefits:
data.extend(
[
{
@ -1063,11 +1067,18 @@ def make_earning_salary_component(setup=False, test_tax=False, company_list=None
"type": "Earning",
"max_benefit_amount": 15000,
},
]
)
if test_tax:
data.extend(
[
{"salary_component": "Performance Bonus", "abbr": "B", "type": "Earning"},
]
)
if setup or test_tax:
make_salary_component(data, test_tax, company_list)
data.append(
{
"salary_component": "Basic Salary",


@ -149,6 +149,7 @@ def make_salary_structure(
company=None,
currency=erpnext.get_default_currency(),
payroll_period=None,
include_flexi_benefits=False,
):
if test_tax:
frappe.db.sql("""delete from `tabSalary Structure` where name=%s""", (salary_structure))
@ -161,7 +162,10 @@ def make_salary_structure(
"name": salary_structure,
"company": company or erpnext.get_default_company(),
"earnings": make_earning_salary_component(
setup=True, test_tax=test_tax, company_list=["_Test Company"]
setup=True,
test_tax=test_tax,
company_list=["_Test Company"],
include_flexi_benefits=include_flexi_benefits,
),
"deductions": make_deduction_salary_component(
setup=True, test_tax=test_tax, company_list=["_Test Company"]


@ -0,0 +1,47 @@
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
/* eslint-disable */
frappe.query_reports["Income Tax Computation"] = {
"filters": [
{
"fieldname":"company",
"label": __("Company"),
"fieldtype": "Link",
"options": "Company",
"default": frappe.defaults.get_user_default("Company"),
"width": "100px",
"reqd": 1
},
{
"fieldname":"payroll_period",
"label": __("Payroll Period"),
"fieldtype": "Link",
"options": "Payroll Period",
"width": "100px",
"reqd": 1
},
{
"fieldname":"employee",
"label": __("Employee"),
"fieldtype": "Link",
"options": "Employee",
"width": "100px"
},
{
"fieldname":"department",
"label": __("Department"),
"fieldtype": "Link",
"options": "Department",
"width": "100px",
},
{
"fieldname":"consider_tax_exemption_declaration",
"label": __("Consider Tax Exemption Declaration"),
"fieldtype": "Check",
"width": "180px"
}
]
};
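These filter values arrive in the Python report that follows as a plain filters dict consumed by IncomeTaxComputationReport. A minimal sketch of invoking the report directly, with placeholder company and payroll period names (not values from the source):

import frappe
from erpnext.payroll.report.income_tax_computation.income_tax_computation import execute

filters = frappe._dict(
    {
        "company": "_Test Company",                 # placeholder
        "payroll_period": "_Test Payroll Period",   # placeholder
        "consider_tax_exemption_declaration": 1,
    }
)
columns, data = execute(filters)   # one row (dict) per employee with an active salary structure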


@ -0,0 +1,36 @@
{
"add_total_row": 0,
"columns": [],
"creation": "2022-02-17 17:19:30.921422",
"disable_prepared_report": 0,
"disabled": 0,
"docstatus": 0,
"doctype": "Report",
"filters": [],
"idx": 0,
"is_standard": "Yes",
"letter_head": "",
"modified": "2022-02-23 13:07:30.347861",
"modified_by": "Administrator",
"module": "Payroll",
"name": "Income Tax Computation",
"owner": "Administrator",
"prepared_report": 0,
"ref_doctype": "Salary Slip",
"report_name": "Income Tax Computation",
"report_type": "Script Report",
"roles": [
{
"role": "Employee"
},
{
"role": "HR User"
},
{
"role": "HR Manager"
},
{
"role": "Employee Self Service"
}
]
}


@ -0,0 +1,513 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe import _, scrub
from frappe.query_builder.functions import Sum
from frappe.utils import add_days, flt, getdate, rounded
from erpnext.payroll.doctype.payroll_entry.payroll_entry import get_start_end_dates
from erpnext.payroll.doctype.salary_slip.salary_slip import calculate_tax_by_tax_slab
def execute(filters=None):
return IncomeTaxComputationReport(filters).run()
class IncomeTaxComputationReport(object):
def __init__(self, filters=None):
self.filters = frappe._dict(filters or {})
self.columns = []
self.data = []
self.employees = frappe._dict()
self.payroll_period_start_date = None
self.payroll_period_end_date = None
if self.filters.payroll_period:
self.payroll_period_start_date, self.payroll_period_end_date = frappe.db.get_value(
"Payroll Period", self.filters.payroll_period, ["start_date", "end_date"]
)
def run(self):
self.get_fixed_columns()
self.get_data()
return self.columns, self.data
def get_data(self):
self.get_employee_details()
self.get_future_salary_slips()
self.get_ctc()
self.get_tax_exempted_earnings_and_deductions()
self.get_employee_tax_exemptions()
self.get_hra()
self.get_standard_tax_exemption()
self.get_total_taxable_amount()
self.get_applicable_tax()
self.get_total_deducted_tax()
self.get_payable_tax()
self.data = list(self.employees.values())
def get_employee_details(self):
filters, or_filters = self.get_employee_filters()
fields = [
"name as employee",
"employee_name",
"department",
"designation",
"date_of_joining",
"relieving_date",
]
employees = frappe.get_all("Employee", filters=filters, or_filters=or_filters, fields=fields)
ss_assignments = self.get_ss_assignments([d.employee for d in employees])
for d in employees:
if d.employee in list(ss_assignments.keys()):
d.update(ss_assignments[d.employee])
self.employees.setdefault(d.employee, d)
if not self.employees:
frappe.throw(_("No employees found with selected filters and active salary structure"))
def get_employee_filters(self):
filters = {"company": self.filters.company}
or_filters = {
"status": "Active",
"relieving_date": ["between", [self.payroll_period_start_date, self.payroll_period_end_date]],
}
if self.filters.employee:
filters = {"name": self.filters.employee}
elif self.filters.department:
filters.update({"department": self.filters.department})
return filters, or_filters
def get_ss_assignments(self, employees):
ss_assignments = frappe.get_all(
"Salary Structure Assignment",
filters={
"employee": ["in", employees],
"docstatus": 1,
"salary_structure": ["is", "set"],
"income_tax_slab": ["is", "set"],
},
fields=["employee", "income_tax_slab", "salary_structure"],
order_by="from_date desc",
)
employee_ss_assignments = frappe._dict()
for d in ss_assignments:
if d.employee not in list(employee_ss_assignments.keys()):
tax_slab = frappe.get_cached_value(
"Income Tax Slab", d.income_tax_slab, ["allow_tax_exemption", "disabled"], as_dict=1
)
if tax_slab and not tax_slab.disabled:
employee_ss_assignments.setdefault(
d.employee,
{
"salary_structure": d.salary_structure,
"income_tax_slab": d.income_tax_slab,
"allow_tax_exemption": tax_slab.allow_tax_exemption,
},
)
return employee_ss_assignments
def get_future_salary_slips(self):
self.future_salary_slips = frappe._dict()
for employee in list(self.employees.keys()):
last_ss = self.get_last_salary_slip(employee)
if last_ss and last_ss.end_date == self.payroll_period_end_date:
continue
relieving_date = self.employees[employee].get("relieving_date", "")
if last_ss:
ss_start_date = add_days(last_ss.end_date, 1)
else:
ss_start_date = self.payroll_period_start_date
last_ss = frappe._dict(
{
"payroll_frequency": "Monthly",
"salary_structure": self.employees[employee].get("salary_structure"),
}
)
while getdate(ss_start_date) < getdate(self.payroll_period_end_date) and (
not relieving_date or getdate(ss_start_date) < relieving_date
):
ss_end_date = get_start_end_dates(last_ss.payroll_frequency, ss_start_date).end_date
ss = frappe.new_doc("Salary Slip")
ss.employee = employee
ss.start_date = ss_start_date
ss.end_date = ss_end_date
ss.salary_structure = last_ss.salary_structure
ss.payroll_frequency = last_ss.payroll_frequency
ss.company = self.filters.company
try:
ss.process_salary_structure(for_preview=1)
self.future_salary_slips.setdefault(employee, []).append(ss.as_dict())
except Exception:
break
ss_start_date = add_days(ss_end_date, 1)
def get_last_salary_slip(self, employee):
last_salary_slip = frappe.db.get_value(
"Salary Slip",
{
"employee": employee,
"docstatus": 1,
"start_date": ["between", [self.payroll_period_start_date, self.payroll_period_end_date]],
},
["start_date", "end_date", "salary_structure", "payroll_frequency"],
order_by="start_date desc",
as_dict=1,
)
return last_salary_slip
def get_ctc(self):
# Get total earnings from existing salary slip
ss = frappe.qb.DocType("Salary Slip")
existing_ss = frappe._dict(
(
frappe.qb.from_(ss)
.select(ss.employee, Sum(ss.base_gross_pay).as_("amount"))
.where(ss.docstatus == 1)
.where(ss.employee.isin(list(self.employees.keys())))
.where(ss.start_date >= self.payroll_period_start_date)
.where(ss.end_date <= self.payroll_period_end_date)
.groupby(ss.employee)
).run()
)
for employee in list(self.employees.keys()):
future_ss_earnings = self.get_future_earnings(employee)
ctc = flt(existing_ss.get(employee)) + future_ss_earnings
self.employees[employee].setdefault("ctc", ctc)
def get_future_earnings(self, employee):
future_earnings = 0.0
for ss in self.future_salary_slips.get(employee, []):
future_earnings += flt(ss.base_gross_pay)
return future_earnings
def get_tax_exempted_earnings_and_deductions(self):
tax_exempted_components = self.get_tax_exempted_components()
# Get component totals from existing salary slips
ss = frappe.qb.DocType("Salary Slip")
ss_comps = frappe.qb.DocType("Salary Detail")
records = (
frappe.qb.from_(ss)
.inner_join(ss_comps)
.on(ss.name == ss_comps.parent)
.select(ss.name, ss.employee, ss_comps.salary_component, Sum(ss_comps.amount).as_("amount"))
.where(ss.docstatus == 1)
.where(ss.employee.isin(list(self.employees.keys())))
.where(ss_comps.salary_component.isin(tax_exempted_components))
.where(ss.start_date >= self.payroll_period_start_date)
.where(ss.end_date <= self.payroll_period_end_date)
.groupby(ss.employee, ss_comps.salary_component)
).run(as_dict=True)
existing_ss_exemptions = frappe._dict()
for d in records:
existing_ss_exemptions.setdefault(d.employee, {}).setdefault(
scrub(d.salary_component), d.amount
)
for employee in list(self.employees.keys()):
if not self.employees[employee]["allow_tax_exemption"]:
continue
exemptions = existing_ss_exemptions.get(employee, {})
self.add_exemptions_from_future_salary_slips(employee, exemptions)
self.employees[employee].update(exemptions)
total_exemptions = sum(list(exemptions.values()))
self.add_to_total_exemption(employee, total_exemptions)
def add_exemptions_from_future_salary_slips(self, employee, exemptions):
for ss in self.future_salary_slips.get(employee, []):
for e in ss.earnings:
if not e.is_tax_applicable:
exemptions.setdefault(scrub(e.salary_component), 0)
exemptions[scrub(e.salary_component)] += flt(e.amount)
for d in ss.deductions:
if d.exempted_from_income_tax:
exemptions.setdefault(scrub(d.salary_component), 0)
exemptions[scrub(d.salary_component)] += flt(d.amount)
return exemptions
def get_tax_exempted_components(self):
# nontaxable earning components
nontaxable_earning_components = [
d.name
for d in frappe.get_all(
"Salary Component", {"type": "Earning", "is_tax_applicable": 0, "disabled": 0}
)
]
# tax exempted deduction components
tax_exempted_deduction_components = [
d.name
for d in frappe.get_all(
"Salary Component", {"type": "Deduction", "exempted_from_income_tax": 1, "disabled": 0}
)
]
tax_exempted_components = nontaxable_earning_components + tax_exempted_deduction_components
# Add columns
for d in tax_exempted_components:
self.add_column(d)
return tax_exempted_components
def add_to_total_exemption(self, employee, amount):
self.employees[employee].setdefault("total_exemption", 0)
self.employees[employee]["total_exemption"] += amount
def get_employee_tax_exemptions(self):
# add columns
exemption_categories = frappe.get_all("Employee Tax Exemption Category", {"is_active": 1})
for d in exemption_categories:
self.add_column(d.name)
self.employees_with_proofs = []
self.get_tax_exemptions("Employee Tax Exemption Proof Submission")
if self.filters.consider_tax_exemption_declaration:
self.get_tax_exemptions("Employee Tax Exemption Declaration")
def get_tax_exemptions(self, source):
# Get category-wise exemptions based on submitted proofs or declarations
if source == "Employee Tax Exemption Proof Submission":
child_doctype = "Employee Tax Exemption Proof Submission Detail"
else:
child_doctype = "Employee Tax Exemption Declaration Category"
max_exemptions = self.get_max_exemptions_based_on_category()
par = frappe.qb.DocType(source)
child = frappe.qb.DocType(child_doctype)
records = (
frappe.qb.from_(par)
.inner_join(child)
.on(par.name == child.parent)
.select(par.employee, child.exemption_category, Sum(child.amount).as_("amount"))
.where(par.docstatus == 1)
.where(par.employee.isin(list(self.employees.keys())))
.where(par.payroll_period == self.filters.payroll_period)
.groupby(par.employee, child.exemption_category)
).run(as_dict=True)
for d in records:
if not self.employees[d.employee]["allow_tax_exemption"]:
continue
if source == "Employee Tax Exemption Declaration" and d.employee in self.employees_with_proofs:
continue
amount = flt(d.amount)
max_eligible_amount = flt(max_exemptions.get(d.exemption_category))
if max_eligible_amount and amount > max_eligible_amount:
amount = max_eligible_amount
self.employees[d.employee].setdefault(scrub(d.exemption_category), amount)
self.add_to_total_exemption(d.employee, amount)
if (
source == "Employee Tax Exemption Proof Submission"
and d.employee not in self.employees_with_proofs
):
self.employees_with_proofs.append(d.employee)
def get_max_exemptions_based_on_category(self):
return dict(
frappe.get_all(
"Employee Tax Exemption Category",
filters={"is_active": 1},
fields=["name", "max_amount"],
as_list=1,
)
)
def get_hra(self):
if not frappe.get_meta("Employee Tax Exemption Declaration").has_field("monthly_house_rent"):
return
self.add_column("HRA")
self.employees_with_proofs = []
self.get_eligible_hra("Employee Tax Exemption Proof Submission")
if self.filters.consider_tax_exemption_declaration:
self.get_eligible_hra("Employee Tax Exemption Declaration")
def get_eligible_hra(self, source):
if source == "Employee Tax Exemption Proof Submission":
hra_amount_field = "total_eligible_hra_exemption"
else:
hra_amount_field = "annual_hra_exemption"
records = frappe.get_all(
source,
filters={
"docstatus": 1,
"employee": ["in", list(self.employees.keys())],
"payroll_period": self.filters.payroll_period,
},
fields=["employee", hra_amount_field],
as_list=1,
)
for d in records:
if not self.employees[d[0]]["allow_tax_exemption"]:
continue
if d[0] not in self.employees_with_proofs:
self.employees[d[0]].setdefault("hra", d[1])
self.add_to_total_exemption(d[0], d[1])
self.employees_with_proofs.append(d[0])
def get_standard_tax_exemption(self):
self.add_column("Standard Tax Exemption")
standard_exemptions_per_slab = dict(
frappe.get_all(
"Income Tax Slab",
filters={"company": self.filters.company, "docstatus": 1, "disabled": 0},
fields=["name", "standard_tax_exemption_amount"],
as_list=1,
)
)
for emp, emp_details in self.employees.items():
if not self.employees[emp]["allow_tax_exemption"]:
continue
income_tax_slab = emp_details.get("income_tax_slab")
standard_exemption = standard_exemptions_per_slab.get(income_tax_slab, 0)
emp_details["standard_tax_exemption"] = standard_exemption
self.add_to_total_exemption(emp, standard_exemption)
self.add_column("Total Exemption")
def get_total_taxable_amount(self):
self.add_column("Total Taxable Amount")
for emp, emp_details in self.employees.items():
emp_details["total_taxable_amount"] = flt(emp_details.get("ctc")) - flt(
emp_details.get("total_exemption")
)
def get_applicable_tax(self):
self.add_column("Applicable Tax")
is_tax_rounded = frappe.db.get_value(
"Salary Component",
{"variable_based_on_taxable_salary": 1, "disabled": 0},
"round_to_the_nearest_integer",
)
for emp, emp_details in self.employees.items():
tax_slab = emp_details.get("income_tax_slab")
if tax_slab:
tax_slab = frappe.get_cached_doc("Income Tax Slab", tax_slab)
employee_dict = frappe.get_doc("Employee", emp).as_dict()
tax_amount = calculate_tax_by_tax_slab(
emp_details["total_taxable_amount"], tax_slab, eval_globals=None, eval_locals=employee_dict
)
else:
tax_amount = 0.0
if is_tax_rounded:
tax_amount = rounded(tax_amount)
emp_details["applicable_tax"] = tax_amount
def get_total_deducted_tax(self):
self.add_column("Total Tax Deducted")
ss = frappe.qb.DocType("Salary Slip")
ss_ded = frappe.qb.DocType("Salary Detail")
records = (
frappe.qb.from_(ss)
.inner_join(ss_ded)
.on(ss.name == ss_ded.parent)
.select(ss.employee, Sum(ss_ded.amount).as_("amount"))
.where(ss.docstatus == 1)
.where(ss.employee.isin(list(self.employees.keys())))
.where(ss_ded.parentfield == "deductions")
.where(ss_ded.variable_based_on_taxable_salary == 1)
.where(ss.start_date >= self.payroll_period_start_date)
.where(ss.end_date <= self.payroll_period_end_date)
.groupby(ss.employee)
).run(as_dict=True)
for d in records:
self.employees[d.employee].setdefault("total_tax_deducted", d.amount)
def get_payable_tax(self):
self.add_column("Payable Tax")
for emp, emp_details in self.employees.items():
emp_details["payable_tax"] = flt(emp_details.get("applicable_tax")) - flt(
emp_details.get("total_tax_deducted")
)
def add_column(self, label, fieldname=None, fieldtype=None, options=None, width=None):
col = {
"label": _(label),
"fieldname": fieldname or scrub(label),
"fieldtype": fieldtype or "Currency",
"options": options,
"width": width or "140px",
}
self.columns.append(col)
def get_fixed_columns(self):
self.columns = [
{
"label": _("Employee"),
"fieldname": "employee",
"fieldtype": "Link",
"options": "Employee",
"width": "140px",
},
{
"label": _("Employee Name"),
"fieldname": "employee_name",
"fieldtype": "Data",
"width": "160px",
},
{
"label": _("Department"),
"fieldname": "department",
"fieldtype": "Link",
"options": "Department",
"width": "140px",
},
{
"label": _("Designation"),
"fieldname": "designation",
"fieldtype": "Link",
"options": "Designation",
"width": "140px",
},
{"label": _("Date of Joining"), "fieldname": "date_of_joining", "fieldtype": "Date"},
{
"label": _("Income Tax Slab"),
"fieldname": "income_tax_slab",
"fieldtype": "Link",
"options": "Income Tax Slab",
"width": "140px",
},
{"label": _("CTC"), "fieldname": "ctc", "fieldtype": "Currency", "width": "140px"},
]


@ -0,0 +1,115 @@
import unittest
import frappe
from frappe.utils import getdate
from erpnext.hr.doctype.employee.test_employee import make_employee
from erpnext.payroll.doctype.employee_tax_exemption_declaration.test_employee_tax_exemption_declaration import (
create_payroll_period,
)
from erpnext.payroll.doctype.salary_slip.test_salary_slip import (
create_exemption_declaration,
create_salary_slips_for_payroll_period,
create_tax_slab,
)
from erpnext.payroll.doctype.salary_structure.test_salary_structure import make_salary_structure
from erpnext.payroll.report.income_tax_computation.income_tax_computation import execute
class TestIncomeTaxComputation(unittest.TestCase):
def setUp(self):
self.cleanup_records()
self.create_records()
def tearDown(self):
frappe.db.rollback()
def cleanup_records(self):
frappe.db.sql("delete from `tabEmployee Tax Exemption Declaration`")
frappe.db.sql("delete from `tabPayroll Period`")
frappe.db.sql("delete from `tabIncome Tax Slab`")
frappe.db.sql("delete from `tabSalary Component`")
frappe.db.sql("delete from `tabEmployee Benefit Application`")
frappe.db.sql("delete from `tabEmployee Benefit Claim`")
frappe.db.sql("delete from `tabEmployee` where company='_Test Company'")
frappe.db.sql("delete from `tabSalary Slip`")
def create_records(self):
self.employee = make_employee(
"employee_tax_computation@example.com",
company="_Test Company",
date_of_joining=getdate("01-10-2021"),
)
self.payroll_period = create_payroll_period(
name="_Test Payroll Period 1", company="_Test Company"
)
self.income_tax_slab = create_tax_slab(
self.payroll_period,
allow_tax_exemption=True,
effective_date=getdate("2019-04-01"),
company="_Test Company",
)
salary_structure = make_salary_structure(
"Monthly Salary Structure Test Income Tax Computation",
"Monthly",
employee=self.employee,
company="_Test Company",
currency="INR",
payroll_period=self.payroll_period,
test_tax=True,
)
create_exemption_declaration(self.employee, self.payroll_period.name)
create_salary_slips_for_payroll_period(
self.employee, salary_structure.name, self.payroll_period, deduct_random=False, num=3
)
def test_report(self):
filters = frappe._dict(
{
"company": "_Test Company",
"payroll_period": self.payroll_period.name,
"employee": self.employee,
}
)
result = execute(filters)
expected_data = {
"employee": self.employee,
"employee_name": "employee_tax_computation@example.com",
"department": "All Departments",
"income_tax_slab": self.income_tax_slab,
"ctc": 936000.0,
"professional_tax": 2400.0,
"standard_tax_exemption": 50000,
"total_exemption": 52400.0,
"total_taxable_amount": 883600.0,
"applicable_tax": 92789.0,
"total_tax_deducted": 17997.0,
"payable_tax": 74792,
}
for key, val in expected_data.items():
self.assertEqual(result[1][0].get(key), val)
# Run report considering tax exemption declaration
filters.consider_tax_exemption_declaration = 1
result = execute(filters)
expected_data.update(
{
"_test_category": 100000.0,
"total_exemption": 152400.0,
"total_taxable_amount": 783600.0,
"applicable_tax": 71989.0,
"payable_tax": 53992.0,
}
)
for key, val in expected_data.items():
self.assertEqual(result[1][0].get(key), val)
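The shift between the two expected result sets above is internally consistent: enabling consider_tax_exemption_declaration adds the declared _test_category exemption of 100000, which lowers the taxable amount by the same figure and reduces both the applicable and payable tax by 20800, consistent with a 20% marginal slab plus 4% cess on tax (an inference from the numbers, not something the test states):

declared_exemption = 100000
taxable_before = 883600.0
taxable_after = taxable_before - declared_exemption            # 783600.0 as asserted above
tax_delta = 92789.0 - 71989.0                                  # 20800.0
assert abs(tax_delta - declared_exemption * 0.20 * 1.04) < 1   # 20% slab + 4% cess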


@ -245,6 +245,17 @@
"onboard": 0,
"type": "Link"
},
{
"dependencies": "Salary Structure",
"hidden": 0,
"is_query_report": 1,
"label": "Income Tax Computation",
"link_count": 0,
"link_to": "Income Tax Computation",
"link_type": "Report",
"onboard": 0,
"type": "Link"
},
{
"dependencies": "Salary Slip",
"hidden": 0,
@ -312,7 +323,7 @@
"type": "Link"
}
],
"modified": "2022-01-13 17:41:19.098813",
"modified": "2022-02-23 17:41:19.098813",
"modified_by": "Administrator",
"module": "Payroll",
"name": "Payroll",


@ -140,6 +140,14 @@ class CallPopup {
}, {
'fieldtype': 'Section Break',
'hide_border': 1,
}, {
'fieldname': 'call_type',
'label': 'Call Type',
'fieldtype': 'Link',
'options': 'Telephony Call Type',
}, {
'fieldtype': 'Section Break',
'hide_border': 1,
}, {
'fieldtype': 'Small Text',
'label': __('Call Summary'),
@ -149,10 +157,12 @@ class CallPopup {
'label': __('Save'),
'click': () => {
const call_summary = this.call_details.get_value('call_summary');
const call_type = this.call_details.get_value('call_type');
if (!call_summary) return;
frappe.xcall('erpnext.telephony.doctype.call_log.call_log.add_call_summary', {
frappe.xcall('erpnext.telephony.doctype.call_log.call_log.add_call_summary_and_call_type', {
'call_log': this.call_log.name,
'summary': call_summary,
'call_type': call_type,
}).then(() => {
this.close_modal();
frappe.show_alert({


@ -1388,6 +1388,11 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
return;
}
// Target doc created from a mapped doc
if (this.frm.doc.__onload && this.frm.doc.__onload.ignore_price_list) {
return;
}
return this.frm.call({
method: "erpnext.accounts.doctype.pricing_rule.pricing_rule.apply_pricing_rule",
args: { args: args, doc: me.frm.doc },
@ -1504,7 +1509,7 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
me.remove_pricing_rule(frappe.get_doc(d.doctype, d.name));
}
if (d.free_item_data) {
if (d.free_item_data.length > 0) {
me.apply_product_discount(d);
}


@ -19,7 +19,7 @@ class TestQualityProcedure(unittest.TestCase):
)
).insert()
frappe.form_dict = dict(
frappe.local.form_dict = frappe._dict(
doctype="Quality Procedure",
quality_procedure_name="Test Child 1",
parent_quality_procedure=procedure.name,


@ -1,8 +0,0 @@
// Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
frappe.ui.form.on('DATEV Settings', {
refresh: function(frm) {
frm.add_custom_button(__('Show Report'), () => frappe.set_route('query-report', 'DATEV'), "fa fa-table");
}
});


@ -1,125 +0,0 @@
{
"actions": [],
"autoname": "field:client",
"creation": "2019-08-13 23:56:34.259906",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"client",
"client_number",
"column_break_2",
"consultant_number",
"consultant",
"section_break_4",
"account_number_length",
"column_break_6",
"temporary_against_account_number"
],
"fields": [
{
"fieldname": "client",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Client",
"options": "Company",
"reqd": 1,
"unique": 1
},
{
"fieldname": "client_number",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Client ID",
"length": 5,
"reqd": 1
},
{
"fieldname": "consultant",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Consultant",
"options": "Supplier"
},
{
"fieldname": "consultant_number",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Consultant ID",
"length": 7,
"reqd": 1
},
{
"fieldname": "column_break_2",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_4",
"fieldtype": "Section Break"
},
{
"fieldname": "column_break_6",
"fieldtype": "Column Break"
},
{
"default": "4",
"fieldname": "account_number_length",
"fieldtype": "Int",
"label": "Account Number Length",
"reqd": 1
},
{
"allow_in_quick_entry": 1,
"fieldname": "temporary_against_account_number",
"fieldtype": "Data",
"label": "Temporary Against Account Number",
"reqd": 1
}
],
"links": [],
"modified": "2020-11-19 19:00:09.088816",
"modified_by": "Administrator",
"module": "Regional",
"name": "DATEV Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
},
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Accounts Manager",
"share": 1,
"write": 1
},
{
"create": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Accounts User",
"share": 1
}
],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}


@ -1,9 +0,0 @@
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
import unittest
class TestDATEVSettings(unittest.TestCase):
pass


@ -1,35 +0,0 @@
import frappe
from frappe.custom.doctype.custom_field.custom_field import create_custom_fields
def setup(company=None, patch=True):
make_custom_fields()
add_custom_roles_for_reports()
def make_custom_fields():
custom_fields = {
"Party Account": [
dict(
fieldname="debtor_creditor_number",
label="Debtor/Creditor Number",
fieldtype="Data",
insert_after="account",
translatable=0,
)
]
}
create_custom_fields(custom_fields)
def add_custom_roles_for_reports():
"""Add Access Control to UAE VAT 201."""
if not frappe.db.get_value("Custom Role", dict(report="DATEV")):
frappe.get_doc(
dict(
doctype="Custom Role",
report="DATEV",
roles=[dict(role="Accounts User"), dict(role="Accounts Manager")],
)
).insert()


@ -1,501 +0,0 @@
"""Constants used in datev.py."""
TRANSACTION_COLUMNS = [
# All possible columns must be listed here, because DATEV requires them to
# be present in the CSV.
# ---
# Umsatz
"Umsatz (ohne Soll/Haben-Kz)",
"Soll/Haben-Kennzeichen",
"WKZ Umsatz",
"Kurs",
"Basis-Umsatz",
"WKZ Basis-Umsatz",
# Konto/Gegenkonto
"Konto",
"Gegenkonto (ohne BU-Schlüssel)",
"BU-Schlüssel",
# Datum
"Belegdatum",
# Rechnungs- / Belegnummer
"Belegfeld 1",
# z.B. Fälligkeitsdatum Format: TTMMJJ
"Belegfeld 2",
# Skonto-Betrag / -Abzug (Der Wert 0 ist unzulässig)
"Skonto",
# Beschreibung des Buchungssatzes
"Buchungstext",
# Mahn- / Zahl-Sperre (1 = Postensperre)
"Postensperre",
"Diverse Adressnummer",
"Geschäftspartnerbank",
"Sachverhalt",
# Keine Mahnzinsen
"Zinssperre",
# Link auf den Buchungsbeleg (Programmkürzel + GUID)
"Beleglink",
# Beleginfo
"Beleginfo - Art 1",
"Beleginfo - Inhalt 1",
"Beleginfo - Art 2",
"Beleginfo - Inhalt 2",
"Beleginfo - Art 3",
"Beleginfo - Inhalt 3",
"Beleginfo - Art 4",
"Beleginfo - Inhalt 4",
"Beleginfo - Art 5",
"Beleginfo - Inhalt 5",
"Beleginfo - Art 6",
"Beleginfo - Inhalt 6",
"Beleginfo - Art 7",
"Beleginfo - Inhalt 7",
"Beleginfo - Art 8",
"Beleginfo - Inhalt 8",
# Zuordnung des Geschäftsvorfalls für die Kostenrechnung
"KOST1 - Kostenstelle",
"KOST2 - Kostenstelle",
"KOST-Menge",
# USt-ID-Nummer (Beispiel: DE133546770)
"EU-Mitgliedstaat u. USt-IdNr.",
# Der im EU-Bestimmungsland gültige Steuersatz
"EU-Steuersatz",
# I = Ist-Versteuerung,
# K = keine Umsatzsteuerrechnung
# P = Pauschalierung (z. B. für Land- und Forstwirtschaft),
# S = Soll-Versteuerung
"Abw. Versteuerungsart",
# Sachverhalte gem. § 13b Abs. 1 Satz 1 Nrn. 1.-5. UStG
"Sachverhalt L+L",
# Steuersatz / Funktion zum L+L-Sachverhalt (Beispiel: Wert 190 für 19%)
"Funktionsergänzung L+L",
# Bei Verwendung des BU-Schlüssels 49 für „andere Steuersätze“ muss der
# steuerliche Sachverhalt mitgegeben werden
"BU 49 Hauptfunktionstyp",
"BU 49 Hauptfunktionsnummer",
"BU 49 Funktionsergänzung",
# Zusatzinformationen, besitzen den Charakter eines Notizzettels und können
# frei erfasst werden.
"Zusatzinformation - Art 1",
"Zusatzinformation - Inhalt 1",
"Zusatzinformation - Art 2",
"Zusatzinformation - Inhalt 2",
"Zusatzinformation - Art 3",
"Zusatzinformation - Inhalt 3",
"Zusatzinformation - Art 4",
"Zusatzinformation - Inhalt 4",
"Zusatzinformation - Art 5",
"Zusatzinformation - Inhalt 5",
"Zusatzinformation - Art 6",
"Zusatzinformation - Inhalt 6",
"Zusatzinformation - Art 7",
"Zusatzinformation - Inhalt 7",
"Zusatzinformation - Art 8",
"Zusatzinformation - Inhalt 8",
"Zusatzinformation - Art 9",
"Zusatzinformation - Inhalt 9",
"Zusatzinformation - Art 10",
"Zusatzinformation - Inhalt 10",
"Zusatzinformation - Art 11",
"Zusatzinformation - Inhalt 11",
"Zusatzinformation - Art 12",
"Zusatzinformation - Inhalt 12",
"Zusatzinformation - Art 13",
"Zusatzinformation - Inhalt 13",
"Zusatzinformation - Art 14",
"Zusatzinformation - Inhalt 14",
"Zusatzinformation - Art 15",
"Zusatzinformation - Inhalt 15",
"Zusatzinformation - Art 16",
"Zusatzinformation - Inhalt 16",
"Zusatzinformation - Art 17",
"Zusatzinformation - Inhalt 17",
"Zusatzinformation - Art 18",
"Zusatzinformation - Inhalt 18",
"Zusatzinformation - Art 19",
"Zusatzinformation - Inhalt 19",
"Zusatzinformation - Art 20",
"Zusatzinformation - Inhalt 20",
# Only takes effect for cases using SKR 14 (agriculture and forestry);
# for other SKRs these fields are skipped on import or
# exported empty.
"Stück",
"Gewicht",
# 1 = direct debit
# 2 = dunning
# 3 = payment
"Zahlweise",
"Forderungsart",
# YYYY
"Veranlagungsjahr",
# DDMMYYYY
"Zugeordnete Fälligkeit",
# 1 = purchase of goods
# 2 = purchase of raw materials, consumables and supplies
"Skontotyp",
# General description of the order / project.
"Auftragsnummer",
# AA = requested advance payment / progress invoice
# AG = advance payment received (cash receipt)
# AV = advance payment received (liability)
# SR = final invoice
# SU = final invoice (transfer posting)
# SG = final invoice (cash receipt)
# SO = other
"Buchungstyp",
"USt-Schlüssel (Anzahlungen)",
"EU-Mitgliedstaat (Anzahlungen)",
"Sachverhalt L+L (Anzahlungen)",
"EU-Steuersatz (Anzahlungen)",
"Erlöskonto (Anzahlungen)",
# Replaced with SV (batch processing) on import.
"Herkunft-Kz",
# Used by DATEV.
"Leerfeld",
# Format DDMMYYYY
"KOST-Datum",
# Mandate identifier assigned individually by the payee
# (e.g. invoice or customer number).
"SEPA-Mandatsreferenz",
# 1 = cash discount block
# 0 = no cash discount block
"Skontosperre",
# Shareholder and special balance sheet case
"Gesellschaftername",
# Official number from the assessment declaration
"Beteiligtennummer",
"Identifikationsnummer",
"Zeichnernummer",
# Format DDMMYYYY
"Postensperre bis",
# Shareholder and special balance sheet case
"Bezeichnung SoBil-Sachverhalt",
"Kennzeichen SoBil-Buchung",
# 0 = not finalized
# 1 = finalized
"Festschreibung",
# Format DDMMYYYY
"Leistungsdatum",
# Format DDMMYYYY
"Datum Zuord. Steuerperiode",
# OPOS (open items) information, format DDMMYYYY
"Fälligkeit",
# G or 1 = general reversal
# 0 = no general reversal
"Generalumkehr (GU)",
# Tax rate for the tax key
"Steuersatz",
# Example: DE for Germany
"Land",
]
DEBTOR_CREDITOR_COLUMNS = [
# All possible columns must be listed here, because DATEV requires them to
# be present in the CSV.
# Columns "Leerfeld" have been replaced with "Leerfeld #" to not confuse pandas
# ---
"Konto",
"Name (Adressatentyp Unternehmen)",
"Unternehmensgegenstand",
"Name (Adressatentyp natürl. Person)",
"Vorname (Adressatentyp natürl. Person)",
"Name (Adressatentyp keine Angabe)",
"Adressatentyp",
"Kurzbezeichnung",
"EU-Land",
"EU-USt-IdNr.",
"Anrede",
"Titel/Akad. Grad",
"Adelstitel",
"Namensvorsatz",
"Adressart",
"Straße",
"Postfach",
"Postleitzahl",
"Ort",
"Land",
"Versandzusatz",
"Adresszusatz",
"Abweichende Anrede",
"Abw. Zustellbezeichnung 1",
"Abw. Zustellbezeichnung 2",
"Kennz. Korrespondenzadresse",
"Adresse gültig von",
"Adresse gültig bis",
"Telefon",
"Bemerkung (Telefon)",
"Telefon Geschäftsleitung",
"Bemerkung (Telefon GL)",
"E-Mail",
"Bemerkung (E-Mail)",
"Internet",
"Bemerkung (Internet)",
"Fax",
"Bemerkung (Fax)",
"Sonstige",
"Bemerkung (Sonstige)",
"Bankleitzahl 1",
"Bankbezeichnung 1",
"Bankkonto-Nummer 1",
"Länderkennzeichen 1",
"IBAN 1",
"Leerfeld 1",
"SWIFT-Code 1",
"Abw. Kontoinhaber 1",
"Kennz. Haupt-Bankverb. 1",
"Bankverb. 1 Gültig von",
"Bankverb. 1 Gültig bis",
"Bankleitzahl 2",
"Bankbezeichnung 2",
"Bankkonto-Nummer 2",
"Länderkennzeichen 2",
"IBAN 2",
"Leerfeld 2",
"SWIFT-Code 2",
"Abw. Kontoinhaber 2",
"Kennz. Haupt-Bankverb. 2",
"Bankverb. 2 gültig von",
"Bankverb. 2 gültig bis",
"Bankleitzahl 3",
"Bankbezeichnung 3",
"Bankkonto-Nummer 3",
"Länderkennzeichen 3",
"IBAN 3",
"Leerfeld 3",
"SWIFT-Code 3",
"Abw. Kontoinhaber 3",
"Kennz. Haupt-Bankverb. 3",
"Bankverb. 3 gültig von",
"Bankverb. 3 gültig bis",
"Bankleitzahl 4",
"Bankbezeichnung 4",
"Bankkonto-Nummer 4",
"Länderkennzeichen 4",
"IBAN 4",
"Leerfeld 4",
"SWIFT-Code 4",
"Abw. Kontoinhaber 4",
"Kennz. Haupt-Bankverb. 4",
"Bankverb. 4 Gültig von",
"Bankverb. 4 Gültig bis",
"Bankleitzahl 5",
"Bankbezeichnung 5",
"Bankkonto-Nummer 5",
"Länderkennzeichen 5",
"IBAN 5",
"Leerfeld 5",
"SWIFT-Code 5",
"Abw. Kontoinhaber 5",
"Kennz. Haupt-Bankverb. 5",
"Bankverb. 5 gültig von",
"Bankverb. 5 gültig bis",
"Leerfeld 6",
"Briefanrede",
"Grußformel",
"Kundennummer",
"Steuernummer",
"Sprache",
"Ansprechpartner",
"Vertreter",
"Sachbearbeiter",
"Diverse-Konto",
"Ausgabeziel",
"Währungssteuerung",
"Kreditlimit (Debitor)",
"Zahlungsbedingung",
"Fälligkeit in Tagen (Debitor)",
"Skonto in Prozent (Debitor)",
"Kreditoren-Ziel 1 (Tage)",
"Kreditoren-Skonto 1 (%)",
"Kreditoren-Ziel 2 (Tage)",
"Kreditoren-Skonto 2 (%)",
"Kreditoren-Ziel 3 Brutto (Tage)",
"Kreditoren-Ziel 4 (Tage)",
"Kreditoren-Skonto 4 (%)",
"Kreditoren-Ziel 5 (Tage)",
"Kreditoren-Skonto 5 (%)",
"Mahnung",
"Kontoauszug",
"Mahntext 1",
"Mahntext 2",
"Mahntext 3",
"Kontoauszugstext",
"Mahnlimit Betrag",
"Mahnlimit %",
"Zinsberechnung",
"Mahnzinssatz 1",
"Mahnzinssatz 2",
"Mahnzinssatz 3",
"Lastschrift",
"Verfahren",
"Mandantenbank",
"Zahlungsträger",
"Indiv. Feld 1",
"Indiv. Feld 2",
"Indiv. Feld 3",
"Indiv. Feld 4",
"Indiv. Feld 5",
"Indiv. Feld 6",
"Indiv. Feld 7",
"Indiv. Feld 8",
"Indiv. Feld 9",
"Indiv. Feld 10",
"Indiv. Feld 11",
"Indiv. Feld 12",
"Indiv. Feld 13",
"Indiv. Feld 14",
"Indiv. Feld 15",
"Abweichende Anrede (Rechnungsadresse)",
"Adressart (Rechnungsadresse)",
"Straße (Rechnungsadresse)",
"Postfach (Rechnungsadresse)",
"Postleitzahl (Rechnungsadresse)",
"Ort (Rechnungsadresse)",
"Land (Rechnungsadresse)",
"Versandzusatz (Rechnungsadresse)",
"Adresszusatz (Rechnungsadresse)",
"Abw. Zustellbezeichnung 1 (Rechnungsadresse)",
"Abw. Zustellbezeichnung 2 (Rechnungsadresse)",
"Adresse Gültig von (Rechnungsadresse)",
"Adresse Gültig bis (Rechnungsadresse)",
"Bankleitzahl 6",
"Bankbezeichnung 6",
"Bankkonto-Nummer 6",
"Länderkennzeichen 6",
"IBAN 6",
"Leerfeld 7",
"SWIFT-Code 6",
"Abw. Kontoinhaber 6",
"Kennz. Haupt-Bankverb. 6",
"Bankverb 6 gültig von",
"Bankverb 6 gültig bis",
"Bankleitzahl 7",
"Bankbezeichnung 7",
"Bankkonto-Nummer 7",
"Länderkennzeichen 7",
"IBAN 7",
"Leerfeld 8",
"SWIFT-Code 7",
"Abw. Kontoinhaber 7",
"Kennz. Haupt-Bankverb. 7",
"Bankverb 7 gültig von",
"Bankverb 7 gültig bis",
"Bankleitzahl 8",
"Bankbezeichnung 8",
"Bankkonto-Nummer 8",
"Länderkennzeichen 8",
"IBAN 8",
"Leerfeld 9",
"SWIFT-Code 8",
"Abw. Kontoinhaber 8",
"Kennz. Haupt-Bankverb. 8",
"Bankverb 8 gültig von",
"Bankverb 8 gültig bis",
"Bankleitzahl 9",
"Bankbezeichnung 9",
"Bankkonto-Nummer 9",
"Länderkennzeichen 9",
"IBAN 9",
"Leerfeld 10",
"SWIFT-Code 9",
"Abw. Kontoinhaber 9",
"Kennz. Haupt-Bankverb. 9",
"Bankverb 9 gültig von",
"Bankverb 9 gültig bis",
"Bankleitzahl 10",
"Bankbezeichnung 10",
"Bankkonto-Nummer 10",
"Länderkennzeichen 10",
"IBAN 10",
"Leerfeld 11",
"SWIFT-Code 10",
"Abw. Kontoinhaber 10",
"Kennz. Haupt-Bankverb. 10",
"Bankverb 10 gültig von",
"Bankverb 10 gültig bis",
"Nummer Fremdsystem",
"Insolvent",
"SEPA-Mandatsreferenz 1",
"SEPA-Mandatsreferenz 2",
"SEPA-Mandatsreferenz 3",
"SEPA-Mandatsreferenz 4",
"SEPA-Mandatsreferenz 5",
"SEPA-Mandatsreferenz 6",
"SEPA-Mandatsreferenz 7",
"SEPA-Mandatsreferenz 8",
"SEPA-Mandatsreferenz 9",
"SEPA-Mandatsreferenz 10",
"Verknüpftes OPOS-Konto",
"Mahnsperre bis",
"Lastschriftsperre bis",
"Zahlungssperre bis",
"Gebührenberechnung",
"Mahngebühr 1",
"Mahngebühr 2",
"Mahngebühr 3",
"Pauschalberechnung",
"Verzugspauschale 1",
"Verzugspauschale 2",
"Verzugspauschale 3",
"Alternativer Suchname",
"Status",
"Anschrift manuell geändert (Korrespondenzadresse)",
"Anschrift individuell (Korrespondenzadresse)",
"Anschrift manuell geändert (Rechnungsadresse)",
"Anschrift individuell (Rechnungsadresse)",
"Fristberechnung bei Debitor",
"Mahnfrist 1",
"Mahnfrist 2",
"Mahnfrist 3",
"Letzte Frist",
]
ACCOUNT_NAME_COLUMNS = [
# Account number
"Konto",
# Account name
"Kontenbeschriftung",
# Language of the account name
# "de-DE" or "en-GB"
"Sprach-ID",
]
class DataCategory:
"""Field of the CSV Header."""
DEBTORS_CREDITORS = "16"
ACCOUNT_NAMES = "20"
TRANSACTIONS = "21"
POSTING_TEXT_CONSTANTS = "67"
class FormatName:
"""Field of the CSV Header, corresponds to DataCategory."""
DEBTORS_CREDITORS = "Debitoren/Kreditoren"
ACCOUNT_NAMES = "Kontenbeschriftungen"
TRANSACTIONS = "Buchungsstapel"
POSTING_TEXT_CONSTANTS = "Buchungstextkonstanten"
class Transactions:
DATA_CATEGORY = DataCategory.TRANSACTIONS
FORMAT_NAME = FormatName.TRANSACTIONS
FORMAT_VERSION = "9"
COLUMNS = TRANSACTION_COLUMNS
class DebtorsCreditors:
DATA_CATEGORY = DataCategory.DEBTORS_CREDITORS
FORMAT_NAME = FormatName.DEBTORS_CREDITORS
FORMAT_VERSION = "5"
COLUMNS = DEBTOR_CREDITOR_COLUMNS
class AccountNames:
DATA_CATEGORY = DataCategory.ACCOUNT_NAMES
FORMAT_NAME = FormatName.ACCOUNT_NAMES
FORMAT_VERSION = "2"
COLUMNS = ACCOUNT_NAME_COLUMNS

View File

@ -1,184 +0,0 @@
import datetime
import zipfile
from csv import QUOTE_NONNUMERIC
from io import BytesIO
import frappe
import pandas as pd
from frappe import _
from .datev_constants import DataCategory
def get_datev_csv(data, filters, csv_class):
"""
Fill in missing columns and return a CSV in DATEV Format.
For automatic processing, DATEV requires the first line of the CSV file to
hold metadata such as the length of account numbers or the category of
the data.
Arguments:
data -- array of dictionaries
filters -- dict
csv_class -- defines DATA_CATEGORY, FORMAT_NAME and COLUMNS
"""
empty_df = pd.DataFrame(columns=csv_class.COLUMNS)
data_df = pd.DataFrame.from_records(data)
result = empty_df.append(data_df, sort=True)
if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS:
result["Belegdatum"] = pd.to_datetime(result["Belegdatum"])
result["Beleginfo - Inhalt 6"] = pd.to_datetime(result["Beleginfo - Inhalt 6"])
result["Beleginfo - Inhalt 6"] = result["Beleginfo - Inhalt 6"].dt.strftime("%d%m%Y")
result["Fälligkeit"] = pd.to_datetime(result["Fälligkeit"])
result["Fälligkeit"] = result["Fälligkeit"].dt.strftime("%d%m%y")
result.sort_values(by="Belegdatum", inplace=True, kind="stable", ignore_index=True)
if csv_class.DATA_CATEGORY == DataCategory.ACCOUNT_NAMES:
result["Sprach-ID"] = "de-DE"
data = result.to_csv(
# Reason for str(';'): https://github.com/pandas-dev/pandas/issues/6035
sep=";",
# European decimal separator
decimal=",",
# Windows "ANSI" encoding
encoding="latin_1",
# format date as DDMM
date_format="%d%m",
# Windows line terminator
line_terminator="\r\n",
# Do not number rows
index=False,
# Use all columns defined above
columns=csv_class.COLUMNS,
# Quote most fields, even currency values with "," separator
quoting=QUOTE_NONNUMERIC,
)
data = data.encode("latin_1", errors="replace")
header = get_header(filters, csv_class)
header = ";".join(header).encode("latin_1", errors="replace")
# 1st Row: Header with meta data
# 2nd Row: Data heading (Überschrift der Nutzdaten), included in `data` here.
# 3rd - nth Row: Data (Nutzdaten)
return header + b"\r\n" + data
def get_header(filters, csv_class):
description = filters.get("voucher_type", csv_class.FORMAT_NAME)
company = filters.get("company")
datev_settings = frappe.get_doc("DATEV Settings", {"client": company})
default_currency = frappe.get_value("Company", company, "default_currency")
coa = frappe.get_value("Company", company, "chart_of_accounts")
coa_short_code = "04" if "SKR04" in coa else ("03" if "SKR03" in coa else "")
header = [
# DATEV format
# "DTVF" = created by DATEV software,
# "EXTF" = created by other software
'"EXTF"',
# version of the DATEV format
# 141 = 1.41,
# 510 = 5.10,
# 720 = 7.20
"700",
csv_class.DATA_CATEGORY,
'"%s"' % csv_class.FORMAT_NAME,
# Format version (regarding format name)
csv_class.FORMAT_VERSION,
# Generated on
datetime.datetime.now().strftime("%Y%m%d%H%M%S") + "000",
# Imported on -- stays empty
"",
# Origin. Any two symbols; will be replaced by "SV" on import.
'"EN"',
# I = Exported by
'"%s"' % frappe.session.user,
# J = Imported by -- stays empty
"",
# K = Tax consultant number (Beraternummer)
datev_settings.get("consultant_number", "0000000"),
# L = Tax client number (Mandantennummer)
datev_settings.get("client_number", "00000"),
# M = Start of the fiscal year (Wirtschaftsjahresbeginn)
frappe.utils.formatdate(filters.get("fiscal_year_start"), "yyyyMMdd"),
# N = Length of account numbers (Sachkontenlänge)
str(filters.get("account_number_length", 4)),
# O = Transaction batch start date (YYYYMMDD)
frappe.utils.formatdate(filters.get("from_date"), "yyyyMMdd")
if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS
else "",
# P = Transaction batch end date (YYYYMMDD)
frappe.utils.formatdate(filters.get("to_date"), "yyyyMMdd")
if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS
else "",
# Q = Description (for example, "Sales Invoice") Max. 30 chars
'"{}"'.format(_(description)) if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS else "",
# R = dictation code (Diktatkürzel)
"",
# S = posting type (Buchungstyp)
# 1 = Transaction batch (Finanzbuchführung),
# 2 = Annual financial statement (Jahresabschluss)
"1" if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS else "",
# T = accounting purpose (Rechnungslegungszweck)
# 0 or empty = independent of the accounting purpose
# 50 = commercial law (Handelsrecht)
# 30 = tax law (Steuerrecht)
# 64 = IFRS
# 40 = cost accounting (Kalkulatorik)
# 11 = reserved
# 12 = reserved
"0" if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS else "",
# U = finalization (Festschreibung)
# TODO: Filter by Accounting Period. In export for closed Accounting Period, this will be "1"
"0",
# V = Default currency, for example, "EUR"
'"%s"' % default_currency if csv_class.DATA_CATEGORY == DataCategory.TRANSACTIONS else "",
# reserved
"",
# derivative flag (Derivatskennzeichen)
"",
# reserved
"",
# reserved
"",
# SKR
'"%s"' % coa_short_code,
# industry solution ID (Branchen-Lösungs-ID)
"",
# reserved
"",
# reserved
"",
# application information (processing flag of the exporting application)
"",
]
return header
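Joined with ";" (as get_datev_csv does), the list above becomes the first line of the export file. Purely as an illustration — reusing the consultant/client numbers from the test fixtures further below and assuming a "Sales Invoice" export for June 2021 — a Buchungsstapel header would start roughly like this:

"EXTF";700;21;"Buchungsstapel";9;20210630120000000;;"EN";"Administrator";;67890;12345;20210101;4;20210601;20210630;"Sales Invoice";;1;0;0;"EUR"

The remaining reserved, SKR and application-information fields follow in the order listed above.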
def zip_and_download(zip_filename, csv_files):
"""
Put CSV files in a zip archive and send that to the client.
Params:
zip_filename Name of the zip file
csv_files list of dicts [{'file_name': 'my_file.csv', 'csv_data': 'comma,separated,values'}]
"""
zip_buffer = BytesIO()
zip_file = zipfile.ZipFile(zip_buffer, mode="w", compression=zipfile.ZIP_DEFLATED)
for csv_file in csv_files:
zip_file.writestr(csv_file.get("file_name"), csv_file.get("csv_data"))
zip_file.close()
frappe.response["filecontent"] = zip_buffer.getvalue()
frappe.response["filename"] = zip_filename
frappe.response["type"] = "binary"

View File

@ -1,56 +0,0 @@
frappe.query_reports["DATEV"] = {
"filters": [
{
"fieldname": "company",
"label": __("Company"),
"fieldtype": "Link",
"options": "Company",
"default": frappe.defaults.get_user_default("Company") || frappe.defaults.get_global_default("Company"),
"reqd": 1
},
{
"fieldname": "from_date",
"label": __("From Date"),
"default": moment().subtract(1, 'month').startOf('month').format(),
"fieldtype": "Date",
"reqd": 1
},
{
"fieldname": "to_date",
"label": __("To Date"),
"default": moment().subtract(1, 'month').endOf('month').format(),
"fieldtype": "Date",
"reqd": 1
},
{
"fieldname": "voucher_type",
"label": __("Voucher Type"),
"fieldtype": "Select",
"options": "\nSales Invoice\nPurchase Invoice\nPayment Entry\nExpense Claim\nPayroll Entry\nBank Reconciliation\nAsset\nStock Entry"
}
],
onload: function(query_report) {
let company = frappe.query_report.get_filter_value('company');
frappe.db.exists('DATEV Settings', company).then((settings_exist) => {
if (!settings_exist) {
frappe.confirm(__('DATEV Settings for your Company are missing. Would you like to create them now?'),
() => frappe.new_doc('DATEV Settings', {'company': company})
);
}
});
query_report.page.add_menu_item(__("Download DATEV File"), () => {
const filters = encodeURIComponent(
JSON.stringify(
query_report.get_values()
)
);
window.open(`/api/method/erpnext.regional.report.datev.datev.download_datev_csv?filters=${filters}`);
});
query_report.page.add_menu_item(__("Change DATEV Settings"), () => {
let company = frappe.query_report.get_filter_value('company'); // read company from the filters again; it might have changed by now.
frappe.set_route('Form', 'DATEV Settings', company);
});
}
};

View File

@ -1,22 +0,0 @@
{
"add_total_row": 0,
"columns": [],
"creation": "2019-04-24 08:45:16.650129",
"disable_prepared_report": 0,
"disabled": 0,
"docstatus": 0,
"doctype": "Report",
"filters": [],
"idx": 0,
"is_standard": "Yes",
"modified": "2021-04-06 12:23:00.379517",
"modified_by": "Administrator",
"module": "Regional",
"name": "DATEV",
"owner": "Administrator",
"prepared_report": 0,
"ref_doctype": "GL Entry",
"report_name": "DATEV",
"report_type": "Script Report",
"roles": []
}

View File

@ -1,570 +0,0 @@
"""
Provide a report and downloadable CSV according to the German DATEV format.
- Query report showing only the columns that contain data, formatted nicely for
display to the user.
- CSV download functionality `download_datev_csv` that provides a CSV file with
all required columns. Used to import the data into the DATEV Software.
"""
import json
import frappe
from frappe import _
from erpnext.accounts.utils import get_fiscal_year
from erpnext.regional.germany.utils.datev.datev_constants import (
AccountNames,
DebtorsCreditors,
Transactions,
)
from erpnext.regional.germany.utils.datev.datev_csv import get_datev_csv, zip_and_download
COLUMNS = [
{
"label": "Umsatz (ohne Soll/Haben-Kz)",
"fieldname": "Umsatz (ohne Soll/Haben-Kz)",
"fieldtype": "Currency",
"width": 100,
},
{
"label": "Soll/Haben-Kennzeichen",
"fieldname": "Soll/Haben-Kennzeichen",
"fieldtype": "Data",
"width": 100,
},
{"label": "Konto", "fieldname": "Konto", "fieldtype": "Data", "width": 100},
{
"label": "Gegenkonto (ohne BU-Schlüssel)",
"fieldname": "Gegenkonto (ohne BU-Schlüssel)",
"fieldtype": "Data",
"width": 100,
},
{"label": "BU-Schlüssel", "fieldname": "BU-Schlüssel", "fieldtype": "Data", "width": 100},
{"label": "Belegdatum", "fieldname": "Belegdatum", "fieldtype": "Date", "width": 100},
{"label": "Belegfeld 1", "fieldname": "Belegfeld 1", "fieldtype": "Data", "width": 150},
{"label": "Buchungstext", "fieldname": "Buchungstext", "fieldtype": "Text", "width": 300},
{
"label": "Beleginfo - Art 1",
"fieldname": "Beleginfo - Art 1",
"fieldtype": "Link",
"options": "DocType",
"width": 100,
},
{
"label": "Beleginfo - Inhalt 1",
"fieldname": "Beleginfo - Inhalt 1",
"fieldtype": "Dynamic Link",
"options": "Beleginfo - Art 1",
"width": 150,
},
{
"label": "Beleginfo - Art 2",
"fieldname": "Beleginfo - Art 2",
"fieldtype": "Link",
"options": "DocType",
"width": 100,
},
{
"label": "Beleginfo - Inhalt 2",
"fieldname": "Beleginfo - Inhalt 2",
"fieldtype": "Dynamic Link",
"options": "Beleginfo - Art 2",
"width": 150,
},
{
"label": "Beleginfo - Art 3",
"fieldname": "Beleginfo - Art 3",
"fieldtype": "Link",
"options": "DocType",
"width": 100,
},
{
"label": "Beleginfo - Inhalt 3",
"fieldname": "Beleginfo - Inhalt 3",
"fieldtype": "Dynamic Link",
"options": "Beleginfo - Art 3",
"width": 150,
},
{
"label": "Beleginfo - Art 4",
"fieldname": "Beleginfo - Art 4",
"fieldtype": "Data",
"width": 100,
},
{
"label": "Beleginfo - Inhalt 4",
"fieldname": "Beleginfo - Inhalt 4",
"fieldtype": "Data",
"width": 150,
},
{
"label": "Beleginfo - Art 5",
"fieldname": "Beleginfo - Art 5",
"fieldtype": "Data",
"width": 150,
},
{
"label": "Beleginfo - Inhalt 5",
"fieldname": "Beleginfo - Inhalt 5",
"fieldtype": "Data",
"width": 100,
},
{
"label": "Beleginfo - Art 6",
"fieldname": "Beleginfo - Art 6",
"fieldtype": "Data",
"width": 150,
},
{
"label": "Beleginfo - Inhalt 6",
"fieldname": "Beleginfo - Inhalt 6",
"fieldtype": "Date",
"width": 100,
},
{"label": "Fälligkeit", "fieldname": "Fälligkeit", "fieldtype": "Date", "width": 100},
]
def execute(filters=None):
"""Entry point for frappe."""
data = []
if filters and validate(filters):
fn = "temporary_against_account_number"
filters[fn] = frappe.get_value("DATEV Settings", filters.get("company"), fn)
data = get_transactions(filters, as_dict=0)
return COLUMNS, data
def validate(filters):
"""Make sure all mandatory filters and settings are present."""
company = filters.get("company")
if not company:
frappe.throw(_("<b>Company</b> is a mandatory filter."))
from_date = filters.get("from_date")
if not from_date:
frappe.throw(_("<b>From Date</b> is a mandatory filter."))
to_date = filters.get("to_date")
if not to_date:
frappe.throw(_("<b>To Date</b> is a mandatory filter."))
validate_fiscal_year(from_date, to_date, company)
if not frappe.db.exists("DATEV Settings", filters.get("company")):
msg = "Please create DATEV Settings for Company {}".format(filters.get("company"))
frappe.log_error(msg, title="DATEV Settings missing")
return False
return True
def validate_fiscal_year(from_date, to_date, company):
from_fiscal_year = get_fiscal_year(date=from_date, company=company)
to_fiscal_year = get_fiscal_year(date=to_date, company=company)
if from_fiscal_year != to_fiscal_year:
frappe.throw(_("Dates {} and {} are not in the same fiscal year.").format(from_date, to_date))
def get_transactions(filters, as_dict=1):
def run(params_method, filters):
extra_fields, extra_joins, extra_filters = params_method(filters)
return run_query(filters, extra_fields, extra_joins, extra_filters, as_dict=as_dict)
def sort_by(row):
# "Belegdatum" is in the fifth column when list format is used
return row["Belegdatum" if as_dict else 5]
type_map = {
# specific query methods for some voucher types
"Payment Entry": get_payment_entry_params,
"Sales Invoice": get_sales_invoice_params,
"Purchase Invoice": get_purchase_invoice_params,
}
only_voucher_type = filters.get("voucher_type")
transactions = []
for voucher_type, get_voucher_params in type_map.items():
if only_voucher_type and only_voucher_type != voucher_type:
continue
transactions.extend(run(params_method=get_voucher_params, filters=filters))
if not only_voucher_type or only_voucher_type not in type_map:
# generic query method for all other voucher types
filters["exclude_voucher_types"] = type_map.keys()
transactions.extend(run(params_method=get_generic_params, filters=filters))
return sorted(transactions, key=sort_by)
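A hedged usage sketch of the dispatch above (the values are placeholders; execute() normally injects temporary_against_account_number from DATEV Settings before calling this):

filters = {
	"company": "_Test GmbH",
	"from_date": "2021-06-01",
	"to_date": "2021-06-30",
	"temporary_against_account_number": "9999",
	"voucher_type": "Sales Invoice",  # optional; omit to export every voucher type
}
rows = get_transactions(filters, as_dict=1)  # list of dicts, sorted by "Belegdatum"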
def get_payment_entry_params(filters):
extra_fields = """
, 'Zahlungsreferenz' as 'Beleginfo - Art 5'
, pe.reference_no as 'Beleginfo - Inhalt 5'
, 'Buchungstag' as 'Beleginfo - Art 6'
, pe.reference_date as 'Beleginfo - Inhalt 6'
, '' as 'Fälligkeit'
"""
extra_joins = """
LEFT JOIN `tabPayment Entry` pe
ON gl.voucher_no = pe.name
"""
extra_filters = """
AND gl.voucher_type = 'Payment Entry'
"""
return extra_fields, extra_joins, extra_filters
def get_sales_invoice_params(filters):
extra_fields = """
, '' as 'Beleginfo - Art 5'
, '' as 'Beleginfo - Inhalt 5'
, '' as 'Beleginfo - Art 6'
, '' as 'Beleginfo - Inhalt 6'
, si.due_date as 'Fälligkeit'
"""
extra_joins = """
LEFT JOIN `tabSales Invoice` si
ON gl.voucher_no = si.name
"""
extra_filters = """
AND gl.voucher_type = 'Sales Invoice'
"""
return extra_fields, extra_joins, extra_filters
def get_purchase_invoice_params(filters):
extra_fields = """
, 'Lieferanten-Rechnungsnummer' as 'Beleginfo - Art 5'
, pi.bill_no as 'Beleginfo - Inhalt 5'
, 'Lieferanten-Rechnungsdatum' as 'Beleginfo - Art 6'
, pi.bill_date as 'Beleginfo - Inhalt 6'
, pi.due_date as 'Fälligkeit'
"""
extra_joins = """
LEFT JOIN `tabPurchase Invoice` pi
ON gl.voucher_no = pi.name
"""
extra_filters = """
AND gl.voucher_type = 'Purchase Invoice'
"""
return extra_fields, extra_joins, extra_filters
def get_generic_params(filters):
# produce empty fields so all rows will have the same length
extra_fields = """
, '' as 'Beleginfo - Art 5'
, '' as 'Beleginfo - Inhalt 5'
, '' as 'Beleginfo - Art 6'
, '' as 'Beleginfo - Inhalt 6'
, '' as 'Fälligkeit'
"""
extra_joins = ""
if filters.get("exclude_voucher_types"):
# exclude voucher types that are queried by a dedicated method
exclude = "({})".format(
", ".join("'{}'".format(key) for key in filters.get("exclude_voucher_types"))
)
extra_filters = "AND gl.voucher_type NOT IN {}".format(exclude)
# if voucher type filter is set, allow only this type
if filters.get("voucher_type"):
extra_filters += " AND gl.voucher_type = %(voucher_type)s"
return extra_fields, extra_joins, extra_filters
def run_query(filters, extra_fields, extra_joins, extra_filters, as_dict=1):
"""
Get a list of accounting entries.
Select GL Entries joined with Account and Party Account in order to get the
account numbers. Returns a list of accounting entries.
Arguments:
filters -- dict of filters to be passed to the sql query
as_dict -- return as list of dicts [0,1]
"""
query = """
SELECT
/* either debit or credit amount; always positive */
case gl.debit when 0 then gl.credit else gl.debit end as 'Umsatz (ohne Soll/Haben-Kz)',
/* 'H' when credit, 'S' when debit */
case gl.debit when 0 then 'H' else 'S' end as 'Soll/Haben-Kennzeichen',
/* account number or, if empty, party account number */
acc.account_number as 'Konto',
/* against number or, if empty, party against number */
%(temporary_against_account_number)s as 'Gegenkonto (ohne BU-Schlüssel)',
'' as 'BU-Schlüssel',
gl.posting_date as 'Belegdatum',
gl.voucher_no as 'Belegfeld 1',
REPLACE(LEFT(gl.remarks, 60), '\n', ' ') as 'Buchungstext',
gl.voucher_type as 'Beleginfo - Art 1',
gl.voucher_no as 'Beleginfo - Inhalt 1',
gl.against_voucher_type as 'Beleginfo - Art 2',
gl.against_voucher as 'Beleginfo - Inhalt 2',
gl.party_type as 'Beleginfo - Art 3',
gl.party as 'Beleginfo - Inhalt 3',
case gl.party_type when 'Customer' then 'Debitorennummer' when 'Supplier' then 'Kreditorennummer' else NULL end as 'Beleginfo - Art 4',
par.debtor_creditor_number as 'Beleginfo - Inhalt 4'
{extra_fields}
FROM `tabGL Entry` gl
/* account number (Kontonummer) */
LEFT JOIN `tabAccount` acc
ON gl.account = acc.name
LEFT JOIN `tabParty Account` par
ON par.parent = gl.party
AND par.parenttype = gl.party_type
AND par.company = %(company)s
{extra_joins}
WHERE gl.company = %(company)s
AND DATE(gl.posting_date) >= %(from_date)s
AND DATE(gl.posting_date) <= %(to_date)s
{extra_filters}
ORDER BY 'Belegdatum', gl.voucher_no""".format(
extra_fields=extra_fields, extra_joins=extra_joins, extra_filters=extra_filters
)
gl_entries = frappe.db.sql(query, filters, as_dict=as_dict)
return gl_entries
def get_customers(filters):
"""
Get a list of Customers.
Arguments:
filters -- dict of filters to be passed to the sql query
"""
return frappe.db.sql(
"""
SELECT
par.debtor_creditor_number as 'Konto',
CASE cus.customer_type
WHEN 'Company' THEN cus.customer_name
ELSE null
END as 'Name (Adressatentyp Unternehmen)',
CASE cus.customer_type
WHEN 'Individual' THEN TRIM(SUBSTR(cus.customer_name, LOCATE(' ', cus.customer_name)))
ELSE null
END as 'Name (Adressatentyp natürl. Person)',
CASE cus.customer_type
WHEN 'Individual' THEN SUBSTRING_INDEX(SUBSTRING_INDEX(cus.customer_name, ' ', 1), ' ', -1)
ELSE null
END as 'Vorname (Adressatentyp natürl. Person)',
CASE cus.customer_type
WHEN 'Individual' THEN '1'
WHEN 'Company' THEN '2'
ELSE '0'
END as 'Adressatentyp',
adr.address_line1 as 'Straße',
adr.pincode as 'Postleitzahl',
adr.city as 'Ort',
UPPER(country.code) as 'Land',
adr.address_line2 as 'Adresszusatz',
adr.email_id as 'E-Mail',
adr.phone as 'Telefon',
adr.fax as 'Fax',
cus.website as 'Internet',
cus.tax_id as 'Steuernummer'
FROM `tabCustomer` cus
left join `tabParty Account` par
on par.parent = cus.name
and par.parenttype = 'Customer'
and par.company = %(company)s
left join `tabDynamic Link` dyn_adr
on dyn_adr.link_name = cus.name
and dyn_adr.link_doctype = 'Customer'
and dyn_adr.parenttype = 'Address'
left join `tabAddress` adr
on adr.name = dyn_adr.parent
and adr.is_primary_address = '1'
left join `tabCountry` country
on country.name = adr.country
WHERE adr.is_primary_address = '1'
""",
filters,
as_dict=1,
)
def get_suppliers(filters):
"""
Get a list of Suppliers.
Arguments:
filters -- dict of filters to be passed to the sql query
"""
return frappe.db.sql(
"""
SELECT
par.debtor_creditor_number as 'Konto',
CASE sup.supplier_type
WHEN 'Company' THEN sup.supplier_name
ELSE null
END as 'Name (Adressatentyp Unternehmen)',
CASE sup.supplier_type
WHEN 'Individual' THEN TRIM(SUBSTR(sup.supplier_name, LOCATE(' ', sup.supplier_name)))
ELSE null
END as 'Name (Adressatentyp natürl. Person)',
CASE sup.supplier_type
WHEN 'Individual' THEN SUBSTRING_INDEX(SUBSTRING_INDEX(sup.supplier_name, ' ', 1), ' ', -1)
ELSE null
END as 'Vorname (Adressatentyp natürl. Person)',
CASE sup.supplier_type
WHEN 'Individual' THEN '1'
WHEN 'Company' THEN '2'
ELSE '0'
END as 'Adressatentyp',
adr.address_line1 as 'Straße',
adr.pincode as 'Postleitzahl',
adr.city as 'Ort',
UPPER(country.code) as 'Land',
adr.address_line2 as 'Adresszusatz',
adr.email_id as 'E-Mail',
adr.phone as 'Telefon',
adr.fax as 'Fax',
sup.website as 'Internet',
sup.tax_id as 'Steuernummer',
case sup.on_hold when 1 then sup.release_date else null end as 'Zahlungssperre bis'
FROM `tabSupplier` sup
left join `tabParty Account` par
on par.parent = sup.name
and par.parenttype = 'Supplier'
and par.company = %(company)s
left join `tabDynamic Link` dyn_adr
on dyn_adr.link_name = sup.name
and dyn_adr.link_doctype = 'Supplier'
and dyn_adr.parenttype = 'Address'
left join `tabAddress` adr
on adr.name = dyn_adr.parent
and adr.is_primary_address = '1'
left join `tabCountry` country
on country.name = adr.country
WHERE adr.is_primary_address = '1'
""",
filters,
as_dict=1,
)
def get_account_names(filters):
return frappe.db.sql(
"""
SELECT
account_number as 'Konto',
LEFT(account_name, 40) as 'Kontenbeschriftung',
'de-DE' as 'Sprach-ID'
FROM `tabAccount`
WHERE company = %(company)s
AND is_group = 0
AND account_number != ''
""",
filters,
as_dict=1,
)
@frappe.whitelist()
def download_datev_csv(filters):
"""
Provide accounting entries for download in DATEV format.
Validate the filters, get the data, produce the CSV file and provide it for
download. Can be called like this:
GET /api/method/erpnext.regional.report.datev.datev.download_datev_csv
Arguments / Params:
filters -- dict of filters to be passed to the sql query
"""
if isinstance(filters, str):
filters = json.loads(filters)
validate(filters)
company = filters.get("company")
fiscal_year = get_fiscal_year(date=filters.get("from_date"), company=company)
filters["fiscal_year_start"] = fiscal_year[1]
# set chart of accounts used
coa = frappe.get_value("Company", company, "chart_of_accounts")
filters["skr"] = "04" if "SKR04" in coa else ("03" if "SKR03" in coa else "")
datev_settings = frappe.get_doc("DATEV Settings", company)
filters["account_number_length"] = datev_settings.account_number_length
filters["temporary_against_account_number"] = datev_settings.temporary_against_account_number
transactions = get_transactions(filters)
account_names = get_account_names(filters)
customers = get_customers(filters)
suppliers = get_suppliers(filters)
zip_name = "{} DATEV.zip".format(frappe.utils.datetime.date.today())
zip_and_download(
zip_name,
[
{
"file_name": "EXTF_Buchungsstapel.csv",
"csv_data": get_datev_csv(transactions, filters, csv_class=Transactions),
},
{
"file_name": "EXTF_Kontenbeschriftungen.csv",
"csv_data": get_datev_csv(account_names, filters, csv_class=AccountNames),
},
{
"file_name": "EXTF_Kunden.csv",
"csv_data": get_datev_csv(customers, filters, csv_class=DebtorsCreditors),
},
{
"file_name": "EXTF_Lieferanten.csv",
"csv_data": get_datev_csv(suppliers, filters, csv_class=DebtorsCreditors),
},
],
)
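Outside the report UI, the whitelisted endpoint can be exercised with any authenticated HTTP client; a sketch assuming token authentication (host, API key and secret are placeholders, and requests is only one possible client):

import json
import urllib.parse

import requests

filters = json.dumps({"company": "_Test GmbH", "from_date": "2021-06-01", "to_date": "2021-06-30"})
url = (
	"https://erp.example.com/api/method/"
	"erpnext.regional.report.datev.datev.download_datev_csv"
	"?filters=" + urllib.parse.quote(filters)
)
response = requests.get(url, headers={"Authorization": "token <api_key>:<api_secret>"})
with open("DATEV.zip", "wb") as f:
	f.write(response.content)  # the same ZIP that zip_and_download() streams to the browser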

View File

@ -1,252 +0,0 @@
import zipfile
from io import BytesIO
from unittest import TestCase
import frappe
from frappe.utils import cstr, now_datetime, today
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.regional.germany.utils.datev.datev_constants import (
AccountNames,
DebtorsCreditors,
Transactions,
)
from erpnext.regional.germany.utils.datev.datev_csv import get_datev_csv, get_header
from erpnext.regional.report.datev.datev import (
download_datev_csv,
get_account_names,
get_customers,
get_suppliers,
get_transactions,
)
def make_company(company_name, abbr):
if not frappe.db.exists("Company", company_name):
company = frappe.get_doc(
{
"doctype": "Company",
"company_name": company_name,
"abbr": abbr,
"default_currency": "EUR",
"country": "Germany",
"create_chart_of_accounts_based_on": "Standard Template",
"chart_of_accounts": "SKR04 mit Kontonummern",
}
)
company.insert()
else:
company = frappe.get_doc("Company", company_name)
# idempotent
company.create_default_warehouses()
if not frappe.db.get_value("Cost Center", {"is_group": 0, "company": company.name}):
company.create_default_cost_center()
company.save()
return company
def setup_fiscal_year():
fiscal_year = None
year = cstr(now_datetime().year)
if not frappe.db.get_value("Fiscal Year", {"year": year}, "name"):
try:
fiscal_year = frappe.get_doc(
{
"doctype": "Fiscal Year",
"year": year,
"year_start_date": "{0}-01-01".format(year),
"year_end_date": "{0}-12-31".format(year),
}
)
fiscal_year.insert()
except frappe.NameError:
pass
if fiscal_year:
fiscal_year.set_as_default()
def make_customer_with_account(customer_name, company):
acc_name = frappe.db.get_value(
"Account", {"account_name": customer_name, "company": company.name}, "name"
)
if not acc_name:
acc = frappe.get_doc(
{
"doctype": "Account",
"parent_account": "1 - Forderungen aus Lieferungen und Leistungen - _TG",
"account_name": customer_name,
"company": company.name,
"account_type": "Receivable",
"account_number": "10001",
}
)
acc.insert()
acc_name = acc.name
if not frappe.db.exists("Customer", customer_name):
customer = frappe.get_doc(
{
"doctype": "Customer",
"customer_name": customer_name,
"customer_type": "Company",
"accounts": [{"company": company.name, "account": acc_name}],
}
)
customer.insert()
else:
customer = frappe.get_doc("Customer", customer_name)
return customer
def make_item(item_code, company):
warehouse_name = frappe.db.get_value(
"Warehouse", {"warehouse_name": "Stores", "company": company.name}, "name"
)
if not frappe.db.exists("Item", item_code):
item = frappe.get_doc(
{
"doctype": "Item",
"item_code": item_code,
"item_name": item_code,
"description": item_code,
"item_group": "All Item Groups",
"is_stock_item": 0,
"is_purchase_item": 0,
"is_customer_provided_item": 0,
"item_defaults": [{"default_warehouse": warehouse_name, "company": company.name}],
}
)
item.insert()
else:
item = frappe.get_doc("Item", item_code)
return item
def make_datev_settings(company):
if not frappe.db.exists("DATEV Settings", company.name):
frappe.get_doc(
{
"doctype": "DATEV Settings",
"client": company.name,
"client_number": "12345",
"consultant_number": "67890",
"temporary_against_account_number": "9999",
}
).insert()
class TestDatev(TestCase):
def setUp(self):
self.company = make_company("_Test GmbH", "_TG")
self.customer = make_customer_with_account("_Test Kunde GmbH", self.company)
self.filters = {
"company": self.company.name,
"from_date": today(),
"to_date": today(),
"temporary_against_account_number": "9999",
}
make_datev_settings(self.company)
item = make_item("_Test Item", self.company)
setup_fiscal_year()
warehouse = frappe.db.get_value(
"Item Default", {"parent": item.name, "company": self.company.name}, "default_warehouse"
)
income_account = frappe.db.get_value(
"Account", {"account_number": "4200", "company": self.company.name}, "name"
)
tax_account = frappe.db.get_value(
"Account", {"account_number": "3806", "company": self.company.name}, "name"
)
si = create_sales_invoice(
company=self.company.name,
customer=self.customer.name,
currency=self.company.default_currency,
debit_to=self.customer.accounts[0].account,
income_account="4200 - Erlöse - _TG",
expense_account="6990 - Herstellungskosten - _TG",
cost_center=self.company.cost_center,
warehouse=warehouse,
item=item.name,
do_not_save=1,
)
si.append(
"taxes",
{
"charge_type": "On Net Total",
"account_head": tax_account,
"description": "Umsatzsteuer 19 %",
"rate": 19,
"cost_center": self.company.cost_center,
},
)
si.cost_center = self.company.cost_center
si.save()
si.submit()
def test_columns(self):
def is_subset(get_data, allowed_keys):
"""
Validate that the dict contains only allowed keys.
Params:
get_data -- Function that returns a list of dicts.
allowed_keys -- List of allowed keys
"""
data = get_data(self.filters)
if data == []:
# No data and, therefore, no columns is okay
return True
actual_set = set(data[0].keys())
# allowed set must be interpreted as unicode to match the actual set
allowed_set = set({frappe.as_unicode(key) for key in allowed_keys})
return actual_set.issubset(allowed_set)
self.assertTrue(is_subset(get_transactions, Transactions.COLUMNS))
self.assertTrue(is_subset(get_customers, DebtorsCreditors.COLUMNS))
self.assertTrue(is_subset(get_suppliers, DebtorsCreditors.COLUMNS))
self.assertTrue(is_subset(get_account_names, AccountNames.COLUMNS))
def test_header(self):
self.assertTrue(Transactions.DATA_CATEGORY in get_header(self.filters, Transactions))
self.assertTrue(AccountNames.DATA_CATEGORY in get_header(self.filters, AccountNames))
self.assertTrue(DebtorsCreditors.DATA_CATEGORY in get_header(self.filters, DebtorsCreditors))
def test_csv(self):
test_data = [
{
"Umsatz (ohne Soll/Haben-Kz)": 100,
"Soll/Haben-Kennzeichen": "H",
"Kontonummer": "4200",
"Gegenkonto (ohne BU-Schlüssel)": "10000",
"Belegdatum": today(),
"Buchungstext": "No remark",
"Beleginfo - Art 1": "Sales Invoice",
"Beleginfo - Inhalt 1": "SINV-0001",
}
]
get_datev_csv(data=test_data, filters=self.filters, csv_class=Transactions)
def test_download(self):
"""Assert that the returned file is a ZIP file."""
download_datev_csv(self.filters)
# zipfile.is_zipfile() expects a file-like object
zip_buffer = BytesIO()
zip_buffer.write(frappe.response["filecontent"])
self.assertTrue(zipfile.is_zipfile(zip_buffer))

View File

@ -15,23 +15,23 @@
"salutation",
"customer_name",
"gender",
"customer_type",
"tax_withholding_category",
"default_bank_account",
"tax_id",
"tax_category",
"tax_withholding_category",
"lead_name",
"opportunity_name",
"image",
"column_break0",
"account_manager",
"customer_group",
"customer_type",
"territory",
"tax_id",
"tax_category",
"account_manager",
"so_required",
"dn_required",
"disabled",
"is_internal_customer",
"represents_company",
"disabled",
"allowed_to_transact_section",
"companies",
"currency_and_price_list",
@ -40,7 +40,6 @@
"default_price_list",
"address_contacts",
"address_html",
"website",
"column_break1",
"contact_html",
"primary_address_and_contact_detail",
@ -60,6 +59,7 @@
"column_break_45",
"market_segment",
"industry",
"website",
"language",
"is_frozen",
"column_break_38",
@ -100,7 +100,7 @@
"fieldname": "customer_name",
"fieldtype": "Data",
"in_global_search": 1,
"label": "Full Name",
"label": "Customer Name",
"no_copy": 1,
"oldfieldname": "customer_name",
"oldfieldtype": "Data",
@ -118,7 +118,7 @@
"default": "Company",
"fieldname": "customer_type",
"fieldtype": "Select",
"label": "Type",
"label": "Customer Type",
"oldfieldname": "customer_type",
"oldfieldtype": "Select",
"options": "Company\nIndividual",
@ -337,7 +337,7 @@
"collapsible": 1,
"fieldname": "default_receivable_accounts",
"fieldtype": "Section Break",
"label": "Accounting"
"label": "Default Receivable Accounts"
},
{
"description": "Mention if non-standard receivable account",
@ -511,7 +511,7 @@
"link_fieldname": "party"
}
],
"modified": "2021-10-20 22:07:52.485809",
"modified": "2022-04-16 20:32:34.000304",
"modified_by": "Administrator",
"module": "Selling",
"name": "Customer",
@ -595,6 +595,7 @@
"show_name_in_global_search": 1,
"sort_field": "modified",
"sort_order": "ASC",
"states": [],
"title_field": "customer_name",
"track_changes": 1
}

View File

@ -81,7 +81,7 @@ def get_data(conditions, filters):
ON sii.so_detail = soi.name and sii.docstatus = 1)
LEFT JOIN `tabDelivery Note Item` dni
on dni.so_detail = soi.name
RIGHT JOIN `tabDelivery Note` dn
LEFT JOIN `tabDelivery Note` dn
on dni.parent = dn.name and dn.docstatus = 1
WHERE
soi.parent = so.name

View File

@ -0,0 +1,166 @@
import frappe
from frappe.tests.utils import FrappeTestCase
from frappe.utils import add_days
from erpnext.selling.doctype.sales_order.sales_order import make_delivery_note, make_sales_invoice
from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
from erpnext.selling.report.sales_order_analysis.sales_order_analysis import execute
from erpnext.stock.doctype.item.test_item import create_item
test_dependencies = ["Sales Order", "Item", "Sales Invoice", "Delivery Note"]
class TestSalesOrderAnalysis(FrappeTestCase):
def create_sales_order(self, transaction_date):
item = create_item(item_code="_Test Excavator", is_stock_item=0)
so = make_sales_order(
transaction_date=transaction_date,
item=item.item_code,
qty=10,
rate=100000,
do_not_save=True,
)
so.po_no = ""
so.taxes_and_charges = ""
so.taxes = ""
so.items[0].delivery_date = add_days(transaction_date, 15)
so.save()
so.submit()
return item, so
def create_sales_invoice(self, so):
sinv = make_sales_invoice(so.name)
sinv.posting_date = so.transaction_date
sinv.taxes_and_charges = ""
sinv.taxes = ""
sinv.insert()
sinv.submit()
return sinv
def create_delivery_note(self, so):
dn = make_delivery_note(so.name)
dn.set_posting_time = True
dn.posting_date = add_days(so.transaction_date, 1)
dn.save()
dn.submit()
return dn
def test_01_so_to_deliver_and_bill(self):
transaction_date = "2021-06-01"
item, so = self.create_sales_order(transaction_date)
columns, data, message, chart = execute(
{
"company": "_Test Company",
"from_date": "2021-06-01",
"to_date": "2021-06-30",
"status": ["To Deliver and Bill"],
}
)
expected_value = {
"status": "To Deliver and Bill",
"sales_order": so.name,
"delay_days": frappe.utils.date_diff(frappe.utils.datetime.date.today(), so.delivery_date),
"qty": 10,
"delivered_qty": 0,
"pending_qty": 10,
"qty_to_bill": 10,
"time_taken_to_deliver": 0,
}
self.assertEqual(len(data), 1)
for key, val in expected_value.items():
with self.subTest(key=key, val=val):
self.assertEqual(data[0][key], val)
def test_02_so_to_deliver(self):
transaction_date = "2021-06-01"
item, so = self.create_sales_order(transaction_date)
self.create_sales_invoice(so)
columns, data, message, chart = execute(
{
"company": "_Test Company",
"from_date": "2021-06-01",
"to_date": "2021-06-30",
"status": ["To Deliver"],
}
)
expected_value = {
"status": "To Deliver",
"sales_order": so.name,
"delay_days": frappe.utils.date_diff(frappe.utils.datetime.date.today(), so.delivery_date),
"qty": 10,
"delivered_qty": 0,
"pending_qty": 10,
"qty_to_bill": 0,
"time_taken_to_deliver": 0,
}
self.assertEqual(len(data), 1)
for key, val in expected_value.items():
with self.subTest(key=key, val=val):
self.assertEqual(data[0][key], val)
def test_03_so_to_bill(self):
transaction_date = "2021-06-01"
item, so = self.create_sales_order(transaction_date)
self.create_delivery_note(so)
columns, data, message, chart = execute(
{
"company": "_Test Company",
"from_date": "2021-06-01",
"to_date": "2021-06-30",
"status": ["To Bill"],
}
)
expected_value = {
"status": "To Bill",
"sales_order": so.name,
"delay_days": frappe.utils.date_diff(frappe.utils.datetime.date.today(), so.delivery_date),
"qty": 10,
"delivered_qty": 10,
"pending_qty": 0,
"qty_to_bill": 10,
"time_taken_to_deliver": 86400,
}
self.assertEqual(len(data), 1)
for key, val in expected_value.items():
with self.subTest(key=key, val=val):
self.assertEqual(data[0][key], val)
def test_04_so_completed(self):
transaction_date = "2021-06-01"
item, so = self.create_sales_order(transaction_date)
self.create_sales_invoice(so)
self.create_delivery_note(so)
columns, data, message, chart = execute(
{
"company": "_Test Company",
"from_date": "2021-06-01",
"to_date": "2021-06-30",
"status": ["Completed"],
}
)
expected_value = {
"status": "Completed",
"sales_order": so.name,
"delay_days": frappe.utils.date_diff(frappe.utils.datetime.date.today(), so.delivery_date),
"qty": 10,
"delivered_qty": 10,
"pending_qty": 0,
"qty_to_bill": 0,
"billed_qty": 10,
"time_taken_to_deliver": 86400,
}
self.assertEqual(len(data), 1)
for key, val in expected_value.items():
with self.subTest(key=key, val=val):
self.assertEqual(data[0][key], val)
def test_05_all_so_status(self):
columns, data, message, chart = execute(
{
"company": "_Test Company",
"from_date": "2021-06-01",
"to_date": "2021-06-30",
}
)
# SOs from the first 4 test cases should be in the output
self.assertEqual(len(data), 4)

View File

@ -233,7 +233,8 @@ erpnext.company.setup_queries = function(frm) {
["expenses_included_in_asset_valuation", {"account_type": "Expenses Included In Asset Valuation"}],
["capital_work_in_progress_account", {"account_type": "Capital Work in Progress"}],
["asset_received_but_not_billed", {"account_type": "Asset Received But Not Billed"}],
["unrealized_profit_loss_account", {"root_type": ["in", ["Liability", "Asset"]]}]
["unrealized_profit_loss_account", {"root_type": ["in", ["Liability", "Asset"]]}],
["default_provisional_account", {"root_type": ["in", ["Liability", "Asset"]]}]
], function(i, v) {
erpnext.company.set_custom_query(frm, v);
});

View File

@ -377,6 +377,17 @@ $.extend(erpnext.item, {
}
}
frm.set_query('default_provisional_account', 'item_defaults', (doc, cdt, cdn) => {
let row = locals[cdt][cdn];
return {
filters: {
"company": row.company,
"root_type": ["in", ["Liability", "Asset"]],
"is_group": 0
}
};
});
},
make_dashboard: function(frm) {

View File

@ -15,6 +15,7 @@
"default_supplier",
"column_break_8",
"expense_account",
"default_provisional_account",
"selling_defaults",
"selling_cost_center",
"column_break_12",
@ -101,11 +102,17 @@
"fieldtype": "Link",
"label": "Default Discount Account",
"options": "Account"
},
{
"fieldname": "default_provisional_account",
"fieldtype": "Link",
"label": "Default Provisional Account",
"options": "Account"
}
],
"istable": 1,
"links": [],
"modified": "2021-07-13 01:26:03.860065",
"modified": "2022-04-10 20:18:54.148195",
"modified_by": "Administrator",
"module": "Stock",
"name": "Item Default",
@ -114,5 +121,6 @@
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View File

@ -106,8 +106,6 @@
"terms",
"bill_no",
"bill_date",
"accounting_details_section",
"provisional_expense_account",
"more_info",
"project",
"status",
@ -1145,26 +1143,13 @@
"label": "Represents Company",
"options": "Company",
"read_only": 1
},
{
"collapsible": 1,
"fieldname": "accounting_details_section",
"fieldtype": "Section Break",
"label": "Accounting Details"
},
{
"fieldname": "provisional_expense_account",
"fieldtype": "Link",
"hidden": 1,
"label": "Provisional Expense Account",
"options": "Account"
}
],
"icon": "fa fa-truck",
"idx": 261,
"is_submittable": 1,
"links": [],
"modified": "2022-03-10 11:40:52.690984",
"modified": "2022-04-10 22:50:37.761362",
"modified_by": "Administrator",
"module": "Stock",
"name": "Purchase Receipt",

View File

@ -145,10 +145,13 @@ class PurchaseReceipt(BuyingController):
)
)
if provisional_accounting_for_non_stock_items:
default_provisional_account = self.get_company_default("default_provisional_account")
if not self.provisional_expense_account:
self.provisional_expense_account = default_provisional_account
if not provisional_accounting_for_non_stock_items:
return
default_provisional_account = self.get_company_default("default_provisional_account")
for item in self.get("items"):
if not item.get("provisional_expense_account"):
item.provisional_expense_account = default_provisional_account
def validate_with_previous_doc(self):
super(PurchaseReceipt, self).validate_with_previous_doc(
@ -509,7 +512,9 @@ class PurchaseReceipt(BuyingController):
and flt(d.qty)
and provisional_accounting_for_non_stock_items
):
self.add_provisional_gl_entry(d, gl_entries, self.posting_date)
self.add_provisional_gl_entry(
d, gl_entries, self.posting_date, d.get("provisional_expense_account")
)
if warehouse_with_no_account:
frappe.msgprint(
@ -518,9 +523,10 @@ class PurchaseReceipt(BuyingController):
+ "\n".join(warehouse_with_no_account)
)
def add_provisional_gl_entry(self, item, gl_entries, posting_date, reverse=0):
provisional_expense_account = self.get("provisional_expense_account")
credit_currency = get_account_currency(provisional_expense_account)
def add_provisional_gl_entry(
self, item, gl_entries, posting_date, provisional_account, reverse=0
):
credit_currency = get_account_currency(provisional_account)
debit_currency = get_account_currency(item.expense_account)
expense_account = item.expense_account
remarks = self.get("remarks") or _("Accounting Entry for Service")
@ -534,7 +540,7 @@ class PurchaseReceipt(BuyingController):
self.add_gl_entry(
gl_entries=gl_entries,
account=provisional_expense_account,
account=provisional_account,
cost_center=item.cost_center,
debit=0.0,
credit=multiplication_factor * item.amount,
@ -554,7 +560,7 @@ class PurchaseReceipt(BuyingController):
debit=multiplication_factor * item.amount,
credit=0.0,
remarks=remarks,
against_account=provisional_expense_account,
against_account=provisional_account,
account_currency=debit_currency,
project=item.project,
voucher_detail_no=item.name,
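In effect, the provisional account is now resolved per Purchase Receipt Item instead of once per document, falling back to the company default. A sketch of that resolution order (not the literal implementation, just the rule the code above enforces):

def resolve_provisional_account(item, company_default):
	# 1) item-level "Provisional Expense Account" on the Purchase Receipt Item
	# 2) otherwise the company's "Default Provisional Account"
	return item.get("provisional_expense_account") or company_default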

View File

@ -747,14 +747,13 @@ class TestPurchaseReceipt(FrappeTestCase):
update_purchase_receipt_status,
)
pr = make_purchase_receipt()
item = make_item()
pr = make_purchase_receipt(item_code=item.name)
update_purchase_receipt_status(pr.name, "Closed")
self.assertEqual(frappe.db.get_value("Purchase Receipt", pr.name, "status"), "Closed")
pr.reload()
pr.cancel()
def test_pr_billing_status(self):
"""Flow:
1. PO -> PR1 -> PI

View File

@ -96,7 +96,6 @@
"include_exploded_items",
"batch_no",
"rejected_serial_no",
"expense_account",
"item_tax_rate",
"item_weight_details",
"weight_per_unit",
@ -107,6 +106,10 @@
"manufacturer",
"column_break_16",
"manufacturer_part_no",
"accounting_details_section",
"expense_account",
"column_break_102",
"provisional_expense_account",
"accounting_dimensions_section",
"project",
"dimension_col_break",
@ -971,12 +974,27 @@
"label": "Product Bundle",
"options": "Product Bundle",
"read_only": 1
},
{
"fieldname": "provisional_expense_account",
"fieldtype": "Link",
"label": "Provisional Expense Account",
"options": "Account"
},
{
"fieldname": "accounting_details_section",
"fieldtype": "Section Break",
"label": "Accounting Details"
},
{
"fieldname": "column_break_102",
"fieldtype": "Column Break"
}
],
"idx": 1,
"istable": 1,
"links": [],
"modified": "2022-02-01 11:32:27.980524",
"modified": "2022-04-11 13:07:32.061402",
"modified_by": "Administrator",
"module": "Stock",
"name": "Purchase Receipt Item",

View File

@ -4,15 +4,12 @@
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import cint, get_link_to_form, get_weekday, now, nowtime, today
from frappe.utils import cint, get_link_to_form, get_weekday, now, nowtime
from frappe.utils.user import get_users_with_role
from rq.timeouts import JobTimeoutException
import erpnext
from erpnext.accounts.utils import (
check_if_stock_and_account_balance_synced,
update_gl_entries_after,
)
from erpnext.accounts.utils import update_gl_entries_after
from erpnext.stock.stock_ledger import get_items_to_be_repost, repost_future_sle
@ -224,6 +221,10 @@ def notify_error_to_stock_managers(doc, traceback):
def repost_entries():
"""
Reposts 'Repost Item Valuation' entries in queue.
Called hourly via hooks.py.
"""
if not in_configured_timeslot():
return
@ -239,9 +240,6 @@ def repost_entries():
if riv_entries:
return
for d in frappe.get_all("Company", filters={"enable_perpetual_inventory": 1}):
check_if_stock_and_account_balance_synced(today(), d.name)
def get_repost_item_valuation_entries():
return frappe.db.sql(

View File

@ -345,6 +345,7 @@ def get_basic_details(args, item, overwrite_warehouse=True):
"expense_account": expense_account
or get_default_expense_account(args, item_defaults, item_group_defaults, brand_defaults),
"discount_account": get_default_discount_account(args, item_defaults),
"provisional_expense_account": get_provisional_account(args, item_defaults),
"cost_center": get_default_cost_center(
args, item_defaults, item_group_defaults, brand_defaults
),
@ -699,6 +700,10 @@ def get_default_expense_account(args, item, item_group, brand):
)
def get_provisional_account(args, item):
return item.get("default_provisional_account") or args.default_provisional_account
def get_default_discount_account(args, item):
return item.get("default_discount_account") or args.discount_account

View File

@ -3,24 +3,41 @@
from operator import itemgetter
from typing import Any, Dict, List, Optional, TypedDict
import frappe
from frappe import _
from frappe.query_builder.functions import CombineDatetime
from frappe.utils import cint, date_diff, flt, getdate
from frappe.utils.nestedset import get_descendants_of
from pypika.terms import ExistsCriterion
import erpnext
from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots, get_average_age
from erpnext.stock.report.stock_ledger.stock_ledger import get_item_group_condition
from erpnext.stock.utils import add_additional_uom_columns, is_reposting_item_valuation_in_progress
def execute(filters=None):
class StockBalanceFilter(TypedDict):
company: Optional[str]
from_date: str
to_date: str
item_group: Optional[str]
item: Optional[str]
warehouse: Optional[str]
warehouse_type: Optional[str]
include_uom: Optional[str] # include extra info in converted UOM
show_stock_ageing_data: bool
show_variant_attributes: bool
SLEntry = Dict[str, Any]
def execute(filters: Optional[StockBalanceFilter] = None):
is_reposting_item_valuation_in_progress()
if not filters:
filters = {}
to_date = filters.get("to_date")
if filters.get("company"):
company_currency = erpnext.get_company_currency(filters.get("company"))
else:
@ -48,6 +65,7 @@ def execute(filters=None):
_func = itemgetter(1)
to_date = filters.get("to_date")
for (company, item, warehouse) in sorted(iwb_map):
if item_map.get(item):
qty_dict = iwb_map[(company, item, warehouse)]
@ -92,7 +110,7 @@ def execute(filters=None):
return columns, data
def get_columns(filters):
def get_columns(filters: StockBalanceFilter):
"""return columns"""
columns = [
{
@ -215,66 +233,77 @@ def get_columns(filters):
return columns
def get_conditions(filters):
conditions = ""
def apply_conditions(query, filters):
sle = frappe.qb.DocType("Stock Ledger Entry")
warehouse_table = frappe.qb.DocType("Warehouse")
if not filters.get("from_date"):
frappe.throw(_("'From Date' is required"))
if filters.get("to_date"):
conditions += " and sle.posting_date <= %s" % frappe.db.escape(filters.get("to_date"))
if to_date := filters.get("to_date"):
query = query.where(sle.posting_date <= to_date)
else:
frappe.throw(_("'To Date' is required"))
if filters.get("company"):
conditions += " and sle.company = %s" % frappe.db.escape(filters.get("company"))
if company := filters.get("company"):
query = query.where(sle.company == company)
if filters.get("warehouse"):
warehouse_details = frappe.db.get_value(
"Warehouse", filters.get("warehouse"), ["lft", "rgt"], as_dict=1
)
if warehouse_details:
conditions += (
" and exists (select name from `tabWarehouse` wh \
where wh.lft >= %s and wh.rgt <= %s and sle.warehouse = wh.name)"
% (warehouse_details.lft, warehouse_details.rgt)
if warehouse := filters.get("warehouse"):
lft, rgt = frappe.db.get_value("Warehouse", warehouse, ["lft", "rgt"])
children_subquery = (
frappe.qb.from_(warehouse_table)
.select(warehouse_table.name)
.where(
(warehouse_table.lft >= lft)
& (warehouse_table.rgt <= rgt)
& (warehouse_table.name == sle.warehouse)
)
if filters.get("warehouse_type") and not filters.get("warehouse"):
conditions += (
" and exists (select name from `tabWarehouse` wh \
where wh.warehouse_type = '%s' and sle.warehouse = wh.name)"
% (filters.get("warehouse_type"))
)
query = query.where(ExistsCriterion(children_subquery))
elif warehouse_type := filters.get("warehouse_type"):
query = (
query.join(warehouse_table)
.on(warehouse_table.name == sle.warehouse)
.where(warehouse_table.warehouse_type == warehouse_type)
)
return conditions
return query
def get_stock_ledger_entries(filters, items):
item_conditions_sql = ""
if items:
item_conditions_sql = " and sle.item_code in ({})".format(
", ".join(frappe.db.escape(i, percent=False) for i in items)
def get_stock_ledger_entries(filters: StockBalanceFilter, items: List[str]) -> List[SLEntry]:
sle = frappe.qb.DocType("Stock Ledger Entry")
query = (
frappe.qb.from_(sle)
.select(
sle.item_code,
sle.warehouse,
sle.posting_date,
sle.actual_qty,
sle.valuation_rate,
sle.company,
sle.voucher_type,
sle.qty_after_transaction,
sle.stock_value_difference,
sle.item_code.as_("name"),
sle.voucher_no,
sle.stock_value,
sle.batch_no,
)
conditions = get_conditions(filters)
return frappe.db.sql(
"""
select
sle.item_code, warehouse, sle.posting_date, sle.actual_qty, sle.valuation_rate,
sle.company, sle.voucher_type, sle.qty_after_transaction, sle.stock_value_difference,
sle.item_code as name, sle.voucher_no, sle.stock_value, sle.batch_no
from
`tabStock Ledger Entry` sle
where sle.docstatus < 2 %s %s
and is_cancelled = 0
order by sle.posting_date, sle.posting_time, sle.creation, sle.actual_qty"""
% (item_conditions_sql, conditions), # nosec
as_dict=1,
.where((sle.docstatus < 2) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.creation)
.orderby(sle.actual_qty)
)
if items:
query = query.where(sle.item_code.isin(items))
def get_item_warehouse_map(filters, sle):
query = apply_conditions(query, filters)
return query.run(as_dict=True)
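# Illustrative sketch: rows come back oldest-first (posting datetime, then creation,
# then actual_qty), and the item filter only applies when a non-empty list is passed.
# "_Test Item" and the dates are assumed example values.
example_entries = get_stock_ledger_entries(
    {"company": "_Test Company", "from_date": "2021-01-01", "to_date": "2022-04-19"},
    ["_Test Item"],
)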
def get_item_warehouse_map(filters: StockBalanceFilter, sle: List[SLEntry]):
iwb_map = {}
from_date = getdate(filters.get("from_date"))
to_date = getdate(filters.get("to_date"))
@ -332,7 +361,7 @@ def get_item_warehouse_map(filters, sle):
return iwb_map
def filter_items_with_no_transactions(iwb_map, float_precision):
def filter_items_with_no_transactions(iwb_map, float_precision: float):
for (company, item, warehouse) in sorted(iwb_map):
qty_dict = iwb_map[(company, item, warehouse)]
@ -349,26 +378,22 @@ def filter_items_with_no_transactions(iwb_map, float_precision):
return iwb_map
def get_items(filters):
def get_items(filters: StockBalanceFilter) -> List[str]:
"Get items based on item code, item group or brand."
conditions = []
if filters.get("item_code"):
conditions.append("item.name=%(item_code)s")
if item_code := filters.get("item_code"):
return [item_code]
else:
if filters.get("item_group"):
conditions.append(get_item_group_condition(filters.get("item_group")))
if filters.get("brand"): # used in stock analytics report
conditions.append("item.brand=%(brand)s")
item_filters = {}
if item_group := filters.get("item_group"):
children = get_descendants_of("Item Group", item_group, ignore_permissions=True)
item_filters["item_group"] = ("in", children + [item_group])
if brand := filters.get("brand"):
item_filters["brand"] = brand
items = []
if conditions:
items = frappe.db.sql_list(
"""select name from `tabItem` item where {}""".format(" and ".join(conditions)), filters
)
return items
return frappe.get_all("Item", filters=item_filters, pluck="name", order_by=None)
def get_item_details(items, sle, filters):
def get_item_details(items: List[str], sle: List[SLEntry], filters: StockBalanceFilter):
item_details = {}
if not items:
items = list(set(d.item_code for d in sle))
@ -376,33 +401,35 @@ def get_item_details(items, sle, filters):
if not items:
return item_details
cf_field = cf_join = ""
if filters.get("include_uom"):
cf_field = ", ucd.conversion_factor"
cf_join = (
"left join `tabUOM Conversion Detail` ucd on ucd.parent=item.name and ucd.uom=%s"
% frappe.db.escape(filters.get("include_uom"))
)
item_table = frappe.qb.DocType("Item")
res = frappe.db.sql(
"""
select
item.name, item.item_name, item.description, item.item_group, item.brand, item.stock_uom %s
from
`tabItem` item
%s
where
item.name in (%s)
"""
% (cf_field, cf_join, ",".join(["%s"] * len(items))),
items,
as_dict=1,
query = (
frappe.qb.from_(item_table)
.select(
item_table.name,
item_table.item_name,
item_table.description,
item_table.item_group,
item_table.brand,
item_table.stock_uom,
)
.where(item_table.name.isin(items))
)
for item in res:
item_details.setdefault(item.name, item)
if uom := filters.get("include_uom"):
uom_conv_detail = frappe.qb.DocType("UOM Conversion Detail")
query = (
query.left_join(uom_conv_detail)
.on((uom_conv_detail.parent == item_table.name) & (uom_conv_detail.uom == uom))
.select(uom_conv_detail.conversion_factor)
)
if filters.get("show_variant_attributes", 0) == 1:
result = query.run(as_dict=1)
for item in result:
item_details.setdefault(item.name, item)
if filters.get("show_variant_attributes"):
variant_values = get_variant_values_for(list(item_details))
item_details = {k: v.update(variant_values.get(k, {})) for k, v in item_details.items()}
@ -413,36 +440,33 @@ def get_item_reorder_details(items):
item_reorder_details = frappe._dict()
if items:
item_reorder_details = frappe.db.sql(
"""
select parent, warehouse, warehouse_reorder_qty, warehouse_reorder_level
from `tabItem Reorder`
where parent in ({0})
""".format(
", ".join(frappe.db.escape(i, percent=False) for i in items)
),
as_dict=1,
item_reorder_details = frappe.get_all(
"Item Reorder",
["parent", "warehouse", "warehouse_reorder_qty", "warehouse_reorder_level"],
filters={"parent": ("in", items)},
)
return dict((d.parent + d.warehouse, d) for d in item_reorder_details)
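# Illustrative sketch: the reorder map is keyed by item code and warehouse concatenated,
# exactly as built above. "_Test Item" and "Stores - _TC" are assumed example names.
reorder_map = get_item_reorder_details(["_Test Item"])
entry = reorder_map.get("_Test Item" + "Stores - _TC")
if entry:
    print(entry.warehouse_reorder_level, entry.warehouse_reorder_qty)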
def get_variants_attributes():
def get_variants_attributes() -> List[str]:
"""Return all item variant attributes."""
return [i.name for i in frappe.get_all("Item Attribute")]
return frappe.get_all("Item Attribute", pluck="name")
def get_variant_values_for(items):
"""Returns variant values for items."""
attribute_map = {}
for attr in frappe.db.sql(
"""select parent, attribute, attribute_value
from `tabItem Variant Attribute` where parent in (%s)
"""
% ", ".join(["%s"] * len(items)),
tuple(items),
as_dict=1,
):
attribute_info = frappe.get_all(
"Item Variant Attribute",
["parent", "attribute", "attribute_value"],
{
"parent": ("in", items),
},
)
for attr in attribute_info:
attribute_map.setdefault(attr["parent"], {})
attribute_map[attr["parent"]].update({attr["attribute"]: attr["attribute_value"]})


@ -0,0 +1,174 @@
from typing import Any, Dict
import frappe
from frappe import _dict
from frappe.tests.utils import FrappeTestCase
from frappe.utils import today
from erpnext.stock.doctype.item.test_item import make_item
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
from erpnext.stock.report.stock_balance.stock_balance import execute
def stock_balance(filters):
"""Get rows from stock balance report"""
return [_dict(row) for row in execute(filters)[1]]
class TestStockBalance(FrappeTestCase):
# ----------- utils
def setUp(self):
self.item = make_item()
self.filters = _dict(
{
"company": "_Test Company",
"item_code": self.item.name,
"from_date": "2020-01-01",
"to_date": str(today()),
}
)
def tearDown(self):
frappe.db.rollback()
def assertPartialDictEq(self, expected: Dict[str, Any], actual: Dict[str, Any]):
for k, v in expected.items():
self.assertEqual(v, actual[k], msg=f"{expected=}\n{actual=}")
def generate_stock_ledger(self, item_code: str, movements):
for movement in map(_dict, movements):
if "to_warehouse" not in movement:
movement.to_warehouse = "_Test Warehouse - _TC"
make_stock_entry(item_code=item_code, **movement)
def assertInvariants(self, rows):
last_balance = frappe.db.sql(
"""
WITH last_balances AS (
SELECT item_code, warehouse,
stock_value, qty_after_transaction,
ROW_NUMBER() OVER (PARTITION BY item_code, warehouse
ORDER BY timestamp(posting_date, posting_time) desc, creation desc)
AS rn
FROM `tabStock Ledger Entry`
where is_cancelled=0
)
SELECT * FROM last_balances WHERE rn = 1""",
as_dict=True,
)
item_wh_stock = _dict()
for line in last_balance:
item_wh_stock.setdefault((line.item_code, line.warehouse), line)
for row in rows:
msg = f"Invariants not met for {rows=}"
# qty invariant
self.assertAlmostEqual(row.bal_qty, row.opening_qty + row.in_qty - row.out_qty, msg=msg)
# value invariant
self.assertAlmostEqual(row.bal_val, row.opening_val + row.in_val - row.out_val, msg=msg)
# check against SLE
last_sle = item_wh_stock[(row.item_code, row.warehouse)]
self.assertAlmostEqual(row.bal_qty, last_sle.qty_after_transaction, 3)
self.assertAlmostEqual(row.bal_val, last_sle.stock_value, 3)
# valuation rate
if not row.bal_qty:
continue
self.assertAlmostEqual(row.val_rate, row.bal_val / row.bal_qty, 3, msg)
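# Worked instance of the three invariants above, using the receipts from
# test_opening_balance below with from_date = 2021-01-02 (each receipt assumed to be
# valued at its incoming rate):
#   opening_qty = 1, in_qty = 2 + 3 = 5, out_qty = 0  ->  bal_qty = 1 + 5 - 0 = 6
#   opening_val = 1*1 = 1, in_val = 2*2 + 3*3 = 13    ->  bal_val = 1 + 13 - 0 = 14
#   val_rate = bal_val / bal_qty = 14 / 6 ≈ 2.3333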
# ----------- tests
def test_basic_stock_balance(self):
"""Check very basic functionality and item info"""
rows = stock_balance(self.filters)
self.assertEqual(rows, [])
self.generate_stock_ledger(self.item.name, [_dict(qty=5, rate=10)])
# check item info
rows = stock_balance(self.filters)
self.assertPartialDictEq(
{
"item_code": self.item.name,
"item_name": self.item.item_name,
"item_group": self.item.item_group,
"stock_uom": self.item.stock_uom,
"in_qty": 5,
"in_val": 50,
"val_rate": 10,
},
rows[0],
)
self.assertInvariants(rows)
def test_opening_balance(self):
self.generate_stock_ledger(
self.item.name,
[
_dict(qty=1, rate=1, posting_date="2021-01-01"),
_dict(qty=2, rate=2, posting_date="2021-01-02"),
_dict(qty=3, rate=3, posting_date="2021-01-03"),
],
)
rows = stock_balance(self.filters)
self.assertInvariants(rows)
rows = stock_balance(self.filters.update({"from_date": "2021-01-02"}))
self.assertInvariants(rows)
self.assertPartialDictEq({"opening_qty": 1, "in_qty": 5}, rows[0])
rows = stock_balance(self.filters.update({"from_date": "2022-01-01"}))
self.assertInvariants(rows)
self.assertPartialDictEq({"opening_qty": 6, "in_qty": 0}, rows[0])
def test_uom_converted_info(self):
self.item.append("uoms", {"conversion_factor": 5, "uom": "Box"})
self.item.save()
self.generate_stock_ledger(self.item.name, [_dict(qty=5, rate=10)])
rows = stock_balance(self.filters.update({"include_uom": "Box"}))
self.assertEqual(rows[0].bal_qty_alt, 1)
self.assertInvariants(rows)
def test_item_group(self):
self.filters.pop("item_code", None)
rows = stock_balance(self.filters.update({"item_group": self.item.item_group}))
self.assertTrue(all(r.item_group == self.item.item_group for r in rows))
def test_child_warehouse_balances(self):
# This is default
self.generate_stock_ledger(self.item.name, [_dict(qty=5, rate=10, to_warehouse="Stores - _TC")])
self.filters.pop("item_code", None)
rows = stock_balance(self.filters.update({"warehouse": "All Warehouses - _TC"}))
self.assertTrue(
any(r.item_code == self.item.name and r.warehouse == "Stores - _TC" for r in rows),
msg=f"Expected child warehouse balances \n{rows}",
)
def test_show_item_attr(self):
from erpnext.controllers.item_variant import create_variant
self.item.has_variants = True
self.item.append("attributes", {"attribute": "Test Size"})
self.item.save()
attributes = {"Test Size": "Large"}
variant = create_variant(self.item.name, attributes)
variant.save()
self.generate_stock_ledger(variant.name, [_dict(qty=5, rate=10)])
rows = stock_balance(
self.filters.update({"show_variant_attributes": 1, "item_code": variant.name})
)
self.assertPartialDictEq(attributes, rows[0])
self.assertInvariants(rows)


@ -1,7 +1,7 @@
{
"actions": [],
"autoname": "field:id",
"creation": "2019-06-05 12:07:02.634534",
"creation": "2022-02-21 11:54:58.414784",
"doctype": "DocType",
"engine": "InnoDB",
"field_order": [
@ -9,6 +9,8 @@
"id",
"from",
"to",
"call_received_by",
"employee_user_id",
"medium",
"start_time",
"end_time",
@ -20,6 +22,7 @@
"recording_url",
"recording_html",
"section_break_11",
"type_of_call",
"summary",
"section_break_19",
"links"
@ -103,7 +106,8 @@
},
{
"fieldname": "summary",
"fieldtype": "Small Text"
"fieldtype": "Small Text",
"label": "Summary"
},
{
"fieldname": "section_break_11",
@ -134,15 +138,37 @@
"fieldname": "call_details_section",
"fieldtype": "Section Break",
"label": "Call Details"
},
{
"fieldname": "employee_user_id",
"fieldtype": "Link",
"hidden": 1,
"label": "Employee User Id",
"options": "User"
},
{
"fieldname": "type_of_call",
"fieldtype": "Link",
"label": "Type Of Call",
"options": "Telephony Call Type"
},
{
"depends_on": "to",
"fieldname": "call_received_by",
"fieldtype": "Link",
"label": "Call Received By",
"options": "Employee",
"read_only": 1
}
],
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2021-02-08 14:23:28.744844",
"modified": "2022-04-14 02:59:22.503202",
"modified_by": "Administrator",
"module": "Telephony",
"name": "Call Log",
"naming_rule": "By fieldname",
"owner": "Administrator",
"permissions": [
{
@ -164,6 +190,7 @@
],
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"title_field": "from",
"track_changes": 1,
"track_views": 1


@ -32,6 +32,10 @@ class CallLog(Document):
if lead:
self.add_link(link_type="Lead", link_name=lead)
# set the employee who received this incoming call
if self.is_incoming_call():
self.update_received_by()
def after_insert(self):
self.trigger_call_popup()
@ -49,6 +53,9 @@ class CallLog(Document):
if not doc_before_save:
return
if self.is_incoming_call() and self.has_value_changed("to"):
self.update_received_by()
if _is_call_missed(doc_before_save, self):
frappe.publish_realtime("call_{id}_missed".format(id=self.id), self)
self.trigger_call_popup()
@ -65,7 +72,8 @@ class CallLog(Document):
def trigger_call_popup(self):
if self.is_incoming_call():
scheduled_employees = get_scheduled_employees_for_popup(self.medium)
employee_emails = get_employees_with_number(self.to)
employees = get_employees_with_number(self.to)
employee_emails = [employee.get("user_id") for employee in employees]
# check if employees with matched number are scheduled to receive popup
emails = set(scheduled_employees).intersection(employee_emails)
@ -85,10 +93,17 @@ class CallLog(Document):
for email in emails:
frappe.publish_realtime("show_call_popup", self, user=email)
def update_received_by(self):
if employees := get_employees_with_number(self.get("to")):
self.call_received_by = employees[0].get("name")
self.employee_user_id = employees[0].get("user_id")
@frappe.whitelist()
def add_call_summary(call_log, summary):
def add_call_summary_and_call_type(call_log, summary, call_type):
doc = frappe.get_doc("Call Log", call_log)
doc.type_of_call = call_type
doc.save()
doc.add_comment("Comment", frappe.bold(_("Call Summary")) + "<br><br>" + summary)
@ -97,20 +112,19 @@ def get_employees_with_number(number):
if not number:
return []
employee_emails = frappe.cache().hget("employees_with_number", number)
if employee_emails:
return employee_emails
employee_doc_name_and_emails = frappe.cache().hget("employees_with_number", number)
if employee_doc_name_and_emails:
return employee_doc_name_and_emails
employees = frappe.get_all(
employee_doc_name_and_emails = frappe.get_all(
"Employee",
filters={"cell_number": ["like", "%{}%".format(number)], "user_id": ["!=", ""]},
fields=["user_id"],
filters={"cell_number": ["like", f"%{number}%"], "user_id": ["!=", ""]},
fields=["name", "user_id"],
)
employee_emails = [employee.user_id for employee in employees]
frappe.cache().hset("employees_with_number", number, employee_emails)
frappe.cache().hset("employees_with_number", number, employee_doc_name_and_emails)
return employee_emails
return employee_doc_name_and_emails
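# Illustrative sketch: lookups are memoized per number in the site cache; each row
# carries the Employee name and user_id selected above. "9999999999" matches the
# Exotel test employee set up later in this commit.
matched = get_employees_with_number("9999999999")
if matched:
    print(matched[0].get("name"), matched[0].get("user_id"))
frappe.cache().hdel("employees_with_number", "9999999999")  # drop the cached entry if needed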
def link_existing_conversations(doc, state):


@ -0,0 +1,8 @@
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
frappe.ui.form.on('Telephony Call Type', {
// refresh: function(frm) {
// }
});


@ -0,0 +1,58 @@
{
"actions": [],
"allow_rename": 1,
"autoname": "field:call_type",
"creation": "2022-02-25 16:13:37.321312",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"call_type",
"amended_from"
],
"fields": [
{
"fieldname": "call_type",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Call Type",
"reqd": 1,
"unique": 1
},
{
"fieldname": "amended_from",
"fieldtype": "Link",
"label": "Amended From",
"no_copy": 1,
"options": "Telephony Call Type",
"print_hide": 1,
"read_only": 1
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2022-02-25 16:14:07.087461",
"modified_by": "Administrator",
"module": "Telephony",
"name": "Telephony Call Type",
"naming_rule": "By fieldname",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}


@ -1,10 +1,9 @@
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class DATEVSettings(Document):
class TelephonyCallType(Document):
pass


@ -0,0 +1,9 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
# import frappe
import unittest
class TestTelephonyCallType(unittest.TestCase):
pass


@ -0,0 +1,122 @@
import frappe
call_initiation_data = frappe._dict(
{
"CallSid": "23c162077629863c1a2d7f29263a162m",
"CallFrom": "09999999991",
"CallTo": "09999999980",
"Direction": "incoming",
"Created": "Wed, 23 Feb 2022 12:31:59",
"From": "09999999991",
"To": "09999999988",
"CurrentTime": "2022-02-23 12:32:02",
"DialWhomNumber": "09999999999",
"Status": "busy",
"EventType": "Dial",
"AgentEmail": "test_employee_exotel@company.com",
}
)
call_end_data = frappe._dict(
{
"CallSid": "23c162077629863c1a2d7f29263a162m",
"CallFrom": "09999999991",
"CallTo": "09999999980",
"Direction": "incoming",
"ForwardedFrom": "null",
"Created": "Wed, 23 Feb 2022 12:31:59",
"DialCallDuration": "17",
"RecordingUrl": "https://s3-ap-southeast-1.amazonaws.com/random.mp3",
"StartTime": "2022-02-23 12:31:58",
"EndTime": "1970-01-01 05:30:00",
"DialCallStatus": "completed",
"CallType": "completed",
"DialWhomNumber": "09999999999",
"ProcessStatus": "null",
"flow_id": "228040",
"tenant_id": "67291",
"From": "09999999991",
"To": "09999999988",
"RecordingAvailableBy": "Wed, 23 Feb 2022 12:37:25",
"CurrentTime": "2022-02-23 12:32:25",
"OutgoingPhoneNumber": "09999999988",
"Legs": [
{
"Number": "09999999999",
"Type": "single",
"OnCallDuration": "10",
"CallerId": "09999999980",
"CauseCode": "NORMAL_CLEARING",
"Cause": "16",
}
],
}
)
call_disconnected_data = frappe._dict(
{
"CallSid": "d96421addce69e24bdc7ce5880d1162l",
"CallFrom": "09999999991",
"CallTo": "09999999980",
"Direction": "incoming",
"ForwardedFrom": "null",
"Created": "Mon, 21 Feb 2022 15:58:12",
"DialCallDuration": "0",
"StartTime": "2022-02-21 15:58:12",
"EndTime": "1970-01-01 05:30:00",
"DialCallStatus": "canceled",
"CallType": "client-hangup",
"DialWhomNumber": "09999999999",
"ProcessStatus": "null",
"flow_id": "228040",
"tenant_id": "67291",
"From": "09999999991",
"To": "09999999988",
"CurrentTime": "2022-02-21 15:58:47",
"OutgoingPhoneNumber": "09999999988",
"Legs": [
{
"Number": "09999999999",
"Type": "single",
"OnCallDuration": "0",
"CallerId": "09999999980",
"CauseCode": "RING_TIMEOUT",
"Cause": "1003",
}
],
}
)
call_not_answered_data = frappe._dict(
{
"CallSid": "fdb67a2b4b2d057b610a52ef43f81622",
"CallFrom": "09999999991",
"CallTo": "09999999980",
"Direction": "incoming",
"ForwardedFrom": "null",
"Created": "Mon, 21 Feb 2022 15:47:02",
"DialCallDuration": "0",
"StartTime": "2022-02-21 15:47:02",
"EndTime": "1970-01-01 05:30:00",
"DialCallStatus": "no-answer",
"CallType": "incomplete",
"DialWhomNumber": "09999999999",
"ProcessStatus": "null",
"flow_id": "228040",
"tenant_id": "67291",
"From": "09999999991",
"To": "09999999988",
"CurrentTime": "2022-02-21 15:47:40",
"OutgoingPhoneNumber": "09999999988",
"Legs": [
{
"Number": "09999999999",
"Type": "single",
"OnCallDuration": "0",
"CallerId": "09999999980",
"CauseCode": "RING_TIMEOUT",
"Cause": "1003",
}
],
}
)


@ -0,0 +1,69 @@
import frappe
from frappe.contacts.doctype.contact.test_contact import create_contact
from frappe.tests.test_api import FrappeAPITestCase
from erpnext.hr.doctype.employee.test_employee import make_employee
class TestExotel(FrappeAPITestCase):
@classmethod
def setUpClass(cls):
cls.CURRENT_DB_CONNECTION = frappe.db
cls.test_employee_name = make_employee(
user="test_employee_exotel@company.com", cell_number="9999999999"
)
frappe.db.set_value("Exotel Settings", "Exotel Settings", "enabled", 1)
phones = [{"phone": "+91 9999999991", "is_primary_phone": 0, "is_primary_mobile_no": 1}]
create_contact(name="Test Contact", salutation="Mr", phones=phones)
frappe.db.commit()
def test_for_successful_call(self):
from .exotel_test_data import call_end_data, call_initiation_data
api_method = "handle_incoming_call"
end_call_api_method = "handle_end_call"
self.emulate_api_call_from_exotel(api_method, call_initiation_data)
self.emulate_api_call_from_exotel(end_call_api_method, call_end_data)
call_log = frappe.get_doc("Call Log", call_initiation_data.CallSid)
self.assertEqual(call_log.get("from"), call_initiation_data.CallFrom)
self.assertEqual(call_log.get("to"), call_initiation_data.DialWhomNumber)
self.assertEqual(call_log.get("call_received_by"), self.test_employee_name)
self.assertEqual(call_log.get("status"), "Completed")
def test_for_disconnected_call(self):
from .exotel_test_data import call_disconnected_data
api_method = "handle_missed_call"
self.emulate_api_call_from_exotel(api_method, call_disconnected_data)
call_log = frappe.get_doc("Call Log", call_disconnected_data.CallSid)
self.assertEqual(call_log.get("from"), call_disconnected_data.CallFrom)
self.assertEqual(call_log.get("to"), call_disconnected_data.DialWhomNumber)
self.assertEqual(call_log.get("call_received_by"), self.test_employee_name)
self.assertEqual(call_log.get("status"), "Canceled")
def test_for_call_not_answered(self):
from .exotel_test_data import call_not_answered_data
api_method = "handle_missed_call"
self.emulate_api_call_from_exotel(api_method, call_not_answered_data)
call_log = frappe.get_doc("Call Log", call_not_answered_data.CallSid)
self.assertEqual(call_log.get("from"), call_not_answered_data.CallFrom)
self.assertEqual(call_log.get("to"), call_not_answered_data.DialWhomNumber)
self.assertEqual(call_log.get("call_received_by"), self.test_employee_name)
self.assertEqual(call_log.get("status"), "No Answer")
def emulate_api_call_from_exotel(self, api_method, data):
self.post(
f"/api/method/erpnext.erpnext_integrations.exotel_integration.{api_method}",
data=frappe.as_json(data),
content_type="application/json",
as_tuple=True,
)
# restart db connection to get latest data
frappe.connect()
@classmethod
def tearDownClass(cls):
frappe.db = cls.CURRENT_DB_CONNECTION


@ -271,7 +271,7 @@ Assessment Report,Rapport d'Évaluation,
Assessment Reports,Rapports d'évaluation,
Assessment Result,Résultat de l'Évaluation,
Assessment Result record {0} already exists.,Le Résultat d'Évaluation {0} existe déjà.,
Asset,Atout,
Asset,Actif - Immo.,
Asset Category,Catégorie d'Actif,
Asset Category is mandatory for Fixed Asset item,Catégorie d'Actif est obligatoire pour l'article Immobilisé,
Asset Maintenance,Maintenance des actifs,
@ -3037,6 +3037,7 @@ To Date must be greater than From Date,La date de fin doit être supérieure à
To Date should be within the Fiscal Year. Assuming To Date = {0},La Date Finale doit être dans l'exercice. En supposant Date Finale = {0},
To Datetime,À la Date,
To Deliver,À Livrer,
{} To Deliver,{} à livrer
To Deliver and Bill,À Livrer et Facturer,
To Fiscal Year,À l'année fiscale,
To GSTIN,GSTIN (Destination),
@ -9871,3 +9872,4 @@ Show Barcode Field in Stock Transactions,Afficher le champ Code Barre dans les t
Convert Item Description to Clean HTML in Transactions,Convertir les descriptions d'articles en HTML valide lors des transactions
Have Default Naming Series for Batch ID?,Nom de série par défaut pour les Lots ou Séries
"The percentage you are allowed to transfer more against the quantity ordered. For example, if you have ordered 100 units, and your Allowance is 10%, then you are allowed transfer 110 units","Le pourcentage de quantité que vous pourrez réceptionner en plus de la quantité commandée. Par exemple, vous avez commandé 100 unités, votre pourcentage de dépassement est de 10%, vous pourrez réceptionner 110 unités"
Unit Of Measure (UOM),Unité de mesure (UDM),



@ -1,7 +1,6 @@
# frappe # https://github.com/frappe/frappe is installed during bench-init
gocardless-pro~=1.22.0
googlemaps
pandas>=1.1.5,<2.0.0
plaid-python~=7.2.1
pycountry~=20.7.3
PyGithub~=1.55