Merge branch 'develop' into gross_profit_non_stock_item

Deepesh Garg, 2022-02-21 12:34:14 +05:30, committed by GitHub
commit e13d774e8e
101 changed files with 3110 additions and 2256 deletions


@@ -40,10 +40,14 @@ if [ "$DB" == "postgres" ];then
echo "travis" | psql -h 127.0.0.1 -p 5432 -c "CREATE USER test_frappe WITH PASSWORD 'test_frappe'" -U postgres;
fi
wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
sudo chmod o+x /usr/local/bin/wkhtmltopdf
install_whktml() {
wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
sudo chmod o+x /usr/local/bin/wkhtmltopdf
}
install_whktml &
cd ~/frappe-bench || exit
@@ -57,5 +61,5 @@ bench get-app erpnext "${GITHUB_WORKSPACE}"
if [ "$TYPE" == "server" ]; then bench setup requirements --dev; fi
bench start &> bench_run_logs.txt &
CI=Yes bench build --app frappe &
bench --site test_site reinstall --yes
bench build --app frappe

.github/stale.yml vendored

@@ -30,6 +30,7 @@ issues:
exemptLabels:
- valid
- to-validate
- QA
markComment: >
This issue has been automatically marked as inactive because it has not had
recent activity and it wasn't validated by maintainer team. It will be


@@ -2,8 +2,6 @@ import inspect
import frappe
from erpnext.hooks import regional_overrides
__version__ = '14.0.0-dev'
def get_default_company(user=None):
@@ -121,14 +119,17 @@ def allow_regional(fn):
@erpnext.allow_regional
def myfunction():
pass'''
def caller(*args, **kwargs):
region = get_region()
fn_name = inspect.getmodule(fn).__name__ + '.' + fn.__name__
if region in regional_overrides and fn_name in regional_overrides[region]:
return frappe.get_attr(regional_overrides[region][fn_name])(*args, **kwargs)
else:
overrides = frappe.get_hooks("regional_overrides", {}).get(get_region())
function_path = f"{inspect.getmodule(fn).__name__}.{fn.__name__}"
if not overrides or function_path not in overrides:
return fn(*args, **kwargs)
# Priority given to last installed app
return frappe.get_attr(overrides[function_path][-1])(*args, **kwargs)
return caller
def get_last_membership(member):
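The reworked allow_regional decorator above now resolves overrides through the regional_overrides hook, preferring the entry from the last installed app, instead of the hardcoded map imported from erpnext.hooks. A minimal sketch of how a custom app could register such an override; the app name custom_app, the target module, and the exact decorated path are illustrative, not taken from this commit:

# custom_app/hooks.py (hypothetical app)
regional_overrides = {
    "United Arab Emirates": {
        # dotted path of the @erpnext.allow_regional function -> dotted path of the override
        "erpnext.controllers.taxes_and_totals.update_itemised_tax_data":
            "custom_app.regional.uae.update_itemised_tax_data",
    },
}

With this hook in place, calling the decorated function for a region of United Arab Emirates dispatches to the custom_app implementation; if several installed apps override the same path, the last entry returned by frappe.get_hooks wins.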


@@ -14,6 +14,10 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
});
},
onload: function (frm) {
frm.trigger('bank_account');
},
refresh: function (frm) {
frappe.require("bank-reconciliation-tool.bundle.js", () =>
frm.trigger("make_reconciliation_tool")
@@ -51,7 +55,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
bank_account: function (frm) {
frappe.db.get_value(
"Bank Account",
frm.bank_account,
frm.doc.bank_account,
"account",
(r) => {
frappe.db.get_value(
@@ -60,6 +64,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
"account_currency",
(r) => {
frm.currency = r.account_currency;
frm.trigger("render_chart");
}
);
}
@@ -124,7 +129,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
}
},
render_chart(frm) {
render_chart: frappe.utils.debounce((frm) => {
frm.cards_manager = new erpnext.accounts.bank_reconciliation.NumberCardManager(
{
$reconciliation_tool_cards: frm.get_field(
@@ -136,7 +141,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
currency: frm.currency,
}
);
},
}, 500),
render(frm) {
if (frm.doc.bank_account) {


@@ -167,7 +167,8 @@ class OpeningInvoiceCreationTool(Document):
"is_pos": 0,
"doctype": "Sales Invoice" if self.invoice_type == "Sales" else "Purchase Invoice",
"update_stock": 0,
"invoice_number": row.invoice_number
"invoice_number": row.invoice_number,
"disable_rounded_total": 1
})
accounting_dimension = get_accounting_dimensions()


@@ -172,9 +172,10 @@ class POSInvoice(SalesInvoice):
frappe.throw(error_msg, title=_("Invalid Item"), as_list=True)
def validate_stock_availablility(self):
from erpnext.stock.stock_ledger import is_negative_stock_allowed
if self.is_return or self.docstatus != 1:
return
allow_negative_stock = frappe.db.get_single_value('Stock Settings', 'allow_negative_stock')
for d in self.get('items'):
is_service_item = not (frappe.db.get_value('Item', d.get('item_code'), 'is_stock_item'))
if is_service_item:
@@ -186,7 +187,7 @@ class POSInvoice(SalesInvoice):
elif d.batch_no:
self.validate_pos_reserved_batch_qty(d)
else:
if allow_negative_stock:
if is_negative_stock_allowed(item_code=d.item_code):
return
available_stock, is_stock_item = get_stock_availability(d.item_code, d.warehouse)


@@ -586,23 +586,29 @@ class TestPOSInvoice(unittest.TestCase):
item_price.insert()
pr = make_pricing_rule(selling=1, priority=5, discount_percentage=10)
pr.save()
pos_inv = create_pos_invoice(qty=1, do_not_submit=1)
pos_inv.items[0].rate = 300
pos_inv.save()
self.assertEquals(pos_inv.items[0].discount_percentage, 10)
# rate shouldn't change
self.assertEquals(pos_inv.items[0].rate, 405)
pos_inv.ignore_pricing_rule = 1
pos_inv.items[0].rate = 300
pos_inv.save()
self.assertEquals(pos_inv.ignore_pricing_rule, 1)
# rate should change since pricing rules are ignored
self.assertEquals(pos_inv.items[0].rate, 300)
try:
pos_inv = create_pos_invoice(qty=1, do_not_submit=1)
pos_inv.items[0].rate = 300
pos_inv.save()
self.assertEquals(pos_inv.items[0].discount_percentage, 10)
# rate shouldn't change
self.assertEquals(pos_inv.items[0].rate, 405)
item_price.delete()
pos_inv.delete()
pr.delete()
pos_inv.ignore_pricing_rule = 1
pos_inv.save()
self.assertEquals(pos_inv.ignore_pricing_rule, 1)
# rate should reset since pricing rules are ignored
self.assertEquals(pos_inv.items[0].rate, 450)
pos_inv.items[0].rate = 300
pos_inv.save()
self.assertEquals(pos_inv.items[0].rate, 300)
finally:
item_price.delete()
pos_inv.delete()
pr.delete()
def create_pos_invoice(**args):


@@ -84,12 +84,20 @@ class POSInvoiceMergeLog(Document):
sales_invoice.set_posting_time = 1
sales_invoice.posting_date = getdate(self.posting_date)
sales_invoice.save()
self.write_off_fractional_amount(sales_invoice, data)
sales_invoice.submit()
self.consolidated_invoice = sales_invoice.name
return sales_invoice.name
def write_off_fractional_amount(self, invoice, data):
pos_invoice_grand_total = sum(d.grand_total for d in data)
if abs(pos_invoice_grand_total - invoice.grand_total) < 1:
invoice.write_off_amount += -1 * (pos_invoice_grand_total - invoice.grand_total)
invoice.save()
def process_merging_into_credit_note(self, data):
credit_note = self.get_new_sales_invoice()
credit_note.is_return = 1
@@ -102,6 +110,7 @@ class POSInvoiceMergeLog(Document):
# TODO: return could be against multiple sales invoice which could also have been consolidated?
# credit_note.return_against = self.consolidated_invoice
credit_note.save()
self.write_off_fractional_amount(credit_note, data)
credit_note.submit()
self.consolidated_credit_note = credit_note.name
@@ -135,9 +144,15 @@ class POSInvoiceMergeLog(Document):
i.uom == item.uom and i.net_rate == item.net_rate and i.warehouse == item.warehouse):
found = True
i.qty = i.qty + item.qty
i.amount = i.amount + item.net_amount
i.net_amount = i.amount
i.base_amount = i.base_amount + item.base_net_amount
i.base_net_amount = i.base_amount
if not found:
item.rate = item.net_rate
item.amount = item.net_amount
item.base_amount = item.base_net_amount
item.price_list_rate = 0
si_item = map_child_doc(item, invoice, {"doctype": "Sales Invoice Item"})
items.append(si_item)
@@ -169,6 +184,7 @@ class POSInvoiceMergeLog(Document):
found = True
if not found:
payments.append(payment)
rounding_adjustment += doc.rounding_adjustment
rounded_total += doc.rounded_total
base_rounding_adjustment += doc.base_rounding_adjustment
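The new write_off_fractional_amount helper above absorbs sub-unit rounding drift that can appear when POS Invoices with inclusive taxes are consolidated, so the consolidated Sales Invoice still settles with zero outstanding. A rough sketch of the logic with made-up figures:

# Hypothetical figures, purely to illustrate the write-off above.
pos_invoice_grand_totals = [30000.00, 30000.00]   # grand totals of the source POS Invoices
consolidated_grand_total = 59999.98               # recomputed total on the consolidated Sales Invoice

difference = sum(pos_invoice_grand_totals) - consolidated_grand_total   # 0.02
if abs(difference) < 1:
    write_off_amount = -1 * difference   # -0.02 is added to the invoice's write_off_amount before submit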


@@ -12,6 +12,7 @@ from erpnext.accounts.doctype.pos_invoice.test_pos_invoice import create_pos_inv
from erpnext.accounts.doctype.pos_invoice_merge_log.pos_invoice_merge_log import (
consolidate_pos_invoices,
)
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
class TestPOSInvoiceMergeLog(unittest.TestCase):
@@ -150,3 +151,132 @@ class TestPOSInvoiceMergeLog(unittest.TestCase):
frappe.set_user("Administrator")
frappe.db.sql("delete from `tabPOS Profile`")
frappe.db.sql("delete from `tabPOS Invoice`")
def test_consolidation_round_off_error_1(self):
'''
Test round off error in consolidated invoice creation if POS Invoice has inclusive tax
'''
frappe.db.sql("delete from `tabPOS Invoice`")
try:
make_stock_entry(
to_warehouse="_Test Warehouse - _TC",
item_code="_Test Item",
rate=8000,
qty=10,
)
init_user_and_profile()
inv = create_pos_invoice(qty=3, rate=10000, do_not_save=True)
inv.append("taxes", {
"account_head": "_Test Account VAT - _TC",
"charge_type": "On Net Total",
"cost_center": "_Test Cost Center - _TC",
"description": "VAT",
"doctype": "Sales Taxes and Charges",
"rate": 7.5,
"included_in_print_rate": 1
})
inv.append('payments', {
'mode_of_payment': 'Cash', 'account': 'Cash - _TC', 'amount': 30000
})
inv.insert()
inv.submit()
inv2 = create_pos_invoice(qty=3, rate=10000, do_not_save=True)
inv2.append("taxes", {
"account_head": "_Test Account VAT - _TC",
"charge_type": "On Net Total",
"cost_center": "_Test Cost Center - _TC",
"description": "VAT",
"doctype": "Sales Taxes and Charges",
"rate": 7.5,
"included_in_print_rate": 1
})
inv2.append('payments', {
'mode_of_payment': 'Cash', 'account': 'Cash - _TC', 'amount': 30000
})
inv2.insert()
inv2.submit()
consolidate_pos_invoices()
inv.load_from_db()
consolidated_invoice = frappe.get_doc('Sales Invoice', inv.consolidated_invoice)
self.assertEqual(consolidated_invoice.outstanding_amount, 0)
self.assertEqual(consolidated_invoice.status, 'Paid')
finally:
frappe.set_user("Administrator")
frappe.db.sql("delete from `tabPOS Profile`")
frappe.db.sql("delete from `tabPOS Invoice`")
def test_consolidation_round_off_error_2(self):
'''
Test the same case as above but with an Unpaid POS Invoice
'''
frappe.db.sql("delete from `tabPOS Invoice`")
try:
make_stock_entry(
to_warehouse="_Test Warehouse - _TC",
item_code="_Test Item",
rate=8000,
qty=10,
)
init_user_and_profile()
inv = create_pos_invoice(qty=6, rate=10000, do_not_save=True)
inv.append("taxes", {
"account_head": "_Test Account VAT - _TC",
"charge_type": "On Net Total",
"cost_center": "_Test Cost Center - _TC",
"description": "VAT",
"doctype": "Sales Taxes and Charges",
"rate": 7.5,
"included_in_print_rate": 1
})
inv.append('payments', {
'mode_of_payment': 'Cash', 'account': 'Cash - _TC', 'amount': 60000
})
inv.insert()
inv.submit()
inv2 = create_pos_invoice(qty=6, rate=10000, do_not_save=True)
inv2.append("taxes", {
"account_head": "_Test Account VAT - _TC",
"charge_type": "On Net Total",
"cost_center": "_Test Cost Center - _TC",
"description": "VAT",
"doctype": "Sales Taxes and Charges",
"rate": 7.5,
"included_in_print_rate": 1
})
inv2.append('payments', {
'mode_of_payment': 'Cash', 'account': 'Cash - _TC', 'amount': 60000
})
inv2.insert()
inv2.submit()
inv3 = create_pos_invoice(qty=3, rate=600, do_not_save=True)
inv3.append('payments', {
'mode_of_payment': 'Cash', 'account': 'Cash - _TC', 'amount': 1000
})
inv3.insert()
inv3.submit()
consolidate_pos_invoices()
inv.load_from_db()
consolidated_invoice = frappe.get_doc('Sales Invoice', inv.consolidated_invoice)
self.assertEqual(consolidated_invoice.outstanding_amount, 800)
self.assertNotEqual(consolidated_invoice.status, 'Paid')
finally:
frappe.set_user("Administrator")
frappe.db.sql("delete from `tabPOS Profile`")
frappe.db.sql("delete from `tabPOS Invoice`")


@@ -249,13 +249,17 @@ def get_pricing_rule_for_item(args, price_list_rate=0, doc=None, for_validate=Fa
"free_item_data": [],
"parent": args.parent,
"parenttype": args.parenttype,
"child_docname": args.get('child_docname')
"child_docname": args.get('child_docname'),
})
if args.ignore_pricing_rule or not args.item_code:
if frappe.db.exists(args.doctype, args.name) and args.get("pricing_rules"):
item_details = remove_pricing_rule_for_item(args.get("pricing_rules"),
item_details, args.get('item_code'))
item_details = remove_pricing_rule_for_item(
args.get("pricing_rules"),
item_details,
item_code=args.get("item_code"),
rate=args.get("price_list_rate"),
)
return item_details
update_args_for_pricing_rule(args)
@@ -308,8 +312,12 @@ def get_pricing_rule_for_item(args, price_list_rate=0, doc=None, for_validate=Fa
if not doc: return item_details
elif args.get("pricing_rules"):
item_details = remove_pricing_rule_for_item(args.get("pricing_rules"),
item_details, args.get('item_code'))
item_details = remove_pricing_rule_for_item(
args.get("pricing_rules"),
item_details,
item_code=args.get("item_code"),
rate=args.get("price_list_rate"),
)
return item_details
@@ -390,7 +398,7 @@ def apply_price_discount_rule(pricing_rule, item_details, args):
item_details[field] += (pricing_rule.get(field, 0)
if pricing_rule else args.get(field, 0))
def remove_pricing_rule_for_item(pricing_rules, item_details, item_code=None):
def remove_pricing_rule_for_item(pricing_rules, item_details, item_code=None, rate=None):
from erpnext.accounts.doctype.pricing_rule.utils import (
get_applied_pricing_rules,
get_pricing_rule_items,
@@ -403,6 +411,7 @@ def remove_pricing_rule_for_item(pricing_rules, item_details, item_code=None):
if pricing_rule.rate_or_discount == 'Discount Percentage':
item_details.discount_percentage = 0.0
item_details.discount_amount = 0.0
item_details.rate = rate or 0.0
if pricing_rule.rate_or_discount == 'Discount Amount':
item_details.discount_amount = 0.0
@@ -421,6 +430,7 @@ def remove_pricing_rule_for_item(pricing_rules, item_details, item_code=None):
item_details.applied_on_items = ','.join(items)
item_details.pricing_rules = ''
item_details.pricing_rule_removed = True
return item_details
@@ -432,9 +442,12 @@ def remove_pricing_rules(item_list):
out = []
for item in item_list:
item = frappe._dict(item)
if item.get('pricing_rules'):
out.append(remove_pricing_rule_for_item(item.get("pricing_rules"),
item, item.item_code))
if item.get("pricing_rules"):
out.append(
remove_pricing_rule_for_item(
item.get("pricing_rules"), item, item.item_code, item.get("price_list_rate")
)
)
return out


@@ -628,6 +628,46 @@ class TestPricingRule(unittest.TestCase):
for doc in [si, si1]:
doc.delete()
def test_remove_pricing_rule(self):
item = make_item("Water Flask")
make_item_price("Water Flask", "_Test Price List", 100)
pricing_rule_record = {
"doctype": "Pricing Rule",
"title": "_Test Water Flask Rule",
"apply_on": "Item Code",
"price_or_product_discount": "Price",
"items": [{
"item_code": "Water Flask",
}],
"selling": 1,
"currency": "INR",
"rate_or_discount": "Discount Percentage",
"discount_percentage": 20,
"company": "_Test Company"
}
rule = frappe.get_doc(pricing_rule_record)
rule.insert()
si = create_sales_invoice(do_not_save=True, item_code="Water Flask")
si.selling_price_list = "_Test Price List"
si.save()
self.assertEqual(si.items[0].price_list_rate, 100)
self.assertEqual(si.items[0].discount_percentage, 20)
self.assertEqual(si.items[0].rate, 80)
si.ignore_pricing_rule = 1
si.save()
self.assertEqual(si.items[0].discount_percentage, 0)
self.assertEqual(si.items[0].rate, 100)
si.delete()
rule.delete()
frappe.get_doc("Item Price", {"item_code": "Water Flask"}).delete()
item.delete()
def test_multiple_pricing_rules_with_min_qty(self):
make_pricing_rule(discount_percentage=20, selling=1, priority=1, min_qty=4,
apply_multiple_pricing_rules=1, title="_Test Pricing Rule with Min Qty - 1")
@@ -648,6 +688,7 @@ class TestPricingRule(unittest.TestCase):
frappe.delete_doc_if_exists("Pricing Rule", "_Test Pricing Rule with Min Qty - 1")
frappe.delete_doc_if_exists("Pricing Rule", "_Test Pricing Rule with Min Qty - 2")
test_dependencies = ["Campaign"]
def make_pricing_rule(**args):


@@ -285,7 +285,7 @@ class SalesInvoice(SellingController):
filters={ invoice_or_credit_note: self.name },
pluck="pos_closing_entry"
)
if pos_closing_entry:
if pos_closing_entry and pos_closing_entry[0]:
msg = _("To cancel a {} you need to cancel the POS Closing Entry {}.").format(
frappe.bold("Consolidated Sales Invoice"),
get_link_to_form("POS Closing Entry", pos_closing_entry[0])


@@ -319,13 +319,18 @@ def make_reverse_gl_entries(gl_entries=None, voucher_type=None, voucher_no=None,
"""
if not gl_entries:
gl_entries = frappe.get_all("GL Entry",
fields = ["*"],
filters = {
"voucher_type": voucher_type,
"voucher_no": voucher_no,
"is_cancelled": 0
})
gl_entry = frappe.qb.DocType("GL Entry")
gl_entries = (frappe.qb.from_(
gl_entry
).select(
'*'
).where(
gl_entry.voucher_type == voucher_type
).where(
gl_entry.voucher_no == voucher_no
).where(
gl_entry.is_cancelled == 0
).for_update()).run(as_dict=1)
if gl_entries:
validate_accounting_period(gl_entries)
@@ -333,23 +338,24 @@ def make_reverse_gl_entries(gl_entries=None, voucher_type=None, voucher_no=None,
set_as_cancel(gl_entries[0]['voucher_type'], gl_entries[0]['voucher_no'])
for entry in gl_entries:
entry['name'] = None
debit = entry.get('debit', 0)
credit = entry.get('credit', 0)
new_gle = copy.deepcopy(entry)
new_gle['name'] = None
debit = new_gle.get('debit', 0)
credit = new_gle.get('credit', 0)
debit_in_account_currency = entry.get('debit_in_account_currency', 0)
credit_in_account_currency = entry.get('credit_in_account_currency', 0)
debit_in_account_currency = new_gle.get('debit_in_account_currency', 0)
credit_in_account_currency = new_gle.get('credit_in_account_currency', 0)
entry['debit'] = credit
entry['credit'] = debit
entry['debit_in_account_currency'] = credit_in_account_currency
entry['credit_in_account_currency'] = debit_in_account_currency
new_gle['debit'] = credit
new_gle['credit'] = debit
new_gle['debit_in_account_currency'] = credit_in_account_currency
new_gle['credit_in_account_currency'] = debit_in_account_currency
entry['remarks'] = "On cancellation of " + entry['voucher_no']
entry['is_cancelled'] = 1
new_gle['remarks'] = "On cancellation of " + new_gle['voucher_no']
new_gle['is_cancelled'] = 1
if entry['debit'] or entry['credit']:
make_entry(entry, adv_adj, "Yes")
if new_gle['debit'] or new_gle['credit']:
make_entry(new_gle, adv_adj, "Yes")
def check_freezing_date(posting_date, adv_adj=False):


@@ -1,23 +0,0 @@
{
"align_labels_right": 0,
"creation": "2017-08-08 12:33:04.773099",
"custom_format": 1,
"disabled": 0,
"doc_type": "Sales Invoice",
"docstatus": 0,
"doctype": "Print Format",
"font": "Default",
"html": "<style>\n\t.print-format table, .print-format tr, \n\t.print-format td, .print-format div, .print-format p {\n\t\tfont-family: Tahoma, sans-serif;\n\t\tline-height: 150%;\n\t\tvertical-align: middle;\n\t}\n\t@media screen {\n\t\t.print-format {\n\t\t\twidth: 4in;\n\t\t\tpadding: 0.25in;\n\t\t\tmin-height: 8in;\n\t\t}\n\t}\n</style>\n\n{% if letter_head %}\n {{ letter_head }}\n{% endif %}\n<p class=\"text-center\">\n\t{{ doc.company }}<br>\n\t{% if doc.company_address_display %}\n\t\t{% set company_address = doc.company_address_display.replace(\"\\n\", \" \").replace(\"<br>\", \" \") %}\n\t\t{% if \"GSTIN\" not in company_address %}\n\t\t\t{{ company_address }}\n\t\t\t<b>{{ _(\"GSTIN\") }}:</b>{{ doc.company_gstin }}\n\t\t{% else %}\n\t\t\t{{ company_address.replace(\"GSTIN\", \"<br>GSTIN\") }}\n\t\t{% endif %}\n\t{% endif %}\n\t<br>\n\t{% if doc.docstatus == 0 %}\n\t\t<b>{{ doc.status + \" \"+ (doc.select_print_heading or _(\"Invoice\")) }}</b><br>\n\t{% else %}\n\t\t<b>{{ doc.select_print_heading or _(\"Invoice\") }}</b><br>\n\t{% endif %}\n</p>\n<p>\n\t<b>{{ _(\"Receipt No\") }}:</b> {{ doc.name }}<br>\n\t<b>{{ _(\"Date\") }}:</b> {{ doc.get_formatted(\"posting_date\") }}<br>\n\t{% if doc.grand_total > 50000 %}\n\t\t{% set customer_address = doc.address_display.replace(\"\\n\", \" \").replace(\"<br>\", \" \") %}\n\t\t<b>{{ _(\"Customer\") }}:</b><br>\n\t\t{{ doc.customer_name }}<br>\n\t\t{{ customer_address }}\n\t{% endif %}\n</p>\n\n<hr>\n<table class=\"table table-condensed cart no-border\">\n\t<thead>\n\t\t<tr>\n\t\t\t<th width=\"50%\">{{ _(\"Item\") }}</b></th>\n\t\t\t<th width=\"25%\" class=\"text-right\">{{ _(\"Qty\") }}</th>\n\t\t\t<th width=\"25%\" class=\"text-right\">{{ _(\"Amount\") }}</th>\n\t\t</tr>\n\t</thead>\n\t<tbody>\n\t\t{%- for item in doc.items -%}\n\t\t<tr>\n\t\t\t<td>\n\t\t\t\t{{ item.item_code }}\n\t\t\t\t{%- if item.item_name != item.item_code -%}\n\t\t\t\t\t<br>{{ item.item_name }}\n\t\t\t\t{%- endif -%}\n\t\t\t\t{%- if item.gst_hsn_code -%}\n\t\t\t\t\t<br><b>{{ _(\"HSN/SAC\") }}:</b> {{ item.gst_hsn_code }}\n\t\t\t\t{%- endif -%}\n\t\t\t\t{%- if item.serial_no -%}\n\t\t\t\t\t<br><b>{{ _(\"Serial No\") }}:</b> {{ item.serial_no }}\n\t\t\t\t{%- endif -%}\n\t\t\t</td>\n\t\t\t<td class=\"text-right\">{{ item.qty }}<br>@ {{ item.rate }}</td>\n\t\t\t<td class=\"text-right\">{{ item.get_formatted(\"amount\") }}</td>\n\t\t</tr>\n\t\t{%- endfor -%}\n\t</tbody>\n</table>\n<table class=\"table table-condensed no-border\">\n\t<tbody>\n\t\t<tr>\n\t\t\t{% if doc.flags.show_inclusive_tax_in_print %}\n\t\t\t\t<td class=\"text-right\" style=\"width: 70%\">\n\t\t\t\t\t{{ _(\"Total Excl. 
Tax\") }}\n\t\t\t\t</td>\n\t\t\t\t<td class=\"text-right\">\n\t\t\t\t\t{{ doc.get_formatted(\"net_total\", doc) }}\n\t\t\t\t</td>\n\t\t\t{% else %}\n\t\t\t\t<td class=\"text-right\" style=\"width: 70%\">\n\t\t\t\t\t{{ _(\"Total\") }}\n\t\t\t\t</td>\n\t\t\t\t<td class=\"text-right\">\n\t\t\t\t\t{{ doc.get_formatted(\"total\", doc) }}\n\t\t\t\t</td>\n\t\t\t{% endif %}\n\t\t</tr>\n\t\t{%- for row in doc.taxes -%}\n\t\t {%- if (not row.included_in_print_rate or doc.flags.show_inclusive_tax_in_print) and row.tax_amount != 0 -%}\n\t\t\t<tr>\n\t\t\t\t<td class=\"text-right\" style=\"width: 70%\">\n\t\t\t\t\t{{ row.description }}\n\t\t\t\t</td>\n\t\t\t\t<td class=\"text-right\">\n\t\t\t\t\t{{ row.get_formatted(\"tax_amount\", doc) }}\n\t\t\t\t</td>\n\t\t\t<tr>\n\t\t {%- endif -%}\n\t\t{%- endfor -%}\n\t\t{%- if doc.discount_amount -%}\n\t\t<tr>\n\t\t\t<td class=\"text-right\" style=\"width: 75%\">\n\t\t\t\t{{ _(\"Discount\") }}\n\t\t\t</td>\n\t\t\t<td class=\"text-right\">\n\t\t\t\t{{ doc.get_formatted(\"discount_amount\") }}\n\t\t\t</td>\n\t\t</tr>\n\t\t{%- endif -%}\n\t\t<tr>\n\t\t\t<td class=\"text-right\" style=\"width: 75%\">\n\t\t\t\t<b>{{ _(\"Grand Total\") }}</b>\n\t\t\t</td>\n\t\t\t<td class=\"text-right\">\n\t\t\t\t{{ doc.get_formatted(\"grand_total\") }}\n\t\t\t</td>\n\t\t</tr>\n\t\t{%- if doc.rounded_total -%}\n\t\t<tr>\n\t\t\t<td class=\"text-right\" style=\"width: 75%\">\n\t\t\t\t<b>{{ _(\"Rounded Total\") }}</b>\n\t\t\t</td>\n\t\t\t<td class=\"text-right\">\n\t\t\t\t{{ doc.get_formatted(\"rounded_total\") }}\n\t\t\t</td>\n\t\t</tr>\n\t\t{%- endif -%}\n\t\t<tr>\n\t\t\t<td class=\"text-right\" style=\"width: 75%\">\n\t\t\t\t<b>{{ _(\"Paid Amount\") }}</b>\n\t\t\t</td>\n\t\t\t<td class=\"text-right\">\n\t\t\t\t{{ doc.get_formatted(\"paid_amount\") }}\n\t\t\t</td>\n\t\t</tr>\n\t{%- if doc.change_amount -%}\n\t\t<tr>\n\t\t\t<td class=\"text-right\" style=\"width: 75%\">\n\t\t\t\t<b>{{ _(\"Change Amount\") }}</b>\n\t\t\t</td>\n\t\t\t<td class=\"text-right\">\n\t\t\t\t{{ doc.get_formatted(\"change_amount\") }}\n\t\t\t</td>\n\t\t</tr>\n\t{%- endif -%}\n\t</tbody>\n</table>\n<p>{{ doc.terms or \"\" }}</p>\n<p class=\"text-center\">{{ _(\"Thank you, please visit again.\") }}</p>",
"idx": 0,
"line_breaks": 0,
"modified": "2020-04-29 16:39:12.936215",
"modified_by": "Administrator",
"module": "Accounts",
"name": "GST POS Invoice",
"owner": "Administrator",
"print_format_builder": 0,
"print_format_type": "Jinja",
"raw_printing": 0,
"show_section_headings": 0,
"standard": "Yes"
}


@@ -354,9 +354,6 @@ def accumulate_values_into_parents(accounts, accounts_by_name, companies):
if d.parent_account:
account = d.parent_account_name
# if not accounts_by_name.get(account):
# continue
for company in companies:
accounts_by_name[account][company] = \
accounts_by_name[account].get(company, 0.0) + d.get(company, 0.0)
@@ -367,7 +364,7 @@ def accumulate_values_into_parents(accounts, accounts_by_name, companies):
accounts_by_name[account].get("opening_balance", 0.0) + d.get("opening_balance", 0.0)
def get_account_heads(root_type, companies, filters):
accounts = get_accounts(root_type, filters)
accounts = get_accounts(root_type, companies)
if not accounts:
return None, None, None
@@ -396,7 +393,7 @@ def update_parent_account_names(accounts):
for account in accounts:
if account.parent_account:
account["parent_account_name"] = name_to_account_map[account.parent_account]
account["parent_account_name"] = name_to_account_map.get(account.parent_account)
return accounts
@@ -419,12 +416,19 @@ def get_subsidiary_companies(company):
return frappe.db.sql_list("""select name from `tabCompany`
where lft >= {0} and rgt <= {1} order by lft, rgt""".format(lft, rgt))
def get_accounts(root_type, filters):
return frappe.db.sql(""" select name, is_group, company,
parent_account, lft, rgt, root_type, report_type, account_name, account_number
from
`tabAccount` where company = %s and root_type = %s
""" , (filters.get('company'), root_type), as_dict=1)
def get_accounts(root_type, companies):
accounts = []
added_accounts = []
for company in companies:
for account in frappe.get_all("Account", fields=["name", "is_group", "company",
"parent_account", "lft", "rgt", "root_type", "report_type", "account_name", "account_number"],
filters={"company": company, "root_type": root_type}):
if account.account_name not in added_accounts:
accounts.append(account)
added_accounts.append(account.account_name)
return accounts
def prepare_data(accounts, start_date, end_date, balance_must_be, companies, company_currency, filters):
data = []


@@ -44,6 +44,11 @@ frappe.query_reports["Gross Profit"] = {
"parent_field": "parent_invoice",
"initial_depth": 3,
"formatter": function(value, row, column, data, default_formatter) {
if (column.fieldname == "sales_invoice" && column.options == "Item" && data.indent == 0) {
column._options = "Sales Invoice";
} else {
column._options = "Item";
}
value = default_formatter(value, row, column, data);
if (data && (data.indent == 0.0 || row[1].content == "Total")) {


@@ -23,7 +23,7 @@ def validate_filters(filters):
def get_result(filters, tds_docs, tds_accounts, tax_category_map):
supplier_map = get_supplier_pan_map()
tax_rate_map = get_tax_rate_map(filters)
gle_map = get_gle_map(filters, tds_docs)
gle_map = get_gle_map(tds_docs)
out = []
for name, details in gle_map.items():
@@ -78,7 +78,7 @@ def get_supplier_pan_map():
return supplier_map
def get_gle_map(filters, documents):
def get_gle_map(documents):
# create gle_map of the form
# {"purchase_invoice": list of dict of all gle created for this invoice}
gle_map = {}
@@ -86,7 +86,7 @@ def get_gle_map(filters, documents):
gle = frappe.db.get_all('GL Entry',
{
"voucher_no": ["in", documents],
"credit": (">", 0)
"is_cancelled": 0
},
["credit", "debit", "account", "voucher_no", "posting_date", "voucher_type", "against", "party"],
)
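get_gle_map above now filters GL Entries on is_cancelled instead of credit > 0 and groups them by voucher. An illustrative sketch of the structure described by the comment; voucher names and amounts are invented:

gle_map = {
    "ACC-PINV-2022-00001": [
        {"account": "TDS Payable - _TC", "credit": 1000.0, "debit": 0.0, "voucher_type": "Purchase Invoice"},
        {"account": "Creditors - _TC", "credit": 0.0, "debit": 1000.0, "voucher_type": "Purchase Invoice"},
    ],
}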
@@ -184,21 +184,28 @@ def get_tds_docs(filters):
payment_entries = []
journal_entries = []
tax_category_map = {}
or_filters = {}
bank_accounts = frappe.get_all('Account', {'is_group': 0, 'account_type': 'Bank'}, pluck="name")
tds_accounts = frappe.get_all("Tax Withholding Account", {'company': filters.get('company')},
pluck="account")
query_filters = {
"credit": ('>', 0),
"account": ("in", tds_accounts),
"posting_date": ("between", [filters.get("from_date"), filters.get("to_date")]),
"is_cancelled": 0
"is_cancelled": 0,
"against": ("not in", bank_accounts)
}
if filters.get('supplier'):
query_filters.update({'against': filters.get('supplier')})
if filters.get("supplier"):
del query_filters["account"]
del query_filters["against"]
or_filters = {
"against": filters.get('supplier'),
"party": filters.get('supplier')
}
tds_docs = frappe.get_all("GL Entry", query_filters, ["voucher_no", "voucher_type", "against", "party"])
tds_docs = frappe.get_all("GL Entry", filters=query_filters, or_filters=or_filters, fields=["voucher_no", "voucher_type", "against", "party"])
for d in tds_docs:
if d.voucher_type == "Purchase Invoice":


@@ -682,17 +682,18 @@ class TestPurchaseOrder(unittest.TestCase):
bin1 = frappe.db.get_value("Bin",
filters={"warehouse": "_Test Warehouse - _TC", "item_code": "_Test Item"},
fieldname=["reserved_qty_for_sub_contract", "projected_qty"], as_dict=1)
fieldname=["reserved_qty_for_sub_contract", "projected_qty", "modified"], as_dict=1)
# Submit PO
po = create_purchase_order(item_code="_Test FG Item", is_subcontracted="Yes")
bin2 = frappe.db.get_value("Bin",
filters={"warehouse": "_Test Warehouse - _TC", "item_code": "_Test Item"},
fieldname=["reserved_qty_for_sub_contract", "projected_qty"], as_dict=1)
fieldname=["reserved_qty_for_sub_contract", "projected_qty", "modified"], as_dict=1)
self.assertEqual(bin2.reserved_qty_for_sub_contract, bin1.reserved_qty_for_sub_contract + 10)
self.assertEqual(bin2.projected_qty, bin1.projected_qty - 10)
self.assertNotEqual(bin1.modified, bin2.modified)
# Create stock transfer
rm_item = [{"item_code":"_Test FG Item","rm_item_code":"_Test Item","item_name":"_Test Item",


@@ -49,7 +49,7 @@ valid_scorecard = [
"min_grade":0.0,"name":"Very Poor",
"prevent_rfqs":1,
"notify_supplier":0,
"doctype":"Supplier Scorecard Standing",
"doctype":"Supplier Scorecard Scoring Standing",
"max_grade":30.0,
"prevent_pos":1,
"warn_pos":0,
@@ -65,7 +65,7 @@ valid_scorecard = [
"name":"Poor",
"prevent_rfqs":1,
"notify_supplier":0,
"doctype":"Supplier Scorecard Standing",
"doctype":"Supplier Scorecard Scoring Standing",
"max_grade":50.0,
"prevent_pos":0,
"warn_pos":0,
@@ -81,7 +81,7 @@ valid_scorecard = [
"name":"Average",
"prevent_rfqs":0,
"notify_supplier":0,
"doctype":"Supplier Scorecard Standing",
"doctype":"Supplier Scorecard Scoring Standing",
"max_grade":80.0,
"prevent_pos":0,
"warn_pos":0,
@@ -97,7 +97,7 @@ valid_scorecard = [
"name":"Excellent",
"prevent_rfqs":0,
"notify_supplier":0,
"doctype":"Supplier Scorecard Standing",
"doctype":"Supplier Scorecard Scoring Standing",
"max_grade":100.0,
"prevent_pos":0,
"warn_pos":0,


@@ -407,6 +407,22 @@ class AccountsController(TransactionBase):
if item_qty != len(get_serial_nos(item.get('serial_no'))):
item.set(fieldname, value)
elif (
ret.get("pricing_rule_removed")
and value is not None
and fieldname
in [
"discount_percentage",
"discount_amount",
"rate",
"margin_rate_or_amount",
"margin_type",
"remove_free_item",
]
):
# reset pricing rule fields if pricing_rule_removed
item.set(fieldname, value)
if self.doctype in ["Purchase Invoice", "Sales Invoice"] and item.meta.get_field('is_fixed_asset'):
item.set('is_fixed_asset', ret.get('is_fixed_asset', 0))
@@ -1318,6 +1334,9 @@ class AccountsController(TransactionBase):
payment_schedule['discount_type'] = schedule.discount_type
payment_schedule['discount'] = schedule.discount
if not schedule.invoice_portion:
payment_schedule['payment_amount'] = schedule.payment_amount
self.append("payment_schedule", payment_schedule)
def set_due_date(self):
@@ -1936,7 +1955,8 @@ def update_bin_on_delete(row, doctype):
qty_dict["ordered_qty"] = get_ordered_qty(row.item_code, row.warehouse)
update_bin_qty(row.item_code, row.warehouse, qty_dict)
if row.warehouse:
update_bin_qty(row.item_code, row.warehouse, qty_dict)
def validate_and_delete_children(parent, data):
deleted_children = []


@@ -106,6 +106,9 @@ class calculate_taxes_and_totals(object):
self.doc.conversion_rate = flt(self.doc.conversion_rate)
def calculate_item_values(self):
if self.doc.get('is_consolidated'):
return
if not self.discount_amount_applied:
for item in self.doc.get("items"):
self.doc.round_floats_in(item)
@@ -647,12 +650,12 @@ class calculate_taxes_and_totals(object):
def calculate_change_amount(self):
self.doc.change_amount = 0.0
self.doc.base_change_amount = 0.0
grand_total = self.doc.rounded_total or self.doc.grand_total
base_grand_total = self.doc.base_rounded_total or self.doc.base_grand_total
if self.doc.doctype == "Sales Invoice" \
and self.doc.paid_amount > self.doc.grand_total and not self.doc.is_return \
and self.doc.paid_amount > grand_total and not self.doc.is_return \
and any(d.type == "Cash" for d in self.doc.payments):
grand_total = self.doc.rounded_total or self.doc.grand_total
base_grand_total = self.doc.base_rounded_total or self.doc.base_grand_total
self.doc.change_amount = flt(self.doc.paid_amount - grand_total +
self.doc.write_off_amount, self.doc.precision("change_amount"))
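The calculate_change_amount fix above compares paid_amount against the rounded grand total, so a cash payment that covers the unrounded total but not the rounded one no longer produces a negative change amount. A small numeric sketch with hypothetical figures:

# Hypothetical figures illustrating the rounded-total comparison above.
grand_total = 999.60
rounded_total = 1000.00
paid_amount = 999.80

# before: paid_amount > grand_total (999.80 > 999.60) held, booking change of 999.80 - 1000.00 = -0.20
# after:  paid_amount > rounded_total is False, so change_amount stays 0.0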


@@ -3,7 +3,7 @@
"allow_events_in_timeline": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"allow_rename": 1,
"autoname": "field:lost_reason",
"beta": 0,
"creation": "2018-12-28 14:48:51.044975",
@@ -57,7 +57,7 @@
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2018-12-28 14:49:43.336437",
"modified": "2022-02-16 10:49:43.336437",
"modified_by": "Administrator",
"module": "CRM",
"name": "Opportunity Lost Reason",
@@ -150,4 +150,4 @@
"track_changes": 0,
"track_seen": 0,
"track_views": 0
}
}


@@ -66,26 +66,24 @@ class ItemVariantsCacheManager:
)
]
# join with Website Item
item_variants_data = frappe.get_all(
'Item Variant Attribute',
{'variant_of': parent_item_code},
['parent', 'attribute', 'attribute_value'],
order_by='name',
as_list=1
)
disabled_items = set(
[i.name for i in frappe.db.get_all('Item', {'disabled': 1})]
# Get Variants and their Attributes that are not disabled
iva = frappe.qb.DocType("Item Variant Attribute")
item = frappe.qb.DocType("Item")
query = (
frappe.qb.from_(iva)
.join(item).on(item.name == iva.parent)
.select(
iva.parent, iva.attribute, iva.attribute_value
).where(
(iva.variant_of == parent_item_code)
& (item.disabled == 0)
).orderby(iva.name)
)
item_variants_data = query.run()
attribute_value_item_map = frappe._dict()
item_attribute_value_map = frappe._dict()
# dont consider variants that are disabled
# pull all other variants
item_variants_data = [r for r in item_variants_data if r[0] not in disabled_items]
for row in item_variants_data:
item_code, attribute, attribute_value = row
# (attr, value) => [item1, item2]
@@ -124,4 +122,7 @@ def build_cache(item_code):
def enqueue_build_cache(item_code):
if frappe.cache().hget('item_cache_build_in_progress', item_code):
return
frappe.enqueue(build_cache, item_code=item_code, queue='long')
frappe.enqueue(
"erpnext.e_commerce.variant_selector.item_variants_cache.build_cache",
item_code=item_code, queue='long'
)


@@ -104,6 +104,8 @@ class TestVariantSelector(ERPNextTestCase):
})
make_web_item_price(item_code="Test-Tshirt-Temp-S-R", price_list_rate=100)
frappe.local.shopping_cart_settings = None # clear cached settings values
next_values = get_next_attribute_and_values(
"Test-Tshirt-Temp",
selected_attributes={"Test Size": "Small", "Test Colour": "Red"}


@@ -1,524 +0,0 @@
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
import csv
import math
import time
from io import StringIO
import dateutil
import frappe
from frappe import _
import erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_mws_api as mws
#Get and Create Products
def get_products_details():
products = get_products_instance()
reports = get_reports_instance()
mws_settings = frappe.get_doc("Amazon MWS Settings")
market_place_list = return_as_list(mws_settings.market_place_id)
for marketplace in market_place_list:
report_id = request_and_fetch_report_id("_GET_FLAT_FILE_OPEN_LISTINGS_DATA_", None, None, market_place_list)
if report_id:
listings_response = reports.get_report(report_id=report_id)
#Get ASIN Codes
string_io = StringIO(frappe.safe_decode(listings_response.original))
csv_rows = list(csv.reader(string_io, delimiter='\t'))
asin_list = list(set([row[1] for row in csv_rows[1:]]))
#break into chunks of 10
asin_chunked_list = list(chunks(asin_list, 10))
#Map ASIN Codes to SKUs
sku_asin = [{"asin":row[1],"sku":row[0]} for row in csv_rows[1:]]
#Fetch Products List from ASIN
for asin_list in asin_chunked_list:
products_response = call_mws_method(products.get_matching_product,marketplaceid=marketplace,
asins=asin_list)
matching_products_list = products_response.parsed
for product in matching_products_list:
skus = [row["sku"] for row in sku_asin if row["asin"]==product.ASIN]
for sku in skus:
create_item_code(product, sku)
def get_products_instance():
mws_settings = frappe.get_doc("Amazon MWS Settings")
products = mws.Products(
account_id = mws_settings.seller_id,
access_key = mws_settings.aws_access_key_id,
secret_key = mws_settings.secret_key,
region = mws_settings.region,
domain = mws_settings.domain
)
return products
def get_reports_instance():
mws_settings = frappe.get_doc("Amazon MWS Settings")
reports = mws.Reports(
account_id = mws_settings.seller_id,
access_key = mws_settings.aws_access_key_id,
secret_key = mws_settings.secret_key,
region = mws_settings.region,
domain = mws_settings.domain
)
return reports
#returns list as expected by amazon API
def return_as_list(input_value):
if isinstance(input_value, list):
return input_value
else:
return [input_value]
#function to chunk product data
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i+n]
def request_and_fetch_report_id(report_type, start_date=None, end_date=None, marketplaceids=None):
reports = get_reports_instance()
report_response = reports.request_report(report_type=report_type,
start_date=start_date,
end_date=end_date,
marketplaceids=marketplaceids)
report_request_id = report_response.parsed["ReportRequestInfo"]["ReportRequestId"]["value"]
generated_report_id = None
#poll to get generated report
for x in range(1,10):
report_request_list_response = reports.get_report_request_list(requestids=[report_request_id])
report_status = report_request_list_response.parsed["ReportRequestInfo"]["ReportProcessingStatus"]["value"]
if report_status == "_SUBMITTED_" or report_status == "_IN_PROGRESS_":
#add time delay to wait for amazon to generate report
time.sleep(15)
continue
elif report_status == "_CANCELLED_":
break
elif report_status == "_DONE_NO_DATA_":
break
elif report_status == "_DONE_":
generated_report_id = report_request_list_response.parsed["ReportRequestInfo"]["GeneratedReportId"]["value"]
break
return generated_report_id
def call_mws_method(mws_method, *args, **kwargs):
mws_settings = frappe.get_doc("Amazon MWS Settings")
max_retries = mws_settings.max_retry_limit
for x in range(0, max_retries):
try:
response = mws_method(*args, **kwargs)
return response
except Exception as e:
delay = math.pow(4, x) * 125
frappe.log_error(message=e, title=f'Method "{mws_method.__name__}" failed')
time.sleep(delay)
continue
mws_settings.enable_sync = 0
mws_settings.save()
frappe.throw(_("Sync has been temporarily disabled because maximum retries have been exceeded"))
def create_item_code(amazon_item_json, sku):
if frappe.db.get_value("Item", sku):
return
item = frappe.new_doc("Item")
new_manufacturer = create_manufacturer(amazon_item_json)
new_brand = create_brand(amazon_item_json)
mws_settings = frappe.get_doc("Amazon MWS Settings")
item.item_code = sku
item.amazon_item_code = amazon_item_json.ASIN
item.item_group = mws_settings.item_group
item.description = amazon_item_json.Product.AttributeSets.ItemAttributes.Title
item.brand = new_brand
item.manufacturer = new_manufacturer
item.image = amazon_item_json.Product.AttributeSets.ItemAttributes.SmallImage.URL
temp_item_group = amazon_item_json.Product.AttributeSets.ItemAttributes.ProductGroup
item_group = frappe.db.get_value("Item Group",filters={"item_group_name": temp_item_group})
if not item_group:
igroup = frappe.new_doc("Item Group")
igroup.item_group_name = temp_item_group
igroup.parent_item_group = mws_settings.item_group
igroup.insert()
item.append("item_defaults", {'company':mws_settings.company})
item.insert(ignore_permissions=True)
create_item_price(amazon_item_json, item.item_code)
return item.name
def create_manufacturer(amazon_item_json):
if not amazon_item_json.Product.AttributeSets.ItemAttributes.Manufacturer:
return None
existing_manufacturer = frappe.db.get_value("Manufacturer",
filters={"short_name":amazon_item_json.Product.AttributeSets.ItemAttributes.Manufacturer})
if not existing_manufacturer:
manufacturer = frappe.new_doc("Manufacturer")
manufacturer.short_name = amazon_item_json.Product.AttributeSets.ItemAttributes.Manufacturer
manufacturer.insert()
return manufacturer.short_name
else:
return existing_manufacturer
def create_brand(amazon_item_json):
if not amazon_item_json.Product.AttributeSets.ItemAttributes.Brand:
return None
existing_brand = frappe.db.get_value("Brand",
filters={"brand":amazon_item_json.Product.AttributeSets.ItemAttributes.Brand})
if not existing_brand:
brand = frappe.new_doc("Brand")
brand.brand = amazon_item_json.Product.AttributeSets.ItemAttributes.Brand
brand.insert()
return brand.brand
else:
return existing_brand
def create_item_price(amazon_item_json, item_code):
item_price = frappe.new_doc("Item Price")
item_price.price_list = frappe.db.get_value("Amazon MWS Settings", "Amazon MWS Settings", "price_list")
if not("ListPrice" in amazon_item_json.Product.AttributeSets.ItemAttributes):
item_price.price_list_rate = 0
else:
item_price.price_list_rate = amazon_item_json.Product.AttributeSets.ItemAttributes.ListPrice.Amount
item_price.item_code = item_code
item_price.insert()
#Get and create Orders
def get_orders(after_date):
try:
orders = get_orders_instance()
statuses = ["PartiallyShipped", "Unshipped", "Shipped", "Canceled"]
mws_settings = frappe.get_doc("Amazon MWS Settings")
market_place_list = return_as_list(mws_settings.market_place_id)
orders_response = call_mws_method(orders.list_orders, marketplaceids=market_place_list,
fulfillment_channels=["MFN", "AFN"],
lastupdatedafter=after_date,
orderstatus=statuses,
max_results='50')
while True:
orders_list = []
if "Order" in orders_response.parsed.Orders:
orders_list = return_as_list(orders_response.parsed.Orders.Order)
if len(orders_list) == 0:
break
for order in orders_list:
create_sales_order(order, after_date)
if not "NextToken" in orders_response.parsed:
break
next_token = orders_response.parsed.NextToken
orders_response = call_mws_method(orders.list_orders_by_next_token, next_token)
except Exception as e:
frappe.log_error(title="get_orders", message=e)
def get_orders_instance():
mws_settings = frappe.get_doc("Amazon MWS Settings")
orders = mws.Orders(
account_id = mws_settings.seller_id,
access_key = mws_settings.aws_access_key_id,
secret_key = mws_settings.secret_key,
region= mws_settings.region,
domain= mws_settings.domain,
version="2013-09-01"
)
return orders
def create_sales_order(order_json,after_date):
customer_name = create_customer(order_json)
create_address(order_json, customer_name)
market_place_order_id = order_json.AmazonOrderId
so = frappe.db.get_value("Sales Order",
filters={"amazon_order_id": market_place_order_id},
fieldname="name")
taxes_and_charges = frappe.db.get_value("Amazon MWS Settings", "Amazon MWS Settings", "taxes_charges")
if so:
return
if not so:
items = get_order_items(market_place_order_id)
delivery_date = dateutil.parser.parse(order_json.LatestShipDate).strftime("%Y-%m-%d")
transaction_date = dateutil.parser.parse(order_json.PurchaseDate).strftime("%Y-%m-%d")
so = frappe.get_doc({
"doctype": "Sales Order",
"naming_series": "SO-",
"amazon_order_id": market_place_order_id,
"marketplace_id": order_json.MarketplaceId,
"customer": customer_name,
"delivery_date": delivery_date,
"transaction_date": transaction_date,
"items": items,
"company": frappe.db.get_value("Amazon MWS Settings", "Amazon MWS Settings", "company")
})
try:
if taxes_and_charges:
charges_and_fees = get_charges_and_fees(market_place_order_id)
for charge in charges_and_fees.get("charges"):
so.append('taxes', charge)
for fee in charges_and_fees.get("fees"):
so.append('taxes', fee)
so.insert(ignore_permissions=True)
so.submit()
except Exception as e:
import traceback
frappe.log_error(message=traceback.format_exc(), title="Create Sales Order")
def create_customer(order_json):
order_customer_name = ""
if not("BuyerName" in order_json):
order_customer_name = "Buyer - " + order_json.AmazonOrderId
else:
order_customer_name = order_json.BuyerName
existing_customer_name = frappe.db.get_value("Customer",
filters={"name": order_customer_name}, fieldname="name")
if existing_customer_name:
filters = [
["Dynamic Link", "link_doctype", "=", "Customer"],
["Dynamic Link", "link_name", "=", existing_customer_name],
["Dynamic Link", "parenttype", "=", "Contact"]
]
existing_contacts = frappe.get_list("Contact", filters)
if existing_contacts:
pass
else:
new_contact = frappe.new_doc("Contact")
new_contact.first_name = order_customer_name
new_contact.append('links', {
"link_doctype": "Customer",
"link_name": existing_customer_name
})
new_contact.insert()
return existing_customer_name
else:
mws_customer_settings = frappe.get_doc("Amazon MWS Settings")
new_customer = frappe.new_doc("Customer")
new_customer.customer_name = order_customer_name
new_customer.customer_group = mws_customer_settings.customer_group
new_customer.territory = mws_customer_settings.territory
new_customer.customer_type = mws_customer_settings.customer_type
new_customer.save()
new_contact = frappe.new_doc("Contact")
new_contact.first_name = order_customer_name
new_contact.append('links', {
"link_doctype": "Customer",
"link_name": new_customer.name
})
new_contact.insert()
return new_customer.name
def create_address(amazon_order_item_json, customer_name):
filters = [
["Dynamic Link", "link_doctype", "=", "Customer"],
["Dynamic Link", "link_name", "=", customer_name],
["Dynamic Link", "parenttype", "=", "Address"]
]
existing_address = frappe.get_list("Address", filters)
if not("ShippingAddress" in amazon_order_item_json):
return None
else:
make_address = frappe.new_doc("Address")
if "AddressLine1" in amazon_order_item_json.ShippingAddress:
make_address.address_line1 = amazon_order_item_json.ShippingAddress.AddressLine1
else:
make_address.address_line1 = "Not Provided"
if "City" in amazon_order_item_json.ShippingAddress:
make_address.city = amazon_order_item_json.ShippingAddress.City
else:
make_address.city = "Not Provided"
if "StateOrRegion" in amazon_order_item_json.ShippingAddress:
make_address.state = amazon_order_item_json.ShippingAddress.StateOrRegion
if "PostalCode" in amazon_order_item_json.ShippingAddress:
make_address.pincode = amazon_order_item_json.ShippingAddress.PostalCode
for address in existing_address:
address_doc = frappe.get_doc("Address", address["name"])
if (address_doc.address_line1 == make_address.address_line1 and
address_doc.pincode == make_address.pincode):
return address
make_address.append("links", {
"link_doctype": "Customer",
"link_name": customer_name
})
make_address.address_type = "Shipping"
make_address.insert()
def get_order_items(market_place_order_id):
mws_orders = get_orders_instance()
order_items_response = call_mws_method(mws_orders.list_order_items, amazon_order_id=market_place_order_id)
final_order_items = []
order_items_list = return_as_list(order_items_response.parsed.OrderItems.OrderItem)
warehouse = frappe.db.get_value("Amazon MWS Settings", "Amazon MWS Settings", "warehouse")
while True:
for order_item in order_items_list:
if not "ItemPrice" in order_item:
price = 0
else:
price = order_item.ItemPrice.Amount
final_order_items.append({
"item_code": get_item_code(order_item),
"item_name": order_item.SellerSKU,
"description": order_item.Title,
"rate": price,
"qty": order_item.QuantityOrdered,
"stock_uom": "Nos",
"warehouse": warehouse,
"conversion_factor": "1.0"
})
if not "NextToken" in order_items_response.parsed:
break
next_token = order_items_response.parsed.NextToken
order_items_response = call_mws_method(mws_orders.list_order_items_by_next_token, next_token)
order_items_list = return_as_list(order_items_response.parsed.OrderItems.OrderItem)
return final_order_items
def get_item_code(order_item):
sku = order_item.SellerSKU
item_code = frappe.db.get_value("Item", {"item_code": sku}, "item_code")
if item_code:
return item_code
def get_charges_and_fees(market_place_order_id):
finances = get_finances_instance()
charges_fees = {"charges":[], "fees":[]}
response = call_mws_method(finances.list_financial_events, amazon_order_id=market_place_order_id)
shipment_event_list = return_as_list(response.parsed.FinancialEvents.ShipmentEventList)
for shipment_event in shipment_event_list:
if shipment_event:
shipment_item_list = return_as_list(shipment_event.ShipmentEvent.ShipmentItemList.ShipmentItem)
for shipment_item in shipment_item_list:
charges, fees = [], []
if 'ItemChargeList' in shipment_item.keys():
charges = return_as_list(shipment_item.ItemChargeList.ChargeComponent)
if 'ItemFeeList' in shipment_item.keys():
fees = return_as_list(shipment_item.ItemFeeList.FeeComponent)
for charge in charges:
if(charge.ChargeType != "Principal") and float(charge.ChargeAmount.CurrencyAmount) != 0:
charge_account = get_account(charge.ChargeType)
charges_fees.get("charges").append({
"charge_type":"Actual",
"account_head": charge_account,
"tax_amount": charge.ChargeAmount.CurrencyAmount,
"description": charge.ChargeType + " for " + shipment_item.SellerSKU
})
for fee in fees:
if float(fee.FeeAmount.CurrencyAmount) != 0:
fee_account = get_account(fee.FeeType)
charges_fees.get("fees").append({
"charge_type":"Actual",
"account_head": fee_account,
"tax_amount": fee.FeeAmount.CurrencyAmount,
"description": fee.FeeType + " for " + shipment_item.SellerSKU
})
return charges_fees
def get_finances_instance():
mws_settings = frappe.get_doc("Amazon MWS Settings")
finances = mws.Finances(
account_id = mws_settings.seller_id,
access_key = mws_settings.aws_access_key_id,
secret_key = mws_settings.secret_key,
region= mws_settings.region,
domain= mws_settings.domain,
version="2015-05-01"
)
return finances
def get_account(name):
existing_account = frappe.db.get_value("Account", {"account_name": "Amazon {0}".format(name)})
account_name = existing_account
mws_settings = frappe.get_doc("Amazon MWS Settings")
if not existing_account:
try:
new_account = frappe.new_doc("Account")
new_account.account_name = "Amazon {0}".format(name)
new_account.company = mws_settings.company
new_account.parent_account = mws_settings.market_place_account_group
new_account.insert(ignore_permissions=True)
account_name = new_account.name
except Exception as e:
frappe.log_error(message=e, title="Create Account")
return account_name


@@ -1,651 +0,0 @@
#!/usr/bin/env python
#
# Basic interface to Amazon MWS
# Based on http://code.google.com/p/amazon-mws-python
# Extended to include finances object
import base64
import hashlib
import hmac
import re
from urllib.parse import quote
from erpnext.erpnext_integrations.doctype.amazon_mws_settings import xml_utils
try:
from xml.etree.ElementTree import ParseError as XMLError
except ImportError:
from xml.parsers.expat import ExpatError as XMLError
from time import gmtime, strftime
from requests import request
from requests.exceptions import HTTPError
__all__ = [
'Feeds',
'Inventory',
'MWSError',
'Reports',
'Orders',
'Products',
'Recommendations',
'Sellers',
'Finances'
]
# See https://images-na.ssl-images-amazon.com/images/G/01/mwsportal/doc/en_US/bde/MWSDeveloperGuide._V357736853_.pdf page 8
# for a list of the end points and marketplace IDs
MARKETPLACES = {
"CA": "https://mws.amazonservices.ca", #A2EUQ1WTGCTBG2
"US": "https://mws.amazonservices.com", #ATVPDKIKX0DER",
"DE": "https://mws-eu.amazonservices.com", #A1PA6795UKMFR9
"ES": "https://mws-eu.amazonservices.com", #A1RKKUPIHCS9HS
"FR": "https://mws-eu.amazonservices.com", #A13V1IB3VIYZZH
"IN": "https://mws.amazonservices.in", #A21TJRUUN4KGV
"IT": "https://mws-eu.amazonservices.com", #APJ6JRA9NG5V4
"UK": "https://mws-eu.amazonservices.com", #A1F83G8C2ARO7P
"JP": "https://mws.amazonservices.jp", #A1VC38T7YXB528
"CN": "https://mws.amazonservices.com.cn", #AAHKV2X7AFYLW
"AE": " https://mws.amazonservices.ae", #A2VIGQ35RCS4UG
"MX": "https://mws.amazonservices.com.mx", #A1AM78C64UM0Y8
"BR": "https://mws.amazonservices.com", #A2Q3Y263D00KWC
}
class MWSError(Exception):
"""
Main MWS Exception class
"""
# Allows quick access to the response object.
# Do not rely on this attribute, always check if its not None.
response = None
def calc_md5(string):
"""Calculates the MD5 encryption for the given string
"""
md = hashlib.md5()
md.update(string)
return base64.encodebytes(md.digest()).decode().strip()
def remove_empty(d):
"""
Helper function that removes all keys from a dictionary (d),
that have an empty value.
"""
for key in list(d):
if not d[key]:
del d[key]
return d
def remove_namespace(xml):
xml = xml.decode('utf-8')
regex = re.compile(' xmlns(:ns2)?="[^"]+"|(ns2:)|(xml:)')
return regex.sub('', xml)
class DictWrapper(object):
def __init__(self, xml, rootkey=None):
self.original = xml
self._rootkey = rootkey
self._mydict = xml_utils.xml2dict().fromstring(remove_namespace(xml))
self._response_dict = self._mydict.get(list(self._mydict)[0], self._mydict)
@property
def parsed(self):
if self._rootkey:
return self._response_dict.get(self._rootkey)
else:
return self._response_dict
class DataWrapper(object):
"""
Text wrapper in charge of validating the hash sent by Amazon.
"""
def __init__(self, data, header):
self.original = data
if 'content-md5' in header:
hash_ = calc_md5(self.original)
if header['content-md5'] != hash_:
raise MWSError("Wrong Contentlength, maybe amazon error...")
@property
def parsed(self):
return self.original
class MWS(object):
""" Base Amazon API class """
# This is used to post/get to the different uris used by amazon per api
# ie. /Orders/2011-01-01
# All subclasses must define their own URI only if needed
URI = "/"
# The API version varies in most amazon APIs
VERSION = "2009-01-01"
# There seem to be some xml namespace issues. therefore every api subclass
# is recommended to define its namespace, so that it can be referenced
# like so AmazonAPISubclass.NS.
# For more information see http://stackoverflow.com/a/8719461/389453
NS = ''
# Some APIs are available only to either a "Merchant" or "Seller"
# the type of account needs to be sent in every call to the amazon MWS.
# This constant defines the exact name of the parameter Amazon expects
# for the specific API being used.
# All subclasses need to define this if they require another account type
# like "Merchant" in which case you define it like so.
# ACCOUNT_TYPE = "Merchant"
# Which is the name of the parameter for that specific account type.
ACCOUNT_TYPE = "SellerId"
def __init__(self, access_key, secret_key, account_id, region='US', domain='', uri="", version=""):
self.access_key = access_key
self.secret_key = secret_key
self.account_id = account_id
self.version = version or self.VERSION
self.uri = uri or self.URI
if domain:
self.domain = domain
elif region in MARKETPLACES:
self.domain = MARKETPLACES[region]
else:
error_msg = "Incorrect region supplied ('%(region)s'). Must be one of the following: %(marketplaces)s" % {
"marketplaces" : ', '.join(MARKETPLACES.keys()),
"region" : region,
}
raise MWSError(error_msg)
def make_request(self, extra_data, method="GET", **kwargs):
"""Make request to Amazon MWS API with these parameters
"""
# Remove all keys with an empty value because
# Amazon's MWS does not allow such a thing.
extra_data = remove_empty(extra_data)
params = {
'AWSAccessKeyId': self.access_key,
self.ACCOUNT_TYPE: self.account_id,
'SignatureVersion': '2',
'Timestamp': self.get_timestamp(),
'Version': self.version,
'SignatureMethod': 'HmacSHA256',
}
params.update(extra_data)
request_description = '&'.join(['%s=%s' % (k, quote(params[k], safe='-_.~')) for k in sorted(params)])
signature = self.calc_signature(method, request_description)
url = '%s%s?%s&Signature=%s' % (self.domain, self.uri, request_description, quote(signature))
headers = {'User-Agent': 'python-amazon-mws/0.0.1 (Language=Python)'}
headers.update(kwargs.get('extra_headers', {}))
try:
# Some might wonder as to why i don't pass the params dict as the params argument to request.
# My answer is, here i have to get the url parsed string of params in order to sign it, so
# if i pass the params dict as params to request, request will repeat that step because it will need
# to convert the dict to a url parsed string, so why do it twice if i can just pass the full url :).
response = request(method, url, data=kwargs.get('body', ''), headers=headers)
response.raise_for_status()
# When retrieving data from the response object,
# be aware that response.content returns the content in bytes while response.text calls
# response.content and converts it to unicode.
data = response.content
# I do not check the headers to decide which content structure to server simply because sometimes
# Amazon's MWS API returns XML error responses with "text/plain" as the Content-Type.
try:
parsed_response = DictWrapper(data, extra_data.get("Action") + "Result")
except XMLError:
parsed_response = DataWrapper(data, response.headers)
except HTTPError as e:
error = MWSError(str(e))
error.response = e.response
raise error
# Store the response object in the parsed_response for quick access
parsed_response.response = response
return parsed_response
def get_service_status(self):
"""
Returns a GREEN, GREEN_I, YELLOW or RED status,
depending on the status/availability of the API it is being called from.
"""
return self.make_request(extra_data=dict(Action='GetServiceStatus'))
def calc_signature(self, method, request_description):
"""Calculate MWS signature to interface with Amazon
"""
sig_data = method + '\n' + self.domain.replace('https://', '').lower() + '\n' + self.uri + '\n' + request_description
sig_data = sig_data.encode('utf-8')
secret_key = self.secret_key.encode('utf-8')
digest = hmac.new(secret_key, sig_data, hashlib.sha256).digest()
return base64.b64encode(digest).decode('utf-8')
def get_timestamp(self):
"""
Returns the current timestamp in proper format.
"""
return strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
def enumerate_param(self, param, values):
"""
Builds a dictionary of an enumerated parameter.
Takes any iterable and returns a dictionary.
ie.
enumerate_param('MarketplaceIdList.Id', (123, 345, 4343))
returns
{
MarketplaceIdList.Id.1: 123,
MarketplaceIdList.Id.2: 345,
MarketplaceIdList.Id.3: 4343
}
"""
params = {}
if values is not None:
if not param.endswith('.'):
param = "%s." % param
for num, value in enumerate(values):
params['%s%d' % (param, (num + 1))] = value
return params
class Feeds(MWS):
""" Amazon MWS Feeds API """
ACCOUNT_TYPE = "Merchant"
def submit_feed(self, feed, feed_type, marketplaceids=None,
content_type="text/xml", purge='false'):
"""
Uploads a feed (xml or .tsv) to the seller's inventory.
Can be used for creating/updating products on Amazon.
"""
data = dict(Action='SubmitFeed',
FeedType=feed_type,
PurgeAndReplace=purge)
data.update(self.enumerate_param('MarketplaceIdList.Id.', marketplaceids))
md = calc_md5(feed)
return self.make_request(data, method="POST", body=feed,
extra_headers={'Content-MD5': md, 'Content-Type': content_type})
def get_feed_submission_list(self, feedids=None, max_count=None, feedtypes=None,
processingstatuses=None, fromdate=None, todate=None):
"""
Returns a list of all feed submissions submitted in the previous 90 days
that match the query parameters.
"""
data = dict(Action='GetFeedSubmissionList',
MaxCount=max_count,
SubmittedFromDate=fromdate,
SubmittedToDate=todate,)
data.update(self.enumerate_param('FeedSubmissionIdList.Id', feedids))
data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes))
data.update(self.enumerate_param('FeedProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def get_submission_list_by_next_token(self, token):
data = dict(Action='GetFeedSubmissionListByNextToken', NextToken=token)
return self.make_request(data)
def get_feed_submission_count(self, feedtypes=None, processingstatuses=None, fromdate=None, todate=None):
data = dict(Action='GetFeedSubmissionCount',
SubmittedFromDate=fromdate,
SubmittedToDate=todate)
data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes))
data.update(self.enumerate_param('FeedProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def cancel_feed_submissions(self, feedids=None, feedtypes=None, fromdate=None, todate=None):
data = dict(Action='CancelFeedSubmissions',
SubmittedFromDate=fromdate,
SubmittedToDate=todate)
data.update(self.enumerate_param('FeedSubmissionIdList.Id.', feedids))
data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes))
return self.make_request(data)
def get_feed_submission_result(self, feedid):
data = dict(Action='GetFeedSubmissionResult', FeedSubmissionId=feedid)
return self.make_request(data)
class Reports(MWS):
""" Amazon MWS Reports API """
ACCOUNT_TYPE = "Merchant"
### Reports ###
def get_report(self, report_id):
data = dict(Action='GetReport', ReportId=report_id)
return self.make_request(data)
def get_report_count(self, report_types=(), acknowledged=None, fromdate=None, todate=None):
data = dict(Action='GetReportCount',
Acknowledged=acknowledged,
AvailableFromDate=fromdate,
AvailableToDate=todate)
data.update(self.enumerate_param('ReportTypeList.Type.', report_types))
return self.make_request(data)
def get_report_list(self, requestids=(), max_count=None, types=(), acknowledged=None,
fromdate=None, todate=None):
data = dict(Action='GetReportList',
Acknowledged=acknowledged,
AvailableFromDate=fromdate,
AvailableToDate=todate,
MaxCount=max_count)
data.update(self.enumerate_param('ReportRequestIdList.Id.', requestids))
data.update(self.enumerate_param('ReportTypeList.Type.', types))
return self.make_request(data)
def get_report_list_by_next_token(self, token):
data = dict(Action='GetReportListByNextToken', NextToken=token)
return self.make_request(data)
def get_report_request_count(self, report_types=(), processingstatuses=(), fromdate=None, todate=None):
data = dict(Action='GetReportRequestCount',
RequestedFromDate=fromdate,
RequestedToDate=todate)
data.update(self.enumerate_param('ReportTypeList.Type.', report_types))
data.update(self.enumerate_param('ReportProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def get_report_request_list(self, requestids=(), types=(), processingstatuses=(),
max_count=None, fromdate=None, todate=None):
data = dict(Action='GetReportRequestList',
MaxCount=max_count,
RequestedFromDate=fromdate,
RequestedToDate=todate)
data.update(self.enumerate_param('ReportRequestIdList.Id.', requestids))
data.update(self.enumerate_param('ReportTypeList.Type.', types))
data.update(self.enumerate_param('ReportProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def get_report_request_list_by_next_token(self, token):
data = dict(Action='GetReportRequestListByNextToken', NextToken=token)
return self.make_request(data)
def request_report(self, report_type, start_date=None, end_date=None, marketplaceids=()):
data = dict(Action='RequestReport',
ReportType=report_type,
StartDate=start_date,
EndDate=end_date)
data.update(self.enumerate_param('MarketplaceIdList.Id.', marketplaceids))
return self.make_request(data)
### ReportSchedule ###
def get_report_schedule_list(self, types=()):
data = dict(Action='GetReportScheduleList')
data.update(self.enumerate_param('ReportTypeList.Type.', types))
return self.make_request(data)
def get_report_schedule_count(self, types=()):
data = dict(Action='GetReportScheduleCount')
data.update(self.enumerate_param('ReportTypeList.Type.', types))
return self.make_request(data)
class Orders(MWS):
""" Amazon Orders API """
URI = "/Orders/2013-09-01"
VERSION = "2013-09-01"
NS = '{https://mws.amazonservices.com/Orders/2011-01-01}'
def list_orders(self, marketplaceids, created_after=None, created_before=None, lastupdatedafter=None,
lastupdatedbefore=None, orderstatus=(), fulfillment_channels=(),
payment_methods=(), buyer_email=None, seller_orderid=None, max_results='100'):
data = dict(Action='ListOrders',
CreatedAfter=created_after,
CreatedBefore=created_before,
LastUpdatedAfter=lastupdatedafter,
LastUpdatedBefore=lastupdatedbefore,
BuyerEmail=buyer_email,
SellerOrderId=seller_orderid,
MaxResultsPerPage=max_results,
)
data.update(self.enumerate_param('OrderStatus.Status.', orderstatus))
data.update(self.enumerate_param('MarketplaceId.Id.', marketplaceids))
data.update(self.enumerate_param('FulfillmentChannel.Channel.', fulfillment_channels))
data.update(self.enumerate_param('PaymentMethod.Method.', payment_methods))
return self.make_request(data)
def list_orders_by_next_token(self, token):
data = dict(Action='ListOrdersByNextToken', NextToken=token)
return self.make_request(data)
def get_order(self, amazon_order_ids):
data = dict(Action='GetOrder')
data.update(self.enumerate_param('AmazonOrderId.Id.', amazon_order_ids))
return self.make_request(data)
def list_order_items(self, amazon_order_id):
data = dict(Action='ListOrderItems', AmazonOrderId=amazon_order_id)
return self.make_request(data)
def list_order_items_by_next_token(self, token):
data = dict(Action='ListOrderItemsByNextToken', NextToken=token)
return self.make_request(data)
class Products(MWS):
""" Amazon MWS Products API """
URI = '/Products/2011-10-01'
VERSION = '2011-10-01'
NS = '{http://mws.amazonservices.com/schema/Products/2011-10-01}'
def list_matching_products(self, marketplaceid, query, contextid=None):
""" Returns a list of products and their attributes, ordered by
relevancy, based on a search query that you specify.
Your search query can be a phrase that describes the product
or it can be a product identifier such as a UPC, EAN, ISBN, or JAN.
"""
data = dict(Action='ListMatchingProducts',
MarketplaceId=marketplaceid,
Query=query,
QueryContextId=contextid)
return self.make_request(data)
def get_matching_product(self, marketplaceid, asins):
""" Returns a list of products and their attributes, based on a list of
ASIN values that you specify.
"""
data = dict(Action='GetMatchingProduct', MarketplaceId=marketplaceid)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
def get_matching_product_for_id(self, marketplaceid, type, id):
""" Returns a list of products and their attributes, based on a list of
product identifier values (ASIN, SellerSKU, UPC, EAN, ISBN or JAN).
Added in Fourth Release, API version 2011-10-01
"""
data = dict(Action='GetMatchingProductForId',
MarketplaceId=marketplaceid,
IdType=type)
data.update(self.enumerate_param('IdList.Id', id))
return self.make_request(data)
def get_competitive_pricing_for_sku(self, marketplaceid, skus):
""" Returns the current competitive pricing of a product,
based on the SellerSKU and MarketplaceId that you specify.
"""
data = dict(Action='GetCompetitivePricingForSKU', MarketplaceId=marketplaceid)
data.update(self.enumerate_param('SellerSKUList.SellerSKU.', skus))
return self.make_request(data)
def get_competitive_pricing_for_asin(self, marketplaceid, asins):
""" Returns the current competitive pricing of a product,
based on the ASIN and MarketplaceId that you specify.
"""
data = dict(Action='GetCompetitivePricingForASIN', MarketplaceId=marketplaceid)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
def get_lowest_offer_listings_for_sku(self, marketplaceid, skus, condition="Any", excludeme="False"):
data = dict(Action='GetLowestOfferListingsForSKU',
MarketplaceId=marketplaceid,
ItemCondition=condition,
ExcludeMe=excludeme)
data.update(self.enumerate_param('SellerSKUList.SellerSKU.', skus))
return self.make_request(data)
def get_lowest_offer_listings_for_asin(self, marketplaceid, asins, condition="Any", excludeme="False"):
data = dict(Action='GetLowestOfferListingsForASIN',
MarketplaceId=marketplaceid,
ItemCondition=condition,
ExcludeMe=excludeme)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
def get_product_categories_for_sku(self, marketplaceid, sku):
data = dict(Action='GetProductCategoriesForSKU',
MarketplaceId=marketplaceid,
SellerSKU=sku)
return self.make_request(data)
def get_product_categories_for_asin(self, marketplaceid, asin):
data = dict(Action='GetProductCategoriesForASIN',
MarketplaceId=marketplaceid,
ASIN=asin)
return self.make_request(data)
def get_my_price_for_sku(self, marketplaceid, skus, condition=None):
data = dict(Action='GetMyPriceForSKU',
MarketplaceId=marketplaceid,
ItemCondition=condition)
data.update(self.enumerate_param('SellerSKUList.SellerSKU.', skus))
return self.make_request(data)
def get_my_price_for_asin(self, marketplaceid, asins, condition=None):
data = dict(Action='GetMyPriceForASIN',
MarketplaceId=marketplaceid,
ItemCondition=condition)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
class Sellers(MWS):
""" Amazon MWS Sellers API """
URI = '/Sellers/2011-07-01'
VERSION = '2011-07-01'
NS = '{http://mws.amazonservices.com/schema/Sellers/2011-07-01}'
def list_marketplace_participations(self):
"""
Returns a list of marketplaces a seller can participate in and
a list of participations that include seller-specific information in that marketplace.
The operation returns only those marketplaces where the seller's account is in an active state.
"""
data = dict(Action='ListMarketplaceParticipations')
return self.make_request(data)
def list_marketplace_participations_by_next_token(self, token):
"""
Takes a "NextToken" and returns the same information as "list_marketplace_participations".
Based on the "NextToken".
"""
data = dict(Action='ListMarketplaceParticipations', NextToken=token)
return self.make_request(data)
#### Fulfillment APIs ####
class InboundShipments(MWS):
URI = "/FulfillmentInboundShipment/2010-10-01"
VERSION = '2010-10-01'
# To be completed
class Inventory(MWS):
""" Amazon MWS Inventory Fulfillment API """
URI = '/FulfillmentInventory/2010-10-01'
VERSION = '2010-10-01'
NS = "{http://mws.amazonaws.com/FulfillmentInventory/2010-10-01}"
def list_inventory_supply(self, skus=(), datetime=None, response_group='Basic'):
""" Returns information on available inventory """
data = dict(Action='ListInventorySupply',
QueryStartDateTime=datetime,
ResponseGroup=response_group,
)
data.update(self.enumerate_param('SellerSkus.member.', skus))
return self.make_request(data, "POST")
def list_inventory_supply_by_next_token(self, token):
data = dict(Action='ListInventorySupplyByNextToken', NextToken=token)
return self.make_request(data, "POST")
class OutboundShipments(MWS):
URI = "/FulfillmentOutboundShipment/2010-10-01"
VERSION = "2010-10-01"
# To be completed
class Recommendations(MWS):
""" Amazon MWS Recommendations API """
URI = '/Recommendations/2013-04-01'
VERSION = '2013-04-01'
NS = "{https://mws.amazonservices.com/Recommendations/2013-04-01}"
def get_last_updated_time_for_recommendations(self, marketplaceid):
"""
Checks whether there are active recommendations for each category for the given marketplace, and if there are,
returns the time when recommendations were last updated for each category.
"""
data = dict(Action='GetLastUpdatedTimeForRecommendations',
MarketplaceId=marketplaceid)
return self.make_request(data, "POST")
def list_recommendations(self, marketplaceid, recommendationcategory=None):
"""
Returns your active recommendations for a specific category or for all categories for a specific marketplace.
"""
data = dict(Action="ListRecommendations",
MarketplaceId=marketplaceid,
RecommendationCategory=recommendationcategory)
return self.make_request(data, "POST")
def list_recommendations_by_next_token(self, token):
"""
Returns the next page of recommendations using the NextToken parameter.
"""
data = dict(Action="ListRecommendationsByNextToken",
NextToken=token)
return self.make_request(data, "POST")
class Finances(MWS):
""" Amazon Finances API"""
URI = '/Finances/2015-05-01'
VERSION = '2015-05-01'
NS = "{https://mws.amazonservices.com/Finances/2015-05-01}"
def list_financial_events(self, posted_after=None, posted_before=None,
amazon_order_id=None, max_results='100'):
data = dict(Action='ListFinancialEvents',
PostedAfter=posted_after,
PostedBefore=posted_before,
AmazonOrderId=amazon_order_id,
MaxResultsPerPage=max_results,
)
return self.make_request(data)
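For reference, a minimal usage sketch of the wrappers above; the credentials and marketplace id below are placeholders for illustration only, not values from this repository:

# Hypothetical credentials, purely illustrative
orders_api = Orders("ACCESS_KEY", "SECRET_KEY", "SELLER_ID", region="US")
response = orders_api.list_orders(
    marketplaceids=["ATVPDKIKX0DER"],  # placeholder marketplace id
    created_after="2018-01-01T00:00:00Z",
)
print(response.parsed)  # DictWrapper/DataWrapper produced by make_request()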

View File

@ -1,2 +0,0 @@
// Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt

View File

@ -1,237 +0,0 @@
{
"actions": [],
"creation": "2018-07-31 05:51:41.357047",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"enable_amazon",
"mws_credentials",
"seller_id",
"aws_access_key_id",
"mws_auth_token",
"secret_key",
"column_break_4",
"market_place_id",
"region",
"domain",
"section_break_13",
"company",
"warehouse",
"item_group",
"price_list",
"column_break_17",
"customer_group",
"territory",
"customer_type",
"market_place_account_group",
"section_break_12",
"after_date",
"taxes_charges",
"sync_products",
"sync_orders",
"column_break_10",
"enable_sync",
"max_retry_limit"
],
"fields": [
{
"default": "0",
"fieldname": "enable_amazon",
"fieldtype": "Check",
"label": "Enable Amazon"
},
{
"fieldname": "mws_credentials",
"fieldtype": "Section Break",
"label": "MWS Credentials"
},
{
"fieldname": "seller_id",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Seller ID",
"reqd": 1
},
{
"fieldname": "aws_access_key_id",
"fieldtype": "Data",
"in_list_view": 1,
"label": "AWS Access Key ID",
"reqd": 1
},
{
"fieldname": "mws_auth_token",
"fieldtype": "Data",
"in_list_view": 1,
"label": "MWS Auth Token",
"reqd": 1
},
{
"fieldname": "secret_key",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Secret Key",
"reqd": 1
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"fieldname": "market_place_id",
"fieldtype": "Data",
"label": "Market Place ID",
"reqd": 1
},
{
"fieldname": "region",
"fieldtype": "Select",
"label": "Region",
"options": "\nAE\nAU\nBR\nCA\nCN\nDE\nES\nFR\nIN\nJP\nIT\nMX\nUK\nUS",
"reqd": 1
},
{
"fieldname": "domain",
"fieldtype": "Data",
"label": "Domain",
"reqd": 1
},
{
"fieldname": "section_break_13",
"fieldtype": "Section Break"
},
{
"fieldname": "company",
"fieldtype": "Link",
"label": "Company",
"options": "Company",
"reqd": 1
},
{
"fieldname": "warehouse",
"fieldtype": "Link",
"label": "Warehouse",
"options": "Warehouse",
"reqd": 1
},
{
"fieldname": "item_group",
"fieldtype": "Link",
"label": "Item Group",
"options": "Item Group",
"reqd": 1
},
{
"fieldname": "price_list",
"fieldtype": "Link",
"label": "Price List",
"options": "Price List",
"reqd": 1
},
{
"fieldname": "column_break_17",
"fieldtype": "Column Break"
},
{
"fieldname": "customer_group",
"fieldtype": "Link",
"label": "Customer Group",
"options": "Customer Group",
"reqd": 1
},
{
"fieldname": "territory",
"fieldtype": "Link",
"label": "Territory",
"options": "Territory",
"reqd": 1
},
{
"fieldname": "customer_type",
"fieldtype": "Select",
"label": "Customer Type",
"options": "Individual\nCompany",
"reqd": 1
},
{
"fieldname": "market_place_account_group",
"fieldtype": "Link",
"label": "Market Place Account Group",
"options": "Account",
"reqd": 1
},
{
"fieldname": "section_break_12",
"fieldtype": "Section Break"
},
{
"description": "Amazon will synch data updated after this date",
"fieldname": "after_date",
"fieldtype": "Datetime",
"label": "After Date",
"reqd": 1
},
{
"default": "0",
"description": "Get financial breakup of Taxes and charges data by Amazon ",
"fieldname": "taxes_charges",
"fieldtype": "Check",
"label": "Sync Taxes and Charges"
},
{
"fieldname": "column_break_10",
"fieldtype": "Column Break"
},
{
"default": "3",
"fieldname": "max_retry_limit",
"fieldtype": "Int",
"label": "Max Retry Limit"
},
{
"description": "Always sync your products from Amazon MWS before synching the Orders details",
"fieldname": "sync_products",
"fieldtype": "Button",
"label": "Sync Products",
"options": "get_products_details"
},
{
"description": "Click this button to pull your Sales Order data from Amazon MWS.",
"fieldname": "sync_orders",
"fieldtype": "Button",
"label": "Sync Orders",
"options": "get_order_details"
},
{
"default": "0",
"description": "Check this to enable a scheduled Daily synchronization routine via scheduler",
"fieldname": "enable_sync",
"fieldtype": "Check",
"label": "Enable Scheduled Sync"
}
],
"issingle": 1,
"links": [],
"modified": "2020-04-07 14:26:20.174848",
"modified_by": "Administrator",
"module": "ERPNext Integrations",
"name": "Amazon MWS Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View File

@ -1,46 +0,0 @@
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
import dateutil
import frappe
from frappe.custom.doctype.custom_field.custom_field import create_custom_fields
from frappe.model.document import Document
from erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_methods import get_orders
class AmazonMWSSettings(Document):
def validate(self):
if self.enable_amazon == 1:
self.enable_sync = 1
setup_custom_fields()
else:
self.enable_sync = 0
@frappe.whitelist()
def get_products_details(self):
if self.enable_amazon == 1:
frappe.enqueue('erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_methods.get_products_details')
@frappe.whitelist()
def get_order_details(self):
if self.enable_amazon == 1:
after_date = dateutil.parser.parse(self.after_date).strftime("%Y-%m-%d")
frappe.enqueue('erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_methods.get_orders', after_date=after_date)
def schedule_get_order_details():
mws_settings = frappe.get_doc("Amazon MWS Settings")
if mws_settings.enable_sync and mws_settings.enable_amazon:
after_date = dateutil.parser.parse(mws_settings.after_date).strftime("%Y-%m-%d")
get_orders(after_date = after_date)
def setup_custom_fields():
custom_fields = {
"Item": [dict(fieldname='amazon_item_code', label='Amazon Item Code',
fieldtype='Data', insert_after='series', read_only=1, print_hide=1)],
"Sales Order": [dict(fieldname='amazon_order_id', label='Amazon Order ID',
fieldtype='Data', insert_after='title', read_only=1, print_hide=1)]
}
create_custom_fields(custom_fields)

View File

@ -1,8 +0,0 @@
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import unittest
class TestAmazonMWSSettings(unittest.TestCase):
pass

View File

@ -1,104 +0,0 @@
"""
Created on Tue Jun 26 15:42:07 2012
Borrowed from https://github.com/timotheus/ebaysdk-python
@author: pierre
"""
import re
import xml.etree.ElementTree as ET
class object_dict(dict):
"""object view of dict, you can
>>> a = object_dict()
>>> a.fish = 'fish'
>>> a['fish']
'fish'
>>> a['water'] = 'water'
>>> a.water
'water'
>>> a.test = {'value': 1}
>>> a.test2 = object_dict({'name': 'test2', 'value': 2})
>>> a.test, a.test2.name, a.test2.value
(1, 'test2', 2)
"""
def __init__(self, initd=None):
if initd is None:
initd = {}
dict.__init__(self, initd)
def __getattr__(self, item):
try:
d = self.__getitem__(item)
except KeyError:
return None
if isinstance(d, dict) and 'value' in d and len(d) == 1:
return d['value']
else:
return d
# if value is the only key in object, you can omit it
def __setstate__(self, item):
return False
def __setattr__(self, item, value):
self.__setitem__(item, value)
def getvalue(self, item, value=None):
return self.get(item, {}).get('value', value)
class xml2dict(object):
def __init__(self):
pass
def _parse_node(self, node):
node_tree = object_dict()
# Save attributes and text; this assumes no child element shares the same name
if node.text:
node_tree.value = node.text
for (k, v) in node.attrib.items():
k, v = self._namespace_split(k, object_dict({'value':v}))
node_tree[k] = v
# Save children
for child in node:  # iterate directly; Element.getchildren() was removed in Python 3.9
tag, tree = self._namespace_split(child.tag,
self._parse_node(child))
if tag not in node_tree: # the first time, so store it in dict
node_tree[tag] = tree
continue
old = node_tree[tag]
if not isinstance(old, list):
node_tree.pop(tag)
node_tree[tag] = [old] # multi times, so change old dict to a list
node_tree[tag].append(tree) # add the new one
return node_tree
def _namespace_split(self, tag, value):
"""
Split the tag '{http://cs.sfsu.edu/csc867/myscheduler}patients'
ns = http://cs.sfsu.edu/csc867/myscheduler
name = patients
"""
result = re.compile(r"\{(.*)\}(.*)").search(tag)
if result:
value.namespace, tag = result.groups()
return (tag, value)
def parse(self, file):
"""parse a xml file to a dict"""
f = open(file, 'r')
return self.fromstring(f.read())
def fromstring(self, s):
"""parse a string"""
t = ET.fromstring(s)
root_tag, root_tree = self._namespace_split(t.tag, self._parse_node(t))
return object_dict({root_tag: root_tree})
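As an illustration of the helper above, a small sketch (the XML snippet is made up for this example):

doc = xml2dict().fromstring("<Response><Result><Name>Widget</Name><Qty>2</Qty></Result></Response>")
# nested elements become object_dict instances, so attribute access works
print(doc.Response.Result.Name)  # -> 'Widget'
print(doc.Response.Result.Qty)   # -> '2'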

View File

@ -13,7 +13,7 @@ from frappe.utils import call_hook_method, cint, flt, get_url
class GoCardlessSettings(Document):
supported_currencies = ["EUR", "DKK", "GBP", "SEK"]
supported_currencies = ["EUR", "DKK", "GBP", "SEK", "AUD", "NZD", "CAD", "USD"]
def validate(self):
self.initialize_client()
@ -80,7 +80,7 @@ class GoCardlessSettings(Document):
def validate_transaction_currency(self, currency):
if currency not in self.supported_currencies:
frappe.throw(_("Please select another payment method. Stripe does not support transactions in currency '{0}'").format(currency))
frappe.throw(_("Please select another payment method. Go Cardless does not support transactions in currency '{0}'").format(currency))
def get_payment_url(self, **kwargs):
return get_url("./integrations/gocardless_checkout?{0}".format(urlencode(kwargs)))

View File

@ -29,17 +29,6 @@
"onboard": 0,
"type": "Link"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "Amazon MWS Settings",
"link_count": 0,
"link_to": "Amazon MWS Settings",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"hidden": 0,
"is_query_report": 0,

View File

@ -333,7 +333,6 @@ scheduler_events = {
"hourly": [
'erpnext.hr.doctype.daily_work_summary_group.daily_work_summary_group.trigger_emails',
"erpnext.accounts.doctype.subscription.subscription.process_all",
"erpnext.erpnext_integrations.doctype.amazon_mws_settings.amazon_mws_settings.schedule_get_order_details",
"erpnext.accounts.doctype.gl_entry.gl_entry.rename_gle_sle_docs",
"erpnext.erpnext_integrations.doctype.plaid_settings.plaid_settings.automatic_synchronization",
"erpnext.projects.doctype.project.project.hourly_reminder",

View File

@ -27,12 +27,13 @@
"fetch_from": "employee.user_id",
"fieldname": "user_id",
"fieldtype": "Data",
"in_list_view": 1,
"label": "ERPNext User ID",
"read_only": 1
}
],
"istable": 1,
"modified": "2019-06-06 10:41:20.313756",
"modified": "2022-02-13 19:44:21.302938",
"modified_by": "Administrator",
"module": "HR",
"name": "Employee Group Table",
@ -42,4 +43,4 @@
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}
}

View File

@ -546,7 +546,7 @@ class TestLeaveApplication(unittest.TestCase):
from erpnext.hr.utils import allocate_earned_leaves
i = 0
while(i<14):
allocate_earned_leaves()
allocate_earned_leaves(ignore_duplicates=True)
i += 1
self.assertEqual(get_leave_balance_on(employee.name, leave_type, nowdate()), 6)
@ -554,7 +554,7 @@ class TestLeaveApplication(unittest.TestCase):
frappe.db.set_value('Leave Type', leave_type, 'max_leaves_allowed', 0)
i = 0
while(i<6):
allocate_earned_leaves()
allocate_earned_leaves(ignore_duplicates=True)
i += 1
self.assertEqual(get_leave_balance_on(employee.name, leave_type, nowdate()), 9)

View File

@ -8,11 +8,10 @@ from math import ceil
import frappe
from frappe import _, bold
from frappe.model.document import Document
from frappe.utils import date_diff, flt, formatdate, get_datetime, getdate
from frappe.utils import date_diff, flt, formatdate, get_last_day, getdate
class LeavePolicyAssignment(Document):
def validate(self):
self.validate_policy_assignment_overlap()
self.set_dates()
@ -94,10 +93,12 @@ class LeavePolicyAssignment(Document):
new_leaves_allocated = 0
elif leave_type_details.get(leave_type).is_earned_leave == 1:
if self.assignment_based_on == "Leave Period":
new_leaves_allocated = self.get_leaves_for_passed_months(leave_type, new_leaves_allocated, leave_type_details, date_of_joining)
else:
if not self.assignment_based_on:
new_leaves_allocated = 0
else:
# get leaves for past months if assignment is based on Leave Period / Joining Date
new_leaves_allocated = self.get_leaves_for_passed_months(leave_type, new_leaves_allocated, leave_type_details, date_of_joining)
# Calculate leaves on a pro-rata basis for employees joining after the beginning of the given leave period
elif getdate(date_of_joining) > getdate(self.effective_from):
remaining_period = ((date_diff(self.effective_to, date_of_joining) + 1) / (date_diff(self.effective_to, self.effective_from) + 1))
@ -108,21 +109,24 @@ class LeavePolicyAssignment(Document):
def get_leaves_for_passed_months(self, leave_type, new_leaves_allocated, leave_type_details, date_of_joining):
from erpnext.hr.utils import get_monthly_earned_leave
current_month = get_datetime().month
current_year = get_datetime().year
current_date = frappe.flags.current_date or getdate()
if current_date > getdate(self.effective_to):
current_date = getdate(self.effective_to)
from_date = frappe.db.get_value("Leave Period", self.leave_period, "from_date")
if getdate(date_of_joining) > getdate(from_date):
from_date = date_of_joining
from_date_month = get_datetime(from_date).month
from_date_year = get_datetime(from_date).year
from_date = getdate(self.effective_from)
if getdate(date_of_joining) > from_date:
from_date = getdate(date_of_joining)
months_passed = 0
if current_year == from_date_year and current_month > from_date_month:
months_passed = current_month - from_date_month
elif current_year > from_date_year:
months_passed = (12 - from_date_month) + current_month
based_on_doj = leave_type_details.get(leave_type).based_on_date_of_joining
if current_date.year == from_date.year and current_date.month >= from_date.month:
months_passed = current_date.month - from_date.month
months_passed = add_current_month_if_applicable(months_passed, date_of_joining, based_on_doj)
elif current_date.year > from_date.year:
months_passed = (12 - from_date.month) + current_date.month
months_passed = add_current_month_if_applicable(months_passed, date_of_joining, based_on_doj)
if months_passed > 0:
monthly_earned_leave = get_monthly_earned_leave(new_leaves_allocated,
@ -134,6 +138,23 @@ class LeavePolicyAssignment(Document):
return new_leaves_allocated
def add_current_month_if_applicable(months_passed, date_of_joining, based_on_doj):
date = getdate(frappe.flags.current_date) or getdate()
if based_on_doj:
# if leave type allocation is based on the DOJ, and the date of assignment creation is the same as the DOJ,
# then the current month should be considered
if date.day == date_of_joining.day:
months_passed += 1
else:
last_day_of_month = get_last_day(date)
# if its the last day of the month, then that month should be considered
if last_day_of_month == date:
months_passed += 1
return months_passed
@frappe.whitelist()
def create_assignment_for_multiple_employees(employees, data):
@ -168,7 +189,7 @@ def create_assignment_for_multiple_employees(employees, data):
def get_leave_type_details():
leave_type_details = frappe._dict()
leave_types = frappe.get_all("Leave Type",
fields=["name", "is_lwp", "is_earned_leave", "is_compensatory",
fields=["name", "is_lwp", "is_earned_leave", "is_compensatory", "based_on_date_of_joining",
"is_carry_forward", "expire_carry_forwarded_leaves_after_days", "earned_leave_frequency", "rounding"])
for d in leave_types:
leave_type_details.setdefault(d.name, d)

View File

@ -4,7 +4,7 @@
import unittest
import frappe
from frappe.utils import add_months, get_first_day, getdate
from frappe.utils import add_months, get_first_day, get_last_day, getdate
from erpnext.hr.doctype.leave_application.test_leave_application import (
get_employee,
@ -20,36 +20,31 @@ test_dependencies = ["Employee"]
class TestLeavePolicyAssignment(unittest.TestCase):
def setUp(self):
for doctype in ["Leave Period", "Leave Application", "Leave Allocation", "Leave Policy Assignment", "Leave Ledger Entry"]:
frappe.db.sql("delete from `tab{0}`".format(doctype)) #nosec
frappe.db.delete(doctype)
employee = get_employee()
self.original_doj = employee.date_of_joining
self.employee = employee
def test_grant_leaves(self):
leave_period = get_leave_period()
employee = get_employee()
# create the leave policy with leave type "_Test Leave Type", allocation = 10
# allocation = 10
leave_policy = create_leave_policy()
leave_policy.submit()
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([employee.name], frappe._dict(data))
leave_policy_assignment_doc = frappe.get_doc("Leave Policy Assignment", leave_policy_assignments[0])
leave_policy_assignment_doc.reload()
self.assertEqual(leave_policy_assignment_doc.leaves_allocated, 1)
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
self.assertEqual(frappe.db.get_value("Leave Policy Assignment", leave_policy_assignments[0], "leaves_allocated"), 1)
leave_allocation = frappe.get_list("Leave Allocation", filters={
"employee": employee.name,
"employee": self.employee.name,
"leave_policy":leave_policy.name,
"leave_policy_assignment": leave_policy_assignments[0],
"docstatus": 1})[0]
leave_alloc_doc = frappe.get_doc("Leave Allocation", leave_allocation)
self.assertEqual(leave_alloc_doc.new_leaves_allocated, 10)
@ -61,63 +56,46 @@ class TestLeavePolicyAssignment(unittest.TestCase):
def test_allow_to_grant_all_leave_after_cancellation_of_every_leave_allocation(self):
leave_period = get_leave_period()
employee = get_employee()
# create the leave policy with leave type "_Test Leave Type", allocation = 10
leave_policy = create_leave_policy()
leave_policy.submit()
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([employee.name], frappe._dict(data))
leave_policy_assignment_doc = frappe.get_doc("Leave Policy Assignment", leave_policy_assignments[0])
leave_policy_assignment_doc.reload()
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
# every leave is allocated no more leave can be granted now
self.assertEqual(leave_policy_assignment_doc.leaves_allocated, 1)
self.assertEqual(frappe.db.get_value("Leave Policy Assignment", leave_policy_assignments[0], "leaves_allocated"), 1)
leave_allocation = frappe.get_list("Leave Allocation", filters={
"employee": employee.name,
"employee": self.employee.name,
"leave_policy":leave_policy.name,
"leave_policy_assignment": leave_policy_assignments[0],
"docstatus": 1})[0]
leave_alloc_doc = frappe.get_doc("Leave Allocation", leave_allocation)
# Users are allowed to grant leave when there is no allocation against the assignment
leave_alloc_doc.cancel()
leave_alloc_doc.delete()
leave_policy_assignment_doc.reload()
# Users are now allowed to grant leave
self.assertEqual(leave_policy_assignment_doc.leaves_allocated, 0)
self.assertEqual(frappe.db.get_value("Leave Policy Assignment", leave_policy_assignments[0], "leaves_allocated"), 0)
def test_earned_leave_allocation(self):
leave_period = create_leave_period("Test Earned Leave Period")
employee = get_employee()
leave_type = create_earned_leave_type("Test Earned Leave")
leave_policy = frappe.get_doc({
"doctype": "Leave Policy",
"title": "Test Leave Policy",
"leave_policy_details": [{"leave_type": leave_type.name, "annual_allocation": 6}]
}).insert()
}).submit()
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([employee.name], frappe._dict(data))
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
# leaves allocated should be 0 since it is an earned leave and allocation happens via scheduler based on set frequency
leaves_allocated = frappe.db.get_value("Leave Allocation", {
@ -125,11 +103,200 @@ class TestLeavePolicyAssignment(unittest.TestCase):
}, "total_leaves_allocated")
self.assertEqual(leaves_allocated, 0)
def test_earned_leave_alloc_for_passed_months_based_on_leave_period(self):
leave_period, leave_policy = setup_leave_period_and_policy(get_first_day(add_months(getdate(), -1)))
# Case 1: assignment created one month after the leave period, should allocate 1 leave
frappe.flags.current_date = get_first_day(getdate())
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
leaves_allocated = frappe.db.get_value("Leave Allocation", {
"leave_policy_assignment": leave_policy_assignments[0]
}, "total_leaves_allocated")
self.assertEqual(leaves_allocated, 1)
def test_earned_leave_alloc_for_passed_months_on_month_end_based_on_leave_period(self):
leave_period, leave_policy = setup_leave_period_and_policy(get_first_day(add_months(getdate(), -2)))
# Case 2: assignment created on the last day of the leave period's latter month
# should allocate 1 leave for current month even though the month has not ended
# since the daily job might have already executed
frappe.flags.current_date = get_last_day(getdate())
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
leaves_allocated = frappe.db.get_value("Leave Allocation", {
"leave_policy_assignment": leave_policy_assignments[0]
}, "total_leaves_allocated")
self.assertEqual(leaves_allocated, 3)
# if the daily job is not completed yet, there is another check present
# to ensure leave is not already allocated to avoid duplication
from erpnext.hr.utils import allocate_earned_leaves
allocate_earned_leaves()
leaves_allocated = frappe.db.get_value("Leave Allocation", {
"leave_policy_assignment": leave_policy_assignments[0]
}, "total_leaves_allocated")
self.assertEqual(leaves_allocated, 3)
def test_earned_leave_alloc_for_passed_months_with_cf_leaves_based_on_leave_period(self):
from erpnext.hr.doctype.leave_allocation.test_leave_allocation import create_leave_allocation
leave_period, leave_policy = setup_leave_period_and_policy(get_first_day(add_months(getdate(), -2)))
# initial leave allocation = 5
leave_allocation = create_leave_allocation(employee=self.employee.name, employee_name=self.employee.employee_name, leave_type="Test Earned Leave",
from_date=add_months(getdate(), -12), to_date=add_months(getdate(), -3), new_leaves_allocated=5, carry_forward=0)
leave_allocation.submit()
# Case 3: assignment created on the last day of the leave period's latter month with carry forwarding
frappe.flags.current_date = get_last_day(add_months(getdate(), -1))
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name,
"carry_forward": 1
}
# carry forwarded leaves = 5, 3 leaves allocated for passed months
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
details = frappe.db.get_value("Leave Allocation", {
"leave_policy_assignment": leave_policy_assignments[0]
}, ["total_leaves_allocated", "new_leaves_allocated", "unused_leaves", "name"], as_dict=True)
self.assertEqual(details.new_leaves_allocated, 2)
self.assertEqual(details.unused_leaves, 5)
self.assertEqual(details.total_leaves_allocated, 7)
# if the daily job is not completed yet, there is another check present
# to ensure leave is not already allocated to avoid duplication
from erpnext.hr.utils import is_earned_leave_already_allocated
frappe.flags.current_date = get_last_day(getdate())
allocation = frappe.get_doc("Leave Allocation", details.name)
# 1 leave is still pending to be allocated, irrespective of carry forwarded leaves
self.assertFalse(is_earned_leave_already_allocated(allocation, leave_policy.leave_policy_details[0].annual_allocation))
def test_earned_leave_alloc_for_passed_months_based_on_joining_date(self):
# tests leave alloc for earned leaves for assignment based on joining date in policy assignment
leave_type = create_earned_leave_type("Test Earned Leave")
leave_policy = frappe.get_doc({
"doctype": "Leave Policy",
"title": "Test Leave Policy",
"leave_policy_details": [{"leave_type": leave_type.name, "annual_allocation": 12}]
}).submit()
# joining date set to 2 months back
self.employee.date_of_joining = get_first_day(add_months(getdate(), -2))
self.employee.save()
# assignment created on the last day of the current month
frappe.flags.current_date = get_last_day(getdate())
data = {
"assignment_based_on": "Joining Date",
"leave_policy": leave_policy.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
leaves_allocated = frappe.db.get_value("Leave Allocation", {"leave_policy_assignment": leave_policy_assignments[0]},
"total_leaves_allocated")
effective_from = frappe.db.get_value("Leave Policy Assignment", leave_policy_assignments[0], "effective_from")
self.assertEqual(effective_from, self.employee.date_of_joining)
self.assertEqual(leaves_allocated, 3)
# to ensure leave is not already allocated to avoid duplication
from erpnext.hr.utils import allocate_earned_leaves
frappe.flags.current_date = get_last_day(getdate())
allocate_earned_leaves()
leaves_allocated = frappe.db.get_value("Leave Allocation", {"leave_policy_assignment": leave_policy_assignments[0]},
"total_leaves_allocated")
self.assertEqual(leaves_allocated, 3)
def test_grant_leaves_on_doj_for_earned_leaves_based_on_leave_period(self):
# tests leave alloc based on leave period for earned leaves with "based on doj" configuration in leave type
leave_period, leave_policy = setup_leave_period_and_policy(get_first_day(add_months(getdate(), -2)), based_on_doj=True)
# joining date set to 2 months back
self.employee.date_of_joining = get_first_day(add_months(getdate(), -2))
self.employee.save()
# assignment created on the same day of the current month, should allocate leaves including the current month
frappe.flags.current_date = get_first_day(getdate())
data = {
"assignment_based_on": "Leave Period",
"leave_policy": leave_policy.name,
"leave_period": leave_period.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
leaves_allocated = frappe.db.get_value("Leave Allocation", {
"leave_policy_assignment": leave_policy_assignments[0]
}, "total_leaves_allocated")
self.assertEqual(leaves_allocated, 3)
# if the daily job is not completed yet, there is another check present
# to ensure leave is not already allocated to avoid duplication
from erpnext.hr.utils import allocate_earned_leaves
frappe.flags.current_date = get_first_day(getdate())
allocate_earned_leaves()
leaves_allocated = frappe.db.get_value("Leave Allocation", {
"leave_policy_assignment": leave_policy_assignments[0]
}, "total_leaves_allocated")
self.assertEqual(leaves_allocated, 3)
def test_grant_leaves_on_doj_for_earned_leaves_based_on_joining_date(self):
# tests leave alloc based on joining date for earned leaves with "based on doj" configuration in leave type
leave_type = create_earned_leave_type("Test Earned Leave", based_on_doj=True)
leave_policy = frappe.get_doc({
"doctype": "Leave Policy",
"title": "Test Leave Policy",
"leave_policy_details": [{"leave_type": leave_type.name, "annual_allocation": 12}]
}).submit()
# joining date set to 2 months back
# leave should be allocated for current month too since this day is same as the joining day
self.employee.date_of_joining = get_first_day(add_months(getdate(), -2))
self.employee.save()
# assignment created on the first day of the current month
frappe.flags.current_date = get_first_day(getdate())
data = {
"assignment_based_on": "Joining Date",
"leave_policy": leave_policy.name
}
leave_policy_assignments = create_assignment_for_multiple_employees([self.employee.name], frappe._dict(data))
leaves_allocated = frappe.db.get_value("Leave Allocation", {"leave_policy_assignment": leave_policy_assignments[0]},
"total_leaves_allocated")
effective_from = frappe.db.get_value("Leave Policy Assignment", leave_policy_assignments[0], "effective_from")
self.assertEqual(effective_from, self.employee.date_of_joining)
self.assertEqual(leaves_allocated, 3)
# to ensure leave is not already allocated to avoid duplication
from erpnext.hr.utils import allocate_earned_leaves
frappe.flags.current_date = get_first_day(getdate())
allocate_earned_leaves()
leaves_allocated = frappe.db.get_value("Leave Allocation", {"leave_policy_assignment": leave_policy_assignments[0]},
"total_leaves_allocated")
self.assertEqual(leaves_allocated, 3)
def tearDown(self):
frappe.db.rollback()
frappe.db.set_value("Employee", self.employee.name, "date_of_joining", self.original_doj)
frappe.flags.current_date = None
def create_earned_leave_type(leave_type):
def create_earned_leave_type(leave_type, based_on_doj=False):
frappe.delete_doc_if_exists("Leave Type", leave_type, force=1)
return frappe.get_doc(dict(
@ -138,13 +305,15 @@ def create_earned_leave_type(leave_type):
is_earned_leave=1,
earned_leave_frequency="Monthly",
rounding=0.5,
max_leaves_allowed=6
is_carry_forward=1,
based_on_date_of_joining=based_on_doj
)).insert()
def create_leave_period(name):
def create_leave_period(name, start_date=None):
frappe.delete_doc_if_exists("Leave Period", name, force=1)
start_date = get_first_day(getdate())
if not start_date:
start_date = get_first_day(getdate())
return frappe.get_doc(dict(
name=name,
@ -153,4 +322,17 @@ def create_leave_period(name):
to_date=add_months(start_date, 12),
company="_Test Company",
is_active=1
)).insert()
)).insert()
def setup_leave_period_and_policy(start_date, based_on_doj=False):
leave_type = create_earned_leave_type("Test Earned Leave", based_on_doj)
leave_period = create_leave_period("Test Earned Leave Period",
start_date=start_date)
leave_policy = frappe.get_doc({
"doctype": "Leave Policy",
"title": "Test Leave Policy",
"leave_policy_details": [{"leave_type": leave_type.name, "annual_allocation": 12}]
}).insert()
return leave_period, leave_policy

View File

@ -237,7 +237,7 @@ def generate_leave_encashment():
create_leave_encashment(leave_allocation=leave_allocation)
def allocate_earned_leaves():
def allocate_earned_leaves(ignore_duplicates=False):
'''Allocate earned leaves to Employees'''
e_leave_types = get_earned_leaves()
today = getdate()
@ -261,13 +261,13 @@ def allocate_earned_leaves():
from_date=allocation.from_date
if e_leave_type.based_on_date_of_joining_date:
if e_leave_type.based_on_date_of_joining:
from_date = frappe.db.get_value("Employee", allocation.employee, "date_of_joining")
if check_effective_date(from_date, today, e_leave_type.earned_leave_frequency, e_leave_type.based_on_date_of_joining_date):
update_previous_leave_allocation(allocation, annual_allocation, e_leave_type)
if check_effective_date(from_date, today, e_leave_type.earned_leave_frequency, e_leave_type.based_on_date_of_joining):
update_previous_leave_allocation(allocation, annual_allocation, e_leave_type, ignore_duplicates)
def update_previous_leave_allocation(allocation, annual_allocation, e_leave_type):
def update_previous_leave_allocation(allocation, annual_allocation, e_leave_type, ignore_duplicates=False):
earned_leaves = get_monthly_earned_leave(annual_allocation, e_leave_type.earned_leave_frequency, e_leave_type.rounding)
allocation = frappe.get_doc('Leave Allocation', allocation.name)
@ -277,9 +277,12 @@ def update_previous_leave_allocation(allocation, annual_allocation, e_leave_type
new_allocation = e_leave_type.max_leaves_allowed
if new_allocation != allocation.total_leaves_allocated:
allocation.db_set("total_leaves_allocated", new_allocation, update_modified=False)
today_date = today()
create_additional_leave_ledger_entry(allocation, earned_leaves, today_date)
if ignore_duplicates or not is_earned_leave_already_allocated(allocation, annual_allocation):
allocation.db_set("total_leaves_allocated", new_allocation, update_modified=False)
create_additional_leave_ledger_entry(allocation, earned_leaves, today_date)
def get_monthly_earned_leave(annual_leaves, frequency, rounding):
earned_leaves = 0.0
@ -297,6 +300,28 @@ def get_monthly_earned_leave(annual_leaves, frequency, rounding):
return earned_leaves
def is_earned_leave_already_allocated(allocation, annual_allocation):
from erpnext.hr.doctype.leave_policy_assignment.leave_policy_assignment import (
get_leave_type_details,
)
leave_type_details = get_leave_type_details()
date_of_joining = frappe.db.get_value("Employee", allocation.employee, "date_of_joining")
assignment = frappe.get_doc("Leave Policy Assignment", allocation.leave_policy_assignment)
leaves_for_passed_months = assignment.get_leaves_for_passed_months(allocation.leave_type,
annual_allocation, leave_type_details, date_of_joining)
# exclude carry-forwarded leaves while checking for leave allocation for passed months
num_allocations = allocation.total_leaves_allocated
if allocation.unused_leaves:
num_allocations -= allocation.unused_leaves
if num_allocations >= leaves_for_passed_months:
return True
return False
def get_leave_allocations(date, leave_type):
return frappe.db.sql("""select name, employee, from_date, to_date, leave_policy_assignment, leave_policy
from `tabLeave Allocation`
@ -318,7 +343,7 @@ def create_additional_leave_ledger_entry(allocation, leaves, date):
allocation.unused_leaves = 0
allocation.create_leave_ledger_entry()
def check_effective_date(from_date, to_date, frequency, based_on_date_of_joining_date):
def check_effective_date(from_date, to_date, frequency, based_on_date_of_joining):
import calendar
from dateutil import relativedelta
@ -329,7 +354,7 @@ def check_effective_date(from_date, to_date, frequency, based_on_date_of_joining
#last day of month
last_day = calendar.monthrange(to_date.year, to_date.month)[1]
if (from_date.day == to_date.day and based_on_date_of_joining_date) or (not based_on_date_of_joining_date and to_date.day == last_day):
if (from_date.day == to_date.day and based_on_date_of_joining) or (not based_on_date_of_joining and to_date.day == last_day):
if frequency == "Monthly":
return True
elif frequency == "Quarterly" and rd.months % 3:

View File

@ -345,7 +345,7 @@ class LoanRepayment(AccountsController):
gle_map.append(
self.get_gl_dict({
"account": loan_details.penalty_income_account,
"against": payment_account,
"against": loan_details.loan_account,
"credit": self.total_penalty_paid,
"credit_in_account_currency": self.total_penalty_paid,
"against_voucher_type": "Loan",
@ -367,7 +367,9 @@ class LoanRepayment(AccountsController):
"against_voucher": self.against_loan,
"remarks": remarks,
"cost_center": self.cost_center,
"posting_date": getdate(self.posting_date)
"posting_date": getdate(self.posting_date),
"party_type": loan_details.applicant_type if self.repay_from_salary else '',
"party": loan_details.applicant if self.repay_from_salary else ''
})
)

View File

@ -28,9 +28,24 @@ from erpnext.setup.doctype.item_group.item_group import get_item_group_defaults
class ProductionPlan(Document):
def validate(self):
self.set_pending_qty_in_row_without_reference()
self.calculate_total_planned_qty()
self.set_status()
def set_pending_qty_in_row_without_reference(self):
"Set Pending Qty in independent rows (not from SO or MR)."
if self.docstatus > 0: # set only to initialise value before submit
return
for item in self.po_items:
if not item.get("sales_order") or not item.get("material_request"):
item.pending_qty = item.planned_qty
def calculate_total_planned_qty(self):
self.total_planned_qty = 0
for d in self.po_items:
self.total_planned_qty += flt(d.planned_qty)
def validate_data(self):
for d in self.get('po_items'):
if not d.bom_no:
@ -263,11 +278,6 @@ class ProductionPlan(Document):
'qty': so_detail['qty']
})
def calculate_total_planned_qty(self):
self.total_planned_qty = 0
for d in self.po_items:
self.total_planned_qty += flt(d.planned_qty)
def calculate_total_produced_qty(self):
self.total_produced_qty = 0
for d in self.po_items:
@ -275,10 +285,11 @@ class ProductionPlan(Document):
self.db_set("total_produced_qty", self.total_produced_qty, update_modified=False)
def update_produced_qty(self, produced_qty, production_plan_item):
def update_produced_pending_qty(self, produced_qty, production_plan_item):
for data in self.po_items:
if data.name == production_plan_item:
data.produced_qty = produced_qty
data.pending_qty = flt(data.planned_qty - produced_qty)
data.db_update()
self.calculate_total_produced_qty()
@ -308,7 +319,7 @@ class ProductionPlan(Document):
if self.total_produced_qty > 0:
self.status = "In Process"
if self.check_have_work_orders_completed():
if self.all_items_completed():
self.status = "Completed"
if self.status != 'Completed':
@ -341,6 +352,7 @@ class ProductionPlan(Document):
def get_production_items(self):
item_dict = {}
for d in self.po_items:
item_details = {
"production_item" : d.item_code,
@ -357,12 +369,12 @@ class ProductionPlan(Document):
"production_plan" : self.name,
"production_plan_item" : d.name,
"product_bundle_item" : d.product_bundle_item,
"planned_start_date" : d.planned_start_date
"planned_start_date" : d.planned_start_date,
"project" : self.project
}
item_details.update({
"project": self.project or frappe.db.get_value("Sales Order", d.sales_order, "project")
})
if not item_details['project'] and d.sales_order:
item_details['project'] = frappe.get_cached_value("Sales Order", d.sales_order, "project")
if self.get_items_from == "Material Request":
item_details.update({
@ -380,39 +392,59 @@ class ProductionPlan(Document):
@frappe.whitelist()
def make_work_order(self):
from erpnext.manufacturing.doctype.work_order.work_order import get_default_warehouse
wo_list, po_list = [], []
subcontracted_po = {}
default_warehouses = get_default_warehouse()
self.validate_data()
self.make_work_order_for_finished_goods(wo_list)
self.make_work_order_for_subassembly_items(wo_list, subcontracted_po)
self.make_work_order_for_finished_goods(wo_list, default_warehouses)
self.make_work_order_for_subassembly_items(wo_list, subcontracted_po, default_warehouses)
self.make_subcontracted_purchase_order(subcontracted_po, po_list)
self.show_list_created_message('Work Order', wo_list)
self.show_list_created_message('Purchase Order', po_list)
def make_work_order_for_finished_goods(self, wo_list):
def make_work_order_for_finished_goods(self, wo_list, default_warehouses):
items_data = self.get_production_items()
for key, item in items_data.items():
if self.sub_assembly_items:
item['use_multi_level_bom'] = 0
set_default_warehouses(item, default_warehouses)
work_order = self.create_work_order(item)
if work_order:
wo_list.append(work_order)
def make_work_order_for_subassembly_items(self, wo_list, subcontracted_po):
def make_work_order_for_subassembly_items(self, wo_list, subcontracted_po, default_warehouses):
for row in self.sub_assembly_items:
if row.type_of_manufacturing == 'Subcontract':
subcontracted_po.setdefault(row.supplier, []).append(row)
continue
args = {}
self.prepare_args_for_sub_assembly_items(row, args)
work_order = self.create_work_order(args)
work_order_data = {
'wip_warehouse': default_warehouses.get('wip_warehouse'),
'fg_warehouse': default_warehouses.get('fg_warehouse')
}
self.prepare_data_for_sub_assembly_items(row, work_order_data)
work_order = self.create_work_order(work_order_data)
if work_order:
wo_list.append(work_order)
def prepare_data_for_sub_assembly_items(self, row, wo_data):
for field in ["production_item", "item_name", "qty", "fg_warehouse",
"description", "bom_no", "stock_uom", "bom_level",
"production_plan_item", "schedule_date"]:
if row.get(field):
wo_data[field] = row.get(field)
wo_data.update({
"use_multi_level_bom": 0,
"production_plan": self.name,
"production_plan_sub_assembly_item": row.name
})
def make_subcontracted_purchase_order(self, subcontracted_po, purchase_orders):
if not subcontracted_po:
return
@ -423,7 +455,7 @@ class ProductionPlan(Document):
po.schedule_date = getdate(po_list[0].schedule_date) if po_list[0].schedule_date else nowdate()
po.is_subcontracted = 'Yes'
for row in po_list:
args = {
po_data = {
'item_code': row.production_item,
'warehouse': row.fg_warehouse,
'production_plan_sub_assembly_item': row.name,
@ -433,9 +465,9 @@ class ProductionPlan(Document):
for field in ['schedule_date', 'qty', 'uom', 'stock_uom', 'item_name',
'description', 'production_plan_item']:
args[field] = row.get(field)
po_data[field] = row.get(field)
po.append('items', args)
po.append('items', po_data)
po.set_missing_values()
po.flags.ignore_mandatory = True
@ -452,24 +484,9 @@ class ProductionPlan(Document):
doc_list = [get_link_to_form(doctype, p) for p in doc_list]
msgprint(_("{0} created").format(comma_and(doc_list)))
def prepare_args_for_sub_assembly_items(self, row, args):
for field in ["production_item", "item_name", "qty", "fg_warehouse",
"description", "bom_no", "stock_uom", "bom_level",
"production_plan_item", "schedule_date"]:
args[field] = row.get(field)
args.update({
"use_multi_level_bom": 0,
"production_plan": self.name,
"production_plan_sub_assembly_item": row.name
})
def create_work_order(self, item):
from erpnext.manufacturing.doctype.work_order.work_order import (
OverProductionError,
get_default_warehouse,
)
warehouse = get_default_warehouse()
from erpnext.manufacturing.doctype.work_order.work_order import OverProductionError
wo = frappe.new_doc("Work Order")
wo.update(item)
wo.planned_start_date = item.get('planned_start_date') or item.get('schedule_date')
@ -478,11 +495,11 @@ class ProductionPlan(Document):
wo.fg_warehouse = item.get("warehouse")
wo.set_work_order_operations()
wo.set_required_items()
if not wo.fg_warehouse:
wo.fg_warehouse = warehouse.get('fg_warehouse')
try:
wo.flags.ignore_mandatory = True
wo.flags.ignore_validate = True
wo.insert()
return wo.name
except OverProductionError:
@ -574,21 +591,32 @@ class ProductionPlan(Document):
self.append("sub_assembly_items", data)
def check_have_work_orders_completed(self):
wo_status = frappe.db.get_list(
def all_items_completed(self):
all_items_produced = all(flt(d.planned_qty) - flt(d.produced_qty) < 0.000001
for d in self.po_items)
if not all_items_produced:
return False
wo_status = frappe.get_all(
"Work Order",
filters={"production_plan": self.name},
filters={
"production_plan": self.name,
"status": ("not in", ["Closed", "Stopped"]),
"docstatus": ("<", 2),
},
fields="status",
pluck="status"
pluck="status",
)
return all(s == "Completed" for s in wo_status)
all_work_orders_completed = all(s == "Completed" for s in wo_status)
return all_work_orders_completed
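The new all_items_completed() above combines two conditions: every po_items row is produced within a small float tolerance, and every open (not Closed or Stopped) Work Order against the plan is Completed. A minimal, self-contained sketch of the tolerance check, with invented values and assuming only frappe.utils.flt:

from frappe.utils import flt

def _fully_produced(po_items, tolerance=0.000001):
	# a row counts as produced once planned_qty - produced_qty drops below the tolerance
	return all(flt(d.get("planned_qty")) - flt(d.get("produced_qty")) < tolerance for d in po_items)

assert _fully_produced([{"planned_qty": 5, "produced_qty": 5}])
assert not _fully_produced([{"planned_qty": 5, "produced_qty": 4}])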
@frappe.whitelist()
def download_raw_materials(doc, warehouses=None):
if isinstance(doc, str):
doc = frappe._dict(json.loads(doc))
item_list = [['Item Code', 'Description', 'Stock UOM', 'Warehouse', 'Required Qty as per BOM',
item_list = [['Item Code', 'Item Name', 'Description',
'Stock UOM', 'Warehouse', 'Required Qty as per BOM',
'Projected Qty', 'Available Qty In Hand', 'Ordered Qty', 'Planned Qty',
'Reserved Qty for Production', 'Safety Stock', 'Required Qty']]
@ -597,7 +625,8 @@ def download_raw_materials(doc, warehouses=None):
items = get_items_for_material_requests(doc, warehouses=warehouses, get_parent_warehouse_data=True)
for d in items:
item_list.append([d.get('item_code'), d.get('description'), d.get('stock_uom'), d.get('warehouse'),
item_list.append([d.get('item_code'), d.get('item_name'),
d.get('description'), d.get('stock_uom'), d.get('warehouse'),
d.get('required_bom_qty'), d.get('projected_qty'), d.get('actual_qty'), d.get('ordered_qty'),
d.get('planned_qty'), d.get('reserved_qty_for_production'), d.get('safety_stock'), d.get('quantity')])
@ -1023,3 +1052,8 @@ def get_sub_assembly_items(bom_no, bom_data, to_produce_qty, indent=0):
if d.value:
get_sub_assembly_items(d.value, bom_data, stock_qty, indent=indent+1)
def set_default_warehouses(row, default_warehouses):
for field in ['wip_warehouse', 'fg_warehouse']:
if not row.get(field):
row[field] = default_warehouses.get(field)

View File

@ -11,6 +11,7 @@ from erpnext.manufacturing.doctype.production_plan.production_plan import (
)
from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
from erpnext.stock.doctype.item.test_item import create_item
from erpnext.stock.doctype.stock_entry.test_stock_entry import make_stock_entry
from erpnext.stock.doctype.stock_reconciliation.test_stock_reconciliation import (
create_stock_reconciliation,
)
@ -36,15 +37,21 @@ class TestProductionPlan(ERPNextTestCase):
if not frappe.db.get_value('BOM', {'item': item}):
make_bom(item = item, raw_materials = raw_materials)
def test_production_plan(self):
def test_production_plan_mr_creation(self):
"Test if MRs are created for unavailable raw materials."
pln = create_production_plan(item_code='Test Production Item 1')
self.assertTrue(len(pln.mr_items), 2)
pln.make_material_request()
pln = frappe.get_doc('Production Plan', pln.name)
pln.make_material_request()
pln.reload()
self.assertTrue(pln.status, 'Material Requested')
material_requests = frappe.get_all('Material Request Item', fields = ['distinct parent'],
filters = {'production_plan': pln.name}, as_list=1)
material_requests = frappe.get_all(
'Material Request Item',
fields = ['distinct parent'],
filters = {'production_plan': pln.name},
as_list=1
)
self.assertTrue(len(material_requests), 2)
@ -66,27 +73,42 @@ class TestProductionPlan(ERPNextTestCase):
pln.cancel()
def test_production_plan_start_date(self):
"Test if Work Order has same Planned Start Date as Prod Plan."
planned_date = add_to_date(date=None, days=3)
plan = create_production_plan(item_code='Test Production Item 1', planned_start_date=planned_date)
plan = create_production_plan(
item_code='Test Production Item 1',
planned_start_date=planned_date
)
plan.make_work_order()
work_orders = frappe.get_all('Work Order', fields = ['name', 'planned_start_date'],
filters = {'production_plan': plan.name})
work_orders = frappe.get_all(
'Work Order',
fields = ['name', 'planned_start_date'],
filters = {'production_plan': plan.name}
)
self.assertEqual(work_orders[0].planned_start_date, planned_date)
for wo in work_orders:
frappe.delete_doc('Work Order', wo.name)
frappe.get_doc('Production Plan', plan.name).cancel()
plan.reload()
plan.cancel()
def test_production_plan_for_existing_ordered_qty(self):
"""
- Enable 'ignore_existing_ordered_qty'.
- Test if MR Planning table pulls Raw Material Qty even if it is in stock.
"""
sr1 = create_stock_reconciliation(item_code="Raw Material Item 1",
target="_Test Warehouse - _TC", qty=1, rate=110)
sr2 = create_stock_reconciliation(item_code="Raw Material Item 2",
target="_Test Warehouse - _TC", qty=1, rate=120)
pln = create_production_plan(item_code='Test Production Item 1', ignore_existing_ordered_qty=0)
pln = create_production_plan(
item_code='Test Production Item 1',
ignore_existing_ordered_qty=1
)
self.assertTrue(len(pln.mr_items), 1)
self.assertTrue(flt(pln.mr_items[0].quantity), 1.0)
@ -95,23 +117,39 @@ class TestProductionPlan(ERPNextTestCase):
pln.cancel()
def test_production_plan_with_non_stock_item(self):
pln = create_production_plan(item_code='Test Production Item 1', include_non_stock_items=0)
"Test if MR Planning table includes Non Stock RM."
pln = create_production_plan(
item_code='Test Production Item 1',
include_non_stock_items=1
)
self.assertTrue(len(pln.mr_items), 3)
pln.cancel()
def test_production_plan_without_multi_level(self):
pln = create_production_plan(item_code='Test Production Item 1', use_multi_level_bom=0)
"Test MR Planning table for non exploded BOM."
pln = create_production_plan(
item_code='Test Production Item 1',
use_multi_level_bom=0
)
self.assertTrue(len(pln.mr_items), 2)
pln.cancel()
def test_production_plan_without_multi_level_for_existing_ordered_qty(self):
"""
- Disable 'ignore_existing_ordered_qty'.
- Test if MR Planning table avoids pulling Raw Material Qty that is already in stock for
a non-exploded BOM.
"""
sr1 = create_stock_reconciliation(item_code="Raw Material Item 1",
target="_Test Warehouse - _TC", qty=1, rate=130)
sr2 = create_stock_reconciliation(item_code="Subassembly Item 1",
target="_Test Warehouse - _TC", qty=1, rate=140)
pln = create_production_plan(item_code='Test Production Item 1',
use_multi_level_bom=0, ignore_existing_ordered_qty=0)
pln = create_production_plan(
item_code='Test Production Item 1',
use_multi_level_bom=0,
ignore_existing_ordered_qty=0
)
self.assertTrue(len(pln.mr_items), 0)
sr1.cancel()
@ -119,6 +157,7 @@ class TestProductionPlan(ERPNextTestCase):
pln.cancel()
def test_production_plan_sales_orders(self):
"Test if previously fulfilled SO (with WO) is pulled into Prod Plan."
item = 'Test Production Item 1'
so = make_sales_order(item_code=item, qty=1)
sales_order = so.name
@ -166,24 +205,25 @@ class TestProductionPlan(ERPNextTestCase):
self.assertEqual(sales_orders, [])
def test_production_plan_combine_items(self):
"Test combining FG items in Production Plan."
item = 'Test Production Item 1'
so = make_sales_order(item_code=item, qty=1)
so1 = make_sales_order(item_code=item, qty=1)
pln = frappe.new_doc('Production Plan')
pln.company = so.company
pln.company = so1.company
pln.get_items_from = 'Sales Order'
pln.append('sales_orders', {
'sales_order': so.name,
'sales_order_date': so.transaction_date,
'customer': so.customer,
'grand_total': so.grand_total
'sales_order': so1.name,
'sales_order_date': so1.transaction_date,
'customer': so1.customer,
'grand_total': so1.grand_total
})
so = make_sales_order(item_code=item, qty=2)
so2 = make_sales_order(item_code=item, qty=2)
pln.append('sales_orders', {
'sales_order': so.name,
'sales_order_date': so.transaction_date,
'customer': so.customer,
'grand_total': so.grand_total
'sales_order': so2.name,
'sales_order_date': so2.transaction_date,
'customer': so2.customer,
'grand_total': so2.grand_total
})
pln.combine_items = 1
pln.get_items()
@ -214,28 +254,37 @@ class TestProductionPlan(ERPNextTestCase):
so_wo_qty = frappe.db.get_value('Sales Order Item', so_item, 'work_order_qty')
self.assertEqual(so_wo_qty, 0.0)
latest_plan = frappe.get_doc('Production Plan', pln.name)
latest_plan.cancel()
pln.reload()
pln.cancel()
def test_pp_to_mr_customer_provided(self):
#Material Request from Production Plan for Customer Provided
" Test Material Request from Production Plan for Customer Provided Item."
create_item('CUST-0987', is_customer_provided_item = 1, customer = '_Test Customer', is_purchase_item = 0)
create_item('Production Item CUST')
for item, raw_materials in {'Production Item CUST': ['Raw Material Item 1', 'CUST-0987']}.items():
if not frappe.db.get_value('BOM', {'item': item}):
make_bom(item = item, raw_materials = raw_materials)
production_plan = create_production_plan(item_code = 'Production Item CUST')
production_plan.make_material_request()
material_request = frappe.db.get_value('Material Request Item', {'production_plan': production_plan.name, 'item_code': 'CUST-0987'}, 'parent')
material_request = frappe.db.get_value(
'Material Request Item',
{'production_plan': production_plan.name, 'item_code': 'CUST-0987'},
'parent'
)
mr = frappe.get_doc('Material Request', material_request)
self.assertTrue(mr.material_request_type, 'Customer Provided')
self.assertTrue(mr.customer, '_Test Customer')
def test_production_plan_with_multi_level_bom(self):
#|Item Code | Qty |
#|Test BOM 1 | 1 |
#| Test BOM 2 | 2 |
#| Test BOM 3 | 3 |
"""
| Item Code  | Qty |
| Test BOM 1 | 1   |
| Test BOM 2 | 2   |
| Test BOM 3 | 3   |
"""
for item_code in ["Test BOM 1", "Test BOM 2", "Test BOM 3", "Test RM BOM 1"]:
create_item(item_code, is_stock_item=1)
@ -264,15 +313,18 @@ class TestProductionPlan(ERPNextTestCase):
pln.make_work_order()
# qty to produce on the last-level sub-assembly Work Order
to_produce_qty = frappe.db.get_value("Work Order",
{"production_plan": pln.name, "production_item": "Test BOM 3"}, "qty")
to_produce_qty = frappe.db.get_value(
"Work Order",
{"production_plan": pln.name, "production_item": "Test BOM 3"},
"qty"
)
self.assertEqual(to_produce_qty, 18.0)
pln.cancel()
frappe.delete_doc("Production Plan", pln.name)
def test_get_warehouse_list_group(self):
"""Check if required warehouses are returned"""
"Check if required child warehouses are returned."
warehouse_json = '[{\"warehouse\":\"_Test Warehouse Group - _TC\"}]'
warehouses = set(get_warehouse_list(warehouse_json))
@ -284,6 +336,7 @@ class TestProductionPlan(ERPNextTestCase):
msg=f"Following warehouses were expected {', '.join(missing_warehouse)}")
def test_get_warehouse_list_single(self):
"Check if same warehouse is returned in absence of child warehouses."
warehouse_json = '[{\"warehouse\":\"_Test Scrap Warehouse - _TC\"}]'
warehouses = set(get_warehouse_list(warehouse_json))
@ -292,6 +345,7 @@ class TestProductionPlan(ERPNextTestCase):
self.assertEqual(warehouses, expected_warehouses)
def test_get_sales_order_with_variant(self):
"Check if Template BOM is fetched in absence of Variant BOM."
rm_item = create_item('PIV_RM', valuation_rate = 100)
if not frappe.db.exists('Item', {"item_code": 'PIV'}):
item = create_item('PIV', valuation_rate = 100)
@ -348,16 +402,13 @@ class TestProductionPlan(ERPNextTestCase):
frappe.db.rollback()
def test_subassmebly_sorting(self):
""" Test subassembly sorting in case of multiple items with nested BOMs"""
"Test subassembly sorting in case of multiple items with nested BOMs."
from erpnext.manufacturing.doctype.bom.test_bom import create_nested_bom
prefix = "_TestLevel_"
boms = {
"Assembly": {
"SubAssembly1": {"ChildPart1": {}, "ChildPart2": {},},
"SubAssembly2": {"ChildPart3": {}},
"SubAssembly3": {"SubSubAssy1": {"ChildPart4": {}}},
"ChildPart5": {},
"ChildPart6": {},
"SubAssembly4": {"SubSubAssy2": {"ChildPart7": {}}},
},
@ -386,6 +437,7 @@ class TestProductionPlan(ERPNextTestCase):
self.assertIn("SuperSecret", plan.sub_assembly_items[0].production_item)
def test_multiple_work_order_for_production_plan_item(self):
"Test producing Prod Plan (making WO) in parts."
def create_work_order(item, pln, qty):
# Get Production Items
items_data = pln.get_production_items()
@ -441,7 +493,121 @@ class TestProductionPlan(ERPNextTestCase):
pln.reload()
self.assertEqual(pln.po_items[0].ordered_qty, 0)
def test_production_plan_pending_qty_with_sales_order(self):
"""
Test Prod Plan impact via: SO -> Prod Plan -> WO -> SE -> SE (cancel)
"""
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
from erpnext.manufacturing.doctype.work_order.work_order import (
make_stock_entry as make_se_from_wo,
)
make_stock_entry(item_code="Raw Material Item 1",
target="Work In Progress - _TC",
qty=2, basic_rate=100
)
make_stock_entry(item_code="Raw Material Item 2",
target="Work In Progress - _TC",
qty=2, basic_rate=100
)
item = 'Test Production Item 1'
so = make_sales_order(item_code=item, qty=1)
pln = create_production_plan(
company=so.company,
get_items_from="Sales Order",
sales_order=so,
skip_getting_mr_items=True
)
self.assertEqual(pln.po_items[0].pending_qty, 1)
wo = make_wo_order_test_record(
item_code=item, qty=1,
company=so.company,
wip_warehouse='Work In Progress - _TC',
fg_warehouse='Finished Goods - _TC',
skip_transfer=1,
do_not_submit=True
)
wo.production_plan = pln.name
wo.production_plan_item = pln.po_items[0].name
wo.submit()
se = frappe.get_doc(make_se_from_wo(wo.name, "Manufacture", 1))
se.submit()
pln.reload()
self.assertEqual(pln.po_items[0].pending_qty, 0)
se.cancel()
pln.reload()
self.assertEqual(pln.po_items[0].pending_qty, 1)
def test_production_plan_pending_qty_independent_items(self):
"Test Prod Plan impact if items are added independently (no from SO or MR)."
from erpnext.manufacturing.doctype.work_order.test_work_order import make_wo_order_test_record
from erpnext.manufacturing.doctype.work_order.work_order import (
make_stock_entry as make_se_from_wo,
)
make_stock_entry(item_code="Raw Material Item 1",
target="Work In Progress - _TC",
qty=2, basic_rate=100
)
make_stock_entry(item_code="Raw Material Item 2",
target="Work In Progress - _TC",
qty=2, basic_rate=100
)
pln = create_production_plan(
item_code='Test Production Item 1',
skip_getting_mr_items=True
)
self.assertEqual(pln.po_items[0].pending_qty, 1)
wo = make_wo_order_test_record(
item_code='Test Production Item 1', qty=1,
company=pln.company,
wip_warehouse='Work In Progress - _TC',
fg_warehouse='Finished Goods - _TC',
skip_transfer=1,
do_not_submit=True
)
wo.production_plan = pln.name
wo.production_plan_item = pln.po_items[0].name
wo.submit()
se = frappe.get_doc(make_se_from_wo(wo.name, "Manufacture", 1))
se.submit()
pln.reload()
self.assertEqual(pln.po_items[0].pending_qty, 0)
se.cancel()
pln.reload()
self.assertEqual(pln.po_items[0].pending_qty, 1)
def test_qty_based_status(self):
pp = frappe.new_doc("Production Plan")
pp.po_items = [
frappe._dict(planned_qty=5, produce_qty=4)
]
self.assertFalse(pp.all_items_completed())
pp.po_items = [
frappe._dict(planned_qty=5, produce_qty=10),
frappe._dict(planned_qty=5, produce_qty=4)
]
self.assertFalse(pp.all_items_completed())
def create_production_plan(**args):
"""
sales_order (obj): Sales Order Doc Object
get_items_from (str): Sales Order/Material Request
skip_getting_mr_items (bool): Whether or not to plan for new MRs
"""
args = frappe._dict(args)
pln = frappe.get_doc({
@ -449,20 +615,35 @@ def create_production_plan(**args):
'company': args.company or '_Test Company',
'customer': args.customer or '_Test Customer',
'posting_date': nowdate(),
'include_non_stock_items': args.include_non_stock_items or 1,
'include_subcontracted_items': args.include_subcontracted_items or 1,
'ignore_existing_ordered_qty': args.ignore_existing_ordered_qty or 1,
'po_items': [{
'include_non_stock_items': args.include_non_stock_items or 0,
'include_subcontracted_items': args.include_subcontracted_items or 0,
'ignore_existing_ordered_qty': args.ignore_existing_ordered_qty or 0,
'get_items_from': 'Sales Order'
})
if not args.get("sales_order"):
pln.append('po_items', {
'use_multi_level_bom': args.use_multi_level_bom or 1,
'item_code': args.item_code,
'bom_no': frappe.db.get_value('Item', args.item_code, 'default_bom'),
'planned_qty': args.planned_qty or 1,
'planned_start_date': args.planned_start_date or now_datetime()
}]
})
mr_items = get_items_for_material_requests(pln.as_dict())
for d in mr_items:
pln.append('mr_items', d)
})
if args.get("get_items_from") == "Sales Order" and args.get("sales_order"):
so = args.get("sales_order")
pln.append('sales_orders', {
'sales_order': so.name,
'sales_order_date': so.transaction_date,
'customer': so.customer,
'grand_total': so.grand_total
})
pln.get_items()
if not args.get("skip_getting_mr_items"):
mr_items = get_items_for_material_requests(pln.as_dict())
for d in mr_items:
pln.append('mr_items', d)
if not args.do_not_save:
pln.insert()

View File

@ -201,6 +201,21 @@ class TestWorkOrder(ERPNextTestCase):
self.assertEqual(cint(bin1_on_end_production.reserved_qty_for_production),
cint(bin1_on_start_production.reserved_qty_for_production))
def test_reserved_qty_for_production_closed(self):
wo1 = make_wo_order_test_record(item="_Test FG Item", qty=2,
source_warehouse=self.warehouse)
item = wo1.required_items[0].item_code
bin_before = get_bin(item, self.warehouse)
bin_before.update_reserved_qty_for_production()
make_wo_order_test_record(item="_Test FG Item", qty=2,
source_warehouse=self.warehouse)
close_work_order(wo1.name, "Closed")
bin_after = get_bin(item, self.warehouse)
self.assertEqual(bin_before.reserved_qty_for_production, bin_after.reserved_qty_for_production)
def test_backflush_qty_for_overpduction_manufacture(self):
cancel_stock_entry = []
allow_overproduction("overproduction_percentage_for_work_order", 30)

View File

@ -8,6 +8,8 @@ from dateutil.relativedelta import relativedelta
from frappe import _
from frappe.model.document import Document
from frappe.model.mapper import get_mapped_doc
from frappe.query_builder import Case
from frappe.query_builder.functions import Sum
from frappe.utils import (
cint,
date_diff,
@ -74,7 +76,6 @@ class WorkOrder(Document):
self.set_required_items(reset_only_qty = len(self.get("required_items")))
def validate_sales_order(self):
if self.sales_order:
self.check_sales_order_on_hold_or_close()
@ -271,7 +272,7 @@ class WorkOrder(Document):
produced_qty = total_qty[0][0] if total_qty else 0
production_plan.run_method("update_produced_qty", produced_qty, self.production_plan_item)
production_plan.run_method("update_produced_pending_qty", produced_qty, self.production_plan_item)
def before_submit(self):
self.create_serial_no_batch_no()
@ -544,7 +545,7 @@ class WorkOrder(Document):
if node.is_bom:
operations.extend(_get_operations(node.name, qty=node.exploded_qty))
bom_qty = frappe.db.get_value("BOM", self.bom_no, "quantity")
bom_qty = frappe.get_cached_value("BOM", self.bom_no, "quantity")
operations.extend(_get_operations(self.bom_no, qty=1.0/bom_qty))
for correct_index, operation in enumerate(operations, start=1):
@ -625,7 +626,7 @@ class WorkOrder(Document):
frappe.delete_doc("Job Card", d.name)
def validate_production_item(self):
if frappe.db.get_value("Item", self.production_item, "has_variants"):
if frappe.get_cached_value("Item", self.production_item, "has_variants"):
frappe.throw(_("Work Order cannot be raised against a Item Template"), ItemHasVariantError)
if self.production_item:
@ -1175,3 +1176,27 @@ def create_pick_list(source_name, target_doc=None, for_qty=None):
doc.set_item_locations()
return doc
def get_reserved_qty_for_production(item_code: str, warehouse: str) -> float:
"""Get total reserved quantity for any item in specified warehouse"""
wo = frappe.qb.DocType("Work Order")
wo_item = frappe.qb.DocType("Work Order Item")
return (
frappe.qb
.from_(wo)
.from_(wo_item)
.select(Sum(Case()
.when(wo.skip_transfer == 0, wo_item.required_qty - wo_item.transferred_qty)
.else_(wo_item.required_qty - wo_item.consumed_qty))
)
.where(
(wo_item.item_code == item_code)
& (wo_item.parent == wo.name)
& (wo.docstatus == 1)
& (wo_item.source_warehouse == warehouse)
& (wo.status.notin(["Stopped", "Completed", "Closed"]))
& ((wo_item.required_qty > wo_item.transferred_qty)
| (wo_item.required_qty > wo_item.consumed_qty))
)
).run()[0][0] or 0.0
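A short usage sketch for the helper above, assuming it ships in work_order.py as shown in this hunk; the item and warehouse names are purely illustrative:

from erpnext.manufacturing.doctype.work_order.work_order import get_reserved_qty_for_production

# total qty still reserved by open (not Stopped/Completed/Closed) Work Orders for this item and warehouse
reserved = get_reserved_qty_for_production("_Test Item", "Stores - _TC")
# returns 0.0 when nothing is reserved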

View File

@ -329,7 +329,6 @@ execute:frappe.delete_doc_if_exists('Workspace', 'ERPNext Integrations Settings'
erpnext.patches.v14_0.set_payroll_cost_centers
erpnext.patches.v13_0.agriculture_deprecation_warning
erpnext.patches.v13_0.hospitality_deprecation_warning
erpnext.patches.v13_0.update_exchange_rate_settings
erpnext.patches.v13_0.update_asset_quantity_field
erpnext.patches.v13_0.delete_bank_reconciliation_detail
erpnext.patches.v13_0.enable_provisional_accounting
@ -350,3 +349,7 @@ erpnext.patches.v14_0.migrate_cost_center_allocations
erpnext.patches.v13_0.convert_to_website_item_in_item_card_group_template
erpnext.patches.v13_0.shopping_cart_to_ecommerce
erpnext.patches.v13_0.update_disbursement_account
erpnext.patches.v13_0.update_reserved_qty_closed_wo
erpnext.patches.v13_0.update_exchange_rate_settings
erpnext.patches.v14_0.delete_amazon_mws_doctype
erpnext.patches.v13_0.set_work_order_qty_in_so_from_mr

View File

@ -9,6 +9,8 @@ def execute():
FROM `tabBin`""",as_dict=1)
for entry in bin_details:
if not (entry.item_code and entry.warehouse):
continue
update_bin_qty(entry.get("item_code"), entry.get("warehouse"), {
"indented_qty": get_indented_qty(entry.get("item_code"), entry.get("warehouse"))
})

View File

@ -1,12 +0,0 @@
# Copyright (c) 2020, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
def execute():
count = frappe.db.sql("SELECT COUNT(*) FROM `tabSingles` WHERE doctype='Amazon MWS Settings' AND field='enable_sync';")[0][0]
if count == 0:
frappe.db.sql("UPDATE `tabSingles` SET field='enable_sync' WHERE doctype='Amazon MWS Settings' AND field='enable_synch';")
frappe.reload_doc("ERPNext Integrations", "doctype", "Amazon MWS Settings")

View File

@ -0,0 +1,36 @@
import frappe
def execute():
"""
1. Get submitted Work Orders with MR, MR Item and SO set
2. Get SO Item detail from MR Item detail in WO, and set in WO
3. Update work_order_qty in SO
"""
work_order = frappe.qb.DocType("Work Order")
query = (
frappe.qb.from_(work_order)
.select(
work_order.name, work_order.produced_qty,
work_order.material_request,
work_order.material_request_item,
work_order.sales_order
).where(
(work_order.material_request.isnotnull())
& (work_order.material_request_item.isnotnull())
& (work_order.sales_order.isnotnull())
& (work_order.docstatus == 1)
& (work_order.produced_qty > 0)
)
)
results = query.run(as_dict=True)
for row in results:
so_item = frappe.get_value(
"Material Request Item", row.material_request_item, "sales_order_item"
)
frappe.db.set_value("Work Order", row.name, "sales_order_item", so_item)
if so_item:
wo = frappe.get_doc("Work Order", row.name)
wo.update_work_order_qty_in_so()

View File

@ -0,0 +1,28 @@
import frappe
from erpnext.stock.utils import get_bin
def execute():
wo = frappe.qb.DocType("Work Order")
wo_item = frappe.qb.DocType("Work Order Item")
incorrect_item_wh = (
frappe.qb
.from_(wo)
.join(wo_item).on(wo.name == wo_item.parent)
.select(wo_item.item_code, wo.source_warehouse).distinct()
.where(
(wo.status == "Closed")
& (wo.docstatus == 1)
& (wo.source_warehouse.notnull())
)
).run()
for item_code, warehouse in incorrect_item_wh:
if not (item_code and warehouse):
continue
bin = get_bin(item_code, warehouse)
bin.update_reserved_qty_for_production()

View File

@ -0,0 +1,5 @@
import frappe
def execute():
frappe.delete_doc("DocType", "Amazon MWS Settings", ignore_missing=True)

View File

@ -6,9 +6,6 @@ from erpnext.setup.utils import get_exchange_rate
def execute():
frappe.reload_doc('crm', 'doctype', 'opportunity')
frappe.reload_doc('crm', 'doctype', 'opportunity_item')
opportunities = frappe.db.get_list('Opportunity', filters={
'opportunity_amount': ['>', 0]
}, fields=['name', 'company', 'currency', 'opportunity_amount'])
@ -20,15 +17,11 @@ def execute():
if opportunity.currency != company_currency:
conversion_rate = get_exchange_rate(opportunity.currency, company_currency)
base_opportunity_amount = flt(conversion_rate) * flt(opportunity.opportunity_amount)
grand_total = flt(opportunity.opportunity_amount)
base_grand_total = flt(conversion_rate) * flt(opportunity.opportunity_amount)
else:
conversion_rate = 1
base_opportunity_amount = grand_total = base_grand_total = flt(opportunity.opportunity_amount)
base_opportunity_amount = flt(opportunity.opportunity_amount)
frappe.db.set_value('Opportunity', opportunity.name, {
'conversion_rate': conversion_rate,
'base_opportunity_amount': base_opportunity_amount,
'grand_total': grand_total,
'base_grand_total': base_grand_total
'base_opportunity_amount': base_opportunity_amount
}, update_modified=False)

View File

@ -29,9 +29,11 @@ def execute():
""")
for item_code, warehouse in repost_for:
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
if not (item_code and warehouse):
continue
update_bin_qty(item_code, warehouse, {
"reserved_qty": get_reserved_qty(item_code, warehouse)
})
frappe.db.sql("""delete from tabBin
where exists(

View File

@ -14,6 +14,8 @@ def execute():
union
select item_code, warehouse from `tabStock Ledger Entry`) a"""):
try:
if not (item_code and warehouse):
continue
count += 1
update_bin_qty(item_code, warehouse, {
"indented_qty": get_indented_qty(item_code, warehouse),

View File

@ -3,6 +3,14 @@
frappe.ui.form.on('Gratuity', {
setup: function (frm) {
frm.set_query("salary_component", function () {
return {
filters: {
type: "Earning"
}
};
});
frm.set_query("expense_account", function () {
return {
filters: {
@ -24,7 +32,7 @@ frappe.ui.form.on('Gratuity', {
});
},
refresh: function (frm) {
if (frm.doc.docstatus == 1 && frm.doc.status == "Unpaid") {
if (frm.doc.docstatus == 1 && !frm.doc.pay_via_salary_slip && frm.doc.status == "Unpaid") {
frm.add_custom_button(__("Create Payment Entry"), function () {
return frappe.call({
method: 'erpnext.accounts.doctype.payment_entry.payment_entry.get_payment_entry',

View File

@ -1,7 +1,7 @@
{
"actions": [],
"autoname": "HR-GRA-PAY-.#####",
"creation": "2020-08-05 20:52:13.024683",
"creation": "2022-01-27 16:24:28.200061",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
@ -16,6 +16,9 @@
"company",
"gratuity_rule",
"section_break_5",
"pay_via_salary_slip",
"payroll_date",
"salary_component",
"payable_account",
"expense_account",
"mode_of_payment",
@ -78,18 +81,20 @@
"reqd": 1
},
{
"depends_on": "eval: !doc.pay_via_salary_slip",
"fieldname": "expense_account",
"fieldtype": "Link",
"label": "Expense Account",
"options": "Account",
"reqd": 1
"mandatory_depends_on": "eval: !doc.pay_via_salary_slip",
"options": "Account"
},
{
"depends_on": "eval: !doc.pay_via_salary_slip",
"fieldname": "mode_of_payment",
"fieldtype": "Link",
"label": "Mode of Payment",
"options": "Mode of Payment",
"reqd": 1
"mandatory_depends_on": "eval: !doc.pay_via_salary_slip",
"options": "Mode of Payment"
},
{
"fieldname": "gratuity_rule",
@ -151,23 +156,45 @@
"read_only": 1
},
{
"depends_on": "eval: !doc.pay_via_salary_slip",
"fieldname": "payable_account",
"fieldtype": "Link",
"label": "Payable Account",
"options": "Account",
"reqd": 1
"mandatory_depends_on": "eval: !doc.pay_via_salary_slip",
"options": "Account"
},
{
"fieldname": "cost_center",
"fieldtype": "Link",
"label": "Cost Center",
"options": "Cost Center"
},
{
"default": "1",
"fieldname": "pay_via_salary_slip",
"fieldtype": "Check",
"label": "Pay via Salary Slip"
},
{
"depends_on": "pay_via_salary_slip",
"fieldname": "payroll_date",
"fieldtype": "Date",
"label": "Payroll Date",
"mandatory_depends_on": "pay_via_salary_slip"
},
{
"depends_on": "pay_via_salary_slip",
"fieldname": "salary_component",
"fieldtype": "Link",
"label": "Salary Component",
"mandatory_depends_on": "pay_via_salary_slip",
"options": "Salary Component"
}
],
"index_web_pages_for_search": 1,
"is_submittable": 1,
"links": [],
"modified": "2022-01-19 12:54:37.306145",
"modified": "2022-02-02 14:00:45.536152",
"modified_by": "Administrator",
"module": "Payroll",
"name": "Gratuity",

View File

@ -21,7 +21,10 @@ class Gratuity(AccountsController):
self.status = "Unpaid"
def on_submit(self):
self.create_gl_entries()
if self.pay_via_salary_slip:
self.create_additional_salary()
else:
self.create_gl_entries()
def on_cancel(self):
self.ignore_linked_doctypes = ['GL Entry']
@ -64,6 +67,19 @@ class Gratuity(AccountsController):
return gl_entry
def create_additional_salary(self):
if self.pay_via_salary_slip:
additional_salary = frappe.new_doc('Additional Salary')
additional_salary.employee = self.employee
additional_salary.salary_component = self.salary_component
additional_salary.overwrite_salary_structure_amount = 0
additional_salary.amount = self.amount
additional_salary.payroll_date = self.payroll_date
additional_salary.company = self.company
additional_salary.ref_doctype = self.doctype
additional_salary.ref_docname = self.name
additional_salary.submit()
def set_total_advance_paid(self):
paid_amount = frappe.db.sql("""
select ifnull(sum(debit_in_account_currency), 0) as paid_amount

View File

@ -10,7 +10,7 @@ def get_data():
'transactions': [
{
'label': _('Payment'),
'items': ['Payment Entry']
'items': ['Payment Entry', 'Additional Salary']
}
]
}

View File

@ -18,27 +18,25 @@ from erpnext.regional.united_arab_emirates.setup import create_gratuity_rule
test_dependencies = ["Salary Component", "Salary Slip", "Account"]
class TestGratuity(unittest.TestCase):
@classmethod
def setUpClass(cls):
def setUp(self):
frappe.db.delete("Gratuity")
frappe.db.delete("Additional Salary", {"ref_doctype": "Gratuity"})
make_earning_salary_component(setup=True, test_tax=True, company_list=['_Test Company'])
make_deduction_salary_component(setup=True, test_tax=True, company_list=['_Test Company'])
def setUp(self):
frappe.db.sql("DELETE FROM `tabGratuity`")
def test_get_last_salary_slip_should_return_none_for_new_employee(self):
new_employee = make_employee("new_employee@salary.com", company='_Test Company')
salary_slip = get_last_salary_slip(new_employee)
assert salary_slip is None
def test_check_gratuity_amount_based_on_current_slab(self):
def test_check_gratuity_amount_based_on_current_slab_and_additional_salary_creation(self):
employee, sal_slip = create_employee_and_get_last_salary_slip()
rule = get_gratuity_rule("Rule Under Unlimited Contract on termination (UAE)")
gratuity = create_gratuity(pay_via_salary_slip=1, employee=employee, rule=rule.name)
gratuity = create_gratuity(employee=employee, rule=rule.name)
#work experience calculation
# work experience calculation
date_of_joining, relieving_date = frappe.db.get_value('Employee', employee, ['date_of_joining', 'relieving_date'])
employee_total_workings_days = (get_datetime(relieving_date) - get_datetime(date_of_joining)).days
@ -64,6 +62,9 @@ class TestGratuity(unittest.TestCase):
self.assertEqual(flt(gratuity_amount, 2), flt(gratuity.amount, 2))
# additional salary creation (Pay via salary slip)
self.assertTrue(frappe.db.exists("Additional Salary", {"ref_docname": gratuity.name}))
def test_check_gratuity_amount_based_on_all_previous_slabs(self):
employee, sal_slip = create_employee_and_get_last_salary_slip()
rule = get_gratuity_rule("Rule Under Limited Contract (UAE)")
@ -117,8 +118,8 @@ class TestGratuity(unittest.TestCase):
self.assertEqual(flt(gratuity.paid_amount,2), flt(gratuity.amount, 2))
def tearDown(self):
frappe.db.sql("DELETE FROM `tabGratuity`")
frappe.db.sql("DELETE FROM `tabAdditional Salary` WHERE ref_doctype = 'Gratuity'")
frappe.db.rollback()
def get_gratuity_rule(name):
rule = frappe.db.exists("Gratuity Rule", name)
@ -141,9 +142,14 @@ def create_gratuity(**args):
gratuity.employee = args.employee
gratuity.posting_date = getdate()
gratuity.gratuity_rule = args.rule or "Rule Under Limited Contract (UAE)"
gratuity.expense_account = args.expense_account or 'Payment Account - _TC'
gratuity.payable_account = args.payable_account or get_payable_account("_Test Company")
gratuity.mode_of_payment = args.mode_of_payment or 'Cash'
gratuity.pay_via_salary_slip = args.pay_via_salary_slip or 0
if gratuity.pay_via_salary_slip:
gratuity.payroll_date = getdate()
gratuity.salary_component = "Performance Bonus"
else:
gratuity.expense_account = args.expense_account or 'Payment Account - _TC'
gratuity.payable_account = args.payable_account or get_payable_account("_Test Company")
gratuity.mode_of_payment = args.mode_of_payment or 'Cash'
gratuity.save()
gratuity.submit()

View File

@ -527,11 +527,12 @@ def get_emp_list(sal_struct, cond, end_date, payroll_payable_account):
""" % cond, {"sal_struct": tuple(sal_struct), "from_date": end_date, "payroll_payable_account": payroll_payable_account}, as_dict=True)
def remove_payrolled_employees(emp_list, start_date, end_date):
new_emp_list = []
for employee_details in emp_list:
if frappe.db.exists("Salary Slip", {"employee": employee_details.employee, "start_date": start_date, "end_date": end_date, "docstatus": 1}):
emp_list.remove(employee_details)
if not frappe.db.exists("Salary Slip", {"employee": employee_details.employee, "start_date": start_date, "end_date": end_date, "docstatus": 1}):
new_emp_list.append(employee_details)
return emp_list
return new_emp_list
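The rewrite above stops mutating emp_list while iterating it; removing items from a list mid-iteration silently skips the element that follows each removal. A tiny standalone illustration of the pitfall (plain Python, no Frappe involved):

emps = ["a", "b", "c", "d"]
for e in emps:
	if e in ("a", "b"):
		emps.remove(e)
print(emps)  # ['b', 'c', 'd'] -- "b" was never visited, so it survives

emps = ["a", "b", "c", "d"]
new_emps = [e for e in emps if e not in ("a", "b")]  # build a new list instead
print(new_emps)  # ['c', 'd']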
@frappe.whitelist()
def get_start_end_dates(payroll_frequency, start_date=None, company=None):

View File

@ -124,7 +124,7 @@ class TestPayrollEntry(unittest.TestCase):
if not frappe.db.exists("Account", "_Test Payroll Payable - _TC"):
create_account(account_name="_Test Payroll Payable",
company="_Test Company", parent_account="Current Liabilities - _TC")
company="_Test Company", parent_account="Current Liabilities - _TC", account_type="Payable")
if not frappe.db.get_value("Company", "_Test Company", "default_payroll_payable_account") or \
frappe.db.get_value("Company", "_Test Company", "default_payroll_payable_account") != "_Test Payroll Payable - _TC":

View File

@ -6,6 +6,7 @@ import random
import unittest
import frappe
from frappe.model.document import Document
from frappe.utils import (
add_days,
add_months,
@ -687,20 +688,25 @@ def make_employee_salary_slip(user, payroll_frequency, salary_structure=None):
def make_salary_component(salary_components, test_tax, company_list=None):
for salary_component in salary_components:
if not frappe.db.exists('Salary Component', salary_component["salary_component"]):
if test_tax:
if salary_component["type"] == "Earning":
salary_component["is_tax_applicable"] = 1
elif salary_component["salary_component"] == "TDS":
salary_component["variable_based_on_taxable_salary"] = 1
salary_component["amount_based_on_formula"] = 0
salary_component["amount"] = 0
salary_component["formula"] = ""
salary_component["condition"] = ""
salary_component["doctype"] = "Salary Component"
salary_component["salary_component_abbr"] = salary_component["abbr"]
frappe.get_doc(salary_component).insert()
get_salary_component_account(salary_component["salary_component"], company_list)
if frappe.db.exists('Salary Component', salary_component["salary_component"]):
continue
if test_tax:
if salary_component["type"] == "Earning":
salary_component["is_tax_applicable"] = 1
elif salary_component["salary_component"] == "TDS":
salary_component["variable_based_on_taxable_salary"] = 1
salary_component["amount_based_on_formula"] = 0
salary_component["amount"] = 0
salary_component["formula"] = ""
salary_component["condition"] = ""
salary_component["salary_component_abbr"] = salary_component["abbr"]
doc = frappe.new_doc("Salary Component")
doc.update(salary_component)
doc.insert()
get_salary_component_account(doc, company_list)
def get_salary_component_account(sal_comp, company_list=None):
company = erpnext.get_default_company()
@ -708,7 +714,9 @@ def get_salary_component_account(sal_comp, company_list=None):
if company_list and company not in company_list:
company_list.append(company)
sal_comp = frappe.get_doc("Salary Component", sal_comp)
if not isinstance(sal_comp, Document):
sal_comp = frappe.get_doc("Salary Component", sal_comp)
if not sal_comp.get("accounts"):
for d in company_list:
company_abbr = frappe.get_cached_value('Company', d, 'abbr')
@ -726,7 +734,7 @@ def get_salary_component_account(sal_comp, company_list=None):
})
sal_comp.save()
def create_account(account_name, company, parent_account):
def create_account(account_name, company, parent_account, account_type=None):
company_abbr = frappe.get_cached_value('Company', company, 'abbr')
account = frappe.db.get_value("Account", account_name + " - " + company_abbr)
if not account:

View File

@ -151,6 +151,35 @@ class TestTimesheet(unittest.TestCase):
settings.ignore_employee_time_overlap = initial_setting
settings.save()
def test_timesheet_not_overlapping_with_continuous_timelogs(self):
emp = make_employee("test_employee_6@salary.com")
update_activity_type("_Test Activity Type")
timesheet = frappe.new_doc("Timesheet")
timesheet.employee = emp
timesheet.append(
'time_logs',
{
"billable": 1,
"activity_type": "_Test Activity Type",
"from_time": now_datetime(),
"to_time": now_datetime() + datetime.timedelta(hours=3),
"company": "_Test Company"
}
)
timesheet.append(
'time_logs',
{
"billable": 1,
"activity_type": "_Test Activity Type",
"from_time": now_datetime() + datetime.timedelta(hours=3),
"to_time": now_datetime() + datetime.timedelta(hours=4),
"company": "_Test Company"
}
)
timesheet.save() # should not throw an error
def test_to_time(self):
emp = make_employee("test_employee_6@salary.com")
from_time = now_datetime()

View File

@ -7,7 +7,7 @@ import json
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import add_to_date, flt, getdate, time_diff_in_hours
from frappe.utils import add_to_date, flt, get_datetime, getdate, time_diff_in_hours
from erpnext.controllers.queries import get_match_cond
from erpnext.hr.utils import validate_active_employee
@ -145,7 +145,7 @@ class Timesheet(Document):
if not (data.from_time and data.hours):
return
_to_time = add_to_date(data.from_time, hours=data.hours, as_datetime=True)
_to_time = get_datetime(add_to_date(data.from_time, hours=data.hours, as_datetime=True))
if data.to_time != _to_time:
data.to_time = _to_time
@ -171,39 +171,54 @@ class Timesheet(Document):
.format(args.idx, self.name, existing.name), OverlapError)
def get_overlap_for(self, fieldname, args, value):
cond = "ts.`{0}`".format(fieldname)
if fieldname == 'workstation':
cond = "tsd.`{0}`".format(fieldname)
timesheet = frappe.qb.DocType("Timesheet")
timelog = frappe.qb.DocType("Timesheet Detail")
existing = frappe.db.sql("""select ts.name as name, tsd.from_time as from_time, tsd.to_time as to_time from
`tabTimesheet Detail` tsd, `tabTimesheet` ts where {0}=%(val)s and tsd.parent = ts.name and
(
(%(from_time)s > tsd.from_time and %(from_time)s < tsd.to_time) or
(%(to_time)s > tsd.from_time and %(to_time)s < tsd.to_time) or
(%(from_time)s <= tsd.from_time and %(to_time)s >= tsd.to_time))
and tsd.name!=%(name)s
and ts.name!=%(parent)s
and ts.docstatus < 2""".format(cond),
{
"val": value,
"from_time": args.from_time,
"to_time": args.to_time,
"name": args.name or "No Name",
"parent": args.parent or "No Name"
}, as_dict=True)
# check internal overlap
for time_log in self.time_logs:
if not (time_log.from_time and time_log.to_time
and args.from_time and args.to_time): continue
from_time = get_datetime(args.from_time)
to_time = get_datetime(args.to_time)
if (fieldname != 'workstation' or args.get(fieldname) == time_log.get(fieldname)) and \
args.idx != time_log.idx and ((args.from_time > time_log.from_time and args.from_time < time_log.to_time) or
(args.to_time > time_log.from_time and args.to_time < time_log.to_time) or
(args.from_time <= time_log.from_time and args.to_time >= time_log.to_time)):
return self
existing = (
frappe.qb.from_(timesheet)
.join(timelog)
.on(timelog.parent == timesheet.name)
.select(timesheet.name.as_('name'), timelog.from_time.as_('from_time'), timelog.to_time.as_('to_time'))
.where(
(timelog.name != (args.name or "No Name"))
& (timesheet.name != (args.parent or "No Name"))
& (timesheet.docstatus < 2)
& (timesheet[fieldname] == value)
& (
((from_time > timelog.from_time) & (from_time < timelog.to_time))
| ((to_time > timelog.from_time) & (to_time < timelog.to_time))
| ((from_time <= timelog.from_time) & (to_time >= timelog.to_time))
)
)
).run(as_dict=True)
if self.check_internal_overlap(fieldname, args):
return self
return existing[0] if existing else None
def check_internal_overlap(self, fieldname, args):
for time_log in self.time_logs:
if not (time_log.from_time and time_log.to_time
and args.from_time and args.to_time):
continue
from_time = get_datetime(time_log.from_time)
to_time = get_datetime(time_log.to_time)
args_from_time = get_datetime(args.from_time)
args_to_time = get_datetime(args.to_time)
if (args.get(fieldname) == time_log.get(fieldname)) and (args.idx != time_log.idx) and (
(args_from_time > from_time and args_from_time < to_time)
or (args_to_time > from_time and args_to_time < to_time)
or (args_from_time <= from_time and args_to_time >= to_time)
):
return True
return False
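Both the query above and check_internal_overlap() apply the same open-interval overlap test: either endpoint of the new log falls strictly inside an existing log, or the new log fully covers it. A small standalone sketch with made-up datetimes (back-to-back logs, as exercised by the new timesheet test, do not overlap):

from datetime import datetime, timedelta

def overlaps(a_from, a_to, b_from, b_to):
	# new log A against existing log B: either endpoint of A strictly inside B, or A covering B
	return (
		(b_from < a_from < b_to)
		or (b_from < a_to < b_to)
		or (a_from <= b_from and a_to >= b_to)
	)

start = datetime(2022, 1, 1, 9, 0)
hour = timedelta(hours=1)
assert overlaps(start, start + 2 * hour, start + hour, start + 3 * hour)   # partial overlap
assert not overlaps(start, start + hour, start + hour, start + 2 * hour)   # back-to-back is allowed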
def update_cost(self):
for data in self.time_logs:
if data.activity_type or data.is_billable:

View File

@ -14,12 +14,6 @@
"to_time",
"hours",
"completed",
"section_break_7",
"completed_qty",
"workstation",
"column_break_12",
"operation",
"operation_id",
"project_details",
"project",
"project_name",
@ -83,43 +77,6 @@
"fieldtype": "Check",
"label": "Completed"
},
{
"fieldname": "section_break_7",
"fieldtype": "Section Break"
},
{
"depends_on": "eval:parent.work_order",
"fieldname": "completed_qty",
"fieldtype": "Float",
"label": "Completed Qty"
},
{
"depends_on": "eval:parent.work_order",
"fieldname": "workstation",
"fieldtype": "Link",
"label": "Workstation",
"options": "Workstation",
"read_only": 1
},
{
"fieldname": "column_break_12",
"fieldtype": "Column Break"
},
{
"depends_on": "eval:parent.work_order",
"fieldname": "operation",
"fieldtype": "Link",
"label": "Operation",
"options": "Operation",
"read_only": 1
},
{
"depends_on": "eval:parent.work_order",
"fieldname": "operation_id",
"fieldtype": "Data",
"hidden": 1,
"label": "Operation Id"
},
{
"fieldname": "project_details",
"fieldtype": "Section Break"
@ -267,7 +224,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2021-05-18 12:19:33.205940",
"modified": "2022-02-17 16:53:34.878798",
"modified_by": "Administrator",
"module": "Projects",
"name": "Timesheet Detail",
@ -275,5 +232,6 @@
"permissions": [],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "ASC"
"sort_order": "ASC",
"states": []
}

View File

@ -1463,7 +1463,8 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
"item_code": d.item_code,
"pricing_rules": d.pricing_rules,
"parenttype": d.parenttype,
"parent": d.parent
"parent": d.parent,
"price_list_rate": d.price_list_rate
})
}
});
@ -2283,20 +2284,12 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
coupon_code() {
var me = this;
if (this.frm.doc.coupon_code) {
frappe.run_serially([
() => this.frm.doc.ignore_pricing_rule=1,
() => me.ignore_pricing_rule(),
() => this.frm.doc.ignore_pricing_rule=0,
() => me.apply_pricing_rule(),
() => this.frm.save()
]);
} else {
frappe.run_serially([
() => this.frm.doc.ignore_pricing_rule=1,
() => me.ignore_pricing_rule()
]);
}
frappe.run_serially([
() => this.frm.doc.ignore_pricing_rule=1,
() => me.ignore_pricing_rule(),
() => this.frm.doc.ignore_pricing_rule=0,
() => me.apply_pricing_rule()
]);
}
};

View File

@ -219,7 +219,6 @@ def get_regional_address_details(party_details, doctype, company):
if not party_details.place_of_supply: return party_details
if not party_details.company_gstin: return party_details
if not party_details.supplier_gstin: return party_details
if ((doctype in ("Sales Invoice", "Delivery Note", "Sales Order") and party_details.company_gstin
and party_details.company_gstin[:2] != party_details.place_of_supply[:2]) or (doctype in ("Purchase Invoice",

View File

@ -40,7 +40,11 @@ frappe.query_reports["DATEV"] = {
});
query_report.page.add_menu_item(__("Download DATEV File"), () => {
const filters = JSON.stringify(query_report.get_values());
const filters = encodeURIComponent(
JSON.stringify(
query_report.get_values()
)
);
window.open(`/api/method/erpnext.regional.report.datev.datev.download_datev_csv?filters=${filters}`);
});

View File

@ -17,7 +17,7 @@ frappe.query_reports["GSTR-1"] = {
"fieldtype": "Link",
"options": "Address",
"get_query": function () {
var company = frappe.query_report.get_filter_value('company');
let company = frappe.query_report.get_filter_value('company');
if (company) {
return {
"query": 'frappe.contacts.doctype.address.address.address_query',
@ -26,6 +26,11 @@ frappe.query_reports["GSTR-1"] = {
}
}
},
{
"fieldname": "company_gstin",
"label": __("Company GSTIN"),
"fieldtype": "Select"
},
{
"fieldname": "from_date",
"label": __("From Date"),
@ -60,10 +65,21 @@ frappe.query_reports["GSTR-1"] = {
}
],
onload: function (report) {
let filters = report.get_values();
frappe.call({
method: 'erpnext.regional.report.gstr_1.gstr_1.get_company_gstins',
args: {
company: filters.company
},
callback: function(r) {
frappe.query_report.page.fields_dict.company_gstin.df.options = r.message;
frappe.query_report.page.fields_dict.company_gstin.refresh();
}
});
report.page.add_inner_button(__("Download as JSON"), function () {
var filters = report.get_values();
frappe.call({
method: 'erpnext.regional.report.gstr_1.gstr_1.get_json',
args: {

View File

@ -28,7 +28,7 @@ class Gstr1Report(object):
posting_date,
base_grand_total,
base_rounded_total,
COALESCE(NULLIF(customer_gstin,''), NULLIF(billing_address_gstin, '')) as customer_gstin,
NULLIF(billing_address_gstin, '') as billing_address_gstin,
place_of_supply,
ecommerce_gstin,
reverse_charge,
@ -253,13 +253,14 @@ class Gstr1Report(object):
for opts in (("company", " and company=%(company)s"),
("from_date", " and posting_date>=%(from_date)s"),
("to_date", " and posting_date<=%(to_date)s"),
("company_address", " and company_address=%(company_address)s")):
("company_address", " and company_address=%(company_address)s"),
("company_gstin", " and company_gstin=%(company_gstin)s")):
if self.filters.get(opts[0]):
conditions += opts[1]
if self.filters.get("type_of_business") == "B2B":
conditions += "AND IFNULL(gst_category, '') in ('Registered Regular', 'Deemed Export', 'SEZ') AND is_return != 1 AND is_debit_note !=1"
conditions += "AND IFNULL(gst_category, '') in ('Registered Regular', 'Registered Composition', 'Deemed Export', 'SEZ') AND is_return != 1 AND is_debit_note !=1"
if self.filters.get("type_of_business") in ("B2C Large", "B2C Small"):
b2c_limit = frappe.db.get_single_value('GST Settings', 'b2c_limit')
@ -383,7 +384,7 @@ class Gstr1Report(object):
for invoice, items in self.invoice_items.items():
if invoice not in self.items_based_on_tax_rate and invoice not in unidentified_gst_accounts_invoice \
and self.invoices.get(invoice, {}).get('export_type') == "Without Payment of Tax" \
and self.invoices.get(invoice, {}).get('gst_category') == "Overseas":
and self.invoices.get(invoice, {}).get('gst_category') in ("Overseas", "SEZ"):
self.items_based_on_tax_rate.setdefault(invoice, {}).setdefault(0, items.keys())
def get_columns(self):
@ -409,7 +410,7 @@ class Gstr1Report(object):
if self.filters.get("type_of_business") == "B2B":
self.invoice_columns = [
{
"fieldname": "customer_gstin",
"fieldname": "billing_address_gstin",
"label": "GSTIN/UIN of Recipient",
"fieldtype": "Data",
"width": 150
@ -516,7 +517,7 @@ class Gstr1Report(object):
elif self.filters.get("type_of_business") == "CDNR-REG":
self.invoice_columns = [
{
"fieldname": "customer_gstin",
"fieldname": "billing_address_gstin",
"label": "GSTIN/UIN of Recipient",
"fieldtype": "Data",
"width": 150
@ -817,7 +818,7 @@ def get_json(filters, report_name, data):
res = {}
if filters["type_of_business"] == "B2B":
for item in report_data[:-1]:
res.setdefault(item["customer_gstin"], {}).setdefault(item["invoice_number"],[]).append(item)
res.setdefault(item["billing_address_gstin"], {}).setdefault(item["invoice_number"],[]).append(item)
out = get_b2b_json(res, gstin)
gst_json["b2b"] = out
@ -841,7 +842,7 @@ def get_json(filters, report_name, data):
gst_json["exp"] = out
elif filters["type_of_business"] == "CDNR-REG":
for item in report_data[:-1]:
res.setdefault(item["customer_gstin"], {}).setdefault(item["invoice_number"],[]).append(item)
res.setdefault(item["billing_address_gstin"], {}).setdefault(item["invoice_number"],[]).append(item)
out = get_cdnr_reg_json(res, gstin)
gst_json["cdnr"] = out
@ -875,7 +876,7 @@ def get_json(filters, report_name, data):
}
def get_b2b_json(res, gstin):
inv_type, out = {"Registered Regular": "R", "Deemed Export": "DE", "URD": "URD", "SEZ": "SEZ"}, []
out = []
for gst_in in res:
b2b_item, inv = {"ctin": gst_in, "inv": []}, []
if not gst_in: continue
@ -889,7 +890,7 @@ def get_b2b_json(res, gstin):
inv_item = get_basic_invoice_detail(invoice[0])
inv_item["pos"] = "%02d" % int(invoice[0]["place_of_supply"].split('-')[0])
inv_item["rchrg"] = invoice[0]["reverse_charge"]
inv_item["inv_typ"] = inv_type.get(invoice[0].get("gst_category", ""),"")
inv_item["inv_typ"] = get_invoice_type(invoice[0])
if inv_item["pos"]=="00": continue
inv_item["itms"] = []
@ -1044,7 +1045,7 @@ def get_cdnr_reg_json(res, gstin):
"ntty": invoice[0]["document_type"],
"pos": "%02d" % int(invoice[0]["place_of_supply"].split('-')[0]),
"rchrg": invoice[0]["reverse_charge"],
"inv_typ": get_invoice_type_for_cdnr(invoice[0])
"inv_typ": get_invoice_type(invoice[0])
}
inv_item["itms"] = []
@ -1069,7 +1070,7 @@ def get_cdnr_unreg_json(res, gstin):
"val": abs(flt(items[0]["invoice_value"])),
"ntty": items[0]["document_type"],
"pos": "%02d" % int(items[0]["place_of_supply"].split('-')[0]),
"typ": get_invoice_type_for_cdnrur(items[0])
"typ": get_invoice_type(items[0])
}
inv_item["itms"] = []
@ -1110,29 +1111,21 @@ def get_exempted_json(data):
return out
def get_invoice_type_for_cdnr(row):
if row.get('gst_category') == 'SEZ':
if row.get('export_type') == 'WPAY':
invoice_type = 'SEWP'
else:
invoice_type = 'SEWOP'
elif row.get('gst_category') == 'Deemed Export':
invoice_type = 'DE'
elif row.get('gst_category') == 'Registered Regular':
invoice_type = 'R'
def get_invoice_type(row):
gst_category = row.get('gst_category')
return invoice_type
if gst_category == 'SEZ':
return 'SEWP' if row.get('export_type') == 'WPAY' else 'SEWOP'
def get_invoice_type_for_cdnrur(row):
if row.get('gst_category') == 'Overseas':
if row.get('export_type') == 'WPAY':
invoice_type = 'EXPWP'
else:
invoice_type = 'EXPWOP'
elif row.get('gst_category') == 'Unregistered':
invoice_type = 'B2CL'
if gst_category == 'Overseas':
return 'EXPWP' if row.get('export_type') == 'WPAY' else 'EXPWOP'
return invoice_type
return ({
'Deemed Export': 'DE',
'Registered Regular': 'R',
'Registered Composition': 'R',
'Unregistered': 'B2CL'
}).get(gst_category)
def get_basic_invoice_detail(row):
return {
@ -1154,7 +1147,7 @@ def get_rate_and_tax_details(row, gstin):
# calculate tax amount added
tax = flt((row["taxable_value"]*rate)/100.0, 2)
frappe.errprint([tax, tax/2])
if row.get("customer_gstin") and gstin[0:2] == row["customer_gstin"][0:2]:
if row.get("billing_address_gstin") and gstin[0:2] == row["billing_address_gstin"][0:2]:
itm_det.update({"camt": flt(tax/2.0, 2), "samt": flt(tax/2.0, 2)})
else:
itm_det.update({"iamt": tax})
@ -1199,4 +1192,24 @@ def is_inter_state(invoice_detail):
if invoice_detail.place_of_supply.split("-")[0] != invoice_detail.company_gstin[:2]:
return True
else:
return False
return False
@frappe.whitelist()
def get_company_gstins(company):
address = frappe.qb.DocType("Address")
links = frappe.qb.DocType("Dynamic Link")
addresses = frappe.qb.from_(address).inner_join(links).on(
address.name == links.parent
).select(
address.gstin
).where(
links.link_doctype == 'Company'
).where(
links.link_name == company
).run(as_dict=1)
address_list = [''] + [d.gstin for d in addresses]
return address_list

View File

@ -102,7 +102,7 @@ def make_custom_fields():
]
}
create_custom_fields(custom_fields, update=True)
create_custom_fields(custom_fields, ignore_validate=True, update=True)
def update_regional_tax_settings(country, company):
create_ksa_vat_setting(company)

View File

@ -6,7 +6,7 @@ import json
import frappe
import frappe.permissions
from frappe.core.doctype.user_permission.test_user_permission import create_user
from frappe.utils import add_days, flt, getdate, nowdate
from frappe.utils import add_days, flt, getdate, nowdate, today
from erpnext.controllers.accounts_controller import update_child_qty_rate
from erpnext.maintenance.doctype.maintenance_schedule.test_maintenance_schedule import (
@ -1399,6 +1399,48 @@ class TestSalesOrder(ERPNextTestCase):
so.load_from_db()
self.assertEqual(so.billing_status, 'Fully Billed')
def test_so_back_updated_from_wo_via_mr(self):
"SO -> MR (Manufacture) -> WO. Test if WO Qty is updated in SO."
from erpnext.manufacturing.doctype.work_order.work_order import (
make_stock_entry as make_se_from_wo,
)
from erpnext.stock.doctype.material_request.material_request import raise_work_orders
so = make_sales_order(item_list=[{"item_code": "_Test FG Item","qty": 2, "rate":100}])
mr = make_material_request(so.name)
mr.material_request_type = "Manufacture"
mr.schedule_date = today()
mr.submit()
# WO from MR
wo_name = raise_work_orders(mr.name)[0]
wo = frappe.get_doc("Work Order", wo_name)
wo.wip_warehouse = "Work In Progress - _TC"
wo.skip_transfer = True
self.assertEqual(wo.sales_order, so.name)
self.assertEqual(wo.sales_order_item, so.items[0].name)
wo.submit()
make_stock_entry(item_code="_Test Item", # Stock RM
target="Work In Progress - _TC",
qty=4, basic_rate=100
)
make_stock_entry(item_code="_Test Item Home Desktop 100", # Stock RM
target="Work In Progress - _TC",
qty=4, basic_rate=100
)
se = frappe.get_doc(make_se_from_wo(wo.name, "Manufacture", 2))
se.submit() # Finish WO
mr.reload()
wo.reload()
so.reload()
self.assertEqual(so.items[0].work_order_qty, wo.produced_qty)
self.assertEqual(mr.status, "Manufactured")
def automatically_fetch_payment_terms(enable=1):
accounts_settings = frappe.get_doc("Accounts Settings")
accounts_settings.automatically_fetch_payment_terms = enable

View File

@ -169,6 +169,21 @@ erpnext.PointOfSale.Payment = class {
}
});
frappe.ui.form.on('POS Invoice', 'coupon_code', (frm) => {
if (!frm.doc.ignore_pricing_rule) {
if (frm.doc.coupon_code) {
frappe.run_serially([
() => frm.doc.ignore_pricing_rule=1,
() => frm.trigger('ignore_pricing_rule'),
() => frm.doc.ignore_pricing_rule=0,
() => frm.trigger('apply_pricing_rule'),
() => frm.save(),
() => this.update_totals_section(frm.doc)
]);
}
}
});
this.setup_listener_for_payments();
this.$payment_modes.on('click', '.shortcut', function() {

View File

@ -0,0 +1,84 @@
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
/* eslint-disable */
function get_filters() {
let filters = [
{
"fieldname":"company",
"label": __("Company"),
"fieldtype": "Link",
"options": "Company",
"default": frappe.defaults.get_user_default("Company"),
"reqd": 1
},
{
"fieldname":"period_start_date",
"label": __("Start Date"),
"fieldtype": "Date",
"reqd": 1,
"default": frappe.datetime.add_months(frappe.datetime.get_today(), -1)
},
{
"fieldname":"period_end_date",
"label": __("End Date"),
"fieldtype": "Date",
"reqd": 1,
"default": frappe.datetime.get_today()
},
{
"fieldname":"sales_order",
"label": __("Sales Order"),
"fieldtype": "MultiSelectList",
"width": 100,
"options": "Sales Order",
"get_data": function(txt) {
return frappe.db.get_link_options("Sales Order", txt, this.filters());
},
"filters": () => {
return {
docstatus: 1,
payment_terms_template: ['not in', ['']],
company: frappe.query_report.get_filter_value("company"),
transaction_date: ['between', [frappe.query_report.get_filter_value("period_start_date"), frappe.query_report.get_filter_value("period_end_date")]]
}
},
on_change: function(){
frappe.query_report.refresh();
}
}
]
return filters;
}
frappe.query_reports["Payment Terms Status for Sales Order"] = {
"filters": get_filters(),
"formatter": function(value, row, column, data, default_formatter){
if(column.fieldname == 'invoices' && value) {
const invoices = value.split(',');
const invoice_formatter = (prev_value, curr_value) => {
if(prev_value != "") {
return prev_value + ", " + default_formatter(curr_value, row, column, data);
}
else {
return default_formatter(curr_value, row, column, data);
}
}
return invoices.reduce(invoice_formatter, "")
}
else if (column.fieldname == 'paid_amount' && value){
let formatted_value = default_formatter(value, row, column, data);
if(value > 0) {
formatted_value = "<span style='color:green;'>" + formatted_value + "</span>";
}
return formatted_value;
}
else if (column.fieldname == 'status' && value == 'Completed'){
return "<span style='color:green;'>" + default_formatter(value, row, column, data) + "</span>";
}
return default_formatter(value, row, column, data);
},
};

View File

@ -0,0 +1,38 @@
{
"add_total_row": 1,
"columns": [],
"creation": "2021-12-28 10:39:34.533964",
"disable_prepared_report": 0,
"disabled": 0,
"docstatus": 0,
"doctype": "Report",
"filters": [],
"idx": 0,
"is_standard": "Yes",
"modified": "2021-12-30 10:42:06.058457",
"modified_by": "Administrator",
"module": "Selling",
"name": "Payment Terms Status for Sales Order",
"owner": "Administrator",
"prepared_report": 0,
"ref_doctype": "Sales Order",
"report_name": "Payment Terms Status for Sales Order",
"report_type": "Script Report",
"roles": [
{
"role": "Sales User"
},
{
"role": "Sales Manager"
},
{
"role": "Maintenance User"
},
{
"role": "Accounts User"
},
{
"role": "Stock User"
}
]
}

View File

@ -0,0 +1,205 @@
# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# License: MIT. See LICENSE
import frappe
from frappe import _, qb, query_builder
from frappe.query_builder import functions
def get_columns():
columns = [
{
"label": _("Sales Order"),
"fieldname": "name",
"fieldtype": "Link",
"options": "Sales Order",
},
{
"label": _("Posting Date"),
"fieldname": "submitted",
"fieldtype": "Date",
},
{
"label": _("Payment Term"),
"fieldname": "payment_term",
"fieldtype": "Data",
},
{
"label": _("Description"),
"fieldname": "description",
"fieldtype": "Data",
},
{
"label": _("Due Date"),
"fieldname": "due_date",
"fieldtype": "Date",
},
{
"label": _("Invoice Portion"),
"fieldname": "invoice_portion",
"fieldtype": "Percent",
},
{
"label": _("Payment Amount"),
"fieldname": "base_payment_amount",
"fieldtype": "Currency",
"options": "currency",
},
{
"label": _("Paid Amount"),
"fieldname": "paid_amount",
"fieldtype": "Currency",
"options": "currency",
},
{
"label": _("Invoices"),
"fieldname": "invoices",
"fieldtype": "Link",
"options": "Sales Invoice",
},
{
"label": _("Status"),
"fieldname": "status",
"fieldtype": "Data",
},
{
"label": _("Currency"),
"fieldname": "currency",
"fieldtype": "Currency",
"hidden": 1
}
]
return columns
def get_conditions(filters):
"""
Convert filter options to conditions used in query
"""
filters = frappe._dict(filters) if filters else frappe._dict({})
conditions = frappe._dict({})
conditions.company = filters.company or frappe.defaults.get_user_default("company")
conditions.end_date = filters.period_end_date or frappe.utils.today()
conditions.start_date = filters.period_start_date or frappe.utils.add_months(
conditions.end_date, -1
)
conditions.sales_order = filters.sales_order or []
return conditions
def get_so_with_invoices(filters):
"""
Get Sales Orders that have a Payment Terms Template, along with their associated Invoices
"""
sorders = []
so = qb.DocType("Sales Order")
ps = qb.DocType("Payment Schedule")
datediff = query_builder.CustomFunction("DATEDIFF", ["cur_date", "due_date"])
ifelse = query_builder.CustomFunction("IF", ["condition", "then", "else"])
conditions = get_conditions(filters)
query_so = (
qb.from_(so)
.join(ps)
.on(ps.parent == so.name)
.select(
so.name,
so.transaction_date.as_("submitted"),
ifelse(datediff(ps.due_date, functions.CurDate()) < 0, "Overdue", "Unpaid").as_("status"),
ps.payment_term,
ps.description,
ps.due_date,
ps.invoice_portion,
ps.base_payment_amount,
ps.paid_amount,
)
.where(
(so.docstatus == 1)
& (so.payment_terms_template != "NULL")
& (so.company == conditions.company)
& (so.transaction_date[conditions.start_date : conditions.end_date])
)
.orderby(so.name, so.transaction_date, ps.due_date)
)
if conditions.sales_order != []:
query_so = query_so.where(so.name.isin(conditions.sales_order))
sorders = query_so.run(as_dict=True)
invoices = []
if sorders != []:
soi = qb.DocType("Sales Order Item")
si = qb.DocType("Sales Invoice")
sii = qb.DocType("Sales Invoice Item")
query_inv = (
qb.from_(sii)
.right_join(si)
.on(si.name == sii.parent)
.inner_join(soi)
.on(soi.name == sii.so_detail)
.select(sii.sales_order, sii.parent.as_("invoice"), si.base_grand_total.as_("invoice_amount"))
.where((sii.sales_order.isin([x.name for x in sorders])) & (si.docstatus == 1))
.groupby(sii.parent)
)
invoices = query_inv.run(as_dict=True)
return sorders, invoices
def set_payment_terms_statuses(sales_orders, invoices, filters):
"""
Compute the status of each payment term, allocating associated sales invoices on a FIFO basis
"""
for so in sales_orders:
so.currency = frappe.get_cached_value('Company', filters.get('company'), 'default_currency')
so.invoices = ""
for inv in [x for x in invoices if x.sales_order == so.name and x.invoice_amount > 0]:
if so.base_payment_amount - so.paid_amount > 0:
amount = so.base_payment_amount - so.paid_amount
if inv.invoice_amount >= amount:
inv.invoice_amount -= amount
so.paid_amount += amount
so.invoices += "," + inv.invoice
so.status = "Completed"
break
else:
so.paid_amount += inv.invoice_amount
inv.invoice_amount = 0
so.invoices += "," + inv.invoice
so.status = "Partly Paid"
return sales_orders, invoices
def prepare_chart(s_orders):
if len(set([x.name for x in s_orders])) == 1:
chart = {
"data": {
"labels": [term.payment_term for term in s_orders],
"datasets": [
{"name": "Payment Amount", "values": [x.base_payment_amount for x in s_orders],},
{"name": "Paid Amount", "values": [x.paid_amount for x in s_orders],},
],
},
"type": "bar",
}
return chart
def execute(filters=None):
columns = get_columns()
sales_orders, so_invoices = get_so_with_invoices(filters)
sales_orders, so_invoices = set_payment_terms_statuses(sales_orders, so_invoices, filters)
data = sales_orders
message = []
chart = prepare_chart(sales_orders)
return columns, data, message, chart

View File

@ -0,0 +1,198 @@
import datetime
import frappe
from frappe.utils import add_days
from erpnext.selling.doctype.sales_order.sales_order import make_sales_invoice
from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
from erpnext.selling.report.payment_terms_status_for_sales_order.payment_terms_status_for_sales_order import (
execute,
)
from erpnext.stock.doctype.item.test_item import create_item
from erpnext.tests.utils import ERPNextTestCase
test_dependencies = ["Sales Order", "Item", "Sales Invoice", "Payment Terms Template"]
class TestPaymentTermsStatusForSalesOrder(ERPNextTestCase):
def create_payment_terms_template(self):
# create template for 50-50 payments
template = None
if frappe.db.exists("Payment Terms Template", "_Test 50-50"):
template = frappe.get_doc("Payment Terms Template", "_Test 50-50")
else:
template = frappe.get_doc(
{
"doctype": "Payment Terms Template",
"template_name": "_Test 50-50",
"terms": [
{
"doctype": "Payment Terms Template Detail",
"due_date_based_on": "Day(s) after invoice date",
"payment_term_name": "_Test 50% on 15 Days",
"description": "_Test 50-50",
"invoice_portion": 50,
"credit_days": 15,
},
{
"doctype": "Payment Terms Template Detail",
"due_date_based_on": "Day(s) after invoice date",
"payment_term_name": "_Test 50% on 30 Days",
"description": "_Test 50-50",
"invoice_portion": 50,
"credit_days": 30,
},
],
}
)
template.insert()
self.template = template
def test_payment_terms_status(self):
self.create_payment_terms_template()
item = create_item(item_code="_Test Excavator", is_stock_item=0)
so = make_sales_order(
transaction_date="2021-06-15",
delivery_date=add_days("2021-06-15", -30),
item=item.item_code,
qty=10,
rate=100000,
do_not_save=True,
)
so.po_no = ""
so.taxes_and_charges = ""
so.taxes = ""
so.payment_terms_template = self.template.name
so.save()
so.submit()
# make invoice with 60% of the total sales order value
sinv = make_sales_invoice(so.name)
sinv.taxes_and_charges = ""
sinv.taxes = ""
sinv.items[0].qty = 6
sinv.insert()
sinv.submit()
columns, data, message, chart = execute(
{
"company": "_Test Company",
"period_start_date": "2021-06-01",
"period_end_date": "2021-06-30",
"sales_order": [so.name],
}
)
expected_value = [
{
"name": so.name,
"submitted": datetime.date(2021, 6, 15),
"status": "Completed",
"payment_term": None,
"description": "_Test 50-50",
"due_date": datetime.date(2021, 6, 30),
"invoice_portion": 50.0,
"currency": "INR",
"base_payment_amount": 500000.0,
"paid_amount": 500000.0,
"invoices": ","+sinv.name,
},
{
"name": so.name,
"submitted": datetime.date(2021, 6, 15),
"status": "Partly Paid",
"payment_term": None,
"description": "_Test 50-50",
"due_date": datetime.date(2021, 7, 15),
"invoice_portion": 50.0,
"currency": "INR",
"base_payment_amount": 500000.0,
"paid_amount": 100000.0,
"invoices": ","+sinv.name,
},
]
self.assertEqual(data, expected_value)
def create_exchange_rate(self, date):
# make an entry in the Currency Exchange list; it serves as a static exchange rate
if frappe.db.exists({'doctype': "Currency Exchange",'date': date,'from_currency': 'USD', 'to_currency':'INR'}):
return
else:
doc = frappe.get_doc({
'doctype': "Currency Exchange",
'date': date,
'from_currency': 'USD',
'to_currency': frappe.get_cached_value("Company", '_Test Company','default_currency'),
'exchange_rate': 70,
'for_buying': True,
'for_selling': True
})
doc.insert()
def test_alternate_currency(self):
transaction_date = "2021-06-15"
self.create_payment_terms_template()
self.create_exchange_rate(transaction_date)
item = create_item(item_code="_Test Excavator", is_stock_item=0)
so = make_sales_order(
transaction_date=transaction_date,
currency="USD",
delivery_date=add_days(transaction_date, -30),
item=item.item_code,
qty=10,
rate=10000,
do_not_save=True,
)
so.po_no = ""
so.taxes_and_charges = ""
so.taxes = ""
so.payment_terms_template = self.template.name
so.save()
so.submit()
# make invoice with 60% of the total sales order value
sinv = make_sales_invoice(so.name)
sinv.currency = "USD"
sinv.taxes_and_charges = ""
sinv.taxes = ""
sinv.items[0].qty = 6
sinv.insert()
sinv.submit()
columns, data, message, chart = execute(
{
"company": "_Test Company",
"period_start_date": "2021-06-01",
"period_end_date": "2021-06-30",
"sales_order": [so.name],
}
)
# report defaults to company currency.
expected_value = [
{
"name": so.name,
"submitted": datetime.date(2021, 6, 15),
"status": "Completed",
"payment_term": None,
"description": "_Test 50-50",
"due_date": datetime.date(2021, 6, 30),
"invoice_portion": 50.0,
"currency": frappe.get_cached_value("Company", '_Test Company','default_currency'),
"base_payment_amount": 3500000.0,
"paid_amount": 3500000.0,
"invoices": ","+sinv.name,
},
{
"name": so.name,
"submitted": datetime.date(2021, 6, 15),
"status": "Partly Paid",
"payment_term": None,
"description": "_Test 50-50",
"due_date": datetime.date(2021, 7, 15),
"invoice_portion": 50.0,
"currency": frappe.get_cached_value("Company", '_Test Company','default_currency'),
"base_payment_amount": 3500000.0,
"paid_amount": 700000.0,
"invoices": ","+sinv.name,
},
]
self.assertEqual(data, expected_value)

View File

@ -227,11 +227,11 @@ erpnext.selling.SellingController = class SellingController extends erpnext.Tran
},
callback:function(r){
if (in_list(['Delivery Note', 'Sales Invoice'], doc.doctype)) {
if (doc.doctype === 'Sales Invoice' && (!doc.update_stock)) return;
me.set_batch_number(cdt, cdn);
me.batch_no(doc, cdt, cdn);
if (has_batch_no) {
me.set_batch_number(cdt, cdn);
me.batch_no(doc, cdt, cdn);
}
}
}
});

View File

@ -20,43 +20,12 @@ class Bin(Document):
+ flt(self.indented_qty) + flt(self.planned_qty) - flt(self.reserved_qty)
- flt(self.reserved_qty_for_production) - flt(self.reserved_qty_for_sub_contract))
def get_first_sle(self):
sle = frappe.qb.DocType("Stock Ledger Entry")
first_sle = (
frappe.qb.from_(sle)
.select("*")
.where((sle.item_code == self.item_code) & (sle.warehouse == self.warehouse))
.orderby(sle.posting_date, sle.posting_time, sle.creation)
.limit(1)
).run(as_dict=True)
return first_sle and first_sle[0] or None
def update_reserved_qty_for_production(self):
'''Update qty reserved for production from Production Item tables
in open work orders'''
from erpnext.manufacturing.doctype.work_order.work_order import get_reserved_qty_for_production
wo = frappe.qb.DocType("Work Order")
wo_item = frappe.qb.DocType("Work Order Item")
self.reserved_qty_for_production = (
frappe.qb
.from_(wo)
.from_(wo_item)
.select(Sum(Case()
.when(wo.skip_transfer == 0, wo_item.required_qty - wo_item.transferred_qty)
.else_(wo_item.required_qty - wo_item.consumed_qty))
)
.where(
(wo_item.item_code == self.item_code)
& (wo_item.parent == wo.name)
& (wo.docstatus == 1)
& (wo_item.source_warehouse == self.warehouse)
& (wo.status.notin(["Stopped", "Completed"]))
& ((wo_item.required_qty > wo_item.transferred_qty)
| (wo_item.required_qty > wo_item.consumed_qty))
)
).run()[0][0] or 0.0
self.reserved_qty_for_production = get_reserved_qty_for_production(self.item_code, self.warehouse)
self.set_projected_qty()
@ -126,13 +95,6 @@ def on_doctype_update():
frappe.db.add_unique("Bin", ["item_code", "warehouse"], constraint_name="unique_item_warehouse")
def update_stock(bin_name, args, allow_negative_stock=False, via_landed_cost_voucher=False):
"""WARNING: This function is deprecated. Inline this function instead of using it."""
from erpnext.stock.stock_ledger import repost_current_voucher
repost_current_voucher(args, allow_negative_stock, via_landed_cost_voucher)
update_qty(bin_name, args)
def get_bin_details(bin_name):
return frappe.db.get_value('Bin', bin_name, ['actual_qty', 'ordered_qty',
'reserved_qty', 'indented_qty', 'planned_qty', 'reserved_qty_for_production',

View File

@ -545,7 +545,7 @@ $.extend(erpnext.item, {
let selected_attributes = {};
me.multiple_variant_dialog.$wrapper.find('.form-column').each((i, col) => {
if(i===0) return;
let attribute_name = $(col).find('label').html();
let attribute_name = $(col).find('label').html().trim();
selected_attributes[attribute_name] = [];
let checked_opts = $(col).find('.checkbox input');
checked_opts.each((i, opt) => {

View File

@ -48,6 +48,7 @@
"warranty_period",
"weight_per_unit",
"weight_uom",
"allow_negative_stock",
"reorder_section",
"reorder_levels",
"unit_of_measure_conversion",
@ -346,7 +347,7 @@
"fieldname": "valuation_method",
"fieldtype": "Select",
"label": "Valuation Method",
"options": "\nFIFO\nMoving Average"
"options": "\nFIFO\nMoving Average\nLIFO"
},
{
"depends_on": "is_stock_item",
@ -907,6 +908,12 @@
"fieldname": "is_grouped_asset",
"fieldtype": "Check",
"label": "Create Grouped Asset"
},
{
"default": "0",
"fieldname": "allow_negative_stock",
"fieldtype": "Check",
"label": "Allow Negative Stock"
}
],
"icon": "fa fa-tag",
@ -914,7 +921,7 @@
"image_field": "image",
"index_web_pages_for_search": 1,
"links": [],
"modified": "2022-01-18 12:57:54.273202",
"modified": "2022-02-11 08:07:46.663220",
"modified_by": "Administrator",
"module": "Stock",
"name": "Item",
@ -987,4 +994,4 @@
"states": [],
"title_field": "item_name",
"track_changes": 1
}
}

View File

@ -6,6 +6,7 @@ import json
import frappe
from frappe.test_runner import make_test_objects
from frappe.utils import add_days, today
from erpnext.controllers.item_variant import (
InvalidItemAttributeValueError,
@ -608,6 +609,45 @@ class TestItem(ERPNextTestCase):
item.item_group = "All Item Groups"
item.save() # if item code saved without item_code then series worked
@change_settings("Stock Settings", {"allow_negative_stock": 0})
def test_item_wise_negative_stock(self):
""" When global settings are disabled check that item that allows
negative stock can still consume material in all known stock
transactions that consume inventory."""
from erpnext.stock.stock_ledger import is_negative_stock_allowed
item = make_item("_TestNegativeItemSetting", {"allow_negative_stock": 1, "valuation_rate": 100})
self.assertTrue(is_negative_stock_allowed(item_code=item.name))
self.consume_item_code_with_different_stock_transactions(item_code=item.name)
@change_settings("Stock Settings", {"allow_negative_stock": 0})
def test_backdated_negative_stock(self):
""" same as test above but backdated entries """
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
item = make_item("_TestNegativeItemSetting", {"allow_negative_stock": 1, "valuation_rate": 100})
# create a future entry so all new entries are backdated
make_stock_entry(qty=1, item_code=item.name, target="_Test Warehouse - _TC", posting_date = add_days(today(), 5))
self.consume_item_code_with_different_stock_transactions(item_code=item.name)
def consume_item_code_with_different_stock_transactions(self, item_code, warehouse="_Test Warehouse - _TC"):
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.stock.doctype.delivery_note.test_delivery_note import create_delivery_note
from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import make_purchase_receipt
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
typical_args = {"item_code": item_code, "warehouse": warehouse}
create_delivery_note(**typical_args)
create_sales_invoice(update_stock=1, **typical_args)
make_stock_entry(item_code=item_code, source=warehouse, qty=1, purpose="Material Issue")
make_stock_entry(item_code=item_code, source=warehouse, target="Stores - _TC", qty=1)
# standalone return
make_purchase_receipt(is_return=True, qty=-1, **typical_args)
def set_item_variant_settings(fields):
doc = frappe.get_doc('Item Variant Settings')

View File

@ -533,6 +533,7 @@ def raise_work_orders(material_request):
"stock_uom": d.stock_uom,
"expected_delivery_date": d.schedule_date,
"sales_order": d.sales_order,
"sales_order_item": d.get("sales_order_item"),
"bom_no": get_item_details(d.item_code).bom_no,
"material_request": mr.name,
"material_request_item": d.name,

View File

@ -9,7 +9,7 @@ from collections import defaultdict
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import cint, floor, flt, nowdate
from frappe.utils import cint, cstr, floor, flt, nowdate
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
from erpnext.stock.utils import get_stock_balance
@ -142,11 +142,44 @@ def apply_putaway_rule(doctype, items, company, sync=None, purpose=None):
if items_not_accomodated:
show_unassigned_items_message(items_not_accomodated)
items[:] = updated_table if updated_table else items # modify items table
if updated_table and _items_changed(items, updated_table, doctype):
items[:] = updated_table
frappe.msgprint(_("Applied putaway rules."), alert=True)
if sync and json.loads(sync): # sync with client side
return items
def _items_changed(old, new, doctype: str) -> bool:
""" Check if any items changed by application of putaway rules.
If not, changing item table can have side effects since `name` items also changes.
"""
if len(old) != len(new):
return True
old = [frappe._dict(item) if isinstance(item, dict) else item for item in old]
if doctype == "Stock Entry":
compare_keys = ("item_code", "t_warehouse", "transfer_qty", "serial_no")
sort_key = lambda item: (item.item_code, cstr(item.t_warehouse), # noqa
flt(item.transfer_qty), cstr(item.serial_no))
else:
# purchase receipt / invoice
compare_keys = ("item_code", "warehouse", "stock_qty", "received_qty", "serial_no")
sort_key = lambda item: (item.item_code, cstr(item.warehouse), # noqa
flt(item.stock_qty), flt(item.received_qty), cstr(item.serial_no))
old_sorted = sorted(old, key=sort_key)
new_sorted = sorted(new, key=sort_key)
# Once sorted by all relevant keys both tables should align if they are same.
for old_item, new_item in zip(old_sorted, new_sorted):
for key in compare_keys:
if old_item.get(key) != new_item.get(key):
return True
return False
def get_ordered_putaway_rules(item_code, company, source_warehouse=None):
"""Returns an ordered list of putaway rules to apply on an item."""
filters = {

View File

@ -35,6 +35,18 @@ class TestPutawayRule(ERPNextTestCase):
new_uom.uom_name = "Bag"
new_uom.save()
def assertUnchangedItemsOnResave(self, doc):
""" Check if same items remain even after reapplication of rules.
This is required since some business logic like subcontracting
depends on `name` of items to be same if item isn't changed.
"""
doc.reload()
old_items = {d.name for d in doc.items}
doc.save()
new_items = {d.name for d in doc.items}
self.assertSetEqual(old_items, new_items)
def test_putaway_rules_priority(self):
"""Test if rule is applied by priority, irrespective of free space."""
rule_1 = create_putaway_rule(item_code="_Rice", warehouse=self.warehouse_1, capacity=200,
@ -50,6 +62,8 @@ class TestPutawayRule(ERPNextTestCase):
self.assertEqual(pr.items[1].qty, 100)
self.assertEqual(pr.items[1].warehouse, self.warehouse_2)
self.assertUnchangedItemsOnResave(pr)
pr.delete()
rule_1.delete()
rule_2.delete()
@ -162,6 +176,8 @@ class TestPutawayRule(ERPNextTestCase):
# leftover space was for 500 kg (0.5 Bag)
# Since Bag is a whole UOM, 1(out of 2) Bag will be unassigned
self.assertUnchangedItemsOnResave(pr)
pr.delete()
rule_1.delete()
rule_2.delete()
@ -196,6 +212,8 @@ class TestPutawayRule(ERPNextTestCase):
self.assertEqual(pr.items[1].warehouse, self.warehouse_1)
self.assertEqual(pr.items[1].putaway_rule, rule_1.name)
self.assertUnchangedItemsOnResave(pr)
pr.delete()
rule_1.delete()
@ -239,6 +257,8 @@ class TestPutawayRule(ERPNextTestCase):
self.assertEqual(stock_entry_item.qty, 100) # unassigned 100 out of 200 Kg
self.assertEqual(stock_entry_item.putaway_rule, rule_2.name)
self.assertUnchangedItemsOnResave(stock_entry)
stock_entry.delete()
rule_1.delete()
rule_2.delete()
@ -294,6 +314,8 @@ class TestPutawayRule(ERPNextTestCase):
self.assertEqual(stock_entry.items[2].qty, 200)
self.assertEqual(stock_entry.items[2].putaway_rule, rule_2.name)
self.assertUnchangedItemsOnResave(stock_entry)
stock_entry.delete()
rule_1.delete()
rule_2.delete()
@ -344,6 +366,8 @@ class TestPutawayRule(ERPNextTestCase):
self.assertEqual(stock_entry.items[1].serial_no, "\n".join(serial_nos[3:]))
self.assertEqual(stock_entry.items[1].batch_no, "BOTTL-BATCH-1")
self.assertUnchangedItemsOnResave(stock_entry)
stock_entry.delete()
pr.cancel()
rule_1.delete()
@ -366,6 +390,8 @@ class TestPutawayRule(ERPNextTestCase):
self.assertEqual(stock_entry_item.qty, 100)
self.assertEqual(stock_entry_item.putaway_rule, rule_1.name)
self.assertUnchangedItemsOnResave(stock_entry)
stock_entry.delete()
rule_1.delete()
rule_2.delete()

View File

@ -433,9 +433,10 @@ class StockEntry(StockController):
)
def set_actual_qty(self):
allow_negative_stock = cint(frappe.db.get_value("Stock Settings", None, "allow_negative_stock"))
from erpnext.stock.stock_ledger import is_negative_stock_allowed
for d in self.get('items'):
allow_negative_stock = is_negative_stock_allowed(item_code=d.item_code)
previous_sle = get_previous_sle({
"item_code": d.item_code,
"warehouse": d.s_warehouse or d.t_warehouse,

View File

@ -99,7 +99,7 @@
"fieldname": "valuation_method",
"fieldtype": "Select",
"label": "Default Valuation Method",
"options": "FIFO\nMoving Average"
"options": "FIFO\nMoving Average\nLIFO"
},
{
"description": "The percentage you are allowed to receive or deliver more against the quantity ordered. For example, if you have ordered 100 units, and your Allowance is 10%, then you are allowed to receive 110 units.",
@ -346,7 +346,7 @@
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2022-02-04 15:33:43.692736",
"modified": "2022-02-05 15:33:43.692736",
"modified_by": "Administrator",
"module": "Stock",
"name": "Stock Settings",

View File

@ -6,6 +6,7 @@ import json
import frappe
from frappe import _, throw
from frappe.model import child_table_fields, default_fields
from frappe.model.meta import get_field_precision
from frappe.utils import add_days, add_months, cint, cstr, flt, getdate
@ -119,8 +120,15 @@ def get_item_details(args, doc=None, for_validate=False, overwrite_warehouse=Tru
out.rate = args.rate or out.price_list_rate
out.amount = flt(args.qty) * flt(out.rate)
out = remove_standard_fields(out)
return out
def remove_standard_fields(details):
for key in child_table_fields + default_fields:
details.pop(key, None)
return details
def update_stock(args, out):
if (args.get("doctype") == "Delivery Note" or
(args.get("doctype") == "Sales Invoice" and args.get('update_stock'))) \
@ -343,6 +351,7 @@ def get_basic_details(args, item, overwrite_warehouse=True):
args.conversion_factor = out.conversion_factor
out.stock_qty = out.qty * out.conversion_factor
args.stock_qty = out.stock_qty
# calculate last purchase rate
if args.get('doctype') in purchase_doctypes:

View File

@ -12,6 +12,7 @@ from frappe.utils import cint, date_diff, flt
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
Filters = frappe._dict
precision = cint(frappe.db.get_single_value("System Settings", "float_precision"))
def execute(filters: Filters = None) -> Tuple:
to_date = filters["to_date"]
@ -48,10 +49,13 @@ def format_report_data(filters: Filters, item_details: Dict, to_date: str) -> Li
if filters.get("show_warehouse_wise_stock"):
row.append(details.warehouse)
row.extend([item_dict.get("total_qty"), average_age,
row.extend([
flt(item_dict.get("total_qty"), precision),
average_age,
range1, range2, range3, above_range3,
earliest_age, latest_age,
details.stock_uom])
details.stock_uom
])
data.append(row)
@ -79,13 +83,13 @@ def get_range_age(filters: Filters, fifo_queue: List, to_date: str, item_dict: D
qty = flt(item[0]) if not item_dict["has_serial_no"] else 1.0
if age <= filters.range1:
range1 += qty
range1 = flt(range1 + qty, precision)
elif age <= filters.range2:
range2 += qty
range2 = flt(range2 + qty, precision)
elif age <= filters.range3:
range3 += qty
range3 = flt(range3 + qty, precision)
else:
above_range3 += qty
above_range3 = flt(above_range3 + qty, precision)
return range1, range2, range3, above_range3
@ -252,6 +256,7 @@ class FIFOSlots:
key, fifo_queue, transferred_item_key = self.__init_key_stores(d)
if d.voucher_type == "Stock Reconciliation":
# get difference in qty shift as actual qty
prev_balance_qty = self.item_details[key].get("qty_after_transaction", 0)
d.actual_qty = flt(d.qty_after_transaction) - flt(prev_balance_qty)
@ -264,12 +269,16 @@ class FIFOSlots:
self.__update_balances(d, key)
if not self.filters.get("show_warehouse_wise_stock"):
# (Item 1, WH 1), (Item 1, WH 2) => (Item 1)
self.item_details = self.__aggregate_details_by_item(self.item_details)
return self.item_details
def __init_key_stores(self, row: Dict) -> Tuple:
"Initialise keys and FIFO Queue."
key = (row.name, row.warehouse) if self.filters.get('show_warehouse_wise_stock') else row.name
key = (row.name, row.warehouse)
self.item_details.setdefault(key, {"details": row, "fifo_queue": []})
fifo_queue = self.item_details[key]["fifo_queue"]
@ -281,14 +290,16 @@ class FIFOSlots:
def __compute_incoming_stock(self, row: Dict, fifo_queue: List, transfer_key: Tuple, serial_nos: List):
"Update FIFO Queue on inward stock."
if self.transferred_item_details.get(transfer_key):
transfer_data = self.transferred_item_details.get(transfer_key)
if transfer_data:
# inward/outward from same voucher, item & warehouse
slot = self.transferred_item_details[transfer_key].pop(0)
fifo_queue.append(slot)
# eg: Repack with same item, Stock reco for batch item
# consume transfer data and add stock to fifo queue
self.__adjust_incoming_transfer_qty(transfer_data, fifo_queue, row)
else:
if not serial_nos:
if fifo_queue and flt(fifo_queue[0][0]) < 0:
# neutralize negative stock by adding positive stock
if fifo_queue and flt(fifo_queue[0][0]) <= 0:
# neutralize 0/negative stock by adding positive stock
fifo_queue[0][0] += flt(row.actual_qty)
fifo_queue[0][1] = row.posting_date
else:
@ -319,7 +330,7 @@ class FIFOSlots:
elif not fifo_queue:
# negative stock, no balance but qty yet to consume
fifo_queue.append([-(qty_to_pop), row.posting_date])
self.transferred_item_details[transfer_key].append([row.actual_qty, row.posting_date])
self.transferred_item_details[transfer_key].append([qty_to_pop, row.posting_date])
qty_to_pop = 0
else:
# qty to pop < slot qty, ample balance
@ -328,6 +339,33 @@ class FIFOSlots:
self.transferred_item_details[transfer_key].append([qty_to_pop, slot[1]])
qty_to_pop = 0
def __adjust_incoming_transfer_qty(self, transfer_data: Dict, fifo_queue: List, row: Dict):
"Add previously removed stock back to FIFO Queue."
transfer_qty_to_pop = flt(row.actual_qty)
def add_to_fifo_queue(slot):
if fifo_queue and flt(fifo_queue[0][0]) <= 0:
# neutralize 0/negative stock by adding positive stock
fifo_queue[0][0] += flt(slot[0])
fifo_queue[0][1] = slot[1]
else:
fifo_queue.append(slot)
while transfer_qty_to_pop:
if transfer_data and 0 < transfer_data[0][0] <= transfer_qty_to_pop:
# bucket qty is not enough, consume whole
transfer_qty_to_pop -= transfer_data[0][0]
add_to_fifo_queue(transfer_data.pop(0))
elif not transfer_data:
# transfer bucket is empty, extra incoming qty
add_to_fifo_queue([transfer_qty_to_pop, row.posting_date])
transfer_qty_to_pop = 0
else:
# ample bucket qty to consume
transfer_data[0][0] -= transfer_qty_to_pop
add_to_fifo_queue([transfer_qty_to_pop, transfer_data[0][1]])
transfer_qty_to_pop = 0
def __update_balances(self, row: Dict, key: Union[Tuple, str]):
self.item_details[key]["qty_after_transaction"] = row.qty_after_transaction
@ -338,6 +376,27 @@ class FIFOSlots:
self.item_details[key]["has_serial_no"] = row.has_serial_no
def __aggregate_details_by_item(self, wh_wise_data: Dict) -> Dict:
"Aggregate Item-Wh wise data into single Item entry."
item_aggregated_data = {}
for key,row in wh_wise_data.items():
item = key[0]
if not item_aggregated_data.get(item):
item_aggregated_data.setdefault(item, {
"details": frappe._dict(),
"fifo_queue": [],
"qty_after_transaction": 0.0,
"total_qty": 0.0
})
item_row = item_aggregated_data.get(item)
item_row["details"].update(row["details"])
item_row["fifo_queue"].extend(row["fifo_queue"])
item_row["qty_after_transaction"] += flt(row["qty_after_transaction"])
item_row["total_qty"] += flt(row["total_qty"])
item_row["has_serial_no"] = row["has_serial_no"]
return item_aggregated_data
def __get_stock_ledger_entries(self) -> List[Dict]:
sle = frappe.qb.DocType("Stock Ledger Entry")
item = self.__get_item_query() # used as derived table in sle query

View File

@ -15,6 +15,7 @@ Here, the balance qty is 70.
50 qty is (today-the 1st) days old
20 qty is (today-the 2nd) days old
> Note: We generate FIFO slots warehouse wise as stock reconciliations from different warehouses can cause incorrect values.
### Calculation of FIFO Slots
#### Case 1: Outward from sufficient balance qty
@ -70,4 +71,39 @@ Date | Qty | Queue
2nd | -60 | [[-10, 1-12-2021]]
3rd | +5 | [[-5, 3-12-2021]]
4th | +10 | [[5, 4-12-2021]]
4th | +20 | [[5, 4-12-2021], [20, 4-12-2021]]
4th | +20 | [[5, 4-12-2021], [20, 4-12-2021]]
### Concept of Transfer Qty Bucket
In the case of a **Repack**, the quantity that comes in isn't really incoming. It is just old stock repurposed as new stock, via an outgoing and an incoming entry in the same warehouse.
Here, stock is first consumed from the FIFO Queue and then re-added to it.
While adding stock back to the queue we need to know how much to add, so we must keep track of how much was previously consumed.
Hence we use a **Transfer Qty Bucket**.
While re-adding stock, we try to re-add the buckets that were consumed earlier (dates intact), to maintain correctness. A simplified sketch of this mechanism is shown after the worked cases below.
#### Case 1: Same Item-Warehouse in Repack
Eg:
-------------------------------------------------------------------------------------
Date | Qty | Voucher | FIFO Queue | Transfer Qty Buckets
-------------------------------------------------------------------------------------
1st | +500 | PR | [[500, 1-12-2021]] |
2nd | -50 | Repack | [[450, 1-12-2021]] | [[50, 1-12-2021]]
2nd | +50 | Repack | [[450, 1-12-2021], [50, 1-12-2021]] | []
- The balance at the end is restored back to 500
- However, the initial 500 qty bucket is now split into 450 and 50, with the same date
- The net effect is the same as that before the Repack
#### Case 2: Same Item-Warehouse in Repack with Split Consumption rows
Eg:
-------------------------------------------------------------------------------------
Date | Qty | Voucher | FIFO Queue | Transfer Qty Buckets
-------------------------------------------------------------------------------------
1st | +500 | PR | [[500, 1-12-2021]] |
2nd | -50 | Repack | [[450, 1-12-2021]] | [[50, 1-12-2021]]
2nd | -50 | Repack | [[400, 1-12-2021]] | [[50, 1-12-2021],
- | | | |[50, 1-12-2021]]
2nd | +100 | Repack | [[400, 1-12-2021], [50, 1-12-2021], | []
- | | | [50, 1-12-2021]] |
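The mechanism can be illustrated with a small standalone sketch (illustration only; the actual `FIFOSlots` implementation also tracks vouchers, serial numbers and warehouse-wise keys):

```python
# Simplified Transfer Qty Bucket mechanics. Dates are plain strings here.

def consume(fifo_queue, transfer_bucket, qty, posting_date):
    """Outward repack row: move qty from the FIFO queue into the transfer bucket."""
    while qty:
        if fifo_queue and 0 < fifo_queue[0][0] <= qty:
            slot = fifo_queue.pop(0)                      # consume a whole slot
            transfer_bucket.append(slot)
            qty -= slot[0]
        elif not fifo_queue:                              # no balance: go negative
            fifo_queue.append([-qty, posting_date])
            transfer_bucket.append([qty, posting_date])
            qty = 0
        else:                                             # partially consume first slot
            fifo_queue[0][0] -= qty
            transfer_bucket.append([qty, fifo_queue[0][1]])
            qty = 0

def re_add(fifo_queue, transfer_bucket, qty, posting_date):
    """Inward repack row: add qty back, reusing consumed buckets (dates intact)."""
    while qty:
        if transfer_bucket and 0 < transfer_bucket[0][0] <= qty:
            qty -= transfer_bucket[0][0]
            fifo_queue.append(transfer_bucket.pop(0))     # restore a whole bucket
        elif not transfer_bucket:                         # extra incoming qty
            fifo_queue.append([qty, posting_date])
            qty = 0
        else:                                             # split a larger bucket
            transfer_bucket[0][0] -= qty
            fifo_queue.append([qty, transfer_bucket[0][1]])
            qty = 0

# Case 1 above: PR +500 on the 1st, Repack -50/+50 on the 2nd
queue, bucket = [[500, "1-12-2021"]], []
consume(queue, bucket, 50, "2-12-2021")
re_add(queue, bucket, 50, "2-12-2021")
print(queue)  # [[450, '1-12-2021'], [50, '1-12-2021']] -- matches the table
```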

View File

@ -3,7 +3,7 @@
import frappe
from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots
from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots, format_report_data
from erpnext.tests.utils import ERPNextTestCase
@ -11,15 +11,17 @@ class TestStockAgeing(ERPNextTestCase):
def setUp(self) -> None:
self.filters = frappe._dict(
company="_Test Company",
to_date="2021-12-10"
to_date="2021-12-10",
range1=30, range2=60, range3=90
)
def test_normal_inward_outward_queue(self):
"Reference: Case 1 in stock_ageing_fifo_logic.md"
"Reference: Case 1 in stock_ageing_fifo_logic.md (same wh)"
sle = [
frappe._dict(
name="Flask Item",
actual_qty=30, qty_after_transaction=30,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
@ -27,6 +29,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=20, qty_after_transaction=50,
warehouse="WH 1",
posting_date="2021-12-02", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
@ -34,6 +37,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=(-10), qty_after_transaction=40,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="003",
has_serial_no=False, serial_no=None
@ -50,11 +54,12 @@ class TestStockAgeing(ERPNextTestCase):
self.assertEqual(queue[0][0], 20.0)
def test_insufficient_balance(self):
"Reference: Case 3 in stock_ageing_fifo_logic.md"
"Reference: Case 3 in stock_ageing_fifo_logic.md (same wh)"
sle = [
frappe._dict(
name="Flask Item",
actual_qty=(-30), qty_after_transaction=(-30),
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
@ -62,6 +67,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=20, qty_after_transaction=(-10),
warehouse="WH 1",
posting_date="2021-12-02", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
@ -69,6 +75,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=20, qty_after_transaction=10,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="003",
has_serial_no=False, serial_no=None
@ -76,6 +83,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=10, qty_after_transaction=20,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="004",
has_serial_no=False, serial_no=None
@ -91,11 +99,16 @@ class TestStockAgeing(ERPNextTestCase):
self.assertEqual(queue[0][0], 10.0)
self.assertEqual(queue[1][0], 10.0)
def test_stock_reconciliation(self):
def test_basic_stock_reconciliation(self):
"""
Ledger (same wh): [+30, reco reset >> 50, -10]
Bal: 40
"""
sle = [
frappe._dict(
name="Flask Item",
actual_qty=30, qty_after_transaction=30,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
@ -103,6 +116,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=0, qty_after_transaction=50,
warehouse="WH 1",
posting_date="2021-12-02", voucher_type="Stock Reconciliation",
voucher_no="002",
has_serial_no=False, serial_no=None
@ -110,6 +124,7 @@ class TestStockAgeing(ERPNextTestCase):
frappe._dict(
name="Flask Item",
actual_qty=(-10), qty_after_transaction=40,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="003",
has_serial_no=False, serial_no=None
@ -122,5 +137,477 @@ class TestStockAgeing(ERPNextTestCase):
queue = result["fifo_queue"]
self.assertEqual(result["qty_after_transaction"], result["total_qty"])
self.assertEqual(result["total_qty"], 40.0)
self.assertEqual(queue[0][0], 20.0)
self.assertEqual(queue[1][0], 20.0)
def test_sequential_stock_reco_same_warehouse(self):
"""
Test back to back stock recos (same warehouse).
Ledger: [reco opening >> +1000, reco reset >> 400, -10]
Bal: 390
"""
sle = [
frappe._dict(
name="Flask Item",
actual_qty=0, qty_after_transaction=1000,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Reconciliation",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=0, qty_after_transaction=400,
warehouse="WH 1",
posting_date="2021-12-02", voucher_type="Stock Reconciliation",
voucher_no="003",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-10), qty_after_transaction=390,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="003",
has_serial_no=False, serial_no=None
)
]
slots = FIFOSlots(self.filters, sle).generate()
result = slots["Flask Item"]
queue = result["fifo_queue"]
self.assertEqual(result["qty_after_transaction"], result["total_qty"])
self.assertEqual(result["total_qty"], 390.0)
self.assertEqual(queue[0][0], 390.0)
def test_sequential_stock_reco_different_warehouse(self):
"""
Ledger:
WH | Voucher | Qty
-------------------
WH1 | Reco | 1000
WH2 | Reco | 400
WH1 | SE | -10
Bal: WH1 bal + WH2 bal = 990 + 400 = 1390
"""
sle = [
frappe._dict(
name="Flask Item",
actual_qty=0, qty_after_transaction=1000,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Reconciliation",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=0, qty_after_transaction=400,
warehouse="WH 2",
posting_date="2021-12-02", voucher_type="Stock Reconciliation",
voucher_no="003",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-10), qty_after_transaction=990,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="004",
has_serial_no=False, serial_no=None
)
]
item_wise_slots, item_wh_wise_slots = generate_item_and_item_wh_wise_slots(
filters=self.filters,sle=sle
)
# test without 'show_warehouse_wise_stock'
item_result = item_wise_slots["Flask Item"]
queue = item_result["fifo_queue"]
self.assertEqual(item_result["qty_after_transaction"], item_result["total_qty"])
self.assertEqual(item_result["total_qty"], 1390.0)
self.assertEqual(queue[0][0], 990.0)
self.assertEqual(queue[1][0], 400.0)
# test with 'show_warehouse_wise_stock' checked
item_wh_balances = [item_wh_wise_slots.get(i).get("qty_after_transaction") for i in item_wh_wise_slots]
self.assertEqual(sum(item_wh_balances), item_result["qty_after_transaction"])
def test_repack_entry_same_item_split_rows(self):
"""
Split consumption rows and have single repacked item row (same warehouse).
Ledger:
Item | Qty | Voucher
------------------------
Item 1 | 500 | 001
Item 1 | -50 | 002 (repack)
Item 1 | -50 | 002 (repack)
Item 1 | 100 | 002 (repack)
Case most likely for batch items. Test time bucket computation.
"""
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=500, qty_after_transaction=500,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-50), qty_after_transaction=450,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-50), qty_after_transaction=400,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=100, qty_after_transaction=500,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
]
slots = FIFOSlots(self.filters, sle).generate()
item_result = slots["Flask Item"]
queue = item_result["fifo_queue"]
self.assertEqual(item_result["total_qty"], 500.0)
self.assertEqual(queue[0][0], 400.0)
self.assertEqual(queue[1][0], 50.0)
self.assertEqual(queue[2][0], 50.0)
# check if time buckets add up to balance qty
self.assertEqual(sum([i[0] for i in queue]), 500.0)
def test_repack_entry_same_item_overconsume(self):
"""
Over consume item and have less repacked item qty (same warehouse).
Ledger:
Item | Qty | Voucher
------------------------
Item 1 | 500 | 001
Item 1 | -100 | 002 (repack)
Item 1 | 50 | 002 (repack)
Case most likely for batch items. Test time bucket computation.
"""
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=500, qty_after_transaction=500,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-100), qty_after_transaction=400,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=50, qty_after_transaction=450,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
]
slots = FIFOSlots(self.filters, sle).generate()
item_result = slots["Flask Item"]
queue = item_result["fifo_queue"]
self.assertEqual(item_result["total_qty"], 450.0)
self.assertEqual(queue[0][0], 400.0)
self.assertEqual(queue[1][0], 50.0)
# check if time buckets add up to balance qty
self.assertEqual(sum([i[0] for i in queue]), 450.0)
def test_repack_entry_same_item_overconsume_with_split_rows(self):
"""
Over consume item and have less repacked item qty (same warehouse).
Ledger:
Item | Qty | Voucher
------------------------
Item 1 | 20 | 001
Item 1 | -50 | 002 (repack)
Item 1 | -50 | 002 (repack)
Item 1 | 50 | 002 (repack)
"""
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=20, qty_after_transaction=20,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-50), qty_after_transaction=(-30),
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-50), qty_after_transaction=(-80),
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=50, qty_after_transaction=(-30),
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
]
fifo_slots = FIFOSlots(self.filters, sle)
slots = fifo_slots.generate()
item_result = slots["Flask Item"]
queue = item_result["fifo_queue"]
self.assertEqual(item_result["total_qty"], -30.0)
self.assertEqual(queue[0][0], -30.0)
# check transfer bucket
transfer_bucket = fifo_slots.transferred_item_details[('002', 'Flask Item', 'WH 1')]
self.assertEqual(transfer_bucket[0][0], 50)
def test_repack_entry_same_item_overproduce(self):
"""
Under consume item and have more repacked item qty (same warehouse).
Ledger:
Item | Qty | Voucher
------------------------
Item 1 | 500 | 001
Item 1 | -50 | 002 (repack)
Item 1 | 100 | 002 (repack)
Case most likely for batch items. Test time bucket computation.
"""
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=500, qty_after_transaction=500,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-50), qty_after_transaction=450,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=100, qty_after_transaction=550,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
]
slots = FIFOSlots(self.filters, sle).generate()
item_result = slots["Flask Item"]
queue = item_result["fifo_queue"]
self.assertEqual(item_result["total_qty"], 550.0)
self.assertEqual(queue[0][0], 450.0)
self.assertEqual(queue[1][0], 50.0)
self.assertEqual(queue[2][0], 50.0)
# check if time buckets add up to balance qty
self.assertEqual(sum([i[0] for i in queue]), 550.0)
def test_repack_entry_same_item_overproduce_with_split_rows(self):
"""
Consume item and produce more repacked item qty, with split production rows (same warehouse).
Ledger:
Item | Qty | Voucher
------------------------
Item 1 | 20 | 001
Item 1 | -50 | 002 (repack)
Item 1 | 50 | 002 (repack)
Item 1 | 50 | 002 (repack)
"""
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=20, qty_after_transaction=20,
warehouse="WH 1",
posting_date="2021-12-03", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=(-50), qty_after_transaction=(-30),
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=50, qty_after_transaction=20,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
frappe._dict(
name="Flask Item",
actual_qty=50, qty_after_transaction=70,
warehouse="WH 1",
posting_date="2021-12-04", voucher_type="Stock Entry",
voucher_no="002",
has_serial_no=False, serial_no=None
),
]
fifo_slots = FIFOSlots(self.filters, sle)
slots = fifo_slots.generate()
item_result = slots["Flask Item"]
queue = item_result["fifo_queue"]
self.assertEqual(item_result["total_qty"], 70.0)
self.assertEqual(queue[0][0], 20.0)
self.assertEqual(queue[1][0], 50.0)
# check transfer bucket
transfer_bucket = fifo_slots.transferred_item_details[('002', 'Flask Item', 'WH 1')]
self.assertFalse(transfer_bucket)
def test_negative_stock_same_voucher(self):
"""
Test negative stock scenario in transfer bucket via repack entry (same wh).
Ledger:
Item | Qty | Voucher
------------------------
Item 1 | -50 | 001
Item 1 | -50 | 001
Item 1 | 30 | 001
Item 1 | 80 | 001
"""
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=(-50), qty_after_transaction=(-50),
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict( # stock up item
name="Flask Item",
actual_qty=(-50), qty_after_transaction=(-100),
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict( # stock up item
name="Flask Item",
actual_qty=30, qty_after_transaction=(-70),
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
]
fifo_slots = FIFOSlots(self.filters, sle)
slots = fifo_slots.generate()
item_result = slots["Flask Item"]
# check transfer bucket
transfer_bucket = fifo_slots.transferred_item_details[('001', 'Flask Item', 'WH 1')]
self.assertEqual(transfer_bucket[0][0], 20)
self.assertEqual(transfer_bucket[1][0], 50)
self.assertEqual(item_result["fifo_queue"][0][0], -70.0)
sle.append(frappe._dict(
name="Flask Item",
actual_qty=80, qty_after_transaction=10,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
))
fifo_slots = FIFOSlots(self.filters, sle)
slots = fifo_slots.generate()
item_result = slots["Flask Item"]
transfer_bucket = fifo_slots.transferred_item_details[('001', 'Flask Item', 'WH 1')]
self.assertFalse(transfer_bucket)
self.assertEqual(item_result["fifo_queue"][0][0], 10.0)
def test_precision(self):
"Test if final balance qty is rounded off correctly."
sle = [
frappe._dict( # stock up item
name="Flask Item",
actual_qty=0.3, qty_after_transaction=0.3,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
frappe._dict( # stock up item
name="Flask Item",
actual_qty=0.6, qty_after_transaction=0.9,
warehouse="WH 1",
posting_date="2021-12-01", voucher_type="Stock Entry",
voucher_no="001",
has_serial_no=False, serial_no=None
),
]
slots = FIFOSlots(self.filters, sle).generate()
report_data = format_report_data(self.filters, slots, self.filters["to_date"])
row = report_data[0] # first row in report
bal_qty = row[5]
range_qty_sum = sum([i for i in row[7:11]]) # get sum of range balance
# check if the value of the Available Qty column matches the sum of the range buckets after formatting
self.assertEqual(bal_qty, 0.9)
self.assertEqual(bal_qty, range_qty_sum)
def generate_item_and_item_wh_wise_slots(filters, sle):
"Return results with and without 'show_warehouse_wise_stock'"
item_wise_slots = FIFOSlots(filters, sle).generate()
filters.show_warehouse_wise_stock = True
item_wh_wise_slots = FIFOSlots(filters, sle).generate()
filters.show_warehouse_wise_stock = False
return item_wise_slots, item_wh_wise_slots

View File

@ -167,7 +167,7 @@ def get_columns():
{
"fieldname": "stock_queue",
"fieldtype": "Data",
"label": "FIFO Queue",
"label": "FIFO/LIFO Queue",
},
{

View File

@ -3,10 +3,9 @@
import frappe
from frappe.utils import cstr, flt, nowdate, nowtime
from frappe.utils import cstr, flt, now, nowdate, nowtime
from erpnext.controllers.stock_controller import create_repost_item_valuation_entry
from erpnext.stock.utils import update_bin
def repost(only_actual=False, allow_negative_stock=False, allow_zero_rate=False, only_bin=False):
@ -175,6 +174,7 @@ def update_bin_qty(item_code, warehouse, qty_dict=None):
bin.set(field, flt(value))
mismatch = True
bin.modified = now()
if mismatch:
bin.set_projected_qty()
bin.db_update()
@ -227,8 +227,6 @@ def set_stock_balance_as_per_serial_no(item_code=None, posting_date=None, postin
"sle_id": sle_doc.name
})
update_bin(args)
create_repost_item_valuation_entry({
"item_code": d[0],
"warehouse": d[1],

View File

@ -3,6 +3,7 @@
import copy
import json
from typing import Optional
import frappe
from frappe import _
@ -16,7 +17,7 @@ from erpnext.stock.utils import (
get_or_make_bin,
get_valuation_method,
)
from erpnext.stock.valuation import FIFOValuation
from erpnext.stock.valuation import FIFOValuation, LIFOValuation
class NegativeStockError(frappe.ValidationError): pass
@ -268,11 +269,10 @@ class update_entries_after(object):
self.verbose = verbose
self.allow_zero_rate = allow_zero_rate
self.via_landed_cost_voucher = via_landed_cost_voucher
self.allow_negative_stock = allow_negative_stock \
or cint(frappe.db.get_single_value("Stock Settings", "allow_negative_stock"))
self.item_code = args.get("item_code")
self.allow_negative_stock = allow_negative_stock or is_negative_stock_allowed(item_code=self.item_code)
self.args = frappe._dict(args)
self.item_code = args.get("item_code")
if self.args.sle_id:
self.args['name'] = self.args.sle_id
@ -461,7 +461,7 @@ class update_entries_after(object):
self.wh_data.qty_after_transaction += flt(sle.actual_qty)
self.wh_data.stock_value = flt(self.wh_data.qty_after_transaction) * flt(self.wh_data.valuation_rate)
else:
self.update_fifo_values(sle)
self.update_queue_values(sle)
self.wh_data.qty_after_transaction += flt(sle.actual_qty)
# rounding as per precision
@ -701,14 +701,18 @@ class update_entries_after(object):
sle.voucher_type, sle.voucher_no, self.allow_zero_rate,
currency=erpnext.get_company_currency(sle.company), company=sle.company)
def update_fifo_values(self, sle):
def update_queue_values(self, sle):
incoming_rate = flt(sle.incoming_rate)
actual_qty = flt(sle.actual_qty)
outgoing_rate = flt(sle.outgoing_rate)
fifo_queue = FIFOValuation(self.wh_data.stock_queue)
if self.valuation_method == "LIFO":
stock_queue = LIFOValuation(self.wh_data.stock_queue)
else:
stock_queue = FIFOValuation(self.wh_data.stock_queue)
if actual_qty > 0:
fifo_queue.add_stock(qty=actual_qty, rate=incoming_rate)
stock_queue.add_stock(qty=actual_qty, rate=incoming_rate)
else:
def rate_generator() -> float:
allow_zero_valuation_rate = self.check_if_allow_zero_valuation_rate(sle.voucher_type, sle.voucher_detail_no)
@ -719,11 +723,11 @@ class update_entries_after(object):
else:
return 0.0
fifo_queue.remove_stock(qty=abs(actual_qty), outgoing_rate=outgoing_rate, rate_generator=rate_generator)
stock_queue.remove_stock(qty=abs(actual_qty), outgoing_rate=outgoing_rate, rate_generator=rate_generator)
stock_qty, stock_value = fifo_queue.get_total_stock_and_value()
stock_qty, stock_value = stock_queue.get_total_stock_and_value()
self.wh_data.stock_queue = fifo_queue.get_state()
self.wh_data.stock_queue = stock_queue.state
self.wh_data.stock_value = stock_value
if stock_qty:
self.wh_data.valuation_rate = stock_value / stock_qty
@ -1045,10 +1049,7 @@ def get_datetime_limit_condition(detail):
)"""
def validate_negative_qty_in_future_sle(args, allow_negative_stock=False):
allow_negative_stock = cint(allow_negative_stock) \
or cint(frappe.db.get_single_value("Stock Settings", "allow_negative_stock"))
if allow_negative_stock:
if allow_negative_stock or is_negative_stock_allowed(item_code=args.item_code):
return
if not (args.actual_qty < 0 or args.voucher_type == "Stock Reconciliation"):
return
@ -1117,3 +1118,11 @@ def get_future_sle_with_negative_batch_qty(args):
and timestamp(posting_date, posting_time) >= timestamp(%(posting_date)s, %(posting_time)s)
limit 1
""", args, as_dict=1)
def is_negative_stock_allowed(*, item_code: Optional[str] = None) -> bool:
if cint(frappe.db.get_single_value("Stock Settings", "allow_negative_stock", cache=True)):
return True
if item_code and cint(frappe.db.get_value("Item", item_code, "allow_negative_stock", cache=True)):
return True
return False

View File

@ -1,16 +1,21 @@
import json
import unittest
import frappe
from hypothesis import given
from hypothesis import strategies as st
from erpnext.stock.valuation import FIFOValuation, _round_off_if_near_zero
from erpnext.stock.doctype.item.test_item import make_item
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
from erpnext.stock.valuation import FIFOValuation, LIFOValuation, _round_off_if_near_zero
from erpnext.tests.utils import ERPNextTestCase
qty_gen = st.floats(min_value=-1e6, max_value=1e6)
value_gen = st.floats(min_value=1, max_value=1e6)
stock_queue_generator = st.lists(st.tuples(qty_gen, value_gen), min_size=10)
class TestFifoValuation(unittest.TestCase):
class TestFIFOValuation(unittest.TestCase):
def setUp(self):
self.queue = FIFOValuation([])
@ -164,3 +169,184 @@ class TestFifoValuation(unittest.TestCase):
total_value -= sum(q * r for q, r in consumed)
self.assertTotalQty(total_qty)
self.assertTotalValue(total_value)
class TestLIFOValuation(unittest.TestCase):
def setUp(self):
self.stack = LIFOValuation([])
def tearDown(self):
qty, value = self.stack.get_total_stock_and_value()
self.assertTotalQty(qty)
self.assertTotalValue(value)
def assertTotalQty(self, qty):
self.assertAlmostEqual(sum(q for q, _ in self.stack), qty, msg=f"stack: {self.stack}", places=4)
def assertTotalValue(self, value):
self.assertAlmostEqual(sum(q * r for q, r in self.stack), value, msg=f"stack: {self.stack}", places=2)
def test_simple_addition(self):
self.stack.add_stock(1, 10)
self.assertTotalQty(1)
def test_merge_new_stock(self):
self.stack.add_stock(1, 10)
self.stack.add_stock(1, 10)
self.assertEqual(self.stack, [[2, 10]])
def test_simple_removal(self):
self.stack.add_stock(1, 10)
self.stack.remove_stock(1)
self.assertTotalQty(0)
def test_adding_negative_stock_keeps_rate(self):
self.stack = LIFOValuation([[-5.0, 100]])
self.stack.add_stock(1, 10)
self.assertEqual(self.stack, [[-4, 100]])
def test_adding_negative_stock_updates_rate(self):
self.stack = LIFOValuation([[-5.0, 100]])
self.stack.add_stock(6, 10)
self.assertEqual(self.stack, [[1, 10]])
def test_rounding_off(self):
self.stack.add_stock(1.0, 1.0)
self.stack.remove_stock(1.0 - 1e-9)
self.assertTotalQty(0)
def test_lifo_consumption(self):
self.stack.add_stock(10, 10)
self.stack.add_stock(10, 20)
consumed = self.stack.remove_stock(15)
self.assertEqual(consumed, [[10, 20], [5, 10]])
self.assertTotalQty(5)
def test_lifo_consumption_going_negative(self):
self.stack.add_stock(10, 10)
self.stack.add_stock(10, 20)
consumed = self.stack.remove_stock(25)
self.assertEqual(consumed, [[10, 20], [10, 10], [5, 10]])
self.assertTotalQty(-5)
def test_lifo_consumption_multiple(self):
self.stack.add_stock(1, 1)
self.stack.add_stock(2, 2)
consumed = self.stack.remove_stock(1)
self.assertEqual(consumed, [[1, 2]])
self.stack.add_stock(3, 3)
consumed = self.stack.remove_stock(4)
self.assertEqual(consumed, [[3, 3], [1, 2]])
self.stack.add_stock(4, 4)
consumed = self.stack.remove_stock(5)
self.assertEqual(consumed, [[4, 4], [1, 1]])
self.stack.add_stock(5, 5)
consumed = self.stack.remove_stock(5)
self.assertEqual(consumed, [[5, 5]])
@given(stock_queue_generator)
def test_lifo_qty_hypothesis(self, stock_stack):
self.stack = LIFOValuation([])
total_qty = 0
for qty, rate in stock_stack:
if qty == 0:
continue
if qty > 0:
self.stack.add_stock(qty, rate)
total_qty += qty
else:
qty = abs(qty)
consumed = self.stack.remove_stock(qty)
self.assertAlmostEqual(qty, sum(q for q, _ in consumed), msg=f"incorrect consumption {consumed}")
total_qty -= qty
self.assertTotalQty(total_qty)
@given(stock_queue_generator)
def test_lifo_qty_value_nonneg_hypothesis(self, stock_stack):
self.stack = LIFOValuation([])
total_qty = 0.0
total_value = 0.0
for qty, rate in stock_stack:
# don't allow negative stock
if qty == 0 or total_qty + qty < 0 or abs(qty) < 0.1:
continue
if qty > 0:
self.stack.add_stock(qty, rate)
total_qty += qty
total_value += qty * rate
else:
qty = abs(qty)
consumed = self.stack.remove_stock(qty)
self.assertAlmostEqual(qty, sum(q for q, _ in consumed), msg=f"incorrect consumption {consumed}")
total_qty -= qty
total_value -= sum(q * r for q, r in consumed)
self.assertTotalQty(total_qty)
self.assertTotalValue(total_value)
class TestLIFOValuationSLE(ERPNextTestCase):
ITEM_CODE = "_Test LIFO item"
WAREHOUSE = "_Test Warehouse - _TC"
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
make_item(cls.ITEM_CODE, {"valuation_method": "LIFO"})
def _make_stock_entry(self, qty, rate=None):
kwargs = {
"item_code": self.ITEM_CODE,
"from_warehouse" if qty < 0 else "to_warehouse": self.WAREHOUSE,
"rate": rate,
"qty": abs(qty),
}
return make_stock_entry(**kwargs)
def assertStockQueue(self, se, expected_queue):
sle_name = frappe.db.get_value("Stock Ledger Entry", {"voucher_no": se.name, "is_cancelled": 0, "voucher_type": "Stock Entry"})
sle = frappe.get_doc("Stock Ledger Entry", sle_name)
stock_queue = json.loads(sle.stock_queue)
total_qty, total_value = LIFOValuation(stock_queue).get_total_stock_and_value()
self.assertEqual(sle.qty_after_transaction, total_qty)
self.assertEqual(sle.stock_value, total_value)
if total_qty > 0:
self.assertEqual(stock_queue, expected_queue)
def test_lifo_values(self):
in1 = self._make_stock_entry(1, 1)
self.assertStockQueue(in1, [[1, 1]])
in2 = self._make_stock_entry(2, 2)
self.assertStockQueue(in2, [[1, 1], [2, 2]])
out1 = self._make_stock_entry(-1)
self.assertStockQueue(out1, [[1, 1], [1, 2]])
in3 = self._make_stock_entry(3, 3)
self.assertStockQueue(in3, [[1, 1], [1, 2], [3, 3]])
out2 = self._make_stock_entry(-4)
self.assertStockQueue(out2, [[1, 1]])
in4 = self._make_stock_entry(4, 4)
self.assertStockQueue(in4, [[1, 1], [4,4]])
out3 = self._make_stock_entry(-5)
self.assertStockQueue(out3, [])
in5 = self._make_stock_entry(5, 5)
self.assertStockQueue(in5, [[5, 5]])
out5 = self._make_stock_entry(-5)
self.assertStockQueue(out5, [])
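For hand-verification, the arithmetic behind the first few assertions above (same numbers as test_lifo_values, restated as comments):

# in1: +1 @ 1      -> queue [[1, 1]]                   value = 1*1        = 1
# in2: +2 @ 2      -> queue [[1, 1], [2, 2]]           value = 1 + 2*2    = 5
# out1: -1 (LIFO)  -> pops from [2, 2]                 queue [[1, 1], [1, 2]], value = 3
# in3: +3 @ 3      -> queue [[1, 1], [1, 2], [3, 3]]   value = 3 + 3*3    = 12
# out2: -4 (LIFO)  -> consumes [3, 3] then [1, 2]      queue [[1, 1]],    value = 1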

View File

@ -9,6 +9,7 @@ from frappe import _
from frappe.utils import cstr, flt, get_link_to_form, nowdate, nowtime
import erpnext
from erpnext.stock.valuation import FIFOValuation, LIFOValuation
class InvalidWarehouseCompany(frappe.ValidationError): pass
@ -205,16 +206,6 @@ def _create_bin(item_code, warehouse):
return bin_obj
def update_bin(args, allow_negative_stock=False, via_landed_cost_voucher=False):
"""WARNING: This function is deprecated. Inline this function instead of using it."""
from erpnext.stock.doctype.bin.bin import update_stock
is_stock_item = frappe.get_cached_value('Item', args.get("item_code"), 'is_stock_item')
if is_stock_item:
bin_name = get_or_make_bin(args.get("item_code"), args.get("warehouse"))
update_stock(bin_name, args, allow_negative_stock, via_landed_cost_voucher)
else:
frappe.msgprint(_("Item {0} ignored since it is not a stock item").format(args.get("item_code")))
@frappe.whitelist()
def get_incoming_rate(args, raise_error_if_no_rate=True):
"""Get Incoming Rate based on valuation method"""
@ -228,10 +219,10 @@ def get_incoming_rate(args, raise_error_if_no_rate=True):
else:
valuation_method = get_valuation_method(args.get("item_code"))
previous_sle = get_previous_sle(args)
if valuation_method == 'FIFO':
if valuation_method in ('FIFO', 'LIFO'):
if previous_sle:
previous_stock_queue = json.loads(previous_sle.get('stock_queue', '[]') or '[]')
in_rate = get_fifo_rate(previous_stock_queue, args.get("qty") or 0) if previous_stock_queue else 0
in_rate = _get_fifo_lifo_rate(previous_stock_queue, args.get("qty") or 0, valuation_method) if previous_stock_queue else 0
elif valuation_method == 'Moving Average':
in_rate = previous_sle.get('valuation_rate') or 0
@ -261,29 +252,25 @@ def get_valuation_method(item_code):
def get_fifo_rate(previous_stock_queue, qty):
"""get FIFO (average) Rate from Queue"""
if flt(qty) >= 0:
total = sum(f[0] for f in previous_stock_queue)
return sum(flt(f[0]) * flt(f[1]) for f in previous_stock_queue) / flt(total) if total else 0.0
else:
available_qty_for_outgoing, outgoing_cost = 0, 0
qty_to_pop = abs(flt(qty))
while qty_to_pop and previous_stock_queue:
batch = previous_stock_queue[0]
if 0 < batch[0] <= qty_to_pop:
# if batch qty > 0
# not enough or exactly same qty in current batch, clear batch
available_qty_for_outgoing += flt(batch[0])
outgoing_cost += flt(batch[0]) * flt(batch[1])
qty_to_pop -= batch[0]
previous_stock_queue.pop(0)
else:
# all from current batch
available_qty_for_outgoing += flt(qty_to_pop)
outgoing_cost += flt(qty_to_pop) * flt(batch[1])
batch[0] -= qty_to_pop
qty_to_pop = 0
return _get_fifo_lifo_rate(previous_stock_queue, qty, "FIFO")
return outgoing_cost / available_qty_for_outgoing
def get_lifo_rate(previous_stock_queue, qty):
"""get LIFO (average) Rate from Queue"""
return _get_fifo_lifo_rate(previous_stock_queue, qty, "LIFO")
def _get_fifo_lifo_rate(previous_stock_queue, qty, method):
ValuationKlass = LIFOValuation if method == "LIFO" else FIFOValuation
stock_queue = ValuationKlass(previous_stock_queue)
if flt(qty) >= 0:
total_qty, total_value = stock_queue.get_total_stock_and_value()
return total_value / total_qty if total_qty else 0.0
else:
popped_bins = stock_queue.remove_stock(abs(flt(qty)))
total_qty, total_value = ValuationKlass(popped_bins).get_total_stock_and_value()
return total_value / total_qty if total_qty else 0.0
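To show the difference the valuation method makes, a hand-worked example (illustrative numbers; the import path assumes these helpers live in erpnext.stock.utils as in upstream ERPNext):

from erpnext.stock.utils import get_fifo_rate, get_lifo_rate

queue = [[5, 10], [5, 20]]  # 5 units @ 10 came in first, then 5 units @ 20

get_fifo_rate([list(b) for b in queue], -6)  # (5*10 + 1*20) / 6  ~= 11.67
get_lifo_rate([list(b) for b in queue], -6)  # (5*20 + 1*10) / 6  ~= 18.33
get_fifo_rate([list(b) for b in queue], 3)   # incoming qty: weighted average 150 / 10 = 15.0

# Copies are passed because both helpers pop consumed bins from the list they receive.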
def get_valid_serial_nos(sr_nos, qty=0, item_code=''):
"""split serial nos, validate and return list of valid serial nos"""

View File

@ -1,15 +1,54 @@
from abc import ABC, abstractmethod, abstractproperty
from typing import Callable, List, NewType, Optional, Tuple
from frappe.utils import flt
FifoBin = NewType("FifoBin", List[float])
StockBin = NewType("StockBin", List[float]) # [[qty, rate], ...]
# Indexes of values inside FIFO bin 2-tuple
QTY = 0
RATE = 1
class FIFOValuation:
class BinWiseValuation(ABC):
@abstractmethod
def add_stock(self, qty: float, rate: float) -> None:
pass
@abstractmethod
def remove_stock(
self, qty: float, outgoing_rate: float = 0.0, rate_generator: Callable[[], float] = None
) -> List[StockBin]:
pass
@abstractproperty
def state(self) -> List[StockBin]:
pass
def get_total_stock_and_value(self) -> Tuple[float, float]:
total_qty = 0.0
total_value = 0.0
for qty, rate in self.state:
total_qty += flt(qty)
total_value += flt(qty) * flt(rate)
return _round_off_if_near_zero(total_qty), _round_off_if_near_zero(total_value)
def __repr__(self):
return str(self.state)
def __iter__(self):
return iter(self.state)
def __eq__(self, other):
if isinstance(other, list):
return self.state == other
return type(self) == type(other) and self.state == other.state
class FIFOValuation(BinWiseValuation):
"""Valuation method where a queue of all the incoming stock is maintained.
New stock is added at end of the queue.
@ -24,34 +63,14 @@ class FIFOValuation:
# ref: https://docs.python.org/3/reference/datamodel.html#slots
__slots__ = ["queue",]
def __init__(self, state: Optional[List[FifoBin]]):
self.queue: List[FifoBin] = state if state is not None else []
def __init__(self, state: Optional[List[StockBin]]):
self.queue: List[StockBin] = state if state is not None else []
def __repr__(self):
return str(self.queue)
def __iter__(self):
return iter(self.queue)
def __eq__(self, other):
if isinstance(other, list):
return self.queue == other
return self.queue == other.queue
def get_state(self) -> List[FifoBin]:
@property
def state(self) -> List[StockBin]:
"""Get current state of queue."""
return self.queue
def get_total_stock_and_value(self) -> Tuple[float, float]:
total_qty = 0.0
total_value = 0.0
for qty, rate in self.queue:
total_qty += flt(qty)
total_value += flt(qty) * flt(rate)
return _round_off_if_near_zero(total_qty), _round_off_if_near_zero(total_value)
def add_stock(self, qty: float, rate: float) -> None:
"""Update fifo queue with new stock.
@ -78,7 +97,7 @@ class FIFOValuation:
def remove_stock(
self, qty: float, outgoing_rate: float = 0.0, rate_generator: Callable[[], float] = None
) -> List[FifoBin]:
) -> List[StockBin]:
"""Remove stock from the queue and return popped bins.
args:
@ -136,6 +155,101 @@ class FIFOValuation:
return consumed_bins
class LIFOValuation(BinWiseValuation):
"""Valuation method where a *stack* of all the incoming stock is maintained.
New stock is added at top of the stack.
Qty consumption happens on Last In First Out basis.
Stack is implemented using "bins" of [qty, rate].
ref: https://en.wikipedia.org/wiki/FIFO_and_LIFO_accounting
Implementation detail: appends and pops both at end of list.
"""
# specifying the attributes to save resources
# ref: https://docs.python.org/3/reference/datamodel.html#slots
__slots__ = ["stack",]
def __init__(self, state: Optional[List[StockBin]]):
self.stack: List[StockBin] = state if state is not None else []
@property
def state(self) -> List[StockBin]:
"""Get current state of stack."""
return self.stack
def add_stock(self, qty: float, rate: float) -> None:
"""Update lifo stack with new stock.
args:
qty: new quantity to add
rate: incoming rate of new quantity.
Behaviour of this is the same as FIFO valuation.
"""
if not len(self.stack):
self.stack.append([0, 0])
# last row has the same rate, merge new bin.
if self.stack[-1][RATE] == rate:
self.stack[-1][QTY] += qty
else:
# Item has a positive balance qty, add new entry
if self.stack[-1][QTY] > 0:
self.stack.append([qty, rate])
else: # negative balance qty
qty = self.stack[-1][QTY] + qty
if qty > 0: # new balance qty is positive
self.stack[-1] = [qty, rate]
else: # new balance qty is still negative, maintain same rate
self.stack[-1][QTY] = qty
def remove_stock(
self, qty: float, outgoing_rate: float = 0.0, rate_generator: Callable[[], float] = None
) -> List[StockBin]:
"""Remove stock from the stack and return popped bins.
args:
qty: quantity to remove
rate: outgoing rate - ignored. Kept for backwards compatibility.
rate_generator: function to be called if the stack is exhausted and a rate is required.
"""
if not rate_generator:
rate_generator = lambda : 0.0 # noqa
consumed_bins = []
while qty:
if not len(self.stack):
# rely on rate generator.
self.stack.append([0, rate_generator()])
# start at the end.
index = -1
stock_bin = self.stack[index]
if qty >= stock_bin[QTY]:
# consume current bin
qty = _round_off_if_near_zero(qty - stock_bin[QTY])
to_consume = self.stack.pop(index)
consumed_bins.append(list(to_consume))
if not self.stack and qty:
# stock finished, qty still remains to be withdrawn
# negative stock, keep in as a negative bin
self.stack.append([-qty, outgoing_rate or stock_bin[RATE]])
consumed_bins.append([qty, outgoing_rate or stock_bin[RATE]])
break
else:
# qty found in current bin consume it and exit
stock_bin[QTY] = _round_off_if_near_zero(stock_bin[QTY] - qty)
consumed_bins.append([qty, stock_bin[RATE]])
qty = 0
return consumed_bins
def _round_off_if_near_zero(number: float, precision: int = 7) -> float:
"""Rounds off the number to zero only if number is close to zero for decimal
specified in precision. Precision defaults to 7.
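Taken together, the new stack behaves as follows in isolation, a minimal sketch mirroring the unit tests above (values are illustrative):

from erpnext.stock.valuation import LIFOValuation

stack = LIFOValuation([])
stack.add_stock(10, 10)
stack.add_stock(10, 20)
consumed = stack.remove_stock(15)   # LIFO pops the newest bin first: [[10, 20], [5, 10]]
stack.state                         # [[5, 10]]
stack.get_total_stock_and_value()   # (5.0, 50.0)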

View File

@ -1,15 +1,25 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import unittest
import frappe
from erpnext.accounts.doctype.pos_profile.test_pos_profile import make_pos_profile
from erpnext.selling.page.point_of_sale.point_of_sale import get_items
from erpnext.stock.doctype.item.test_item import make_item
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
from erpnext.tests.utils import ERPNextTestCase
class TestPointOfSale(ERPNextTestCase):
class TestPointOfSale(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
frappe.db.savepoint('before_test_point_of_sale')
@classmethod
def tearDownClass(cls) -> None:
frappe.db.rollback(save_point='before_test_point_of_sale')
def test_item_search(self):
"""
Test Stock and Service Item Search.

View File

@ -913,6 +913,7 @@ Email Account,E-Mail-Konto,
Email Address,E-Mail-Adresse,
"Email Address must be unique, already exists for {0}","E-Mail-Adresse muss eindeutig sein, diese wird bereits für {0} verwendet",
Email Digest: ,E-Mail-Bericht:,
Email Digest Recipient,E-Mail-Berichtsempfänger,
Email Reminders will be sent to all parties with email contacts,E-Mail-Erinnerungen werden an alle Parteien mit E-Mail-Kontakten gesendet,
Email Sent,E-Mail wurde versandt,
Email Template,E-Mail-Vorlage,
@ -1596,6 +1597,7 @@ Method,Methode,
Middle Income,Mittleres Einkommen,
Middle Name,Zweiter Vorname,
Middle Name (Optional),Weiterer Vorname (optional),
Milestonde,Meilenstein,
Min Amt can not be greater than Max Amt,Min. Amt kann nicht größer als Max. Amt sein,
Min Qty can not be greater than Max Qty,Mindestmenge kann nicht größer als Maximalmenge sein,
Minimum Lead Age (Days),Mindest Lead-Alter (in Tagen),
@ -2944,7 +2946,7 @@ Temporary Accounts,Temporäre Konten,
Temporary Opening,Temporäre Eröffnungskonten,
Terms and Conditions,Allgemeine Geschäftsbedingungen,
Terms and Conditions Template,Vorlage für Allgemeine Geschäftsbedingungen,
Territory,Region,
Territory,Gebiet,
Test,Test,
Thank you,Danke,
Thank you for your business!,Vielen Dank für Ihr Unternehmen!,

Can't render this file because it is too large.

Some files were not shown because too many files have changed in this diff.