Merge branch 'develop' of https://github.com/frappe/erpnext into loan_bank_reco
commit b7c388976d
.github/helper/install.sh (vendored) | 14
@@ -40,10 +40,14 @@ if [ "$DB" == "postgres" ];then
     echo "travis" | psql -h 127.0.0.1 -p 5432 -c "CREATE USER test_frappe WITH PASSWORD 'test_frappe'" -U postgres;
 fi
 
-wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
-tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
-sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
-sudo chmod o+x /usr/local/bin/wkhtmltopdf
+install_whktml() {
+    wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
+    tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
+    sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
+    sudo chmod o+x /usr/local/bin/wkhtmltopdf
+}
+install_whktml &
 
 cd ~/frappe-bench || exit
 
@@ -57,5 +61,5 @@ bench get-app erpnext "${GITHUB_WORKSPACE}"
 if [ "$TYPE" == "server" ]; then bench setup requirements --dev; fi
 
 bench start &> bench_run_logs.txt &
+CI=Yes bench build --app frappe &
 bench --site test_site reinstall --yes
-bench build --app frappe
.mergify.yml | 30
@@ -17,6 +17,36 @@ pull_request_rules:
        @{{author}}, thanks for the contribution, but we do not accept pull requests on a stable branch. Please raise PR on an appropriate hotfix branch.
        https://github.com/frappe/erpnext/wiki/Pull-Request-Checklist#which-branch
 
+  - name: backport to develop
+    conditions:
+      - label="backport develop"
+    actions:
+      backport:
+        branches:
+          - develop
+        assignees:
+          - "{{ author }}"
+
+  - name: backport to version-14-hotfix
+    conditions:
+      - label="backport version-14-hotfix"
+    actions:
+      backport:
+        branches:
+          - version-14-hotfix
+        assignees:
+          - "{{ author }}"
+
+  - name: backport to version-14-pre-release
+    conditions:
+      - label="backport version-14-pre-release"
+    actions:
+      backport:
+        branches:
+          - version-14-pre-release
+        assignees:
+          - "{{ author }}"
+
   - name: backport to version-13-hotfix
     conditions:
       - label="backport version-13-hotfix"
@@ -64,6 +64,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
 				"account_currency",
 				(r) => {
 					frm.currency = r.account_currency;
+					frm.trigger("render_chart");
 				}
 			);
 		}
@@ -128,7 +129,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
 		}
 	},
 
-	render_chart(frm) {
+	render_chart: frappe.utils.debounce((frm) => {
 		frm.cards_manager = new erpnext.accounts.bank_reconciliation.NumberCardManager(
 			{
 				$reconciliation_tool_cards: frm.get_field(
@@ -140,7 +141,7 @@ frappe.ui.form.on("Bank Reconciliation Tool", {
 				currency: frm.currency,
 			}
 		);
-	},
+	}, 500),
 
 	render(frm) {
 		if (frm.doc.bank_account) {
@@ -1,94 +1,34 @@
 {
- "allow_copy": 0,
- "allow_guest_to_view": 0,
- "allow_import": 0,
- "allow_rename": 0,
- "autoname": "field:mapping",
- "beta": 0,
+ "actions": [],
  "creation": "2018-02-08 10:18:48.513608",
- "custom": 0,
- "docstatus": 0,
  "doctype": "DocType",
- "document_type": "",
  "editable_grid": 1,
  "engine": "InnoDB",
+ "field_order": [
+  "mapping"
+ ],
  "fields": [
   {
-   "allow_bulk_edit": 0,
-   "allow_on_submit": 0,
-   "bold": 0,
-   "collapsible": 0,
-   "columns": 0,
    "fieldname": "mapping",
    "fieldtype": "Link",
-   "hidden": 0,
-   "ignore_user_permissions": 0,
-   "ignore_xss_filter": 0,
-   "in_filter": 0,
-   "in_global_search": 0,
    "in_list_view": 1,
-   "in_standard_filter": 0,
    "label": "Mapping",
-   "length": 0,
-   "no_copy": 0,
    "options": "Cash Flow Mapping",
-   "permlevel": 0,
-   "precision": "",
-   "print_hide": 0,
-   "print_hide_if_no_value": 0,
-   "read_only": 0,
-   "remember_last_selected_value": 0,
-   "report_hide": 0,
    "reqd": 1,
-   "search_index": 0,
-   "set_only_once": 0,
-   "unique": 0
+   "unique": 1
   }
  ],
- "has_web_view": 0,
- "hide_heading": 0,
- "hide_toolbar": 0,
- "idx": 0,
- "image_view": 0,
- "in_create": 0,
- "is_submittable": 0,
- "issingle": 0,
- "istable": 0,
- "max_attachments": 0,
- "modified": "2018-02-08 10:33:39.413930",
+ "istable": 1,
+ "links": [],
+ "modified": "2022-02-21 03:34:57.902332",
  "modified_by": "Administrator",
  "module": "Accounts",
  "name": "Cash Flow Mapping Template Details",
- "name_case": "",
  "owner": "Administrator",
- "permissions": [
-  {
-   "amend": 0,
-   "apply_user_permissions": 0,
-   "cancel": 0,
-   "create": 1,
-   "delete": 1,
-   "email": 1,
-   "export": 1,
-   "if_owner": 0,
-   "import": 0,
-   "permlevel": 0,
-   "print": 1,
-   "read": 1,
-   "report": 1,
-   "role": "System Manager",
-   "set_user_permissions": 0,
-   "share": 1,
-   "submit": 0,
-   "write": 1
-  }
- ],
+ "permissions": [],
  "quick_entry": 1,
- "read_only": 0,
- "read_only_onload": 0,
- "show_name_in_global_search": 0,
  "sort_field": "modified",
  "sort_order": "DESC",
- "track_changes": 1,
- "track_seen": 0
+ "states": [],
+ "track_changes": 1
 }
@@ -319,13 +319,18 @@ def make_reverse_gl_entries(gl_entries=None, voucher_type=None, voucher_no=None,
 	"""
 
 	if not gl_entries:
-		gl_entries = frappe.get_all("GL Entry",
-			fields = ["*"],
-			filters = {
-				"voucher_type": voucher_type,
-				"voucher_no": voucher_no,
-				"is_cancelled": 0
-			})
+		gl_entry = frappe.qb.DocType("GL Entry")
+		gl_entries = (frappe.qb.from_(
+				gl_entry
+			).select(
+				'*'
+			).where(
+				gl_entry.voucher_type == voucher_type
+			).where(
+				gl_entry.voucher_no == voucher_no
+			).where(
+				gl_entry.is_cancelled == 0
+			).for_update()).run(as_dict=1)
 
 	if gl_entries:
 		validate_accounting_period(gl_entries)
@@ -333,23 +338,24 @@ def make_reverse_gl_entries(gl_entries=None, voucher_type=None, voucher_no=None,
 		set_as_cancel(gl_entries[0]['voucher_type'], gl_entries[0]['voucher_no'])
 
 		for entry in gl_entries:
-			entry['name'] = None
-			debit = entry.get('debit', 0)
-			credit = entry.get('credit', 0)
+			new_gle = copy.deepcopy(entry)
+			new_gle['name'] = None
+			debit = new_gle.get('debit', 0)
+			credit = new_gle.get('credit', 0)
 
-			debit_in_account_currency = entry.get('debit_in_account_currency', 0)
-			credit_in_account_currency = entry.get('credit_in_account_currency', 0)
+			debit_in_account_currency = new_gle.get('debit_in_account_currency', 0)
+			credit_in_account_currency = new_gle.get('credit_in_account_currency', 0)
 
-			entry['debit'] = credit
-			entry['credit'] = debit
-			entry['debit_in_account_currency'] = credit_in_account_currency
-			entry['credit_in_account_currency'] = debit_in_account_currency
+			new_gle['debit'] = credit
+			new_gle['credit'] = debit
+			new_gle['debit_in_account_currency'] = credit_in_account_currency
+			new_gle['credit_in_account_currency'] = debit_in_account_currency
 
-			entry['remarks'] = "On cancellation of " + entry['voucher_no']
-			entry['is_cancelled'] = 1
+			new_gle['remarks'] = "On cancellation of " + new_gle['voucher_no']
+			new_gle['is_cancelled'] = 1
 
-			if entry['debit'] or entry['credit']:
-				make_entry(entry, adv_adj, "Yes")
+			if new_gle['debit'] or new_gle['credit']:
+				make_entry(new_gle, adv_adj, "Yes")
 
 
 def check_freezing_date(posting_date, adv_adj=False):
@@ -354,9 +354,6 @@ def accumulate_values_into_parents(accounts, accounts_by_name, companies):
 		if d.parent_account:
 			account = d.parent_account_name
 
-			# if not accounts_by_name.get(account):
-			# 	continue
-
 			for company in companies:
 				accounts_by_name[account][company] = \
 					accounts_by_name[account].get(company, 0.0) + d.get(company, 0.0)
@@ -367,7 +364,7 @@ def accumulate_values_into_parents(accounts, accounts_by_name, companies):
 				accounts_by_name[account].get("opening_balance", 0.0) + d.get("opening_balance", 0.0)
 
 def get_account_heads(root_type, companies, filters):
-	accounts = get_accounts(root_type, filters)
+	accounts = get_accounts(root_type, companies)
 
 	if not accounts:
 		return None, None, None
@@ -396,7 +393,7 @@ def update_parent_account_names(accounts):
 
 	for account in accounts:
 		if account.parent_account:
-			account["parent_account_name"] = name_to_account_map[account.parent_account]
+			account["parent_account_name"] = name_to_account_map.get(account.parent_account)
 
 	return accounts
 
@@ -419,12 +416,19 @@ def get_subsidiary_companies(company):
 	return frappe.db.sql_list("""select name from `tabCompany`
 		where lft >= {0} and rgt <= {1} order by lft, rgt""".format(lft, rgt))
 
-def get_accounts(root_type, filters):
-	return frappe.db.sql(""" select name, is_group, company,
-			parent_account, lft, rgt, root_type, report_type, account_name, account_number
-		from
-			`tabAccount` where company = %s and root_type = %s
-		""" , (filters.get('company'), root_type), as_dict=1)
+def get_accounts(root_type, companies):
+	accounts = []
+	added_accounts = []
+
+	for company in companies:
+		for account in frappe.get_all("Account", fields=["name", "is_group", "company",
+			"parent_account", "lft", "rgt", "root_type", "report_type", "account_name", "account_number"],
+			filters={"company": company, "root_type": root_type}):
+			if account.account_name not in added_accounts:
+				accounts.append(account)
+				added_accounts.append(account.account_name)
+
+	return accounts
 
 def prepare_data(accounts, start_date, end_date, balance_must_be, companies, company_currency, filters):
 	data = []
@@ -23,7 +23,7 @@ def validate_filters(filters):
 def get_result(filters, tds_docs, tds_accounts, tax_category_map):
 	supplier_map = get_supplier_pan_map()
 	tax_rate_map = get_tax_rate_map(filters)
-	gle_map = get_gle_map(filters, tds_docs)
+	gle_map = get_gle_map(tds_docs)
 
 	out = []
 	for name, details in gle_map.items():
@@ -43,7 +43,7 @@ def get_result(filters, tds_docs, tds_accounts, tax_category_map):
 			if entry.account in tds_accounts:
 				tds_deducted += (entry.credit - entry.debit)
 
-			total_amount_credited += (entry.credit - entry.debit)
+			total_amount_credited += entry.credit
 
 		if tds_deducted:
 			row = {
@@ -78,7 +78,7 @@ def get_supplier_pan_map():
 
 	return supplier_map
 
-def get_gle_map(filters, documents):
+def get_gle_map(documents):
 	# create gle_map of the form
 	# {"purchase_invoice": list of dict of all gle created for this invoice}
 	gle_map = {}
@@ -86,7 +86,7 @@ def get_gle_map(filters, documents):
 	gle = frappe.db.get_all('GL Entry',
 		{
 			"voucher_no": ["in", documents],
-			"credit": (">", 0)
+			"is_cancelled": 0
 		},
 		["credit", "debit", "account", "voucher_no", "posting_date", "voucher_type", "against", "party"],
 	)
@@ -184,21 +184,28 @@ def get_tds_docs(filters):
 	payment_entries = []
 	journal_entries = []
 	tax_category_map = {}
+	or_filters = {}
+	bank_accounts = frappe.get_all('Account', {'is_group': 0, 'account_type': 'Bank'}, pluck="name")
 
 	tds_accounts = frappe.get_all("Tax Withholding Account", {'company': filters.get('company')},
 		pluck="account")
 
 	query_filters = {
-		"credit": ('>', 0),
 		"account": ("in", tds_accounts),
 		"posting_date": ("between", [filters.get("from_date"), filters.get("to_date")]),
-		"is_cancelled": 0
+		"is_cancelled": 0,
+		"against": ("not in", bank_accounts)
 	}
 
-	if filters.get('supplier'):
-		query_filters.update({'against': filters.get('supplier')})
+	if filters.get("supplier"):
+		del query_filters["account"]
+		del query_filters["against"]
+		or_filters = {
+			"against": filters.get('supplier'),
+			"party": filters.get('supplier')
+		}
 
-	tds_docs = frappe.get_all("GL Entry", query_filters, ["voucher_no", "voucher_type", "against", "party"])
+	tds_docs = frappe.get_all("GL Entry", filters=query_filters, or_filters=or_filters, fields=["voucher_no", "voucher_type", "against", "party"])
 
 	for d in tds_docs:
 		if d.voucher_type == "Purchase Invoice":
@@ -49,7 +49,7 @@ valid_scorecard = [
 			"min_grade":0.0,"name":"Very Poor",
 			"prevent_rfqs":1,
 			"notify_supplier":0,
-			"doctype":"Supplier Scorecard Standing",
+			"doctype":"Supplier Scorecard Scoring Standing",
 			"max_grade":30.0,
 			"prevent_pos":1,
 			"warn_pos":0,
@@ -65,7 +65,7 @@ valid_scorecard = [
 			"name":"Poor",
 			"prevent_rfqs":1,
 			"notify_supplier":0,
-			"doctype":"Supplier Scorecard Standing",
+			"doctype":"Supplier Scorecard Scoring Standing",
 			"max_grade":50.0,
 			"prevent_pos":0,
 			"warn_pos":0,
@@ -81,7 +81,7 @@ valid_scorecard = [
 			"name":"Average",
 			"prevent_rfqs":0,
 			"notify_supplier":0,
-			"doctype":"Supplier Scorecard Standing",
+			"doctype":"Supplier Scorecard Scoring Standing",
 			"max_grade":80.0,
 			"prevent_pos":0,
 			"warn_pos":0,
@@ -97,7 +97,7 @@ valid_scorecard = [
 			"name":"Excellent",
 			"prevent_rfqs":0,
 			"notify_supplier":0,
-			"doctype":"Supplier Scorecard Standing",
+			"doctype":"Supplier Scorecard Scoring Standing",
 			"max_grade":100.0,
 			"prevent_pos":0,
 			"warn_pos":0,
@@ -1955,6 +1955,7 @@ def update_bin_on_delete(row, doctype):
 
 	qty_dict["ordered_qty"] = get_ordered_qty(row.item_code, row.warehouse)
 
+	if row.warehouse:
 		update_bin_qty(row.item_code, row.warehouse, qty_dict)
 
 def validate_and_delete_children(parent, data):
@@ -3,7 +3,7 @@
  "allow_events_in_timeline": 0,
  "allow_guest_to_view": 0,
  "allow_import": 0,
- "allow_rename": 0,
+ "allow_rename": 1,
  "autoname": "field:lost_reason",
  "beta": 0,
  "creation": "2018-12-28 14:48:51.044975",
@@ -57,7 +57,7 @@
  "issingle": 0,
  "istable": 0,
  "max_attachments": 0,
- "modified": "2018-12-28 14:49:43.336437",
+ "modified": "2022-02-16 10:49:43.336437",
  "modified_by": "Administrator",
  "module": "CRM",
  "name": "Opportunity Lost Reason",
@@ -66,26 +66,24 @@ class ItemVariantsCacheManager:
 			)
 		]
 
-		# join with Website Item
-		item_variants_data = frappe.get_all(
-			'Item Variant Attribute',
-			{'variant_of': parent_item_code},
-			['parent', 'attribute', 'attribute_value'],
-			order_by='name',
-			as_list=1
-		)
-
-		disabled_items = set(
-			[i.name for i in frappe.db.get_all('Item', {'disabled': 1})]
+		# Get Variants and tehir Attributes that are not disabled
+		iva = frappe.qb.DocType("Item Variant Attribute")
+		item = frappe.qb.DocType("Item")
+		query = (
+			frappe.qb.from_(iva)
+			.join(item).on(item.name == iva.parent)
+			.select(
+				iva.parent, iva.attribute, iva.attribute_value
+			).where(
+				(iva.variant_of == parent_item_code)
+				& (item.disabled == 0)
+			).orderby(iva.name)
 		)
+		item_variants_data = query.run()
 
 		attribute_value_item_map = frappe._dict()
 		item_attribute_value_map = frappe._dict()
 
-		# dont consider variants that are disabled
-		# pull all other variants
-		item_variants_data = [r for r in item_variants_data if r[0] not in disabled_items]
-
 		for row in item_variants_data:
 			item_code, attribute, attribute_value = row
 			# (attr, value) => [item1, item2]
@@ -124,4 +122,7 @@ def build_cache(item_code):
 def enqueue_build_cache(item_code):
 	if frappe.cache().hget('item_cache_build_in_progress', item_code):
 		return
-	frappe.enqueue(build_cache, item_code=item_code, queue='long')
+	frappe.enqueue(
+		"erpnext.e_commerce.variant_selector.item_variants_cache.build_cache",
+		item_code=item_code, queue='long'
+	)
@@ -104,6 +104,8 @@ class TestVariantSelector(ERPNextTestCase):
 		})
 
 		make_web_item_price(item_code="Test-Tshirt-Temp-S-R", price_list_rate=100)
 
+		frappe.local.shopping_cart_settings = None # clear cached settings values
+
 		next_values = get_next_attribute_and_values(
 			"Test-Tshirt-Temp",
 			selected_attributes={"Test Size": "Small", "Test Colour": "Red"}
@@ -13,7 +13,7 @@ from frappe.utils import call_hook_method, cint, flt, get_url
 
 
 class GoCardlessSettings(Document):
-	supported_currencies = ["EUR", "DKK", "GBP", "SEK"]
+	supported_currencies = ["EUR", "DKK", "GBP", "SEK", "AUD", "NZD", "CAD", "USD"]
 
 	def validate(self):
 		self.initialize_client()
@@ -80,7 +80,7 @@ class GoCardlessSettings(Document):
 
 	def validate_transaction_currency(self, currency):
 		if currency not in self.supported_currencies:
-			frappe.throw(_("Please select another payment method. Stripe does not support transactions in currency '{0}'").format(currency))
+			frappe.throw(_("Please select another payment method. Go Cardless does not support transactions in currency '{0}'").format(currency))
 
 	def get_payment_url(self, **kwargs):
 		return get_url("./integrations/gocardless_checkout?{0}".format(urlencode(kwargs)))
@@ -74,39 +74,6 @@ class LoanInterestAccrual(AccountsController):
 				})
 			)
 
-		if self.payable_principal_amount:
-			gle_map.append(
-				self.get_gl_dict({
-					"account": self.loan_account,
-					"party_type": self.applicant_type,
-					"party": self.applicant,
-					"against": self.interest_income_account,
-					"debit": self.payable_principal_amount,
-					"debit_in_account_currency": self.interest_amount,
-					"against_voucher_type": "Loan",
-					"against_voucher": self.loan,
-					"remarks": _("Interest accrued from {0} to {1} against loan: {2}").format(
-						self.last_accrual_date, self.posting_date, self.loan),
-					"cost_center": erpnext.get_default_cost_center(self.company),
-					"posting_date": self.posting_date
-				})
-			)
-
-			gle_map.append(
-				self.get_gl_dict({
-					"account": self.interest_income_account,
-					"against": self.loan_account,
-					"credit": self.payable_principal_amount,
-					"credit_in_account_currency": self.interest_amount,
-					"against_voucher_type": "Loan",
-					"against_voucher": self.loan,
-					"remarks": ("Interest accrued from {0} to {1} against loan: {2}").format(
-						self.last_accrual_date, self.posting_date, self.loan),
-					"cost_center": erpnext.get_default_cost_center(self.company),
-					"posting_date": self.posting_date
-				})
-			)
-
 		if gle_map:
 			make_gl_entries(gle_map, cancel=cancel, adv_adj=adv_adj)
 
@@ -319,7 +319,7 @@ class ProductionPlan(Document):
 
 		if self.total_produced_qty > 0:
 			self.status = "In Process"
-			if self.check_have_work_orders_completed():
+			if self.all_items_completed():
 				self.status = "Completed"
 
 		if self.status != 'Completed':
@@ -591,21 +591,32 @@ class ProductionPlan(Document):
 
 			self.append("sub_assembly_items", data)
 
-	def check_have_work_orders_completed(self):
-		wo_status = frappe.db.get_list(
+	def all_items_completed(self):
+		all_items_produced = all(flt(d.planned_qty) - flt(d.produced_qty) < 0.000001
+			for d in self.po_items)
+		if not all_items_produced:
+			return False
+
+		wo_status = frappe.get_all(
 			"Work Order",
-			filters={"production_plan": self.name},
+			filters={
+				"production_plan": self.name,
+				"status": ("not in", ["Closed", "Stopped"]),
+				"docstatus": ("<", 2),
+			},
 			fields="status",
-			pluck="status"
+			pluck="status",
 		)
-		return all(s == "Completed" for s in wo_status)
+		all_work_orders_completed = all(s == "Completed" for s in wo_status)
+		return all_work_orders_completed
 
 @frappe.whitelist()
 def download_raw_materials(doc, warehouses=None):
 	if isinstance(doc, str):
 		doc = frappe._dict(json.loads(doc))
 
-	item_list = [['Item Code', 'Description', 'Stock UOM', 'Warehouse', 'Required Qty as per BOM',
+	item_list = [['Item Code', 'Item Name', 'Description',
+		'Stock UOM', 'Warehouse', 'Required Qty as per BOM',
 		'Projected Qty', 'Available Qty In Hand', 'Ordered Qty', 'Planned Qty',
 		'Reserved Qty for Production', 'Safety Stock', 'Required Qty']]
 
@@ -614,7 +625,8 @@ def download_raw_materials(doc, warehouses=None):
 	items = get_items_for_material_requests(doc, warehouses=warehouses, get_parent_warehouse_data=True)
 
 	for d in items:
-		item_list.append([d.get('item_code'), d.get('description'), d.get('stock_uom'), d.get('warehouse'),
+		item_list.append([d.get('item_code'), d.get('item_name'),
+			d.get('description'), d.get('stock_uom'), d.get('warehouse'),
 			d.get('required_bom_qty'), d.get('projected_qty'), d.get('actual_qty'), d.get('ordered_qty'),
 			d.get('planned_qty'), d.get('reserved_qty_for_production'), d.get('safety_stock'), d.get('quantity')])
 
@@ -409,9 +409,6 @@ class TestProductionPlan(ERPNextTestCase):
 		boms = {
 			"Assembly": {
 				"SubAssembly1": {"ChildPart1": {}, "ChildPart2": {},},
-				"SubAssembly2": {"ChildPart3": {}},
-				"SubAssembly3": {"SubSubAssy1": {"ChildPart4": {}}},
-				"ChildPart5": {},
 				"ChildPart6": {},
 				"SubAssembly4": {"SubSubAssy2": {"ChildPart7": {}}},
 			},
@@ -591,6 +588,20 @@ class TestProductionPlan(ERPNextTestCase):
 		pln.reload()
 		self.assertEqual(pln.po_items[0].pending_qty, 1)
 
+	def test_qty_based_status(self):
+		pp = frappe.new_doc("Production Plan")
+		pp.po_items = [
+			frappe._dict(planned_qty=5, produce_qty=4)
+		]
+		self.assertFalse(pp.all_items_completed())
+
+		pp.po_items = [
+			frappe._dict(planned_qty=5, produce_qty=10),
+			frappe._dict(planned_qty=5, produce_qty=4)
+		]
+		self.assertFalse(pp.all_items_completed())
+
+
 def create_production_plan(**args):
 	"""
 	sales_order (obj): Sales Order Doc Object
@@ -329,7 +329,6 @@ execute:frappe.delete_doc_if_exists('Workspace', 'ERPNext Integrations Settings'
 erpnext.patches.v14_0.set_payroll_cost_centers
 erpnext.patches.v13_0.agriculture_deprecation_warning
 erpnext.patches.v13_0.hospitality_deprecation_warning
-erpnext.patches.v13_0.update_exchange_rate_settings
 erpnext.patches.v13_0.update_asset_quantity_field
 erpnext.patches.v13_0.delete_bank_reconciliation_detail
 erpnext.patches.v13_0.enable_provisional_accounting
@@ -351,5 +350,7 @@ erpnext.patches.v13_0.convert_to_website_item_in_item_card_group_template
 erpnext.patches.v13_0.shopping_cart_to_ecommerce
 erpnext.patches.v13_0.update_disbursement_account
 erpnext.patches.v13_0.update_reserved_qty_closed_wo
+erpnext.patches.v13_0.update_exchange_rate_settings
 erpnext.patches.v14_0.delete_amazon_mws_doctype
+erpnext.patches.v13_0.set_work_order_qty_in_so_from_mr
 erpnext.patches.v13_0.update_accounts_in_loan_docs
@@ -9,6 +9,8 @@ def execute():
 		FROM `tabBin`""",as_dict=1)
 
 	for entry in bin_details:
+		if not (entry.item_code and entry.warehouse):
+			continue
 		update_bin_qty(entry.get("item_code"), entry.get("warehouse"), {
 			"indented_qty": get_indented_qty(entry.get("item_code"), entry.get("warehouse"))
 		})
erpnext/patches/v13_0/set_work_order_qty_in_so_from_mr.py (new file) | 36
@@ -0,0 +1,36 @@
+import frappe
+
+
+def execute():
+	"""
+	1. Get submitted Work Orders with MR, MR Item and SO set
+	2. Get SO Item detail from MR Item detail in WO, and set in WO
+	3. Update work_order_qty in SO
+	"""
+	work_order = frappe.qb.DocType("Work Order")
+	query = (
+		frappe.qb.from_(work_order)
+		.select(
+			work_order.name, work_order.produced_qty,
+			work_order.material_request,
+			work_order.material_request_item,
+			work_order.sales_order
+		).where(
+			(work_order.material_request.isnotnull())
+			& (work_order.material_request_item.isnotnull())
+			& (work_order.sales_order.isnotnull())
+			& (work_order.docstatus == 1)
+			& (work_order.produced_qty > 0)
+		)
+	)
+	results = query.run(as_dict=True)
+
+	for row in results:
+		so_item = frappe.get_value(
+			"Material Request Item", row.material_request_item, "sales_order_item"
+		)
+		frappe.db.set_value("Work Order", row.name, "sales_order_item", so_item)
+
+		if so_item:
+			wo = frappe.get_doc("Work Order", row.name)
+			wo.update_work_order_qty_in_so()
@@ -6,9 +6,6 @@ from erpnext.setup.utils import get_exchange_rate
 
 
 def execute():
-	frappe.reload_doc('crm', 'doctype', 'opportunity')
-	frappe.reload_doc('crm', 'doctype', 'opportunity_item')
-
 	opportunities = frappe.db.get_list('Opportunity', filters={
 		'opportunity_amount': ['>', 0]
 	}, fields=['name', 'company', 'currency', 'opportunity_amount'])
@@ -20,15 +17,11 @@ def execute():
 		if opportunity.currency != company_currency:
 			conversion_rate = get_exchange_rate(opportunity.currency, company_currency)
 			base_opportunity_amount = flt(conversion_rate) * flt(opportunity.opportunity_amount)
-			grand_total = flt(opportunity.opportunity_amount)
-			base_grand_total = flt(conversion_rate) * flt(opportunity.opportunity_amount)
 		else:
 			conversion_rate = 1
-			base_opportunity_amount = grand_total = base_grand_total = flt(opportunity.opportunity_amount)
+			base_opportunity_amount = flt(opportunity.opportunity_amount)
 
 		frappe.db.set_value('Opportunity', opportunity.name, {
 			'conversion_rate': conversion_rate,
-			'base_opportunity_amount': base_opportunity_amount,
-			'grand_total': grand_total,
-			'base_grand_total': base_grand_total
+			'base_opportunity_amount': base_opportunity_amount
 		}, update_modified=False)
@@ -29,6 +29,8 @@ def execute():
 	""")
 
 	for item_code, warehouse in repost_for:
+		if not (item_code and warehouse):
+			continue
 		update_bin_qty(item_code, warehouse, {
 			"reserved_qty": get_reserved_qty(item_code, warehouse)
 		})
@@ -14,6 +14,8 @@ def execute():
 			union
 			select item_code, warehouse from `tabStock Ledger Entry`) a"""):
 		try:
+			if not (item_code and warehouse):
+				continue
 			count += 1
 			update_bin_qty(item_code, warehouse, {
 				"indented_qty": get_indented_qty(item_code, warehouse),
@@ -6,6 +6,7 @@ import random
 import unittest
 
 import frappe
+from frappe.model.document import Document
 from frappe.utils import (
 	add_days,
 	add_months,
@@ -687,7 +688,9 @@ def make_employee_salary_slip(user, payroll_frequency, salary_structure=None):
 
 def make_salary_component(salary_components, test_tax, company_list=None):
 	for salary_component in salary_components:
-		if not frappe.db.exists('Salary Component', salary_component["salary_component"]):
+		if frappe.db.exists('Salary Component', salary_component["salary_component"]):
+			continue
+
 		if test_tax:
 			if salary_component["type"] == "Earning":
 				salary_component["is_tax_applicable"] = 1
@@ -697,10 +700,13 @@ def make_salary_component(salary_components, test_tax, company_list=None):
 			salary_component["amount"] = 0
 			salary_component["formula"] = ""
 			salary_component["condition"] = ""
-		salary_component["doctype"] = "Salary Component"
 		salary_component["salary_component_abbr"] = salary_component["abbr"]
-		frappe.get_doc(salary_component).insert()
-		get_salary_component_account(salary_component["salary_component"], company_list)
+		doc = frappe.new_doc("Salary Component")
+		doc.update(salary_component)
+		doc.insert()
+
+		get_salary_component_account(doc, company_list)
 
 def get_salary_component_account(sal_comp, company_list=None):
 	company = erpnext.get_default_company()
@@ -708,7 +714,9 @@ def get_salary_component_account(sal_comp, company_list=None):
 	if company_list and company not in company_list:
 		company_list.append(company)
 
+	if not isinstance(sal_comp, Document):
 		sal_comp = frappe.get_doc("Salary Component", sal_comp)
+
 	if not sal_comp.get("accounts"):
 		for d in company_list:
 			company_abbr = frappe.get_cached_value('Company', d, 'abbr')
@@ -151,6 +151,35 @@ class TestTimesheet(unittest.TestCase):
 		settings.ignore_employee_time_overlap = initial_setting
 		settings.save()
 
+	def test_timesheet_not_overlapping_with_continuous_timelogs(self):
+		emp = make_employee("test_employee_6@salary.com")
+
+		update_activity_type("_Test Activity Type")
+		timesheet = frappe.new_doc("Timesheet")
+		timesheet.employee = emp
+		timesheet.append(
+			'time_logs',
+			{
+				"billable": 1,
+				"activity_type": "_Test Activity Type",
+				"from_time": now_datetime(),
+				"to_time": now_datetime() + datetime.timedelta(hours=3),
+				"company": "_Test Company"
+			}
+		)
+		timesheet.append(
+			'time_logs',
+			{
+				"billable": 1,
+				"activity_type": "_Test Activity Type",
+				"from_time": now_datetime() + datetime.timedelta(hours=3),
+				"to_time": now_datetime() + datetime.timedelta(hours=4),
+				"company": "_Test Company"
+			}
+		)
+
+		timesheet.save() # should not throw an error
+
 	def test_to_time(self):
 		emp = make_employee("test_employee_6@salary.com")
 		from_time = now_datetime()
@@ -7,7 +7,7 @@ import json
 import frappe
 from frappe import _
 from frappe.model.document import Document
-from frappe.utils import add_to_date, flt, getdate, time_diff_in_hours
+from frappe.utils import add_to_date, flt, get_datetime, getdate, time_diff_in_hours
 
 from erpnext.controllers.queries import get_match_cond
 from erpnext.hr.utils import validate_active_employee
@@ -145,7 +145,7 @@ class Timesheet(Document):
 		if not (data.from_time and data.hours):
 			return
 
-		_to_time = add_to_date(data.from_time, hours=data.hours, as_datetime=True)
+		_to_time = get_datetime(add_to_date(data.from_time, hours=data.hours, as_datetime=True))
 		if data.to_time != _to_time:
 			data.to_time = _to_time
 
@@ -171,39 +171,54 @@ class Timesheet(Document):
 				.format(args.idx, self.name, existing.name), OverlapError)
 
 	def get_overlap_for(self, fieldname, args, value):
-		cond = "ts.`{0}`".format(fieldname)
-		if fieldname == 'workstation':
-			cond = "tsd.`{0}`".format(fieldname)
-
-		existing = frappe.db.sql("""select ts.name as name, tsd.from_time as from_time, tsd.to_time as to_time from
-			`tabTimesheet Detail` tsd, `tabTimesheet` ts where {0}=%(val)s and tsd.parent = ts.name and
-			(
-				(%(from_time)s > tsd.from_time and %(from_time)s < tsd.to_time) or
-				(%(to_time)s > tsd.from_time and %(to_time)s < tsd.to_time) or
-				(%(from_time)s <= tsd.from_time and %(to_time)s >= tsd.to_time))
-			and tsd.name!=%(name)s
-			and ts.name!=%(parent)s
-			and ts.docstatus < 2""".format(cond),
-			{
-				"val": value,
-				"from_time": args.from_time,
-				"to_time": args.to_time,
-				"name": args.name or "No Name",
-				"parent": args.parent or "No Name"
-			}, as_dict=True)
-		# check internal overlap
-		for time_log in self.time_logs:
-			if not (time_log.from_time and time_log.to_time
-				and args.from_time and args.to_time): continue
-
-			if (fieldname != 'workstation' or args.get(fieldname) == time_log.get(fieldname)) and \
-				args.idx != time_log.idx and ((args.from_time > time_log.from_time and args.from_time < time_log.to_time) or
-				(args.to_time > time_log.from_time and args.to_time < time_log.to_time) or
-				(args.from_time <= time_log.from_time and args.to_time >= time_log.to_time)):
+		timesheet = frappe.qb.DocType("Timesheet")
+		timelog = frappe.qb.DocType("Timesheet Detail")
+
+		from_time = get_datetime(args.from_time)
+		to_time = get_datetime(args.to_time)
+
+		existing = (
+			frappe.qb.from_(timesheet)
+			.join(timelog)
+			.on(timelog.parent == timesheet.name)
+			.select(timesheet.name.as_('name'), timelog.from_time.as_('from_time'), timelog.to_time.as_('to_time'))
+			.where(
+				(timelog.name != (args.name or "No Name"))
+				& (timesheet.name != (args.parent or "No Name"))
+				& (timesheet.docstatus < 2)
+				& (timesheet[fieldname] == value)
+				& (
+					((from_time > timelog.from_time) & (from_time < timelog.to_time))
+					| ((to_time > timelog.from_time) & (to_time < timelog.to_time))
+					| ((from_time <= timelog.from_time) & (to_time >= timelog.to_time))
+				)
+			)
+		).run(as_dict=True)
+
+		if self.check_internal_overlap(fieldname, args):
 			return self
 
 		return existing[0] if existing else None
 
+	def check_internal_overlap(self, fieldname, args):
+		for time_log in self.time_logs:
+			if not (time_log.from_time and time_log.to_time
+				and args.from_time and args.to_time):
+				continue
+
+			from_time = get_datetime(time_log.from_time)
+			to_time = get_datetime(time_log.to_time)
+			args_from_time = get_datetime(args.from_time)
+			args_to_time = get_datetime(args.to_time)
+
+			if (args.get(fieldname) == time_log.get(fieldname)) and (args.idx != time_log.idx) and (
+				(args_from_time > from_time and args_from_time < to_time)
+				or (args_to_time > from_time and args_to_time < to_time)
+				or (args_from_time <= from_time and args_to_time >= to_time)
+			):
+				return True
+		return False
+
 	def update_cost(self):
 		for data in self.time_logs:
 			if data.activity_type or data.is_billable:
@@ -14,12 +14,6 @@
   "to_time",
   "hours",
   "completed",
-  "section_break_7",
-  "completed_qty",
-  "workstation",
-  "column_break_12",
-  "operation",
-  "operation_id",
   "project_details",
   "project",
   "project_name",
@@ -83,43 +77,6 @@
    "fieldtype": "Check",
    "label": "Completed"
   },
-  {
-   "fieldname": "section_break_7",
-   "fieldtype": "Section Break"
-  },
-  {
-   "depends_on": "eval:parent.work_order",
-   "fieldname": "completed_qty",
-   "fieldtype": "Float",
-   "label": "Completed Qty"
-  },
-  {
-   "depends_on": "eval:parent.work_order",
-   "fieldname": "workstation",
-   "fieldtype": "Link",
-   "label": "Workstation",
-   "options": "Workstation",
-   "read_only": 1
-  },
-  {
-   "fieldname": "column_break_12",
-   "fieldtype": "Column Break"
-  },
-  {
-   "depends_on": "eval:parent.work_order",
-   "fieldname": "operation",
-   "fieldtype": "Link",
-   "label": "Operation",
-   "options": "Operation",
-   "read_only": 1
-  },
-  {
-   "depends_on": "eval:parent.work_order",
-   "fieldname": "operation_id",
-   "fieldtype": "Data",
-   "hidden": 1,
-   "label": "Operation Id"
-  },
   {
    "fieldname": "project_details",
    "fieldtype": "Section Break"
@@ -267,7 +224,7 @@
  "idx": 1,
  "istable": 1,
  "links": [],
- "modified": "2021-05-18 12:19:33.205940",
+ "modified": "2022-02-17 16:53:34.878798",
  "modified_by": "Administrator",
  "module": "Projects",
  "name": "Timesheet Detail",
@@ -275,5 +232,6 @@
  "permissions": [],
  "quick_entry": 1,
  "sort_field": "modified",
- "sort_order": "ASC"
+ "sort_order": "ASC",
+ "states": []
 }
@@ -2284,20 +2284,12 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
 
 	coupon_code() {
 		var me = this;
-		if (this.frm.doc.coupon_code) {
-			frappe.run_serially([
-				() => this.frm.doc.ignore_pricing_rule=1,
-				() => me.ignore_pricing_rule(),
-				() => this.frm.doc.ignore_pricing_rule=0,
-				() => me.apply_pricing_rule(),
-				() => this.frm.save()
-			]);
-		} else {
-			frappe.run_serially([
-				() => this.frm.doc.ignore_pricing_rule=1,
-				() => me.ignore_pricing_rule()
-			]);
-		}
+		frappe.run_serially([
+			() => this.frm.doc.ignore_pricing_rule=1,
+			() => me.ignore_pricing_rule(),
+			() => this.frm.doc.ignore_pricing_rule=0,
+			() => me.apply_pricing_rule()
+		]);
 	}
 };
 
@@ -17,7 +17,7 @@ frappe.query_reports["GSTR-1"] = {
 			"fieldtype": "Link",
 			"options": "Address",
 			"get_query": function () {
-				var company = frappe.query_report.get_filter_value('company');
+				let company = frappe.query_report.get_filter_value('company');
 				if (company) {
 					return {
 						"query": 'frappe.contacts.doctype.address.address.address_query',
@@ -26,6 +26,11 @@ frappe.query_reports["GSTR-1"] = {
 				}
 			}
 		},
+		{
+			"fieldname": "company_gstin",
+			"label": __("Company GSTIN"),
+			"fieldtype": "Select"
+		},
 		{
 			"fieldname": "from_date",
 			"label": __("From Date"),
@@ -60,10 +65,21 @@ frappe.query_reports["GSTR-1"] = {
 		}
 	],
 	onload: function (report) {
+		let filters = report.get_values();
+
+		frappe.call({
+			method: 'erpnext.regional.report.gstr_1.gstr_1.get_company_gstins',
+			args: {
+				company: filters.company
+			},
+			callback: function(r) {
+				frappe.query_report.page.fields_dict.company_gstin.df.options = r.message;
+				frappe.query_report.page.fields_dict.company_gstin.refresh();
+			}
+		});
+
 		report.page.add_inner_button(__("Download as JSON"), function () {
-			var filters = report.get_values();
-
 			frappe.call({
 				method: 'erpnext.regional.report.gstr_1.gstr_1.get_json',
 				args: {
@@ -253,7 +253,8 @@ class Gstr1Report(object):
 		for opts in (("company", " and company=%(company)s"),
 			("from_date", " and posting_date>=%(from_date)s"),
 			("to_date", " and posting_date<=%(to_date)s"),
-			("company_address", " and company_address=%(company_address)s")):
+			("company_address", " and company_address=%(company_address)s"),
+			("company_gstin", " and company_gstin=%(company_gstin)s")):
 			if self.filters.get(opts[0]):
 				conditions += opts[1]
 
@@ -1192,3 +1193,23 @@ def is_inter_state(invoice_detail):
 		return True
 	else:
 		return False
+
+
+@frappe.whitelist()
+def get_company_gstins(company):
+	address = frappe.qb.DocType("Address")
+	links = frappe.qb.DocType("Dynamic Link")
+
+	addresses = frappe.qb.from_(address).inner_join(links).on(
+		address.name == links.parent
+	).select(
+		address.gstin
+	).where(
+		links.link_doctype == 'Company'
+	).where(
+		links.link_name == company
+	).run(as_dict=1)
+
+	address_list = [''] + [d.gstin for d in addresses]
+
+	return address_list
@@ -102,7 +102,7 @@ def make_custom_fields():
 		]
 	}
 
-	create_custom_fields(custom_fields, update=True)
+	create_custom_fields(custom_fields, ignore_validate=True, update=True)
 
 def update_regional_tax_settings(country, company):
 	create_ksa_vat_setting(company)
@@ -6,7 +6,7 @@ import json
 import frappe
 import frappe.permissions
 from frappe.core.doctype.user_permission.test_user_permission import create_user
-from frappe.utils import add_days, flt, getdate, nowdate
+from frappe.utils import add_days, flt, getdate, nowdate, today
 
 from erpnext.controllers.accounts_controller import update_child_qty_rate
 from erpnext.maintenance.doctype.maintenance_schedule.test_maintenance_schedule import (
@@ -1399,6 +1399,48 @@ class TestSalesOrder(ERPNextTestCase):
 		so.load_from_db()
 		self.assertEqual(so.billing_status, 'Fully Billed')
 
+	def test_so_back_updated_from_wo_via_mr(self):
+		"SO -> MR (Manufacture) -> WO. Test if WO Qty is updated in SO."
+		from erpnext.manufacturing.doctype.work_order.work_order import (
+			make_stock_entry as make_se_from_wo,
+		)
+		from erpnext.stock.doctype.material_request.material_request import raise_work_orders
+
+		so = make_sales_order(item_list=[{"item_code": "_Test FG Item","qty": 2, "rate":100}])
+
+		mr = make_material_request(so.name)
+		mr.material_request_type = "Manufacture"
+		mr.schedule_date = today()
+		mr.submit()
+
+		# WO from MR
+		wo_name = raise_work_orders(mr.name)[0]
+		wo = frappe.get_doc("Work Order", wo_name)
+		wo.wip_warehouse = "Work In Progress - _TC"
+		wo.skip_transfer = True
+
+		self.assertEqual(wo.sales_order, so.name)
+		self.assertEqual(wo.sales_order_item, so.items[0].name)
+
+		wo.submit()
+		make_stock_entry(item_code="_Test Item", # Stock RM
+			target="Work In Progress - _TC",
+			qty=4, basic_rate=100
+		)
+		make_stock_entry(item_code="_Test Item Home Desktop 100", # Stock RM
+			target="Work In Progress - _TC",
+			qty=4, basic_rate=100
+		)
+
+		se = frappe.get_doc(make_se_from_wo(wo.name, "Manufacture", 2))
+		se.submit() # Finish WO
+
+		mr.reload()
+		wo.reload()
+		so.reload()
+		self.assertEqual(so.items[0].work_order_qty, wo.produced_qty)
+		self.assertEqual(mr.status, "Manufactured")
+
 def automatically_fetch_payment_terms(enable=1):
 	accounts_settings = frappe.get_doc("Accounts Settings")
 	accounts_settings.automatically_fetch_payment_terms = enable
@ -83,8 +83,8 @@
   "planned_qty",
   "column_break_69",
   "work_order_qty",
-  "delivered_qty",
   "produced_qty",
+  "delivered_qty",
   "returned_qty",
   "shopping_cart_section",
   "additional_notes",
@ -701,10 +701,8 @@
    "width": "50px"
   },
   {
-   "description": "For Production",
    "fieldname": "produced_qty",
    "fieldtype": "Float",
-   "hidden": 1,
    "label": "Produced Quantity",
    "oldfieldname": "produced_qty",
    "oldfieldtype": "Currency",
@ -802,7 +800,7 @@
  "idx": 1,
  "istable": 1,
  "links": [],
- "modified": "2021-10-05 12:27:25.014789",
+ "modified": "2022-02-21 13:55:08.883104",
  "modified_by": "Administrator",
  "module": "Selling",
  "name": "Sales Order Item",
@ -811,5 +809,6 @@
  "permissions": [],
  "sort_field": "modified",
  "sort_order": "DESC",
+ "states": [],
  "track_changes": 1
 }
@ -169,6 +169,21 @@ erpnext.PointOfSale.Payment = class {
 			}
 		});
 
+		frappe.ui.form.on('POS Invoice', 'coupon_code', (frm) => {
+			if (!frm.doc.ignore_pricing_rule) {
+				if (frm.doc.coupon_code) {
+					frappe.run_serially([
+						() => frm.doc.ignore_pricing_rule=1,
+						() => frm.trigger('ignore_pricing_rule'),
+						() => frm.doc.ignore_pricing_rule=0,
+						() => frm.trigger('apply_pricing_rule'),
+						() => frm.save(),
+						() => this.update_totals_section(frm.doc)
+					]);
+				}
+			}
+		});
+
 		this.setup_listener_for_payments();
 
 		this.$payment_modes.on('click', '.shortcut', function() {
@ -0,0 +1,84 @@
+// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
+// For license information, please see license.txt
+/* eslint-disable */
+
+function get_filters() {
+	let filters = [
+		{
+			"fieldname":"company",
+			"label": __("Company"),
+			"fieldtype": "Link",
+			"options": "Company",
+			"default": frappe.defaults.get_user_default("Company"),
+			"reqd": 1
+		},
+		{
+			"fieldname":"period_start_date",
+			"label": __("Start Date"),
+			"fieldtype": "Date",
+			"reqd": 1,
+			"default": frappe.datetime.add_months(frappe.datetime.get_today(), -1)
+		},
+		{
+			"fieldname":"period_end_date",
+			"label": __("End Date"),
+			"fieldtype": "Date",
+			"reqd": 1,
+			"default": frappe.datetime.get_today()
+		},
+		{
+			"fieldname":"sales_order",
+			"label": __("Sales Order"),
+			"fieldtype": "MultiSelectList",
+			"width": 100,
+			"options": "Sales Order",
+			"get_data": function(txt) {
+				return frappe.db.get_link_options("Sales Order", txt, this.filters());
+			},
+			"filters": () => {
+				return {
+					docstatus: 1,
+					payment_terms_template: ['not in', ['']],
+					company: frappe.query_report.get_filter_value("company"),
+					transaction_date: ['between', [frappe.query_report.get_filter_value("period_start_date"), frappe.query_report.get_filter_value("period_end_date")]]
+				}
+			},
+			on_change: function(){
+				frappe.query_report.refresh();
+			}
+		}
+	]
+
+	return filters;
+}
+
+frappe.query_reports["Payment Terms Status for Sales Order"] = {
+	"filters": get_filters(),
+	"formatter": function(value, row, column, data, default_formatter){
+		if(column.fieldname == 'invoices' && value) {
+			invoices = value.split(',');
+			const invoice_formatter = (prev_value, curr_value) => {
+				if(prev_value != "") {
+					return prev_value + ", " + default_formatter(curr_value, row, column, data);
+				}
+				else {
+					return default_formatter(curr_value, row, column, data);
+				}
+			}
+			return invoices.reduce(invoice_formatter, "")
+		}
+		else if (column.fieldname == 'paid_amount' && value){
+			formatted_value = default_formatter(value, row, column, data);
+			if(value > 0) {
+				formatted_value = "<span style='color:green;'>" + formatted_value + "</span>"
+			}
+			return formatted_value;
+		}
+		else if (column.fieldname == 'status' && value == 'Completed'){
+			return "<span style='color:green;'>" + default_formatter(value, row, column, data) + "</span>";
+		}
+
+		return default_formatter(value, row, column, data);
+	},
+};
@ -0,0 +1,38 @@
+{
+ "add_total_row": 1,
+ "columns": [],
+ "creation": "2021-12-28 10:39:34.533964",
+ "disable_prepared_report": 0,
+ "disabled": 0,
+ "docstatus": 0,
+ "doctype": "Report",
+ "filters": [],
+ "idx": 0,
+ "is_standard": "Yes",
+ "modified": "2021-12-30 10:42:06.058457",
+ "modified_by": "Administrator",
+ "module": "Selling",
+ "name": "Payment Terms Status for Sales Order",
+ "owner": "Administrator",
+ "prepared_report": 0,
+ "ref_doctype": "Sales Order",
+ "report_name": "Payment Terms Status for Sales Order",
+ "report_type": "Script Report",
+ "roles": [
+  {
+   "role": "Sales User"
+  },
+  {
+   "role": "Sales Manager"
+  },
+  {
+   "role": "Maintenance User"
+  },
+  {
+   "role": "Accounts User"
+  },
+  {
+   "role": "Stock User"
+  }
+ ]
+}
@ -0,0 +1,205 @@
+# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
+# License: MIT. See LICENSE
+
+import frappe
+from frappe import _, qb, query_builder
+from frappe.query_builder import functions
+
+
+def get_columns():
+    columns = [
+        {
+            "label": _("Sales Order"),
+            "fieldname": "name",
+            "fieldtype": "Link",
+            "options": "Sales Order",
+        },
+        {
+            "label": _("Posting Date"),
+            "fieldname": "submitted",
+            "fieldtype": "Date",
+        },
+        {
+            "label": _("Payment Term"),
+            "fieldname": "payment_term",
+            "fieldtype": "Data",
+        },
+        {
+            "label": _("Description"),
+            "fieldname": "description",
+            "fieldtype": "Data",
+        },
+        {
+            "label": _("Due Date"),
+            "fieldname": "due_date",
+            "fieldtype": "Date",
+        },
+        {
+            "label": _("Invoice Portion"),
+            "fieldname": "invoice_portion",
+            "fieldtype": "Percent",
+        },
+        {
+            "label": _("Payment Amount"),
+            "fieldname": "base_payment_amount",
+            "fieldtype": "Currency",
+            "options": "currency",
+        },
+        {
+            "label": _("Paid Amount"),
+            "fieldname": "paid_amount",
+            "fieldtype": "Currency",
+            "options": "currency",
+        },
+        {
+            "label": _("Invoices"),
+            "fieldname": "invoices",
+            "fieldtype": "Link",
+            "options": "Sales Invoice",
+        },
+        {
+            "label": _("Status"),
+            "fieldname": "status",
+            "fieldtype": "Data",
+        },
+        {
+            "label": _("Currency"),
+            "fieldname": "currency",
+            "fieldtype": "Currency",
+            "hidden": 1
+        }
+    ]
+    return columns
+
+
+def get_conditions(filters):
+    """
+    Convert filter options to conditions used in query
+    """
+    filters = frappe._dict(filters) if filters else frappe._dict({})
+    conditions = frappe._dict({})
+
+    conditions.company = filters.company or frappe.defaults.get_user_default("company")
+    conditions.end_date = filters.period_end_date or frappe.utils.today()
+    conditions.start_date = filters.period_start_date or frappe.utils.add_months(
+        conditions.end_date, -1
+    )
+    conditions.sales_order = filters.sales_order or []
+
+    return conditions
+
+
+def get_so_with_invoices(filters):
+    """
+    Get Sales Order with payment terms template with their associated Invoices
+    """
+    sorders = []
+
+    so = qb.DocType("Sales Order")
+    ps = qb.DocType("Payment Schedule")
+    datediff = query_builder.CustomFunction("DATEDIFF", ["cur_date", "due_date"])
+    ifelse = query_builder.CustomFunction("IF", ["condition", "then", "else"])
+
+    conditions = get_conditions(filters)
+    query_so = (
+        qb.from_(so)
+        .join(ps)
+        .on(ps.parent == so.name)
+        .select(
+            so.name,
+            so.transaction_date.as_("submitted"),
+            ifelse(datediff(ps.due_date, functions.CurDate()) < 0, "Overdue", "Unpaid").as_("status"),
+            ps.payment_term,
+            ps.description,
+            ps.due_date,
+            ps.invoice_portion,
+            ps.base_payment_amount,
+            ps.paid_amount,
+        )
+        .where(
+            (so.docstatus == 1)
+            & (so.payment_terms_template != "NULL")
+            & (so.company == conditions.company)
+            & (so.transaction_date[conditions.start_date : conditions.end_date])
+        )
+        .orderby(so.name, so.transaction_date, ps.due_date)
+    )
+
+    if conditions.sales_order != []:
+        query_so = query_so.where(so.name.isin(conditions.sales_order))
+
+    sorders = query_so.run(as_dict=True)
+
+    invoices = []
+    if sorders != []:
+        soi = qb.DocType("Sales Order Item")
+        si = qb.DocType("Sales Invoice")
+        sii = qb.DocType("Sales Invoice Item")
+        query_inv = (
+            qb.from_(sii)
+            .right_join(si)
+            .on(si.name == sii.parent)
+            .inner_join(soi)
+            .on(soi.name == sii.so_detail)
+            .select(sii.sales_order, sii.parent.as_("invoice"), si.base_grand_total.as_("invoice_amount"))
+            .where((sii.sales_order.isin([x.name for x in sorders])) & (si.docstatus == 1))
+            .groupby(sii.parent)
+        )
+        invoices = query_inv.run(as_dict=True)
+
+    return sorders, invoices
+
+
+def set_payment_terms_statuses(sales_orders, invoices, filters):
+    """
+    compute status for payment terms with associated sales invoice using FIFO
+    """
+
+    for so in sales_orders:
+        so.currency = frappe.get_cached_value('Company', filters.get('company'), 'default_currency')
+        so.invoices = ""
+        for inv in [x for x in invoices if x.sales_order == so.name and x.invoice_amount > 0]:
+            if so.base_payment_amount - so.paid_amount > 0:
+                amount = so.base_payment_amount - so.paid_amount
+                if inv.invoice_amount >= amount:
+                    inv.invoice_amount -= amount
+                    so.paid_amount += amount
+                    so.invoices += "," + inv.invoice
+                    so.status = "Completed"
+                    break
+                else:
+                    so.paid_amount += inv.invoice_amount
+                    inv.invoice_amount = 0
+                    so.invoices += "," + inv.invoice
+                    so.status = "Partly Paid"
+
+    return sales_orders, invoices
+
+
+def prepare_chart(s_orders):
+    if len(set([x.name for x in s_orders])) == 1:
+        chart = {
+            "data": {
+                "labels": [term.payment_term for term in s_orders],
+                "datasets": [
+                    {"name": "Payment Amount", "values": [x.base_payment_amount for x in s_orders],},
+                    {"name": "Paid Amount", "values": [x.paid_amount for x in s_orders],},
+                ],
+            },
+            "type": "bar",
+        }
+        return chart
+
+
+def execute(filters=None):
+    columns = get_columns()
+    sales_orders, so_invoices = get_so_with_invoices(filters)
+    sales_orders, so_invoices = set_payment_terms_statuses(sales_orders, so_invoices, filters)
+
+    prepare_chart(sales_orders)
+
+    data = sales_orders
+    message = []
+    chart = prepare_chart(sales_orders)
+
+    return columns, data, message, chart
@ -0,0 +1,198 @@
+import datetime
+
+import frappe
+from frappe.utils import add_days
+
+from erpnext.selling.doctype.sales_order.sales_order import make_sales_invoice
+from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
+from erpnext.selling.report.payment_terms_status_for_sales_order.payment_terms_status_for_sales_order import (
+    execute,
+)
+from erpnext.stock.doctype.item.test_item import create_item
+from erpnext.tests.utils import ERPNextTestCase
+
+test_dependencies = ["Sales Order", "Item", "Sales Invoice", "Payment Terms Template"]
+
+
+class TestPaymentTermsStatusForSalesOrder(ERPNextTestCase):
+    def create_payment_terms_template(self):
+        # create template for 50-50 payments
+        template = None
+        if frappe.db.exists("Payment Terms Template", "_Test 50-50"):
+            template = frappe.get_doc("Payment Terms Template", "_Test 50-50")
+        else:
+            template = frappe.get_doc(
+                {
+                    "doctype": "Payment Terms Template",
+                    "template_name": "_Test 50-50",
+                    "terms": [
+                        {
+                            "doctype": "Payment Terms Template Detail",
+                            "due_date_based_on": "Day(s) after invoice date",
+                            "payment_term_name": "_Test 50% on 15 Days",
+                            "description": "_Test 50-50",
+                            "invoice_portion": 50,
+                            "credit_days": 15,
+                        },
+                        {
+                            "doctype": "Payment Terms Template Detail",
+                            "due_date_based_on": "Day(s) after invoice date",
+                            "payment_term_name": "_Test 50% on 30 Days",
+                            "description": "_Test 50-50",
+                            "invoice_portion": 50,
+                            "credit_days": 30,
+                        },
+                    ],
+                }
+            )
+            template.insert()
+        self.template = template
+
+    def test_payment_terms_status(self):
+        self.create_payment_terms_template()
+        item = create_item(item_code="_Test Excavator", is_stock_item=0)
+        so = make_sales_order(
+            transaction_date="2021-06-15",
+            delivery_date=add_days("2021-06-15", -30),
+            item=item.item_code,
+            qty=10,
+            rate=100000,
+            do_not_save=True,
+        )
+        so.po_no = ""
+        so.taxes_and_charges = ""
+        so.taxes = ""
+        so.payment_terms_template = self.template.name
+        so.save()
+        so.submit()
+
+        # make invoice with 60% of the total sales order value
+        sinv = make_sales_invoice(so.name)
+        sinv.taxes_and_charges = ""
+        sinv.taxes = ""
+        sinv.items[0].qty = 6
+        sinv.insert()
+        sinv.submit()
+        columns, data, message, chart = execute(
+            {
+                "company": "_Test Company",
+                "period_start_date": "2021-06-01",
+                "period_end_date": "2021-06-30",
+                "sales_order": [so.name],
+            }
+        )
+
+        expected_value = [
+            {
+                "name": so.name,
+                "submitted": datetime.date(2021, 6, 15),
+                "status": "Completed",
+                "payment_term": None,
+                "description": "_Test 50-50",
+                "due_date": datetime.date(2021, 6, 30),
+                "invoice_portion": 50.0,
+                "currency": "INR",
+                "base_payment_amount": 500000.0,
+                "paid_amount": 500000.0,
+                "invoices": ","+sinv.name,
+            },
+            {
+                "name": so.name,
+                "submitted": datetime.date(2021, 6, 15),
+                "status": "Partly Paid",
+                "payment_term": None,
+                "description": "_Test 50-50",
+                "due_date": datetime.date(2021, 7, 15),
+                "invoice_portion": 50.0,
+                "currency": "INR",
+                "base_payment_amount": 500000.0,
+                "paid_amount": 100000.0,
+                "invoices": ","+sinv.name,
+            },
+        ]
+        self.assertEqual(data, expected_value)
+
+    def create_exchange_rate(self, date):
+        # make an entry in Currency Exchange list. serves as a static exchange rate
+        if frappe.db.exists({'doctype': "Currency Exchange",'date': date,'from_currency': 'USD', 'to_currency':'INR'}):
+            return
+        else:
+            doc = frappe.get_doc({
+                'doctype': "Currency Exchange",
+                'date': date,
+                'from_currency': 'USD',
+                'to_currency': frappe.get_cached_value("Company", '_Test Company','default_currency'),
+                'exchange_rate': 70,
+                'for_buying': True,
+                'for_selling': True
+            })
+            doc.insert()
+
+    def test_alternate_currency(self):
+        transaction_date = "2021-06-15"
+        self.create_payment_terms_template()
+        self.create_exchange_rate(transaction_date)
+        item = create_item(item_code="_Test Excavator", is_stock_item=0)
+        so = make_sales_order(
+            transaction_date=transaction_date,
+            currency="USD",
+            delivery_date=add_days(transaction_date, -30),
+            item=item.item_code,
+            qty=10,
+            rate=10000,
+            do_not_save=True,
+        )
+        so.po_no = ""
+        so.taxes_and_charges = ""
+        so.taxes = ""
+        so.payment_terms_template = self.template.name
+        so.save()
+        so.submit()
+
+        # make invoice with 60% of the total sales order value
+        sinv = make_sales_invoice(so.name)
+        sinv.currency = "USD"
+        sinv.taxes_and_charges = ""
+        sinv.taxes = ""
+        sinv.items[0].qty = 6
+        sinv.insert()
+        sinv.submit()
+        columns, data, message, chart = execute(
+            {
+                "company": "_Test Company",
+                "period_start_date": "2021-06-01",
+                "period_end_date": "2021-06-30",
+                "sales_order": [so.name],
+            }
+        )
+
+        # report defaults to company currency.
+        expected_value = [
+            {
+                "name": so.name,
+                "submitted": datetime.date(2021, 6, 15),
+                "status": "Completed",
+                "payment_term": None,
+                "description": "_Test 50-50",
+                "due_date": datetime.date(2021, 6, 30),
+                "invoice_portion": 50.0,
+                "currency": frappe.get_cached_value("Company", '_Test Company','default_currency'),
+                "base_payment_amount": 3500000.0,
+                "paid_amount": 3500000.0,
+                "invoices": ","+sinv.name,
+            },
+            {
+                "name": so.name,
+                "submitted": datetime.date(2021, 6, 15),
+                "status": "Partly Paid",
+                "payment_term": None,
+                "description": "_Test 50-50",
+                "due_date": datetime.date(2021, 7, 15),
+                "invoice_portion": 50.0,
+                "currency": frappe.get_cached_value("Company", '_Test Company','default_currency'),
+                "base_payment_amount": 3500000.0,
+                "paid_amount": 700000.0,
+                "invoices": ","+sinv.name,
+            },
+        ]
+        self.assertEqual(data, expected_value)
@ -227,13 +227,13 @@ erpnext.selling.SellingController = class SellingController extends erpnext.TransactionController {
 				},
 				callback:function(r){
 					if (in_list(['Delivery Note', 'Sales Invoice'], doc.doctype)) {
 
 						if (doc.doctype === 'Sales Invoice' && (!doc.update_stock)) return;
+						if (has_batch_no) {
 							me.set_batch_number(cdt, cdn);
 							me.batch_no(doc, cdt, cdn);
 						}
 					}
+				}
 			});
 		}
 	})
@ -545,7 +545,7 @@ $.extend(erpnext.item, {
 			let selected_attributes = {};
 			me.multiple_variant_dialog.$wrapper.find('.form-column').each((i, col) => {
 				if(i===0) return;
-				let attribute_name = $(col).find('label').html();
+				let attribute_name = $(col).find('label').html().trim();
 				selected_attributes[attribute_name] = [];
 				let checked_opts = $(col).find('.checkbox input');
 				checked_opts.each((i, opt) => {
@ -533,6 +533,7 @@ def raise_work_orders(material_request):
 				"stock_uom": d.stock_uom,
 				"expected_delivery_date": d.schedule_date,
 				"sales_order": d.sales_order,
+				"sales_order_item": d.get("sales_order_item"),
 				"bom_no": get_item_details(d.item_code).bom_no,
 				"material_request": mr.name,
 				"material_request_item": d.name,
@ -9,7 +9,7 @@ from collections import defaultdict
 import frappe
 from frappe import _
 from frappe.model.document import Document
-from frappe.utils import cint, floor, flt, nowdate
+from frappe.utils import cint, cstr, floor, flt, nowdate
 
 from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
 from erpnext.stock.utils import get_stock_balance
@ -142,11 +142,44 @@ def apply_putaway_rule(doctype, items, company, sync=None, purpose=None):
 	if items_not_accomodated:
 		show_unassigned_items_message(items_not_accomodated)
 
-	items[:] = updated_table if updated_table else items # modify items table
+	if updated_table and _items_changed(items, updated_table, doctype):
+		items[:] = updated_table
+		frappe.msgprint(_("Applied putaway rules."), alert=True)
 
 	if sync and json.loads(sync): # sync with client side
 		return items
 
+
+def _items_changed(old, new, doctype: str) -> bool:
+    """ Check if any items changed by application of putaway rules.
+
+    If not, changing item table can have side effects since `name` items also changes.
+    """
+    if len(old) != len(new):
+        return True
+
+    old = [frappe._dict(item) if isinstance(item, dict) else item for item in old]
+
+    if doctype == "Stock Entry":
+        compare_keys = ("item_code", "t_warehouse", "transfer_qty", "serial_no")
+        sort_key = lambda item: (item.item_code, cstr(item.t_warehouse), # noqa
+                flt(item.transfer_qty), cstr(item.serial_no))
+    else:
+        # purchase receipt / invoice
+        compare_keys = ("item_code", "warehouse", "stock_qty", "received_qty", "serial_no")
+        sort_key = lambda item: (item.item_code, cstr(item.warehouse), # noqa
+                flt(item.stock_qty), flt(item.received_qty), cstr(item.serial_no))
+
+    old_sorted = sorted(old, key=sort_key)
+    new_sorted = sorted(new, key=sort_key)
+
+    # Once sorted by all relevant keys both tables should align if they are same.
+    for old_item, new_item in zip(old_sorted, new_sorted):
+        for key in compare_keys:
+            if old_item.get(key) != new_item.get(key):
+                return True
+    return False
+
+
 def get_ordered_putaway_rules(item_code, company, source_warehouse=None):
 	"""Returns an ordered list of putaway rules to apply on an item."""
 	filters = {
@ -35,6 +35,18 @@ class TestPutawayRule(ERPNextTestCase):
 		new_uom.uom_name = "Bag"
 		new_uom.save()
 
+	def assertUnchangedItemsOnResave(self, doc):
+		""" Check if same items remain even after reapplication of rules.
+
+		This is required since some business logic like subcontracting
+		depends on `name` of items to be same if item isn't changed.
+		"""
+		doc.reload()
+		old_items = {d.name for d in doc.items}
+		doc.save()
+		new_items = {d.name for d in doc.items}
+		self.assertSetEqual(old_items, new_items)
+
 	def test_putaway_rules_priority(self):
 		"""Test if rule is applied by priority, irrespective of free space."""
 		rule_1 = create_putaway_rule(item_code="_Rice", warehouse=self.warehouse_1, capacity=200,
@ -50,6 +62,8 @@ class TestPutawayRule(ERPNextTestCase):
 		self.assertEqual(pr.items[1].qty, 100)
 		self.assertEqual(pr.items[1].warehouse, self.warehouse_2)
 
+		self.assertUnchangedItemsOnResave(pr)
+
 		pr.delete()
 		rule_1.delete()
 		rule_2.delete()
@ -162,6 +176,8 @@ class TestPutawayRule(ERPNextTestCase):
 		# leftover space was for 500 kg (0.5 Bag)
 		# Since Bag is a whole UOM, 1(out of 2) Bag will be unassigned
 
+		self.assertUnchangedItemsOnResave(pr)
+
 		pr.delete()
 		rule_1.delete()
 		rule_2.delete()
@ -196,6 +212,8 @@ class TestPutawayRule(ERPNextTestCase):
 		self.assertEqual(pr.items[1].warehouse, self.warehouse_1)
 		self.assertEqual(pr.items[1].putaway_rule, rule_1.name)
 
+		self.assertUnchangedItemsOnResave(pr)
+
 		pr.delete()
 		rule_1.delete()
 
@ -239,6 +257,8 @@ class TestPutawayRule(ERPNextTestCase):
 		self.assertEqual(stock_entry_item.qty, 100) # unassigned 100 out of 200 Kg
 		self.assertEqual(stock_entry_item.putaway_rule, rule_2.name)
 
+		self.assertUnchangedItemsOnResave(stock_entry)
+
 		stock_entry.delete()
 		rule_1.delete()
 		rule_2.delete()
@ -294,6 +314,8 @@ class TestPutawayRule(ERPNextTestCase):
 		self.assertEqual(stock_entry.items[2].qty, 200)
 		self.assertEqual(stock_entry.items[2].putaway_rule, rule_2.name)
 
+		self.assertUnchangedItemsOnResave(stock_entry)
+
 		stock_entry.delete()
 		rule_1.delete()
 		rule_2.delete()
@ -344,6 +366,8 @@ class TestPutawayRule(ERPNextTestCase):
 		self.assertEqual(stock_entry.items[1].serial_no, "\n".join(serial_nos[3:]))
 		self.assertEqual(stock_entry.items[1].batch_no, "BOTTL-BATCH-1")
 
+		self.assertUnchangedItemsOnResave(stock_entry)
+
 		stock_entry.delete()
 		pr.cancel()
 		rule_1.delete()
@ -366,6 +390,8 @@ class TestPutawayRule(ERPNextTestCase):
 		self.assertEqual(stock_entry_item.qty, 100)
 		self.assertEqual(stock_entry_item.putaway_rule, rule_1.name)
 
+		self.assertUnchangedItemsOnResave(stock_entry)
+
 		stock_entry.delete()
 		rule_1.delete()
 		rule_2.delete()
|
@ -12,6 +12,7 @@ from frappe.utils import cint, date_diff, flt
|
|||||||
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
|
from erpnext.stock.doctype.serial_no.serial_no import get_serial_nos
|
||||||
|
|
||||||
Filters = frappe._dict
|
Filters = frappe._dict
|
||||||
|
precision = cint(frappe.db.get_single_value("System Settings", "float_precision"))
|
||||||
|
|
||||||
def execute(filters: Filters = None) -> Tuple:
|
def execute(filters: Filters = None) -> Tuple:
|
||||||
to_date = filters["to_date"]
|
to_date = filters["to_date"]
|
||||||
@ -48,10 +49,13 @@ def format_report_data(filters: Filters, item_details: Dict, to_date: str) -> List:
 		if filters.get("show_warehouse_wise_stock"):
 			row.append(details.warehouse)
 
-		row.extend([item_dict.get("total_qty"), average_age,
+		row.extend([
+			flt(item_dict.get("total_qty"), precision),
+			average_age,
 			range1, range2, range3, above_range3,
 			earliest_age, latest_age,
-			details.stock_uom])
+			details.stock_uom
+		])
 
 		data.append(row)
 
@ -79,13 +83,13 @@ def get_range_age(filters: Filters, fifo_queue: List, to_date: str, item_dict: Dict) -> Tuple:
 		qty = flt(item[0]) if not item_dict["has_serial_no"] else 1.0
 
 		if age <= filters.range1:
-			range1 += qty
+			range1 = flt(range1 + qty, precision)
 		elif age <= filters.range2:
-			range2 += qty
+			range2 = flt(range2 + qty, precision)
 		elif age <= filters.range3:
-			range3 += qty
+			range3 = flt(range3 + qty, precision)
 		else:
-			above_range3 += qty
+			above_range3 = flt(above_range3 + qty, precision)
 
 	return range1, range2, range3, above_range3
 
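A quick illustration of why the report now rounds with flt(..., precision) instead of accumulating raw floats; the numbers mirror test_precision further down. This snippet is an editorial aside, not part of the patch, and assumes a float_precision of 3.

# Illustrative sketch: binary floats drift, so range totals are rounded before reporting.
from frappe.utils import flt

total = 0.3 + 0.6
print(total)          # 0.8999999999999999
print(flt(total, 3))  # 0.9 -- what the Available Qty column and range buckets now show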
@ -286,14 +290,16 @@ class FIFOSlots:
 	def __compute_incoming_stock(self, row: Dict, fifo_queue: List, transfer_key: Tuple, serial_nos: List):
 		"Update FIFO Queue on inward stock."
 
-		if self.transferred_item_details.get(transfer_key):
+		transfer_data = self.transferred_item_details.get(transfer_key)
+		if transfer_data:
 			# inward/outward from same voucher, item & warehouse
-			slot = self.transferred_item_details[transfer_key].pop(0)
-			fifo_queue.append(slot)
+			# eg: Repack with same item, Stock reco for batch item
+			# consume transfer data and add stock to fifo queue
+			self.__adjust_incoming_transfer_qty(transfer_data, fifo_queue, row)
 		else:
 			if not serial_nos:
-				if fifo_queue and flt(fifo_queue[0][0]) < 0:
-					# neutralize negative stock by adding positive stock
+				if fifo_queue and flt(fifo_queue[0][0]) <= 0:
+					# neutralize 0/negative stock by adding positive stock
 					fifo_queue[0][0] += flt(row.actual_qty)
 					fifo_queue[0][1] = row.posting_date
 				else:
@ -324,7 +330,7 @@ class FIFOSlots:
 			elif not fifo_queue:
 				# negative stock, no balance but qty yet to consume
 				fifo_queue.append([-(qty_to_pop), row.posting_date])
-				self.transferred_item_details[transfer_key].append([row.actual_qty, row.posting_date])
+				self.transferred_item_details[transfer_key].append([qty_to_pop, row.posting_date])
 				qty_to_pop = 0
 			else:
 				# qty to pop < slot qty, ample balance
@ -333,6 +339,33 @@ class FIFOSlots:
 				self.transferred_item_details[transfer_key].append([qty_to_pop, slot[1]])
 				qty_to_pop = 0
 
+	def __adjust_incoming_transfer_qty(self, transfer_data: Dict, fifo_queue: List, row: Dict):
+		"Add previously removed stock back to FIFO Queue."
+		transfer_qty_to_pop = flt(row.actual_qty)
+
+		def add_to_fifo_queue(slot):
+			if fifo_queue and flt(fifo_queue[0][0]) <= 0:
+				# neutralize 0/negative stock by adding positive stock
+				fifo_queue[0][0] += flt(slot[0])
+				fifo_queue[0][1] = slot[1]
+			else:
+				fifo_queue.append(slot)
+
+		while transfer_qty_to_pop:
+			if transfer_data and 0 < transfer_data[0][0] <= transfer_qty_to_pop:
+				# bucket qty is not enough, consume whole
+				transfer_qty_to_pop -= transfer_data[0][0]
+				add_to_fifo_queue(transfer_data.pop(0))
+			elif not transfer_data:
+				# transfer bucket is empty, extra incoming qty
+				add_to_fifo_queue([transfer_qty_to_pop, row.posting_date])
+				transfer_qty_to_pop = 0
+			else:
+				# ample bucket qty to consume
+				transfer_data[0][0] -= transfer_qty_to_pop
+				add_to_fifo_queue([transfer_qty_to_pop, transfer_data[0][1]])
+				transfer_qty_to_pop = 0
+
 	def __update_balances(self, row: Dict, key: Union[Tuple, str]):
 		self.item_details[key]["qty_after_transaction"] = row.qty_after_transaction
 
@ -72,3 +72,38 @@ Date | Qty | Queue
 3rd  | +5  | [[-5, 3-12-2021]]
 4th  | +10 | [[5, 4-12-2021]]
 4th  | +20 | [[5, 4-12-2021], [20, 4-12-2021]]
+
+### Concept of Transfer Qty Bucket
+In the case of **Repack**, Quantity that comes in, isn't really incoming. It is just new stock repurposed from old stock, due to incoming-outgoing of the same warehouse.
+
+Here, stock is consumed from the FIFO Queue. It is then re-added back to the queue.
+While adding stock back to the queue we need to know how much to add.
+For this we need to keep track of how much was previously consumed.
+Hence we use **Transfer Qty Bucket**.
+
+While re-adding stock, we try to add buckets that were consumed earlier (date intact), to maintain correctness.
+
+#### Case 1: Same Item-Warehouse in Repack
+Eg:
+-------------------------------------------------------------------------------------
+Date | Qty  | Voucher | FIFO Queue                          | Transfer Qty Buckets
+-------------------------------------------------------------------------------------
+1st  | +500 | PR      | [[500, 1-12-2021]]                  |
+2nd  | -50  | Repack  | [[450, 1-12-2021]]                  | [[50, 1-12-2021]]
+2nd  | +50  | Repack  | [[450, 1-12-2021], [50, 1-12-2021]] | []
+
+- The balance at the end is restored back to 500
+- However, the initial 500 qty bucket is now split into 450 and 50, with the same date
+- The net effect is the same as that before the Repack
+
+#### Case 2: Same Item-Warehouse in Repack with Split Consumption rows
+Eg:
+-------------------------------------------------------------------------------------
+Date | Qty  | Voucher | FIFO Queue                          | Transfer Qty Buckets
+-------------------------------------------------------------------------------------
+1st  | +500 | PR      | [[500, 1-12-2021]]                  |
+2nd  | -50  | Repack  | [[450, 1-12-2021]]                  | [[50, 1-12-2021]]
+2nd  | -50  | Repack  | [[400, 1-12-2021]]                  | [[50, 1-12-2021],
+-    |      |         |                                     |  [50, 1-12-2021]]
+2nd  | +100 | Repack  | [[400, 1-12-2021], [50, 1-12-2021], | []
+-    |      |         |  [50, 1-12-2021]]                   |
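The bucket mechanics described in the README above can also be seen in a small standalone sketch. This is an editorial illustration simplified from FIFOSlots.__adjust_incoming_transfer_qty in the diff; the function name and the trace below are mine, not part of the patch.

# Illustrative sketch: re-adding repacked qty from transfer buckets to the FIFO queue.
# Each slot/bucket is [qty, posting_date].
def readd_from_transfer_buckets(buckets, fifo_queue, incoming_qty, posting_date):
    qty_to_add = incoming_qty
    while qty_to_add:
        if buckets and 0 < buckets[0][0] <= qty_to_add:
            # bucket fully consumed: restore it with its original date
            qty_to_add -= buckets[0][0]
            fifo_queue.append(buckets.pop(0))
        elif not buckets:
            # nothing left to restore: extra qty keeps the current posting date
            fifo_queue.append([qty_to_add, posting_date])
            qty_to_add = 0
        else:
            # bucket bigger than needed: split it
            buckets[0][0] -= qty_to_add
            fifo_queue.append([qty_to_add, buckets[0][1]])
            qty_to_add = 0

# Case 1 above: 500 stocked on 1-12-2021, a Repack consumes 50 and re-adds 50 on the 2nd
queue, buckets = [[450, "1-12-2021"]], [[50, "1-12-2021"]]
readd_from_transfer_buckets(buckets, queue, 50, "2-12-2021")
print(queue)    # [[450, '1-12-2021'], [50, '1-12-2021']] -- balance back to 500, dates intact
print(buckets)  # [] -- transfer bucket fully consumed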
@ -3,7 +3,7 @@
 
 import frappe
 
-from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots
+from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots, format_report_data
 from erpnext.tests.utils import ERPNextTestCase
 
 
@ -11,7 +11,8 @@ class TestStockAgeing(ERPNextTestCase):
 	def setUp(self) -> None:
 		self.filters = frappe._dict(
 			company="_Test Company",
-			to_date="2021-12-10"
+			to_date="2021-12-10",
+			range1=30, range2=60, range3=90
 		)
 
 	def test_normal_inward_outward_queue(self):
@ -236,6 +237,371 @@ class TestStockAgeing(ERPNextTestCase):
 		item_wh_balances = [item_wh_wise_slots.get(i).get("qty_after_transaction") for i in item_wh_wise_slots]
 		self.assertEqual(sum(item_wh_balances), item_result["qty_after_transaction"])
 
+	def test_repack_entry_same_item_split_rows(self):
+		"""
+		Split consumption rows and have single repacked item row (same warehouse).
+		Ledger:
+		Item   | Qty | Voucher
+		------------------------
+		Item 1 | 500 | 001
+		Item 1 | -50 | 002 (repack)
+		Item 1 | -50 | 002 (repack)
+		Item 1 | 100 | 002 (repack)
+
+		Case most likely for batch items. Test time bucket computation.
+		"""
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=500, qty_after_transaction=500,
+				warehouse="WH 1",
+				posting_date="2021-12-03", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=450,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=400,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=100, qty_after_transaction=500,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+		]
+		slots = FIFOSlots(self.filters, sle).generate()
+		item_result = slots["Flask Item"]
+		queue = item_result["fifo_queue"]
+
+		self.assertEqual(item_result["total_qty"], 500.0)
+		self.assertEqual(queue[0][0], 400.0)
+		self.assertEqual(queue[1][0], 50.0)
+		self.assertEqual(queue[2][0], 50.0)
+		# check if time buckets add up to balance qty
+		self.assertEqual(sum([i[0] for i in queue]), 500.0)
+
+	def test_repack_entry_same_item_overconsume(self):
+		"""
+		Over consume item and have less repacked item qty (same warehouse).
+		Ledger:
+		Item   | Qty  | Voucher
+		------------------------
+		Item 1 | 500  | 001
+		Item 1 | -100 | 002 (repack)
+		Item 1 | 50   | 002 (repack)
+
+		Case most likely for batch items. Test time bucket computation.
+		"""
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=500, qty_after_transaction=500,
+				warehouse="WH 1",
+				posting_date="2021-12-03", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-100), qty_after_transaction=400,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=50, qty_after_transaction=450,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+		]
+		slots = FIFOSlots(self.filters, sle).generate()
+		item_result = slots["Flask Item"]
+		queue = item_result["fifo_queue"]
+
+		self.assertEqual(item_result["total_qty"], 450.0)
+		self.assertEqual(queue[0][0], 400.0)
+		self.assertEqual(queue[1][0], 50.0)
+		# check if time buckets add up to balance qty
+		self.assertEqual(sum([i[0] for i in queue]), 450.0)
+
+	def test_repack_entry_same_item_overconsume_with_split_rows(self):
+		"""
+		Over consume item and have less repacked item qty (same warehouse).
+		Ledger:
+		Item   | Qty | Voucher
+		------------------------
+		Item 1 | 20  | 001
+		Item 1 | -50 | 002 (repack)
+		Item 1 | -50 | 002 (repack)
+		Item 1 | 50  | 002 (repack)
+		"""
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=20, qty_after_transaction=20,
+				warehouse="WH 1",
+				posting_date="2021-12-03", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=(-30),
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=(-80),
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=50, qty_after_transaction=(-30),
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+		]
+		fifo_slots = FIFOSlots(self.filters, sle)
+		slots = fifo_slots.generate()
+		item_result = slots["Flask Item"]
+		queue = item_result["fifo_queue"]
+
+		self.assertEqual(item_result["total_qty"], -30.0)
+		self.assertEqual(queue[0][0], -30.0)
+
+		# check transfer bucket
+		transfer_bucket = fifo_slots.transferred_item_details[('002', 'Flask Item', 'WH 1')]
+		self.assertEqual(transfer_bucket[0][0], 50)
+
+	def test_repack_entry_same_item_overproduce(self):
+		"""
+		Under consume item and have more repacked item qty (same warehouse).
+		Ledger:
+		Item   | Qty | Voucher
+		------------------------
+		Item 1 | 500 | 001
+		Item 1 | -50 | 002 (repack)
+		Item 1 | 100 | 002 (repack)
+
+		Case most likely for batch items. Test time bucket computation.
+		"""
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=500, qty_after_transaction=500,
+				warehouse="WH 1",
+				posting_date="2021-12-03", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=450,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=100, qty_after_transaction=550,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+		]
+		slots = FIFOSlots(self.filters, sle).generate()
+		item_result = slots["Flask Item"]
+		queue = item_result["fifo_queue"]
+
+		self.assertEqual(item_result["total_qty"], 550.0)
+		self.assertEqual(queue[0][0], 450.0)
+		self.assertEqual(queue[1][0], 50.0)
+		self.assertEqual(queue[2][0], 50.0)
+		# check if time buckets add up to balance qty
+		self.assertEqual(sum([i[0] for i in queue]), 550.0)
+
+	def test_repack_entry_same_item_overproduce_with_split_rows(self):
+		"""
+		Over consume item and have less repacked item qty (same warehouse).
+		Ledger:
+		Item   | Qty | Voucher
+		------------------------
+		Item 1 | 20  | 001
+		Item 1 | -50 | 002 (repack)
+		Item 1 | 50  | 002 (repack)
+		Item 1 | 50  | 002 (repack)
+		"""
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=20, qty_after_transaction=20,
+				warehouse="WH 1",
+				posting_date="2021-12-03", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=(-30),
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=50, qty_after_transaction=20,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict(
+				name="Flask Item",
+				actual_qty=50, qty_after_transaction=70,
+				warehouse="WH 1",
+				posting_date="2021-12-04", voucher_type="Stock Entry",
+				voucher_no="002",
+				has_serial_no=False, serial_no=None
+			),
+		]
+		fifo_slots = FIFOSlots(self.filters, sle)
+		slots = fifo_slots.generate()
+		item_result = slots["Flask Item"]
+		queue = item_result["fifo_queue"]
+
+		self.assertEqual(item_result["total_qty"], 70.0)
+		self.assertEqual(queue[0][0], 20.0)
+		self.assertEqual(queue[1][0], 50.0)
+
+		# check transfer bucket
+		transfer_bucket = fifo_slots.transferred_item_details[('002', 'Flask Item', 'WH 1')]
+		self.assertFalse(transfer_bucket)
+
+	def test_negative_stock_same_voucher(self):
+		"""
+		Test negative stock scenario in transfer bucket via repack entry (same wh).
+		Ledger:
+		Item   | Qty | Voucher
+		------------------------
+		Item 1 | -50 | 001
+		Item 1 | -50 | 001
+		Item 1 | 30  | 001
+		Item 1 | 80  | 001
+		"""
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=(-50),
+				warehouse="WH 1",
+				posting_date="2021-12-01", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=(-50), qty_after_transaction=(-100),
+				warehouse="WH 1",
+				posting_date="2021-12-01", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=30, qty_after_transaction=(-70),
+				warehouse="WH 1",
+				posting_date="2021-12-01", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+		]
+		fifo_slots = FIFOSlots(self.filters, sle)
+		slots = fifo_slots.generate()
+		item_result = slots["Flask Item"]
+
+		# check transfer bucket
+		transfer_bucket = fifo_slots.transferred_item_details[('001', 'Flask Item', 'WH 1')]
+		self.assertEqual(transfer_bucket[0][0], 20)
+		self.assertEqual(transfer_bucket[1][0], 50)
+		self.assertEqual(item_result["fifo_queue"][0][0], -70.0)
+
+		sle.append(frappe._dict(
+			name="Flask Item",
+			actual_qty=80, qty_after_transaction=10,
+			warehouse="WH 1",
+			posting_date="2021-12-01", voucher_type="Stock Entry",
+			voucher_no="001",
+			has_serial_no=False, serial_no=None
+		))
+
+		fifo_slots = FIFOSlots(self.filters, sle)
+		slots = fifo_slots.generate()
+		item_result = slots["Flask Item"]
+
+		transfer_bucket = fifo_slots.transferred_item_details[('001', 'Flask Item', 'WH 1')]
+		self.assertFalse(transfer_bucket)
+		self.assertEqual(item_result["fifo_queue"][0][0], 10.0)
+
+	def test_precision(self):
+		"Test if final balance qty is rounded off correctly."
+		sle = [
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=0.3, qty_after_transaction=0.3,
+				warehouse="WH 1",
+				posting_date="2021-12-01", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+			frappe._dict( # stock up item
+				name="Flask Item",
+				actual_qty=0.6, qty_after_transaction=0.9,
+				warehouse="WH 1",
+				posting_date="2021-12-01", voucher_type="Stock Entry",
+				voucher_no="001",
+				has_serial_no=False, serial_no=None
+			),
+		]
+
+		slots = FIFOSlots(self.filters, sle).generate()
+		report_data = format_report_data(self.filters, slots, self.filters["to_date"])
+		row = report_data[0] # first row in report
+		bal_qty = row[5]
+		range_qty_sum = sum([i for i in row[7:11]]) # get sum of range balance
+
+		# check if value of Available Qty column matches with range bucket post format
+		self.assertEqual(bal_qty, 0.9)
+		self.assertEqual(bal_qty, range_qty_sum)
+
 def generate_item_and_item_wh_wise_slots(filters, sle):
 	"Return results with and without 'show_warehouse_wise_stock'"
 	item_wise_slots = FIFOSlots(filters, sle).generate()
30
erpnext/tests/test_zform_loads.py
Normal file
@ -0,0 +1,30 @@
+""" dumb test to check all function calls on known form loads """
+
+import unittest
+
+import frappe
+from frappe.desk.form.load import getdoc
+
+
+class TestFormLoads(unittest.TestCase):
+
+	def test_load(self):
+		erpnext_modules = frappe.get_all("Module Def", filters={"app_name": "erpnext"}, pluck="name")
+		doctypes = frappe.get_all("DocType", {"istable": 0, "issingle": 0, "is_virtual": 0, "module": ("in", erpnext_modules)}, pluck="name")
+
+		for doctype in doctypes:
+			last_doc = frappe.db.get_value(doctype, {}, "name", order_by="modified desc")
+			if not last_doc:
+				continue
+			with self.subTest(msg=f"Loading {doctype} - {last_doc}", doctype=doctype, last_doc=last_doc):
+				try:
+					# reset previous response
+					frappe.response = frappe._dict({"docs":[]})
+					frappe.response.docinfo = None
+
+					getdoc(doctype, last_doc)
+				except Exception as e:
+					self.fail(f"Failed to load {doctype} - {last_doc}: {e}")
+
+				self.assertTrue(frappe.response.docs, msg=f"expected document in reponse, found: {frappe.response.docs}")
+				self.assertTrue(frappe.response.docinfo, msg=f"expected docinfo in reponse, found: {frappe.response.docinfo}")
@ -1597,6 +1597,7 @@ Method,Methode,
 Middle Income,Mittleres Einkommen,
 Middle Name,Zweiter Vorname,
 Middle Name (Optional),Weiterer Vorname (optional),
+Milestonde,Meilenstein,
 Min Amt can not be greater than Max Amt,Min. Amt kann nicht größer als Max. Amt sein,
 Min Qty can not be greater than Max Qty,Mindestmenge kann nicht größer als Maximalmenge sein,
 Minimum Lead Age (Days),Mindest Lead-Alter (in Tagen),