Merge pull request #35552 from rohitwaghchaure/fixed-serial-batch-get-query

fix: get_query for batch number and incorrect batch qty
Authored by rohitwaghchaure on 2023-06-04 16:56:46 +05:30; committed by GitHub
commit 64f767b95d
5 changed files with 259 additions and 136 deletions

Changed file 1 of 5

@@ -3,12 +3,13 @@
 import json
-from collections import defaultdict
+from collections import OrderedDict, defaultdict

 import frappe
 from frappe import scrub
 from frappe.desk.reportview import get_filters_cond, get_match_cond
-from frappe.utils import nowdate, unique
+from frappe.query_builder.functions import Concat, Sum
+from frappe.utils import nowdate, today, unique

 import erpnext
 from erpnext.stock.get_item_details import _get_item_tax_template
@@ -412,95 +413,136 @@ def get_delivery_notes_to_be_billed(doctype, txt, searchfield, start, page_len,
 @frappe.validate_and_sanitize_search_inputs
 def get_batch_no(doctype, txt, searchfield, start, page_len, filters):
 	doctype = "Batch"
-	cond = ""
-	if filters.get("posting_date"):
-		cond = "and (batch.expiry_date is null or batch.expiry_date >= %(posting_date)s)"
-
-	batch_nos = None
-	args = {
-		"item_code": filters.get("item_code"),
-		"warehouse": filters.get("warehouse"),
-		"posting_date": filters.get("posting_date"),
-		"txt": "%{0}%".format(txt),
-		"start": start,
-		"page_len": page_len,
-	}
-
-	having_clause = "having sum(sle.actual_qty) > 0"
-	if filters.get("is_return"):
-		having_clause = ""
-
 	meta = frappe.get_meta(doctype, cached=True)
 	searchfields = meta.get_search_fields()

-	search_columns = ""
-	search_cond = ""
-	if searchfields:
-		search_columns = ", " + ", ".join(searchfields)
-		search_cond = " or " + " or ".join([field + " like %(txt)s" for field in searchfields])
-
-	if args.get("warehouse"):
-		searchfields = ["batch." + field for field in searchfields]
-		if searchfields:
-			search_columns = ", " + ", ".join(searchfields)
-			search_cond = " or " + " or ".join([field + " like %(txt)s" for field in searchfields])
-
-		batch_nos = frappe.db.sql(
-			"""select sle.batch_no, round(sum(sle.actual_qty),2), sle.stock_uom,
-				concat('MFG-',batch.manufacturing_date), concat('EXP-',batch.expiry_date)
-				{search_columns}
-				from `tabStock Ledger Entry` sle
-				INNER JOIN `tabBatch` batch on sle.batch_no = batch.name
-				where
-				batch.disabled = 0
-				and sle.is_cancelled = 0
-				and sle.item_code = %(item_code)s
-				and sle.warehouse = %(warehouse)s
-				and (sle.batch_no like %(txt)s
-				or batch.expiry_date like %(txt)s
-				or batch.manufacturing_date like %(txt)s
-				{search_cond})
-				and batch.docstatus < 2
-				{cond}
-				{match_conditions}
-				group by batch_no {having_clause}
-				order by batch.expiry_date, sle.batch_no desc
-				limit %(page_len)s offset %(start)s""".format(
-				search_columns=search_columns,
-				cond=cond,
-				match_conditions=get_match_cond(doctype),
-				having_clause=having_clause,
-				search_cond=search_cond,
-			),
-			args,
-		)
-
-		return batch_nos
-	else:
-		return frappe.db.sql(
-			"""select name, concat('MFG-', manufacturing_date), concat('EXP-',expiry_date)
-			{search_columns}
-			from `tabBatch` batch
-			where batch.disabled = 0
-			and item = %(item_code)s
-			and (name like %(txt)s
-			or expiry_date like %(txt)s
-			or manufacturing_date like %(txt)s
-			{search_cond})
-			and docstatus < 2
-			{0}
-			{match_conditions}
-			order by expiry_date, name desc
-			limit %(page_len)s offset %(start)s""".format(
-				cond,
-				search_columns=search_columns,
-				search_cond=search_cond,
-				match_conditions=get_match_cond(doctype),
-			),
-			args,
-		)
+	query = get_batches_from_stock_ledger_entries(searchfields, txt, filters)
+	bundle_query = get_batches_from_serial_and_batch_bundle(searchfields, txt, filters)
+
+	data = (
+		frappe.qb.from_((query) + (bundle_query))
+		.select("batch_no", "qty", "manufacturing_date", "expiry_date")
+		.offset(start)
+		.limit(page_len)
+	)
+
+	for field in searchfields:
+		data = data.select(field)
+
+	data = data.run()
+	data = get_filterd_batches(data)
+
+	return data
+
+
+def get_filterd_batches(data):
+	batches = OrderedDict()
+
+	for batch_data in data:
+		if batch_data[0] not in batches:
+			batches[batch_data[0]] = list(batch_data)
+		else:
+			batches[batch_data[0]][1] += batch_data[1]
+
+	filterd_batch = []
+	for batch, batch_data in batches.items():
+		if batch_data[1] > 0:
+			filterd_batch.append(tuple(batch_data))
+
+	return filterd_batch
+
+
+def get_batches_from_stock_ledger_entries(searchfields, txt, filters):
+	stock_ledger_entry = frappe.qb.DocType("Stock Ledger Entry")
+	batch_table = frappe.qb.DocType("Batch")
+
+	expiry_date = filters.get("posting_date") or today()
+
+	query = (
+		frappe.qb.from_(stock_ledger_entry)
+		.inner_join(batch_table)
+		.on(batch_table.name == stock_ledger_entry.batch_no)
+		.select(
+			stock_ledger_entry.batch_no,
+			Sum(stock_ledger_entry.actual_qty).as_("qty"),
+		)
+		.where(((batch_table.expiry_date >= expiry_date) | (batch_table.expiry_date.isnull())))
+		.where(stock_ledger_entry.is_cancelled == 0)
+		.where(
+			(stock_ledger_entry.item_code == filters.get("item_code"))
+			& (batch_table.disabled == 0)
+			& (stock_ledger_entry.batch_no.isnotnull())
+		)
+		.groupby(stock_ledger_entry.batch_no, stock_ledger_entry.warehouse)
+	)
+
+	query = query.select(
+		Concat("MFG-", batch_table.manufacturing_date).as_("manufacturing_date"),
+		Concat("EXP-", batch_table.expiry_date).as_("expiry_date"),
+	)
+
+	if filters.get("warehouse"):
+		query = query.where(stock_ledger_entry.warehouse == filters.get("warehouse"))
+
+	for field in searchfields:
+		query = query.select(batch_table[field])
+
+	if txt:
+		txt_condition = batch_table.name.like(txt)
+		for field in searchfields + ["name"]:
+			txt_condition |= batch_table[field].like(txt)
+
+		query = query.where(txt_condition)
+
+	return query
+
+
+def get_batches_from_serial_and_batch_bundle(searchfields, txt, filters):
+	bundle = frappe.qb.DocType("Serial and Batch Entry")
+	stock_ledger_entry = frappe.qb.DocType("Stock Ledger Entry")
+	batch_table = frappe.qb.DocType("Batch")
+
+	expiry_date = filters.get("posting_date") or today()
+
+	bundle_query = (
+		frappe.qb.from_(bundle)
+		.inner_join(stock_ledger_entry)
+		.on(bundle.parent == stock_ledger_entry.serial_and_batch_bundle)
+		.inner_join(batch_table)
+		.on(batch_table.name == bundle.batch_no)
+		.select(
+			bundle.batch_no,
+			Sum(bundle.qty).as_("qty"),
+		)
+		.where(((batch_table.expiry_date >= expiry_date) | (batch_table.expiry_date.isnull())))
+		.where(stock_ledger_entry.is_cancelled == 0)
+		.where(
+			(stock_ledger_entry.item_code == filters.get("item_code"))
+			& (batch_table.disabled == 0)
+			& (stock_ledger_entry.serial_and_batch_bundle.isnotnull())
+		)
+		.groupby(bundle.batch_no, bundle.warehouse)
+	)
+
+	bundle_query = bundle_query.select(
+		Concat("MFG-", batch_table.manufacturing_date),
+		Concat("EXP-", batch_table.expiry_date),
+	)
+
+	if filters.get("warehouse"):
+		bundle_query = bundle_query.where(stock_ledger_entry.warehouse == filters.get("warehouse"))
+
+	for field in searchfields:
+		bundle_query = bundle_query.select(batch_table[field])
+
+	if txt:
+		txt_condition = batch_table.name.like(txt)
+		for field in searchfields + ["name"]:
+			txt_condition |= batch_table[field].like(txt)
+
+		bundle_query = bundle_query.where(txt_condition)
+
+	return bundle_query


 @frappe.whitelist()
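
For reference, the merge step above can be read on its own: each sub-query contributes rows shaped like (batch_no, qty, manufacturing_date, expiry_date, ...), quantities for the same batch are summed across the two sources, and only batches with a positive remaining quantity survive. A minimal standalone sketch of that behaviour (the helper name and the sample rows below are invented):

from collections import OrderedDict

# Rows may arrive once from the Stock Ledger Entry sub-query and once from the
# Serial and Batch Bundle sub-query; same-batch quantities are summed and
# non-positive totals are dropped, mirroring get_filterd_batches() above.
def merge_batch_rows(rows):
	batches = OrderedDict()
	for row in rows:
		if row[0] not in batches:
			batches[row[0]] = list(row)
		else:
			batches[row[0]][1] += row[1]  # add the qty reported by the other source

	return [tuple(row) for row in batches.values() if row[1] > 0]

rows = [
	("BATCH-0001", 10.0, "MFG-2023-05-01", "EXP-2024-05-01"),
	("BATCH-0001", -4.0, "MFG-2023-05-01", "EXP-2024-05-01"),
	("BATCH-0002", -2.0, "MFG-2023-05-10", "EXP-2024-05-10"),
]
print(merge_batch_rows(rows))
# [('BATCH-0001', 6.0, 'MFG-2023-05-01', 'EXP-2024-05-01')]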

Changed file 2 of 5

@@ -363,10 +363,16 @@ erpnext.buying.BuyingController = class BuyingController extends erpnext.Transac
 				new erpnext.SerialBatchPackageSelector(
 					me.frm, item, (r) => {
 						if (r) {
-							frappe.model.set_value(item.doctype, item.name, {
+							let update_values = {
 								"serial_and_batch_bundle": r.name,
 								"qty": Math.abs(r.total_qty)
-							});
+							}
+
+							if (r.warehouse) {
+								update_values["warehouse"] = r.warehouse;
+							}
+
+							frappe.model.set_value(item.doctype, item.name, update_values);
 						}
 					}
 				);

@@ -392,10 +398,16 @@ erpnext.buying.BuyingController = class BuyingController extends erpnext.Transac
 				new erpnext.SerialBatchPackageSelector(
 					me.frm, item, (r) => {
 						if (r) {
-							frappe.model.set_value(item.doctype, item.name, {
-								"rejected_serial_and_batch_bundle": r.name,
+							let update_values = {
+								"serial_and_batch_bundle": r.name,
 								"rejected_qty": Math.abs(r.total_qty)
-							});
+							}
+
+							if (r.warehouse) {
+								update_values["rejected_warehouse"] = r.warehouse;
+							}
+
+							frappe.model.set_value(item.doctype, item.name, update_values);
 						}
 					}
 				);

Changed file 3 of 5

@@ -2292,8 +2292,9 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
 };

 erpnext.show_serial_batch_selector = function (frm, item_row, callback, on_close, show_dialog) {
+	debugger
 	let warehouse, receiving_stock, existing_stock;
+	let warehouse_field = "warehouse";

 	if (frm.doc.is_return) {
 		if (["Purchase Receipt", "Purchase Invoice"].includes(frm.doc.doctype)) {
 			existing_stock = true;

@@ -2309,6 +2310,19 @@ erpnext.show_serial_batch_selector = function (frm, item_row, callback, on_close
 			existing_stock = true;
 			warehouse = item_row.s_warehouse;
 		}
+
+		if (in_list([
+			"Material Transfer",
+			"Send to Subcontractor",
+			"Material Issue",
+			"Material Consumption for Manufacture",
+			"Material Transfer for Manufacture"
+			], frm.doc.purpose)
+		) {
+			warehouse_field = "s_warehouse";
+		} else {
+			warehouse_field = "t_warehouse";
+		}
 	} else {
 		existing_stock = true;
 		warehouse = item_row.warehouse;

@@ -2335,10 +2349,16 @@ erpnext.show_serial_batch_selector = function (frm, item_row, callback, on_close
 		new erpnext.SerialBatchPackageSelector(frm, item_row, (r) => {
 			if (r) {
-				frappe.model.set_value(item_row.doctype, item_row.name, {
+				let update_values = {
 					"serial_and_batch_bundle": r.name,
 					"qty": Math.abs(r.total_qty)
-				});
+				}
+
+				if (r.warehouse) {
+					update_values[warehouse_field] = r.warehouse;
+				}
+
+				frappe.model.set_value(item_row.doctype, item_row.name, update_values);
 			}
 		});
 	});
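
The warehouse_field handling added above reduces to a small rule: inside the is_return branch, issue-like Stock Entry purposes send the selector's warehouse back to s_warehouse and every other case to t_warehouse, while non-returns keep writing to the plain warehouse field. A rough Python rendering of that rule, for illustration only (pick_warehouse_field is a made-up name):

# Issue-like purposes as listed in the change above; anything else (including
# documents that have no purpose field) falls through to t_warehouse on returns.
ISSUE_LIKE_PURPOSES = {
	"Material Transfer",
	"Send to Subcontractor",
	"Material Issue",
	"Material Consumption for Manufacture",
	"Material Transfer for Manufacture",
}

def pick_warehouse_field(is_return, purpose=None):
	if not is_return:
		return "warehouse"
	return "s_warehouse" if purpose in ISSUE_LIKE_PURPOSES else "t_warehouse"

print(pick_warehouse_field(False))                      # warehouse
print(pick_warehouse_field(True, "Material Issue"))     # s_warehouse
print(pick_warehouse_field(True, "Material Receipt"))   # t_warehouse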

Changed file 4 of 5

@@ -48,6 +48,30 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
 	get_dialog_fields() {
 		let fields = [];

+		fields.push({
+			fieldtype: 'Link',
+			fieldname: 'warehouse',
+			label: __('Warehouse'),
+			options: 'Warehouse',
+			default: this.get_warehouse(),
+			onchange: () => {
+				this.item.warehouse = this.dialog.get_value('warehouse');
+				this.get_auto_data()
+			},
+			get_query: () => {
+				return {
+					filters: {
+						'is_group': 0,
+						'company': this.frm.doc.company,
+					}
+				};
+			}
+		});
+
+		fields.push({
+			fieldtype: 'Column Break',
+		});
+
 		if (this.item.has_serial_no) {
 			fields.push({
 				fieldtype: 'Data',

@@ -73,13 +97,6 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
 				fieldtype: 'Data',
 				fieldname: 'scan_batch_no',
 				label: __('Scan Batch No'),
-				get_query: () => {
-					return {
-						filters: {
-							'item': this.item.item_code
-						}
-					};
-				},
 				onchange: () => this.update_serial_batch_no()
 			});
 		}

@@ -246,11 +263,21 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
 				label: __('Batch No'),
 				in_list_view: 1,
 				get_query: () => {
-					return {
-						filters: {
-							'item': this.item.item_code
-						}
-					};
+					if (!this.item.outward) {
+						return {
+							filters: {
+								'item': this.item.item_code,
+							}
+						}
+					} else {
+						return {
+							query : "erpnext.controllers.queries.get_batch_no",
+							filters: {
+								'item_code': this.item.item_code,
+								'warehouse': this.get_warehouse()
+							}
+						}
+					}
 				},
 			}
 		]

@@ -278,29 +305,31 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
 	}

 	get_auto_data() {
-		const { qty, based_on } = this.dialog.get_values();
+		let { qty, based_on } = this.dialog.get_values();

 		if (!based_on) {
 			based_on = 'FIFO';
 		}

-		frappe.call({
-			method: 'erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle.get_auto_data',
-			args: {
-				item_code: this.item.item_code,
-				warehouse: this.item.warehouse || this.item.s_warehouse,
-				has_serial_no: this.item.has_serial_no,
-				has_batch_no: this.item.has_batch_no,
-				qty: qty,
-				based_on: based_on
-			},
-			callback: (r) => {
-				if (r.message) {
-					this.dialog.fields_dict.entries.df.data = r.message;
-					this.dialog.fields_dict.entries.grid.refresh();
-				}
-			}
-		});
+		if (qty) {
+			frappe.call({
+				method: 'erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle.get_auto_data',
+				args: {
+					item_code: this.item.item_code,
+					warehouse: this.item.warehouse || this.item.s_warehouse,
+					has_serial_no: this.item.has_serial_no,
+					has_batch_no: this.item.has_batch_no,
+					qty: qty,
+					based_on: based_on
+				},
+				callback: (r) => {
+					if (r.message) {
+						this.dialog.fields_dict.entries.df.data = r.message;
+						this.dialog.fields_dict.entries.grid.refresh();
+					}
+				}
+			});
+		}
 	}

 	update_serial_batch_no() {

@@ -325,6 +354,7 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
 	update_ledgers() {
 		let entries = this.dialog.get_values().entries;
+		let warehouse = this.dialog.get_value('warehouse');

 		if (entries && !entries.length || !entries) {
 			frappe.throw(__('Please add atleast one Serial No / Batch No'));

@@ -336,6 +366,7 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
 				entries: entries,
 				child_row: this.item,
 				doc: this.frm.doc,
+				warehouse: warehouse,
 			}
 		}).then(r => {
 			this.callback && this.callback(r.message);

Changed file 5 of 5

@@ -916,7 +916,7 @@ def get_filters_for_bundle(item_code, docstatus=None, voucher_no=None, name=None
 @frappe.whitelist()
-def add_serial_batch_ledgers(entries, child_row, doc) -> object:
+def add_serial_batch_ledgers(entries, child_row, doc, warehouse) -> object:
 	if isinstance(child_row, str):
 		child_row = frappe._dict(parse_json(child_row))
@@ -927,21 +927,23 @@ def add_serial_batch_ledgers(entries, child_row, doc) -> object:
 		parent_doc = parse_json(doc)

 	if frappe.db.exists("Serial and Batch Bundle", child_row.serial_and_batch_bundle):
-		doc = update_serial_batch_no_ledgers(entries, child_row, parent_doc)
+		doc = update_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse)
 	else:
-		doc = create_serial_batch_no_ledgers(entries, child_row, parent_doc)
+		doc = create_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse)

 	return doc


-def create_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
-	warehouse = child_row.rejected_warhouse if child_row.is_rejected else child_row.warehouse
+def create_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse=None) -> object:
+	warehouse = warehouse or (
+		child_row.rejected_warehouse if child_row.is_rejected else child_row.warehouse
+	)

 	type_of_transaction = child_row.type_of_transaction
 	if parent_doc.get("doctype") == "Stock Entry":
 		type_of_transaction = "Outward" if child_row.s_warehouse else "Inward"
-		warehouse = child_row.s_warehouse or child_row.t_warehouse
+		warehouse = warehouse or child_row.s_warehouse or child_row.t_warehouse

 	doc = frappe.get_doc(
 		{
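
The effect of threading warehouse through here: a warehouse passed in from the dialog now wins, the child row's rejected_warehouse/warehouse is the fallback, and Stock Entry rows still fall back to s_warehouse or t_warehouse last. A small illustrative sketch of that precedence, not the actual function (resolve_bundle_warehouse and the sample row are made up):

def resolve_bundle_warehouse(child_row, parent_doctype, warehouse=None):
	# An explicit warehouse (e.g. picked in the selector dialog) takes precedence.
	warehouse = warehouse or (
		child_row.get("rejected_warehouse") if child_row.get("is_rejected") else child_row.get("warehouse")
	)

	# Stock Entry rows keep their source/target warehouse fallback.
	if parent_doctype == "Stock Entry":
		warehouse = warehouse or child_row.get("s_warehouse") or child_row.get("t_warehouse")

	return warehouse

row = {"is_rejected": 0, "warehouse": None, "s_warehouse": "Stores - C", "t_warehouse": None}
print(resolve_bundle_warehouse(row, "Stock Entry"))                          # Stores - C
print(resolve_bundle_warehouse(row, "Stock Entry", "Work In Progress - C"))  # Work In Progress - C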
@@ -977,11 +979,12 @@ def create_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
 	return doc


-def update_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
+def update_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse=None) -> object:
 	doc = frappe.get_doc("Serial and Batch Bundle", child_row.serial_and_batch_bundle)
 	doc.voucher_detail_no = child_row.name
 	doc.posting_date = parent_doc.posting_date
 	doc.posting_time = parent_doc.posting_time
+	doc.warehouse = warehouse or doc.warehouse
 	doc.set("entries", [])

 	for d in entries:

@@ -989,7 +992,7 @@ def update_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
 			"entries",
 			{
 				"qty": d.get("qty") * (1 if doc.type_of_transaction == "Inward" else -1),
-				"warehouse": d.get("warehouse"),
+				"warehouse": warehouse or d.get("warehouse"),
 				"batch_no": d.get("batch_no"),
 				"serial_no": d.get("serial_no"),
 			},
@@ -1223,13 +1226,14 @@ def get_reserved_serial_nos_for_pos(kwargs):
 def get_auto_batch_nos(kwargs):
 	available_batches = get_available_batches(kwargs)
 	qty = flt(kwargs.qty)

 	stock_ledgers_batches = get_stock_ledgers_batches(kwargs)
 	if stock_ledgers_batches:
 		update_available_batches(available_batches, stock_ledgers_batches)

+	available_batches = list(filter(lambda x: x.qty > 0, available_batches))
 	if not qty:
 		return available_batches
@@ -1264,9 +1268,15 @@
 def update_available_batches(available_batches, reserved_batches):
-	for batch in available_batches:
-		if batch.batch_no and batch.batch_no in reserved_batches:
-			batch.qty -= reserved_batches[batch.batch_no]
+	for batch_no, data in reserved_batches.items():
+		batch_not_exists = True
+		for batch in available_batches:
+			if batch.batch_no == batch_no:
+				batch.qty += data.qty
+				batch_not_exists = False
+
+		if batch_not_exists:
+			available_batches.append(data)


 def get_available_batches(kwargs):
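
update_available_batches() now iterates the ledger-derived batches rather than the bundle-derived ones: a matching batch gets the ledger quantity added to it, and a batch that only exists in the stock ledger is appended instead of being ignored; filtering out non-positive totals happens afterwards in get_auto_batch_nos(). A standalone sketch with invented sample data:

from types import SimpleNamespace

# available_batches come from Serial and Batch Bundle rows, ledger_batches from
# plain Stock Ledger Entry rows keyed by batch_no; both carry a qty per warehouse.
def merge_ledger_batches(available_batches, ledger_batches):
	for batch_no, data in ledger_batches.items():
		found = False
		for batch in available_batches:
			if batch.batch_no == batch_no:
				batch.qty += data.qty
				found = True

		if not found:
			available_batches.append(data)

available = [SimpleNamespace(batch_no="BATCH-0001", qty=5.0, warehouse="Stores - C")]
ledger = {
	"BATCH-0001": SimpleNamespace(batch_no="BATCH-0001", qty=-2.0, warehouse="Stores - C"),
	"BATCH-0002": SimpleNamespace(batch_no="BATCH-0002", qty=3.0, warehouse="Stores - C"),
}
merge_ledger_batches(available, ledger)
print([(b.batch_no, b.qty) for b in available])
# [('BATCH-0001', 3.0), ('BATCH-0002', 3.0)]

# The non-positive filter runs afterwards, as in get_auto_batch_nos():
print([(b.batch_no, b.qty) for b in available if b.qty > 0])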
@@ -1287,7 +1297,7 @@ def get_available_batches(kwargs):
 		)
 		.where(((batch_table.expiry_date >= today()) | (batch_table.expiry_date.isnull())))
 		.where(stock_ledger_entry.is_cancelled == 0)
-		.groupby(batch_ledger.batch_no)
+		.groupby(batch_ledger.batch_no, batch_ledger.warehouse)
 	)

 	if kwargs.get("posting_date"):
@@ -1326,7 +1336,6 @@ def get_available_batches(kwargs):
 		query = query.where(stock_ledger_entry.voucher_no.notin(kwargs.get("ignore_voucher_nos")))

 	data = query.run(as_dict=True)
-	data = list(filter(lambda x: x.qty > 0, data))

 	return data
@@ -1452,9 +1461,12 @@ def get_stock_ledgers_for_serial_nos(kwargs):
 def get_stock_ledgers_batches(kwargs):
 	stock_ledger_entry = frappe.qb.DocType("Stock Ledger Entry")
+	batch_table = frappe.qb.DocType("Batch")

 	query = (
 		frappe.qb.from_(stock_ledger_entry)
+		.inner_join(batch_table)
+		.on(stock_ledger_entry.batch_no == batch_table.name)
 		.select(
 			stock_ledger_entry.warehouse,
 			stock_ledger_entry.item_code,
@@ -1474,10 +1486,16 @@ def get_stock_ledgers_batches(kwargs):
 		else:
 			query = query.where(stock_ledger_entry[field] == kwargs.get(field))

-	data = query.run(as_dict=True)
+	if kwargs.based_on == "LIFO":
+		query = query.orderby(batch_table.creation, order=frappe.qb.desc)
+	elif kwargs.based_on == "Expiry":
+		query = query.orderby(batch_table.expiry_date)
+	else:
+		query = query.orderby(batch_table.creation)

-	batches = defaultdict(float)
+	data = query.run(as_dict=True)
+
+	batches = {}
 	for d in data:
-		batches[d.batch_no] += d.qty
+		batches[d.batch_no] = d

 	return batches
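
Two things change in get_stock_ledgers_batches(): each batch_no now maps to the whole grouped row (so update_available_batches() can append it when the batch is missing) instead of a bare summed float, and rows come back ordered according to kwargs.based_on. The ordering amounts to the following, sketched here with made-up batches; the real helper pushes it into SQL via query.orderby():

from datetime import date

batches = [
	{"batch_no": "BATCH-0003", "creation": date(2023, 3, 1), "expiry_date": date(2023, 9, 1)},
	{"batch_no": "BATCH-0001", "creation": date(2023, 1, 1), "expiry_date": date(2024, 1, 1)},
	{"batch_no": "BATCH-0002", "creation": date(2023, 2, 1), "expiry_date": date(2023, 6, 1)},
]

def order_batches(batches, based_on=None):
	if based_on == "LIFO":
		return sorted(batches, key=lambda b: b["creation"], reverse=True)  # newest batch first
	elif based_on == "Expiry":
		return sorted(batches, key=lambda b: b["expiry_date"])             # soonest expiry first
	return sorted(batches, key=lambda b: b["creation"])                    # FIFO / default

print([b["batch_no"] for b in order_batches(batches, "LIFO")])    # ['BATCH-0003', 'BATCH-0002', 'BATCH-0001']
print([b["batch_no"] for b in order_batches(batches, "Expiry")])  # ['BATCH-0002', 'BATCH-0003', 'BATCH-0001']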