fix: get_query for batch number and incorrect batch qty

This commit is contained in:
Rohit Waghchaure 2023-06-04 16:09:01 +05:30
parent 14292ffc6f
commit acd12c5830
5 changed files with 259 additions and 136 deletions

View File

@ -3,12 +3,13 @@
import json
from collections import defaultdict
from collections import OrderedDict, defaultdict
import frappe
from frappe import scrub
from frappe.desk.reportview import get_filters_cond, get_match_cond
from frappe.utils import nowdate, unique
from frappe.query_builder.functions import Concat, Sum
from frappe.utils import nowdate, today, unique
import erpnext
from erpnext.stock.get_item_details import _get_item_tax_template
@ -412,95 +413,136 @@ def get_delivery_notes_to_be_billed(doctype, txt, searchfield, start, page_len,
@frappe.validate_and_sanitize_search_inputs
def get_batch_no(doctype, txt, searchfield, start, page_len, filters):
doctype = "Batch"
cond = ""
if filters.get("posting_date"):
cond = "and (batch.expiry_date is null or batch.expiry_date >= %(posting_date)s)"
batch_nos = None
args = {
"item_code": filters.get("item_code"),
"warehouse": filters.get("warehouse"),
"posting_date": filters.get("posting_date"),
"txt": "%{0}%".format(txt),
"start": start,
"page_len": page_len,
}
having_clause = "having sum(sle.actual_qty) > 0"
if filters.get("is_return"):
having_clause = ""
meta = frappe.get_meta(doctype, cached=True)
searchfields = meta.get_search_fields()
search_columns = ""
search_cond = ""
query = get_batches_from_stock_ledger_entries(searchfields, txt, filters)
bundle_query = get_batches_from_serial_and_batch_bundle(searchfields, txt, filters)
if searchfields:
search_columns = ", " + ", ".join(searchfields)
search_cond = " or " + " or ".join([field + " like %(txt)s" for field in searchfields])
data = (
frappe.qb.from_((query) + (bundle_query))
.select("batch_no", "qty", "manufacturing_date", "expiry_date")
.offset(start)
.limit(page_len)
)
if args.get("warehouse"):
searchfields = ["batch." + field for field in searchfields]
if searchfields:
search_columns = ", " + ", ".join(searchfields)
search_cond = " or " + " or ".join([field + " like %(txt)s" for field in searchfields])
for field in searchfields:
data = data.select(field)
batch_nos = frappe.db.sql(
"""select sle.batch_no, round(sum(sle.actual_qty),2), sle.stock_uom,
concat('MFG-',batch.manufacturing_date), concat('EXP-',batch.expiry_date)
{search_columns}
from `tabStock Ledger Entry` sle
INNER JOIN `tabBatch` batch on sle.batch_no = batch.name
where
batch.disabled = 0
and sle.is_cancelled = 0
and sle.item_code = %(item_code)s
and sle.warehouse = %(warehouse)s
and (sle.batch_no like %(txt)s
or batch.expiry_date like %(txt)s
or batch.manufacturing_date like %(txt)s
{search_cond})
and batch.docstatus < 2
{cond}
{match_conditions}
group by batch_no {having_clause}
order by batch.expiry_date, sle.batch_no desc
limit %(page_len)s offset %(start)s""".format(
search_columns=search_columns,
cond=cond,
match_conditions=get_match_cond(doctype),
having_clause=having_clause,
search_cond=search_cond,
),
args,
data = data.run()
data = get_filterd_batches(data)
return data
def get_filterd_batches(data):
	"""Merge duplicate batch rows and drop batches with non-positive qty.

	The stock-ledger and serial-and-batch-bundle sub-queries can each yield
	the same batch more than once (grouping includes warehouse), so rows are
	merged by batch number — index 0 is the batch no, index 1 the quantity —
	summing quantities while preserving first-seen order. Only batches whose
	combined quantity is positive are returned, each as a tuple.
	"""
	merged = OrderedDict()
	for row in data:
		batch_no, qty = row[0], row[1]
		if batch_no in merged:
			# Batch already seen: accumulate its quantity in place.
			merged[batch_no][1] += qty
		else:
			# First occurrence: keep a mutable copy so qty can be summed into it.
			merged[batch_no] = list(row)

	return [tuple(row) for row in merged.values() if row[1] > 0]
def get_batches_from_stock_ledger_entries(searchfields, txt, filters):
stock_ledger_entry = frappe.qb.DocType("Stock Ledger Entry")
batch_table = frappe.qb.DocType("Batch")
expiry_date = filters.get("posting_date") or today()
query = (
frappe.qb.from_(stock_ledger_entry)
.inner_join(batch_table)
.on(batch_table.name == stock_ledger_entry.batch_no)
.select(
stock_ledger_entry.batch_no,
Sum(stock_ledger_entry.actual_qty).as_("qty"),
)
return batch_nos
else:
return frappe.db.sql(
"""select name, concat('MFG-', manufacturing_date), concat('EXP-',expiry_date)
{search_columns}
from `tabBatch` batch
where batch.disabled = 0
and item = %(item_code)s
and (name like %(txt)s
or expiry_date like %(txt)s
or manufacturing_date like %(txt)s
{search_cond})
and docstatus < 2
{0}
{match_conditions}
order by expiry_date, name desc
limit %(page_len)s offset %(start)s""".format(
cond,
search_columns=search_columns,
search_cond=search_cond,
match_conditions=get_match_cond(doctype),
),
args,
.where(((batch_table.expiry_date >= expiry_date) | (batch_table.expiry_date.isnull())))
.where(stock_ledger_entry.is_cancelled == 0)
.where(
(stock_ledger_entry.item_code == filters.get("item_code"))
& (batch_table.disabled == 0)
& (stock_ledger_entry.batch_no.isnotnull())
)
.groupby(stock_ledger_entry.batch_no, stock_ledger_entry.warehouse)
)
query = query.select(
Concat("MFG-", batch_table.manufacturing_date).as_("manufacturing_date"),
Concat("EXP-", batch_table.expiry_date).as_("expiry_date"),
)
if filters.get("warehouse"):
query = query.where(stock_ledger_entry.warehouse == filters.get("warehouse"))
for field in searchfields:
query = query.select(batch_table[field])
if txt:
txt_condition = batch_table.name.like(txt)
for field in searchfields + ["name"]:
txt_condition |= batch_table[field].like(txt)
query = query.where(txt_condition)
return query
def get_batches_from_serial_and_batch_bundle(searchfields, txt, filters):
	"""Build (don't run) a query-builder query for batch availability coming
	from Serial and Batch Bundle entries, grouped by batch and warehouse.

	Mirrors the stock-ledger-entry variant: selects batch_no, summed qty,
	'MFG-'/'EXP-'-prefixed dates, plus any Batch search fields, restricted to
	non-disabled, non-expired batches on non-cancelled ledger entries for the
	given item (and warehouse, if supplied). The caller composes this with
	the SLE query and paginates the union.
	"""
	bundle = frappe.qb.DocType("Serial and Batch Entry")
	stock_ledger_entry = frappe.qb.DocType("Stock Ledger Entry")
	batch_table = frappe.qb.DocType("Batch")

	# Batches expiring before the posting date (or today) are excluded;
	# batches with no expiry date are always allowed.
	expiry_date = filters.get("posting_date") or today()

	bundle_query = (
		frappe.qb.from_(bundle)
		.inner_join(stock_ledger_entry)
		# Bundle rows are children of the Serial and Batch Bundle referenced
		# by the stock ledger entry.
		.on(bundle.parent == stock_ledger_entry.serial_and_batch_bundle)
		.inner_join(batch_table)
		.on(batch_table.name == bundle.batch_no)
		.select(
			bundle.batch_no,
			Sum(bundle.qty).as_("qty"),
		)
		.where(((batch_table.expiry_date >= expiry_date) | (batch_table.expiry_date.isnull())))
		.where(stock_ledger_entry.is_cancelled == 0)
		.where(
			(stock_ledger_entry.item_code == filters.get("item_code"))
			& (batch_table.disabled == 0)
			# Only ledger entries that actually carry a bundle; plain
			# batch_no entries are handled by the SLE-based query.
			& (stock_ledger_entry.serial_and_batch_bundle.isnotnull())
		)
		# Grouping by warehouse too means one row per (batch, warehouse);
		# the caller re-merges duplicates per batch afterwards.
		.groupby(bundle.batch_no, bundle.warehouse)
	)

	bundle_query = bundle_query.select(
		# Prefixed so the search widget can display/match the dates.
		Concat("MFG-", batch_table.manufacturing_date),
		Concat("EXP-", batch_table.expiry_date),
	)

	if filters.get("warehouse"):
		bundle_query = bundle_query.where(stock_ledger_entry.warehouse == filters.get("warehouse"))

	# Expose each configured Batch search field as an extra output column.
	for field in searchfields:
		bundle_query = bundle_query.select(batch_table[field])

	if txt:
		# NOTE(review): txt is used as-is in LIKE — no '%' wildcards are
		# added here, so this only matches exact/pattern input; confirm the
		# caller wraps txt if substring search is intended.
		txt_condition = batch_table.name.like(txt)
		# "name" is OR-ed in again here (already the seed condition above) —
		# harmless duplication, presumably to cover searchfields being empty.
		for field in searchfields + ["name"]:
			txt_condition |= batch_table[field].like(txt)

		bundle_query = bundle_query.where(txt_condition)

	return bundle_query
@frappe.whitelist()

View File

@ -363,10 +363,16 @@ erpnext.buying.BuyingController = class BuyingController extends erpnext.Transac
new erpnext.SerialBatchPackageSelector(
me.frm, item, (r) => {
if (r) {
frappe.model.set_value(item.doctype, item.name, {
let update_values = {
"serial_and_batch_bundle": r.name,
"qty": Math.abs(r.total_qty)
});
}
if (r.warehouse) {
update_values["warehouse"] = r.warehouse;
}
frappe.model.set_value(item.doctype, item.name, update_values);
}
}
);
@ -392,10 +398,16 @@ erpnext.buying.BuyingController = class BuyingController extends erpnext.Transac
new erpnext.SerialBatchPackageSelector(
me.frm, item, (r) => {
if (r) {
frappe.model.set_value(item.doctype, item.name, {
"rejected_serial_and_batch_bundle": r.name,
let update_values = {
"serial_and_batch_bundle": r.name,
"rejected_qty": Math.abs(r.total_qty)
});
}
if (r.warehouse) {
update_values["rejected_warehouse"] = r.warehouse;
}
frappe.model.set_value(item.doctype, item.name, update_values);
}
}
);

View File

@ -2292,8 +2292,9 @@ erpnext.TransactionController = class TransactionController extends erpnext.taxe
};
erpnext.show_serial_batch_selector = function (frm, item_row, callback, on_close, show_dialog) {
debugger
let warehouse, receiving_stock, existing_stock;
let warehouse_field = "warehouse";
if (frm.doc.is_return) {
if (["Purchase Receipt", "Purchase Invoice"].includes(frm.doc.doctype)) {
existing_stock = true;
@ -2309,6 +2310,19 @@ erpnext.show_serial_batch_selector = function (frm, item_row, callback, on_close
existing_stock = true;
warehouse = item_row.s_warehouse;
}
if (in_list([
"Material Transfer",
"Send to Subcontractor",
"Material Issue",
"Material Consumption for Manufacture",
"Material Transfer for Manufacture"
], frm.doc.purpose)
) {
warehouse_field = "s_warehouse";
} else {
warehouse_field = "t_warehouse";
}
} else {
existing_stock = true;
warehouse = item_row.warehouse;
@ -2335,10 +2349,16 @@ erpnext.show_serial_batch_selector = function (frm, item_row, callback, on_close
new erpnext.SerialBatchPackageSelector(frm, item_row, (r) => {
if (r) {
frappe.model.set_value(item_row.doctype, item_row.name, {
let update_values = {
"serial_and_batch_bundle": r.name,
"qty": Math.abs(r.total_qty)
});
}
if (r.warehouse) {
update_values[warehouse_field] = r.warehouse;
}
frappe.model.set_value(item_row.doctype, item_row.name, update_values);
}
});
});

View File

@ -48,6 +48,30 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
get_dialog_fields() {
let fields = [];
fields.push({
fieldtype: 'Link',
fieldname: 'warehouse',
label: __('Warehouse'),
options: 'Warehouse',
default: this.get_warehouse(),
onchange: () => {
this.item.warehouse = this.dialog.get_value('warehouse');
this.get_auto_data()
},
get_query: () => {
return {
filters: {
'is_group': 0,
'company': this.frm.doc.company,
}
};
}
});
fields.push({
fieldtype: 'Column Break',
});
if (this.item.has_serial_no) {
fields.push({
fieldtype: 'Data',
@ -73,13 +97,6 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
fieldtype: 'Data',
fieldname: 'scan_batch_no',
label: __('Scan Batch No'),
get_query: () => {
return {
filters: {
'item': this.item.item_code
}
};
},
onchange: () => this.update_serial_batch_no()
});
}
@ -246,11 +263,21 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
label: __('Batch No'),
in_list_view: 1,
get_query: () => {
return {
filters: {
'item': this.item.item_code
if (!this.item.outward) {
return {
filters: {
'item': this.item.item_code,
}
}
};
} else {
return {
query : "erpnext.controllers.queries.get_batch_no",
filters: {
'item_code': this.item.item_code,
'warehouse': this.get_warehouse()
}
}
}
},
}
]
@ -278,29 +305,31 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
}
get_auto_data() {
const { qty, based_on } = this.dialog.get_values();
let { qty, based_on } = this.dialog.get_values();
if (!based_on) {
based_on = 'FIFO';
}
frappe.call({
method: 'erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle.get_auto_data',
args: {
item_code: this.item.item_code,
warehouse: this.item.warehouse || this.item.s_warehouse,
has_serial_no: this.item.has_serial_no,
has_batch_no: this.item.has_batch_no,
qty: qty,
based_on: based_on
},
callback: (r) => {
if (r.message) {
this.dialog.fields_dict.entries.df.data = r.message;
this.dialog.fields_dict.entries.grid.refresh();
if (qty) {
frappe.call({
method: 'erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle.get_auto_data',
args: {
item_code: this.item.item_code,
warehouse: this.item.warehouse || this.item.s_warehouse,
has_serial_no: this.item.has_serial_no,
has_batch_no: this.item.has_batch_no,
qty: qty,
based_on: based_on
},
callback: (r) => {
if (r.message) {
this.dialog.fields_dict.entries.df.data = r.message;
this.dialog.fields_dict.entries.grid.refresh();
}
}
}
});
});
}
}
update_serial_batch_no() {
@ -325,6 +354,7 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
update_ledgers() {
let entries = this.dialog.get_values().entries;
let warehouse = this.dialog.get_value('warehouse');
if (entries && !entries.length || !entries) {
frappe.throw(__('Please add atleast one Serial No / Batch No'));
@ -336,6 +366,7 @@ erpnext.SerialBatchPackageSelector = class SerialNoBatchBundleUpdate {
entries: entries,
child_row: this.item,
doc: this.frm.doc,
warehouse: warehouse,
}
}).then(r => {
this.callback && this.callback(r.message);

View File

@ -916,7 +916,7 @@ def get_filters_for_bundle(item_code, docstatus=None, voucher_no=None, name=None
@frappe.whitelist()
def add_serial_batch_ledgers(entries, child_row, doc) -> object:
def add_serial_batch_ledgers(entries, child_row, doc, warehouse) -> object:
if isinstance(child_row, str):
child_row = frappe._dict(parse_json(child_row))
@ -927,21 +927,23 @@ def add_serial_batch_ledgers(entries, child_row, doc) -> object:
parent_doc = parse_json(doc)
if frappe.db.exists("Serial and Batch Bundle", child_row.serial_and_batch_bundle):
doc = update_serial_batch_no_ledgers(entries, child_row, parent_doc)
doc = update_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse)
else:
doc = create_serial_batch_no_ledgers(entries, child_row, parent_doc)
doc = create_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse)
return doc
def create_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
def create_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse=None) -> object:
warehouse = child_row.rejected_warhouse if child_row.is_rejected else child_row.warehouse
warehouse = warehouse or (
child_row.rejected_warehouse if child_row.is_rejected else child_row.warehouse
)
type_of_transaction = child_row.type_of_transaction
if parent_doc.get("doctype") == "Stock Entry":
type_of_transaction = "Outward" if child_row.s_warehouse else "Inward"
warehouse = child_row.s_warehouse or child_row.t_warehouse
warehouse = warehouse or child_row.s_warehouse or child_row.t_warehouse
doc = frappe.get_doc(
{
@ -977,11 +979,12 @@ def create_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
return doc
def update_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
def update_serial_batch_no_ledgers(entries, child_row, parent_doc, warehouse=None) -> object:
doc = frappe.get_doc("Serial and Batch Bundle", child_row.serial_and_batch_bundle)
doc.voucher_detail_no = child_row.name
doc.posting_date = parent_doc.posting_date
doc.posting_time = parent_doc.posting_time
doc.warehouse = warehouse or doc.warehouse
doc.set("entries", [])
for d in entries:
@ -989,7 +992,7 @@ def update_serial_batch_no_ledgers(entries, child_row, parent_doc) -> object:
"entries",
{
"qty": d.get("qty") * (1 if doc.type_of_transaction == "Inward" else -1),
"warehouse": d.get("warehouse"),
"warehouse": warehouse or d.get("warehouse"),
"batch_no": d.get("batch_no"),
"serial_no": d.get("serial_no"),
},
@ -1223,13 +1226,14 @@ def get_reserved_serial_nos_for_pos(kwargs):
def get_auto_batch_nos(kwargs):
available_batches = get_available_batches(kwargs)
qty = flt(kwargs.qty)
stock_ledgers_batches = get_stock_ledgers_batches(kwargs)
if stock_ledgers_batches:
update_available_batches(available_batches, stock_ledgers_batches)
available_batches = list(filter(lambda x: x.qty > 0, available_batches))
if not qty:
return available_batches
@ -1264,9 +1268,15 @@ def get_auto_batch_nos(kwargs):
def update_available_batches(available_batches, reserved_batches):
for batch in available_batches:
if batch.batch_no and batch.batch_no in reserved_batches:
batch.qty -= reserved_batches[batch.batch_no]
for batch_no, data in reserved_batches.items():
batch_not_exists = True
for batch in available_batches:
if batch.batch_no == batch_no:
batch.qty += data.qty
batch_not_exists = False
if batch_not_exists:
available_batches.append(data)
def get_available_batches(kwargs):
@ -1287,7 +1297,7 @@ def get_available_batches(kwargs):
)
.where(((batch_table.expiry_date >= today()) | (batch_table.expiry_date.isnull())))
.where(stock_ledger_entry.is_cancelled == 0)
.groupby(batch_ledger.batch_no)
.groupby(batch_ledger.batch_no, batch_ledger.warehouse)
)
if kwargs.get("posting_date"):
@ -1326,7 +1336,6 @@ def get_available_batches(kwargs):
query = query.where(stock_ledger_entry.voucher_no.notin(kwargs.get("ignore_voucher_nos")))
data = query.run(as_dict=True)
data = list(filter(lambda x: x.qty > 0, data))
return data
@ -1452,9 +1461,12 @@ def get_stock_ledgers_for_serial_nos(kwargs):
def get_stock_ledgers_batches(kwargs):
stock_ledger_entry = frappe.qb.DocType("Stock Ledger Entry")
batch_table = frappe.qb.DocType("Batch")
query = (
frappe.qb.from_(stock_ledger_entry)
.inner_join(batch_table)
.on(stock_ledger_entry.batch_no == batch_table.name)
.select(
stock_ledger_entry.warehouse,
stock_ledger_entry.item_code,
@ -1474,10 +1486,16 @@ def get_stock_ledgers_batches(kwargs):
else:
query = query.where(stock_ledger_entry[field] == kwargs.get(field))
data = query.run(as_dict=True)
if kwargs.based_on == "LIFO":
query = query.orderby(batch_table.creation, order=frappe.qb.desc)
elif kwargs.based_on == "Expiry":
query = query.orderby(batch_table.expiry_date)
else:
query = query.orderby(batch_table.creation)
batches = defaultdict(float)
data = query.run(as_dict=True)
batches = {}
for d in data:
batches[d.batch_no] += d.qty
batches[d.batch_no] = d
return batches