# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt


from operator import itemgetter
from typing import Any, Dict, List, Optional, TypedDict

import frappe
from frappe import _
from frappe.query_builder import Order
from frappe.query_builder.functions import Coalesce, CombineDatetime
from frappe.utils import add_days, cint, date_diff, flt, getdate
from frappe.utils.nestedset import get_descendants_of

import erpnext
from erpnext.stock.doctype.inventory_dimension.inventory_dimension import get_inventory_dimensions
from erpnext.stock.doctype.warehouse.warehouse import apply_warehouse_filter
from erpnext.stock.report.stock_ageing.stock_ageing import FIFOSlots, get_average_age
from erpnext.stock.utils import add_additional_uom_columns


class StockBalanceFilter(TypedDict):
	company: Optional[str]
	from_date: str
	to_date: str
	item_group: Optional[str]
	item: Optional[str]
	warehouse: Optional[str]
	warehouse_type: Optional[str]
	include_uom: Optional[str]  # include extra info in converted UOM
	show_stock_ageing_data: bool
	show_variant_attributes: bool


SLEntry = Dict[str, Any]


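# Report entry point: Frappe's script report framework calls execute() with
# the filters chosen in the UI and expects (columns, data) in return.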
def execute(filters: Optional[StockBalanceFilter] = None):
	return StockBalanceReport(filters).run()


class StockBalanceReport(object):
	def __init__(self, filters: Optional[StockBalanceFilter]) -> None:
		self.filters = filters
		self.from_date = getdate(filters.get("from_date"))
		self.to_date = getdate(filters.get("to_date"))

		self.start_from = None
		self.data = []
		self.columns = []
		self.sle_entries: List[SLEntry] = []
		self.set_company_currency()

	def set_company_currency(self) -> None:
		if self.filters.get("company"):
			self.company_currency = erpnext.get_company_currency(self.filters.get("company"))
		else:
			self.company_currency = frappe.db.get_single_value("Global Defaults", "default_currency")

	def run(self):
		self.float_precision = cint(frappe.db.get_default("float_precision")) or 3

		self.inventory_dimensions = self.get_inventory_dimension_fields()
		self.prepare_opening_data_from_closing_balance()
		self.prepare_stock_ledger_entries()
		self.prepare_new_data()

		if not self.columns:
			self.columns = self.get_columns()

		self.add_additional_uom_columns()

		return self.columns, self.data

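	# Seed opening figures from the most recent submitted Closing Stock Balance
	# (if any), so that older stock ledger entries do not have to be scanned.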
	def prepare_opening_data_from_closing_balance(self) -> None:
		self.opening_data = frappe._dict({})

		closing_balance = self.get_closing_balance()
		if not closing_balance:
			return

		self.start_from = add_days(closing_balance[0].to_date, 1)
		res = frappe.get_doc("Closing Stock Balance", closing_balance[0].name).get_prepared_data()

		for entry in res.data:
			entry = frappe._dict(entry)

			group_by_key = self.get_group_by_key(entry)
			if group_by_key not in self.opening_data:
				self.opening_data.setdefault(group_by_key, entry)

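	# Build the final report rows: aggregate the ledger entries and optionally
	# enrich each row with FIFO ageing data, variant attribute values and the
	# quantity reserved through Stock Reservation Entries.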
	def prepare_new_data(self):
		if not self.sle_entries:
			return

		if self.filters.get("show_stock_ageing_data"):
			self.filters["show_warehouse_wise_stock"] = True
			item_wise_fifo_queue = FIFOSlots(self.filters, self.sle_entries).generate()

		_func = itemgetter(1)

		self.item_warehouse_map = self.get_item_warehouse_map()
		sre_details = self.get_sre_reserved_qty_details()

		variant_values = {}
		if self.filters.get("show_variant_attributes"):
			variant_values = self.get_variant_values_for()

		for key, report_data in self.item_warehouse_map.items():
			if variant_data := variant_values.get(report_data.item_code):
				report_data.update(variant_data)

			if self.filters.get("show_stock_ageing_data"):
				opening_fifo_queue = self.get_opening_fifo_queue(report_data) or []

				fifo_queue = []
				if fifo_queue := item_wise_fifo_queue.get((report_data.item_code, report_data.warehouse)):
					fifo_queue = fifo_queue.get("fifo_queue")

				if fifo_queue:
					opening_fifo_queue.extend(fifo_queue)

				stock_ageing_data = {"average_age": 0, "earliest_age": 0, "latest_age": 0}
				if opening_fifo_queue:
					fifo_queue = sorted(filter(_func, opening_fifo_queue), key=_func)
					if not fifo_queue:
						continue

					to_date = self.to_date
					stock_ageing_data["average_age"] = get_average_age(fifo_queue, to_date)
					stock_ageing_data["earliest_age"] = date_diff(to_date, fifo_queue[0][1])
					stock_ageing_data["latest_age"] = date_diff(to_date, fifo_queue[-1][1])
					stock_ageing_data["fifo_queue"] = fifo_queue

				report_data.update(stock_ageing_data)

			report_data.update(
				{"reserved_stock": sre_details.get((report_data.item_code, report_data.warehouse), 0.0)}
			)
			self.data.append(report_data)

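	# Aggregate the ledger entries per (company, item, warehouse [+ inventory
	# dimensions]) key, fold in opening rows taken from the closing balance and
	# drop keys without any transactions.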
	def get_item_warehouse_map(self):
		item_warehouse_map = {}
		self.opening_vouchers = self.get_opening_vouchers()

		for entry in self.sle_entries:
			group_by_key = self.get_group_by_key(entry)
			if group_by_key not in item_warehouse_map:
				self.initialize_data(item_warehouse_map, group_by_key, entry)

			self.prepare_item_warehouse_map(item_warehouse_map, entry, group_by_key)

			if self.opening_data.get(group_by_key):
				del self.opening_data[group_by_key]

		for group_by_key, entry in self.opening_data.items():
			if group_by_key not in item_warehouse_map:
				self.initialize_data(item_warehouse_map, group_by_key, entry)

		item_warehouse_map = filter_items_with_no_transactions(
			item_warehouse_map, self.float_precision, self.inventory_dimensions
		)

		return item_warehouse_map

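	# Reserved quantities come from Stock Reservation Entries and are keyed by
	# (item_code, warehouse), matching the lookup in prepare_new_data.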
	def get_sre_reserved_qty_details(self) -> dict:
		from erpnext.stock.doctype.stock_reservation_entry.stock_reservation_entry import (
			get_sre_reserved_qty_details_for_item_and_warehouse as get_reserved_qty_details,
		)

		item_code_list, warehouse_list = [], []
		for d in self.item_warehouse_map:
			item_code_list.append(d[1])
			warehouse_list.append(d[2])

		return get_reserved_qty_details(item_code_list, warehouse_list)

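	# Classify each entry's qty/value difference into the opening, inward or
	# outward bucket, depending on its posting date and whether its voucher is
	# marked as an opening entry.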
	def prepare_item_warehouse_map(self, item_warehouse_map, entry, group_by_key):
		qty_dict = item_warehouse_map[group_by_key]
		for field in self.inventory_dimensions:
			qty_dict[field] = entry.get(field)

		if entry.voucher_type == "Stock Reconciliation" and (not entry.batch_no or entry.serial_no):
			qty_diff = flt(entry.qty_after_transaction) - flt(qty_dict.bal_qty)
		else:
			qty_diff = flt(entry.actual_qty)

		value_diff = flt(entry.stock_value_difference)

		if entry.posting_date < self.from_date or entry.voucher_no in self.opening_vouchers.get(
			entry.voucher_type, []
		):
			qty_dict.opening_qty += qty_diff
			qty_dict.opening_val += value_diff

		elif entry.posting_date >= self.from_date and entry.posting_date <= self.to_date:
			if flt(qty_diff, self.float_precision) >= 0:
				qty_dict.in_qty += qty_diff
				qty_dict.in_val += value_diff
			else:
				qty_dict.out_qty += abs(qty_diff)
				qty_dict.out_val += abs(value_diff)

		qty_dict.val_rate = entry.valuation_rate
		qty_dict.bal_qty += qty_diff
		qty_dict.bal_val += value_diff

	def initialize_data(self, item_warehouse_map, group_by_key, entry):
		opening_data = self.opening_data.get(group_by_key, {})

		item_warehouse_map[group_by_key] = frappe._dict(
			{
				"item_code": entry.item_code,
				"warehouse": entry.warehouse,
				"item_group": entry.item_group,
				"company": entry.company,
				"currency": self.company_currency,
				"stock_uom": entry.stock_uom,
				"item_name": entry.item_name,
				"opening_qty": opening_data.get("bal_qty") or 0.0,
				"opening_val": opening_data.get("bal_val") or 0.0,
				"opening_fifo_queue": opening_data.get("fifo_queue") or [],
				"in_qty": 0.0,
				"in_val": 0.0,
				"out_qty": 0.0,
				"out_val": 0.0,
				"bal_qty": opening_data.get("bal_qty") or 0.0,
				"bal_val": opening_data.get("bal_val") or 0.0,
				"val_rate": 0.0,
			}
		)

	def get_group_by_key(self, row) -> tuple:
		group_by_key = [row.company, row.item_code, row.warehouse]

		for fieldname in self.inventory_dimensions:
			if self.filters.get(fieldname):
				group_by_key.append(row.get(fieldname))

		return tuple(group_by_key)

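	# Latest submitted Closing Stock Balance whose period ends on or before the
	# report's from date, narrowed down by the warehouse/item filters.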
	def get_closing_balance(self) -> List[Dict[str, Any]]:
		if self.filters.get("ignore_closing_balance"):
			return []

		table = frappe.qb.DocType("Closing Stock Balance")

		query = (
			frappe.qb.from_(table)
			.select(table.name, table.to_date)
			.where(
				(table.docstatus == 1)
				& (table.company == self.filters.company)
				& (table.to_date <= self.from_date)
			)
			.orderby(table.to_date, order=Order.desc)
			.limit(1)
		)

		for fieldname in ["warehouse", "item_code", "item_group", "warehouse_type"]:
			if self.filters.get(fieldname):
				query = query.where(table[fieldname] == self.filters.get(fieldname))

		return query.run(as_dict=True)

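	# Fetch the relevant Stock Ledger Entries (joined with Item for group, UOM
	# and name), ordered by posting datetime, with the dimension, warehouse,
	# item, date and company filters applied.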
	def prepare_stock_ledger_entries(self):
		sle = frappe.qb.DocType("Stock Ledger Entry")
		item_table = frappe.qb.DocType("Item")

		query = (
			frappe.qb.from_(sle)
			.inner_join(item_table)
			.on(sle.item_code == item_table.name)
			.select(
				sle.item_code,
				sle.warehouse,
				sle.posting_date,
				sle.actual_qty,
				sle.valuation_rate,
				sle.company,
				sle.voucher_type,
				sle.qty_after_transaction,
				sle.stock_value_difference,
				sle.item_code.as_("name"),
				sle.voucher_no,
				sle.stock_value,
				sle.batch_no,
				sle.serial_no,
				item_table.item_group,
				item_table.stock_uom,
				item_table.item_name,
			)
			.where((sle.docstatus < 2) & (sle.is_cancelled == 0))
			.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
			.orderby(sle.creation)
			.orderby(sle.actual_qty)
		)

		query = self.apply_inventory_dimensions_filters(query, sle)
		query = self.apply_warehouse_filters(query, sle)
		query = self.apply_items_filters(query, item_table)
		query = self.apply_date_filters(query, sle)

		if self.filters.get("company"):
			query = query.where(sle.company == self.filters.get("company"))

		self.sle_entries = query.run(as_dict=True)

	def apply_inventory_dimensions_filters(self, query, sle):
		inventory_dimension_fields = self.get_inventory_dimension_fields()
		if inventory_dimension_fields:
			for fieldname in inventory_dimension_fields:
				query = query.select(fieldname)
				if self.filters.get(fieldname):
					query = query.where(sle[fieldname].isin(self.filters.get(fieldname)))

		return query

	def apply_warehouse_filters(self, query, sle):
		warehouse_table = frappe.qb.DocType("Warehouse")

		if self.filters.get("warehouse"):
			query = apply_warehouse_filter(query, sle, self.filters)
		elif warehouse_type := self.filters.get("warehouse_type"):
			query = (
				query.join(warehouse_table)
				.on(warehouse_table.name == sle.warehouse)
				.where(warehouse_table.warehouse_type == warehouse_type)
			)

		return query

	def apply_items_filters(self, query, item_table):
		if item_group := self.filters.get("item_group"):
			children = get_descendants_of("Item Group", item_group, ignore_permissions=True)
			query = query.where(item_table.item_group.isin(children + [item_group]))

		for field in ["item_code", "brand"]:
			if not self.filters.get(field):
				continue

			query = query.where(item_table[field] == self.filters.get(field))

		return query

	def apply_date_filters(self, query, sle):
		if not self.filters.ignore_closing_balance and self.start_from:
			query = query.where(sle.posting_date >= self.start_from)

		if self.to_date:
			query = query.where(sle.posting_date <= self.to_date)

		return query

	def get_columns(self):
		columns = [
			{
				"label": _("Item"),
				"fieldname": "item_code",
				"fieldtype": "Link",
				"options": "Item",
				"width": 100,
			},
			{"label": _("Item Name"), "fieldname": "item_name", "width": 150},
			{
				"label": _("Item Group"),
				"fieldname": "item_group",
				"fieldtype": "Link",
				"options": "Item Group",
				"width": 100,
			},
			{
				"label": _("Warehouse"),
				"fieldname": "warehouse",
				"fieldtype": "Link",
				"options": "Warehouse",
				"width": 100,
			},
		]

		for dimension in get_inventory_dimensions():
			columns.append(
				{
					"label": _(dimension.doctype),
					"fieldname": dimension.fieldname,
					"fieldtype": "Link",
					"options": dimension.doctype,
					"width": 110,
				}
			)

		columns.extend(
			[
				{
					"label": _("Stock UOM"),
					"fieldname": "stock_uom",
					"fieldtype": "Link",
					"options": "UOM",
					"width": 90,
				},
				{
					"label": _("Balance Qty"),
					"fieldname": "bal_qty",
					"fieldtype": "Float",
					"width": 100,
					"convertible": "qty",
				},
				{
					"label": _("Balance Value"),
					"fieldname": "bal_val",
					"fieldtype": "Currency",
					"width": 100,
					"options": "currency",
				},
				{
					"label": _("Opening Qty"),
					"fieldname": "opening_qty",
					"fieldtype": "Float",
					"width": 100,
					"convertible": "qty",
				},
				{
					"label": _("Opening Value"),
					"fieldname": "opening_val",
					"fieldtype": "Currency",
					"width": 110,
					"options": "currency",
				},
				{
					"label": _("In Qty"),
					"fieldname": "in_qty",
					"fieldtype": "Float",
					"width": 80,
					"convertible": "qty",
				},
				{"label": _("In Value"), "fieldname": "in_val", "fieldtype": "Float", "width": 80},
				{
					"label": _("Out Qty"),
					"fieldname": "out_qty",
					"fieldtype": "Float",
					"width": 80,
					"convertible": "qty",
				},
				{"label": _("Out Value"), "fieldname": "out_val", "fieldtype": "Float", "width": 80},
				{
					"label": _("Valuation Rate"),
					"fieldname": "val_rate",
					"fieldtype": self.filters.valuation_field_type or "Currency",
					"width": 90,
					"convertible": "rate",
					"options": "Company:company:default_currency"
					if self.filters.valuation_field_type == "Currency"
					else None,
				},
				{
					"label": _("Reserved Stock"),
					"fieldname": "reserved_stock",
					"fieldtype": "Float",
					"width": 80,
					"convertible": "qty",
				},
				{
					"label": _("Company"),
					"fieldname": "company",
					"fieldtype": "Link",
					"options": "Company",
					"width": 100,
				},
			]
		)

		if self.filters.get("show_stock_ageing_data"):
			columns += [
				{"label": _("Average Age"), "fieldname": "average_age", "width": 100},
				{"label": _("Earliest Age"), "fieldname": "earliest_age", "width": 100},
				{"label": _("Latest Age"), "fieldname": "latest_age", "width": 100},
			]

		if self.filters.get("show_variant_attributes"):
			columns += [
				{"label": att_name, "fieldname": att_name, "width": 100}
				for att_name in get_variants_attributes()
			]

		return columns

	def add_additional_uom_columns(self):
		if not self.filters.get("include_uom"):
			return

		conversion_factors = self.get_itemwise_conversion_factor()
		add_additional_uom_columns(self.columns, self.data, self.filters.include_uom, conversion_factors)

	def get_itemwise_conversion_factor(self):
		items = []
		if self.filters.item_code or self.filters.item_group:
			items = [d.item_code for d in self.data]

		table = frappe.qb.DocType("UOM Conversion Detail")
		query = (
			frappe.qb.from_(table)
			.select(
				table.conversion_factor,
				table.parent,
			)
			.where((table.parenttype == "Item") & (table.uom == self.filters.include_uom))
		)

		if items:
			query = query.where(table.parent.isin(items))

		result = query.run(as_dict=1)
		if not result:
			return {}

		return {d.parent: d.conversion_factor for d in result}

	def get_variant_values_for(self):
		"""Returns variant values for items."""
		attribute_map = {}
		items = []
		if self.filters.item_code or self.filters.item_group:
			items = [d.item_code for d in self.data]

		filters = {}
		if items:
			filters = {"parent": ("in", items)}

		attribute_info = frappe.get_all(
			"Item Variant Attribute",
			fields=["parent", "attribute", "attribute_value"],
			filters=filters,
		)

		for attr in attribute_info:
			attribute_map.setdefault(attr["parent"], {})
			attribute_map[attr["parent"]].update({attr["attribute"]: attr["attribute_value"]})

		return attribute_map

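	# Vouchers explicitly flagged as opening entries (Stock Entries with
	# is_opening = "Yes", Stock Reconciliations with purpose "Opening Stock")
	# count towards the opening balance even when posted inside the period.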
	def get_opening_vouchers(self):
		opening_vouchers = {"Stock Entry": [], "Stock Reconciliation": []}

		se = frappe.qb.DocType("Stock Entry")
		sr = frappe.qb.DocType("Stock Reconciliation")

		vouchers_data = (
			frappe.qb.from_(
				(
					frappe.qb.from_(se)
					.select(se.name, Coalesce("Stock Entry").as_("voucher_type"))
					.where((se.docstatus == 1) & (se.posting_date <= self.to_date) & (se.is_opening == "Yes"))
				)
				+ (
					frappe.qb.from_(sr)
					.select(sr.name, Coalesce("Stock Reconciliation").as_("voucher_type"))
					.where(
						(sr.docstatus == 1) & (sr.posting_date <= self.to_date) & (sr.purpose == "Opening Stock")
					)
				)
			).select("voucher_type", "name")
		).run(as_dict=True)

		if vouchers_data:
			for d in vouchers_data:
				opening_vouchers[d.voucher_type].append(d.name)

		return opening_vouchers

	@staticmethod
	def get_inventory_dimension_fields():
		return [dimension.fieldname for dimension in get_inventory_dimensions()]

	@staticmethod
	def get_opening_fifo_queue(report_data):
		opening_fifo_queue = report_data.get("opening_fifo_queue") or []
		for row in opening_fifo_queue:
			row[1] = getdate(row[1])

		return opening_fifo_queue


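# Rows whose numeric columns all round to zero at the report's float precision
# carry no information and are removed from the map in place.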
def filter_items_with_no_transactions(
	iwb_map, float_precision: float, inventory_dimensions: list = None
):
	pop_keys = []
	for group_by_key in iwb_map:
		qty_dict = iwb_map[group_by_key]

		no_transactions = True
		for key, val in qty_dict.items():
			if inventory_dimensions and key in inventory_dimensions:
				continue

			if key in [
				"item_code",
				"warehouse",
				"item_name",
				"item_group",
				"project",
				"stock_uom",
				"company",
				"opening_fifo_queue",
			]:
				continue

			val = flt(val, float_precision)
			qty_dict[key] = val
			if key != "val_rate" and val:
				no_transactions = False

		if no_transactions:
			pop_keys.append(group_by_key)

	for key in pop_keys:
		iwb_map.pop(key)

	return iwb_map


def get_variants_attributes() -> List[str]:
	"""Return all item variant attributes."""
	return frappe.get_all("Item Attribute", pluck="name")
|