# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint, getdate, now
from erpnext.stock.report.stock_ledger.stock_ledger import get_item_group_condition

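# Query-report entry point: the report framework calls execute(filters) and
# renders the (columns, data) tuple it returns. Illustrative filter payload
# (keys taken from the lookups below, values hypothetical):
#   {"from_date": "2018-04-01", "to_date": "2018-04-30",
#    "item_group": "Products", "show_variant_attributes": 1}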
def execute(filters=None):
	if not filters: filters = {}

	validate_filters(filters)

	columns = get_columns()
	items = get_items(filters)
	sle = get_stock_ledger_entries(filters, items)
	iwb_map = get_item_warehouse_map(filters, sle)
	item_map = get_item_details(items, sle, filters)
	item_reorder_detail_map = get_item_reorder_details(item_map.keys())

	data = []
	for (company, item, warehouse) in sorted(iwb_map):
		qty_dict = iwb_map[(company, item, warehouse)]
		item_reorder_level = 0
		item_reorder_qty = 0
		if item + warehouse in item_reorder_detail_map:
			item_reorder_level = item_reorder_detail_map[item + warehouse]["warehouse_reorder_level"]
			item_reorder_qty = item_reorder_detail_map[item + warehouse]["warehouse_reorder_qty"]

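		# Keep these values in the same order as the column labels returned by get_columns().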
		report_data = [item, item_map[item]["item_name"],
			item_map[item]["item_group"],
			item_map[item]["brand"],
			item_map[item]["description"], warehouse,
			item_map[item]["stock_uom"], qty_dict.opening_qty,
			qty_dict.opening_val, qty_dict.in_qty,
			qty_dict.in_val, qty_dict.out_qty,
			qty_dict.out_val, qty_dict.bal_qty,
			qty_dict.bal_val, qty_dict.val_rate,
			item_reorder_level,
			item_reorder_qty,
			company
		]

		if filters.get('show_variant_attributes', 0) == 1:
			variants_attributes = get_variants_attributes()
			report_data += [item_map[item].get(i) for i in variants_attributes]

		data.append(report_data)

	if filters.get('show_variant_attributes', 0) == 1:
		columns += ["{}:Data:100".format(i) for i in get_variants_attributes()]

	return columns, data

def get_columns():
	"""Return report columns in the "Label:Fieldtype/Options:Width" format."""

	columns = [
		_("Item")+":Link/Item:100",
		_("Item Name")+"::150",
		_("Item Group")+":Link/Item Group:100",
		_("Brand")+":Link/Brand:90",
		_("Description")+"::140",
		_("Warehouse")+":Link/Warehouse:100",
		_("Stock UOM")+":Link/UOM:90",
		_("Opening Qty")+":Float:100",
		_("Opening Value")+":Float:110",
		_("In Qty")+":Float:80",
		_("In Value")+":Float:80",
		_("Out Qty")+":Float:80",
		_("Out Value")+":Float:80",
		_("Balance Qty")+":Float:100",
		_("Balance Value")+":Float:100",
		_("Valuation Rate")+":Float:90",
		_("Reorder Level")+":Float:80",
		_("Reorder Qty")+":Float:80",
		_("Company")+":Link/Company:100"
	]

	return columns

def get_conditions(filters):
	conditions = ""
	if not filters.get("from_date"):
		frappe.throw(_("'From Date' is required"))

	if filters.get("to_date"):
		conditions += " and sle.posting_date <= '%s'" % frappe.db.escape(filters.get("to_date"))
	else:
		frappe.throw(_("'To Date' is required"))

	if filters.get("warehouse"):
		warehouse_details = frappe.db.get_value("Warehouse",
			filters.get("warehouse"), ["lft", "rgt"], as_dict=1)
		if warehouse_details:
			conditions += " and exists (select name from `tabWarehouse` wh \
				where wh.lft >= %s and wh.rgt <= %s and sle.warehouse = wh.name)"%(warehouse_details.lft,
				warehouse_details.rgt)

	return conditions

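# get_conditions() returns a SQL fragment that is appended to the WHERE clause
# below, e.g. (illustrative) " and sle.posting_date <= '2018-04-30'", plus an
# exists-subquery when a warehouse (or warehouse group) is selected.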
def get_stock_ledger_entries(filters, items):
	item_conditions_sql = ''
	if items:
		item_conditions_sql = ' and sle.item_code in ({})'\
			.format(', '.join(['"' + frappe.db.escape(i, percent=False) + '"' for i in items]))

	conditions = get_conditions(filters)

	return frappe.db.sql("""
		select
			sle.item_code, warehouse, sle.posting_date, sle.actual_qty, sle.valuation_rate,
			sle.company, sle.voucher_type, sle.qty_after_transaction, sle.stock_value_difference
		from
			`tabStock Ledger Entry` sle force index (posting_sort_index)
		where sle.docstatus < 2 %s %s
		order by sle.posting_date, sle.posting_time, sle.name""" %
		(item_conditions_sql, conditions), as_dict=1)

def get_item_warehouse_map(filters, sle):
	iwb_map = {}
	from_date = getdate(filters.get("from_date"))
	to_date = getdate(filters.get("to_date"))

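	# Walk the ledger chronologically: entries dated before from_date roll into
	# the opening balance, entries within the period are split into in/out
	# movement, and the running balance accumulates across all entries.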
	for d in sle:
		key = (d.company, d.item_code, d.warehouse)
		if key not in iwb_map:
			iwb_map[key] = frappe._dict({
				"opening_qty": 0.0, "opening_val": 0.0,
				"in_qty": 0.0, "in_val": 0.0,
				"out_qty": 0.0, "out_val": 0.0,
				"bal_qty": 0.0, "bal_val": 0.0,
				"val_rate": 0.0
			})

		qty_dict = iwb_map[(d.company, d.item_code, d.warehouse)]

		if d.voucher_type == "Stock Reconciliation":
			# Stock Reconciliation records an absolute quantity, so derive the
			# movement as the difference from the running balance.
			qty_diff = flt(d.qty_after_transaction) - qty_dict.bal_qty
		else:
			qty_diff = flt(d.actual_qty)

		value_diff = flt(d.stock_value_difference)

		if d.posting_date < from_date:
			qty_dict.opening_qty += qty_diff
			qty_dict.opening_val += value_diff
		elif d.posting_date >= from_date and d.posting_date <= to_date:
			if qty_diff > 0:
				qty_dict.in_qty += qty_diff
				qty_dict.in_val += value_diff
			else:
				qty_dict.out_qty += abs(qty_diff)
				qty_dict.out_val += abs(value_diff)

		qty_dict.val_rate = d.valuation_rate
		qty_dict.bal_qty += qty_diff
		qty_dict.bal_val += value_diff

	iwb_map = filter_items_with_no_transactions(iwb_map)

	return iwb_map

def filter_items_with_no_transactions(iwb_map):
	"""Round values to the report precision and drop rows whose quantities and values (valuation rate aside) are all zero."""
	for (company, item, warehouse) in sorted(iwb_map):
		qty_dict = iwb_map[(company, item, warehouse)]

		no_transactions = True
		float_precision = cint(frappe.db.get_default("float_precision")) or 3
		for key, val in qty_dict.items():
			val = flt(val, float_precision)
			qty_dict[key] = val
			if key != "val_rate" and val:
				no_transactions = False

		if no_transactions:
			iwb_map.pop((company, item, warehouse))

	return iwb_map

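# When no item-level filter (item_code, brand, item_group) is set, get_items()
# returns an empty list, which the callers treat as "no item restriction".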
def get_items(filters):
	conditions = []
	if filters.get("item_code"):
		conditions.append("item.name=%(item_code)s")
	else:
		if filters.get("brand"):
			conditions.append("item.brand=%(brand)s")
		if filters.get("item_group"):
			conditions.append(get_item_group_condition(filters.get("item_group")))

	items = []
	if conditions:
		items = frappe.db.sql_list("""select name from `tabItem` item where {}"""
			.format(" and ".join(conditions)), filters)

	return items

def get_item_details(items, sle, filters):
	item_details = {}
	if not items:
		items = list(set([d.item_code for d in sle]))

	for item in frappe.db.sql("""
		select name, item_name, description, item_group, brand, stock_uom
		from `tabItem`
		where name in ({0})
		""".format(', '.join(['"' + frappe.db.escape(i, percent=False) + '"' for i in items])), as_dict=1):
		item_details.setdefault(item.name, item)

	if filters.get('show_variant_attributes', 0) == 1:
		variant_values = get_variant_values_for(item_details.keys())
		# dict.update() returns None, so keep "or v" to retain the merged dict.
		item_details = {k: v.update(variant_values.get(k, {})) or v for k, v in item_details.iteritems()}

	return item_details

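# Reorder settings are keyed by the concatenation item_code + warehouse,
# matching the "item + warehouse" lookup in execute().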
def get_item_reorder_details(items):
	item_reorder_details = frappe.db.sql("""
		select parent, warehouse, warehouse_reorder_qty, warehouse_reorder_level
		from `tabItem Reorder`
		where parent in ({0})
		""".format(', '.join(['"' + frappe.db.escape(i, percent=False) + '"' for i in items])), as_dict=1)

	return dict((d.parent + d.warehouse, d) for d in item_reorder_details)

def validate_filters(filters):
	"""Insist on an Item or Warehouse filter when the stock ledger is very large."""
	if not (filters.get("item_code") or filters.get("warehouse")):
		sle_count = flt(frappe.db.sql("""select count(name) from `tabStock Ledger Entry`""")[0][0])
		if sle_count > 500000:
			frappe.throw(_("Please set filter based on Item or Warehouse"))

def get_variants_attributes():
	'''Return all item variant attributes.'''
	return [i.name for i in frappe.get_all('Item Attribute')]

def get_variant_values_for(items):
	'''Return variant attribute values as {item_code: {attribute: attribute_value}}.'''
	attribute_map = {}
	for attr in frappe.db.sql('''select parent, attribute, attribute_value
		from `tabItem Variant Attribute` where parent in (%s)
		''' % ", ".join(["%s"] * len(items)), tuple(items), as_dict=1):
		attribute_map.setdefault(attr['parent'], {})
		attribute_map[attr['parent']].update({attr['attribute']: attr['attribute_value']})

	return attribute_map