Merge pull request #32161 from s-aga-r/refactor/report/process-loss-report

refactor: rewrite Process Loss Report queries in QB
Sagar Sharma 2022-09-11 19:47:38 +05:30 committed by GitHub
commit ada1ab3509
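
For context, the old report built a raw SQL string for frappe.db.sql and spliced optional filter conditions into it with str.format; the rewrite composes the same query with the Frappe Query Builder (frappe.qb, a pypika-based API). A minimal sketch of the QB pattern, using an illustrative DocType and fields rather than anything taken from this report:

import frappe
from frappe.query_builder.functions import Sum

item = frappe.qb.DocType("Item")  # illustrative DocType, not part of this commit

query = (
    frappe.qb.from_(item)
    .select(item.item_group, Sum(item.opening_stock).as_("total_opening"))
    .where(item.disabled == 0)
    .groupby(item.item_group)
)
rows = query.run(as_dict=True)  # renders the SQL and executes it via frappe.db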


@@ -5,6 +5,7 @@ from typing import Dict, List, Tuple
import frappe
from frappe import _
from frappe.query_builder.functions import Sum

Filters = frappe._dict
Row = frappe._dict
@@ -14,15 +15,50 @@ QueryArgs = Dict[str, str]

def execute(filters: Filters) -> Tuple[Columns, Data]:
    filters = frappe._dict(filters or {})
    columns = get_columns()
    data = get_data(filters)
    return columns, data


def get_data(filters: Filters) -> Data:
    query_args = get_query_args(filters)
    data = run_query(query_args)
    wo = frappe.qb.DocType("Work Order")
    se = frappe.qb.DocType("Stock Entry")
    query = (
        frappe.qb.from_(wo)
        .inner_join(se)
        .on(wo.name == se.work_order)
        .select(
            wo.name,
            wo.status,
            wo.production_item,
            wo.qty,
            wo.produced_qty,
            wo.process_loss_qty,
            (wo.produced_qty - wo.process_loss_qty).as_("actual_produced_qty"),
            Sum(se.total_incoming_value).as_("total_fg_value"),
            Sum(se.total_outgoing_value).as_("total_rm_value"),
        )
        .where(
            (wo.process_loss_qty > 0)
            & (wo.company == filters.company)
            & (se.docstatus == 1)
            & (se.posting_date.between(filters.from_date, filters.to_date))
        )
        .groupby(se.work_order)
    )

    if "item" in filters:
        query = query.where(wo.production_item == filters.item)
    if "work_order" in filters:
        query = query.where(wo.name == filters.work_order)

    data = query.run(as_dict=True)

    update_data_with_total_pl_value(data)

    return data
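
A note on the optional filters in get_data(): with the pypika-based builder behind frappe.qb, .where() typically returns a modified copy rather than mutating the query in place, so the result has to be assigned back to query; calling query.where(...) on its own would silently drop the condition. An illustrative way to sanity-check the final statement before executing it:

# Illustrative only: stringifying the builder renders the full SELECT,
# including any conditionally added WHERE clauses.
print(str(query))
data = query.run(as_dict=True)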

@@ -67,54 +103,7 @@ def get_columns() -> Columns:
    ]


def get_query_args(filters: Filters) -> QueryArgs:
    query_args = {}
    query_args.update(filters)
    query_args.update(get_filter_conditions(filters))
    return query_args

def run_query(query_args: QueryArgs) -> Data:
    return frappe.db.sql(
        """
        SELECT
            wo.name, wo.status, wo.production_item, wo.qty,
            wo.produced_qty, wo.process_loss_qty,
            (wo.produced_qty - wo.process_loss_qty) as actual_produced_qty,
            sum(se.total_incoming_value) as total_fg_value,
            sum(se.total_outgoing_value) as total_rm_value
        FROM
            `tabWork Order` wo INNER JOIN `tabStock Entry` se
            ON wo.name=se.work_order
        WHERE
            process_loss_qty > 0
            AND wo.company = %(company)s
            AND se.docstatus = 1
            AND se.posting_date BETWEEN %(from_date)s AND %(to_date)s
            {item_filter}
            {work_order_filter}
        GROUP BY
            se.work_order
        """.format(
            **query_args
        ),
        query_args,
        as_dict=1,
    )

def update_data_with_total_pl_value(data: Data) -> None:
    for row in data:
        value_per_unit_fg = row["total_fg_value"] / row["actual_produced_qty"]
        row["total_pl_value"] = row["process_loss_qty"] * value_per_unit_fg
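
A worked example of the valuation above, with made-up figures rather than report output:

# Made-up numbers to illustrate update_data_with_total_pl_value:
row = {"total_fg_value": 900.0, "actual_produced_qty": 90.0, "process_loss_qty": 10.0}
value_per_unit_fg = row["total_fg_value"] / row["actual_produced_qty"]  # 10.0 per unit of finished goods
row["total_pl_value"] = row["process_loss_qty"] * value_per_unit_fg  # 100.0 of FG value attributed to process loss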

def get_filter_conditions(filters: Filters) -> QueryArgs:
    filter_conditions = dict(item_filter="", work_order_filter="")
    if "item" in filters:
        production_item = filters.get("item")
        filter_conditions.update({"item_filter": f"AND wo.production_item='{production_item}'"})
    if "work_order" in filters:
        work_order_name = filters.get("work_order")
        filter_conditions.update({"work_order_filter": f"AND wo.name='{work_order_name}'"})
    return filter_conditions
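
For comparison with the removed helper above, which interpolates filter values directly into the SQL text via f-strings: an alternative sketch (not what this commit does) would keep the values as bound parameters and let frappe.db.sql substitute them, since query_args already carries the raw filter values:

# Sketch only: parameterize the optional filters instead of interpolating values.
filter_conditions = dict(item_filter="", work_order_filter="")
if "item" in filters:
    filter_conditions["item_filter"] = "AND wo.production_item = %(item)s"
if "work_order" in filters:
    filter_conditions["work_order_filter"] = "AND wo.name = %(work_order)s"
# After the str.format step in run_query, %(item)s and %(work_order)s remain in the
# statement and are bound when frappe.db.sql executes it with query_args.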