perf: new column Posting Datetime in SLE to optimize stock ledger related queries (backport #39800) (#40004)

* perf: new column posting datetime in SLE to optimize stock ledger related queries

(cherry picked from commit d80ca523a43e9d1073dc008ead7d140fb0811a89)

# Conflicts:
#	erpnext/stock/doctype/purchase_receipt/test_purchase_receipt.py

* test: add test cases for clashing timestamp entries

(cherry picked from commit f04676aaed69d669e43099077c85d93fedbf7e07)

* chore: remove microsecond from posting_datetime

(cherry picked from commit a73ba2c0d26b9d27bb5a75bc6c9739e49035f266)

* chore: fix conflicts

---------

Co-authored-by: Rohit Waghchaure <rohitw1991@gmail.com>
mergify[bot] 2024-02-21 21:14:28 +05:30 committed by GitHub
parent 66a05087b8
commit b9181e85dc
21 changed files with 298 additions and 141 deletions
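In short, queries that previously sorted or filtered Stock Ledger Entries on the (posting_date, posting_time) pair, often via timestamp()/CombineDatetime(), now use the single denormalised posting_datetime column, which the new (posting_datetime, creation) index can serve directly. A minimal sketch of the resulting query pattern, assuming the frappe.qb query builder and a hypothetical helper name latest_sle_for:

import frappe

def latest_sle_for(item_code, warehouse):
    # Illustrative only: order on the single posting_datetime column instead of
    # CombineDatetime(posting_date, posting_time), as the changes below do.
    sle = frappe.qb.DocType("Stock Ledger Entry")
    return (
        frappe.qb.from_(sle)
        .select(sle.name, sle.qty_after_transaction, sle.posting_datetime)
        .where((sle.item_code == item_code) & (sle.warehouse == warehouse) & (sle.is_cancelled == 0))
        .orderby(sle.posting_datetime, order=frappe.qb.desc)
        .orderby(sle.creation, order=frappe.qb.desc)
        .limit(1)
    ).run(as_dict=True)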


@ -975,7 +975,7 @@ class GrossProfitGenerator(object):
& (sle.is_cancelled == 0)
)
.orderby(sle.item_code)
.orderby(sle.warehouse, sle.posting_date, sle.posting_time, sle.creation, order=Order.desc)
.orderby(sle.warehouse, sle.posting_datetime, sle.creation, order=Order.desc)
.run(as_dict=True)
)


@ -970,46 +970,6 @@ def get_currency_precision():
return precision
def get_stock_rbnb_difference(posting_date, company):
stock_items = frappe.db.sql_list(
"""select distinct item_code
from `tabStock Ledger Entry` where company=%s""",
company,
)
pr_valuation_amount = frappe.db.sql(
"""
select sum(pr_item.valuation_rate * pr_item.qty * pr_item.conversion_factor)
from `tabPurchase Receipt Item` pr_item, `tabPurchase Receipt` pr
where pr.name = pr_item.parent and pr.docstatus=1 and pr.company=%s
and pr.posting_date <= %s and pr_item.item_code in (%s)"""
% ("%s", "%s", ", ".join(["%s"] * len(stock_items))),
tuple([company, posting_date] + stock_items),
)[0][0]
pi_valuation_amount = frappe.db.sql(
"""
select sum(pi_item.valuation_rate * pi_item.qty * pi_item.conversion_factor)
from `tabPurchase Invoice Item` pi_item, `tabPurchase Invoice` pi
where pi.name = pi_item.parent and pi.docstatus=1 and pi.company=%s
and pi.posting_date <= %s and pi_item.item_code in (%s)"""
% ("%s", "%s", ", ".join(["%s"] * len(stock_items))),
tuple([company, posting_date] + stock_items),
)[0][0]
# Balance should be
stock_rbnb = flt(pr_valuation_amount, 2) - flt(pi_valuation_amount, 2)
# Balance as per system
stock_rbnb_account = "Stock Received But Not Billed - " + frappe.get_cached_value(
"Company", company, "abbr"
)
sys_bal = get_balance_on(stock_rbnb_account, posting_date, in_account_currency=False)
# Amount should be credited
return flt(stock_rbnb) + flt(sys_bal)
def get_held_invoices(party_type, party):
"""
Returns a list of names of Purchase Invoices for the given party that are on hold
@ -1412,8 +1372,7 @@ def sort_stock_vouchers_by_posting_date(
.select(sle.voucher_type, sle.voucher_no, sle.posting_date, sle.posting_time, sle.creation)
.where((sle.is_cancelled == 0) & (sle.voucher_no.isin(voucher_nos)))
.groupby(sle.voucher_type, sle.voucher_no)
.orderby(sle.posting_date)
.orderby(sle.posting_time)
.orderby(sle.posting_datetime)
.orderby(sle.creation)
).run(as_dict=True)
sorted_vouchers = [(sle.voucher_type, sle.voucher_no) for sle in sles]


@ -1071,8 +1071,7 @@ def get_valuation_rate(data):
frappe.qb.from_(sle)
.select(sle.valuation_rate)
.where((sle.item_code == item_code) & (sle.valuation_rate > 0) & (sle.is_cancelled == 0))
.orderby(sle.posting_date, order=frappe.qb.desc)
.orderby(sle.posting_time, order=frappe.qb.desc)
.orderby(sle.posting_datetime, order=frappe.qb.desc)
.orderby(sle.creation, order=frappe.qb.desc)
.limit(1)
).run(as_dict=True)


@ -58,7 +58,7 @@ def get_data(filters):
query_filters["creation"] = ("between", [filters.get("from_date"), filters.get("to_date")])
data = frappe.get_all(
"Work Order", fields=fields, filters=query_filters, order_by="planned_start_date asc", debug=1
"Work Order", fields=fields, filters=query_filters, order_by="planned_start_date asc"
)
res = []


@ -264,6 +264,7 @@ execute:frappe.rename_doc("Report", "TDS Payable Monthly", "Tax Withholding Deta
[post_model_sync]
execute:frappe.delete_doc_if_exists('Workspace', 'ERPNext Integrations Settings')
erpnext.patches.v14_0.update_posting_datetime_and_dropped_indexes
erpnext.patches.v14_0.rename_ongoing_status_in_sla_documents
erpnext.patches.v14_0.delete_shopify_doctypes
erpnext.patches.v14_0.delete_healthcare_doctypes


@ -0,0 +1,19 @@
import frappe
def execute():
frappe.db.sql(
"""
UPDATE `tabStock Ledger Entry`
SET posting_datetime = timestamp(posting_date, posting_time)
"""
)
drop_indexes()
def drop_indexes():
if not frappe.db.has_index("tabStock Ledger Entry", "posting_sort_index"):
return
frappe.db.sql_ddl("ALTER TABLE `tabStock Ledger Entry` DROP INDEX `posting_sort_index`")
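The patch above backfills posting_datetime from the existing posting_date/posting_time columns and drops the old posting_sort_index. A hypothetical post-migration sanity check (not part of this commit) could confirm no rows were left without a value:

import frappe

# Hypothetical check after the patch runs: every Stock Ledger Entry row
# should now carry a posting_datetime value.
missing = frappe.db.count("Stock Ledger Entry", {"posting_datetime": ("is", "not set")})
assert missing == 0, f"{missing} Stock Ledger Entry rows still lack posting_datetime"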


@ -2304,6 +2304,95 @@ class TestPurchaseReceipt(FrappeTestCase):
frappe.db.set_single_value("Stock Settings", "use_serial_batch_fields", 0)
def test_sle_qty_after_transaction(self):
item = make_item(
"_Test Item Qty After Transaction",
properties={"is_stock_item": 1, "valuation_method": "FIFO"},
).name
posting_date = today()
posting_time = nowtime()
# Step 1: Create Purchase Receipt
pr = make_purchase_receipt(
item_code=item,
qty=1,
rate=100,
posting_date=posting_date,
posting_time=posting_time,
do_not_save=1,
)
for i in range(9):
pr.append(
"items",
{
"item_code": item,
"qty": 1,
"rate": 100,
"warehouse": pr.items[0].warehouse,
"cost_center": pr.items[0].cost_center,
"expense_account": pr.items[0].expense_account,
"uom": pr.items[0].uom,
"stock_uom": pr.items[0].stock_uom,
"conversion_factor": pr.items[0].conversion_factor,
},
)
self.assertEqual(len(pr.items), 10)
pr.save()
pr.submit()
data = frappe.get_all(
"Stock Ledger Entry",
fields=["qty_after_transaction", "creation", "posting_datetime"],
filters={"voucher_no": pr.name, "is_cancelled": 0},
order_by="creation",
)
for index, d in enumerate(data):
self.assertEqual(d.qty_after_transaction, 1 + index)
# Step 2: Create a second Purchase Receipt with the same posting date and time
pr = make_purchase_receipt(
item_code=item,
qty=1,
rate=100,
posting_date=posting_date,
posting_time=posting_time,
do_not_save=1,
)
for i in range(9):
pr.append(
"items",
{
"item_code": item,
"qty": 1,
"rate": 100,
"warehouse": pr.items[0].warehouse,
"cost_center": pr.items[0].cost_center,
"expense_account": pr.items[0].expense_account,
"uom": pr.items[0].uom,
"stock_uom": pr.items[0].stock_uom,
"conversion_factor": pr.items[0].conversion_factor,
},
)
self.assertEqual(len(pr.items), 10)
pr.save()
pr.submit()
data = frappe.get_all(
"Stock Ledger Entry",
fields=["qty_after_transaction", "creation", "posting_datetime"],
filters={"voucher_no": pr.name, "is_cancelled": 0},
order_by="creation",
)
for index, d in enumerate(data):
self.assertEqual(d.qty_after_transaction, 11 + index)
def prepare_data_for_internal_transfer():
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_internal_supplier


@ -5,7 +5,7 @@ import json
import frappe
from frappe.tests.utils import FrappeTestCase, change_settings
from frappe.utils import add_days, add_to_date, flt, nowdate, nowtime, today
from frappe.utils import flt, nowtime, today
from erpnext.stock.doctype.item.test_item import make_item
from erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle import (
@ -191,6 +191,7 @@ class TestSerialandBatchBundle(FrappeTestCase):
doc.flags.ignore_links = True
doc.flags.ignore_validate = True
doc.submit()
doc.reload()
bundle_doc = make_serial_batch_bundle(
{


@ -1897,6 +1897,7 @@ class StockEntry(StockController):
return
id = create_serial_and_batch_bundle(
self,
row,
frappe._dict(
{
@ -2167,7 +2168,7 @@ class StockEntry(StockController):
"to_warehouse": "",
"qty": qty,
"item_name": item.item_name,
"serial_and_batch_bundle": create_serial_and_batch_bundle(row, item, "Outward"),
"serial_and_batch_bundle": create_serial_and_batch_bundle(self, row, item, "Outward"),
"description": item.description,
"stock_uom": item.stock_uom,
"expense_account": item.expense_account,
@ -2545,6 +2546,7 @@ class StockEntry(StockController):
row = frappe._dict({"serial_nos": serial_nos[0 : cint(d.qty)]})
id = create_serial_and_batch_bundle(
self,
row,
frappe._dict(
{
@ -3068,7 +3070,7 @@ def get_stock_entry_data(work_order):
return data
def create_serial_and_batch_bundle(row, child, type_of_transaction=None):
def create_serial_and_batch_bundle(parent_doc, row, child, type_of_transaction=None):
item_details = frappe.get_cached_value(
"Item", child.item_code, ["has_serial_no", "has_batch_no"], as_dict=1
)
@ -3086,6 +3088,8 @@ def create_serial_and_batch_bundle(row, child, type_of_transaction=None):
"item_code": child.item_code,
"warehouse": child.warehouse,
"type_of_transaction": type_of_transaction,
"posting_date": parent_doc.posting_date,
"posting_time": parent_doc.posting_time,
}
)


@ -1602,24 +1602,22 @@ class TestStockEntry(FrappeTestCase):
item_code = "Test Negative Item - 001"
item_doc = create_item(item_code=item_code, is_stock_item=1, valuation_rate=10)
make_stock_entry(
se1 = make_stock_entry(
item_code=item_code,
posting_date=add_days(today(), -3),
posting_time="00:00:00",
purpose="Material Receipt",
target="_Test Warehouse - _TC",
qty=10,
to_warehouse="_Test Warehouse - _TC",
do_not_save=True,
)
make_stock_entry(
se2 = make_stock_entry(
item_code=item_code,
posting_date=today(),
posting_time="00:00:00",
purpose="Material Receipt",
source="_Test Warehouse - _TC",
qty=8,
from_warehouse="_Test Warehouse - _TC",
do_not_save=True,
)
sr_doc = create_stock_reconciliation(


@ -11,6 +11,7 @@
"warehouse",
"posting_date",
"posting_time",
"posting_datetime",
"is_adjustment_entry",
"auto_created_serial_and_batch_bundle",
"column_break_6",
@ -100,7 +101,6 @@
"oldfieldtype": "Date",
"print_width": "100px",
"read_only": 1,
"search_index": 1,
"width": "100px"
},
{
@ -253,7 +253,6 @@
"options": "Company",
"print_width": "150px",
"read_only": 1,
"search_index": 1,
"width": "150px"
},
{
@ -348,6 +347,11 @@
"fieldname": "auto_created_serial_and_batch_bundle",
"fieldtype": "Check",
"label": "Auto Created Serial and Batch Bundle"
},
{
"fieldname": "posting_datetime",
"fieldtype": "Datetime",
"label": "Posting Datetime"
}
],
"hide_toolbar": 1,
@ -356,7 +360,7 @@
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2023-11-14 16:47:39.791967",
"modified": "2024-02-07 09:18:13.999231",
"modified_by": "Administrator",
"module": "Stock",
"name": "Stock Ledger Entry",


@ -51,6 +51,7 @@ class StockLedgerEntry(Document):
item_code: DF.Link | None
outgoing_rate: DF.Currency
posting_date: DF.Date | None
posting_datetime: DF.Datetime | None
posting_time: DF.Time | None
project: DF.Link | None
qty_after_transaction: DF.Float
@ -92,6 +93,12 @@ class StockLedgerEntry(Document):
self.validate_with_last_transaction_posting_time()
self.validate_inventory_dimension_negative_stock()
def set_posting_datetime(self):
from erpnext.stock.utils import get_combine_datetime
self.posting_datetime = get_combine_datetime(self.posting_date, self.posting_time)
self.db_set("posting_datetime", self.posting_datetime)
def validate_inventory_dimension_negative_stock(self):
if self.is_cancelled:
return
@ -162,6 +169,7 @@ class StockLedgerEntry(Document):
return inv_dimension_dict
def on_submit(self):
self.set_posting_datetime()
self.check_stock_frozen_date()
# Added to handle few test cases where serial_and_batch_bundles are not required
@ -330,9 +338,7 @@ class StockLedgerEntry(Document):
def on_doctype_update():
frappe.db.add_index(
"Stock Ledger Entry", fields=["posting_date", "posting_time"], index_name="posting_sort_index"
)
frappe.db.add_index("Stock Ledger Entry", ["voucher_no", "voucher_type"])
frappe.db.add_index("Stock Ledger Entry", ["batch_no", "item_code", "warehouse"])
frappe.db.add_index("Stock Ledger Entry", ["warehouse", "item_code"], "item_warehouse")
frappe.db.add_index("Stock Ledger Entry", ["posting_datetime", "creation"])


@ -2,6 +2,7 @@
# See license.txt
import json
import time
from uuid import uuid4
import frappe
@ -1077,7 +1078,7 @@ class TestStockLedgerEntry(FrappeTestCase, StockTestMixin):
frappe.qb.from_(sle)
.select("qty_after_transaction")
.where((sle.item_code == item) & (sle.warehouse == warehouse) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.posting_datetime)
.orderby(sle.creation)
).run(pluck=True)
@ -1154,6 +1155,89 @@ class TestStockLedgerEntry(FrappeTestCase, StockTestMixin):
except Exception as e:
self.fail("Double processing of qty for clashing timestamp.")
def test_previous_sle_with_clashed_timestamp(self):
item = make_item().name
warehouse = "_Test Warehouse - _TC"
receipt1 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=100,
rate=10,
posting_date="2021-01-01",
posting_time="02:00:00",
)
time.sleep(3)
receipt2 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=5,
posting_date="2021-01-01",
rate=10,
posting_time="02:00:00.1234",
)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept1.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 100)
self.assertEqual(sle[0].actual_qty, 100)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept2.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 105)
self.assertEqual(sle[0].actual_qty, 5)
def test_backdated_sle_with_same_timestamp(self):
item = make_item().name
warehouse = "_Test Warehouse - _TC"
receipt1 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=5,
posting_date="2021-01-01",
rate=10,
posting_time="02:00:00.1234",
)
time.sleep(3)
# backdated entry with same timestamp but different ms part
receipt2 = make_stock_entry(
item_code=item,
to_warehouse=warehouse,
qty=100,
rate=10,
posting_date="2021-01-01",
posting_time="02:00:00",
)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept1.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 5)
self.assertEqual(sle[0].actual_qty, 5)
sle = frappe.get_all(
"Stock Ledger Entry",
filters={"voucher_no": reciept2.name},
fields=["qty_after_transaction", "actual_qty"],
)
self.assertEqual(sle[0].qty_after_transaction, 105)
self.assertEqual(sle[0].actual_qty, 100)
@change_settings("System Settings", {"float_precision": 3, "currency_precision": 2})
def test_transfer_invariants(self):
"""Extact stock value should be transferred."""


@ -7,7 +7,7 @@ import frappe
from frappe import _
from frappe.model.document import Document
from frappe.query_builder.functions import Sum
from frappe.utils import cint, flt
from frappe.utils import cint, flt, nowdate, nowtime
from erpnext.stock.utils import get_or_make_bin, get_stock_balance
@ -866,6 +866,8 @@ def get_ssb_bundle_for_voucher(sre: dict) -> object:
bundle = frappe.new_doc("Serial and Batch Bundle")
bundle.type_of_transaction = "Outward"
bundle.voucher_type = "Delivery Note"
bundle.posting_date = nowdate()
bundle.posting_time = nowtime()
for field in ("item_code", "warehouse", "has_serial_no", "has_batch_no"):
setattr(bundle, field, sre[field])


@ -5,7 +5,7 @@
import frappe
from frappe import _
from frappe.query_builder import Field
from frappe.query_builder.functions import CombineDatetime, Min
from frappe.query_builder.functions import Min
from frappe.utils import add_days, getdate, today
import erpnext
@ -75,7 +75,7 @@ def get_data(report_filters):
& (sle.company == report_filters.company)
& (sle.is_cancelled == 0)
)
.orderby(CombineDatetime(sle.posting_date, sle.posting_time), sle.creation)
.orderby(sle.posting_datetime, sle.creation)
).run(as_dict=True)
for d in data:


@ -213,13 +213,11 @@ def get_stock_ledger_entries(filters, items):
query = (
frappe.qb.from_(sle)
.force_index("posting_sort_index")
.left_join(sle2)
.on(
(sle.item_code == sle2.item_code)
& (sle.warehouse == sle2.warehouse)
& (sle.posting_date < sle2.posting_date)
& (sle.posting_time < sle2.posting_time)
& (sle.posting_datetime < sle2.posting_datetime)
& (sle.name < sle2.name)
)
.select(sle.item_code, sle.warehouse, sle.qty_after_transaction, sle.company)


@ -8,7 +8,7 @@ from typing import Any, Dict, List, Optional, TypedDict
import frappe
from frappe import _
from frappe.query_builder import Order
from frappe.query_builder.functions import Coalesce, CombineDatetime
from frappe.query_builder.functions import Coalesce
from frappe.utils import add_days, cint, date_diff, flt, getdate
from frappe.utils.nestedset import get_descendants_of
@ -300,7 +300,7 @@ class StockBalanceReport(object):
item_table.item_name,
)
.where((sle.docstatus < 2) & (sle.is_cancelled == 0))
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.posting_datetime)
.orderby(sle.creation)
.orderby(sle.actual_qty)
)


@ -345,7 +345,7 @@ def get_stock_ledger_entries(filters, items):
frappe.qb.from_(sle)
.select(
sle.item_code,
CombineDatetime(sle.posting_date, sle.posting_time).as_("date"),
sle.posting_datetime.as_("date"),
sle.warehouse,
sle.posting_date,
sle.posting_time,


@ -9,7 +9,7 @@ from typing import Optional, Set, Tuple
import frappe
from frappe import _, scrub
from frappe.model.meta import get_field_precision
from frappe.query_builder.functions import CombineDatetime, Sum
from frappe.query_builder.functions import Sum
from frappe.utils import (
cint,
cstr,
@ -33,6 +33,7 @@ from erpnext.stock.doctype.stock_reservation_entry.stock_reservation_entry impor
get_sre_reserved_serial_nos_details,
)
from erpnext.stock.utils import (
get_combine_datetime,
get_incoming_outgoing_rate_for_cancel,
get_incoming_rate,
get_or_make_bin,
@ -95,6 +96,7 @@ def make_sl_entries(sl_entries, allow_negative_stock=False, via_landed_cost_vouc
sle_doc = make_entry(sle, allow_negative_stock, via_landed_cost_voucher)
args = sle_doc.as_dict()
args["posting_datetime"] = get_combine_datetime(args.posting_date, args.posting_time)
if sle.get("voucher_type") == "Stock Reconciliation":
# preserve previous_qty_after_transaction for qty reposting
@ -616,12 +618,14 @@ class update_entries_after(object):
self.process_sle(sle)
def get_sle_against_current_voucher(self):
self.args["time_format"] = "%H:%i:%s"
self.args["posting_datetime"] = get_combine_datetime(
self.args.posting_date, self.args.posting_time
)
return frappe.db.sql(
"""
select
*, timestamp(posting_date, posting_time) as "timestamp"
*, posting_datetime as "timestamp"
from
`tabStock Ledger Entry`
where
@ -629,8 +633,7 @@ class update_entries_after(object):
and warehouse = %(warehouse)s
and is_cancelled = 0
and (
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) = time_format(%(posting_time)s, %(time_format)s)
posting_datetime = %(posting_datetime)s
)
order by
creation ASC
@ -1399,11 +1402,11 @@ class update_entries_after(object):
def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_voucher=False):
"""get stock ledger entries filtered by specific posting datetime conditions"""
args["time_format"] = "%H:%i:%s"
if not args.get("posting_date"):
args["posting_date"] = "1900-01-01"
if not args.get("posting_time"):
args["posting_time"] = "00:00"
args["posting_datetime"] = "1900-01-01 00:00:00"
if not args.get("posting_datetime"):
args["posting_datetime"] = get_combine_datetime(args["posting_date"], args["posting_time"])
voucher_condition = ""
if exclude_current_voucher:
@ -1412,23 +1415,20 @@ def get_previous_sle_of_current_voucher(args, operator="<", exclude_current_vouc
sle = frappe.db.sql(
"""
select *, timestamp(posting_date, posting_time) as "timestamp"
select *, posting_datetime as "timestamp"
from `tabStock Ledger Entry`
where item_code = %(item_code)s
and warehouse = %(warehouse)s
and is_cancelled = 0
{voucher_condition}
and (
posting_date < %(posting_date)s or
(
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) {operator} time_format(%(posting_time)s, %(time_format)s)
)
posting_datetime {operator} %(posting_datetime)s
)
order by timestamp(posting_date, posting_time) desc, creation desc
order by posting_datetime desc, creation desc
limit 1
for update""".format(
operator=operator, voucher_condition=voucher_condition
operator=operator,
voucher_condition=voucher_condition,
),
args,
as_dict=1,
@ -1469,9 +1469,7 @@ def get_stock_ledger_entries(
extra_cond=None,
):
"""get stock ledger entries filtered by specific posting datetime conditions"""
conditions = " and timestamp(posting_date, posting_time) {0} timestamp(%(posting_date)s, %(posting_time)s)".format(
operator
)
conditions = " and posting_datetime {0} %(posting_datetime)s".format(operator)
if previous_sle.get("warehouse"):
conditions += " and warehouse = %(warehouse)s"
elif previous_sle.get("warehouse_condition"):
@ -1497,9 +1495,11 @@ def get_stock_ledger_entries(
)
if not previous_sle.get("posting_date"):
previous_sle["posting_date"] = "1900-01-01"
if not previous_sle.get("posting_time"):
previous_sle["posting_time"] = "00:00"
previous_sle["posting_datetime"] = "1900-01-01 00:00:00"
else:
previous_sle["posting_datetime"] = get_combine_datetime(
previous_sle["posting_date"], previous_sle["posting_time"]
)
if operator in (">", "<=") and previous_sle.get("name"):
conditions += " and name!=%(name)s"
@ -1509,12 +1509,12 @@ def get_stock_ledger_entries(
return frappe.db.sql(
"""
select *, timestamp(posting_date, posting_time) as "timestamp"
select *, posting_datetime as "timestamp"
from `tabStock Ledger Entry`
where item_code = %%(item_code)s
and is_cancelled = 0
%(conditions)s
order by timestamp(posting_date, posting_time) %(order)s, creation %(order)s
order by posting_datetime %(order)s, creation %(order)s
%(limit)s %(for_update)s"""
% {
"conditions": conditions,
@ -1540,7 +1540,7 @@ def get_sle_by_voucher_detail_no(voucher_detail_no, excluded_sle=None):
"posting_date",
"posting_time",
"voucher_detail_no",
"timestamp(posting_date, posting_time) as timestamp",
"posting_datetime as timestamp",
],
as_dict=1,
)
@ -1552,13 +1552,10 @@ def get_batch_incoming_rate(
sle = frappe.qb.DocType("Stock Ledger Entry")
timestamp_condition = CombineDatetime(sle.posting_date, sle.posting_time) < CombineDatetime(
posting_date, posting_time
)
timestamp_condition = sle.posting_datetime < get_combine_datetime(posting_date, posting_time)
if creation:
timestamp_condition |= (
CombineDatetime(sle.posting_date, sle.posting_time)
== CombineDatetime(posting_date, posting_time)
sle.posting_datetime == get_combine_datetime(posting_date, posting_time)
) & (sle.creation < creation)
batch_details = (
@ -1639,7 +1636,7 @@ def get_valuation_rate(
AND valuation_rate >= 0
AND is_cancelled = 0
AND NOT (voucher_no = %s AND voucher_type = %s)
order by posting_date desc, posting_time desc, name desc limit 1""",
order by posting_datetime desc, name desc limit 1""",
(item_code, warehouse, voucher_no, voucher_type),
):
return flt(last_valuation_rate[0][0])
@ -1698,7 +1695,7 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
datetime_limit_condition = ""
qty_shift = args.actual_qty
args["time_format"] = "%H:%i:%s"
args["posting_datetime"] = get_combine_datetime(args["posting_date"], args["posting_time"])
# find difference/shift in qty caused by stock reconciliation
if args.voucher_type == "Stock Reconciliation":
@ -1708,8 +1705,6 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
next_stock_reco_detail = get_next_stock_reco(args)
if next_stock_reco_detail:
detail = next_stock_reco_detail[0]
# add condition to update SLEs before this date & time
datetime_limit_condition = get_datetime_limit_condition(detail)
frappe.db.sql(
@ -1722,13 +1717,9 @@ def update_qty_in_future_sle(args, allow_negative_stock=False):
and voucher_no != %(voucher_no)s
and is_cancelled = 0
and (
posting_date > %(posting_date)s or
(
posting_date = %(posting_date)s and
time_format(posting_time, %(time_format)s) > time_format(%(posting_time)s, %(time_format)s)
)
posting_datetime > %(posting_datetime)s
)
{datetime_limit_condition}
{datetime_limit_condition}
""",
args,
)
@ -1785,20 +1776,11 @@ def get_next_stock_reco(kwargs):
& (sle.voucher_no != kwargs.get("voucher_no"))
& (sle.is_cancelled == 0)
& (
(
CombineDatetime(sle.posting_date, sle.posting_time)
> CombineDatetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
| (
(
CombineDatetime(sle.posting_date, sle.posting_time)
== CombineDatetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
& (sle.creation > kwargs.get("creation"))
)
sle.posting_datetime
>= get_combine_datetime(kwargs.get("posting_date"), kwargs.get("posting_time"))
)
)
.orderby(CombineDatetime(sle.posting_date, sle.posting_time))
.orderby(sle.posting_datetime)
.orderby(sle.creation)
.limit(1)
)
@ -1810,11 +1792,13 @@ def get_next_stock_reco(kwargs):
def get_datetime_limit_condition(detail):
posting_datetime = get_combine_datetime(detail.posting_date, detail.posting_time)
return f"""
and
(timestamp(posting_date, posting_time) < timestamp('{detail.posting_date}', '{detail.posting_time}')
(posting_datetime < '{posting_datetime}'
or (
timestamp(posting_date, posting_time) = timestamp('{detail.posting_date}', '{detail.posting_time}')
posting_datetime = '{posting_datetime}'
and creation < '{detail.creation}'
)
)"""
@ -1888,10 +1872,10 @@ def get_future_sle_with_negative_qty(args):
item_code = %(item_code)s
and warehouse = %(warehouse)s
and voucher_no != %(voucher_no)s
and timestamp(posting_date, posting_time) >= timestamp(%(posting_date)s, %(posting_time)s)
and posting_datetime >= %(posting_datetime)s
and is_cancelled = 0
and qty_after_transaction < 0
order by timestamp(posting_date, posting_time) asc
order by posting_datetime asc
limit 1
""",
args,
@ -1904,20 +1888,20 @@ def get_future_sle_with_negative_batch_qty(args):
"""
with batch_ledger as (
select
posting_date, posting_time, voucher_type, voucher_no,
sum(actual_qty) over (order by posting_date, posting_time, creation) as cumulative_total
posting_date, posting_time, posting_datetime, voucher_type, voucher_no,
sum(actual_qty) over (order by posting_datetime, creation) as cumulative_total
from `tabStock Ledger Entry`
where
item_code = %(item_code)s
and warehouse = %(warehouse)s
and batch_no=%(batch_no)s
and is_cancelled = 0
order by posting_date, posting_time, creation
order by posting_datetime, creation
)
select * from batch_ledger
where
cumulative_total < 0.0
and timestamp(posting_date, posting_time) >= timestamp(%(posting_date)s, %(posting_time)s)
and posting_datetime >= %(posting_datetime)s
limit 1
""",
args,
@ -2059,6 +2043,7 @@ def is_internal_transfer(sle):
def get_stock_value_difference(item_code, warehouse, posting_date, posting_time, voucher_no=None):
table = frappe.qb.DocType("Stock Ledger Entry")
posting_datetime = get_combine_datetime(posting_date, posting_time)
query = (
frappe.qb.from_(table)
@ -2067,10 +2052,7 @@ def get_stock_value_difference(item_code, warehouse, posting_date, posting_time,
(table.is_cancelled == 0)
& (table.item_code == item_code)
& (table.warehouse == warehouse)
& (
(table.posting_date < posting_date)
| ((table.posting_date == posting_date) & (table.posting_time <= posting_time))
)
& (table.posting_datetime <= posting_datetime)
)
)


@ -8,7 +8,7 @@ from typing import Dict, Optional
import frappe
from frappe import _
from frappe.query_builder.functions import CombineDatetime, IfNull, Sum
from frappe.utils import cstr, flt, get_link_to_form, nowdate, nowtime
from frappe.utils import cstr, flt, get_link_to_form, get_time, getdate, nowdate, nowtime
import erpnext
from erpnext.stock.doctype.serial_and_batch_bundle.serial_and_batch_bundle import (
@ -657,3 +657,18 @@ def _update_item_info(scan_result: Dict[str, Optional[str]]) -> Dict[str, Option
):
scan_result.update(item_info)
return scan_result
def get_combine_datetime(posting_date, posting_time):
import datetime
if isinstance(posting_date, str):
posting_date = getdate(posting_date)
if isinstance(posting_time, str):
posting_time = get_time(posting_time)
if isinstance(posting_time, datetime.timedelta):
posting_time = (datetime.datetime.min + posting_time).time()
return datetime.datetime.combine(posting_date, posting_time).replace(microsecond=0)
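For reference, the new get_combine_datetime helper accepts dates and times as strings, native date/time objects, or the timedelta values database drivers return for TIME columns, and truncates microseconds. A small illustration under those assumptions, importing it from erpnext.stock.utils as stock_ledger.py does in this PR:

import datetime
from erpnext.stock.utils import get_combine_datetime

# Sub-second parts are dropped, so "02:00:00.1234" and "02:00:00" collapse
# to the same posting_datetime.
assert get_combine_datetime("2021-01-01", "02:00:00.1234") == datetime.datetime(2021, 1, 1, 2, 0, 0)

# timedelta posting_time values (as returned for TIME columns) are accepted too.
assert get_combine_datetime(datetime.date(2021, 1, 1), datetime.timedelta(hours=2)) == datetime.datetime(2021, 1, 1, 2, 0)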


@ -643,10 +643,6 @@ class TestSubcontractingReceipt(FrappeTestCase):
)
scr = make_subcontracting_receipt(sco.name)
scr.save()
for row in scr.supplied_items:
self.assertNotEqual(row.rate, 300.00)
self.assertFalse(row.serial_and_batch_bundle)
scr.submit()
scr.reload()