refactor: support list view filters

(cherry picked from commit 93295bf25b67069936e23d03f5a1c559294bc25f)
ruthra kumar 2023-11-11 05:10:16 +05:30 committed by Mergify
parent 76f3d4a31c
commit df5fcbee71
2 changed files with 36 additions and 14 deletions


@@ -2,6 +2,7 @@
 "actions": [],
 "allow_copy": 1,
 "creation": "2023-11-09 20:14:45.139593",
+"default_view": "List",
 "doctype": "DocType",
 "engine": "InnoDB",
 "field_order": [
@@ -17,6 +18,8 @@
 {
 "fieldname": "date",
 "fieldtype": "Date",
+"in_list_view": 1,
+"in_standard_filter": 1,
 "label": "Date",
 "read_only": 1
 },
@@ -55,7 +58,7 @@
 "in_create": 1,
 "is_virtual": 1,
 "links": [],
-"modified": "2023-11-10 11:13:52.733076",
+"modified": "2023-11-11 04:52:49.347376",
 "modified_by": "Administrator",
 "module": "Bulk Transaction",
 "name": "Bulk Transaction Log",
@@ -76,5 +79,6 @@
 ],
 "sort_field": "modified",
 "sort_order": "DESC",
-"states": []
+"states": [],
+"title_field": "date"
 }
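
Note on the schema change above: with "default_view" set to List and "in_standard_filter" enabled on the date field, the list view passes its filters into BulkTransactionLog.get_list through args (see the Python diff below). A rough sketch of that payload, for illustration only — the keys besides "filters" and "page_length" are assumptions, and the [doctype, fieldname, operator, value] filter shape is what the new parse_list_filters indexes with fil[3]:

# Illustrative sketch only (not part of this commit): approximate shape of the
# args a list view passes to BulkTransactionLog.get_list.
sample_args = {
	"doctype": "Bulk Transaction Log",  # assumed key
	"page_length": 20,                  # read via args.get("page_length")
	"filters": [
		# [doctype, fieldname, operator, value] -- the value sits at index 3
		["Bulk Transaction Log", "date", "=", "2023-11-11"],
	],
}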


@@ -40,26 +40,33 @@ class BulkTransactionLog(Document):
 	@staticmethod
 	def get_list(args):
-		log_detail = qb.DocType("Bulk Transaction Log Detail")
+		filter_date = parse_list_filters(args)
 		limit = cint(args.get("page_length")) or 20
-		dates = (
+		log_detail = qb.DocType("Bulk Transaction Log Detail")
+
+		dates_query = (
 			qb.from_(log_detail)
 			.select(log_detail.date)
 			.distinct()
 			.orderby(log_detail.date, order=Order.desc)
 			.limit(limit)
-			.run()
 		)
+		if filter_date:
+			dates_query = dates_query.where(log_detail.date == filter_date)
+		dates = dates_query.run()
 
-		transaction_logs = (
-			qb.from_(log_detail)
-			.select(log_detail.date.as_("date"), Count(log_detail.date).as_("count"))
-			.where(log_detail.date.isin(dates))
-			.orderby(log_detail.date, order=Order.desc)
-			.groupby(log_detail.date)
-			.limit(limit)
-			.run(as_dict=True)
-		)
+		transaction_logs = []
+		if dates:
+			transaction_logs_query = (
+				qb.from_(log_detail)
+				.select(log_detail.date.as_("date"), Count(log_detail.date).as_("count"))
+				.where(log_detail.date.isin(dates))
+				.orderby(log_detail.date, order=Order.desc)
+				.groupby(log_detail.date)
+				.limit(limit)
+			)
+			transaction_logs = transaction_logs_query.run(as_dict=True)
 
 		return [serialize_transaction_log(x) for x in transaction_logs]
 
 	@staticmethod
@@ -85,3 +92,14 @@ def serialize_transaction_log(data):
 		succeeded=data.succeeded,
 		failed=data.failed,
 	)
+
+
+def parse_list_filters(args):
+	# parse date filter
+	filter_date = None
+	for fil in args.get("filters"):
+		if isinstance(fil, list):
+			for elem in fil:
+				if elem == "date":
+					filter_date = fil[3]
+	return filter_date
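
For reference, a minimal standalone sketch of the new filter-parsing path, runnable without a Frappe site. It mirrors parse_list_filters() from the diff above; the sample filters payload is assumed, not taken from this commit. get_list then applies the parsed value with .where(log_detail.date == filter_date).

# Standalone sketch: mirrors the committed parse_list_filters(), exercised with
# an assumed sample payload to show what it extracts.
def parse_list_filters(args):
	filter_date = None
	for fil in args.get("filters"):
		if isinstance(fil, list):
			for elem in fil:
				if elem == "date":
					filter_date = fil[3]  # filter value sits at index 3
	return filter_date


if __name__ == "__main__":
	args = {"filters": [["Bulk Transaction Log", "date", "=", "2023-11-11"]]}
	print(parse_list_filters(args))  # prints: 2023-11-11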