hook jobs up with real data
This commit is contained in:
parent
f4d04e90a9
commit
942e9beeab
@ -204,4 +204,113 @@ def upsert_client(data):
            "customer": customer_doc,
            "address": address_doc,
            "success": True
        }
    }


@frappe.whitelist()
def get_jobs(options):
    options = json.loads(options)
    print("DEBUG: Raw job options received:", options)
    defaultOptions = {
        "fields": ["*"],
        "filters": {},
        "sorting": None,
        "page": 1,
        "page_size": 10,
        "for_table": False
    }
    options = {**defaultOptions, **options}
    print("DEBUG: Final job options:", options)

    jobs = []
    tableRows = []

    # Map frontend field names to backend field names for the Project doctype
    def map_job_field_name(frontend_field):
        field_mapping = {
            "name": "name",
            "customInstallationAddress": "custom_installation_address",
            "customer": "customer",
            "status": "status",
            "percentComplete": "percent_complete"
        }
        return field_mapping.get(frontend_field, frontend_field)

    # Process filters from PrimeVue format to Frappe format
    processed_filters = {}
    if options["filters"]:
        for field_name, filter_obj in options["filters"].items():
            if isinstance(filter_obj, dict) and "value" in filter_obj:
                if filter_obj["value"] is not None and filter_obj["value"] != "":
                    # Map frontend field names to backend field names
                    backend_field = map_job_field_name(field_name)

                    # Handle the different match modes (normalised to lower case)
                    match_mode = filter_obj.get("matchMode", "contains")
                    if isinstance(match_mode, str):
                        match_mode = match_mode.lower()

                    if match_mode == "contains":
                        processed_filters[backend_field] = ["like", f"%{filter_obj['value']}%"]
                    elif match_mode == "startswith":
                        processed_filters[backend_field] = ["like", f"{filter_obj['value']}%"]
                    elif match_mode == "endswith":
                        processed_filters[backend_field] = ["like", f"%{filter_obj['value']}"]
                    elif match_mode == "equals":
                        processed_filters[backend_field] = filter_obj["value"]
                    else:
                        # Default to contains
                        processed_filters[backend_field] = ["like", f"%{filter_obj['value']}%"]

    # Process sorting
    order_by = None
    sorting_str = options.get("sorting")
    if sorting_str and sorting_str.strip():
        # Parse "field_name asc/desc" format
        parts = sorting_str.strip().split()
        if len(parts) >= 2:
            sort_field = parts[0]
            sort_direction = parts[1].lower()
            # Map frontend field to backend field
            backend_sort_field = map_job_field_name(sort_field)
            order_by = f"{backend_sort_field} {sort_direction}"

    print("DEBUG: Processed job filters:", processed_filters)
    print("DEBUG: Job order by:", order_by)

    count = frappe.db.count("Project", filters=processed_filters)
    print("DEBUG: Total projects count:", count)

    projects = frappe.db.get_all(
        "Project",
        fields=options["fields"],
        filters=processed_filters,
        limit=options["page_size"],
        start=(options["page"] - 1) * options["page_size"],
        order_by=order_by
    )

    for project in projects:
        job = {}
        tableRow = {}

        tableRow["id"] = project["name"]
        tableRow["name"] = project["name"]
        tableRow["customInstallationAddress"] = project.get("custom_installation_address", "")
        tableRow["customer"] = project.get("customer", "")
        tableRow["status"] = project.get("status", "")
        tableRow["percentComplete"] = project.get("percent_complete", 0)
        tableRows.append(tableRow)

        job["project"] = project
        jobs.append(job)

    return {
        "pagination": {
            "total": count,
            "page": options["page"],
            "page_size": options["page_size"],
            "total_pages": (count + options["page_size"] - 1) // options["page_size"]
        },
        "data": tableRows if options["for_table"] else jobs
    }
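For orientation, here is a sketch of the request options and response envelope this endpoint works with, pieced together from the code above and the frontend call further down; the filter value "Acme" is purely illustrative.

// Options as sent by the frontend (PrimeVue-style filters, 1-based page):
const exampleOptions = {
  page: 1,
  page_size: 10,
  filters: { customer: { value: "Acme", matchMode: "contains" } }, // becomes ["like", "%Acme%"] server-side
  sorting: "customer asc", // parsed into Frappe's order_by string
  for_table: true,
};

// Response envelope returned by get_jobs:
// {
//   pagination: { total, page, page_size, total_pages },
//   data: [{ id, name, customInstallationAddress, customer, status, percentComplete }, ...]
// }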
@ -272,6 +272,37 @@ class Api {
    return result;
  }

  /**
   * Get paginated job data with filtering and sorting
   * @param {Object} paginationParams - Pagination parameters from store
   * @param {Object} filters - Filter parameters from store
   * @param {Object} sorting - Sorting parameters from store (optional)
   * @returns {Promise<{data: Array, pagination: Object}>}
   */
  static async getPaginatedJobDetails(paginationParams = {}, filters = {}, sorting = null) {
    const { page = 0, pageSize = 10, sortField = null, sortOrder = null } = paginationParams;

    // Use sorting from the dedicated sorting parameter first, then fall back to pagination params
    const actualSortField = sorting?.field || sortField;
    const actualSortOrder = sorting?.order || sortOrder;

    const options = {
      page: page + 1, // Backend expects 1-based pages
      page_size: pageSize,
      filters,
      sorting:
        actualSortField && actualSortOrder
          ? `${actualSortField} ${actualSortOrder === -1 ? "desc" : "asc"}`
          : null,
      for_table: true,
    };

    console.log("DEBUG: API - Sending job options to backend:", options);

    const result = await this.request("custom_ui.api.db.get_jobs", { options });
    return result;
  }

  /**
   * Fetch a list of documents from a specific doctype.
   *
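For reference, a hedged usage sketch of the new helper as it might be called from a component handler; the concrete filter and sort values are illustrative, and the comments note how they map onto the backend above.

// Hypothetical call site (values mirror a PrimeVue lazy-load event: 0-based page, sortOrder of 1 or -1).
const result = await Api.getPaginatedJobDetails(
  { page: 0, pageSize: 10 },                          // pagination params from the store
  { status: { value: "Open", matchMode: "equals" } }, // PrimeVue-style filters
  { field: "percentComplete", order: -1 },            // dedicated sorting object -> "percentComplete desc"
);
// result.data holds the table rows; result.pagination.total holds the total record count.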
@ -8,7 +8,6 @@
      :lazy="true"
      :totalRecords="totalRecords"
      :loading="isLoading"
      :onLazyLoad="handleLazyLoad"
      @lazy-load="handleLazyLoad"
    />
  </div>
@ -44,6 +43,10 @@ const handleLazyLoad = async (event) => {
  try {
    isLoading.value = true;

    // Get sorting information from filters store first (needed for cache key)
    const sorting = filtersStore.getTableSorting("jobs");
    console.log("Current sorting state:", sorting);

    // Get pagination parameters
    const paginationParams = {
      page: event.page || 0,
@ -62,13 +65,20 @@ const handleLazyLoad = async (event) => {
      });
    }

    // Clear cache when filters or sorting are active to ensure fresh data
    const hasActiveFilters = Object.keys(filters).length > 0;
    const hasActiveSorting = paginationParams.sortField && paginationParams.sortOrder;
    if (hasActiveFilters || hasActiveSorting) {
      paginationStore.clearTableCache("jobs");
    }

    // Check cache first
    const cachedData = paginationStore.getCachedPage(
      "jobs",
      paginationParams.page,
      paginationParams.pageSize,
      paginationParams.sortField,
      paginationParams.sortOrder,
      sorting.field || paginationParams.sortField,
      sorting.order || paginationParams.sortOrder,
      filters,
    );
@ -88,43 +98,45 @@ const handleLazyLoad = async (event) => {

    console.log("Making API call with:", { paginationParams, filters });

    // For now, use existing API but we should create a paginated version
    // TODO: Create Api.getPaginatedJobDetails() method
    let data = await Api.getJobDetails();
    // Call API with pagination, filters, and sorting
    const result = await Api.getPaginatedJobDetails(paginationParams, filters, sorting);

    // Simulate pagination on client side for now
    const startIndex = paginationParams.page * paginationParams.pageSize;
    const endIndex = startIndex + paginationParams.pageSize;
    const paginatedData = data.slice(startIndex, endIndex);

    // Update local state
    tableData.value = paginatedData;
    totalRecords.value = data.length;
    // Update local state - extract from pagination structure
    tableData.value = result.data;
    totalRecords.value = result.pagination.total;

    // Update pagination store with new total
    paginationStore.setTotalRecords("jobs", data.length);
    paginationStore.setTotalRecords("jobs", result.pagination.total);

    console.log("Updated pagination state:", {
      tableData: tableData.value.length,
      totalRecords: totalRecords.value,
      storeTotal: paginationStore.getTablePagination("jobs").totalRecords,
      storeTotalPages: paginationStore.getTotalPages("jobs"),
    });

    // Cache the result
    paginationStore.setCachedPage(
      "jobs",
      paginationParams.page,
      paginationParams.pageSize,
      paginationParams.sortField,
      paginationParams.sortOrder,
      sorting.field || paginationParams.sortField,
      sorting.order || paginationParams.sortOrder,
      filters,
      {
        records: paginatedData,
        totalRecords: data.length,
        records: result.data,
        totalRecords: result.pagination.total,
      },
    );

    console.log("Loaded from API:", {
      records: paginatedData.length,
      total: data.length,
      records: result.data.length,
      total: result.pagination.total,
      page: paginationParams.page + 1,
    });
  } catch (error) {
    console.error("Error loading job data:", error);
    // You could also show a toast or other error notification here
    tableData.value = [];
    totalRecords.value = 0;
  } finally {
@ -137,17 +149,19 @@ onMounted(async () => {
  // Initialize pagination and filters
  paginationStore.initializeTablePagination("jobs", { rows: 10 });
  filtersStore.initializeTableFilters("jobs", columns);
  filtersStore.initializeTableSorting("jobs");

  // Load first page
  const initialPagination = paginationStore.getTablePagination("jobs");
  const initialFilters = filtersStore.getTableFilters("jobs");
  const initialSorting = filtersStore.getTableSorting("jobs");

  await handleLazyLoad({
    page: initialPagination.page,
    rows: initialPagination.rows,
    first: initialPagination.first,
    sortField: initialPagination.sortField,
    sortOrder: initialPagination.sortOrder,
    sortField: initialSorting.field || initialPagination.sortField,
    sortOrder: initialSorting.order || initialPagination.sortOrder,
    filters: initialFilters,
  });
});