hook jobs up with real data

This commit is contained in:
Casey Wittrock 2025-11-07 08:03:36 -06:00
parent f4d04e90a9
commit 942e9beeab
3 changed files with 178 additions and 24 deletions

View File

@ -205,3 +205,112 @@ def upsert_client(data):
"address": address_doc, "address": address_doc,
"success": True "success": True
} }
@frappe.whitelist()
def get_jobs(options):
    """Return a paginated, filtered, sorted list of Project records.

    Args:
        options: JSON string (normal whitelisted HTTP call) or dict
            (direct server-side invocation) with optional keys:
            fields (list): columns to fetch, default ["*"].
            filters (dict): PrimeVue-style filters, i.e.
                {field: {"value": ..., "matchMode": ...}}.
            sorting (str): "field asc|desc"; empty/None for no sorting.
            page (int): 1-based page number, default 1.
            page_size (int): rows per page, default 10.
            for_table (bool): if True, "data" holds flattened table rows;
                otherwise it holds {"project": <doc>} wrappers.

    Returns:
        dict: {"pagination": {total, page, page_size, total_pages},
               "data": list}
    """
    # Accept either a JSON-encoded string or an already-parsed dict.
    if isinstance(options, str):
        options = json.loads(options)
    default_options = {
        "fields": ["*"],
        "filters": {},
        "sorting": {},
        "page": 1,
        "page_size": 10,
        "for_table": False,
    }
    options = {**default_options, **options}

    # Map frontend (camelCase) field names to Project doctype fieldnames.
    field_mapping = {
        "name": "name",
        "customInstallationAddress": "custom_installation_address",
        "customer": "customer",
        "status": "status",
        "percentComplete": "percent_complete",
    }

    def map_job_field_name(frontend_field):
        # Unknown fields pass through unchanged.
        return field_mapping.get(frontend_field, frontend_field)

    # Convert PrimeVue filter objects into Frappe filter tuples.
    processed_filters = {}
    for field_name, filter_obj in (options["filters"] or {}).items():
        if not (isinstance(filter_obj, dict) and "value" in filter_obj):
            continue
        value = filter_obj["value"]
        if value is None or value == "":
            continue
        backend_field = map_job_field_name(field_name)
        match_mode = filter_obj.get("matchMode", "contains")
        if isinstance(match_mode, str):
            match_mode = match_mode.lower()
        if match_mode == "startswith":
            processed_filters[backend_field] = ["like", f"{value}%"]
        elif match_mode == "endswith":
            processed_filters[backend_field] = ["like", f"%{value}"]
        elif match_mode == "equals":
            processed_filters[backend_field] = value
        else:
            # "contains" and any unrecognized mode default to substring match.
            processed_filters[backend_field] = ["like", f"%{value}%"]

    # Parse "field asc|desc" sorting. The direction is whitelisted so that
    # user input interpolated into order_by cannot inject arbitrary SQL.
    order_by = None
    sorting_str = options.get("sorting")
    if isinstance(sorting_str, str) and sorting_str.strip():
        parts = sorting_str.strip().split()
        if len(parts) >= 2:
            sort_direction = parts[1].lower()
            if sort_direction in ("asc", "desc"):
                backend_sort_field = map_job_field_name(parts[0])
                order_by = f"{backend_sort_field} {sort_direction}"

    count = frappe.db.count("Project", filters=processed_filters)
    projects = frappe.db.get_all(
        "Project",
        fields=options["fields"],
        filters=processed_filters,
        limit=options["page_size"],
        start=(options["page"] - 1) * options["page_size"],
        order_by=order_by,
    )

    jobs = []
    table_rows = []
    for project in projects:
        # Flattened, camelCase shape consumed by the frontend DataTable.
        table_rows.append({
            "id": project["name"],
            "name": project["name"],
            "customInstallationAddress": project.get("custom_installation_address", ""),
            "customer": project.get("customer", ""),
            "status": project.get("status", ""),
            "percentComplete": project.get("percent_complete", 0),
        })
        jobs.append({"project": project})

    return {
        "pagination": {
            "total": count,
            "page": options["page"],
            "page_size": options["page_size"],
            # Ceiling division: total pages for the requested page size.
            "total_pages": (count + options["page_size"] - 1) // options["page_size"],
        },
        "data": table_rows if options["for_table"] else jobs,
    }

View File

@ -272,6 +272,37 @@ class Api {
return result; return result;
} }
/**
* Get paginated job data with filtering and sorting
* @param {Object} paginationParams - Pagination parameters from store
* @param {Object} filters - Filter parameters from store
* @param {Object} sorting - Sorting parameters from store (optional)
* @returns {Promise<{data: Array, pagination: Object}>}
*/
static async getPaginatedJobDetails(paginationParams = {}, filters = {}, sorting = null) {
const { page = 0, pageSize = 10, sortField = null, sortOrder = null } = paginationParams;
// Use sorting from the dedicated sorting parameter first, then fall back to pagination params
const actualSortField = sorting?.field || sortField;
const actualSortOrder = sorting?.order || sortOrder;
const options = {
page: page + 1, // Backend expects 1-based pages
page_size: pageSize,
filters,
sorting:
actualSortField && actualSortOrder
? `${actualSortField} ${actualSortOrder === -1 ? "desc" : "asc"}`
: null,
for_table: true,
};
console.log("DEBUG: API - Sending job options to backend:", options);
const result = await this.request("custom_ui.api.db.get_jobs", { options });
return result;
}
/** /**
* Fetch a list of documents from a specific doctype. * Fetch a list of documents from a specific doctype.
* *

View File

@ -8,7 +8,6 @@
:lazy="true" :lazy="true"
:totalRecords="totalRecords" :totalRecords="totalRecords"
:loading="isLoading" :loading="isLoading"
:onLazyLoad="handleLazyLoad"
@lazy-load="handleLazyLoad" @lazy-load="handleLazyLoad"
/> />
</div> </div>
@ -44,6 +43,10 @@ const handleLazyLoad = async (event) => {
try { try {
isLoading.value = true; isLoading.value = true;
// Get sorting information from filters store first (needed for cache key)
const sorting = filtersStore.getTableSorting("jobs");
console.log("Current sorting state:", sorting);
// Get pagination parameters // Get pagination parameters
const paginationParams = { const paginationParams = {
page: event.page || 0, page: event.page || 0,
@ -62,13 +65,20 @@ const handleLazyLoad = async (event) => {
}); });
} }
// Clear cache when filters or sorting are active to ensure fresh data
const hasActiveFilters = Object.keys(filters).length > 0;
const hasActiveSorting = paginationParams.sortField && paginationParams.sortOrder;
if (hasActiveFilters || hasActiveSorting) {
paginationStore.clearTableCache("jobs");
}
// Check cache first // Check cache first
const cachedData = paginationStore.getCachedPage( const cachedData = paginationStore.getCachedPage(
"jobs", "jobs",
paginationParams.page, paginationParams.page,
paginationParams.pageSize, paginationParams.pageSize,
paginationParams.sortField, sorting.field || paginationParams.sortField,
paginationParams.sortOrder, sorting.order || paginationParams.sortOrder,
filters, filters,
); );
@ -88,43 +98,45 @@ const handleLazyLoad = async (event) => {
console.log("Making API call with:", { paginationParams, filters }); console.log("Making API call with:", { paginationParams, filters });
// For now, use existing API but we should create a paginated version // Call API with pagination, filters, and sorting
// TODO: Create Api.getPaginatedJobDetails() method const result = await Api.getPaginatedJobDetails(paginationParams, filters, sorting);
let data = await Api.getJobDetails();
// Simulate pagination on client side for now // Update local state - extract from pagination structure
const startIndex = paginationParams.page * paginationParams.pageSize; tableData.value = result.data;
const endIndex = startIndex + paginationParams.pageSize; totalRecords.value = result.pagination.total;
const paginatedData = data.slice(startIndex, endIndex);
// Update local state
tableData.value = paginatedData;
totalRecords.value = data.length;
// Update pagination store with new total // Update pagination store with new total
paginationStore.setTotalRecords("jobs", data.length); paginationStore.setTotalRecords("jobs", result.pagination.total);
console.log("Updated pagination state:", {
tableData: tableData.value.length,
totalRecords: totalRecords.value,
storeTotal: paginationStore.getTablePagination("jobs").totalRecords,
storeTotalPages: paginationStore.getTotalPages("jobs"),
});
// Cache the result // Cache the result
paginationStore.setCachedPage( paginationStore.setCachedPage(
"jobs", "jobs",
paginationParams.page, paginationParams.page,
paginationParams.pageSize, paginationParams.pageSize,
paginationParams.sortField, sorting.field || paginationParams.sortField,
paginationParams.sortOrder, sorting.order || paginationParams.sortOrder,
filters, filters,
{ {
records: paginatedData, records: result.data,
totalRecords: data.length, totalRecords: result.pagination.total,
}, },
); );
console.log("Loaded from API:", { console.log("Loaded from API:", {
records: paginatedData.length, records: result.data.length,
total: data.length, total: result.pagination.total,
page: paginationParams.page + 1, page: paginationParams.page + 1,
}); });
} catch (error) { } catch (error) {
console.error("Error loading job data:", error); console.error("Error loading job data:", error);
// You could also show a toast or other error notification here
tableData.value = []; tableData.value = [];
totalRecords.value = 0; totalRecords.value = 0;
} finally { } finally {
@ -137,17 +149,19 @@ onMounted(async () => {
// Initialize pagination and filters // Initialize pagination and filters
paginationStore.initializeTablePagination("jobs", { rows: 10 }); paginationStore.initializeTablePagination("jobs", { rows: 10 });
filtersStore.initializeTableFilters("jobs", columns); filtersStore.initializeTableFilters("jobs", columns);
filtersStore.initializeTableSorting("jobs");
// Load first page // Load first page
const initialPagination = paginationStore.getTablePagination("jobs"); const initialPagination = paginationStore.getTablePagination("jobs");
const initialFilters = filtersStore.getTableFilters("jobs"); const initialFilters = filtersStore.getTableFilters("jobs");
const initialSorting = filtersStore.getTableSorting("jobs");
await handleLazyLoad({ await handleLazyLoad({
page: initialPagination.page, page: initialPagination.page,
rows: initialPagination.rows, rows: initialPagination.rows,
first: initialPagination.first, first: initialPagination.first,
sortField: initialPagination.sortField, sortField: initialSorting.field || initialPagination.sortField,
sortOrder: initialPagination.sortOrder, sortOrder: initialSorting.order || initialPagination.sortOrder,
filters: initialFilters, filters: initialFilters,
}); });
}); });