// Hello from MCP server
import { getSQLite } from "@/dataAccess/getSQLite";
import { getApi } from "@/dataAccess/getApi";
import { createCollections } from "./collections";
import { ChangesetProcessor, Change } from "./changeset-processor";
import {
BooksRecord,
BooksResponse,
CostsMaterialRecord,
CostsTimeRecord,
OffersRecord,
PricebookChangesRecord,
TypedPocketBase,
} from "@/pocketbase-types";
import {
SQLiteConnection,
SQLiteDBConnection,
} from "@capacitor-community/sqlite";
// TODO: Need to track in the database who signed into the client application, because
// if a new account signs in, then we would have to delete the database and start over.
// We can either show a warning and suggest deleting the data, or if it is a real
// security concern, then show a warning and say that the app can't be used by that user
// until the data is deleted. Hopefully this is never really an issue.
//
// OR - just delete everything on logout, I think that is probably simpler and better
/**
 * Probe network connectivity with a lightweight HEAD request.
 *
 * @returns true when the probe endpoint answers with an OK status;
 *          false on any network error, non-OK status, or timeout.
 */
async function checkConnection() {
  try {
    const response = await fetch("https://jsonplaceholder.typicode.com", {
      method: "HEAD",
      // Fail fast instead of hanging the sync path on a dead connection.
      signal: AbortSignal.timeout(5000),
    });
    return response.ok;
  } catch {
    // Covers DNS failures, offline mode, and the abort timeout above.
    return false;
  }
}
/**
 * Pull fresh data for the core collections, then stamp the refresh time.
 * Refreshes run one at a time; the timestamp is recorded only after all
 * of them have completed.
 */
async function refreshDb(collections: any) {
  const refreshables = [
    collections.currencies,
    collections.books,
    collections.organizations,
  ];
  for (const collection of refreshables) {
    await collection.refresh();
  }
  await collections.appStatus.setRefreshNow();
}
/**
 * Sync pricebook changes from the API.
 *
 * For each book in the org:
 * 1. Get the parent book (if any) and download its changeset records
 * 2. Download the book's own changeset records
 * 3. Apply all changesets in order from oldest to most recent
 *
 * Local changesets (shipped with the app and applied on login) are preserved -
 * we only download records newer than the most recent local record.
 *
 * Duplicate refIds between parent and child books are expected and allowed
 * for merging purposes.
 *
 * @param pb - authenticated PocketBase client; the active org id is read
 *   from pb.authStore.record
 * @param db - open local SQLite connection (holds the pricebookChanges table)
 * @param collections - collection helpers; only changeMessages is used here,
 *   to log the outcome of every applied changeset
 * @param saveDb - persists the SQLite database; called after each changeset
 */
async function syncChanges(
  pb: TypedPocketBase,
  db: SQLiteDBConnection,
  collections: any,
  saveDb: () => Promise<void>,
) {
  // Nothing to sync without an active org on the signed-in account.
  const activeOrg = pb.authStore.record?.activeOrg;
  if (!activeOrg) {
    console.log('[syncChanges] No active org, skipping sync');
    return;
  }
  console.log('[syncChanges] Starting sync for org:', activeOrg);
  // Get all books for this org
  const orgBooks = await pb.collection("books").getFullList({
    filter: `org = '${activeOrg}'`,
    expand: "parent",
  });
  console.log('[syncChanges] Found', orgBooks.length, 'books for org');
  // Collect all book IDs we need to fetch changes for (org books + their parents)
  const bookIdsToSync = new Set<string>();
  for (const book of orgBooks) {
    bookIdsToSync.add(book.id);
    // Add parent if it exists
    if (book.parent) {
      bookIdsToSync.add(book.parent);
    }
  }
  console.log('[syncChanges] Syncing changes for', bookIdsToSync.size, 'books (including parents)');
  // Get the most recent local changeset timestamp across all books
  // This ensures we don't re-download changes we already have
  // NOTE(review): the watermark is a single MAX across ALL books - a book
  // added to the org after earlier syncs will have its pre-watermark changes
  // skipped. Presumably acceptable per the design; confirm.
  const lastLocalChange = await db.query(
    `SELECT MAX(created) as lastCreated FROM pricebookChanges`
  );
  const lastCreated = lastLocalChange.values?.[0]?.lastCreated || null;
  console.log('[syncChanges] Last local changeset:', lastCreated || 'none');
  // Download changesets for all books
  let allChanges: PricebookChangesRecord[] = [];
  for (const bookId of bookIdsToSync) {
    try {
      let filter = `book = '${bookId}'`;
      if (lastCreated) {
        filter += ` && created > "${lastCreated}"`;
      }
      const bookChanges = await pb.collection("pricebookChanges").getFullList({
        filter,
        sort: "created",
      });
      console.log(`[syncChanges] Book ${bookId}: downloaded ${bookChanges.length} changes`);
      allChanges = allChanges.concat(bookChanges);
    } catch (e) {
      // One book failing to download must not block the others.
      console.error(`[syncChanges] Failed to fetch changes for book ${bookId}:`, e);
    }
  }
  // Sort all changes by created date (oldest first)
  allChanges.sort(
    (a, b) =>
      Number(new Date(a.created as string)) -
      Number(new Date(b.created as string)),
  );
  console.log('[syncChanges] Total changes to apply:', allChanges.length);
  if (allChanges.length === 0) {
    console.log('[syncChanges] No new changes to apply');
    return;
  }
  // Apply each changeset in order
  const processor = new ChangesetProcessor(db);
  let successCount = 0;
  let errorCount = 0;
  for (const change of allChanges) {
    try {
      // Apply the changeset
      await processor.processChangeset(
        change.changeset as Change[],
        "clientApp",
        true, // use transaction
        change.book,
      );
      // Persist the local database after each applied changeset
      await saveDb();
      // Log success
      await collections.changeMessages.save(
        change.id,
        change.book!,
        change.org || 'unknown',
        change.changeset as Change[],
        change.created!,
        'success'
      );
      successCount++;
    } catch (e: any) {
      console.error("[syncChanges] Error processing changeset:", e);
      // Log failure; the sync continues with the next changeset.
      await collections.changeMessages.save(
        change.id,
        change.book!,
        change.org || 'unknown',
        change.changeset as Change[],
        change.created!,
        'error',
        e.message || String(e)
      );
      errorCount++;
    }
  }
  console.log(`[syncChanges] Complete: ${successCount} succeeded, ${errorCount} failed`);
}
/**
 * An offer together with its resolved cost line items: the id arrays stored
 * on the offer record are expanded into full cost records (see offerData()).
 */
export type OfferData = {
  offer: OffersRecord;
  costsMaterial: CostsMaterialRecord[];
  costsTime: CostsTimeRecord[];
};
// Singleton promise guarding database initialization: concurrent getDb()
// callers share one in-flight initialization. Reset to null on failure so a
// later call can retry; kept after success so the result is reused.
let dbPromise: Promise<any> | null = null;
// Exported function to manually trigger sync
/**
 * Manually trigger a full sync: refresh the core collections, apply remote
 * pricebook changesets, and stamp the refresh time.
 *
 * @returns success/connected flags; on an unexpected failure the caught
 *          error is included in the result instead of being rethrown.
 */
export async function syncDatabase() {
  try {
    const { pb } = await getApi();
    const sql = await getSQLite();
    const collections = await createCollections(sql.dbConn, pb, sql.saveDb);
    const online = await checkConnection();
    if (!online) {
      return { success: false, connected: false };
    }
    await refreshDb(collections);
    await syncChanges(pb, sql.dbConn, collections, sql.saveDb);
    await collections.appStatus.setRefreshNow();
    return { success: true, connected: true };
  } catch (error) {
    console.error("Sync failed:", error);
    return { success: false, connected: false, error };
  }
}
// Get last sync time
/**
 * Read the timestamp of the most recent refresh from app status.
 *
 * @returns the last-refresh value, or null when the lookup fails.
 */
export async function getLastSyncTime() {
  try {
    const { pb } = await getApi();
    const sql = await getSQLite();
    const collections = await createCollections(sql.dbConn, pb, sql.saveDb);
    // `return await` keeps a rejection inside this try/catch.
    return await collections.appStatus.lastRefresh();
  } catch (error) {
    console.error("Failed to get last sync time:", error);
    return null;
  }
}
// Check if we're connected
/** Public wrapper around the internal connectivity probe. */
export async function checkConnectionStatus() {
  const connected = await checkConnection();
  return connected;
}
/**
 * Get the initialized database accessor, sharing a single in-flight
 * initialization between concurrent callers.
 *
 * Only the caller that created the promise resets it on failure, so later
 * calls get a fresh retry without clobbering a newer attempt.
 */
export async function getDb() {
  const existing = dbPromise;
  if (existing) {
    return existing;
  }
  const pending = _getDbWithRetries();
  dbPromise = pending;
  try {
    return await pending;
  } catch (error) {
    // Clear the cached promise so a subsequent getDb() can retry.
    dbPromise = null;
    throw error;
  }
}
/**
 * Open the database, retrying with exponential backoff.
 *
 * Makes up to `retries + 1` attempts, waiting 250ms, 500ms, 1s, ... between
 * failures. An attempt fails when _getDbInternal() throws or resolves to a
 * falsy value (e.g. no valid auth session yet).
 *
 * @throws Error when every attempt has failed, wrapping the last error seen.
 */
async function _getDbWithRetries() {
  const retries = 5;
  const baseWait = 250; // ms; doubled after each failed attempt
  let lastError: unknown = null;
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      const db = await _getDbInternal();
      if (db) {
        return db;
      }
    } catch (error) {
      console.error(`Attempt ${attempt + 1} failed:`, error);
      lastError = error;
    }
    // If this wasn't the last attempt, wait before retrying
    if (attempt < retries) {
      const waitTime = baseWait * Math.pow(2, attempt); // Exponential backoff
      await new Promise((resolve) => setTimeout(resolve, waitTime));
    }
  }
  throw new Error(
    `Database connection failed after ${retries + 1} attempts: ${(lastError as Error)?.message || "Unknown error"}`,
  );
}
/**
 * Build the database accessor: run an auto-sync when the last refresh is
 * stale, then expose raw query helpers plus the collection wrappers.
 *
 * @returns the accessor object, or undefined when there is no valid
 *          PocketBase auth session.
 */
async function _getDbInternal() {
  const { pb } = await getApi();
  if (pb.authStore.isValid) {
    const sql = await getSQLite();
    const collections = await createCollections(sql.dbConn, pb, sql.saveDb);
    const lastRefresh = await collections.appStatus.lastRefresh();
    const frequencyMin = 10;
    const nextRefresh = new Date(
      new Date(lastRefresh).getTime() + frequencyMin * 60 * 1000,
    );
    const now = new Date();
    // Auto-sync every 10 minutes
    if (nextRefresh < now) {
      await syncDatabase();
    }
    /**
     * Fetch one record by id, JSON-parsing the columns listed in `toParse`.
     * Returns undefined when no row matches.
     * `collection` is always an internal table name at call sites, never
     * user input; `id` is bound as a parameter to avoid SQL injection.
     */
    async function byId(collection: string, id: string, toParse: string[]) {
      const r = await sql.dbConn.query(
        `select * from ${collection} where id = ?`,
        [id],
      );
      // Guard the empty result set: r.values can be [] for a missing id,
      // and indexing it would hand undefined to the parse loop below.
      if (r.values && r.values.length > 0) {
        const record = r.values[0];
        for (const key of toParse) {
          try {
            record[key] = JSON.parse(record[key]);
          } catch {
            // Ignore parse errors for optional JSON fields
          }
        }
        return record;
      }
    }
    /**
     * Fetch every record whose JSON `relation` column has `id` as its first
     * element, JSON-parsing the columns listed in `toParse` on each row.
     * `collection`/`relation` are internal names; `id` is parameterized.
     */
    async function byJsonRelation(
      collection: string,
      relation: string,
      id: string,
      toParse: string[],
    ) {
      const q = `select * from ${collection}
        where json_extract(${relation}, '$[0]') = ?`;
      const r = await sql.dbConn.query(q, [id]);
      if (r.values) {
        for (const record of r.values) {
          for (const key of toParse) {
            try {
              record[key] = JSON.parse(record[key]);
            } catch {
              // Ignore parse errors for optional JSON fields
            }
          }
        }
        return r.values;
      }
    }
    /** Return every row of a table (internal table names only). */
    async function selectAll(collection: string) {
      const r = await sql.dbConn.query(`select * from ${collection}`);
      return r.values;
    }
    /**
     * Load an offer plus the material and time cost records referenced by
     * its costsMaterial/costsTime id arrays.
     *
     * @throws Error when no offer with the given id exists locally.
     */
    async function offerData(id: string): Promise<OfferData> {
      const data: OfferData = {
        offer: {} as OffersRecord,
        costsMaterial: [],
        costsTime: [],
      };
      data.offer = await byId("offers", id, ["costsMaterial", "costsTime"]);
      if (!data.offer) {
        throw new Error(`offer not found: ${id}`);
      }
      // Sequential on purpose: all queries share one SQLite connection.
      for (const costId of data.offer.costsMaterial || []) {
        data.costsMaterial.push(await byId("costsMaterial", costId, []));
      }
      for (const costId of data.offer.costsTime || []) {
        data.costsTime.push(await byId("costsTime", costId, []));
      }
      return data;
    }
    return {
      byId,
      selectAll,
      byJsonRelation,
      offerData,
      books: collections.books,
      changeMessages: collections.changeMessages,
      costsMaterial: collections.costsMaterial,
      currencies: collections.currencies,
      dbConn: sql.dbConn,
      menus: collections.menus,
      offers: collections.offers,
      organizations: collections.organizations,
      problems: collections.problems,
      sessions: collections.sessions,
      saveDb: sql.saveDb,
    };
  }
}