This commit is contained in:
shafi54 2026-03-27 01:59:26 +05:30
parent 1b042819af
commit 18f36107d8
33 changed files with 1793 additions and 1164 deletions

View file

@ -1,7 +1,6 @@
# Agent Instructions for Meat Farmer Monorepo # Agent Instructions for Meat Farmer Monorepo
## Important instructions ## Important instructions
- Don't try to build the code or run or compile it. Just make changes and leave the rest for the user.
- Don't run any drizzle migrations. User will handle it. - Don't run any drizzle migrations. User will handle it.
## Code Style Guidelines ## Code Style Guidelines
@ -48,6 +47,4 @@ react-native. They are available in the common-ui as MyText, MyTextInput, MyTouc
- Database: Drizzle ORM with PostgreSQL - Database: Drizzle ORM with PostgreSQL
## Important Notes ## Important Notes
- **Do not run build, compile, or migration commands** - These should be handled manually by developers
- Avoid running `npm run build`, `tsc`, `drizzle-kit generate`, or similar compilation/migration commands
- Don't do anything with git. Don't do git add or git commit. That will be managed entirely by the user - Don't do anything with git. Don't do git add or git commit. That will be managed entirely by the user

View file

@ -1,17 +1,8 @@
import 'dotenv/config'; import 'dotenv/config';
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger } from 'hono/logger';
import { serve } from '@hono/node-server'; import { serve } from '@hono/node-server';
import { trpcServer } from '@hono/trpc-server';
import { getStaffUserById, isUserSuspended } from '@/src/dbService';
import mainRouter from '@/src/main-router';
import initFunc from '@/src/lib/init'; import initFunc from '@/src/lib/init';
import { appRouter } from '@/src/trpc/router'; import { createApp } from '@/src/app'
import { TRPCError } from '@trpc/server'; // import signedUrlCache from '@/src/lib/signed-url-cache';
import { jwtVerify } from 'jose'
import { encodedJwtSecret } from '@/src/lib/env-exporter';
import signedUrlCache from '@/src/lib/signed-url-cache';
import { seed } from '@/src/lib/seed'; import { seed } from '@/src/lib/seed';
import '@/src/jobs/jobs-index'; import '@/src/jobs/jobs-index';
import { startAutomatedJobs } from '@/src/lib/automatedJobs'; import { startAutomatedJobs } from '@/src/lib/automatedJobs';
@ -20,120 +11,9 @@ seed()
initFunc() initFunc()
startAutomatedJobs() startAutomatedJobs()
const app = new Hono(); // signedUrlCache.loadFromDisk(); // Disabled for Workers compatibility
// CORS middleware const app = createApp()
app.use(cors({
origin: 'http://localhost:5174',
allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
allowHeaders: ['Origin', 'X-Requested-With', 'Content-Type', 'Accept', 'Authorization'],
credentials: true,
}));
signedUrlCache.loadFromDisk();
// Logger middleware
app.use(logger());
// tRPC middleware
app.use('/api/trpc', trpcServer({
router: appRouter,
createContext: async ({ req }) => {
let user = null;
let staffUser = null;
const authHeader = req.headers.get('authorization');
if (authHeader?.startsWith('Bearer ')) {
const token = authHeader.substring(7);
try {
const { payload } = await jwtVerify(token, encodedJwtSecret);
const decoded = payload as any;
// Check if this is a staff token (has staffId)
if (decoded.staffId) {
// This is a staff token, verify staff exists
const staff = await getStaffUserById(decoded.staffId);
if (staff) {
user = staffUser
staffUser = {
id: staff.id,
name: staff.name,
};
}
} else {
// This is a regular user token
user = decoded;
// Check if user is suspended
const suspended = await isUserSuspended(user.userId);
if (suspended) {
throw new TRPCError({
code: 'FORBIDDEN',
message: 'Account suspended',
});
}
}
} catch (err) {
// Invalid token, both user and staffUser remain null
}
}
return { req, user, staffUser };
},
onError({ error, path, type, ctx }) {
console.error('🚨 tRPC Error :', {
path,
type,
code: error.code,
message: error.message,
userId: ctx?.user?.userId,
stack: error.stack,
});
},
}));
// Mount main router
app.route('/api', mainRouter);
// Global error handler
app.onError((err, c) => {
console.error(err);
// Handle different error types
let status = 500;
let message = 'Internal Server Error';
if (err instanceof TRPCError) {
// Map TRPC error codes to HTTP status codes
const trpcStatusMap: Record<string, number> = {
'BAD_REQUEST': 400,
'UNAUTHORIZED': 401,
'FORBIDDEN': 403,
'NOT_FOUND': 404,
'TIMEOUT': 408,
'CONFLICT': 409,
'PRECONDITION_FAILED': 412,
'PAYLOAD_TOO_LARGE': 413,
'METHOD_NOT_SUPPORTED': 405,
'UNPROCESSABLE_CONTENT': 422,
'TOO_MANY_REQUESTS': 429,
'INTERNAL_SERVER_ERROR': 500,
};
status = trpcStatusMap[err.code] || 500;
message = err.message;
} else if ((err as any).statusCode) {
status = (err as any).statusCode;
message = err.message;
} else if ((err as any).status) {
status = (err as any).status;
message = err.message;
} else if (err.message) {
message = err.message;
}
return c.json({ message }, status as any);
});
serve({ serve({
fetch: app.fetch, fetch: app.fetch,

View file

@ -10,6 +10,8 @@
"dev2": "tsx watch index.ts", "dev2": "tsx watch index.ts",
"dev_node": "tsx watch index.ts", "dev_node": "tsx watch index.ts",
"dev": "bun --watch index.ts", "dev": "bun --watch index.ts",
"wrangler:dev": "wrangler dev worker.ts --config wrangler.toml",
"wrangler:deploy": "wrangler deploy worker.ts --config wrangler.toml",
"docker:build": "cd .. && docker buildx build --platform linux/amd64 -t mohdshafiuddin54/health_petal:latest --progress=plain -f backend/Dockerfile .", "docker:build": "cd .. && docker buildx build --platform linux/amd64 -t mohdshafiuddin54/health_petal:latest --progress=plain -f backend/Dockerfile .",
"docker:push": "docker push mohdshafiuddin54/health_petal:latest" "docker:push": "docker push mohdshafiuddin54/health_petal:latest"
}, },
@ -36,11 +38,13 @@
"zod": "^4.1.12" "zod": "^4.1.12"
}, },
"devDependencies": { "devDependencies": {
"@cloudflare/workers-types": "^4.20260304.0",
"@types/node": "^24.5.2", "@types/node": "^24.5.2",
"rimraf": "^6.1.2", "rimraf": "^6.1.2",
"ts-node-dev": "^2.0.0", "ts-node-dev": "^2.0.0",
"tsc-alias": "^1.8.16", "tsc-alias": "^1.8.16",
"tsx": "^4.20.5", "tsx": "^4.20.5",
"typescript": "^5.9.2" "typescript": "^5.9.2",
"wrangler": "^3.114.0"
} }
} }

View file

@ -1,236 +0,0 @@
import { Context } from 'hono';
import {
checkProductExistsByName,
checkUnitExists,
createProduct as createProductRecord,
createSpecialDealsForProduct,
getProductImagesById,
replaceProductTags,
updateProduct as updateProductRecord,
updateProductDeals,
} from '@/src/dbService'
import { ApiError } from "@/src/lib/api-error";
import type { AdminSpecialDeal } from '@packages/shared'
import { imageUploadS3, getOriginalUrlFromSignedUrl } from "@/src/lib/s3-client";
import { deleteS3Image } from "@/src/lib/delete-image";
import { scheduleStoreInitialization } from '@/src/stores/store-initializer';
// Shape of a special deal supplied by the admin client (JSON-encoded in the form body).
type CreateDeal = {
  quantity: number;
  price: number;
  validTill: string;
};

/**
 * Create a new product.
 *
 * Expects a multipart/form-data body with the product fields, optional
 * `images` (File or File[]), an optional JSON-encoded `deals` array and an
 * optional JSON-encoded `tagIds` array. Responds 201 with the created
 * product and any created deals.
 *
 * @throws ApiError 400 when required fields are missing, the name is already
 *         taken, or the unit id does not exist.
 */
export const createProduct = async (c: Context) => {
  const body = await c.req.parseBody({ all: true });
  const { name, shortDescription, longDescription, unitId, storeId, price, marketPrice, incrementStep, productQuantity, isSuspended, isFlashAvailable, flashPrice, deals, tagIds } = body;

  // Validate required fields
  if (!name || !unitId || !storeId || !price) {
    throw new ApiError("Name, unitId, storeId, and price are required", 400);
  }

  // Reject duplicate product names
  const existingProduct = await checkProductExistsByName((name as string).trim())
  if (existingProduct) {
    throw new ApiError("A product with this name already exists", 400);
  }

  // The referenced unit must exist
  const unitExists = await checkUnitExists(parseInt(unitId as string))
  if (!unitExists) {
    throw new ApiError("Invalid unit ID", 400);
  }

  // Upload any attached images to S3 in parallel.
  // BUG FIX: the previous code passed Buffer.from(file.stream() as any),
  // which throws at runtime (Buffer.from does not accept a ReadableStream);
  // the bytes are now read via file.arrayBuffer() instead.
  const images = body.images;
  let uploadedImageUrls: string[] = [];
  if (images) {
    const imageFiles = Array.isArray(images) ? images : [images];
    const uploads = imageFiles.map(async (file, index) => {
      if (!(file instanceof File)) return null;
      const key = `product-images/${Date.now()}-${index}`;
      return imageUploadS3(Buffer.from(await file.arrayBuffer()), file.type, key);
    });
    uploadedImageUrls = (await Promise.all(uploads)).filter((u): u is string => u !== null);
  }

  // Assemble the insert payload; numeric form fields arrive as strings.
  const productData: any = {
    name: name as string,
    shortDescription: shortDescription as string | undefined,
    longDescription: longDescription as string | undefined,
    unitId: parseInt(unitId as string),
    storeId: parseInt(storeId as string),
    price: parseFloat(price as string),
    marketPrice: marketPrice ? parseFloat(marketPrice as string) : null,
    incrementStep: incrementStep ? parseInt(incrementStep as string) : 1,
    productQuantity: productQuantity ? parseInt(productQuantity as string) : 1,
    isSuspended: isSuspended === 'true',
    isFlashAvailable: isFlashAvailable === 'true',
    images: uploadedImageUrls,
  };
  if (flashPrice) {
    productData.flashPrice = parseFloat(flashPrice as string);
  }

  const newProduct = await createProductRecord(productData)

  // Create special deals when provided (JSON-encoded in the form body)
  let createdDeals: AdminSpecialDeal[] = []
  if (deals) {
    const parsedDeals = typeof deals === 'string' ? JSON.parse(deals) : deals;
    if (Array.isArray(parsedDeals)) {
      createdDeals = await createSpecialDealsForProduct(newProduct.id, parsedDeals)
    }
  }

  // Assign tags when provided (JSON-encoded in the form body)
  if (tagIds) {
    const parsedTagIds = typeof tagIds === 'string' ? JSON.parse(tagIds) : tagIds;
    if (Array.isArray(parsedTagIds)) {
      await replaceProductTags(newProduct.id, parsedTagIds)
    }
  }

  // Reinitialize stores so the new product becomes visible
  scheduleStoreInitialization()

  return c.json({
    product: newProduct,
    deals: createdDeals,
    message: "Product created successfully",
  }, 201);
};
/**
 * Update an existing product.
 *
 * Expects a multipart/form-data body with the product fields, optional new
 * `images`, a JSON-encoded `imagesToDelete` array of signed URLs, a
 * JSON-encoded `deals` array, and optional `tagIds`. Deleted images are also
 * removed from S3. Responds 200 with the updated product.
 *
 * @throws ApiError 400 on missing required fields or invalid unit id,
 *         404 when the product does not exist.
 */
export const updateProduct = async (c: Context) => {
  const id = c.req.param('id')
  const body = await c.req.parseBody({ all: true });
  const { name, shortDescription, longDescription, unitId, storeId, price, marketPrice, incrementStep, productQuantity, isSuspended, isFlashAvailable, flashPrice, deals: dealsRaw, imagesToDelete: imagesToDeleteRaw, tagIds } = body;

  // deals / imagesToDelete arrive JSON-encoded inside the multipart body
  const deals = dealsRaw ? (typeof dealsRaw === 'string' ? JSON.parse(dealsRaw) : dealsRaw) : null;
  const imagesToDelete = imagesToDeleteRaw ? (typeof imagesToDeleteRaw === 'string' ? JSON.parse(imagesToDeleteRaw) : imagesToDeleteRaw) : [];

  if (!name || !unitId || !storeId || !price) {
    throw new ApiError("Name, unitId, storeId, and price are required", 400);
  }

  // The referenced unit must exist
  const unitExists = await checkUnitExists(parseInt(unitId as string))
  if (!unitExists) {
    throw new ApiError("Invalid unit ID", 400);
  }

  // Current image list is needed to reconcile deletions and additions
  const currentImages = await getProductImagesById(parseInt(id as string))
  if (!currentImages) {
    throw new ApiError("Product not found", 404);
  }

  // Handle image deletions
  let updatedImages = currentImages || []
  if (imagesToDelete && imagesToDelete.length > 0) {
    // The client sends signed URLs; map them back to the original S3 URLs
    // stored in the DB before comparing.
    const originalUrlsToDelete = imagesToDelete
      .map((signedUrl: string) => getOriginalUrlFromSignedUrl(signedUrl))
      .filter(Boolean); // Remove nulls
    const imagesToRemoveFromDb = updatedImages.filter(storedUrl =>
      originalUrlsToDelete.includes(storedUrl)
    );
    // Delete the matching images from S3 in parallel
    await Promise.all(imagesToRemoveFromDb.map(imageUrl => deleteS3Image(imageUrl)));
    updatedImages = updatedImages.filter(img => !imagesToRemoveFromDb.includes(img));
  }

  // Upload any newly attached images.
  // BUG FIX: the previous code passed Buffer.from(file.stream() as any),
  // which throws at runtime (Buffer.from does not accept a ReadableStream);
  // the bytes are now read via file.arrayBuffer() instead.
  const images = body.images;
  let uploadedImageUrls: string[] = [];
  if (images) {
    const imageFiles = Array.isArray(images) ? images : [images];
    const uploads = imageFiles.map(async (file, index) => {
      if (!(file instanceof File)) return null;
      const key = `product-images/${Date.now()}-${index}`;
      return imageUploadS3(Buffer.from(await file.arrayBuffer()), file.type, key);
    });
    uploadedImageUrls = (await Promise.all(uploads)).filter((u): u is string => u !== null);
  }

  // Remaining current images plus new uploads
  const finalImages = [...updatedImages, ...uploadedImageUrls];

  const updateData: any = {
    name: name as string,
    shortDescription: shortDescription as string | undefined,
    longDescription: longDescription as string | undefined,
    unitId: parseInt(unitId as string),
    storeId: parseInt(storeId as string),
    price: parseFloat(price as string),
    marketPrice: marketPrice ? parseFloat(marketPrice as string) : null,
    incrementStep: incrementStep ? parseInt(incrementStep as string) : 1,
    productQuantity: productQuantity ? parseInt(productQuantity as string) : 1,
    isSuspended: isSuspended === 'true',
    images: finalImages.length > 0 ? finalImages : undefined,
  };
  if (isFlashAvailable !== undefined) {
    updateData.isFlashAvailable = isFlashAvailable === 'true';
  }
  if (flashPrice !== undefined) {
    updateData.flashPrice = flashPrice ? parseFloat(flashPrice as string) : null;
  }

  const updatedProduct = await updateProductRecord(parseInt(id as string), updateData)
  if (!updatedProduct) {
    throw new ApiError("Product not found", 404);
  }

  // Replace deals when provided
  if (deals && Array.isArray(deals)) {
    await updateProductDeals(parseInt(id as string), deals)
  }

  // Replace tag assignments when provided.
  // NOTE(review): createProduct JSON.parses tagIds while this handler
  // parseInt's them — confirm both clients send the same encoding.
  if (tagIds) {
    const parsedTagIds = typeof tagIds === 'string' ? [parseInt(tagIds)] : (Array.isArray(tagIds) ? tagIds.map((t: any) => parseInt(t)) : [parseInt(tagIds as any)])
    await replaceProductTags(parseInt(id as string), parsedTagIds)
  }

  // Reinitialize stores to reflect changes
  scheduleStoreInitialization()

  return c.json({
    product: updatedProduct,
    message: "Product updated successfully",
  }, 200);
};

126
apps/backend/src/app.ts Normal file
View file

@ -0,0 +1,126 @@
import { Hono } from 'hono'
import { cors } from 'hono/cors'
import { logger } from 'hono/logger'
import { trpcServer } from '@hono/trpc-server'
import { getStaffUserById, isUserSuspended } from '@/src/dbService'
import mainRouter from '@/src/main-router'
import { appRouter } from '@/src/trpc/router'
import { TRPCError } from '@trpc/server'
import { jwtVerify } from 'jose'
import { encodedJwtSecret } from '@/src/lib/env-exporter'
/**
 * Build and configure the Hono application: CORS, request logging, the tRPC
 * endpoint (with JWT-based context), the REST router and a global error
 * handler. Extracted into a factory so both the Node server entry point and
 * the Workers entry point can share one app definition.
 */
export const createApp = () => {
  const app = new Hono()

  // CORS middleware.
  // NOTE(review): origin is hard-coded to the local dev frontend — confirm
  // this is intended for production deployments.
  app.use(cors({
    origin: 'http://localhost:5174',
    allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
    allowHeaders: ['Origin', 'X-Requested-With', 'Content-Type', 'Accept', 'Authorization'],
    credentials: true,
  }))

  // Request logger middleware
  app.use(logger())

  // tRPC endpoint. The context carries `user` (regular-user JWT payload) and
  // `staffUser` (staff identity); both stay null for anonymous requests and
  // invalid tokens.
  app.use('/api/trpc', trpcServer({
    router: appRouter,
    createContext: async ({ req }) => {
      let user = null
      let staffUser = null
      const authHeader = req.headers.get('authorization')
      if (authHeader?.startsWith('Bearer ')) {
        const token = authHeader.substring(7)
        try {
          const { payload } = await jwtVerify(token, encodedJwtSecret)
          const decoded = payload as any
          if (decoded.staffId) {
            // Staff token: verify the staff account still exists.
            // BUG FIX: the old code also did `user = staffUser` here, a
            // no-op (staffUser was still null at that point); staff tokens
            // intentionally leave `user` null.
            const staff = await getStaffUserById(decoded.staffId)
            if (staff) {
              staffUser = {
                id: staff.id,
                name: staff.name,
              }
            }
          } else {
            // Regular user token
            user = decoded
            // Suspended accounts are rejected outright
            const suspended = await isUserSuspended(user.userId)
            if (suspended) {
              throw new TRPCError({
                code: 'FORBIDDEN',
                message: 'Account suspended',
              })
            }
          }
        } catch (err) {
          // BUG FIX: re-throw deliberate rejections (the FORBIDDEN error for
          // suspended accounts was previously swallowed here, so suspension
          // was never enforced). Any other error means an invalid token and
          // the request proceeds anonymously.
          if (err instanceof TRPCError) throw err
          // Invalid token: both user and staffUser remain null
        }
      }
      return { req, user, staffUser }
    },
    onError({ error, path, type, ctx }) {
      console.error('🚨 tRPC Error :', {
        path,
        type,
        code: error.code,
        message: error.message,
        userId: ctx?.user?.userId,
        stack: error.stack,
      })
    },
  }))

  // Mount the REST router
  app.route('/api', mainRouter)

  // Maps tRPC error codes onto HTTP status codes for the global handler
  // (hoisted out of the handler so it is built once per app, not per error).
  const trpcStatusMap: Record<string, number> = {
    BAD_REQUEST: 400,
    UNAUTHORIZED: 401,
    FORBIDDEN: 403,
    NOT_FOUND: 404,
    TIMEOUT: 408,
    CONFLICT: 409,
    PRECONDITION_FAILED: 412,
    PAYLOAD_TOO_LARGE: 413,
    METHOD_NOT_SUPPORTED: 405,
    UNPROCESSABLE_CONTENT: 422,
    TOO_MANY_REQUESTS: 429,
    INTERNAL_SERVER_ERROR: 500,
  }

  // Global error handler: normalises TRPCError, errors carrying
  // statusCode/status, and plain Errors into a JSON { message } response.
  app.onError((err, c) => {
    console.error(err)
    let status = 500
    let message = 'Internal Server Error'
    if (err instanceof TRPCError) {
      status = trpcStatusMap[err.code] || 500
      message = err.message
    } else if ((err as any).statusCode) {
      status = (err as any).statusCode
      message = err.message
    } else if ((err as any).status) {
      status = (err as any).status
      message = err.message
    } else if (err.message) {
      message = err.message
    }
    return c.json({ message }, status as any)
  })

  return app
}

View file

@ -2,10 +2,13 @@
// This file re-exports everything from postgresImporter to provide a clean abstraction layer // This file re-exports everything from postgresImporter to provide a clean abstraction layer
import type { AdminOrderDetails } from '@packages/shared' import type { AdminOrderDetails } from '@packages/shared'
import { getOrderDetails } from '@/src/postgresImporter' // import { getOrderDetails } from '@/src/postgresImporter'
import { getOrderDetails } from '@/src/sqliteImporter'
// Re-export everything from postgresImporter // Re-export everything from postgresImporter
export * from '@/src/postgresImporter' // export * from '@/src/postgresImporter'
export * from '@/src/sqliteImporter'
// Re-export getOrderDetails with the correct signature // Re-export getOrderDetails with the correct signature
export async function getOrderDetailsWrapper(orderId: number): Promise<AdminOrderDetails | null> { export async function getOrderDetailsWrapper(orderId: number): Promise<AdminOrderDetails | null> {

View file

@ -1,57 +1,116 @@
export const appUrl = process.env.APP_URL as string; // Old env loading (Node only)
// export const appUrl = process.env.APP_URL as string;
//
// export const jwtSecret: string = process.env.JWT_SECRET as string
//
// export const defaultRoleName = 'gen_user';
//
// export const encodedJwtSecret = new TextEncoder().encode(jwtSecret)
//
// export const s3AccessKeyId = process.env.S3_ACCESS_KEY_ID as string
//
// export const s3SecretAccessKey = process.env.S3_SECRET_ACCESS_KEY as string
//
// export const s3BucketName = process.env.S3_BUCKET_NAME as string
//
// export const s3Region = process.env.S3_REGION as string
//
// export const assetsDomain = process.env.ASSETS_DOMAIN as string;
//
// export const apiCacheKey = process.env.API_CACHE_KEY as string;
//
// export const cloudflareApiToken = process.env.CLOUDFLARE_API_TOKEN as string;
//
// export const cloudflareZoneId = process.env.CLOUDFLARE_ZONE_ID as string;
//
// export const s3Url = process.env.S3_URL as string
//
// export const redisUrl = process.env.REDIS_URL as string
//
//
// export const expoAccessToken = process.env.EXPO_ACCESS_TOKEN as string;
//
// export const phonePeBaseUrl = process.env.PHONE_PE_BASE_URL as string;
//
// export const phonePeClientId = process.env.PHONE_PE_CLIENT_ID as string;
//
// export const phonePeClientVersion = Number(process.env.PHONE_PE_CLIENT_VERSION as string);
//
// export const phonePeClientSecret = process.env.PHONE_PE_CLIENT_SECRET as string;
//
// export const phonePeMerchantId = process.env.PHONE_PE_MERCHANT_ID as string;
//
// export const razorpayId = process.env.RAZORPAY_KEY as string;
//
// export const razorpaySecret = process.env.RAZORPAY_SECRET as string;
//
// export const otpSenderAuthToken = process.env.OTP_SENDER_AUTH_TOKEN as string;
//
// export const minOrderValue = Number(process.env.MIN_ORDER_VALUE as string);
//
// export const deliveryCharge = Number(process.env.DELIVERY_CHARGE as string);
//
// export const telegramBotToken = process.env.TELEGRAM_BOT_TOKEN as string;
//
// export const telegramChatIds = (process.env.TELEGRAM_CHAT_IDS as string)?.split(',').map(id => id.trim()) || [];
//
// export const isDevMode = (process.env.ENV_MODE as string) === 'dev';
export const jwtSecret: string = process.env.JWT_SECRET as string const runtimeEnv = (globalThis as any).ENV || (globalThis as any).process?.env || {}
export const appUrl = runtimeEnv.APP_URL as string
export const jwtSecret: string = runtimeEnv.JWT_SECRET as string
export const defaultRoleName = 'gen_user'; export const defaultRoleName = 'gen_user';
export const encodedJwtSecret = new TextEncoder().encode(jwtSecret) export const encodedJwtSecret = new TextEncoder().encode(jwtSecret)
export const s3AccessKeyId = process.env.S3_ACCESS_KEY_ID as string export const s3AccessKeyId = runtimeEnv.S3_ACCESS_KEY_ID as string
export const s3SecretAccessKey = process.env.S3_SECRET_ACCESS_KEY as string export const s3SecretAccessKey = runtimeEnv.S3_SECRET_ACCESS_KEY as string
export const s3BucketName = process.env.S3_BUCKET_NAME as string export const s3BucketName = runtimeEnv.S3_BUCKET_NAME as string
export const s3Region = process.env.S3_REGION as string export const s3Region = runtimeEnv.S3_REGION as string
export const assetsDomain = process.env.ASSETS_DOMAIN as string; export const assetsDomain = runtimeEnv.ASSETS_DOMAIN as string
export const apiCacheKey = process.env.API_CACHE_KEY as string; export const apiCacheKey = runtimeEnv.API_CACHE_KEY as string
export const cloudflareApiToken = process.env.CLOUDFLARE_API_TOKEN as string; export const cloudflareApiToken = runtimeEnv.CLOUDFLARE_API_TOKEN as string
export const cloudflareZoneId = process.env.CLOUDFLARE_ZONE_ID as string; export const cloudflareZoneId = runtimeEnv.CLOUDFLARE_ZONE_ID as string
export const s3Url = process.env.S3_URL as string export const s3Url = runtimeEnv.S3_URL as string
export const redisUrl = process.env.REDIS_URL as string export const redisUrl = runtimeEnv.REDIS_URL as string
export const expoAccessToken = runtimeEnv.EXPO_ACCESS_TOKEN as string
export const expoAccessToken = process.env.EXPO_ACCESS_TOKEN as string; export const phonePeBaseUrl = runtimeEnv.PHONE_PE_BASE_URL as string
export const phonePeBaseUrl = process.env.PHONE_PE_BASE_URL as string; export const phonePeClientId = runtimeEnv.PHONE_PE_CLIENT_ID as string
export const phonePeClientId = process.env.PHONE_PE_CLIENT_ID as string; export const phonePeClientVersion = Number(runtimeEnv.PHONE_PE_CLIENT_VERSION as string)
export const phonePeClientVersion = Number(process.env.PHONE_PE_CLIENT_VERSION as string); export const phonePeClientSecret = runtimeEnv.PHONE_PE_CLIENT_SECRET as string
export const phonePeClientSecret = process.env.PHONE_PE_CLIENT_SECRET as string; export const phonePeMerchantId = runtimeEnv.PHONE_PE_MERCHANT_ID as string
export const phonePeMerchantId = process.env.PHONE_PE_MERCHANT_ID as string; export const razorpayId = runtimeEnv.RAZORPAY_KEY as string
export const razorpayId = process.env.RAZORPAY_KEY as string; export const razorpaySecret = runtimeEnv.RAZORPAY_SECRET as string
export const razorpaySecret = process.env.RAZORPAY_SECRET as string; export const otpSenderAuthToken = runtimeEnv.OTP_SENDER_AUTH_TOKEN as string
export const otpSenderAuthToken = process.env.OTP_SENDER_AUTH_TOKEN as string; export const minOrderValue = Number(runtimeEnv.MIN_ORDER_VALUE as string)
export const minOrderValue = Number(process.env.MIN_ORDER_VALUE as string); export const deliveryCharge = Number(runtimeEnv.DELIVERY_CHARGE as string)
export const deliveryCharge = Number(process.env.DELIVERY_CHARGE as string); export const telegramBotToken = runtimeEnv.TELEGRAM_BOT_TOKEN as string
export const telegramBotToken = process.env.TELEGRAM_BOT_TOKEN as string; export const telegramChatIds = (runtimeEnv.TELEGRAM_CHAT_IDS as string)?.split(',').map(id => id.trim()) || []
export const telegramChatIds = (process.env.TELEGRAM_CHAT_IDS as string)?.split(',').map(id => id.trim()) || []; export const isDevMode = (runtimeEnv.ENV_MODE as string) === 'dev'
export const isDevMode = (process.env.ENV_MODE as string) === 'dev';

View file

@ -1,7 +1,7 @@
// import { s3A, awsBucketName, awsRegion, awsSecretAccessKey } from "@/src/lib/env-exporter" // import { s3A, awsBucketName, awsRegion, awsSecretAccessKey } from "@/src/lib/env-exporter"
import { DeleteObjectCommand, DeleteObjectsCommand, PutObjectCommand, S3Client, GetObjectCommand } from "@aws-sdk/client-s3" import { DeleteObjectCommand, DeleteObjectsCommand, PutObjectCommand, S3Client, GetObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner" import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import signedUrlCache from "@/src/lib/signed-url-cache" // import signedUrlCache from "@/src/lib/signed-url-cache" // Disabled for Workers compatibility
import { claimUploadUrlStatus, createUploadUrlStatus } from '@/src/dbService' import { claimUploadUrlStatus, createUploadUrlStatus } from '@/src/dbService'
import { s3AccessKeyId, s3Region, s3Url, s3SecretAccessKey, s3BucketName, assetsDomain } from "@/src/lib/env-exporter" import { s3AccessKeyId, s3Region, s3Url, s3SecretAccessKey, s3BucketName, assetsDomain } from "@/src/lib/env-exporter"
@ -89,12 +89,11 @@ export async function generateSignedUrlFromS3Url(s3UrlRaw: string|null, expiresI
const s3Url = s3UrlRaw const s3Url = s3UrlRaw
try { try {
// Check if we have a cached signed URL // Cache disabled for Workers compatibility
const cachedUrl = signedUrlCache.get(s3Url); // const cachedUrl = signedUrlCache.get(s3Url);
if (cachedUrl) { // if (cachedUrl) {
// Found in cache, return it // return cachedUrl;
return cachedUrl; // }
}
// Create the command to get the object // Create the command to get the object
const command = new GetObjectCommand({ const command = new GetObjectCommand({
@ -105,8 +104,8 @@ export async function generateSignedUrlFromS3Url(s3UrlRaw: string|null, expiresI
// Generate the signed URL // Generate the signed URL
const signedUrl = await getSignedUrl(s3Client, command, { expiresIn }); const signedUrl = await getSignedUrl(s3Client, command, { expiresIn });
// Cache the signed URL with TTL matching the expiration time (convert seconds to milliseconds) // Cache disabled for Workers compatibility
signedUrlCache.set(s3Url, signedUrl, (expiresIn * 1000) - 60000); // Subtract 1 minute to ensure it doesn't expire before use // signedUrlCache.set(s3Url, signedUrl, (expiresIn * 1000) - 60000);
return signedUrl; return signedUrl;
} catch (error) { } catch (error) {
@ -121,16 +120,11 @@ export async function generateSignedUrlFromS3Url(s3UrlRaw: string|null, expiresI
* @returns The original S3 URL if found in cache, otherwise null * @returns The original S3 URL if found in cache, otherwise null
*/ */
export function getOriginalUrlFromSignedUrl(signedUrl: string|null): string|null { export function getOriginalUrlFromSignedUrl(signedUrl: string|null): string|null {
if (!signedUrl) { // Cache disabled for Workers compatibility - cannot retrieve original URL without cache
// To re-enable, migrate signed-url-cache to object storage (R2/S3)
return null; return null;
} }
// Try to find the original URL in our cache
const originalUrl = signedUrlCache.getOriginalUrl(signedUrl);
return originalUrl || null;
}
/** /**
* Generate signed URLs for multiple S3 URLs * Generate signed URLs for multiple S3 URLs
* @param s3Urls Array of S3 URLs or null values * @param s3Urls Array of S3 URLs or null values

View file

@ -0,0 +1,263 @@
import fs from 'fs';
import path from 'path';
const CACHE_FILE_PATH = path.join('.', 'assets', 'signed-url-cache.json');
// Interface for cache entries with TTL
interface CacheEntry {
value: string;
expiresAt: number; // Timestamp when this entry expires
}
class SignedURLCache {
private originalToSignedCache: Map<string, CacheEntry>;
private signedToOriginalCache: Map<string, CacheEntry>;
constructor() {
this.originalToSignedCache = new Map();
this.signedToOriginalCache = new Map();
// Create cache directory if it doesn't exist
const cacheDir = path.dirname(CACHE_FILE_PATH);
if (!fs.existsSync(cacheDir)) {
console.log('creating the directory')
fs.mkdirSync(cacheDir, { recursive: true });
}
else {
console.log('the directory is already present')
}
}
/**
* Get a signed URL from the cache using an original URL as the key
*/
get(originalUrl: string): string | undefined {
const entry = this.originalToSignedCache.get(originalUrl);
// If no entry or entry has expired, return undefined
if (!entry || Date.now() > entry.expiresAt) {
if (entry) {
// Remove expired entry
this.originalToSignedCache.delete(originalUrl);
// Also remove from reverse mapping if it exists
this.signedToOriginalCache.delete(entry.value);
}
return undefined;
}
return entry.value;
}
/**
* Get the original URL from the cache using a signed URL as the key
*/
getOriginalUrl(signedUrl: string): string | undefined {
const entry = this.signedToOriginalCache.get(signedUrl);
// If no entry or entry has expired, return undefined
if (!entry || Date.now() > entry.expiresAt) {
if (entry) {
// Remove expired entry
this.signedToOriginalCache.delete(signedUrl);
// Also remove from primary mapping if it exists
this.originalToSignedCache.delete(entry.value);
}
return undefined;
}
return entry.value;
}
/**
* Set a value in the cache with a TTL (Time To Live)
* @param originalUrl The original S3 URL
* @param signedUrl The signed URL
* @param ttlMs Time to live in milliseconds (default: 3 days)
*/
set(originalUrl: string, signedUrl: string, ttlMs: number = 259200000): void {
const expiresAt = Date.now() + ttlMs;
const entry: CacheEntry = {
value: signedUrl,
expiresAt
};
const reverseEntry: CacheEntry = {
value: originalUrl,
expiresAt
};
this.originalToSignedCache.set(originalUrl, entry);
this.signedToOriginalCache.set(signedUrl, reverseEntry);
}
has(originalUrl: string): boolean {
const entry = this.originalToSignedCache.get(originalUrl);
// Entry exists and hasn't expired
return !!entry && Date.now() <= entry.expiresAt;
}
hasSignedUrl(signedUrl: string): boolean {
const entry = this.signedToOriginalCache.get(signedUrl);
// Entry exists and hasn't expired
return !!entry && Date.now() <= entry.expiresAt;
}
clear(): void {
this.originalToSignedCache.clear();
this.signedToOriginalCache.clear();
this.saveToDisk();
}
/**
* Remove all expired entries from the cache
* @returns The number of expired entries that were removed
*/
clearExpired(): number {
const now = Date.now();
let removedCount = 0;
// Clear expired entries from original to signed cache
for (const [originalUrl, entry] of this.originalToSignedCache.entries()) {
if (now > entry.expiresAt) {
this.originalToSignedCache.delete(originalUrl);
removedCount++;
}
}
// Clear expired entries from signed to original cache
for (const [signedUrl, entry] of this.signedToOriginalCache.entries()) {
if (now > entry.expiresAt) {
this.signedToOriginalCache.delete(signedUrl);
// No need to increment removedCount as we've already counted these in the first loop
}
}
if (removedCount > 0) {
console.log(`SignedURLCache: Cleared ${removedCount} expired entries`);
}
return removedCount;
}
/**
 * Persist both cache maps to disk as JSON at CACHE_FILE_PATH.
 * Expired entries are pruned first so the file only holds live mappings.
 * Failures are logged and swallowed: persistence is best-effort.
 */
saveToDisk(): void {
  try {
    // Prune before serializing; the removal count is not needed here
    // (the original bound it to an unused local).
    this.clearExpired();
    // Cache entries contain exactly { value, expiresAt }, so the Map
    // entries can be serialized directly without copying field-by-field.
    const serializedCache = {
      originalToSigned: Object.fromEntries(this.originalToSignedCache),
      signedToOriginal: Object.fromEntries(this.signedToOriginalCache)
    };
    // Write to file (synchronous so shutdown hooks can rely on completion)
    fs.writeFileSync(
      CACHE_FILE_PATH,
      JSON.stringify(serializedCache),
      'utf8'
    );
    console.log(`SignedURLCache: Saved ${this.originalToSignedCache.size} entries to disk`);
  } catch (error) {
    console.error('Error saving SignedURLCache to disk:', error);
  }
}
/**
* Load the cache from disk
*/
loadFromDisk(): void {
try {
if (fs.existsSync(CACHE_FILE_PATH)) {
// Read from file
const data = fs.readFileSync(CACHE_FILE_PATH, 'utf8');
// Parse the data
const parsedData = JSON.parse(data) as {
originalToSigned: Record<string, { value: string; expiresAt: number }>,
signedToOriginal: Record<string, { value: string; expiresAt: number }>
};
// Only load entries that haven't expired yet
const now = Date.now();
let loadedCount = 0;
let expiredCount = 0;
// Load original to signed mappings
if (parsedData.originalToSigned) {
for (const [originalUrl, entry] of Object.entries(parsedData.originalToSigned)) {
if (now <= entry.expiresAt) {
this.originalToSignedCache.set(originalUrl, entry);
loadedCount++;
} else {
expiredCount++;
}
}
}
// Load signed to original mappings
if (parsedData.signedToOriginal) {
for (const [signedUrl, entry] of Object.entries(parsedData.signedToOriginal)) {
if (now <= entry.expiresAt) {
this.signedToOriginalCache.set(signedUrl, entry);
// Don't increment loadedCount as these are pairs of what we already counted
} else {
// Don't increment expiredCount as these are pairs of what we already counted
}
}
}
console.log(`SignedURLCache: Loaded ${loadedCount} valid entries from disk (skipped ${expiredCount} expired entries)`);
} else {
console.log('SignedURLCache: No cache file found, starting with empty cache');
}
} catch (error) {
console.error('Error loading SignedURLCache from disk:', error);
// Start with empty caches if loading fails
this.originalToSignedCache = new Map();
this.signedToOriginalCache = new Map();
}
}
}
// Shared singleton used throughout the application.
const signedUrlCache = new SignedURLCache();
// Flush the cache to disk on graceful shutdown; both signals share one handler.
const persistAndExit = () => {
  console.log('SignedURLCache: Saving cache before shutdown...');
  signedUrlCache.saveToDisk();
  process.exit(0);
};
process.on('SIGINT', persistAndExit);
process.on('SIGTERM', persistAndExit);
export default signedUrlCache;

283
apps/backend/src/lib/signed-url-cache.ts Executable file → Normal file
View file

@ -1,263 +1,24 @@
import fs from 'fs'; // SIGNED URL CACHE - DISABLED
import path from 'path'; // This file has been disabled to make the backend compatible with Cloudflare Workers.
// File system operations are not available in the Workers environment.
//
// To re-enable caching, migrate to Cloudflare R2 or another object storage solution.
// Original file saved as: signed-url-cache-old.ts
//
// Impact of disabling:
// - S3 signed URLs are generated fresh on every request
// - Increased AWS API calls (higher costs)
// - Slightly slower image loading
// - No file system dependencies (Workers-compatible)
const CACHE_FILE_PATH = path.join('.', 'assets', 'signed-url-cache.json'); export default {
get: () => undefined,
// Interface for cache entries with TTL set: () => {},
interface CacheEntry { getOriginalUrl: () => undefined,
value: string; has: () => false,
expiresAt: number; // Timestamp when this entry expires hasSignedUrl: () => false,
} clear: () => {},
clearExpired: () => 0,
class SignedURLCache { saveToDisk: () => {},
private originalToSignedCache: Map<string, CacheEntry>; loadFromDisk: () => {},
private signedToOriginalCache: Map<string, CacheEntry>;
constructor() {
this.originalToSignedCache = new Map();
this.signedToOriginalCache = new Map();
// Create cache directory if it doesn't exist
const cacheDir = path.dirname(CACHE_FILE_PATH);
if (!fs.existsSync(cacheDir)) {
console.log('creating the directory')
fs.mkdirSync(cacheDir, { recursive: true });
}
else {
console.log('the directory is already present')
}
}
/**
* Get a signed URL from the cache using an original URL as the key
*/
get(originalUrl: string): string | undefined {
const entry = this.originalToSignedCache.get(originalUrl);
// If no entry or entry has expired, return undefined
if (!entry || Date.now() > entry.expiresAt) {
if (entry) {
// Remove expired entry
this.originalToSignedCache.delete(originalUrl);
// Also remove from reverse mapping if it exists
this.signedToOriginalCache.delete(entry.value);
}
return undefined;
}
return entry.value;
}
/**
* Get the original URL from the cache using a signed URL as the key
*/
getOriginalUrl(signedUrl: string): string | undefined {
const entry = this.signedToOriginalCache.get(signedUrl);
// If no entry or entry has expired, return undefined
if (!entry || Date.now() > entry.expiresAt) {
if (entry) {
// Remove expired entry
this.signedToOriginalCache.delete(signedUrl);
// Also remove from primary mapping if it exists
this.originalToSignedCache.delete(entry.value);
}
return undefined;
}
return entry.value;
}
/**
* Set a value in the cache with a TTL (Time To Live)
* @param originalUrl The original S3 URL
* @param signedUrl The signed URL
* @param ttlMs Time to live in milliseconds (default: 3 days)
*/
set(originalUrl: string, signedUrl: string, ttlMs: number = 259200000): void {
const expiresAt = Date.now() + ttlMs;
const entry: CacheEntry = {
value: signedUrl,
expiresAt
}; };
const reverseEntry: CacheEntry = {
value: originalUrl,
expiresAt
};
this.originalToSignedCache.set(originalUrl, entry);
this.signedToOriginalCache.set(signedUrl, reverseEntry);
}
has(originalUrl: string): boolean {
const entry = this.originalToSignedCache.get(originalUrl);
// Entry exists and hasn't expired
return !!entry && Date.now() <= entry.expiresAt;
}
hasSignedUrl(signedUrl: string): boolean {
const entry = this.signedToOriginalCache.get(signedUrl);
// Entry exists and hasn't expired
return !!entry && Date.now() <= entry.expiresAt;
}
clear(): void {
this.originalToSignedCache.clear();
this.signedToOriginalCache.clear();
this.saveToDisk();
}
/**
* Remove all expired entries from the cache
* @returns The number of expired entries that were removed
*/
clearExpired(): number {
const now = Date.now();
let removedCount = 0;
// Clear expired entries from original to signed cache
for (const [originalUrl, entry] of this.originalToSignedCache.entries()) {
if (now > entry.expiresAt) {
this.originalToSignedCache.delete(originalUrl);
removedCount++;
}
}
// Clear expired entries from signed to original cache
for (const [signedUrl, entry] of this.signedToOriginalCache.entries()) {
if (now > entry.expiresAt) {
this.signedToOriginalCache.delete(signedUrl);
// No need to increment removedCount as we've already counted these in the first loop
}
}
if (removedCount > 0) {
console.log(`SignedURLCache: Cleared ${removedCount} expired entries`);
}
return removedCount;
}
/**
* Save the cache to disk
*/
saveToDisk(): void {
try {
// Remove expired entries before saving
const removedCount = this.clearExpired();
// Convert Maps to serializable objects
const serializedOriginalToSigned: Record<string, { value: string; expiresAt: number }> = {};
const serializedSignedToOriginal: Record<string, { value: string; expiresAt: number }> = {};
for (const [originalUrl, entry] of this.originalToSignedCache.entries()) {
serializedOriginalToSigned[originalUrl] = {
value: entry.value,
expiresAt: entry.expiresAt
};
}
for (const [signedUrl, entry] of this.signedToOriginalCache.entries()) {
serializedSignedToOriginal[signedUrl] = {
value: entry.value,
expiresAt: entry.expiresAt
};
}
const serializedCache = {
originalToSigned: serializedOriginalToSigned,
signedToOriginal: serializedSignedToOriginal
};
// Write to file
fs.writeFileSync(
CACHE_FILE_PATH,
JSON.stringify(serializedCache),
'utf8'
);
console.log(`SignedURLCache: Saved ${this.originalToSignedCache.size} entries to disk`);
} catch (error) {
console.error('Error saving SignedURLCache to disk:', error);
}
}
/**
* Load the cache from disk
*/
loadFromDisk(): void {
try {
if (fs.existsSync(CACHE_FILE_PATH)) {
// Read from file
const data = fs.readFileSync(CACHE_FILE_PATH, 'utf8');
// Parse the data
const parsedData = JSON.parse(data) as {
originalToSigned: Record<string, { value: string; expiresAt: number }>,
signedToOriginal: Record<string, { value: string; expiresAt: number }>
};
// Only load entries that haven't expired yet
const now = Date.now();
let loadedCount = 0;
let expiredCount = 0;
// Load original to signed mappings
if (parsedData.originalToSigned) {
for (const [originalUrl, entry] of Object.entries(parsedData.originalToSigned)) {
if (now <= entry.expiresAt) {
this.originalToSignedCache.set(originalUrl, entry);
loadedCount++;
} else {
expiredCount++;
}
}
}
// Load signed to original mappings
if (parsedData.signedToOriginal) {
for (const [signedUrl, entry] of Object.entries(parsedData.signedToOriginal)) {
if (now <= entry.expiresAt) {
this.signedToOriginalCache.set(signedUrl, entry);
// Don't increment loadedCount as these are pairs of what we already counted
} else {
// Don't increment expiredCount as these are pairs of what we already counted
}
}
}
console.log(`SignedURLCache: Loaded ${loadedCount} valid entries from disk (skipped ${expiredCount} expired entries)`);
} else {
console.log('SignedURLCache: No cache file found, starting with empty cache');
}
} catch (error) {
console.error('Error loading SignedURLCache from disk:', error);
// Start with empty caches if loading fails
this.originalToSignedCache = new Map();
this.signedToOriginalCache = new Map();
}
}
}
// Create a singleton instance to be used throughout the application
const signedUrlCache = new SignedURLCache();
process.on('SIGINT', () => {
console.log('SignedURLCache: Saving cache before shutdown...');
signedUrlCache.saveToDisk();
process.exit(0);
});
process.on('SIGTERM', () => {
console.log('SignedURLCache: Saving cache before shutdown...');
signedUrlCache.saveToDisk();
process.exit(0);
});
export default signedUrlCache;

View file

@ -0,0 +1,294 @@
// SQLite Importer - Intermediate layer to avoid direct db_helper_sqlite imports in dbService
// This file re-exports everything from sqliteService so that dbService depends
// only on this module and the underlying driver can be swapped in one place.
// NOTE(review): the explicit named list below is technically redundant given the
// `export *` re-export, but it documents the intended public surface — confirm
// whether both are meant to stay.
// Re-export database connection
export { db, initDb } from 'sqliteService'
// Re-export all schema exports
export * from 'sqliteService'
// Re-export all helper methods from sqliteService
export {
  // Admin - Banner
  getBanners,
  getBannerById,
  createBanner,
  updateBanner,
  deleteBanner,
  // Admin - Complaint
  getComplaints,
  resolveComplaint,
  // Admin - Constants
  getAllConstants,
  upsertConstants,
  // Admin - Coupon
  getAllCoupons,
  getCouponById,
  invalidateCoupon,
  validateCoupon,
  getReservedCoupons,
  getUsersForCoupon,
  createCouponWithRelations,
  updateCouponWithRelations,
  generateCancellationCoupon,
  createReservedCouponWithProducts,
  createCouponForUser,
  checkUsersExist,
  checkCouponExists,
  checkReservedCouponExists,
  getOrderWithUser,
  // Admin - Order
  updateOrderNotes,
  getOrderDetails,
  updateOrderPackaged,
  updateOrderDelivered,
  updateOrderItemPackaging,
  removeDeliveryCharge,
  getSlotOrders,
  updateAddressCoords,
  getAllOrders,
  rebalanceSlots,
  cancelOrder,
  deleteOrderById,
  // Admin - Product
  getAllProducts,
  getProductById,
  deleteProduct,
  createProduct,
  updateProduct,
  checkProductExistsByName,
  checkUnitExists,
  getProductImagesById,
  createSpecialDealsForProduct,
  updateProductDeals,
  replaceProductTags,
  toggleProductOutOfStock,
  updateSlotProducts,
  getSlotProductIds,
  getSlotsProductIds,
  getAllUnits,
  getAllProductTags,
  getAllProductTagInfos,
  getProductTagInfoById,
  createProductTag,
  getProductTagById,
  updateProductTag,
  deleteProductTag,
  checkProductTagExistsByName,
  getProductReviews,
  respondToReview,
  getAllProductGroups,
  createProductGroup,
  updateProductGroup,
  deleteProductGroup,
  addProductToGroup,
  removeProductFromGroup,
  updateProductPrices,
  // Admin - Slots
  getActiveSlotsWithProducts,
  getActiveSlots,
  getSlotsAfterDate,
  getSlotByIdWithRelations,
  createSlotWithRelations,
  updateSlotWithRelations,
  deleteSlotById,
  updateSlotCapacity,
  getSlotDeliverySequence,
  updateSlotDeliverySequence,
  // Admin - Staff User
  getStaffUserByName,
  getStaffUserById,
  getAllStaff,
  getAllUsers,
  getUserWithDetails,
  updateUserSuspensionStatus,
  checkStaffUserExists,
  checkStaffRoleExists,
  createStaffUser,
  getAllRoles,
  // Admin - Store
  getAllStores,
  getStoreById,
  createStore,
  updateStore,
  deleteStore,
  // Admin - User
  createUserByMobile,
  getUserByMobile,
  getUnresolvedComplaintsCount,
  getAllUsersWithFilters,
  getOrderCountsByUserIds,
  getLastOrdersByUserIds,
  getSuspensionStatusesByUserIds,
  getUserBasicInfo,
  getUserSuspensionStatus,
  getUserOrders,
  getOrderStatusesByOrderIds,
  getItemCountsByOrderIds,
  upsertUserSuspension,
  searchUsers,
  getAllNotifCreds,
  getAllUnloggedTokens,
  getNotifTokensByUserIds,
  getUserIncidentsWithRelations,
  createUserIncident,
  // Admin - Vendor Snippets
  checkVendorSnippetExists,
  getVendorSnippetById,
  getVendorSnippetByCode,
  getAllVendorSnippets,
  createVendorSnippet,
  updateVendorSnippet,
  deleteVendorSnippet,
  getProductsByIds,
  getVendorSlotById,
  getVendorOrdersBySlotId,
  getOrderItemsByOrderIds,
  getOrderStatusByOrderIds,
  updateVendorOrderItemPackaging,
  getVendorOrders,
  // User - Address
  getUserDefaultAddress,
  getUserAddresses,
  getUserAddressById,
  clearUserDefaultAddress,
  createUserAddress,
  updateUserAddress,
  deleteUserAddress,
  hasOngoingOrdersForAddress,
  // User - Banners
  getUserActiveBanners,
  // User - Cart
  getUserCartItemsWithProducts,
  getUserProductById,
  getUserCartItemByUserProduct,
  incrementUserCartItemQuantity,
  insertUserCartItem,
  updateUserCartItemQuantity,
  deleteUserCartItem,
  clearUserCart,
  // User - Complaint
  getUserComplaints,
  createUserComplaint,
  // User - Stores
  getUserStoreSummaries,
  getUserStoreDetail,
  // User - Product
  getUserProductDetailById,
  getUserProductReviews,
  getUserProductByIdBasic,
  createUserProductReview,
  getAllProductsWithUnits,
  type ProductSummaryData,
  // User - Slots
  getUserActiveSlotsList,
  getUserProductAvailability,
  // User - Payments
  getUserPaymentOrderById,
  getUserPaymentByOrderId,
  getUserPaymentByMerchantOrderId,
  updateUserPaymentSuccess,
  updateUserOrderPaymentStatus,
  markUserPaymentFailed,
  // User - Auth
  getUserAuthByEmail,
  getUserAuthByMobile,
  getUserAuthById,
  getUserAuthCreds,
  getUserAuthDetails,
  isUserSuspended,
  createUserAuthWithCreds,
  createUserAuthWithMobile,
  upsertUserAuthPassword,
  deleteUserAuthAccount,
  // UV API helpers
  createUserWithProfile,
  getUserDetailsByUserId,
  updateUserProfile,
  // User - Coupon
  getUserActiveCouponsWithRelations,
  getUserAllCouponsWithRelations,
  getUserReservedCouponByCode,
  redeemUserReservedCoupon,
  // User - Profile
  getUserProfileById,
  getUserProfileDetailById,
  getUserWithCreds,
  getUserNotifCred,
  upsertUserNotifCred,
  deleteUserUnloggedToken,
  getUserUnloggedToken,
  upsertUserUnloggedToken,
  // User - Order
  validateAndGetUserCoupon,
  applyDiscountToUserOrder,
  getUserAddressByIdAndUser,
  getOrderProductById,
  checkUserSuspended,
  getUserSlotCapacityStatus,
  placeUserOrderTransaction,
  deleteUserCartItemsForOrder,
  recordUserCouponUsage,
  getUserOrdersWithRelations,
  getUserOrderCount,
  getUserOrderByIdWithRelations,
  getUserCouponUsageForOrder,
  getUserOrderBasic,
  cancelUserOrderTransaction,
  updateUserOrderNotes,
  getUserRecentlyDeliveredOrderIds,
  getUserProductIdsFromOrders,
  getUserProductsForRecentOrders,
  // Store Helpers
  getAllBannersForCache,
  getAllProductsForCache,
  getAllStoresForCache,
  getAllDeliverySlotsForCache,
  getAllSpecialDealsForCache,
  getAllProductTagsForCache,
  getAllTagsForCache,
  getAllTagProductMappings,
  getAllSlotsWithProductsForCache,
  getAllUserNegativityScores,
  getUserNegativityScore,
  type BannerData,
  type ProductBasicData,
  type StoreBasicData,
  type DeliverySlotData,
  type SpecialDealData,
  type ProductTagData,
  type TagBasicData,
  type TagProductMapping,
  type SlotWithProductsData,
  type UserNegativityData,
  // Automated Jobs
  toggleFlashDeliveryForItems,
  toggleKeyVal,
  getAllKeyValStore,
  // Post-order handler helpers
  getOrdersByIdsWithFullData,
  getOrderByIdWithFullData,
  type OrderWithFullData,
  type OrderWithCancellationData,
  // Common API helpers
  getSuspendedProductIds,
  getNextDeliveryDateWithCapacity,
  getStoresSummary,
  healthCheck,
  // Delete orders helper
  deleteOrdersWithRelations,
  // Seed helpers
  seedUnits,
  seedStaffRoles,
  seedStaffPermissions,
  seedRolePermissions,
  seedKeyValStore,
  type UnitSeedData,
  type RolePermissionAssignment,
  type KeyValSeedData,
  type StaffRoleName,
  type StaffPermissionName,
  // Upload URL Helpers
  createUploadUrlStatus,
  claimUploadUrlStatus,
} from 'sqliteService'

View file

@ -661,7 +661,7 @@ export const productRouter = router({
return { return {
groups: groups.map(group => ({ groups: groups.map(group => ({
...group, ...group,
products: group.memberships.map(m => ({ products: group.memberships.map((m: any) => ({
...(m.product as AdminProduct), ...(m.product as AdminProduct),
images: (m.product.images as string[]) || null, images: (m.product.images as string[]) || null,
})), })),

View file

@ -420,9 +420,9 @@ export const vendorSnippetsRouter = router({
productName: item.product.name, productName: item.product.name,
quantity: parseFloat(item.quantity), quantity: parseFloat(item.quantity),
productSize: item.product.productQuantity, productSize: item.product.productQuantity,
price: parseFloat(item.price.toString()), price: parseFloat((item.price ?? 0).toString()),
unit: item.product.unit?.shortNotation || 'unit', unit: item.product.unit?.shortNotation || 'unit',
subtotal: parseFloat(item.price.toString()) * parseFloat(item.quantity), subtotal: parseFloat((item.price ?? 0).toString()) * parseFloat(item.quantity),
is_packaged: item.is_packaged, is_packaged: item.is_packaged,
is_package_verified: item.is_package_verified, is_package_verified: item.is_package_verified,
})); }));
@ -604,9 +604,9 @@ export const vendorSnippetsRouter = router({
productId: item.productId, productId: item.productId,
productName: item.product.name, productName: item.product.name,
quantity: parseFloat(item.quantity), quantity: parseFloat(item.quantity),
price: parseFloat(item.price.toString()), price: parseFloat((item.price ?? 0).toString()),
unit: item.product.unit?.shortNotation || 'unit', unit: item.product.unit?.shortNotation || 'unit',
subtotal: parseFloat(item.price.toString()) * parseFloat(item.quantity), subtotal: parseFloat((item.price ?? 0).toString()) * parseFloat(item.quantity),
productSize: item.product.productQuantity, productSize: item.product.productQuantity,
is_packaged: item.is_packaged, is_packaged: item.is_packaged,
is_package_verified: item.is_package_verified, is_package_verified: item.is_package_verified,

View file

@ -110,7 +110,9 @@ export const authRouter = router({
createdAt: foundUser.createdAt.toISOString(), createdAt: foundUser.createdAt.toISOString(),
profileImage: profileImageSignedUrl, profileImage: profileImageSignedUrl,
bio: userDetail?.bio || null, bio: userDetail?.bio || null,
dateOfBirth: userDetail?.dateOfBirth || null, dateOfBirth: userDetail?.dateOfBirth
? new Date(userDetail.dateOfBirth as any).toISOString()
: null,
gender: userDetail?.gender || null, gender: userDetail?.gender || null,
occupation: userDetail?.occupation || null, occupation: userDetail?.occupation || null,
}, },
@ -370,7 +372,9 @@ export const authRouter = router({
createdAt: updatedUser.createdAt?.toISOString?.() || new Date().toISOString(), createdAt: updatedUser.createdAt?.toISOString?.() || new Date().toISOString(),
profileImage: profileImageSignedUrl, profileImage: profileImageSignedUrl,
bio: userDetail?.bio || null, bio: userDetail?.bio || null,
dateOfBirth: userDetail?.dateOfBirth || null, dateOfBirth: userDetail?.dateOfBirth
? new Date(userDetail.dateOfBirth as any).toISOString()
: null,
gender: userDetail?.gender || null, gender: userDetail?.gender || null,
occupation: userDetail?.occupation || null, occupation: userDetail?.occupation || null,
}, },

View file

@ -1,25 +1,29 @@
import { router, protectedProcedure } from "@/src/trpc/trpc-index"; import { router, protectedProcedure } from "@/src/trpc/trpc-index";
import { z } from "zod"; import { z } from "zod";
import { import {
validateAndGetUserCoupon,
applyDiscountToUserOrder, applyDiscountToUserOrder,
getUserAddressByIdAndUser, cancelUserOrderTransaction,
getOrderProductById,
checkUserSuspended, checkUserSuspended,
getUserSlotCapacityStatus, db,
placeUserOrderTransaction,
deleteUserCartItemsForOrder, deleteUserCartItemsForOrder,
recordUserCouponUsage, getOrderProductById,
getUserOrdersWithRelations, getUserAddressByIdAndUser,
getUserOrderCount,
getUserOrderByIdWithRelations,
getUserCouponUsageForOrder, getUserCouponUsageForOrder,
getUserOrderBasic, getUserOrderBasic,
cancelUserOrderTransaction, getUserOrderByIdWithRelations,
updateUserOrderNotes, getUserOrderCount,
getUserRecentlyDeliveredOrderIds, getUserOrdersWithRelations,
getUserProductIdsFromOrders, getUserProductIdsFromOrders,
getUserProductsForRecentOrders, getUserProductsForRecentOrders,
getUserRecentlyDeliveredOrderIds,
getUserSlotCapacityStatus,
orders,
orderItems,
orderStatus,
placeUserOrderTransaction,
recordUserCouponUsage,
updateUserOrderNotes,
validateAndGetUserCoupon,
} from "@/src/dbService"; } from "@/src/dbService";
import { getNextDeliveryDate } from "@/src/trpc/apis/common-apis/common"; import { getNextDeliveryDate } from "@/src/trpc/apis/common-apis/common";
import { scaffoldAssetUrl } from "@/src/lib/s3-client"; import { scaffoldAssetUrl } from "@/src/lib/s3-client";
@ -115,9 +119,10 @@ const placeOrderUtil = async (params: {
const orderTotal = items.reduce( const orderTotal = items.reduce(
(sum, item) => { (sum, item) => {
if (!item.product) return sum if (!item.product) return sum
const itemPrice = params.isFlash const basePrice = params.isFlash
? parseFloat((item.product.flashPrice || item.product.price).toString()) ? (item.product.flashPrice ?? item.product.price)
: parseFloat(item.product.price.toString()); : item.product.price
const itemPrice = parseFloat((basePrice ?? 0).toString())
return sum + itemPrice * item.quantity; return sum + itemPrice * item.quantity;
}, },
0 0
@ -132,9 +137,6 @@ const placeOrderUtil = async (params: {
const totalWithDelivery = totalAmount + expectedDeliveryCharge; const totalWithDelivery = totalAmount + expectedDeliveryCharge;
const { db } = await import("postgresService");
const { orders, orderItems, orderStatus } = await import("postgresService");
type OrderData = { type OrderData = {
order: Omit<typeof orders.$inferInsert, "id">; order: Omit<typeof orders.$inferInsert, "id">;
orderItems: Omit<typeof orderItems.$inferInsert, "id">[]; orderItems: Omit<typeof orderItems.$inferInsert, "id">[];
@ -148,9 +150,10 @@ const placeOrderUtil = async (params: {
const subOrderTotal = items.reduce( const subOrderTotal = items.reduce(
(sum, item) => { (sum, item) => {
if (!item.product) return sum if (!item.product) return sum
const itemPrice = params.isFlash const basePrice = params.isFlash
? parseFloat((item.product.flashPrice || item.product.price).toString()) ? (item.product.flashPrice ?? item.product.price)
: parseFloat(item.product.price.toString()); : item.product.price
const itemPrice = parseFloat((basePrice ?? 0).toString())
return sum + itemPrice * item.quantity; return sum + itemPrice * item.quantity;
}, },
0 0
@ -182,22 +185,25 @@ const placeOrderUtil = async (params: {
isFlashDelivery: params.isFlash, isFlashDelivery: params.isFlash,
}; };
const orderItemsData: Omit<typeof orderItems.$inferInsert, "id">[] = items const validItems = items.filter(
.filter((item) => item.product !== null && item.product !== undefined) (item): item is typeof item & { product: NonNullable<typeof item.product> } =>
.map( item.product !== null && item.product !== undefined
(item) => ({ )
const orderItemsData: Omit<typeof orderItems.$inferInsert, "id">[] = validItems.map(
(item) => {
const basePrice = params.isFlash
? (item.product.flashPrice ?? item.product.price)
: item.product.price
const priceString = (basePrice ?? 0).toString()
return {
orderId: 0, orderId: 0,
productId: item.productId, productId: item.productId,
quantity: item.quantity.toString(), quantity: item.quantity.toString(),
price: params.isFlash price: priceString,
? item.product!.flashPrice || item.product!.price discountedPrice: priceString,
: item.product!.price, }
discountedPrice: ( }
params.isFlash
? item.product!.flashPrice || item.product!.price
: item.product!.price
).toString(),
})
); );
const orderStatusData: Omit<typeof orderStatus.$inferInsert, "id"> = { const orderStatusData: Omit<typeof orderStatus.$inferInsert, "id"> = {

View file

@ -66,6 +66,9 @@ export const paymentRouter = router({
} }
// Create Razorpay order and insert payment record // Create Razorpay order and insert payment record
if (order.totalAmount === null) {
throw new ApiError('Order total is missing', 400)
}
const razorpayOrder = await RazorpayPaymentService.createOrder(parseInt(orderId), order.totalAmount); const razorpayOrder = await RazorpayPaymentService.createOrder(parseInt(orderId), order.totalAmount);
await RazorpayPaymentService.insertPaymentRecord(parseInt(orderId), razorpayOrder); await RazorpayPaymentService.insertPaymentRecord(parseInt(orderId), razorpayOrder);

View file

@ -59,7 +59,9 @@ export const userRouter = router({
mobile: user.mobile, mobile: user.mobile,
profileImage: profileImageSignedUrl, profileImage: profileImageSignedUrl,
bio: userDetail?.bio || null, bio: userDetail?.bio || null,
dateOfBirth: userDetail?.dateOfBirth || null, dateOfBirth: userDetail?.dateOfBirth
? new Date(userDetail.dateOfBirth as any).toISOString()
: null,
gender: userDetail?.gender || null, gender: userDetail?.gender || null,
occupation: userDetail?.occupation || null, occupation: userDetail?.occupation || null,
}, },

View file

@ -35,8 +35,10 @@
"@commonTypes/*": ["../../packages/ui/shared-types/*"], "@commonTypes/*": ["../../packages/ui/shared-types/*"],
"@packages/shared": ["../../packages/shared"], "@packages/shared": ["../../packages/shared"],
"@packages/shared/*": ["../../packages/shared/*"], "@packages/shared/*": ["../../packages/shared/*"],
"postgresService": ["../../packages/db_helper_postgres"], // "postgresService": ["../../packages/db_helper_postgres"],
"postgresService/*": ["../../packages/db_helper_postgres/*"], // "postgresService/*": ["../../packages/db_helper_postgres/*"],
"sqliteService": ["../../packages/db_helper_sqlite"],
"sqliteService/*": ["../../packages/db_helper_sqlite/*"],
"global-shared": ["../../packages/shared"], "global-shared": ["../../packages/shared"],
"global-shared/*": ["../../packages/shared/*"] "global-shared/*": ["../../packages/shared/*"]
}, },
@ -122,6 +124,5 @@
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */ "skipLibCheck": true /* Skip type checking all .d.ts files. */
}, },
"include": ["src", "types", "index.ts", "../shared-types", "../../packages/shared"] "include": ["src", "types", "index.ts", "worker.ts", "../shared-types", "../../packages/shared"]
} }

10
apps/backend/worker.ts Normal file
View file

@ -0,0 +1,10 @@
import type { ExecutionContext } from '@cloudflare/workers-types'
// Cloudflare Workers entry point: adapts the app to the Workers fetch handler.
export default {
  async fetch(request: Request, env: Record<string, string>, ctx: ExecutionContext) {
    // Expose Worker env vars globally BEFORE loading the app, so any
    // module-scope config reads see them; the dynamic import defers module
    // initialization until after this assignment.
    ;(globalThis as any).ENV = env
    const { createApp } = await import('./src/app')
    const app = createApp()
    // Assumes createApp() returns a Hono app whose fetch matches the
    // Workers (request, env, ctx) signature — TODO confirm.
    return app.fetch(request, env, ctx)
  },
}

View file

@ -0,0 +1,32 @@
name = "freshyo-backend"
main = "worker.ts"
compatibility_date = "2024-12-01"
compatibility_flags = ["nodejs_compat"]
[vars]
APP_URL = "http://localhost:4000"
JWT_SECRET = "your-jwt-secret"
S3_ACCESS_KEY_ID = ""
S3_SECRET_ACCESS_KEY = ""
S3_BUCKET_NAME = ""
S3_REGION = ""
ASSETS_DOMAIN = ""
API_CACHE_KEY = ""
CLOUDFLARE_API_TOKEN = ""
CLOUDFLARE_ZONE_ID = ""
S3_URL = ""
REDIS_URL = ""
EXPO_ACCESS_TOKEN = ""
PHONE_PE_BASE_URL = ""
PHONE_PE_CLIENT_ID = ""
PHONE_PE_CLIENT_VERSION = ""
PHONE_PE_CLIENT_SECRET = ""
PHONE_PE_MERCHANT_ID = ""
RAZORPAY_KEY = ""
RAZORPAY_SECRET = ""
OTP_SENDER_AUTH_TOKEN = ""
MIN_ORDER_VALUE = ""
DELIVERY_CHARGE = ""
TELEGRAM_BOT_TOKEN = ""
TELEGRAM_CHAT_IDS = ""
ENV_MODE = "dev"

396
bun.lock

File diff suppressed because it is too large Load diff

View file

@ -487,7 +487,7 @@ export async function getUsersForCoupon(
}) })
return { return {
users: userList.map((user: typeof users.$inferSelect) => ({ users: userList.map((user) => ({
id: user.id, id: user.id,
name: user.name || 'Unknown', name: user.name || 'Unknown',
mobile: user.mobile, mobile: user.mobile,

View file

@ -143,7 +143,7 @@ export async function getOrderDetails(orderId: number): Promise<AdminOrderDetail
let couponData = null let couponData = null
if (couponUsageData.length > 0) { if (couponUsageData.length > 0) {
let totalDiscountAmount = 0 let totalDiscountAmount = 0
const orderTotal = parseFloat(orderData.totalAmount.toString()) const orderTotal = parseFloat((orderData.totalAmount ?? '0').toString())
for (const usage of couponUsageData) { for (const usage of couponUsageData) {
let discountAmount = 0 let discountAmount = 0

View file

@ -407,7 +407,7 @@ export async function getOrdersWithRelations(
}, },
}, },
}, },
orderBy: (ordersTable: typeof orders) => [desc(ordersTable.createdAt)], orderBy: [desc(orders.createdAt)],
limit: pageSize, limit: pageSize,
offset: offset, offset: offset,
}) as Promise<OrderWithRelations[]> }) as Promise<OrderWithRelations[]>

BIN
packages/migrated.db Normal file

Binary file not shown.

View file

@ -0,0 +1,84 @@
# @packages/migrator
Database migration tool for moving data between PostgreSQL and SQLite.
## Setup
Install dependencies:
```bash
npm install
```
## Configuration
Edit `src/config.ts` directly to configure database settings:
```typescript
// PostgreSQL Configuration
export const postgresConfig = {
connectionString: 'postgresql://postgres:postgres@localhost:5432/freshyo',
ssl: false,
};
// SQLite Configuration
export const sqliteConfig = {
filename: './data/migrated.db',
};
// Migration Settings
export const migrationConfig = {
batchSize: 1000, // Rows per batch
truncateBeforeInsert: true, // Clear tables before migration
excludedTables: [], // Tables to skip
includedTables: [], // Tables to include (empty = all)
};
// Logging
export const logConfig = {
verbose: true,
logFile: './migration.log',
};
```
## Usage
### PostgreSQL to SQLite
Migrate data from PostgreSQL to SQLite:
```bash
npm run migrate:pg-to-sqlite
```
### SQLite to PostgreSQL
Migrate data from SQLite to PostgreSQL:
```bash
npm run migrate:sqlite-to-pg
```
### Full Cycle (Testing)
Run both migrations in sequence:
```bash
npm run migrate:full-cycle
```
## Features
- ✅ Automatic schema conversion between PostgreSQL and SQLite
- ✅ Batch processing for large datasets
- ✅ Type mapping between databases
- ✅ JSON/array handling
- ✅ Configurable table filtering
- ✅ Progress logging
- ✅ Transaction support
## Notes
- Arrays and JSON data are stored as TEXT in SQLite and parsed back when migrating to PostgreSQL
- Date/timestamps are stored as ISO strings in SQLite
- Foreign key constraints are enabled in SQLite
- Edit `src/config.ts` to change any settings

Binary file not shown.

View file

@ -0,0 +1,26 @@
{
"name": "@packages/migrator",
"version": "1.0.0",
"description": "Database migration tool between PostgreSQL and SQLite",
"main": "index.ts",
"types": "index.ts",
"private": true,
"scripts": {
"migrate:pg-to-sqlite": "tsx src/postgresToSqlite/index.ts",
"migrate:sqlite-to-pg": "tsx src/sqliteToPostgres/index.ts",
"migrate:full-cycle": "npm run migrate:pg-to-sqlite && npm run migrate:sqlite-to-pg"
},
"dependencies": {
"better-sqlite3": "^12.1.0",
"dotenv": "^17.2.1",
"drizzle-orm": "^0.44.5",
"pg": "^8.16.3"
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.13",
"@types/node": "^24.5.2",
"@types/pg": "^8.15.5",
"tsx": "^4.20.5",
"typescript": "^5.9.2"
}
}

View file

@ -0,0 +1,36 @@
/**
 * Database migration configuration
 * Edit this file directly to configure migration settings
 */
// PostgreSQL Configuration
// NOTE(review): this hard-codes a production-looking password and public IP
// in source control — move the connection string to an environment variable
// and rotate the credential before publishing.
export const postgresConfig = {
  connectionString: 'postgresql://postgres:meatfarmer_master_password@57.128.212.174:7447/meatfarmer',
  ssl: false as boolean | { rejectUnauthorized: boolean },
};
// SQLite Configuration (output/input file for the migration)
export const sqliteConfig = {
  filename: './data/migrated.db',
};
// Migration Settings
export const migrationConfig = {
  // Batch size for bulk inserts (to avoid memory issues)
  batchSize: 1000,
  // Enable/disable table truncation before migration
  truncateBeforeInsert: true,
  // Tables to exclude from migration
  excludedTables: [] as string[],
  // Tables to include (if empty, includes all)
  includedTables: [] as string[],
};
// Logging
export const logConfig = {
  verbose: true,
  logFile: './migration.log',
};

View file

@ -0,0 +1,288 @@
import { Client } from 'pg';
import Database from 'better-sqlite3';
import { postgresConfig, sqliteConfig, migrationConfig, logConfig } from '../config';
import * as fs from 'fs';
import * as path from 'path';
// A table plus its column metadata.
// NOTE(review): TableInfo is declared but never used in this file — confirm
// whether it can be removed.
interface TableInfo {
  tableName: string;
  columns: ColumnInfo[];
}

// One column's metadata as read from information_schema.columns.
interface ColumnInfo {
  name: string;                // column_name
  type: string;                // data_type (e.g. 'integer', 'character varying', 'ARRAY')
  isNullable: boolean;         // is_nullable === 'YES'
  defaultValue: string | null; // raw column_default expression, or null
}
/**
 * Maps a PostgreSQL data type (as reported by information_schema) to the
 * SQLite storage class used for it. Array types and anything unrecognized
 * fall back to TEXT (arrays/JSON are serialized as JSON strings).
 */
function mapPostgresTypeToSqlite(pgType: string): string {
  // Explicit array syntax (e.g. "text[]") is stored as JSON text.
  if (pgType.endsWith('[]')) {
    return 'TEXT';
  }
  switch (pgType) {
    // Integral / boolean types (booleans are stored as 0/1).
    case 'bigint':
    case 'bigserial':
    case 'boolean':
    case 'integer':
    case 'serial':
    case 'smallint':
      return 'INTEGER';
    // Floating / decimal types.
    case 'double precision':
    case 'numeric':
    case 'real':
      return 'REAL';
    // Everything else — text, char, dates/timestamps (ISO strings),
    // json/jsonb, uuid, 'ARRAY', and unknown types — becomes TEXT.
    default:
      return 'TEXT';
  }
}
/**
 * Lists all base tables in the connected database's public schema,
 * alphabetically by name.
 */
async function getPostgresTables(client: Client): Promise<string[]> {
  const sql = `
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    AND table_type = 'BASE TABLE'
    ORDER BY table_name
  `;
  const { rows } = await client.query(sql);
  const names: string[] = [];
  for (const row of rows) {
    names.push(row.table_name);
  }
  return names;
}
/**
 * Reads column metadata (name, type, nullability, default expression) for one
 * public-schema table, in ordinal position order.
 */
async function getTableColumns(client: Client, tableName: string): Promise<ColumnInfo[]> {
  const { rows } = await client.query(`
    SELECT
      column_name,
      data_type,
      is_nullable,
      column_default
    FROM information_schema.columns
    WHERE table_name = $1
    AND table_schema = 'public'
    ORDER BY ordinal_position
  `, [tableName]);
  const columns: ColumnInfo[] = [];
  for (const row of rows) {
    columns.push({
      name: row.column_name,
      type: row.data_type,
      isNullable: row.is_nullable === 'YES',
      defaultValue: row.column_default,
    });
  }
  return columns;
}
/**
 * Creates a SQLite table mirroring a PostgreSQL table's schema.
 *
 * Default values need translation, not verbatim copying:
 *  - sequence defaults (nextval(...)) are dropped — SQLite auto-increments
 *    INTEGER PRIMARY KEY columns itself;
 *  - now()/CURRENT_TIMESTAMP become DEFAULT CURRENT_TIMESTAMP;
 *  - PostgreSQL type casts (e.g. 'active'::text) are stripped, since the
 *    `::type` syntax is invalid in SQLite and would break CREATE TABLE;
 *  - other function-call defaults (e.g. gen_random_uuid()) have no SQLite
 *    equivalent and are skipped rather than emitted as broken SQL.
 */
function createSqliteTable(db: Database.Database, tableName: string, columns: ColumnInfo[]): void {
  const columnDefs = columns.map(col => {
    let def = `"${col.name}" ${mapPostgresTypeToSqlite(col.type)}`;
    if (!col.isNullable) {
      def += ' NOT NULL';
    }
    if (col.defaultValue !== null) {
      // Strip a trailing PostgreSQL cast: 'foo'::character varying -> 'foo'
      const defaultVal = col.defaultValue.replace(/::[a-zA-Z_ ]+(\[\])?$/, '');
      if (defaultVal.includes('nextval')) {
        // Skip auto-increment defaults, SQLite handles this with INTEGER PRIMARY KEY
      } else if (defaultVal === 'now()' || defaultVal.includes('CURRENT_TIMESTAMP')) {
        def += ` DEFAULT CURRENT_TIMESTAMP`;
      } else if (/^[a-zA-Z_][a-zA-Z0-9_]*\(.*\)$/.test(defaultVal)) {
        // Function-call default with no SQLite counterpart — skip it so the
        // CREATE TABLE statement stays valid.
      } else {
        def += ` DEFAULT ${defaultVal}`;
      }
    }
    return def;
  }).join(', ');
  const createSql = `CREATE TABLE IF NOT EXISTS "${tableName}" (${columnDefs})`;
  if (logConfig.verbose) {
    console.log(`Creating table: ${tableName}`);
    console.log(createSql);
  }
  db.exec(createSql);
}
/**
 * Copies every row of one PostgreSQL table into the same-named SQLite table.
 *
 * Honors migrationConfig.excludedTables / includedTables, and clears the
 * SQLite table first when truncateBeforeInsert is set.
 *
 * Value conversion (PG -> SQLite): booleans -> 0/1, Date -> ISO string,
 * arrays/objects -> JSON.stringify, bigint -> Number, everything else passed
 * through as number or coerced to String.
 *
 * @returns number of rows inserted (0 when the table was skipped or empty).
 */
async function migrateTableData(
  pgClient: Client,
  sqliteDb: Database.Database,
  tableName: string,
  columns: ColumnInfo[]
): Promise<number> {
  // Check if table should be excluded
  if (migrationConfig.excludedTables.includes(tableName)) {
    console.log(`Skipping excluded table: ${tableName}`);
    return 0;
  }
  // Check if only specific tables should be included
  if (migrationConfig.includedTables.length > 0 && !migrationConfig.includedTables.includes(tableName)) {
    console.log(`Skipping table not in include list: ${tableName}`);
    return 0;
  }
  console.log(`Migrating table: ${tableName}`);
  // Get total count first
  const countResult = await pgClient.query(`SELECT COUNT(*) FROM "${tableName}"`);
  const totalRows = parseInt(countResult.rows[0].count);
  console.log(` Total rows to migrate: ${totalRows}`);
  if (totalRows === 0) {
    console.log(` No data to migrate`);
    return 0;
  }
  // Clear existing data if configured
  if (migrationConfig.truncateBeforeInsert) {
    sqliteDb.exec(`DELETE FROM "${tableName}"`);
    console.log(` Cleared existing data`);
  }
  // Get column names
  const columnNames = columns.map(c => `"${c.name}"`).join(', ');
  const placeholders = columns.map(() => '?').join(', ');
  const insertStmt = sqliteDb.prepare(`INSERT INTO "${tableName}" (${columnNames}) VALUES (${placeholders})`);
  let migratedCount = 0;
  let offset = 0;
  // LIMIT/OFFSET pagination ordered by the first column (ORDER BY 1).
  // NOTE(review): this assumes the source table is not written to during the
  // migration and that the first column gives a stable ordering; otherwise
  // rows can be skipped or duplicated across batches — confirm the source DB
  // is quiescent while this runs.
  while (offset < totalRows) {
    const result = await pgClient.query(
      `SELECT * FROM "${tableName}" ORDER BY 1 LIMIT $1 OFFSET $2`,
      [migrationConfig.batchSize, offset]
    );
    // better-sqlite3 transaction: the whole batch commits atomically, which is
    // also far faster than per-row autocommit.
    const insertMany = sqliteDb.transaction((rows) => {
      for (const row of rows) {
        const values = columns.map(col => {
          const val = row[col.name];
          // Handle special cases
          if (val === null || val === undefined) return null;
          if (typeof val === 'boolean') return val ? 1 : 0;
          if (val instanceof Date) return val.toISOString();
          if (Array.isArray(val)) return JSON.stringify(val);
          if (typeof val === 'object') return JSON.stringify(val);
          // Ensure it's a primitive type SQLite can handle
          if (typeof val === 'number') return val;
          // NOTE: Number() may lose precision for bigints beyond 2^53.
          if (typeof val === 'bigint') return Number(val);
          return String(val);
        });
        insertStmt.run(values);
      }
    });
    insertMany(result.rows);
    migratedCount += result.rows.length;
    offset += migrationConfig.batchSize;
    // Non-verbose runs still log every time offset hits a multiple of 10000
    // (every 10th batch at the default batchSize of 1000).
    if (logConfig.verbose || offset % 10000 === 0) {
      console.log(` Progress: ${migratedCount}/${totalRows} rows`);
    }
  }
  console.log(` Completed: ${migratedCount} rows migrated`);
  return migratedCount;
}
/**
 * Orchestrates the full PostgreSQL -> SQLite migration: prepares the SQLite
 * file (creating its directory, deleting any stale database when starting
 * fresh), discovers all public-schema tables, recreates each table's schema
 * in SQLite, then copies the data table by table. Both connections are
 * closed in the finally block regardless of outcome.
 */
async function migratePostgresToSqlite(): Promise<void> {
  console.log('Starting PostgreSQL to SQLite migration...\n');
  // Ensure SQLite directory exists
  const sqliteDir = path.dirname(sqliteConfig.filename);
  if (!fs.existsSync(sqliteDir)) {
    fs.mkdirSync(sqliteDir, { recursive: true });
  }
  // Remove existing SQLite file if starting fresh
  if (migrationConfig.truncateBeforeInsert && fs.existsSync(sqliteConfig.filename)) {
    fs.unlinkSync(sqliteConfig.filename);
    console.log('Removed existing SQLite database');
  }
  // Connect to PostgreSQL
  const pgClient = new Client(postgresConfig);
  await pgClient.connect();
  console.log('Connected to PostgreSQL');
  // Connect to SQLite
  const sqliteDb = new Database(sqliteConfig.filename);
  console.log('Connected to SQLite');
  // Enable foreign keys
  sqliteDb.exec('PRAGMA foreign_keys = ON');
  try {
    // Get all tables
    const tables = await getPostgresTables(pgClient);
    console.log(`\nFound ${tables.length} tables to migrate\n`);
    let totalMigrated = 0;
    // Migrate each table
    for (const tableName of tables) {
      try {
        const columns = await getTableColumns(pgClient, tableName);
        // Create table in SQLite
        createSqliteTable(sqliteDb, tableName, columns);
        // Migrate data
        const migrated = await migrateTableData(pgClient, sqliteDb, tableName, columns);
        totalMigrated += migrated;
        console.log('');
      } catch (error) {
        console.error(`Error migrating table ${tableName}:`, error);
        // NOTE(review): verbose=true makes a single table failure abort the
        // entire run, while verbose=false skips the table and continues.
        // Confirm that coupling error policy to log verbosity is intended.
        if (logConfig.verbose) {
          throw error;
        }
      }
    }
    console.log('=================================');
    console.log('Migration completed successfully!');
    console.log(`Total rows migrated: ${totalMigrated}`);
    console.log(`SQLite database: ${sqliteConfig.filename}`);
    console.log('=================================');
  } catch (error) {
    console.error('Migration failed:', error);
    throw error;
  } finally {
    await pgClient.end();
    sqliteDb.close();
  }
}
// Run migration if called directly (CLI entry point via `tsx src/postgresToSqlite/index.ts`).
// NOTE(review): `require.main` is a CommonJS idiom; tsconfig targets NodeNext —
// this works under tsx today, but confirm behavior if run as a native ES module.
if (require.main === module) {
  migratePostgresToSqlite().catch(console.error);
}
export { migratePostgresToSqlite };

View file

@ -0,0 +1,273 @@
import { Client } from 'pg';
import Database from 'better-sqlite3';
import { postgresConfig, sqliteConfig, migrationConfig, logConfig } from '../config';
// A table plus its column metadata.
// NOTE(review): TableInfo is declared but never used in this file — confirm
// whether it can be removed.
interface TableInfo {
  tableName: string;
  columns: ColumnInfo[];
}

// One column's metadata as reported by SQLite's `PRAGMA table_info`.
interface ColumnInfo {
  name: string;                // column name
  type: string;                // declared SQLite type (INTEGER/REAL/TEXT/BLOB/NUMERIC)
  notNull: boolean;            // notnull flag === 1
  defaultValue: string | null; // dflt_value expression, or null
  primaryKey: boolean;         // derived from the pk flag (see getSqliteTableColumns)
}
/**
 * Maps a SQLite storage class back to a PostgreSQL data type. This is a
 * best-effort, lossy mapping (SQLite keeps far less type information than
 * PostgreSQL); anything unrecognized falls back to TEXT.
 */
function mapSqliteTypeToPostgres(sqliteType: string): string {
  switch (sqliteType.toUpperCase()) {
    case 'INTEGER':
      return 'INTEGER';
    case 'REAL':
      return 'DOUBLE PRECISION';
    case 'TEXT':
      return 'TEXT';
    case 'BLOB':
      return 'BYTEA';
    case 'NUMERIC':
      return 'NUMERIC';
    default:
      return 'TEXT';
  }
}
/**
 * Lists all user tables in the SQLite database (internal sqlite_* tables are
 * excluded), alphabetically by name.
 */
function getSqliteTables(db: Database.Database): string[] {
  const rows = db.prepare(`
    SELECT name FROM sqlite_master
    WHERE type = 'table'
    AND name NOT LIKE 'sqlite_%'
    ORDER BY name
  `).all() as { name: string }[];
  const tableNames: string[] = [];
  for (const { name } of rows) {
    tableNames.push(name);
  }
  return tableNames;
}
/**
 * Reads column metadata for one SQLite table via `PRAGMA table_info`.
 *
 * NOTE(review): `row.pk === 1` flags only the FIRST column of a composite
 * primary key — table_info numbers PK members 1..n, so later members come
 * back with primaryKey=false. Confirm whether this schema contains composite
 * keys; if it does, `row.pk > 0` plus table-level PK emission is needed.
 */
function getSqliteTableColumns(db: Database.Database, tableName: string): ColumnInfo[] {
  const result = db.prepare(`PRAGMA table_info("${tableName}")`).all() as any[];
  return result.map(row => ({
    name: row.name,
    type: row.type,
    notNull: row.notnull === 1,
    defaultValue: row.dflt_value,
    primaryKey: row.pk === 1,
  }));
}
/**
 * Creates a PostgreSQL table mirroring a SQLite table's schema.
 *
 * Behavior depends on migrationConfig.truncateBeforeInsert:
 *  - true: an existing table is dropped (CASCADE) and recreated;
 *  - false: an existing table is left untouched and data will be appended.
 *
 * The primary key is emitted as a table-level constraint so that several
 * flagged columns (a composite key) still produce valid SQL — repeating a
 * column-level PRIMARY KEY on multiple columns would be rejected by
 * PostgreSQL. For a single-column key this is equivalent to the column form.
 */
async function createPostgresTable(
  pgClient: Client,
  tableName: string,
  columns: ColumnInfo[]
): Promise<void> {
  // Check if table exists
  const existsResult = await pgClient.query(`
    SELECT EXISTS (
      SELECT FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_name = $1
    )
  `, [tableName]);
  const tableExists = existsResult.rows[0].exists;
  if (tableExists && migrationConfig.truncateBeforeInsert) {
    // Drop existing table to recreate
    await pgClient.query(`DROP TABLE IF EXISTS "${tableName}" CASCADE`);
    console.log(` Dropped existing table: ${tableName}`);
  } else if (tableExists) {
    console.log(` Table already exists, will append data: ${tableName}`);
    return;
  }
  const columnDefs = columns.map(col => {
    let def = `"${col.name}" ${mapSqliteTypeToPostgres(col.type)}`;
    if (col.notNull) {
      def += ' NOT NULL';
    }
    if (col.defaultValue !== null) {
      def += ` DEFAULT ${col.defaultValue}`;
    }
    return def;
  });
  // Table-level PRIMARY KEY: valid for single and composite keys alike.
  const pkColumns = columns.filter(col => col.primaryKey).map(col => `"${col.name}"`);
  if (pkColumns.length > 0) {
    columnDefs.push(`PRIMARY KEY (${pkColumns.join(', ')})`);
  }
  const createSql = `CREATE TABLE "${tableName}" (${columnDefs.join(', ')})`;
  if (logConfig.verbose) {
    console.log(`Creating table: ${tableName}`);
    console.log(createSql);
  }
  await pgClient.query(createSql);
}
/**
 * Best-effort revival of JSON arrays/objects that were stringified into
 * SQLite TEXT columns. Non-strings and nulls pass through unchanged; strings
 * that merely look like JSON but fail to parse are returned as-is.
 */
function parseValue(value: any, columnName: string): any {
  if (value === null) {
    return null;
  }
  if (typeof value !== 'string') {
    return value;
  }
  const first = value[0];
  const last = value[value.length - 1];
  const looksLikeJson =
    (first === '[' && last === ']') || (first === '{' && last === '}');
  if (!looksLikeJson) {
    return value;
  }
  try {
    return JSON.parse(value);
  } catch {
    // Not actually JSON — keep the raw string.
    return value;
  }
}
/**
 * Copies every row of one SQLite table into the same-named PostgreSQL table.
 *
 * Honors migrationConfig.excludedTables / includedTables, and clears the
 * PostgreSQL table first when truncateBeforeInsert is set. TEXT values that
 * look like JSON (stringified arrays/objects from the forward migration) are
 * revived via parseValue before insertion.
 *
 * Each batch of rows is inserted inside a single transaction: one commit per
 * batch is far faster than per-row autocommit, and a failed batch leaves no
 * partial rows behind (the error still propagates to the caller).
 *
 * @returns number of rows inserted (0 when the table was skipped or empty).
 */
async function migrateTableData(
  sqliteDb: Database.Database,
  pgClient: Client,
  tableName: string,
  columns: ColumnInfo[]
): Promise<number> {
  // Check if table should be excluded
  if (migrationConfig.excludedTables.includes(tableName)) {
    console.log(`Skipping excluded table: ${tableName}`);
    return 0;
  }
  // Check if only specific tables should be included
  if (migrationConfig.includedTables.length > 0 && !migrationConfig.includedTables.includes(tableName)) {
    console.log(`Skipping table not in include list: ${tableName}`);
    return 0;
  }
  console.log(`Migrating table: ${tableName}`);
  // Get total count
  const countResult = sqliteDb.prepare(`SELECT COUNT(*) as count FROM "${tableName}"`).get() as { count: number };
  const totalRows = countResult.count;
  console.log(` Total rows to migrate: ${totalRows}`);
  if (totalRows === 0) {
    console.log(` No data to migrate`);
    return 0;
  }
  // Clear existing data if configured (and table wasn't just created)
  if (migrationConfig.truncateBeforeInsert) {
    await pgClient.query(`DELETE FROM "${tableName}"`);
    console.log(` Cleared existing data`);
  }
  // Build the parameterized insert once; it is reused for every row.
  const columnNames = columns.map(c => `"${c.name}"`).join(', ');
  const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
  const insertSql = `INSERT INTO "${tableName}" (${columnNames}) VALUES (${placeholders})`;
  let migratedCount = 0;
  let offset = 0;
  while (offset < totalRows) {
    // ROWID gives a stable ordering for LIMIT/OFFSET pagination in SQLite.
    const rows = sqliteDb.prepare(`
      SELECT * FROM "${tableName}"
      ORDER BY ROWID
      LIMIT ? OFFSET ?
    `).all(migrationConfig.batchSize, offset) as any[];
    // Atomic per-batch insert: commit once per batch, roll back on failure.
    await pgClient.query('BEGIN');
    try {
      for (const row of rows) {
        const values = columns.map(col => parseValue(row[col.name], col.name));
        await pgClient.query(insertSql, values);
      }
      await pgClient.query('COMMIT');
    } catch (error) {
      await pgClient.query('ROLLBACK');
      throw error;
    }
    migratedCount += rows.length;
    offset += migrationConfig.batchSize;
    // Non-verbose runs still log every time offset hits a multiple of 10000.
    if (logConfig.verbose || offset % 10000 === 0) {
      console.log(` Progress: ${migratedCount}/${totalRows} rows`);
    }
  }
  console.log(` Completed: ${migratedCount} rows migrated`);
  return migratedCount;
}
/**
 * Orchestrates the full SQLite -> PostgreSQL migration: opens both databases,
 * discovers all user tables in the SQLite file, recreates each table's schema
 * in PostgreSQL, then copies the data table by table. Both connections are
 * closed in the finally block regardless of outcome.
 */
async function migrateSqliteToPostgres(): Promise<void> {
  console.log('Starting SQLite to PostgreSQL migration...\n');
  // Connect to SQLite
  const sqliteDb = new Database(sqliteConfig.filename);
  console.log('Connected to SQLite');
  // Connect to PostgreSQL
  const pgClient = new Client(postgresConfig);
  await pgClient.connect();
  console.log('Connected to PostgreSQL');
  try {
    // Get all tables
    const tables = getSqliteTables(sqliteDb);
    console.log(`\nFound ${tables.length} tables to migrate\n`);
    let totalMigrated = 0;
    // Migrate each table
    for (const tableName of tables) {
      try {
        const columns = getSqliteTableColumns(sqliteDb, tableName);
        // Create table in PostgreSQL
        await createPostgresTable(pgClient, tableName, columns);
        // Migrate data
        const migrated = await migrateTableData(sqliteDb, pgClient, tableName, columns);
        totalMigrated += migrated;
        console.log('');
      } catch (error) {
        console.error(`Error migrating table ${tableName}:`, error);
        // NOTE(review): verbose=true makes a single table failure abort the
        // entire run, while verbose=false skips the table and continues.
        // Confirm that coupling error policy to log verbosity is intended.
        if (logConfig.verbose) {
          throw error;
        }
      }
    }
    console.log('=================================');
    console.log('Migration completed successfully!');
    console.log(`Total rows migrated: ${totalMigrated}`);
    console.log(`Source: ${sqliteConfig.filename}`);
    console.log(`Target: PostgreSQL`);
    console.log('=================================');
  } catch (error) {
    console.error('Migration failed:', error);
    throw error;
  } finally {
    sqliteDb.close();
    await pgClient.end();
  }
}
// Run migration if called directly (CLI entry point via `tsx src/sqliteToPostgres/index.ts`).
// NOTE(review): `require.main` is a CommonJS idiom; tsconfig targets NodeNext —
// this works under tsx today, but confirm behavior if run as a native ES module.
if (require.main === module) {
  migrateSqliteToPostgres().catch(console.error);
}
export { migrateSqliteToPostgres };

View file

@ -0,0 +1,18 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "./dist",
"rootDir": ".",
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": ["src/**/*", "index.ts"],
"exclude": ["node_modules", "dist"]
}

View file

@ -1,183 +0,0 @@
# Progress Summary - Meat Farmer Monorepo
## User UI Development Progress
### Profile Image & User Details System
#### Database Schema Updates
- **Added `user_details` table** with fields: bio, dateOfBirth, gender, occupation, profileImage
- **Established one-to-one relationship** between users and user_details tables
- **Updated relations** in schema for proper data fetching
#### Backend API Enhancements
- **Updated auth controller** (`login`, `register`, `getProfile`) to query userDetails and return complete user information
- **Added `updateProfile` API** with comprehensive validation and transaction-based updates
- **Implemented signed URLs** for secure profile image access (3-day expiration)
- **Enhanced tRPC `getSelfData`** to include all user details with signed URLs
#### Frontend Auth System
- **Extended AuthContext** with `userDetails` state and `updateUserDetails` function
- **Modified tRPC queries** for fresh data on every app startup (no caching)
- **Added `useUserDetails` hook** for accessing detailed user information
- **Updated login/register flows** to populate complete user data
### Edit Profile Functionality
#### Page Implementation
- **Created edit-profile page** with pre-populated form values
- **Conditional form rendering** - password fields hidden in edit mode
- **Profile image upload** support with existing image display
- **Form validation** adjusted for edit vs registration modes
#### API Integration
- **Added `useUpdateProfile` hook** using React Query for profile updates
- **Multipart form data handling** for profile image uploads
- **Error handling and loading states** with proper user feedback
- **Context synchronization** after successful profile updates
### UI/UX Improvements
#### Drawer Layout Enhancements
- **Profile image display** in drawer header with fallback to person icon
- **User details integration** showing name and mobile from userDetails
- **Circular avatar styling** for profile images
#### Me Page Redesign
- **2x2 grid layout** replacing vertical button list
- **MaterialIcons integration** with relevant icons for each section:
- Orders: `shopping-bag` (blue)
- Complaints: `report-problem` (green)
- Coupons: `local-offer` (purple)
- Profile: `person` (orange)
- **Enhanced styling** with rounded corners, shadows, and better typography
- **Responsive design** with proper spacing and touch targets
### Registration Form Updates
- **Edit mode support** with `isEdit` prop
- **Conditional field rendering** (passwords/terms hidden in edit mode)
- **Initial values support** for pre-populating form data
- **Profile image handling** for both new uploads and existing images
## Files Modified
### Backend
- `apps/backend/src/db/schema.ts` - Added user_details table, vendor_snippets table, and relations definitions
- `apps/backend/src/uv-apis/auth.controller.ts` - Enhanced auth APIs with userDetails and signed URLs
- `apps/backend/src/uv-apis/auth.router.ts` - Added update profile route
- `apps/backend/src/trpc/user-apis/user.ts` - Updated getSelfData with userDetails
- `apps/backend/src/trpc/admin-apis/vendor-snippets.ts` - Complete CRUD API for vendor snippets management
### Frontend
- `apps/user-ui/src/types/auth.ts` - Added UserDetails interface and updated AuthState
- `apps/user-ui/src/contexts/AuthContext.tsx` - Enhanced with userDetails state and hooks
- `apps/user-ui/src/api-hooks/auth.api.ts` - Added updateProfile API and hook
- `apps/user-ui/components/registration-form.tsx` - Added edit mode support
- `apps/user-ui/app/(drawer)/edit-profile/index.tsx` - New edit profile page
- `apps/user-ui/app/(drawer)/_layout.tsx` - Updated drawer with profile image
- `apps/user-ui/app/(drawer)/me/index.tsx` - Redesigned with 2x2 grid and icons
### Admin UI (New Vendor Snippets Feature)
- `apps/admin-ui/app/(drawer)/vendor-snippets/index.tsx` - Main vendor snippets management page
- `apps/admin-ui/app/(drawer)/_layout.tsx` - Added vendor snippets to drawer navigation
- `apps/admin-ui/components/VendorSnippetForm.tsx` - Create/edit form with validation
- `apps/admin-ui/components/SnippetOrdersView.tsx` - Orders viewing component with matching highlights
- `apps/admin-ui/src/api-hooks/vendor-snippets.api.ts` - tRPC hooks for vendor snippets operations
- `apps/admin-ui/src/trpc-client.ts` - Updated imports for tRPC client usage
## Key Features Implemented
### User UI Features
**Complete user profile system** with detailed information storage
**Secure image handling** with signed URLs and S3 integration
**Edit profile functionality** with pre-populated forms and validation
**Beautiful UI components** with icons and modern design patterns
**Fresh data fetching** on app startup with no caching
**Transaction-safe updates** with proper error handling
**Responsive grid layouts** optimized for mobile experience
### Admin UI Features (Vendor Snippets)
**Complete vendor snippets management system** with full CRUD operations
**Advanced order matching logic** that finds orders by slot and product criteria
**Interactive forms** with slot/product selection and validation
**Orders viewing interface** with product matching highlights and statistics
**Automatic data refresh** using focus callbacks for fresh data
**Proper relations handling** in Drizzle ORM with foreign key relationships
**Error handling and loading states** throughout the user journey
**Navigation integration** with drawer menu and proper routing
## Admin UI Changes
### Vendor Snippets Management System
#### Database Schema Updates
- **Added `vendor_snippets` table** with fields: id, snippetCode, slotId, productIds, validTill, createdAt
- **Established foreign key relationship** between vendorSnippets and deliverySlotInfo tables
- **Added relations definition** (`vendorSnippetsRelations`) for proper Drizzle ORM queries
- **Array field support** for storing multiple product IDs per snippet
#### Backend API Implementation
- **Complete CRUD operations** for vendor snippets:
- `create`: Validates slot/product existence, prevents duplicate codes
- `getAll`: Returns snippets with slot relations, ordered by creation date
- `getById`: Fetches individual snippet with slot details
- `update`: Partial updates with validation and uniqueness checks
- `delete`: Soft delete by setting expiry to current time
- **`getOrdersBySnippet` API**: Advanced order matching logic that:
- Finds orders with matching delivery slots
- Filters orders containing at least one snippet product
- Returns formatted order data with product matching highlights
- Includes customer details, pricing, and delivery information
#### Admin UI Implementation
- **Vendor Snippets List Page**: Complete management interface with:
- Snippet cards showing code, slot info, product count, expiry dates
- Action buttons for View Orders, Edit, and Delete operations
- Empty state with call-to-action for first snippet creation
- Loading states and error handling
- **Create/Edit Forms**: Comprehensive form components using:
- BottomDropdown for slot selection (replacing custom dropdowns)
- MultiSelectDropdown for product selection with search
- DatePicker for expiry date management
- Form validation with real-time error feedback
- Auto-generated snippet codes for new entries
#### Orders Viewing System
- **SnippetOrdersView Component**: Dedicated screen for viewing matched orders:
- Order cards with customer details, amounts, and delivery slots
- Product lists with matching highlights (⭐ indicators)
- Summary statistics (total orders, revenue)
- Responsive design with proper spacing and typography
#### Navigation & UX Enhancements
- **Drawer Integration**: Added "Vendor Snippets" to admin navigation menu
- **Focus-based Refetching**: Implemented `useFocusCallback` for automatic data refresh when returning to the screen
- **Error Handling**: Fixed tRPC client vs hooks usage (`trpcClient` for direct queries)
- **Loading States**: Proper loading indicators and user feedback throughout the flow
#### Technical Improvements
- **Relations Fix**: Resolved Drizzle ORM error by adding missing relations definition
- **API Client Usage**: Corrected tRPC usage patterns (hooks vs direct client)
- **Type Safety**: Proper TypeScript interfaces and error handling
- **Performance**: Efficient queries with proper indexing and filtering
### Previous Admin UI Changes
#### Slot Selection Centralization
- **Moved slot dropdown** from individual pages to Manage Orders hub page
- **Updated navigation** with slotId query parameters
- **Streamlined child pages** with URL param reading
#### UI Cleanup & Improvements
- **Removed redundant elements** from drawer navigation
- **Compacted order displays** for better space utilization
- **Enhanced delivery sequences** layout
## Important Notes
- **Do not run build, compile, or migration commands** - These should be handled manually by developers
- Avoid running `npm run build`, `tsc`, `drizzle-kit generate`, or similar compilation/migration commands
- Schema changes should be committed and migrations generated manually
- **Signed URLs** are used for secure image access with 3-day expiration
- **React Query** handles all API state management with proper loading/error states
- **Vendor Snippets**: Relations definitions are critical for Drizzle ORM queries - always define relations for foreign key relationships
- **tRPC Usage**: Use `trpc` for React hooks and `trpcClient` for direct API calls outside components
- **Focus Callbacks**: Implemented for automatic data refresh when screens regain focus