Fix multiple issues: export empty datasets, reset large data, geo-location tracking, and timezone consistency

This commit is contained in:
AYUSH PANDEY 2025-11-08 23:09:49 +05:30
parent 6ba9c1c40c
commit 372d1d86f4
9 changed files with 441 additions and 80 deletions

View file

@@ -0,0 +1,109 @@
import { resetWebsite } from '../website';
// Mock the prisma client. NOTE: jest.mock calls are hoisted above the import,
// so resetWebsite sees these mocks; the factory must stay self-contained
// (jest forbids referencing out-of-scope variables from a module factory).
jest.mock('@/lib/prisma', () => ({
default: {
client: {
// Default every deleteMany to "nothing deleted"; individual tests
// override the per-call results with mockResolvedValueOnce.
eventData: {
deleteMany: jest.fn().mockResolvedValue({ count: 0 }),
},
sessionData: {
deleteMany: jest.fn().mockResolvedValue({ count: 0 }),
},
websiteEvent: {
deleteMany: jest.fn().mockResolvedValue({ count: 0 }),
},
session: {
deleteMany: jest.fn().mockResolvedValue({ count: 0 }),
},
// update resolves to a minimal website row so cloud-mode caching
// (if enabled) has something to store.
website: {
update: jest.fn().mockResolvedValue({ id: 'test-website' }),
},
},
},
}));
// Mock the redis wrapper so cloud-mode cache writes become no-ops.
jest.mock('@/lib/redis', () => ({
default: {
client: {
set: jest.fn(),
del: jest.fn(),
},
},
}));
describe('resetWebsite', () => {
  // Shorthand accessor for the mocked prisma client (resolved lazily so the
  // jest.mock factory has already run).
  const db = () => require('@/lib/prisma').default.client;

  // The four models resetWebsite purges, in deletion order.
  const models = ['eventData', 'sessionData', 'websiteEvent', 'session'] as const;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should reset website data in batches to avoid transaction timeouts', async () => {
    const websiteId = 'test-website';

    // First call reports a full batch (10000), second reports no rows left,
    // which should terminate the batching loop.
    for (const model of models) {
      db()[model].deleteMany
        .mockResolvedValueOnce({ count: 10000 })
        .mockResolvedValueOnce({ count: 0 });
    }

    await resetWebsite(websiteId);

    // Deletion is scoped to the website and capped per batch.
    expect(db().eventData.deleteMany).toHaveBeenCalledWith({
      where: { websiteId },
      take: 10000,
    });

    // The reset timestamp is recorded on the website row.
    expect(db().website.update).toHaveBeenCalledWith({
      where: { id: websiteId },
      data: {
        resetAt: expect.any(Date),
      },
    });
  });

  it('should handle single batch deletion when data is small', async () => {
    const websiteId = 'test-website';

    // No rows to delete: every model reports an empty first batch.
    for (const model of models) {
      db()[model].deleteMany.mockResolvedValueOnce({ count: 0 });
    }

    await resetWebsite(websiteId);

    // One round trip per model suffices when the first batch comes back empty.
    for (const model of models) {
      expect(db()[model].deleteMany).toHaveBeenCalledTimes(1);
    }

    // The reset timestamp is still recorded.
    expect(db().website.update).toHaveBeenCalledWith({
      where: { id: websiteId },
      data: {
        resetAt: expect.any(Date),
      },
    });
  });
});

View file

@@ -132,38 +132,41 @@ export async function updateWebsite(
}
export async function resetWebsite(websiteId: string) {
const { client, transaction } = prisma;
const { client } = prisma;
const cloudMode = !!process.env.CLOUD_MODE;
return transaction([
client.eventData.deleteMany({
where: { websiteId },
}),
client.sessionData.deleteMany({
where: { websiteId },
}),
client.websiteEvent.deleteMany({
where: { websiteId },
}),
client.session.deleteMany({
where: { websiteId },
}),
client.website.update({
where: { id: websiteId },
data: {
resetAt: new Date(),
},
}),
]).then(async data => {
if (cloudMode) {
await redis.client.set(
`website:${websiteId}`,
data.find(website => website.id),
);
}
// For large datasets, we need to delete data in chunks to avoid transaction timeouts
// We'll delete data in batches of 10000 records at a time
const deleteInBatches = async (model: any, where: any) => {
let deletedCount;
do {
const result = await model.deleteMany({
where,
take: 10000, // Limit to 10000 records per batch
});
deletedCount = result.count;
} while (deletedCount === 10000); // Continue until we delete less than 10000 records
};
return data;
// Delete data in batches to avoid transaction timeouts
await deleteInBatches(client.eventData, { websiteId });
await deleteInBatches(client.sessionData, { websiteId });
await deleteInBatches(client.websiteEvent, { websiteId });
await deleteInBatches(client.session, { websiteId });
// Update the website reset timestamp
const data = await client.website.update({
where: { id: websiteId },
data: {
resetAt: new Date(),
},
});
if (cloudMode) {
await redis.client.set(`website:${websiteId}`, data);
}
return data;
}
export async function deleteWebsite(websiteId: string) {