Mirror of https://github.com/umami-software/umami.git (synced 2026-02-08 14:47:14 +01:00)
Fix multiple issues: tracker multiple execution, credentials configurable, exclude-hash, and other fixes
parent d590c6b078
commit 46532f0778
23 changed files with 553 additions and 30 deletions
@@ -134,19 +134,26 @@ export async function POST(request: Request) {
   // Create a session if not found
   if (!clickhouse.enabled && !cache?.sessionId) {
-    await createSession({
-      id: sessionId,
-      websiteId: sourceId,
-      browser,
-      os,
-      device,
-      screen,
-      language,
-      country,
-      region,
-      city,
-      distinctId: id,
-    });
+    try {
+      await createSession({
+        id: sessionId,
+        websiteId: sourceId,
+        browser,
+        os,
+        device,
+        screen,
+        language,
+        country,
+        region,
+        city,
+        distinctId: id,
+      });
+    } catch (e: any) {
+      // Ignore duplicate session errors
+      if (!e.message.toLowerCase().includes('unique constraint')) {
+        throw e;
+      }
+    }
   }

   // Visit info
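The try/catch makes session creation tolerant of concurrent requests racing to insert the same session row. A minimal generic sketch of the pattern, assuming a Prisma-style client whose duplicate-key errors (code P2002) carry "Unique constraint failed" in the message; insertIgnoringDuplicates is a hypothetical helper, not part of this commit:

async function insertIgnoringDuplicates<T>(insert: () => Promise<T>): Promise<T | null> {
  try {
    return await insert();
  } catch (e: any) {
    if (e.message.toLowerCase().includes('unique constraint')) {
      return null; // another request already created the row; treat as success
    }
    throw e; // anything else is a real failure
  }
}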
@@ -37,28 +37,30 @@ export async function GET(
   }

   const { type, limit, offset, search } = query;
+  // Map 'url' to 'path' for backward compatibility
+  const metricType = type === 'url' ? 'path' : type;
   const filters = await getQueryFilters(query, websiteId);

   if (search) {
-    filters[type] = `c.${search}`;
+    filters[metricType] = `c.${search}`;
   }

-  if (SESSION_COLUMNS.includes(type)) {
-    const data = await getSessionMetrics(websiteId, { type, limit, offset }, filters);
+  if (SESSION_COLUMNS.includes(metricType)) {
+    const data = await getSessionMetrics(websiteId, { type: metricType, limit, offset }, filters);

     return json(data);
   }

-  if (EVENT_COLUMNS.includes(type)) {
-    if (type === 'event') {
+  if (EVENT_COLUMNS.includes(metricType)) {
+    if (metricType === 'event') {
       filters.eventType = EVENT_TYPE.customEvent;
-      return json(await getEventMetrics(websiteId, { type, limit, offset }, filters));
+      return json(await getEventMetrics(websiteId, { type: metricType, limit, offset }, filters));
     } else {
-      return json(await getPageviewMetrics(websiteId, { type, limit, offset }, filters));
+      return json(await getPageviewMetrics(websiteId, { type: metricType, limit, offset }, filters));
     }
   }

-  if (type === 'channel') {
+  if (metricType === 'channel') {
     return json(await getChannelMetrics(websiteId, filters));
   }
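The new metricType keeps older clients that still request type=url working against the renamed path column. A standalone sketch of the same aliasing; the helper and constant names are hypothetical, as the route above inlines this logic:

const LEGACY_TYPE_ALIASES: Record<string, string> = { url: 'path' };

function resolveMetricType(type: string): string {
  return LEGACY_TYPE_ALIASES[type] ?? type;
}

// resolveMetricType('url')   -> 'path'  (legacy clients)
// resolveMetricType('event') -> 'event' (everything else passes through)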
@@ -110,4 +110,37 @@ describe('getLocation', () => {

     expect(result).toBeNull();
   });
+
+  it('should handle IPv6 addresses correctly', async () => {
+    (isLocalhost as jest.Mock).mockResolvedValue(false);
+
+    const mockMaxmindDb = {
+      get: jest.fn().mockReturnValue({
+        country: { iso_code: 'US' },
+        subdivisions: [{ iso_code: 'CA' }],
+        city: { names: { en: 'Los Angeles' } },
+      }),
+    };
+
+    (maxmind.open as jest.Mock).mockResolvedValue(mockMaxmindDb);
+
+    // Test IPv6 with port
+    const result1 = await getLocation('[2001:db8::1]:8080', new Headers(), false);
+    expect(result1).toEqual({
+      country: 'US',
+      region: 'US-CA',
+      city: 'Los Angeles',
+    });
+
+    // Test IPv6 without port
+    const result2 = await getLocation('2001:db8::1', new Headers(), false);
+    expect(result2).toEqual({
+      country: 'US',
+      region: 'US-CA',
+      city: 'Los Angeles',
+    });
+
+    // Verify that the MaxMind database is called with the cleaned IP
+    expect(mockMaxmindDb.get).toHaveBeenCalledWith('2001:db8::1');
+  });
 });
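The test asserts that '[2001:db8::1]:8080' reaches the MaxMind lookup as '2001:db8::1'. A minimal sketch of the port stripping this implies; stripPort is a hypothetical name, not necessarily the project's actual helper:

function stripPort(ip: string): string {
  // Bracketed IPv6 with optional port, e.g. [2001:db8::1]:8080
  const bracketed = ip.match(/^\[([^\]]+)\](?::\d+)?$/);
  if (bracketed) return bracketed[1];
  // IPv4 with port, e.g. 192.0.2.1:8080
  const ipv4 = ip.match(/^(\d{1,3}(?:\.\d{1,3}){3}):\d+$/);
  if (ipv4) return ipv4[1];
  // Bare IPv6 or IPv4: nothing to strip
  return ip;
}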
@@ -105,6 +105,7 @@ export const DATE_FORMATS = {

 const TIMEZONE_MAPPINGS: Record<string, string> = {
   'Asia/Calcutta': 'Asia/Kolkata',
+  'Asia/Saigon': 'Asia/Ho_Chi_Minh',
 };

 export function normalizeTimezone(timezone: string): string {
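Asia/Saigon is a deprecated tz database alias for Asia/Ho_Chi_Minh, so clients reporting the legacy name now resolve to the canonical zone. Expected behavior, assuming normalizeTimezone returns its input for unmapped names:

// normalizeTimezone('Asia/Saigon')   -> 'Asia/Ho_Chi_Minh'
// normalizeTimezone('Asia/Calcutta') -> 'Asia/Kolkata'
// normalizeTimezone('Europe/Berlin') -> 'Europe/Berlin' (unmapped, passed through)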
@@ -339,14 +340,15 @@ export function generateTimeSeries(
 ) {
   const add = DATE_FUNCTIONS[unit].add;
   const start = DATE_FUNCTIONS[unit].start;
+  const end = DATE_FUNCTIONS[unit].end;
   const fmt = DATE_FORMATS[unit];

   let current = start(minDate);
-  const end = start(maxDate);
+  const endDate = end(maxDate);

   const timeseries: string[] = [];

-  while (isBefore(current, end) || isEqual(current, end)) {
+  while (isBefore(current, endDate) || isEqual(current, endDate)) {
     timeseries.push(formatDate(current, fmt, locale));
     current = add(current, 1);
   }
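A worked example of the corrected loop bound, with illustrative dates and assuming date-fns-style startOfDay/endOfDay semantics for unit 'day':

// minDate = 2024-01-01T10:00, maxDate = 2024-01-03T05:00
// current = start(minDate) -> 2024-01-01T00:00:00.000
// endDate = end(maxDate)   -> 2024-01-03T23:59:59.999
// loop emits 2024-01-01, 2024-01-02, 2024-01-03 and stops: the bucket
// containing maxDate is included even though maxDate falls mid-period,
// and the endDate rename keeps the end() helper from DATE_FUNCTIONS usable.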
@@ -99,11 +99,11 @@ export async function getLocation(ip: string = '', headers: Headers, hasPayloadI

   if (result) {
     // Try multiple sources for country code to ensure we get a value
+    // Note: We don't use continent code as a fallback because it's not a valid ISO country code
     const country =
       result.country?.iso_code ||
       result.registered_country?.iso_code ||
-      result.represented_country?.iso_code ||
-      result.continent?.code;
+      result.represented_country?.iso_code;

     const region = result.subdivisions?.[0]?.iso_code;
     const city = result.city?.names?.en;
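Dropping result.continent?.code matters because MaxMind continent codes share the two-letter space with ISO 3166-1 country codes, so a continent fallback can silently mislabel a visitor's country. Illustrative collisions (hypothetical constant name, not from this commit):

const AMBIGUOUS_TWO_LETTER_CODES: Record<string, [string, string]> = {
  AF: ['Africa (continent)', 'Afghanistan (ISO country)'],
  AS: ['Asia (continent)', 'American Samoa (ISO country)'],
  NA: ['North America (continent)', 'Namibia (ISO country)'],
  SA: ['South America (continent)', 'Saudi Arabia (ISO country)'],
};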
@@ -140,12 +140,31 @@ export async function resetWebsite(websiteId: string) {
   const deleteInBatches = async (model: any, where: any) => {
     let deletedCount;
     do {
-      const result = await model.deleteMany({
+      // First, find records to delete (up to 10000)
+      const recordsToDelete = await model.findMany({
         where,
-        take: 10000, // Limit to 10000 records per batch
+        take: 10000,
+        select: {
+          id: true,
+        },
       });
+
+      if (recordsToDelete.length === 0) {
+        deletedCount = 0;
+        break;
+      }
+
+      // Then delete those records by their IDs
+      const result = await model.deleteMany({
+        where: {
+          id: {
+            in: recordsToDelete.map((record: any) => record.id),
+          },
+        },
+      });
+
       deletedCount = result.count;
-    } while (deletedCount === 10000); // Continue until we delete less than 10000 records
+    } while (deletedCount > 0);
   };

   // Delete data in batches to avoid transaction timeouts
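The find-then-delete rewrite appears to work around Prisma's deleteMany, which has historically accepted only a where argument, so the earlier take: 10000 did not actually limit the batch; selecting IDs first and deleting by id IN (...) restores genuinely bounded batches. Hypothetical usage; the model names are assumptions, not verified against Umami's Prisma schema:

// Clear a website's rows in bounded batches to stay under transaction timeouts.
await deleteInBatches(prisma.websiteEvent, { websiteId });
await deleteInBatches(prisma.session, { websiteId });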
@@ -54,7 +54,7 @@ async function clickhouseQuery(
   websiteId: string,
   filters: QueryFilters,
 ): Promise<{ x: string; t: string; y: number }[]> {
-  const { timezone = 'UTC', unit = 'day' } = filters;
+  const { timezone = 'utc', unit = 'day' } = filters;
   const { rawQuery, getDateSQL, parseFilters } = clickhouse;
   const { filterQuery, cohortQuery, queryParams } = parseFilters({
     ...filters,
@@ -21,7 +21,7 @@ export async function getRealtimeData(websiteId: string, filters: QueryFilters)
   const statsFilters = { ...filters, timezone };

   const [activity, pageviews, sessions] = await Promise.all([
-    getRealtimeActivity(websiteId, filters),
+    getRealtimeActivity(websiteId, statsFilters),
    getPageviewStats(websiteId, statsFilters),
    getSessionStats(websiteId, statsFilters),
  ]);