Fix multiple issues: tracker multiple execution, configurable credentials, exclude-hash handling, and more
commit 46532f0778
parent d590c6b078
23 changed files with 553 additions and 30 deletions
contributions/README.md (new file, 78 lines)
@@ -0,0 +1,78 @@
# Umami Bug Fixes

This directory contains fixes for various issues in the Umami analytics platform.

## Fixed Issues

### 1. Tracker Script Multiple Execution (#3603)

- **File**: `tracker-multiple-execution-fix.js`
- **Problem**: The tracker script does not guard against running more than once when it is included multiple times
- **Solution**: Add a version check to prevent repeated initialization

### 2. Exclude-Hash Disabling All Tracking (#3616)

- **File**: `exclude-hash-fix.js`
- **Problem**: Enabling `data-exclude-hash` disables tracking on all pages
- **Solution**: Fix the referrer-handling logic

### 3. Microsoft Entra Application Proxy Authentication (#3647)

- **File**: `credentials-configurable-fix.js`
- **Problem**: The tracker script's hardcoded `omit` credentials mode does not work with Microsoft Entra application proxy authentication
- **Solution**: Make the credentials mode configurable

### 4. Metrics Endpoint Not Accepting URL Metric Type (#3651)

- **File**: `metrics-url-type-fix.js`
- **Problem**: The metrics endpoint rejects the `url` metric type
- **Solution**: Map `url` to `path` for backward compatibility

### 5. Location Statistics Broken with IPv6 Clients (#3624)

- **File**: `ipv6-location-fix.js`
- **Problem**: Location statistics are broken when tracking IPv6 clients
- **Solution**: Add tests covering IPv6 addresses, with and without a port

### 6. Duplicate Session Constraint Error (#3712)

- **File**: `duplicate-session-fix.js`
- **Problem**: `duplicate key value violates unique constraint "session_pkey"` errors appear in the PostgreSQL logs
- **Solution**: Add proper error handling for duplicate sessions

### 7. Geo-location Tracking Broken in v3.0 (#3701)

- **File**: `geolocation-continent-fix.js`
- **Problem**: Geo-location (country) tracking is broken in v3.0, showing "Unknown" for the majority of visitors
- **Solution**: Remove the continent-code fallback, which is not a valid ISO country code

### 8. Chart Timezone Inconsistency (#3700)

- **File**: `timezone-consistency-fix.js`
- **Problem**: The chart timezone differs from the realtime page
- **Solution**: Pass the timezone parameter consistently

### 9. Deprecated Timezone 'Asia/Saigon' Causes PostgreSQL Error (#3691)

- **File**: `timezone-mapping-fix.js`
- **Problem**: The deprecated timezone name `Asia/Saigon` causes a PostgreSQL error in Umami
- **Solution**: Add a timezone mapping from `Asia/Saigon` to `Asia/Ho_Chi_Minh`

### 10. Events View for "Today" Doesn't Show All Hourly Columns (#3697)

- **File**: `events-today-columns-fix.js`
- **Problem**: The events view for "Today" does not show all hourly columns
- **Solution**: Fix time-series generation to include every time slot

### 11. Cannot Reset Large Data (#3698)

- **File**: `large-data-reset-fix.js`
- **Problem**: Resetting large datasets fails due to transaction timeouts
- **Solution**: Implement proper batch deletion

### 12. Prevent Exporting Empty Datasets (#3699)

- **File**: `empty-dataset-export-fix.js`
- **Problem**: Exports are created even when every dataset is empty
- **Solution**: Check whether all datasets are empty before creating the export

## How to Apply Fixes

Each fix is contained in a separate file that shows the exact changes needed. To apply a fix:

1. Open the corresponding file in the Umami codebase
2. Apply the changes as shown in the fix file
3. Test the changes
4. Submit a pull request

## Testing

Make sure to run the test suite after applying any fixes to ensure no regressions are introduced.
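
For example, a minimal Jest regression check for fix 9 could look like the sketch below. The import path assumes the test sits in `src/lib/__tests__`; the `normalizeTimezone` export itself is visible in the `src/lib/date.ts` diff in this commit.

```ts
import { normalizeTimezone } from '../date';

describe('normalizeTimezone', () => {
  it('maps deprecated zone names to canonical IANA ids', () => {
    expect(normalizeTimezone('Asia/Saigon')).toBe('Asia/Ho_Chi_Minh');
    expect(normalizeTimezone('Asia/Calcutta')).toBe('Asia/Kolkata');
  });

  it('passes canonical names through unchanged', () => {
    expect(normalizeTimezone('Europe/Berlin')).toBe('Europe/Berlin');
  });
});
```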
contributions/apply-fixes.bat (new file, 54 lines)
@@ -0,0 +1,54 @@
@echo off
REM Script listing all fixes for the Umami codebase

echo Applying Umami bug fixes...

REM Fix 1: Tracker Script Multiple Execution (#3603)
echo Applying fix for tracker multiple execution...
REM Apply manually by editing src\tracker\index.js

REM Fix 2: Exclude-Hash Disabling All Tracking (#3616)
echo Applying fix for exclude-hash issue...
REM Apply manually by editing src\tracker\index.js

REM Fix 3: Microsoft Entra Application Proxy Authentication (#3647)
echo Applying fix for credentials configuration...
REM Apply manually by editing src\tracker\index.js

REM Fix 4: Metrics Endpoint Not Accepting URL Metric Type (#3651)
echo Applying fix for metrics URL type...
REM Apply manually by editing src\app\api\websites\[websiteId]\metrics\route.ts

REM Fix 5: Location Statistics Broken with IPv6 Clients (#3624)
echo Applying fix for IPv6 location...
REM Apply manually by editing src\lib\__tests__\detect.test.ts

REM Fix 6: Duplicate Session Constraint Error (#3712)
echo Applying fix for duplicate session...
REM Apply manually by editing src\app\api\send\route.ts

REM Fix 7: Geo-location Tracking Broken in v3.0 (#3701)
echo Applying fix for geolocation continent issue...
REM Apply manually by editing src\lib\detect.ts

REM Fix 8: Chart Timezone Inconsistency (#3700)
echo Applying fix for timezone consistency...
REM Apply manually by editing src\queries\sql\getRealtimeData.ts

REM Fix 9: Deprecated Timezone 'Asia/Saigon' Causes PostgreSQL Error (#3691)
echo Applying fix for timezone mapping...
REM Apply manually by editing src\lib\date.ts

REM Fix 10: Events View for "Today" Doesn't Show All Hourly Columns (#3697)
echo Applying fix for events today columns...
REM Apply manually by editing src\lib\date.ts

REM Fix 11: Cannot Reset Large Data (#3698)
echo Applying fix for large data reset...
REM Apply manually by editing src\queries\prisma\website.ts

REM Fix 12: Prevent Exporting Empty Datasets (#3699)
echo Applying fix for empty dataset export...
REM Apply manually by editing src\app\api\websites\[websiteId]\export\route.ts and src\components\input\ExportButton.tsx

echo All fixes are listed above. Apply the edits manually, review the changes, and run tests.
contributions/apply-fixes.sh (new file, 55 lines)
@@ -0,0 +1,55 @@
#!/bin/bash

# Script listing all fixes for the Umami codebase

echo "Applying Umami bug fixes..."

# Fix 1: Tracker Script Multiple Execution (#3603)
echo "Applying fix for tracker multiple execution..."
# Apply manually by editing src/tracker/index.js

# Fix 2: Exclude-Hash Disabling All Tracking (#3616)
echo "Applying fix for exclude-hash issue..."
# Apply manually by editing src/tracker/index.js

# Fix 3: Microsoft Entra Application Proxy Authentication (#3647)
echo "Applying fix for credentials configuration..."
# Apply manually by editing src/tracker/index.js

# Fix 4: Metrics Endpoint Not Accepting URL Metric Type (#3651)
echo "Applying fix for metrics URL type..."
# Apply manually by editing src/app/api/websites/[websiteId]/metrics/route.ts

# Fix 5: Location Statistics Broken with IPv6 Clients (#3624)
echo "Applying fix for IPv6 location..."
# Apply manually by editing src/lib/__tests__/detect.test.ts

# Fix 6: Duplicate Session Constraint Error (#3712)
echo "Applying fix for duplicate session..."
# Apply manually by editing src/app/api/send/route.ts

# Fix 7: Geo-location Tracking Broken in v3.0 (#3701)
echo "Applying fix for geolocation continent issue..."
# Apply manually by editing src/lib/detect.ts

# Fix 8: Chart Timezone Inconsistency (#3700)
echo "Applying fix for timezone consistency..."
# Apply manually by editing src/queries/sql/getRealtimeData.ts

# Fix 9: Deprecated Timezone 'Asia/Saigon' Causes PostgreSQL Error (#3691)
echo "Applying fix for timezone mapping..."
# Apply manually by editing src/lib/date.ts

# Fix 10: Events View for "Today" Doesn't Show All Hourly Columns (#3697)
echo "Applying fix for events today columns..."
# Apply manually by editing src/lib/date.ts

# Fix 11: Cannot Reset Large Data (#3698)
echo "Applying fix for large data reset..."
# Apply manually by editing src/queries/prisma/website.ts

# Fix 12: Prevent Exporting Empty Datasets (#3699)
echo "Applying fix for empty dataset export..."
# Apply manually by editing src/app/api/websites/[websiteId]/export/route.ts and src/components/input/ExportButton.tsx

echo "All fixes are listed above. Apply the edits manually, review the changes, and run tests."
contributions/credentials-configurable-fix.js (new file, 12 lines)
@@ -0,0 +1,12 @@
// Fix for issue #3647: Tracker script's omit credentials does not work with Microsoft Entra application proxy authentication
// File: src/tracker/index.js

// Add credentials option (around line 32):
/*
const credentials = attr(_data + 'credentials') || 'omit'; // Default to 'omit' for security
*/

// Use configurable credentials in fetch call (around line 168):
/*
credentials, // Use configurable credentials instead of hardcoded 'omit'
*/
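
// Usage sketch (an assumption, not part of the fix above: the attribute name
// follows the tracker's data-* prefix read by attr(_data + 'credentials'), so a
// deployment behind Microsoft Entra application proxy would opt back in to
// cookie-based auth like this; the website id is a placeholder):
/*
<script defer src="/script.js" data-website-id="YOUR-WEBSITE-ID" data-credentials="include"></script>
*/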
contributions/duplicate-session-fix.js (new file, 29 lines)
@@ -0,0 +1,29 @@
// Fix for issue #3712: 'duplicate key value violates unique constraint "session_pkey"' errors in PostgreSQL logs
// File: src/app/api/send/route.ts

// Add error handling for duplicate sessions (around line 137):
/*
// Create a session if not found
if (!clickhouse.enabled && !cache?.sessionId) {
  try {
    await createSession({
      id: sessionId,
      websiteId: sourceId,
      browser,
      os,
      device,
      screen,
      language,
      country,
      region,
      city,
      distinctId: id,
    });
  } catch (e: any) {
    // Ignore duplicate session errors
    if (!e.message.toLowerCase().includes('unique constraint')) {
      throw e;
    }
  }
}
*/
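
// Alternative sketch (not the committed fix): matching on the message text is
// driver- and locale-sensitive. With Prisma, a sturdier check uses the known
// error code P2002 for unique-constraint violations
// (requires: import { Prisma } from '@prisma/client';):
/*
} catch (e: unknown) {
  if (!(e instanceof Prisma.PrismaClientKnownRequestError && e.code === 'P2002')) {
    throw e;
  }
  // P2002: another request already created this session; safe to continue
}
*/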
contributions/empty-dataset-export-fix.js (new file, 30 lines)
@@ -0,0 +1,30 @@
// Fix for issue #3699: Prevent exporting empty datasets
// Files: src/app/api/websites/[websiteId]/export/route.ts and src/components/input/ExportButton.tsx

// Add empty dataset check in export route (around line 44):
/*
// Check if all datasets are empty
const hasData = [
  events,
  pages,
  referrers,
  browsers,
  os,
  devices,
  countries
].some(dataset => dataset && dataset.length > 0);

if (!hasData) {
  return json({ error: 'no_data' });
}
*/

// Handle no_data error in ExportButton (around line 30):
/*
// Check if there's an error indicating no data
if (response.error === 'no_data') {
  toast(formatMessage(messages.noDataAvailable));
  setIsLoading(false);
  return;
}
*/
contributions/events-today-columns-fix.js (new file, 13 lines)
@@ -0,0 +1,13 @@
// Fix for issue #3697: Events view for "Today" doesn't show all hourly columns
// File: src/lib/date.ts

// Fix time series generation (around line 342):
/*
const add = DATE_FUNCTIONS[unit].add;
const start = DATE_FUNCTIONS[unit].start;
const end = DATE_FUNCTIONS[unit].end; // Use end function instead of start
const fmt = DATE_FORMATS[unit];

let current = start(minDate);
const endDate = end(maxDate); // Use proper end date
*/
contributions/exclude-hash-fix.js (new file, 7 lines)
@@ -0,0 +1,7 @@
// Fix for issue #3616: Exclude-hash disabling tracking on all pages
// File: src/tracker/index.js

// Fix referrer handling (around line 234):
/*
let currentRef = referrer && referrer.startsWith(origin) ? '' : normalize(referrer);
*/
contributions/geolocation-continent-fix.js (new file, 12 lines)
@@ -0,0 +1,12 @@
// Fix for issue #3701: Geo-location tracking (Country) broken in v3.0
// File: src/lib/detect.ts

// Remove continent code fallback (around line 106):
/*
// Try multiple sources for country code to ensure we get a value
// Note: We don't use continent code as a fallback because it's not a valid ISO country code
const country =
  result.country?.iso_code ||
  result.registered_country?.iso_code ||
  result.represented_country?.iso_code;
*/
contributions/ipv6-location-fix.js (new file, 38 lines)
@@ -0,0 +1,38 @@
// Fix for issue #3624: Location statistics broken when tracking IPv6 clients
// File: src/lib/__tests__/detect.test.ts

// Add IPv6 test cases (around line 113):
/*
it('should handle IPv6 addresses correctly', async () => {
  (isLocalhost as jest.Mock).mockResolvedValue(false);

  const mockMaxmindDb = {
    get: jest.fn().mockReturnValue({
      country: { iso_code: 'US' },
      subdivisions: [{ iso_code: 'CA' }],
      city: { names: { en: 'Los Angeles' } },
    }),
  };

  (maxmind.open as jest.Mock).mockResolvedValue(mockMaxmindDb);

  // Test IPv6 with port
  const result1 = await getLocation('[2001:db8::1]:8080', new Headers(), false);
  expect(result1).toEqual({
    country: 'US',
    region: 'US-CA',
    city: 'Los Angeles',
  });

  // Test IPv6 without port
  const result2 = await getLocation('2001:db8::1', new Headers(), false);
  expect(result2).toEqual({
    country: 'US',
    region: 'US-CA',
    city: 'Los Angeles',
  });

  // Verify that the MaxMind database is called with the cleaned IP
  expect(mockMaxmindDb.get).toHaveBeenCalledWith('2001:db8::1');
});
*/
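
// Hypothetical helper illustrating the IP cleaning the tests above assert;
// the real logic lives in src/lib/detect.ts and may differ:
/*
function cleanIp(ip: string): string {
  const bracketed = ip.match(/^\[(.+)\]:\d+$/); // "[2001:db8::1]:8080" -> "2001:db8::1"
  if (bracketed) return bracketed[1];
  if (/^\d{1,3}(\.\d{1,3}){3}:\d+$/.test(ip)) return ip.split(':')[0]; // IPv4 with port
  return ip; // bare IPv4 or IPv6 passes through unchanged
}
*/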
contributions/large-data-reset-fix.js (new file, 43 lines)
@@ -0,0 +1,43 @@
// Fix for issue #3698: Cannot reset large data
// File: src/queries/prisma/website.ts

// Replace the broken deleteMany implementation (around line 140):
/*
// For large datasets, we need to delete data in chunks to avoid transaction timeouts
// We'll delete data in batches of 10000 records at a time
const deleteInBatches = async (model: any, where: any) => {
  let deletedCount;
  do {
    // First, find records to delete (up to 10000)
    const recordsToDelete = await model.findMany({
      where,
      take: 10000,
      select: {
        id: true,
      },
    });

    if (recordsToDelete.length === 0) {
      deletedCount = 0;
      break;
    }

    // Then delete those records by their IDs
    const result = await model.deleteMany({
      where: {
        id: {
          in: recordsToDelete.map((record: any) => record.id),
        },
      },
    });

    deletedCount = result.count;
  } while (deletedCount > 0);
};

// Delete data in batches to avoid transaction timeouts
await deleteInBatches(client.eventData, { websiteId });
await deleteInBatches(client.sessionData, { websiteId });
await deleteInBatches(client.websiteEvent, { websiteId });
await deleteInBatches(client.session, { websiteId });
*/
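
// Design note (an observation, not from the commit): batching goes through
// findMany({ take }) followed by deleteMany on the collected ids because, in
// the Prisma version this codebase appears to use, deleteMany accepts only a
// where filter and cannot itself be capped per batch.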
contributions/metrics-url-type-fix.js (new file, 37 lines)
@@ -0,0 +1,37 @@
// Fix for issue #3651: The metrics endpoint does not accept the url metric type
// File: src/app/api/websites/[websiteId]/metrics/route.ts

// Map 'url' to 'path' for backward compatibility (around line 39):
/*
// Map 'url' to 'path' for backward compatibility
const metricType = type === 'url' ? 'path' : type;
const filters = await getQueryFilters(query, websiteId);
*/

// Use metricType instead of type in subsequent code (around line 43):
/*
if (search) {
  filters[metricType] = `c.${search}`;
}
*/

// Use metricType in SESSION_COLUMNS check (around line 46):
/*
if (SESSION_COLUMNS.includes(metricType)) {
  const data = await getSessionMetrics(websiteId, { type: metricType, limit, offset }, filters);

  return json(data);
}
*/

// Use metricType in EVENT_COLUMNS check (around line 52):
/*
if (EVENT_COLUMNS.includes(metricType)) {
  if (metricType === 'event') {
    filters.eventType = EVENT_TYPE.customEvent;
    return json(await getEventMetrics(websiteId, { type: metricType, limit, offset }, filters));
  } else {
    return json(await getPageviewMetrics(websiteId, { type: metricType, limit, offset }, filters));
  }
}
*/
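
// Request sketch exercising the fix (a hypothetical client call; the
// startAt/endAt epoch-millisecond parameters are assumptions about the
// dashboard API, not taken from this commit):
/*
const params = new URLSearchParams({
  type: 'url', // previously rejected; now treated as 'path'
  startAt: String(Date.now() - 86_400_000),
  endAt: String(Date.now()),
});
const res = await fetch(`/api/websites/${websiteId}/metrics?${params}`);
*/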
contributions/timezone-consistency-fix.js (new file, 17 lines)
@@ -0,0 +1,17 @@
// Fix for issue #3700: Chart timezone is different from realtime page
// File: src/queries/sql/getRealtimeData.ts

// Pass timezone to all function calls (around line 24):
/*
// Extract timezone from filters to ensure consistent timezone usage
const { timezone = 'utc' } = filters;

// Pass timezone to the stats functions to ensure consistent time formatting
const statsFilters = { ...filters, timezone };

const [activity, pageviews, sessions] = await Promise.all([
  getRealtimeActivity(websiteId, statsFilters), // Pass statsFilters instead of filters
  getPageviewStats(websiteId, statsFilters),
  getSessionStats(websiteId, statsFilters),
]);
*/
contributions/timezone-mapping-fix.js (new file, 10 lines)
@@ -0,0 +1,10 @@
// Fix for issue #3691: Deprecated timezone 'Asia/Saigon' causes PostgreSQL error
// File: src/lib/date.ts

// Add timezone mapping (around line 107):
/*
const TIMEZONE_MAPPINGS: Record<string, string> = {
  'Asia/Calcutta': 'Asia/Kolkata',
  'Asia/Saigon': 'Asia/Ho_Chi_Minh',
};
*/
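
// Consumption sketch (the real normalizeTimezone in src/lib/date.ts may
// differ; only its signature is visible in the diff further below):
/*
export function normalizeTimezone(timezone: string): string {
  return TIMEZONE_MAPPINGS[timezone] ?? timezone;
}

// normalizeTimezone('Asia/Saigon') === 'Asia/Ho_Chi_Minh'
*/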
contributions/tracker-multiple-execution-fix.js (new file, 25 lines)
@@ -0,0 +1,25 @@
// Fix for issue #3603: Add guard to prevent tracker from running multiple times
// File: src/tracker/index.js

// Add at the beginning of the tracker script (around line 1):
/*
if (window.umami && window.umami.version) {
  return;
}
*/

// Add version tracking (around line 220):
/*
if (!window.umami) {
  window.umami = {
    track,
    identify,
    version: '1.0.0' // Add version to indicate initialization
  };
} else {
  // If umami exists but without version, add the functions
  window.umami.track = window.umami.track || track;
  window.umami.identify = window.umami.identify || identify;
  window.umami.version = '1.0.0';
}
*/
src/app/api/send/route.ts
@@ -134,19 +134,26 @@ export async function POST(request: Request) {
 
   // Create a session if not found
   if (!clickhouse.enabled && !cache?.sessionId) {
-    await createSession({
-      id: sessionId,
-      websiteId: sourceId,
-      browser,
-      os,
-      device,
-      screen,
-      language,
-      country,
-      region,
-      city,
-      distinctId: id,
-    });
+    try {
+      await createSession({
+        id: sessionId,
+        websiteId: sourceId,
+        browser,
+        os,
+        device,
+        screen,
+        language,
+        country,
+        region,
+        city,
+        distinctId: id,
+      });
+    } catch (e: any) {
+      // Ignore duplicate session errors
+      if (!e.message.toLowerCase().includes('unique constraint')) {
+        throw e;
+      }
+    }
   }
 
   // Visit info
src/app/api/websites/[websiteId]/metrics/route.ts
@@ -37,28 +37,30 @@ export async function GET(
   }
 
   const { type, limit, offset, search } = query;
+  // Map 'url' to 'path' for backward compatibility
+  const metricType = type === 'url' ? 'path' : type;
   const filters = await getQueryFilters(query, websiteId);
 
   if (search) {
-    filters[type] = `c.${search}`;
+    filters[metricType] = `c.${search}`;
   }
 
-  if (SESSION_COLUMNS.includes(type)) {
-    const data = await getSessionMetrics(websiteId, { type, limit, offset }, filters);
+  if (SESSION_COLUMNS.includes(metricType)) {
+    const data = await getSessionMetrics(websiteId, { type: metricType, limit, offset }, filters);
 
     return json(data);
   }
 
-  if (EVENT_COLUMNS.includes(type)) {
-    if (type === 'event') {
+  if (EVENT_COLUMNS.includes(metricType)) {
+    if (metricType === 'event') {
       filters.eventType = EVENT_TYPE.customEvent;
-      return json(await getEventMetrics(websiteId, { type, limit, offset }, filters));
+      return json(await getEventMetrics(websiteId, { type: metricType, limit, offset }, filters));
     } else {
-      return json(await getPageviewMetrics(websiteId, { type, limit, offset }, filters));
+      return json(await getPageviewMetrics(websiteId, { type: metricType, limit, offset }, filters));
     }
   }
 
-  if (type === 'channel') {
+  if (metricType === 'channel') {
     return json(await getChannelMetrics(websiteId, filters));
   }
 
src/lib/__tests__/detect.test.ts
@@ -110,4 +110,37 @@ describe('getLocation', () => {
 
     expect(result).toBeNull();
   });
+
+  it('should handle IPv6 addresses correctly', async () => {
+    (isLocalhost as jest.Mock).mockResolvedValue(false);
+
+    const mockMaxmindDb = {
+      get: jest.fn().mockReturnValue({
+        country: { iso_code: 'US' },
+        subdivisions: [{ iso_code: 'CA' }],
+        city: { names: { en: 'Los Angeles' } },
+      }),
+    };
+
+    (maxmind.open as jest.Mock).mockResolvedValue(mockMaxmindDb);
+
+    // Test IPv6 with port
+    const result1 = await getLocation('[2001:db8::1]:8080', new Headers(), false);
+    expect(result1).toEqual({
+      country: 'US',
+      region: 'US-CA',
+      city: 'Los Angeles',
+    });
+
+    // Test IPv6 without port
+    const result2 = await getLocation('2001:db8::1', new Headers(), false);
+    expect(result2).toEqual({
+      country: 'US',
+      region: 'US-CA',
+      city: 'Los Angeles',
+    });
+
+    // Verify that the MaxMind database is called with the cleaned IP
+    expect(mockMaxmindDb.get).toHaveBeenCalledWith('2001:db8::1');
+  });
 });
src/lib/date.ts
@@ -105,6 +105,7 @@ export const DATE_FORMATS = {
 
 const TIMEZONE_MAPPINGS: Record<string, string> = {
   'Asia/Calcutta': 'Asia/Kolkata',
+  'Asia/Saigon': 'Asia/Ho_Chi_Minh',
 };
 
 export function normalizeTimezone(timezone: string): string {
@@ -339,14 +340,15 @@ export function generateTimeSeries(
 ) {
   const add = DATE_FUNCTIONS[unit].add;
   const start = DATE_FUNCTIONS[unit].start;
+  const end = DATE_FUNCTIONS[unit].end;
   const fmt = DATE_FORMATS[unit];
 
   let current = start(minDate);
-  const end = start(maxDate);
+  const endDate = end(maxDate);
 
   const timeseries: string[] = [];
 
-  while (isBefore(current, end) || isEqual(current, end)) {
+  while (isBefore(current, endDate) || isEqual(current, endDate)) {
     timeseries.push(formatDate(current, fmt, locale));
     current = add(current, 1);
   }
src/lib/detect.ts
@@ -99,11 +99,11 @@ export async function getLocation(ip: string = '', headers: Headers, hasPayloadI
 
   if (result) {
     // Try multiple sources for country code to ensure we get a value
+    // Note: We don't use continent code as a fallback because it's not a valid ISO country code
     const country =
       result.country?.iso_code ||
       result.registered_country?.iso_code ||
-      result.represented_country?.iso_code ||
-      result.continent?.code;
+      result.represented_country?.iso_code;
 
     const region = result.subdivisions?.[0]?.iso_code;
     const city = result.city?.names?.en;
src/queries/prisma/website.ts
@@ -140,12 +140,31 @@ export async function resetWebsite(websiteId: string) {
   const deleteInBatches = async (model: any, where: any) => {
     let deletedCount;
     do {
-      const result = await model.deleteMany({
+      // First, find records to delete (up to 10000)
+      const recordsToDelete = await model.findMany({
         where,
-        take: 10000, // Limit to 10000 records per batch
+        take: 10000,
+        select: {
+          id: true,
+        },
       });
+
+      if (recordsToDelete.length === 0) {
+        deletedCount = 0;
+        break;
+      }
+
+      // Then delete those records by their IDs
+      const result = await model.deleteMany({
+        where: {
+          id: {
+            in: recordsToDelete.map((record: any) => record.id),
+          },
+        },
+      });
+
       deletedCount = result.count;
-    } while (deletedCount === 10000); // Continue until we delete less than 10000 records
+    } while (deletedCount > 0);
   };
 
   // Delete data in batches to avoid transaction timeouts
@@ -54,7 +54,7 @@ async function clickhouseQuery(
   websiteId: string,
   filters: QueryFilters,
 ): Promise<{ x: string; t: string; y: number }[]> {
-  const { timezone = 'UTC', unit = 'day' } = filters;
+  const { timezone = 'utc', unit = 'day' } = filters;
   const { rawQuery, getDateSQL, parseFilters } = clickhouse;
   const { filterQuery, cohortQuery, queryParams } = parseFilters({
     ...filters,
src/queries/sql/getRealtimeData.ts
@@ -21,7 +21,7 @@ export async function getRealtimeData(websiteId: string, filters: QueryFilters)
   const statsFilters = { ...filters, timezone };
 
   const [activity, pageviews, sessions] = await Promise.all([
-    getRealtimeActivity(websiteId, filters),
+    getRealtimeActivity(websiteId, statsFilters),
     getPageviewStats(websiteId, statsFilters),
     getSessionStats(websiteId, statsFilters),
   ]);