Mirror of https://github.com/umami-software/umami.git (synced 2026-02-13 00:55:37 +01:00)
Feat/um 202 event data new (#1841)
* Add event_data base.
* Add url_path.
* Add eventData back.
* Finish event_data relational.
* Resolve comments.
Parent: c2789d70bc
Commit: 9979672de5
27 changed files with 719 additions and 130 deletions
queries/analytics/eventData/getEventData.ts (new file, 117 additions)
@@ -0,0 +1,117 @@
import cache from 'lib/cache';
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { WebsiteEventDataMetric } from 'lib/types';

// Runs the event data query against whichever database is configured.
export async function getEventData(
  ...args: [
    websiteId: string,
    data: {
      startDate: Date;
      endDate: Date;
      eventName: string;
      urlPath?: string;
      filters: [
        {
          eventKey?: string;
          eventValue?: string | number | boolean | Date;
        },
      ];
    },
  ]
): Promise<WebsiteEventDataMetric[]> {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

async function relationalQuery(
  websiteId: string,
  data: {
    startDate: Date;
    endDate: Date;
    timeSeries?: {
      unit: string;
      timezone: string;
    };
    eventName: string;
    urlPath?: string;
    filters: [
      {
        eventKey?: string;
        eventValue?: string | number | boolean | Date;
      },
    ];
  },
) {
  const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
  const { toUuid, rawQuery, getEventDataFilterQuery, getDateQuery } = prisma;
  const params: any = [websiteId, startDate, endDate, eventName || ''];

  // event_name and url_path live on website_event, so the join is only
  // added when one of them is selected or filtered on.
  return rawQuery(
    `select
      count(*) x
      ${eventName ? `,event_name eventName` : ''}
      ${urlPath ? `,url_path urlPath` : ''}
      ${
        timeSeries
          ? `,${getDateQuery('event_data.created_at', timeSeries.unit, timeSeries.timezone)} t`
          : ''
      }
    from event_data
      ${
        eventName || urlPath
          ? 'join website_event on website_event.event_id = event_data.website_event_id'
          : ''
      }
    where event_data.website_id = $1${toUuid()}
      and event_data.created_at between $2 and $3
      ${eventName ? `and event_name = $4` : ''}
      ${getEventDataFilterQuery(filters, params)}
    ${timeSeries ? 'group by t' : ''}`,
    params,
  );
}

async function clickhouseQuery(
  websiteId: string,
  data: {
    startDate: Date;
    endDate: Date;
    timeSeries?: {
      unit: string;
      timezone: string;
    };
    eventName?: string;
    urlPath?: string;
    filters: [
      {
        eventKey?: string;
        eventValue?: string | number | boolean | Date;
      },
    ];
  },
) {
  const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
  const { rawQuery, getBetweenDates, getDateQuery, getEventDataFilterQuery } = clickhouse;
  const website = await cache.fetchWebsite(websiteId);
  const params = { websiteId, revId: website?.revId || 0, eventName };

  return rawQuery(
    `select
      count(*) x
      ${eventName ? `,event_name eventName` : ''}
      ${urlPath ? `,url_path urlPath` : ''}
      ${
        timeSeries ? `,${getDateQuery('created_at', timeSeries.unit, timeSeries.timezone)} t` : ''
      }
    from event_data
    where website_id = {websiteId:UUID}
      and rev_id = {revId:UInt32}
      ${eventName ? `and event_name = {eventName:String}` : ''}
      and ${getBetweenDates('created_at', startDate, endDate)}
      ${getEventDataFilterQuery(filters, params)}
    ${timeSeries ? 'group by t' : ''}`,
    params,
  );
}
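For orientation, a minimal sketch of how getEventData might be called; the website id, event name, and filter values below are made-up examples, not part of this commit:

// Hypothetical usage (illustration only): count 'checkout' events on /cart
// over the last 24 hours, filtered to one event_data key/value pair.
import { getEventData } from 'queries/analytics/eventData/getEventData';

async function example() {
  const endDate = new Date();
  const startDate = new Date(endDate.getTime() - 24 * 60 * 60 * 1000);

  const metrics = await getEventData('02d89813-7a72-41e1-87f0-8d668f85008b', {
    startDate,
    endDate,
    eventName: 'checkout',
    urlPath: '/cart',
    filters: [{ eventKey: 'plan', eventValue: 'pro' }],
  });
  // metrics is a WebsiteEventDataMetric[]; with no timeSeries option the
  // query returns aggregate counts rather than bucketed rows.
  return metrics;
}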
queries/analytics/eventData/saveEventData.ts (new file, 96 additions)
@@ -0,0 +1,96 @@
import { Prisma } from '@prisma/client';
import { EVENT_DATA_TYPE } from 'lib/constants';
import { uuid } from 'lib/crypto';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { flattenJSON } from 'lib/eventData';
import kafka from 'lib/kafka';
import prisma from 'lib/prisma';
import { EventData } from 'lib/types';

export async function saveEventData(args: {
  websiteId: string;
  eventId: string;
  sessionId?: string;
  revId?: number;
  urlPath?: string;
  eventName?: string;
  eventData: EventData;
  createdAt?: string;
}) {
  return runQuery({
    [PRISMA]: () => relationalQuery(args),
    [CLICKHOUSE]: () => clickhouseQuery(args),
  });
}

async function relationalQuery(data: {
  websiteId: string;
  eventId: string;
  eventData: EventData;
}): Promise<Prisma.BatchPayload> {
  const { websiteId, eventId, eventData } = data;

  const jsonKeys = flattenJSON(eventData);

  // Map each flattened key to an event_data row, storing the value in the
  // column that matches its data type (string, numeric, or date).
  const flattenedData = jsonKeys.map(a => ({
    id: uuid(),
    websiteEventId: eventId,
    websiteId,
    eventKey: a.key,
    eventStringValue:
      a.eventDataType === EVENT_DATA_TYPE.string ||
      a.eventDataType === EVENT_DATA_TYPE.boolean ||
      a.eventDataType === EVENT_DATA_TYPE.array
        ? a.value
        : null,
    eventNumericValue: a.eventDataType === EVENT_DATA_TYPE.number ? a.value : null,
    eventDateValue: a.eventDataType === EVENT_DATA_TYPE.date ? new Date(a.value) : null,
    eventDataType: a.eventDataType,
  }));

  return prisma.client.eventData.createMany({
    data: flattenedData,
  });
}

async function clickhouseQuery(data: {
  websiteId: string;
  eventId: string;
  sessionId?: string;
  revId?: number;
  urlPath?: string;
  eventName?: string;
  eventData: EventData;
  createdAt?: string;
}) {
  const { websiteId, sessionId, eventId, revId, urlPath, eventName, eventData, createdAt } = data;
  const { getDateFormat, sendMessages } = kafka;

  const jsonKeys = flattenJSON(eventData);

  // One Kafka message per flattened key; ClickHouse consumes them into event_data.
  const messages = jsonKeys.map(a => ({
    website_id: websiteId,
    session_id: sessionId,
    event_id: eventId,
    rev_id: revId,
    url_path: urlPath,
    event_name: eventName,
    event_key: a.key,
    event_string_value:
      a.eventDataType === EVENT_DATA_TYPE.string ||
      a.eventDataType === EVENT_DATA_TYPE.boolean ||
      a.eventDataType === EVENT_DATA_TYPE.array
        ? a.value
        : null,
    event_numeric_value: a.eventDataType === EVENT_DATA_TYPE.number ? a.value : null,
    event_date_value: a.eventDataType === EVENT_DATA_TYPE.date ? getDateFormat(a.value) : null,
    event_data_type: a.eventDataType,
    created_at: createdAt,
  }));

  await sendMessages(messages, 'event_data');

  return data;
}
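To illustrate the flattening step, a sketch of a saveEventData call; the ids are made up, and the exact entry shape produced by flattenJSON lives in lib/eventData and is assumed here:

// Hypothetical call (illustration only).
await saveEventData({
  websiteId: '02d89813-7a72-41e1-87f0-8d668f85008b',
  eventId: 'a1b2c3d4-0000-0000-0000-000000000000',
  eventName: 'checkout',
  urlPath: '/cart',
  eventData: { plan: 'pro', cart: { items: 3, total: 59.9 } },
});
// flattenJSON is assumed to expand nested objects into one entry per leaf
// key (e.g. 'plan', 'cart.items', 'cart.total'), each tagged with an
// EVENT_DATA_TYPE. Every entry then becomes one event_data row (Prisma)
// or one Kafka message (ClickHouse).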