Mirror of https://github.com/umami-software/umami.git (synced 2026-02-11 16:17:13 +01:00)
Feat/um 145 re add events (#1733)
* Re-add events. Fix event_type queries.
* Re-add eventData
* revert CSS.
parent 796c65fa29
commit fcb1767eb1
13 changed files with 222 additions and 167 deletions
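The hunks below move these queries from umami's old empty-event_name convention for pageviews to an explicit numeric event_type discriminator. A minimal sketch of the constant the queries appear to rely on, as an assumption: pageView = 1 matches the literal "event_type = 1" in the ClickHouse hunks, while customEvent = 2 is only inferred for illustration.

    // Hypothetical sketch of the EVENT_TYPE constant referenced by the queries below.
    // pageView = 1 is consistent with the "event_type = 1" literals in the diff;
    // customEvent = 2 is an assumption.
    export const EVENT_TYPE = {
      pageView: 1,
      customEvent: 2,
    } as const;

    export type EventTypeKey = keyof typeof EVENT_TYPE; // 'pageView' | 'customEvent'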
@@ -12,8 +12,8 @@ export async function getPageviewMetrics(
       startDate: Date;
       endDate: Date;
       column: Prisma.WebsiteEventScalarFieldEnum | Prisma.SessionScalarFieldEnum;
-      table: string;
       filters: object;
+      type: string;
     },
   ]
 ) {
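With type on the wrapper's data argument, one entry point serves both pageview and custom-event metrics. A hedged, caller-side sketch follows; the function is stubbed so the example stands alone, and the column values 'url' and 'event_name' are assumptions for illustration.

    // Stub of the updated signature so the example type-checks on its own;
    // the real function's argument types are richer than this sketch.
    type MetricsQuery = {
      startDate: Date;
      endDate: Date;
      column: string;
      filters: object;
      type: string;
    };
    declare function getPageviewMetrics(websiteId: string, data: MetricsQuery): Promise<unknown>;

    async function example(websiteId: string) {
      const range = { startDate: new Date('2022-11-01'), endDate: new Date('2022-12-01') };
      // Pageview metrics grouped by URL.
      const pageviews = await getPageviewMetrics(websiteId, {
        ...range,
        column: 'url',
        filters: {},
        type: 'pageview',
      });
      // Custom-event metrics grouped by event name, selected via type: 'event'.
      const events = await getPageviewMetrics(websiteId, {
        ...range,
        column: 'event_name',
        filters: {},
        type: 'event',
      });
      return { pageviews, events };
    }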
@@ -30,11 +30,16 @@ async function relationalQuery(
     endDate: Date;
     column: Prisma.WebsiteEventScalarFieldEnum | Prisma.SessionScalarFieldEnum;
     filters: object;
+    type: string;
   },
 ) {
-  const { startDate, endDate, column, filters = {} } = data;
+  const { startDate, endDate, column, filters = {}, type } = data;
   const { rawQuery, parseFilters } = prisma;
-  const params = [startDate, endDate];
+  const params: any = [
+    startDate,
+    endDate,
+    type === 'event' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
+  ];
   const { filterQuery, joinSession } = parseFilters(filters, params);
 
   return rawQuery(
@@ -43,7 +48,7 @@ async function relationalQuery(
       ${joinSession}
     where website_id='${websiteId}'
       and website_event.created_at between $1 and $2
-      and event_type = ${EVENT_TYPE.pageView}
+      and event_type = $3
       ${filterQuery}
     group by 1
     order by 2 desc`,
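The hardcoded ${EVENT_TYPE.pageView} becomes a third bound parameter, so the same SQL string serves both metric types. A minimal sketch of how the params array lines up with the $n placeholders, assuming positional binding in array order (buildParams is a hypothetical name, and the EVENT_TYPE values are assumed):

    // Hypothetical helper mirroring the params construction above.
    const EVENT_TYPE = { pageView: 1, customEvent: 2 } as const;

    function buildParams(startDate: Date, endDate: Date, type: string): unknown[] {
      return [
        startDate,                                                        // $1
        endDate,                                                          // $2
        type === 'event' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,  // $3 -> event_type
      ];
    }

Binding the discriminator instead of interpolating it keeps a single prepared statement for pageviews and custom events alike.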
@@ -58,12 +63,17 @@ async function clickhouseQuery(
     endDate: Date;
     column: Prisma.WebsiteEventScalarFieldEnum | Prisma.SessionScalarFieldEnum;
     filters: object;
+    type: string;
   },
 ) {
-  const { startDate, endDate, column, filters = {} } = data;
+  const { startDate, endDate, column, filters = {}, type } = data;
   const { rawQuery, parseFilters, getBetweenDates } = clickhouse;
   const website = await cache.fetchWebsite(websiteId);
-  const params = [websiteId, website?.revId || 0, EVENT_TYPE.pageView];
+  const params = [
+    websiteId,
+    website?.revId || 0,
+    type === 'event' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
+  ];
   const { filterQuery } = parseFilters(filters, params);
 
   return rawQuery(
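On the ClickHouse side the params array grows the same way: $1 carries website_id, $2 carries rev_id, and the third slot carries the resolved event type. The matching event_type predicate is not part of this excerpt, so mapping it to $3 is an assumption. A small sketch of the ordering:

    // Hypothetical mirror of the ClickHouse params above; the $3 mapping is
    // assumed since the corresponding WHERE clause is not shown in this excerpt.
    const EVENT_TYPE = { pageView: 1, customEvent: 2 } as const;

    function buildClickhouseParams(websiteId: string, revId: number | undefined, type: string) {
      return [
        websiteId,                                                        // $1 -> website_id
        revId || 0,                                                       // $2 -> rev_id
        type === 'event' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,  // $3 -> event_type
      ];
    }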
@@ -32,7 +32,7 @@ async function clickhouseQuery(websites, startAt) {
        created_at,
        url
      from event
-     where event_name = ''
+     where event_type = 1
        and ${
          websites && websites.length > 0
            ? `website_id in (${getCommaSeparatedStringFormat(websites)})`
@@ -58,7 +58,7 @@ async function clickhouseQuery(
       from event as x
       where website_id = $1
         and rev_id = $2
-        and event_name = ''
+        and event_type = 1
         and ${getBetweenDates('created_at', startDate, endDate)}
         ${filterQuery}
       group by x
@@ -67,7 +67,7 @@ async function clickhouseQuery(
         min(created_at) min_time,
         max(created_at) max_time
       from event
-      where event_name = ''
+      where event_type = 1
        and website_id = $1
        and rev_id = $2
        and ${getBetweenDates('created_at', startDate, endDate)}
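The last three hunks make the same swap in other ClickHouse queries: pageviews had been selected by the empty-event_name convention, and with custom events re-added they are now selected by the explicit event_type discriminator instead. A short sketch of the contrast; pageView = 1 matches the literal in the diff, customEvent = 2 is assumed, and the variable names are illustrative only.

    const EVENT_TYPE = { pageView: 1, customEvent: 2 } as const;

    // Old: pageviews were the rows with a sentinel empty name.
    const oldPageviewFilter = `where event_name = ''`;

    // New: pageviews are the rows marked by the explicit event_type column.
    const newPageviewFilter = `where event_type = ${EVENT_TYPE.pageView}`;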