Mirror of https://github.com/umami-software/umami.git
update schema and queries to implement reset_at
commit 14e4a090bb
parent 43ef6884df
15 changed files with 74 additions and 59 deletions
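
The pattern applied throughout the hunks below: each analytics query derives a reset date from the website record (resetAt, falling back to createdAt) and adds a created_at lower bound so events recorded before a reset are excluded. A minimal sketch of that idea, using simplified types and placeholder names rather than the project's actual modules:

    // Illustrative only: a simplified Website shape, not the real schema types.
    interface Website {
      createdAt: Date;
      resetAt?: Date | null;
    }

    // Data recorded before this date should be ignored by analytics queries.
    function getResetDate(website?: Website): Date | undefined {
      return website?.resetAt || website?.createdAt;
    }

    // Relational flavor: the reset date is passed as an extra positional
    // parameter, so the where clause gains a "created_at >= $2" bound ahead
    // of the usual date range.
    function buildWhereClause(): string {
      return `where website_event.website_id = $1
        and website_event.created_at >= $2
        and website_event.created_at between $3 and $4`;
    }
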
@@ -35,8 +35,11 @@ async function relationalQuery(
   const { startDate, endDate, column, filters = {}, type } = data;
   const { rawQuery, parseFilters, toUuid } = prisma;
+  const website = await cache.fetchWebsite(websiteId);
+  const resetDate = website?.resetAt || website?.createdAt;
   const params: any = [
     websiteId,
+    resetDate,
     startDate,
     endDate,
     type === 'event' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
@@ -48,8 +51,9 @@ async function relationalQuery(
     from website_event
       ${joinSession}
     where website_event.website_id = $1${toUuid()}
-      and website_event.created_at between $2 and $3
-      and event_type = $4
+      and website_event.created_at >= $2
+      and website_event.created_at between $3 and $4
+      and event_type = $5
       ${filterQuery}
     group by 1
     order by 2 desc
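
Worth noting from the two hunks above: because the reset date is spliced in as the second positional parameter, every later placeholder shifts by one, which is why the event_type comparison moves from $4 to $5. A hypothetical restatement of the mapping, with placeholder values rather than repository code:

    // Placeholder values purely to illustrate the positional mapping.
    const websiteId = 'example-website-id';
    const resetDate = new Date('2023-01-01');
    const startDate = new Date('2023-02-01');
    const endDate = new Date('2023-02-28');
    const eventTypeValue = 1;
    //                     $1         $2         $3         $4       $5
    const params: any[] = [websiteId, resetDate, startDate, endDate, eventTypeValue];
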
@@ -69,11 +73,11 @@ async function clickhouseQuery(
   },
 ) {
   const { startDate, endDate, column, filters = {}, type } = data;
-  const { rawQuery, parseFilters, getBetweenDates } = clickhouse;
+  const { rawQuery, getDateFormat, parseFilters, getBetweenDates } = clickhouse;
   const website = await cache.fetchWebsite(websiteId);
+  const resetDate = website?.resetAt || website?.createdAt;
   const params = {
     websiteId,
-    revId: website?.revId || 0,
     eventType: type === 'event' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
   };
   const { filterQuery } = parseFilters(filters, params);
@@ -82,8 +86,8 @@ async function clickhouseQuery(
     `select ${column} x, count(*) y
     from event
     where website_id = {websiteId:UUID}
-      and rev_id = {revId:UInt32}
       and event_type = {eventType:UInt32}
+      and created_at >= ${getDateFormat(resetDate)}
       and ${getBetweenDates('created_at', startDate, endDate)}
       ${filterQuery}
     group by x
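
On the ClickHouse side the reset bound is inlined into the query text rather than passed positionally: getDateFormat(resetDate) renders the date directly into the SQL, and the hunks above pair the dropped rev_id revision filter with the new created_at lower bound, e.g.:

    and created_at >= ${getDateFormat(resetDate)}
    and ${getBetweenDates('created_at', startDate, endDate)}
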
@@ -46,7 +46,9 @@ async function relationalQuery(
     sessionKey = 'session_id',
   } = data;
   const { toUuid, getDateQuery, parseFilters, rawQuery } = prisma;
-  const params: any = [websiteId, startDate, endDate];
+  const website = await cache.fetchWebsite(websiteId);
+  const resetDate = website?.resetAt || website?.createdAt;
+  const params: any = [websiteId, resetDate, startDate, endDate];
   const { filterQuery, joinSession } = parseFilters(filters, params);

   return rawQuery(
@@ -55,7 +57,8 @@ async function relationalQuery(
     from website_event
       ${joinSession}
     where website_event.website_id = $1${toUuid()}
-      and website_event.created_at between $2 and $3
+      and website_event.created_at >= $2
+      and website_event.created_at between $3 and $4
       and event_type = ${EVENT_TYPE.pageView}
       ${filterQuery}
     group by 1`,
@@ -76,9 +79,17 @@ async function clickhouseQuery(
   },
 ) {
   const { startDate, endDate, timezone = 'UTC', unit = 'day', count = '*', filters = {} } = data;
-  const { parseFilters, rawQuery, getDateStringQuery, getDateQuery, getBetweenDates } = clickhouse;
+  const {
+    parseFilters,
+    getDateFormat,
+    rawQuery,
+    getDateStringQuery,
+    getDateQuery,
+    getBetweenDates,
+  } = clickhouse;
   const website = await cache.fetchWebsite(websiteId);
-  const params = { websiteId, revId: website?.revId || 0 };
+  const resetDate = website?.resetAt || website?.createdAt;
+  const params = { websiteId };
   const { filterQuery } = parseFilters(filters, params);

   return rawQuery(
@@ -91,8 +102,8 @@ async function clickhouseQuery(
       count(${count !== '*' ? 'distinct session_id' : count}) y
     from event
     where website_id = {websiteId:UUID}
-      and rev_id = {revId:UInt32}
       and event_type = ${EVENT_TYPE.pageView}
+      and created_at >= ${getDateFormat(resetDate)}
       and ${getBetweenDates('created_at', startDate, endDate)}
       ${filterQuery}
     group by t) g
@@ -2,7 +2,6 @@ import { URL_LENGTH, EVENT_TYPE } from 'lib/constants';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import kafka from 'lib/kafka';
 import prisma from 'lib/prisma';
-import cache from 'lib/cache';
 import { uuid } from 'lib/crypto';

 export async function savePageView(args: {
@@ -104,13 +103,11 @@ async function clickhouseQuery(data: {
     ...args
   } = data;
   const { getDateFormat, sendMessage } = kafka;
-  const website = await cache.fetchWebsite(websiteId);

   const message = {
     website_id: websiteId,
     session_id: sessionId,
     event_id: uuid(),
-    rev_id: website?.revId || 0,
     country: country ? country : null,
     subdivision1: subdivision1 ? subdivision1 : null,
     subdivision2: subdivision2 ? subdivision2 : null,