Merge branch 'dev' into analytics

commit 25d002cefc
Author: Mike Cao
Date: 2024-08-17 13:26:42 -07:00
26 changed files with 385 additions and 44 deletions

View file

@@ -135,10 +135,10 @@ async function clickhouseQuery(data: {
     city,
     ...args
   } = data;
-  const { insert } = clickhouse;
+  const { insert, getUTCString } = clickhouse;
   const { sendMessage } = kafka;
   const eventId = uuid();
-  const createdAt = new Date().toISOString();
+  const createdAt = getUTCString();
   const message = {
     ...args,
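
This hunk (and the saveSessionData hunk further down) replaces `new Date().toISOString()` with a shared `getUTCString()` helper, since ClickHouse DateTime columns expect `YYYY-MM-DD HH:mm:ss` rather than an ISO 8601 string with `T`, `Z`, and milliseconds. The helper's implementation is not part of this diff; a minimal sketch, assuming a dayjs-based version, might look like:

```ts
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);

// Hypothetical implementation -- only the call sites appear in this commit.
// Formats a timestamp as 'YYYY-MM-DD HH:mm:ss' in UTC, the string format
// ClickHouse DateTime columns accept on insert.
export function getUTCString(date?: Date | string | number): string {
  return dayjs(date).utc().format('YYYY-MM-DD HH:mm:ss');
}
```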

View file

@@ -23,6 +23,7 @@ async function relationalQuery(
       websiteId,
       createdAt: { gte: startDate, lte: endDate },
     },
+    take: 500,
   });
 }
@@ -37,8 +38,6 @@ async function clickhouseQuery(
   return rawQuery(
     `
     select
-      session_id as id,
-      website_id as websiteId,
       created_at as createdAt,
       url_path as urlPath,
       url_query as urlQuery,
@@ -52,6 +51,7 @@ async function clickhouseQuery(
       and session_id = {sessionId:UUID}
       and created_at between {startDate:DateTime64} and {endDate:DateTime64}
     order by created_at desc
+    limit 500
     `,
     { websiteId, sessionId, startDate, endDate },
   );

View file

@@ -19,7 +19,7 @@ async function relationalQuery(websiteId: string, sessionId: string) {
 }
 
 async function clickhouseQuery(websiteId: string, sessionId: string) {
-  const { rawQuery } = clickhouse;
+  const { rawQuery, getDateStringSQL } = clickhouse;
 
   return rawQuery(
     `
@@ -34,8 +34,8 @@ async function clickhouseQuery(websiteId: string, sessionId: string) {
       country,
       subdivision1,
       city,
-      min(min_time) as firstAt,
-      max(max_time) as lastAt,
+      ${getDateStringSQL('min(min_time)')} as firstAt,
+      ${getDateStringSQL('max(max_time)')} as lastAt,
       uniq(visit_id) visits,
       sum(views) as views,
       sum(events) as events,
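
Both this query and the getWebsiteSessions one below wrap the `min(min_time)`/`max(max_time)` aggregates in `getDateStringSQL()` so that firstAt/lastAt come back as formatted date strings rather than raw DateTime values. The helper itself is not shown in the diff; a plausible sketch, assuming it simply wraps ClickHouse's formatDateTime():

```ts
// Hypothetical sketch -- the real helper is not part of this diff. It
// interpolates a SQL expression into formatDateTime() so the aggregate is
// returned as a 'YYYY-MM-DD HH:mm:ss' string.
export function getDateStringSQL(field: string): string {
  return `formatDateTime(${field}, '%Y-%m-%d %H:%i:%S')`;
}
```

With this, `${getDateStringSQL('min(min_time)')}` expands to `formatDateTime(min(min_time), '%Y-%m-%d %H:%i:%S')` inside the query template.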

View file

@@ -24,7 +24,7 @@ async function relationalQuery(websiteId: string, filters: QueryFilters, pageParams?: PageParams) {
 }
 
 async function clickhouseQuery(websiteId: string, filters: QueryFilters, pageParams?: PageParams) {
-  const { pagedQuery, parseFilters } = clickhouse;
+  const { pagedQuery, parseFilters, getDateStringSQL } = clickhouse;
   const { params, dateQuery, filterQuery } = await parseFilters(websiteId, filters);
 
   return pagedQuery(
@@ -42,8 +42,8 @@ async function clickhouseQuery(websiteId: string, filters: QueryFilters, pageParams?: PageParams) {
       country,
       subdivision1,
       city,
-      min(min_time) as firstAt,
-      max(max_time) as lastAt,
+      ${getDateStringSQL('min(min_time)')} as firstAt,
+      ${getDateStringSQL('max(max_time)')} as lastAt,
       uniq(visit_id) as visits,
       sumIf(views, event_type = 1) as views
     from website_event_stats_hourly

View file

@@ -0,0 +1,69 @@
+import prisma from 'lib/prisma';
+import clickhouse from 'lib/clickhouse';
+import { runQuery, PRISMA, CLICKHOUSE } from 'lib/db';
+import { QueryFilters } from 'lib/types';
+
+export async function getWebsiteSessionsWeekly(
+  ...args: [websiteId: string, filters?: QueryFilters]
+) {
+  return runQuery({
+    [PRISMA]: () => relationalQuery(...args),
+    [CLICKHOUSE]: () => clickhouseQuery(...args),
+  });
+}
+
+async function relationalQuery(websiteId: string, filters: QueryFilters) {
+  const { rawQuery, getDateWeeklySQL, parseFilters } = prisma;
+  const { params } = await parseFilters(websiteId, filters);
+
+  return rawQuery(
+    `
+    select
+      ${getDateWeeklySQL('created_at')} as time,
+      count(distinct session_id) as value
+    from website_event
+    where website_id = {{websiteId::uuid}}
+      and created_at between {{startDate}} and {{endDate}}
+    group by time
+    order by 2
+    `,
+    params,
+  ).then(formatResults);
+}
+
+async function clickhouseQuery(websiteId: string, filters: QueryFilters) {
+  const { rawQuery } = clickhouse;
+  const { startDate, endDate } = filters;
+
+  return rawQuery(
+    `
+    select
+      formatDateTime(created_at, '%w:%H') as time,
+      count(distinct session_id) as value
+    from website_event_stats_hourly
+    where website_id = {websiteId:UUID}
+      and created_at between {startDate:DateTime64} and {endDate:DateTime64}
+    group by time
+    order by time
+    `,
+    { websiteId, startDate, endDate },
+  ).then(formatResults);
+}
+
+function formatResults(data: any) {
+  const days = [];
+
+  for (let i = 0; i < 7; i++) {
+    days.push([]);
+
+    for (let j = 0; j < 24; j++) {
+      days[i].push(
+        Number(
+          data.find(({ time }) => time === `${i}:${j.toString().padStart(2, '0')}`)?.value || 0,
+        ),
+      );
+    }
+  }
+
+  return days;
+}
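
`formatResults` pivots the flat `time`/`value` rows into a 7×24 matrix indexed by day of week and hour; ClickHouse's `%w` numbers weekdays 0-6 starting from Sunday, and buckets with no rows default to 0. A small illustration with hypothetical input rows:

```ts
// Two hypothetical buckets returned by either query above.
const rows = [
  { time: '0:09', value: '12' }, // Sunday, 09:00 -- 12 distinct sessions
  { time: '3:14', value: '7' },  // Wednesday, 14:00 -- 7 distinct sessions
];

const matrix = formatResults(rows);

matrix[0][9];  // 12
matrix[3][14]; // 7
matrix[6][23]; // 0 -- no row for that bucket, so it falls back to 0
```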

View file

@@ -80,9 +80,9 @@ async function clickhouseQuery(data: {
 }) {
   const { websiteId, sessionId, sessionData } = data;
-  const { insert } = clickhouse;
+  const { insert, getUTCString } = clickhouse;
   const { sendMessages } = kafka;
-  const createdAt = new Date().toISOString();
+  const createdAt = getUTCString();
 
   const jsonKeys = flattenJSON(sessionData);

View file

@@ -26,6 +26,7 @@ export * from './analytics/sessions/getSessionDataProperties';
 export * from './analytics/sessions/getSessionDataValues';
 export * from './analytics/sessions/getSessionMetrics';
 export * from './analytics/sessions/getWebsiteSessions';
+export * from './analytics/sessions/getWebsiteSessionsWeekly';
 export * from './analytics/sessions/getSessionActivity';
 export * from './analytics/sessions/getSessionStats';
 export * from './analytics/sessions/saveSessionData';