Session properties.
commit fc1fc5807e (parent deb9dd60df)
10 changed files with 64 additions and 22 deletions
@@ -135,10 +135,10 @@ async function clickhouseQuery(data: {
     city,
     ...args
   } = data;
-  const { insert } = clickhouse;
+  const { insert, getUTCString } = clickhouse;
   const { sendMessage } = kafka;
   const eventId = uuid();
-  const createdAt = new Date().toISOString();
+  const createdAt = getUTCString();

   const message = {
     ...args,
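Both insert paths in this commit switch createdAt from new Date().toISOString() to a getUTCString helper exposed by the clickhouse lib. The helper's body is not part of this diff; as a rough, non-authoritative sketch, it presumably renders the timestamp in the 'YYYY-MM-DD HH:mm:ss' form that a ClickHouse DateTime column expects, rather than the ISO 8601 string (with 'T' and 'Z') that toISOString() produces:

// Hypothetical sketch only — the real getUTCString lives in lib/clickhouse and is not shown in this diff.
// Assumption: it formats a date as UTC 'YYYY-MM-DD HH:mm:ss' for ClickHouse DateTime columns.
export function getUTCString(date: Date = new Date()): string {
  return date.toISOString().slice(0, 19).replace('T', ' ');
}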
@@ -2,32 +2,43 @@ import clickhouse from 'lib/clickhouse';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import prisma from 'lib/prisma';

-export async function getSessionActivity(...args: [websiteId: string, sessionId: string]) {
+export async function getSessionActivity(
+  ...args: [websiteId: string, sessionId: string, startDate: Date, endDate: Date]
+) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
   });
 }

-async function relationalQuery(websiteId: string, sessionId: string) {
+async function relationalQuery(
+  websiteId: string,
+  sessionId: string,
+  startDate: Date,
+  endDate: Date,
+) {
   return prisma.client.websiteEvent.findMany({
     where: {
       id: sessionId,
       websiteId,
+      createdAt: { gte: startDate, lte: endDate },
     },
     take: 500,
   });
 }

-async function clickhouseQuery(websiteId: string, sessionId: string) {
-  const { rawQuery, getDateStringSQL } = clickhouse;
+async function clickhouseQuery(
+  websiteId: string,
+  sessionId: string,
+  startDate: Date,
+  endDate: Date,
+) {
+  const { rawQuery } = clickhouse;

   return rawQuery(
     `
     select
       session_id as id,
       website_id as websiteId,
-      ${getDateStringSQL('created_at')} as createdAt,
+      created_at as createdAt,
       url_path as urlPath,
       url_query as urlQuery,
       referrer_domain as referrerDomain,
@@ -38,9 +49,10 @@ async function clickhouseQuery(websiteId: string, sessionId: string) {
     from website_event
     where website_id = {websiteId:UUID}
       and session_id = {sessionId:UUID}
+      and created_at between {startDate:DateTime64} and {endDate:DateTime64}
     order by created_at desc
     limit 500
     `,
-    { websiteId, sessionId },
+    { websiteId, sessionId, startDate, endDate },
   );
 }
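Since the argument tuple now includes a date range, every caller of getSessionActivity has to pass startDate and endDate along with the ids. A hypothetical call site (the variable names and the 24-hour window below are illustrative, not taken from this commit) would look like:

// Hypothetical caller — websiteId and sessionId are assumed to come from the request,
// and the 24-hour window is made up for illustration.
const endDate = new Date();
const startDate = new Date(endDate.getTime() - 24 * 60 * 60 * 1000);

const activity = await getSessionActivity(websiteId, sessionId, startDate, endDate);

Threading the range through both backends presumably lets the ClickHouse branch be bounded by the new created_at BETWEEN filter rather than reading the session's full event history.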
@@ -80,9 +80,9 @@ async function clickhouseQuery(data: {
 }) {
   const { websiteId, sessionId, sessionData } = data;

-  const { insert } = clickhouse;
+  const { insert, getUTCString } = clickhouse;
   const { sendMessages } = kafka;
-  const createdAt = new Date().toISOString();
+  const createdAt = getUTCString();

   const jsonKeys = flattenJSON(sessionData);
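For context, the session properties payload is flattened into individual keys before being written. The exact return shape of flattenJSON is not shown in this commit; the example below is only an assumption about how nested sessionData might be expanded:

// Illustrative assumption only — flattenJSON's real return shape is defined elsewhere in lib.
// The idea: each leaf of the nested sessionData object becomes its own key/value entry.
const sessionData = { plan: 'pro', company: { name: 'Acme', size: 50 } };

// Assumed result of flattenJSON(sessionData), with dotted paths for nested keys:
// [
//   { key: 'plan', value: 'pro' },
//   { key: 'company.name', value: 'Acme' },
//   { key: 'company.size', value: 50 },
// ]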