mirror of https://github.com/umami-software/umami.git
synced 2026-02-10 23:57:12 +01:00

Added rev_id column. Updated redis calls.

This commit is contained in:
parent a9112f39ec
commit 3485b6268b

18 changed files with 133 additions and 79 deletions
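Every ClickHouse query touched below follows the same pattern: look up the cached website record in redis, pass its revId as a second positional parameter, and filter on the new rev_id column. A minimal sketch of the recurring shape (assuming redis.get returns the deserialized object, or a null-ish value on a miss):

    // Recurring pattern across the ClickHouse query functions in this commit.
    const website = await redis.get(`website:${websiteId}`); // null/undefined on a cache miss
    const params = [websiteId, website?.revId || 0];         // rev_id falls back to 0
    // ...and each SQL statement gains: where website_id = $1 and rev_id = $2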
@@ -1,11 +1,16 @@
 import clickhouse from 'lib/clickhouse';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import prisma from 'lib/prisma';
+import redis from 'lib/redis';
 
 export async function getEventData(...args) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
-  });
+  }).then(results => {
+    return Object.keys(results[0]).map(a => {
+      return { x: a, y: results[0][`${a}`] };
+    });
+  });
 }
 
@@ -21,7 +26,7 @@ async function relationalQuery(websiteId, { startDate, endDate, event_name, colu
     on event.website_id = website.website_id
   join event_data
     on event.event_id = event_data.event_id
-  where website.website_id='${websiteId}'
+  where website.website_id ='${websiteId}'
   and event.created_at between $1 and $2
   ${event_name ? `and event_name = ${event_name}` : ''}
   ${
@@ -30,23 +35,21 @@ async function relationalQuery(websiteId, { startDate, endDate, event_name, colu
       : ''
     }`,
     params,
-  ).then(results => {
-    return Object.keys(results[0]).map(a => {
-      return { x: a, y: results[0][`${a}`] };
-    });
-  });
+  );
 }
 
 async function clickhouseQuery(websiteId, { startDate, endDate, event_name, columns, filters }) {
   const { rawQuery, getBetweenDates, getEventDataColumnsQuery, getEventDataFilterQuery } =
     clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];
 
   return rawQuery(
     `select
       ${getEventDataColumnsQuery('event_data', columns)}
     from event
-    where website_id= $1
+    where website_id = $1
+    and rev_id = $2
     ${event_name ? `and event_name = ${event_name}` : ''}
     and ${getBetweenDates('created_at', startDate, endDate)}
     ${
@@ -55,9 +58,5 @@ async function clickhouseQuery(websiteId, { startDate, endDate, event_name, colu
       : ''
     }`,
     params,
-  ).then(results => {
-    return Object.keys(results[0]).map(a => {
-      return { x: a, y: results[0][`${a}`] };
-    });
-  });
+  );
 }
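With this change both backends return raw rows and the { x, y } pivot runs once in getEventData rather than inside each query function. For a single-row result, Object.keys(results[0]) turns every column into a data point; a hypothetical row to illustrate:

    // Illustrative only: the actual row shape depends on the columns requested.
    const results = [{ clicks: 12, scrolls: 7 }];
    const data = Object.keys(results[0]).map(a => {
      return { x: a, y: results[0][`${a}`] };
    });
    // data: [{ x: 'clicks', y: 12 }, { x: 'scrolls', y: 7 }]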
@@ -1,6 +1,7 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
+import redis from 'lib/redis';
 
 export async function getEventMetrics(...args) {
   return runQuery({
@@ -46,7 +47,8 @@ async function clickhouseQuery(
   filters = {},
 ) {
   const { rawQuery, getDateQuery, getBetweenDates, getFilterQuery } = clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];
 
   return rawQuery(
     `select
@@ -55,7 +57,8 @@ async function clickhouseQuery(
       count(*) y
     from event
     where event_name != ''
-      and website_id= $1
+      and website_id = $1
+      and rev_id = $2
     and ${getBetweenDates('created_at', start_at, end_at)}
     ${getFilterQuery('event', filters, params)}
     group by x, t
@@ -3,6 +3,7 @@ import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import kafka from 'lib/kafka';
 import prisma from 'lib/prisma';
 import { uuid } from 'lib/crypto';
+import redis from 'lib/redis';
 
 export async function saveEvent(...args) {
   return runQuery({
@@ -43,11 +44,13 @@ async function clickhouseQuery(
   { session: { country, sessionUuid, ...sessionArgs }, eventUuid, url, eventName, eventData },
 ) {
   const { getDateFormat, sendMessage } = kafka;
+  const website = await redis.get(`website:${websiteId}`);
 
   const params = {
     session_id: sessionUuid,
     event_id: eventUuid,
     website_id: websiteId,
+    rev_id: website?.revId || 0,
     created_at: getDateFormat(new Date()),
     url: url?.substring(0, URL_LENGTH),
     event_name: eventName?.substring(0, EVENT_NAME_LENGTH),
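The website?.revId || 0 fallback assumes the cached website record, when present, carries a revId counter; if redis is disabled or the key is missing, rows are written with rev_id = 0. A sketch of the assumed cache entry (fields other than revId are hypothetical):

    // Hypothetical value stored under `website:${websiteId}`; only revId is read here.
    const website = { revId: 2 /* other fields omitted */ };
    const revId = website?.revId || 0; // 2 here; 0 when redis is disabled or the key is absent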
@@ -1,6 +1,7 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
+import redis from 'lib/redis';
 
 export async function getPageviewMetrics(...args) {
   return runQuery({
@@ -37,13 +38,15 @@ async function relationalQuery(websiteId, { startDate, endDate, column, table, f
 
 async function clickhouseQuery(websiteId, { startDate, endDate, column, filters = {} }) {
   const { rawQuery, parseFilters, getBetweenDates } = clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];
   const { pageviewQuery, sessionQuery, eventQuery } = parseFilters(column, filters, params);
 
   return rawQuery(
     `select ${column} x, count(*) y
     from event
-    where website_id= $1
+    where website_id = $1
+    and rev_id = $2
     ${column !== 'event_name' ? `and event_name = ''` : `and event_name != ''`}
     and ${getBetweenDates('created_at', startDate, endDate)}
     ${pageviewQuery}
@@ -1,6 +1,7 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
+import redis from 'lib/redis';
 
 export async function getPageviewStats(...args) {
   return runQuery({
@@ -51,7 +52,8 @@ async function clickhouseQuery(
   { start_at, end_at, timezone = 'UTC', unit = 'day', count = '*', filters = {} },
 ) {
   const { parseFilters, rawQuery, getDateStringQuery, getDateQuery, getBetweenDates } = clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];
   const { pageviewQuery, sessionQuery } = parseFilters(null, filters, params);
 
   return rawQuery(
@@ -2,6 +2,7 @@ import { URL_LENGTH } from 'lib/constants';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import kafka from 'lib/kafka';
 import prisma from 'lib/prisma';
+import redis from 'lib/redis';
 
 export async function savePageView(...args) {
   return runQuery({
@@ -29,6 +30,7 @@ async function clickhouseQuery(
   websiteId,
   { session: { country, id: sessionId, ...sessionArgs }, url, referrer },
 ) {
+  const website = await redis.get(`website:${websiteId}`);
   const { getDateFormat, sendMessage } = kafka;
   const params = {
     session_id: sessionId,
@@ -36,6 +38,7 @@ async function clickhouseQuery(
     created_at: getDateFormat(new Date()),
     url: url?.substring(0, URL_LENGTH),
     referrer: referrer?.substring(0, URL_LENGTH),
+    rev_id: website?.revId || 0,
     ...sessionArgs,
     country: country ? country : null,
   };
@@ -7,34 +7,32 @@ export async function createSession(...args) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
-  });
+  }).then(async data => {
+    if (redis.enabled && data) {
+      await redis.set(`session:${data.id}`, data);
+    }
+
+    return data;
+  });
 }
 
 async function relationalQuery(websiteId, data) {
-  return prisma.client.session
-    .create({
-      data: {
-        websiteId,
-        ...data,
-      },
-      select: {
-        id: true,
-        hostname: true,
-        browser: true,
-        os: true,
-        screen: true,
-        language: true,
-        country: true,
-        device: true,
-      },
-    })
-    .then(async res => {
-      if (redis.enabled && res) {
-        await redis.set(`session:${res.id}`, 1);
-      }
-
-      return res;
-    });
+  return prisma.client.session.create({
+    data: {
+      websiteId,
+      ...data,
+    },
+    select: {
+      id: true,
+      hostname: true,
+      browser: true,
+      os: true,
+      screen: true,
+      language: true,
+      country: true,
+      device: true,
+    },
+  });
 }
 
 async function clickhouseQuery(
@@ -42,10 +40,12 @@ async function clickhouseQuery(
   { sessionId, hostname, browser, os, screen, language, country, device },
 ) {
   const { getDateFormat, sendMessage } = kafka;
+  const website = await redis.get(`website:${websiteId}`);
 
-  const params = {
+  const data = {
     sessionId,
     website_id: websiteId,
+    rev_id: website?.revId || 0,
     created_at: getDateFormat(new Date()),
     hostname,
     browser,
@@ -56,9 +56,7 @@ async function clickhouseQuery(
     country: country ? country : null,
   };
 
-  await sendMessage(params, 'event');
+  await sendMessage(data, 'event');
 
-  if (redis.enabled) {
-    await redis.set(`session:${sessionId}`, 1);
-  }
+  return data;
 }
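The redis write now happens once in the shared runQuery continuation, and it caches the full session object rather than the flag 1 the old code stored. A sketch of the read side this enables (the lookup site is assumed, not part of this commit):

    // Assumed consumer of the cache written above (illustrative only).
    const cached = await redis.get(`session:${sessionId}`);
    if (cached) {
      return cached; // full session object, no database round trip
    }
    // otherwise fall through to createSession/getSession, which repopulate the cache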
@@ -7,26 +7,24 @@ export async function getSession(...args) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
-  });
+  }).then(async data => {
+    if (redis.enabled && data) {
+      await redis.set(`session:${data.id}`, data);
+    }
+
+    return data;
+  });
 }
 
 async function relationalQuery(where) {
-  return prisma.client.session
-    .findUnique({
-      where,
-    })
-    .then(async res => {
-      if (redis.enabled && res) {
-        await redis.set(`session:${res.sessionUuid}`, 1);
-      }
-
-      return res;
-    });
+  return prisma.client.session.findUnique({
+    where,
+  });
 }
 
-async function clickhouseQuery(sessionUuid) {
+async function clickhouseQuery(sessionId) {
   const { rawQuery, findFirst } = clickhouse;
-  const params = [sessionUuid];
+  const params = [sessionId];
 
   return rawQuery(
     `select distinct
@@ -43,13 +41,5 @@ async function clickhouseQuery(sessionUuid) {
     from event
     where session_id = $1`,
     params,
-  )
-    .then(result => findFirst(result))
-    .then(async res => {
-      if (redis.enabled && res) {
-        await redis.set(`session:${res.id}`, 1);
-      }
-
-      return res;
-    });
+  ).then(result => findFirst(result));
 }
@@ -1,6 +1,7 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
+import redis from 'lib/redis';
 
 export async function getSessionMetrics(...args) {
   return runQuery({
@@ -36,13 +37,15 @@ async function relationalQuery(websiteId, { startDate, endDate, field, filters =
 
 async function clickhouseQuery(websiteId, { startDate, endDate, field, filters = {} }) {
   const { parseFilters, getBetweenDates, rawQuery } = clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];
   const { pageviewQuery, sessionQuery } = parseFilters(null, filters, params);
 
   return rawQuery(
     `select ${field} x, count(*) y
     from event as x
-    where website_id=$1
+    where website_id = $1
+    and rev_id = $2
     and event_name = ''
     and ${getBetweenDates('created_at', startDate, endDate)}
     ${pageviewQuery}