Mirror of https://github.com/umami-software/umami.git, synced 2026-02-12 00:27:11 +01:00
Added rev_id column. Updated redis calls.
parent a9112f39ec
commit 3485b6268b
18 changed files with 133 additions and 79 deletions
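Every hunk below applies the same change: the cached website record is read from Redis, and its revId (falling back to 0) is bound alongside the website_id so the ClickHouse queries can filter on the new rev_id column. A minimal sketch of that lookup, assuming lib/redis returns the parsed website object; getRevId is only an illustrative name, not a function from this commit:

import redis from 'lib/redis';

// Resolve the value bound as the second query parameter ($2) in the hunks below.
// Assumes the website record is cached under `website:<id>` and may carry a revId
// field; 0 is used when the key or the field is missing.
async function getRevId(websiteId) {
  const website = await redis.get(`website:${websiteId}`);
  return website?.revId || 0;
}

// Usage against a query of the form `where website_id = $1 and rev_id = $2`:
// const params = [websiteId, await getRevId(websiteId)];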
@@ -1,11 +1,16 @@
 import clickhouse from 'lib/clickhouse';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import prisma from 'lib/prisma';
+import redis from 'lib/redis';

 export async function getEventData(...args) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
+  }).then(results => {
+    return Object.keys(results[0]).map(a => {
+      return { x: a, y: results[0][`${a}`] };
+    });
   });
 }

@@ -21,7 +26,7 @@ async function relationalQuery(websiteId, { startDate, endDate, event_name, colu
       on event.website_id = website.website_id
     join event_data
       on event.event_id = event_data.event_id
-    where website.website_id='${websiteId}'
+    where website.website_id ='${websiteId}'
     and event.created_at between $1 and $2
     ${event_name ? `and event_name = ${event_name}` : ''}
     ${
@@ -30,23 +35,21 @@ async function relationalQuery(websiteId, { startDate, endDate, event_name, colu
         : ''
     }`,
     params,
-  ).then(results => {
-    return Object.keys(results[0]).map(a => {
-      return { x: a, y: results[0][`${a}`] };
-    });
-  });
+  );
 }

 async function clickhouseQuery(websiteId, { startDate, endDate, event_name, columns, filters }) {
   const { rawQuery, getBetweenDates, getEventDataColumnsQuery, getEventDataFilterQuery } =
     clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];

   return rawQuery(
     `select
       ${getEventDataColumnsQuery('event_data', columns)}
     from event
-    where website_id= $1
+    where website_id = $1
+    and rev_id = $2
     ${event_name ? `and event_name = ${event_name}` : ''}
     and ${getBetweenDates('created_at', startDate, endDate)}
     ${
@@ -55,9 +58,5 @@ async function clickhouseQuery(websiteId, { startDate, endDate, event_name, colu
         : ''
     }`,
     params,
-  ).then(results => {
-    return Object.keys(results[0]).map(a => {
-      return { x: a, y: results[0][`${a}`] };
-    });
-  });
+  );
 }
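Pieced together from the four hunks above, the ClickHouse branch of the event-data query ends up roughly as follows; the reshaping of results into { x, y } pairs now happens once in getEventData rather than in each query function. This is only a sketch assembled from the diff, with the filter interpolation that the diff truncates left as a placeholder:

import clickhouse from 'lib/clickhouse';
import redis from 'lib/redis';

async function clickhouseQuery(websiteId, { startDate, endDate, event_name, columns, filters }) {
  const { rawQuery, getBetweenDates, getEventDataColumnsQuery, getEventDataFilterQuery } =
    clickhouse;
  // rev_id comes from the website record cached in Redis, defaulting to 0.
  const website = await redis.get(`website:${websiteId}`);
  const params = [websiteId, website?.revId || 0];

  return rawQuery(
    `select
      ${getEventDataColumnsQuery('event_data', columns)}
    from event
    where website_id = $1
    and rev_id = $2
    ${event_name ? `and event_name = ${event_name}` : ''}
    and ${getBetweenDates('created_at', startDate, endDate)}
    ${
      // filter clause truncated in the diff
      ''
    }`,
    params,
  );
}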
@@ -1,6 +1,7 @@
 import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
+import redis from 'lib/redis';

 export async function getEventMetrics(...args) {
   return runQuery({
@@ -46,7 +47,8 @@ async function clickhouseQuery(
   filters = {},
 ) {
   const { rawQuery, getDateQuery, getBetweenDates, getFilterQuery } = clickhouse;
-  const params = [websiteId];
+  const website = await redis.get(`website:${websiteId}`);
+  const params = [websiteId, website?.revId || 0];

   return rawQuery(
     `select
@@ -55,7 +57,8 @@ async function clickhouseQuery(
       count(*) y
     from event
     where event_name != ''
-    and website_id= $1
+    and website_id = $1
+    and rev_id = $2
     and ${getBetweenDates('created_at', start_at, end_at)}
     ${getFilterQuery('event', filters, params)}
     group by x, t
@@ -3,6 +3,7 @@ import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import kafka from 'lib/kafka';
 import prisma from 'lib/prisma';
 import { uuid } from 'lib/crypto';
+import redis from 'lib/redis';

 export async function saveEvent(...args) {
   return runQuery({
@@ -43,11 +44,13 @@ async function clickhouseQuery(
   { session: { country, sessionUuid, ...sessionArgs }, eventUuid, url, eventName, eventData },
 ) {
   const { getDateFormat, sendMessage } = kafka;
+  const website = await redis.get(`website:${websiteId}`);

   const params = {
     session_id: sessionUuid,
     event_id: eventUuid,
     website_id: websiteId,
+    rev_id: website?.revId || 0,
     created_at: getDateFormat(new Date()),
     url: url?.substring(0, URL_LENGTH),
     event_name: eventName?.substring(0, EVENT_NAME_LENGTH),
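Condensed from the saveEvent hunks above: the ClickHouse branch now stamps each outgoing event row with the cached revId before handing it to Kafka. The sketch below is illustrative only; buildEventParams is a hypothetical name, and the length constants are placeholders for the module's real URL_LENGTH and EVENT_NAME_LENGTH values, which the diff does not show:

import kafka from 'lib/kafka';
import redis from 'lib/redis';

// Placeholder limits standing in for the module's own constants.
const URL_LENGTH = 500;
const EVENT_NAME_LENGTH = 50;

// Hypothetical helper mirroring the params object built in the hunk above.
async function buildEventParams({ websiteId, sessionUuid, eventUuid, url, eventName }) {
  const { getDateFormat } = kafka;
  const website = await redis.get(`website:${websiteId}`);

  return {
    session_id: sessionUuid,
    event_id: eventUuid,
    website_id: websiteId,
    rev_id: website?.revId || 0, // 0 when the website has no cached revision
    created_at: getDateFormat(new Date()),
    url: url?.substring(0, URL_LENGTH),
    event_name: eventName?.substring(0, EVENT_NAME_LENGTH),
  };
}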