Mirror of https://github.com/umami-software/umami.git
Database refactoring.

commit 467c7f289f
parent bb184dc2cc

37 changed files with 566 additions and 591 deletions
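Across the hunks below, the same refactoring pattern repeats: the `runAnalyticsQuery` dispatcher keyed by `RELATIONAL`/`CLICKHOUSE` constants from lib/constants is replaced by a `runQuery` dispatcher whose backend keys (`PRISMA`, `CLICKHOUSE`, `KAFKA`) come from lib/db, and the per-backend helpers are destructured from the prisma/clickhouse/kafka modules rather than called through them. A minimal sketch of the new call shape, using names taken from the hunks (the query-function bodies are placeholders, not this commit's code):

// Sketch of the dispatch shape after the refactor; relationalQuery and
// clickhouseQuery stand in for the per-backend implementations shown below.
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';

async function relationalQuery(...args) {
  // Prisma-backed implementation (see the relationalQuery hunks below).
}

async function clickhouseQuery(...args) {
  // ClickHouse-backed implementation (see the clickhouseQuery hunks below).
}

export async function getEventMetrics(...args) {
  // runQuery picks whichever backend the deployment is configured for.
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}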
@@ -1,11 +1,10 @@
-import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
+import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
-import { getDateQuery, getFilterQuery, rawQuery } from 'lib/relational';
-import { runAnalyticsQuery } from 'lib/db';
+import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
 
 export async function getEventMetrics(...args) {
-  return runAnalyticsQuery({
-    [RELATIONAL]: () => relationalQuery(...args),
+  return runQuery({
+    [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
   });
 }
@@ -18,11 +17,11 @@ async function relationalQuery(
   unit = 'day',
   filters = {},
 ) {
+  const { rawQuery, getDateQuery, getFilterQuery } = prisma;
   const params = [website_id, start_at, end_at];
 
   return rawQuery(
-    `
-    select
+    `select
       event_name x,
       ${getDateQuery('created_at', unit, timezone)} t,
       count(*) y
@@ -31,8 +30,7 @@ async function relationalQuery(
       and created_at between $2 and $3
       ${getFilterQuery('event', filters, params)}
     group by 1, 2
-    order by 2
-    `,
+    order by 2`,
     params,
   );
 }
@@ -45,21 +43,20 @@ async function clickhouseQuery(
   unit = 'day',
   filters = {},
 ) {
+  const { rawQuery, getDateQuery, getBetweenDates } = prisma;
   const params = [website_id];
 
-  return clickhouse.rawQuery(
-    `
-    select
+  return rawQuery(
+    `select
       event_name x,
-      ${clickhouse.getDateQuery('created_at', unit, timezone)} t,
+      ${getDateQuery('created_at', unit, timezone)} t,
       count(*) y
     from event
     where website_id= $1
-      and ${clickhouse.getBetweenDates('created_at', start_at, end_at)}
+      and ${getBetweenDates('created_at', start_at, end_at)}
       ${clickhouse.getFilterQuery('event', filters, params)}
     group by x, t
-    order by t
-    `,
+    order by t`,
     params,
   );
 }
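lib/db itself is not part of the hunks shown on this page, so the dispatcher is only implied by its call sites. A rough sketch of how a runQuery with this shape could be written, assuming a hypothetical getDatabaseType() helper that reports the configured backend (both the helper and the environment-variable check are assumptions, not this commit's code):

// Hypothetical lib/db sketch -- illustrative only, not taken from this commit.
export const PRISMA = 'prisma';
export const CLICKHOUSE = 'clickhouse';
export const KAFKA = 'kafka';

// Assumed helper: decide which backend this deployment uses.
function getDatabaseType() {
  return process.env.CLICKHOUSE_URL ? CLICKHOUSE : PRISMA;
}

export async function runQuery(queries) {
  const fn = queries[getDatabaseType()];

  if (!fn) {
    throw new Error('Missing query implementation for the configured database');
  }

  return fn();
}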
@@ -1,36 +1,34 @@
-import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import { prisma, runQuery } from 'lib/relational';
+import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
-import { runAnalyticsQuery } from 'lib/db';
+import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
 
 export function getEvents(...args) {
-  return runAnalyticsQuery({
-    [RELATIONAL]: () => relationalQuery(...args),
+  return runQuery({
+    [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
   });
 }
 
 function relationalQuery(websites, start_at) {
-  return runQuery(
-    prisma.event.findMany({
-      where: {
-        website: {
-          website_id: {
-            in: websites,
-          },
-        },
-        created_at: {
-          gte: start_at,
-        },
-      },
-    }),
-  );
+  return prisma.client.event.findMany({
+    where: {
+      website: {
+        website_id: {
+          in: websites,
+        },
+      },
+      created_at: {
+        gte: start_at,
+      },
+    },
+  });
 }
 
 function clickhouseQuery(websites, start_at) {
-  return clickhouse.rawQuery(
-    `
-    select
+  const { rawQuery, getDateFormat } = clickhouse;
+
+  return rawQuery(
+    `select
       event_id,
       website_id,
       session_id,
@@ -39,7 +37,6 @@ function clickhouseQuery(websites, start_at) {
       event_name
     from event
     where website_id in (${websites.join[',']}
-      and created_at >= ${clickhouse.getDateFormat(start_at)})
-    `,
+      and created_at >= ${getDateFormat(start_at)})`,
   );
 }
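The call sites above (`prisma.client.event.findMany`, plus `prisma.rawQuery` and `prisma.getDateQuery` in the earlier hunks) imply that lib/prisma now exports a wrapper object carrying both the PrismaClient instance and the SQL helpers. That module is not in this diff; the following shape is only an inference from the call sites:

// Inferred shape of lib/prisma -- a sketch, not this commit's code.
import { PrismaClient } from '@prisma/client';

const client = new PrismaClient();

function rawQuery(sql, params = []) {
  // Assumption: raw SQL goes through Prisma's unsafe raw-query API.
  return client.$queryRawUnsafe(sql, ...params);
}

export default {
  client,
  rawQuery,
  // getDateQuery, getFilterQuery, getBetweenDates would build SQL fragments here.
};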
@@ -1,12 +1,12 @@
-import { CLICKHOUSE, KAFKA, RELATIONAL, URL_LENGTH } from 'lib/constants';
+import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import kafka from 'lib/kafka';
-import { prisma, runQuery } from 'lib/relational';
-import { runAnalyticsQuery } from 'lib/db';
+import { runQuery, CLICKHOUSE, KAFKA, PRISMA } from 'lib/db';
+import { URL_LENGTH, EVENT_NAME_LENGTH } from 'lib/constants';
 
 export async function saveEvent(...args) {
-  return runAnalyticsQuery({
-    [RELATIONAL]: () => relationalQuery(...args),
+  return runQuery({
+    [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
     [KAFKA]: () => kafkaQuery(...args),
   });
@@ -16,8 +16,8 @@ async function relationalQuery(website_id, { session_id, url, event_name, event_
   const data = {
     website_id,
     session_id,
-    url: url?.substr(0, URL_LENGTH),
-    event_name: event_name?.substr(0, 50),
+    url: url?.substring(0, URL_LENGTH),
+    event_name: event_name?.substring(0, EVENT_NAME_LENGTH),
   };
 
   if (event_data) {
@@ -28,39 +28,38 @@ async function relationalQuery(website_id, { session_id, url, event_name, event_
     };
   }
 
-  return runQuery(
-    prisma.event.create({
-      data,
-    }),
-  );
+  return prisma.client.event.create({
+    data,
+  });
 }
 
 async function clickhouseQuery(website_id, { event_uuid, session_uuid, url, event_name }) {
+  const { rawQuery, getDateFormat } = clickhouse;
   const params = [
     website_id,
     event_uuid,
     session_uuid,
-    url?.substr(0, URL_LENGTH),
-    event_name?.substr(0, 50),
+    url?.substring(0, URL_LENGTH),
+    event_name?.substring(0, EVENT_NAME_LENGTH),
   ];
 
-  return clickhouse.rawQuery(
-    `
-    insert into umami.event (created_at, website_id, session_uuid, url, event_name)
-    values (${clickhouse.getDateFormat(new Date())}, $1, $2, $3, $4);`,
+  return rawQuery(
+    `insert into umami.event (created_at, website_id, session_uuid, url, event_name)
+    values (${getDateFormat(new Date())}, $1, $2, $3, $4);`,
     params,
   );
 }
 
 async function kafkaQuery(website_id, { event_uuid, session_uuid, url, event_name }) {
+  const { getDateFormat, sendMessage } = kafka;
   const params = {
     event_uuid: event_uuid,
     website_id: website_id,
     session_uuid: session_uuid,
-    created_at: kafka.getDateFormat(new Date()),
-    url: url?.substr(0, URL_LENGTH),
-    event_name: event_name?.substr(0, 50),
+    created_at: getDateFormat(new Date()),
+    url: url?.substring(0, URL_LENGTH),
+    event_name: event_name?.substring(0, EVENT_NAME_LENGTH),
   };
 
-  await kafka.sendMessage(params, 'event');
+  await sendMessage(params, 'event');
 }
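The kafkaQuery path hands each event row to `sendMessage(params, 'event')`; lib/kafka is likewise outside this diff. A sketch of what such a helper might look like on top of kafkajs, with the broker configuration and topic handling purely assumed:

// Hypothetical lib/kafka sketch -- illustrative only, not part of this commit.
import { Kafka } from 'kafkajs';

const kafka = new Kafka({
  clientId: 'umami',
  brokers: (process.env.KAFKA_URL || 'localhost:9092').split(','),
});

const producer = kafka.producer();
let connected = false;

export async function sendMessage(params, topic) {
  if (!connected) {
    await producer.connect();
    connected = true;
  }

  // Each analytics row is published as one JSON-encoded message on the topic.
  await producer.send({
    topic,
    messages: [{ value: JSON.stringify(params) }],
  });
}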