Mirror of https://github.com/umami-software/umami.git (synced 2026-02-04 04:37:11 +01:00)

commit bb50753704 (parent 46b4b98d40)

    split db files

36 changed files with 496 additions and 454 deletions
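
This commit splits the old monolithic lib/db helpers into per-backend modules: Prisma/relational helpers now come from 'lib/db/relational', ClickHouse helpers hang off a default export from 'lib/clickhouse', Kafka helpers off a default export from 'lib/db/kafka', and runAnalyticsQuery moves to 'lib/db/db'. The diff below only shows the call sites, so the following is a minimal sketch of how such a dispatcher could look, assuming the query map is keyed by the RELATIONAL / CLICKHOUSE / KAFKA constants; the getAnalyticsType selector is hypothetical and not taken from the repo.

// Sketch only (not the repo's lib/db/db implementation).
import { RELATIONAL, CLICKHOUSE, KAFKA } from 'lib/constants';

// Hypothetical selector; the real project derives the backend from its connection URLs.
function getAnalyticsType() {
  if (process.env.KAFKA_URL) return KAFKA;
  if (process.env.CLICKHOUSE_URL) return CLICKHOUSE;
  return RELATIONAL;
}

export async function runAnalyticsQuery(queries) {
  // Call sites pass { [RELATIONAL]: () => ..., [CLICKHOUSE]: () => ..., [KAFKA]: () => ... }.
  const type = getAnalyticsType();
  const query = queries[type];

  if (!query) {
    throw new Error(`No query defined for analytics backend: ${type}`);
  }

  return query();
}
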
@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function createAccount(data) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function deleteAccount(user_id) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getAccountById(user_id) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getAccountByUsername(username) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getAccounts() {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function updateAccount(user_id, data) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function createWebsite(user_id, data) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function deleteWebsite(website_id) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getAllWebsites() {
   let data = await runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getUserWebsites(user_id) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getWebsiteById(website_id) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getWebsiteByShareId(share_id) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getWebsiteByUuid(website_uuid) {
   return runQuery(

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function resetWebsite(website_id) {
   return runQuery(prisma.$queryRaw`delete from session where website_id=${website_id}`);

@@ -1,4 +1,4 @@
-import { prisma, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function updateWebsite(website_id, data) {
   return runQuery(
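
Every hunk above only swaps the import path from 'lib/db' to 'lib/db/relational'; the query bodies are untouched. Across the whole diff the call sites import prisma, runQuery, rawQuery, getDateQuery, getFilterQuery, parseFilters and getTimestampInterval from that module, so its surface has to look roughly like the sketch below. The bodies here are placeholders for illustration, not the actual implementation.

// Sketch only: the surface 'lib/db/relational' must expose, inferred from the imports in this diff.
import { PrismaClient } from '@prisma/client';

export const prisma = new PrismaClient();

// Assumed behavior: await the Prisma promise and surface errors to the caller.
export async function runQuery(query) {
  return query;
}

// Also imported by the files in this commit (signatures inferred from call sites):
//   rawQuery(query, params)
//   getDateQuery(field, unit, timezone)
//   getFilterQuery(table, filters, params)
//   parseFilters(table, column, filters, params, sessionKey)
//   getTimestampInterval(field)
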
@@ -1,13 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  getBetweenDatesClickhouse,
-  getDateQuery,
-  getDateQueryClickhouse,
-  getFilterQuery,
-  rawQuery,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-} from 'lib/db';
+import clickhouse from 'lib/clickhouse';
+import { getDateQuery, getFilterQuery, rawQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
 
 export async function getEventMetrics(...args) {
   return runAnalyticsQuery({
@@ -53,16 +47,16 @@ async function clickhouseQuery(
 ) {
   const params = [website_id];
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
       event_name x,
-      ${getDateQueryClickhouse('created_at', unit, timezone)} t,
+      ${clickhouse.getDateQuery('created_at', unit, timezone)} t,
       count(*) y
     from event
     where website_id= $1
-      and ${getBetweenDatesClickhouse('created_at', start_at, end_at)}
-      ${getFilterQuery('event', filters, params)}
+      and ${clickhouse.getBetweenDates('created_at', start_at, end_at)}
+      ${clickhouse.getFilterQuery('event', filters, params)}
     group by x, t
     order by t
     `,
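
From this file on, the ClickHouse code paths stop importing individual *Clickhouse helpers from 'lib/db' and instead call methods on the default export of 'lib/clickhouse'. Based only on the call sites in this diff, that export needs rawQuery, getDateFormat, getDateQuery, getDateStringQuery, getBetweenDates, getFilterQuery and parseFilters. The sketch below shows that shape with placeholder bodies; the date literal format and every implementation detail are assumptions, not the repo's code.

// Sketch only: the 'lib/clickhouse' surface these call sites rely on.
const clickhouse = {
  // Assumed ClickHouse-friendly datetime literal, e.g. '2022-08-01 12:00:00'.
  getDateFormat: date => `'${date.toISOString().slice(0, 19).replace('T', ' ')}'`,

  getBetweenDates: (field, start_at, end_at) =>
    `${field} between ${clickhouse.getDateFormat(start_at)} and ${clickhouse.getDateFormat(end_at)}`,

  // Remaining helpers used in this commit; signatures inferred from call sites, bodies omitted.
  getDateQuery: (field, unit, timezone) => { /* date bucketing expression */ },
  getDateStringQuery: (value, unit) => { /* formatted date string expression */ },
  getFilterQuery: (table, filters, params) => { /* filter SQL fragment */ },
  parseFilters: (table, column, filters, params, sessionKey) => { /* query fragments */ },
  rawQuery: (query, params) => { /* execute against the ClickHouse client */ },
};

export default clickhouse;
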
@@ -1,11 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  rawQueryClickhouse,
-  getDateFormatClickhouse,
-  prisma,
-  runAnalyticsQuery,
-  runQuery,
-} from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
+import clickhouse from 'lib/clickhouse';
+import { runAnalyticsQuery } from 'lib/db/db';
 
 export function getEvents(...args) {
   return runAnalyticsQuery({
@@ -32,7 +28,7 @@ function relationalQuery(websites, start_at) {
 }
 
 function clickhouseQuery(websites, start_at) {
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
       event_id,
@@ -43,7 +39,7 @@ function clickhouseQuery(websites, start_at) {
       event_name
     from event
     where website_id in (${websites.join[',']}
-      and created_at >= ${getDateFormatClickhouse(start_at)})
+      and created_at >= ${clickhouse.getDateFormat(start_at)})
     `,
   );
 }
@@ -1,12 +1,8 @@
-import { CLICKHOUSE, RELATIONAL, KAFKA, URL_LENGTH } from 'lib/constants';
-import {
-  getDateFormatClickhouse,
-  prisma,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-  runQuery,
-} from 'lib/db';
-import { sendKafkaMessage, getDateFormatKafka } from 'lib/db/kafka';
+import { CLICKHOUSE, KAFKA, RELATIONAL, URL_LENGTH } from 'lib/constants';
+import clickhouse from 'lib/clickhouse';
+import kafka from 'lib/db/kafka';
+import { prisma, runQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
 
 export async function saveEvent(...args) {
   return runAnalyticsQuery({
@@ -48,10 +44,10 @@ async function clickhouseQuery(website_id, { event_uuid, session_uuid, url, even
     event_name?.substr(0, 50),
   ];
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     insert into umami.event (created_at, website_id, session_uuid, url, event_name)
-    values (${getDateFormatClickhouse(new Date())}, $1, $2, $3, $4);`,
+    values (${clickhouse.getDateFormat(new Date())}, $1, $2, $3, $4);`,
     params,
   );
 }
@@ -61,10 +57,10 @@ async function kafkaQuery(website_id, { event_uuid, session_uuid, url, event_nam
     event_uuid: event_uuid,
     website_id: website_id,
     session_uuid: session_uuid,
-    created_at: getDateFormatKafka(new Date()),
+    created_at: kafka.getDateFormat(new Date()),
     url: url?.substr(0, URL_LENGTH),
     event_name: event_name?.substr(0, 50),
   };
 
-  await sendKafkaMessage(params, 'event');
+  await kafka.sendKafkaMessage(params, 'event');
 }
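
saveEvent above (and savePageView and createSession below) make the same move for Kafka: the named sendKafkaMessage / getDateFormatKafka imports become kafka.sendKafkaMessage and kafka.getDateFormat on the default export of 'lib/db/kafka'. A minimal sketch of such a module follows, assuming kafkajs and a KAFKA_BROKER environment variable; both are assumptions about wiring that this diff does not show.

// Sketch only: a possible 'lib/db/kafka' default export matching these call sites.
import { Kafka } from 'kafkajs';

const client = new Kafka({
  clientId: 'umami',                                    // assumed client id
  brokers: (process.env.KAFKA_BROKER || '').split(','), // assumed env variable
});
const producer = client.producer();

async function sendKafkaMessage(params, topic) {
  await producer.connect();
  await producer.send({
    topic,
    messages: [{ value: JSON.stringify(params) }],
  });
}

// Same string format the ClickHouse sketch uses; the real format is an assumption.
const getDateFormat = date => date.toISOString().slice(0, 19).replace('T', ' ');

export default { sendKafkaMessage, getDateFormat };
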
@@ -1,11 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-  parseFilters,
-  rawQuery,
-  getBetweenDatesClickhouse,
-} from 'lib/db';
+import clickhouse from 'lib/clickhouse';
+import { parseFilters, rawQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
 
 export async function getPageviewMetrics(...args) {
   return runAnalyticsQuery({
@@ -42,7 +38,7 @@ async function relationalQuery(website_id, start_at, end_at, column, table, filt
 
 async function clickhouseQuery(website_id, start_at, end_at, column, table, filters = {}) {
   const params = [website_id];
-  const { pageviewQuery, sessionQuery, eventQuery, joinSession } = parseFilters(
+  const { pageviewQuery, sessionQuery, eventQuery, joinSession } = clickhouse.parseFilters(
     table,
     column,
     filters,
@@ -50,13 +46,13 @@ async function clickhouseQuery(website_id, start_at, end_at, column, table, filt
     'session_uuid',
   );
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select ${column} x, count(*) y
     from ${table}
       ${joinSession}
     where ${table}.website_id= $1
-      and ${getBetweenDatesClickhouse(table + '.created_at', start_at, end_at)}
+      and ${clickhouse.getBetweenDates(table + '.created_at', start_at, end_at)}
      ${pageviewQuery}
      ${joinSession && sessionQuery}
      ${eventQuery}
@@ -1,4 +1,4 @@
-import { parseFilters, rawQuery, runAnalyticsQuery } from 'lib/db';
+import { parseFilters, rawQuery, runAnalyticsQuery } from 'lib/db/relational';
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
 
 export async function getPageviewParams(...args) {
@@ -1,14 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  getBetweenDatesClickhouse,
-  getDateQuery,
-  getDateQueryClickhouse,
-  getDateStringQueryClickhouse,
-  parseFilters,
-  rawQuery,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-} from 'lib/db';
+import { getDateQuery, parseFilters, rawQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
+import clickhouse from 'lib/clickhouse';
 
 export async function getPageviewStats(...args) {
   return runAnalyticsQuery({
@@ -62,7 +55,7 @@ async function clickhouseQuery(
   sessionKey = 'session_uuid',
 ) {
   const params = [website_id];
-  const { pageviewQuery, sessionQuery, joinSession } = parseFilters(
+  const { pageviewQuery, sessionQuery, joinSession } = clickhouse.parseFilters(
     'pageview',
     null,
     filters,
@@ -70,19 +63,19 @@ async function clickhouseQuery(
     sessionKey,
   );
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
-      ${getDateStringQueryClickhouse('g.t', unit)} as t,
+      ${clickhouse.getDateStringQuery('g.t', unit)} as t,
       g.y as y
     from
       (select
-        ${getDateQueryClickhouse('created_at', unit, timezone)} t,
+        ${clickhouse.getDateQuery('created_at', unit, timezone)} t,
        count(${count !== '*' ? `${count}${sessionKey}` : count}) y
       from pageview
        ${joinSession}
       where pageview.website_id= $1
-        and ${getBetweenDatesClickhouse('pageview.created_at', start_at, end_at)}
+        and ${clickhouse.getBetweenDates('pageview.created_at', start_at, end_at)}
        ${pageviewQuery}
        ${sessionQuery}
       group by t) g
@@ -1,11 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  rawQueryClickhouse,
-  getDateFormatClickhouse,
-  prisma,
-  runAnalyticsQuery,
-  runQuery,
-} from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
+import clickhouse from 'lib/clickhouse';
 
 export async function getPageviews(...args) {
   return runAnalyticsQuery({
@@ -32,7 +28,7 @@ async function relationalQuery(websites, start_at) {
 }
 
 async function clickhouseQuery(websites, start_at) {
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
       view_id,
@@ -42,7 +38,7 @@ async function clickhouseQuery(websites, start_at) {
       url
     from pageview
     where website_id in (${websites.join[',']}
-      and created_at >= ${getDateFormatClickhouse(start_at)})
+      and created_at >= ${clickhouse.getDateFormat(start_at)})
     `,
   );
 }
@@ -1,12 +1,8 @@
-import { CLICKHOUSE, RELATIONAL, KAFKA, URL_LENGTH } from 'lib/constants';
-import {
-  getDateFormatClickhouse,
-  prisma,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-  runQuery,
-} from 'lib/db';
-import { sendKafkaMessage, getDateFormatKafka } from 'lib/db/kafka';
+import { CLICKHOUSE, KAFKA, RELATIONAL, URL_LENGTH } from 'lib/constants';
+import clickhouse from 'lib/clickhouse';
+import { runAnalyticsQuery } from 'lib/db/db';
+import kafka from 'lib/db/kafka';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function savePageView(...args) {
   return runAnalyticsQuery({
@@ -37,10 +33,10 @@ async function clickhouseQuery(website_id, { session_uuid, url, referrer }) {
     referrer?.substr(0, URL_LENGTH),
   ];
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     insert into umami.pageview (created_at, website_id, session_uuid, url, referrer)
-    values (${getDateFormatClickhouse(new Date())}, $1, $2, $3, $4);`,
+    values (${clickhouse.getDateFormat(new Date())}, $1, $2, $3, $4);`,
     params,
   );
 }
@@ -49,10 +45,10 @@ async function kafkaQuery(website_id, { session_uuid, url, referrer }) {
   const params = {
     website_id: website_id,
     session_uuid: session_uuid,
-    created_at: getDateFormatKafka(new Date()),
+    created_at: kafka.getDateFormat(new Date()),
     url: url?.substr(0, URL_LENGTH),
     referrer: referrer?.substr(0, URL_LENGTH),
   };
 
-  await sendKafkaMessage(params, 'pageview');
+  await kafka.sendKafkaMessage(params, 'pageview');
 }
@@ -1,13 +1,8 @@
-import { CLICKHOUSE, RELATIONAL, KAFKA } from 'lib/constants';
-import {
-  getDateFormatClickhouse,
-  prisma,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-  runQuery,
-} from 'lib/db';
-import { sendKafkaMessage, getDateFormatKafka } from 'lib/db/kafka';
-import { getSessionByUuid } from 'queries';
+import { CLICKHOUSE, KAFKA, RELATIONAL } from 'lib/constants';
+import { prisma, runQuery } from 'lib/db/relational';
+import clickhouse from 'lib/clickhouse';
+import kafka from 'lib/db/kafka';
+import { runAnalyticsQuery } from 'lib/db/db';
 
 export async function createSession(...args) {
   return runAnalyticsQuery({
@@ -47,13 +42,11 @@ async function clickhouseQuery(
     country ? country : null,
   ];
 
-  await rawQueryClickhouse(
+  await clickhouse.rawQuery(
     `insert into umami.session (created_at, session_uuid, website_id, hostname, browser, os, device, screen, language, country)
-    values (${getDateFormatClickhouse(new Date())}, $1, $2, $3, $4, $5, $6, $7, $8, $9);`,
+    values (${clickhouse.getDateFormat(new Date())}, $1, $2, $3, $4, $5, $6, $7, $8, $9);`,
     params,
   );
-
-  return getSessionByUuid(session_uuid);
 }
 
 async function kafkaQuery(
@@ -63,7 +56,7 @@ async function kafkaQuery(
   const params = {
     session_uuid: session_uuid,
     website_id: website_id,
-    created_at: getDateFormatKafka(new Date()),
+    created_at: kafka.getDateFormat(new Date()),
     hostname: hostname,
     browser: browser,
     os: os,
@@ -73,7 +66,5 @@ async function kafkaQuery(
     country: country ? country : null,
   };
 
-  await sendKafkaMessage(params, 'session');
-
-  return getSessionByUuid(session_uuid);
+  await kafka.sendKafkaMessage(params, 'session');
 }
@@ -1,5 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import { rawQueryClickhouse, findUnique, prisma, runAnalyticsQuery, runQuery } from 'lib/db';
+import { prisma, runQuery } from 'lib/db/relational';
+import clickhouse from 'lib/clickhouse';
+import { runAnalyticsQuery } from 'lib/db/db';
 
 export async function getSessionByUuid(...args) {
   return runAnalyticsQuery({
@@ -21,7 +23,7 @@ async function relationalQuery(session_uuid) {
 async function clickhouseQuery(session_uuid) {
   const params = [session_uuid];
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
       session_uuid,
@@ -38,5 +40,5 @@ async function clickhouseQuery(session_uuid) {
     where session_uuid = $1
     `,
     params,
-  ).then(data => findUnique(data));
+  );
 }
@@ -1,11 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  getBetweenDatesClickhouse,
-  parseFilters,
-  rawQuery,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-} from 'lib/db';
+import clickhouse from 'lib/clickhouse';
+import { runAnalyticsQuery } from 'lib/db/db';
+import { parseFilters, rawQuery } from 'lib/db/relational';
 
 export async function getSessionMetrics(...args) {
   return runAnalyticsQuery({
@@ -45,7 +41,7 @@ async function relationalQuery(website_id, start_at, end_at, field, filters = {}
 
 async function clickhouseQuery(website_id, start_at, end_at, field, filters = {}) {
   const params = [website_id];
-  const { pageviewQuery, sessionQuery, joinSession } = parseFilters(
+  const { pageviewQuery, sessionQuery, joinSession } = clickhouse.parseFilters(
     'pageview',
     null,
     filters,
@@ -53,7 +49,7 @@ async function clickhouseQuery(website_id, start_at, end_at, field, filters = {}
     'session_uuid',
   );
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select ${field} x, count(*) y
     from session as x
@@ -62,7 +58,7 @@ async function clickhouseQuery(website_id, start_at, end_at, field, filters = {}
       from pageview
        ${joinSession}
       where pageview.website_id=$1
-        and ${getBetweenDatesClickhouse('pageview.created_at', start_at, end_at)}
+        and ${clickhouse.getBetweenDates('pageview.created_at', start_at, end_at)}
        ${pageviewQuery}
        ${sessionQuery}
     )
@@ -1,11 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  getDateFormatClickhouse,
-  prisma,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-  runQuery,
-} from 'lib/db';
+import clickhouse from 'lib/clickhouse';
+import { runAnalyticsQuery } from 'lib/db/db';
+import { prisma, runQuery } from 'lib/db/relational';
 
 export async function getSessions(...args) {
   return runAnalyticsQuery({
@@ -32,7 +28,7 @@ async function relationalQuery(websites, start_at) {
 }
 
 async function clickhouseQuery(websites, start_at) {
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
       session_id,
@@ -48,7 +44,7 @@ async function clickhouseQuery(websites, start_at) {
      country
     from session
     where website_id in (${websites.join[',']}
-      and created_at >= ${getDateFormatClickhouse(start_at)})
+      and created_at >= ${clickhouse.getDateFormat(start_at)})
     `,
   );
 }
@@ -1,6 +1,8 @@
 import { subMinutes } from 'date-fns';
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import { getDateFormatClickhouse, rawQuery, rawQueryClickhouse, runAnalyticsQuery } from 'lib/db';
+import { rawQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
+import clickhouse from 'lib/clickhouse';
 
 export async function getActiveVisitors(...args) {
   return runAnalyticsQuery({
@@ -27,12 +29,12 @@ async function relationalQuery(website_id) {
 async function clickhouseQuery(website_id) {
   const params = [website_id];
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select count(distinct session_uuid) x
     from pageview
     where website_id = $1
-      and created_at >= ${getDateFormatClickhouse(subMinutes(new Date(), 5))}
+      and created_at >= ${clickhouse.getDateFormat(subMinutes(new Date(), 5))}
     `,
     params,
   );
@@ -1,14 +1,7 @@
 import { CLICKHOUSE, RELATIONAL } from 'lib/constants';
-import {
-  getDateQuery,
-  getBetweenDatesClickhouse,
-  getDateQueryClickhouse,
-  getTimestampInterval,
-  parseFilters,
-  rawQuery,
-  rawQueryClickhouse,
-  runAnalyticsQuery,
-} from 'lib/db';
+import { getDateQuery, getTimestampInterval, parseFilters, rawQuery } from 'lib/db/relational';
+import { runAnalyticsQuery } from 'lib/db/db';
+import clickhouse from 'lib/clickhouse';
 
 export async function getWebsiteStats(...args) {
   return runAnalyticsQuery({
@@ -52,7 +45,7 @@ async function relationalQuery(website_id, start_at, end_at, filters = {}) {
 
 async function clickhouseQuery(website_id, start_at, end_at, filters = {}) {
   const params = [website_id];
-  const { pageviewQuery, sessionQuery, joinSession } = parseFilters(
+  const { pageviewQuery, sessionQuery, joinSession } = clickhouse.parseFilters(
     'pageview',
     null,
     filters,
@@ -60,7 +53,7 @@ async function clickhouseQuery(website_id, start_at, end_at, filters = {}) {
     'session_uuid',
   );
 
-  return rawQueryClickhouse(
+  return clickhouse.rawQuery(
     `
     select
       sum(t.c) as "pageviews",
@@ -69,14 +62,14 @@ async function clickhouseQuery(website_id, start_at, end_at, filters = {}) {
       sum(if(max_time < min_time + interval 1 hour, max_time-min_time, 0)) as "totaltime"
     from (
       select pageview.session_uuid,
-        ${getDateQueryClickhouse('pageview.created_at', 'day')} time_series,
+        ${clickhouse.getDateQuery('pageview.created_at', 'day')} time_series,
        count(*) c,
        min(created_at) min_time,
        max(created_at) max_time
       from pageview
        ${joinSession}
       where pageview.website_id = $1
-        and ${getBetweenDatesClickhouse('pageview.created_at', start_at, end_at)}
+        and ${clickhouse.getBetweenDates('pageview.created_at', start_at, end_at)}
        ${pageviewQuery}
        ${sessionQuery}
       group by pageview.session_uuid, time_series