Mirror of https://github.com/umami-software/umami.git, synced 2026-02-13 17:15:37 +01:00
Dev (#1702)
* Initial Typescript models.
* Re-add realtime data
* get distinct sessions for session metrics
* Add queries for new schema.
* Fix Typo.
* Add some api/team endpoints.
* Fix destructure error.
* Fix getWebsites call.
* Ignore typescript build errors.
* Fix enum issue.
* add clickhouse route to deleteWebsite
* Fix Website auth.
* Updated lint-staged config.
* Add permission checks.
* Add user role api.
* Fix error when updating website.
* Fix isAdmin check. Fix Schema.
* Initial conversion to react-basics.
* Remove user/team transfer from website update.
* delete website in relational query
* Fix login secure token creation.
* Add event type to event.
* Allow user to be added to team with role.
* Updated login form.
* Add Role to TeamUser.
* Add database migration.
* Refactored permissions check. Updated redis lib.
* Feat/um 114 roles and permissions (#1683)
* Auth checkpoint.
* Merge branch 'dev' into feat/um-114-roles-and-permissions
* Add 02 migration.
* Added lib/types.
* Updated schema.
* Updated roles and permissions logic.
* Implement react-basics styles. Fix queries.
* Update website details layout.
* Add 01 migration.
* Fix admin create.
* Update react-basics.

Co-authored-by: Francis Cao <franciscao@gmail.com>
Co-authored-by: Mike Cao <mike@mikecao.com>
Co-authored-by: Mike Cao <moocao@gmail.com>
This commit is contained in:
parent 94848cc41b
commit 8732d056dd
165 changed files with 3370 additions and 6268 deletions
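The query files shown below all follow the same dispatch shape: the exported function hands its arguments to runQuery from lib/db, which picks either the relational (Prisma) or the ClickHouse implementation. As rough orientation only, here is a minimal sketch of that pattern; the database-selection logic is an assumption for illustration, not umami's actual lib/db code:

// Illustrative sketch of the PRISMA/CLICKHOUSE dispatch used by the query files below.
// The selection logic (checking CLICKHOUSE_URL) is assumed here for clarity only.
export const PRISMA = 'prisma';
export const CLICKHOUSE = 'clickhouse';

export async function runQuery<T>(queries: Record<string, () => Promise<T>>): Promise<T> {
  // Assume the active backend is derived from configuration.
  const db = process.env.CLICKHOUSE_URL ? CLICKHOUSE : PRISMA;
  return queries[db]();
}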
@@ -1,59 +0,0 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import cache from 'lib/cache';

export async function getPageviewMetrics(...args) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

async function relationalQuery(websiteId, { startDate, endDate, column, table, filters = {} }) {
  const { rawQuery, parseFilters } = prisma;
  const params = [startDate, endDate];
  const { pageviewQuery, sessionQuery, eventQuery, joinSession } = parseFilters(
    table,
    column,
    filters,
    params,
  );

  return rawQuery(
    `select ${column} x, count(*) y
    from ${table}
      ${` join website on ${table}.website_id = website.website_id`}
      ${joinSession}
    where website.website_id='${websiteId}'
      and ${table}.created_at between $1 and $2
      ${pageviewQuery}
      ${joinSession && sessionQuery}
      ${eventQuery}
    group by 1
    order by 2 desc`,
    params,
  );
}

async function clickhouseQuery(websiteId, { startDate, endDate, column, filters = {} }) {
  const { rawQuery, parseFilters, getBetweenDates } = clickhouse;
  const website = await cache.fetchWebsite(websiteId);
  const params = [websiteId, website?.revId || 0];
  const { pageviewQuery, sessionQuery, eventQuery } = parseFilters(column, filters, params);

  return rawQuery(
    `select ${column} x, count(*) y
    from event
    where website_id = $1
      and rev_id = $2
      ${column !== 'event_name' ? `and event_name = ''` : `and event_name != ''`}
      and ${getBetweenDates('created_at', startDate, endDate)}
      ${pageviewQuery}
      ${sessionQuery}
      ${eventQuery}
    group by x
    order by y desc`,
    params,
  );
}
queries/analytics/pageview/getPageviewMetrics.ts (new file, 81 lines)
@@ -0,0 +1,81 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import cache from 'lib/cache';
import { Prisma } from '@prisma/client';
import { EVENT_TYPE } from 'lib/constants';

export async function getPageviewMetrics(
  ...args: [
    websiteId: string,
    data: {
      startDate: Date;
      endDate: Date;
      column: Prisma.WebsiteEventScalarFieldEnum | Prisma.SessionScalarFieldEnum;
      table: string;
      filters: object;
    },
  ]
) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

async function relationalQuery(
  websiteId: string,
  data: {
    startDate: Date;
    endDate: Date;
    column: Prisma.WebsiteEventScalarFieldEnum | Prisma.SessionScalarFieldEnum;
    filters: object;
  },
) {
  const { startDate, endDate, column, filters = {} } = data;
  const { rawQuery, parseFilters } = prisma;
  const params = [startDate, endDate];
  const { filterQuery, joinSession } = parseFilters(filters, params);

  return rawQuery(
    `select ${column} x, count(*) y
    from website_event
      ${joinSession}
    where website_id='${websiteId}'
      and website_event.created_at between $1 and $2
      and event_type = ${EVENT_TYPE.pageView}
      ${filterQuery}
    group by 1
    order by 2 desc`,
    params,
  );
}

async function clickhouseQuery(
  websiteId: string,
  data: {
    startDate: Date;
    endDate: Date;
    column: Prisma.WebsiteEventScalarFieldEnum | Prisma.SessionScalarFieldEnum;
    filters: object;
  },
) {
  const { startDate, endDate, column, filters = {} } = data;
  const { rawQuery, parseFilters, getBetweenDates } = clickhouse;
  const website = await cache.fetchWebsite(websiteId);
  const params = [websiteId, website?.revId || 0, EVENT_TYPE.pageView];
  const { filterQuery } = parseFilters(filters, params);

  return rawQuery(
    `select ${column} x, count(*) y
    from event
    where website_id = $1
      and rev_id = $2
      and event_type = $3
      and ${getBetweenDates('created_at', startDate, endDate)}
      ${filterQuery}
    group by x
    order by y desc`,
    params,
  );
}
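For orientation, a hypothetical call site for the new TypeScript signature; the identifiers and filter values below are illustrative and do not come from this commit:

// Hypothetical usage; values are placeholders only.
const metrics = await getPageviewMetrics('<website-uuid>', {
  startDate: new Date('2023-01-01'),
  endDate: new Date('2023-01-31'),
  column: 'url',                 // must be one of the allowed Prisma scalar field enums
  table: 'website_event',
  filters: { country: 'US' },
});
// Both backends return rows shaped like { x, y }, e.g. [{ x: '/', y: 123 }, ...].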
@@ -1,41 +0,0 @@
import prisma from 'lib/prisma';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';

export async function getPageviewParams(...args) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

async function relationalQuery(websiteId, start_at, end_at, column, table, filters = {}) {
  const { parseFilters, rawQuery } = prisma;
  const params = [start_at, end_at];
  const { pageviewQuery, sessionQuery, eventQuery, joinSession } = parseFilters(
    table,
    column,
    filters,
    params,
  );

  return rawQuery(
    `select url x,
      count(*) y
    from ${table}
      ${` join website on ${table}.website_id = website.website_id`}
      ${joinSession}
    where website.website_id='${websiteId}'
      and ${table}.created_at between $1 and $2
      and ${table}.url like '%?%'
      ${pageviewQuery}
      ${joinSession && sessionQuery}
      ${eventQuery}
    group by 1
    order by 2 desc`,
    params,
  );
}

function clickhouseQuery() {
  return Promise.reject(new Error('Not implemented.'));
}
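getPageviewParams is removed outright; no TypeScript replacement for it appears in this commit. If it were ported to the new single-table schema used by the files above, the relational query would presumably select query-string URLs from website_event instead of joining pageview and website. A hypothetical sketch along those lines (table and helper names mirror the converted files; this is not code from the repository):

// Hypothetical port to the new schema; not part of this commit.
import prisma from 'lib/prisma';

async function relationalQuery(
  websiteId: string,
  data: { startDate: Date; endDate: Date; filters: object },
) {
  const { startDate, endDate, filters = {} } = data;
  const { rawQuery, parseFilters } = prisma;
  const params = [startDate, endDate];
  const { filterQuery, joinSession } = parseFilters(filters, params);

  return rawQuery(
    `select url x, count(*) y
    from website_event
      ${joinSession}
    where website_id='${websiteId}'
      and created_at between $1 and $2
      and url like '%?%'
      ${filterQuery}
    group by 1
    order by 2 desc`,
    params,
  );
}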
@@ -1,78 +0,0 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import cache from 'lib/cache';

export async function getPageviewStats(...args) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

async function relationalQuery(
  websiteId,
  {
    start_at,
    end_at,
    timezone = 'utc',
    unit = 'day',
    count = '*',
    filters = {},
    sessionKey = 'session_id',
  },
) {
  const { getDateQuery, parseFilters, rawQuery } = prisma;
  const params = [start_at, end_at];
  const { pageviewQuery, sessionQuery, joinSession } = parseFilters(
    'pageview',
    null,
    filters,
    params,
  );

  return rawQuery(
    `select ${getDateQuery('pageview.created_at', unit, timezone)} t,
      count(${count !== '*' ? `${count}${sessionKey}` : count}) y
    from pageview
      join website
        on pageview.website_id = website.website_id
      ${joinSession}
    where website.website_id='${websiteId}'
      and pageview.created_at between $1 and $2
      ${pageviewQuery}
      ${sessionQuery}
    group by 1`,
    params,
  );
}

async function clickhouseQuery(
  websiteId,
  { start_at, end_at, timezone = 'UTC', unit = 'day', count = '*', filters = {} },
) {
  const { parseFilters, rawQuery, getDateStringQuery, getDateQuery, getBetweenDates } = clickhouse;
  const website = await cache.fetchWebsite(websiteId);
  const params = [websiteId, website?.revId || 0];
  const { pageviewQuery, sessionQuery } = parseFilters(null, filters, params);

  return rawQuery(
    `select
      ${getDateStringQuery('g.t', unit)} as t,
      g.y as y
    from
      (select
        ${getDateQuery('created_at', unit, timezone)} t,
        count(${count !== '*' ? 'distinct session_id' : count}) y
      from event
      where event_name = ''
        and website_id = $1
        and rev_id = $2
        and ${getBetweenDates('created_at', start_at, end_at)}
        ${pageviewQuery}
        ${sessionQuery}
      group by t) g
    order by t`,
    params,
  );
}
queries/analytics/pageview/getPageviewStats.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import cache from 'lib/cache';
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
import { EVENT_TYPE } from 'lib/constants';

export async function getPageviewStats(
  ...args: [
    websiteId: string,
    data: {
      startDate: Date;
      endDate: Date;
      timezone?: string;
      unit?: string;
      count?: string;
      filters: object;
      sessionKey?: string;
    },
  ]
) {
  return runQuery({
    [PRISMA]: () => relationalQuery(...args),
    [CLICKHOUSE]: () => clickhouseQuery(...args),
  });
}

async function relationalQuery(
  websiteId: string,
  data: {
    startDate: Date;
    endDate: Date;
    timezone?: string;
    unit?: string;
    count?: string;
    filters: object;
    sessionKey?: string;
  },
) {
  const {
    startDate,
    endDate,
    timezone = 'utc',
    unit = 'day',
    count = '*',
    filters = {},
    sessionKey = 'session_id',
  } = data;
  const { getDateQuery, parseFilters, rawQuery } = prisma;
  const params = [startDate, endDate];
  const { filterQuery, joinSession } = parseFilters(filters, params);

  return rawQuery(
    `select ${getDateQuery('website_event.created_at', unit, timezone)} t,
      count(${count !== '*' ? `${count}${sessionKey}` : count}) y
    from website_event
      ${joinSession}
    where website.website_id='${websiteId}'
      and pageview.created_at between $1 and $2
      and event_type = ${EVENT_TYPE.pageView}
      ${filterQuery}
    group by 1`,
    params,
  );
}

async function clickhouseQuery(
  websiteId: string,
  data: {
    startDate: Date;
    endDate: Date;
    timezone?: string;
    unit?: string;
    count?: string;
    filters: object;
    sessionKey?: string;
  },
) {
  const { startDate, endDate, timezone = 'UTC', unit = 'day', count = '*', filters = {} } = data;
  const { parseFilters, rawQuery, getDateStringQuery, getDateQuery, getBetweenDates } = clickhouse;
  const website = await cache.fetchWebsite(websiteId);
  const params = [websiteId, website?.revId || 0];
  const { filterQuery } = parseFilters(filters, params);

  return rawQuery(
    `select
      ${getDateStringQuery('g.t', unit)} as t,
      g.y as y
    from
      (select
        ${getDateQuery('created_at', unit, timezone)} t,
        count(${count !== '*' ? 'distinct session_id' : count}) y
      from event
      where website_id = $1
        and rev_id = $2
        and event_type = ${EVENT_TYPE.pageView}
        and ${getBetweenDates('created_at', startDate, endDate)}
        ${filterQuery}
      group by t) g
    order by t`,
    params,
  );
}
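Note that relationalQuery in the new getPageviewStats.ts still filters on website.website_id and pageview.created_at even though the query now selects from website_event and no longer joins the website table, so those identifiers no longer resolve. The predicate presumably intended targets website_event itself; a hedged sketch of how the return statement above would then read (an assumption, not code from this commit):

// Presumed intent after the move to website_event; this correction is an assumption.
return rawQuery(
  `select ${getDateQuery('website_event.created_at', unit, timezone)} t,
    count(${count !== '*' ? `${count}${sessionKey}` : count}) y
  from website_event
    ${joinSession}
  where website_event.website_id = '${websiteId}'
    and website_event.created_at between $1 and $2
    and event_type = ${EVENT_TYPE.pageView}
    ${filterQuery}
  group by 1`,
  params,
);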
@@ -1,26 +1,45 @@
-import { URL_LENGTH } from 'lib/constants';
+import { URL_LENGTH, EVENT_TYPE } from 'lib/constants';
 import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
 import kafka from 'lib/kafka';
 import prisma from 'lib/prisma';
 import cache from 'lib/cache';
 import { uuid } from 'lib/crypto';

-export async function savePageView(...args) {
+export async function savePageView(args: {
+  id: string;
+  websiteId: string;
+  url: string;
+  referrer?: string;
+  hostname?: string;
+  browser?: string;
+  os?: string;
+  device?: string;
+  screen?: string;
+  language?: string;
+  country?: string;
+}) {
   return runQuery({
-    [PRISMA]: () => relationalQuery(...args),
-    [CLICKHOUSE]: () => clickhouseQuery(...args),
+    [PRISMA]: () => relationalQuery(args),
+    [CLICKHOUSE]: () => clickhouseQuery(args),
   });
 }

-async function relationalQuery(data) {
-  const { websiteId, sessionId, url, referrer } = data;
-  return prisma.client.pageview.create({
+async function relationalQuery(data: {
+  id: string;
+  websiteId: string;
+  url: string;
+  referrer?: string;
+}) {
+  const { websiteId, id: sessionId, url, referrer } = data;
+
+  return prisma.client.websiteEvent.create({
     data: {
       id: uuid(),
       websiteId,
       sessionId,
       url: url?.substring(0, URL_LENGTH),
       referrer: referrer?.substring(0, URL_LENGTH),
+      eventType: EVENT_TYPE.pageView,
     },
   });
 }

@@ -30,7 +49,7 @@ async function clickhouseQuery(data) {
   const website = await cache.fetchWebsite(websiteId);
   const { getDateFormat, sendMessage } = kafka;

-  const msg = {
+  const message = {
     session_id: sessionId,
     website_id: websiteId,
     url: url?.substring(0, URL_LENGTH),

@@ -38,10 +57,11 @@ async function clickhouseQuery(data) {
     rev_id: website?.revId || 0,
     created_at: getDateFormat(new Date()),
     country: country ? country : null,
+    event_type: EVENT_TYPE.pageView,
     ...args,
   };

-  await sendMessage(msg, 'event');
+  await sendMessage(message, 'event');

   return data;
 }
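For orientation, a hypothetical call to the new savePageView signature; all field values are placeholders and the actual caller is not shown in this diff:

// Hypothetical call site; values are illustrative, not data from the repository.
await savePageView({
  id: '<session-uuid>',            // destructured as sessionId inside relationalQuery
  websiteId: '<website-uuid>',
  url: '/pricing',
  referrer: 'https://www.example.com/',
  hostname: 'example.com',
  browser: 'chrome',
  os: 'macOS',
  device: 'desktop',
  screen: '1920x1080',
  language: 'en-US',
  country: 'US',
});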