Mirror of https://github.com/umami-software/umami.git (synced 2026-02-12 16:45:35 +01:00)
Dev (#1702)
* Initial Typescript models.
* Re-add realtime data
* get distinct sessions for session metrics
* Add queries for new schema.
* Fix Typo.
* Add some api/team endpoints.
* Fix destructure error.
* Fix getWebsites call.
* Ignore typescript build errors.
* Fix enum issue.
* add clickhouse route to deleteWebsite
* Fix Website auth.
* Updated lint-staged config.
* Add permission checks.
* Add user role api.
* Fix error when updating website.
* Fix isAdmin check. Fix Schema.
* Initial conversion to react-basics.
* Remove user/team transfer from website update.
* delete website in relational query
* Fix login secure token creation.
* Add event type to event.
* Allow user to be added to team with role.
* Updated login form.
* Add Role to TeamUser.
* Add database migration.
* Refactored permissions check. Updated redis lib.
* Feat/um 114 roles and permissions (#1683)
* Auth checkpoint.
* Merge branch 'dev' into feat/um-114-roles-and-permissions
* Add 02 migration.
* Added lib/types.
* Updated schema.
* Updated roles and permissions logic.
* Implement react-basics styles. Fix queries.
* Update website details layout.
* Add 01 migration.
* Fix admin create.
* Update react-basics.

Co-authored-by: Francis Cao <franciscao@gmail.com>
Co-authored-by: Mike Cao <mike@mikecao.com>
Co-authored-by: Mike Cao <moocao@gmail.com>
Parent: 94848cc41b
Commit: 8732d056dd
165 changed files with 3370 additions and 6268 deletions
@@ -3,14 +3,14 @@ import prisma from 'lib/prisma';
 import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
 
-export async function getActiveVisitors(...args) {
+export async function getActiveVisitors(...args: [websiteId: string]) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
   });
 }
 
-async function relationalQuery(websiteId) {
+async function relationalQuery(websiteId: string) {
   const date = subMinutes(new Date(), 5);
   const params = [date];
 
@@ -25,7 +25,7 @@ async function relationalQuery(websiteId) {
   );
 }
 
-async function clickhouseQuery(websiteId) {
+async function clickhouseQuery(websiteId: string) {
   const { rawQuery, getDateFormat } = clickhouse;
   const params = [websiteId];
 
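For context, getActiveVisitors does not run SQL itself; it hands one callback per backend to runQuery from lib/db, which picks the PRISMA or CLICKHOUSE path. The sketch below only illustrates that keyed-callback dispatch; the real lib/db implementation is not part of this diff, and the environment-variable check is an assumption.

// Hedged sketch of the PRISMA/CLICKHOUSE dispatch used by the call sites above.
// Assumption: the backend is chosen from an env flag (requires Node typings);
// umami's actual lib/db may decide differently.
export const PRISMA = 'prisma';
export const CLICKHOUSE = 'clickhouse';

type QueryHandlers<T> = Record<typeof PRISMA | typeof CLICKHOUSE, () => Promise<T>>;

export async function runQuery<T>(handlers: QueryHandlers<T>): Promise<T> {
  // Use ClickHouse when a connection URL is configured, otherwise fall back
  // to the relational (Prisma) query.
  const backend = process.env.CLICKHOUSE_URL ? CLICKHOUSE : PRISMA;
  return handlers[backend]();
}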
@@ -3,22 +3,23 @@ import clickhouse from 'lib/clickhouse';
 import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
 import cache from 'lib/cache';
 
-export async function getWebsiteStats(...args) {
+export async function getWebsiteStats(
+  ...args: [websiteId: string, data: { startDate: Date; endDate: Date; filters: object }]
+) {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
   });
 }
 
-async function relationalQuery(websiteId, { start_at, end_at, filters = {} }) {
+async function relationalQuery(
+  websiteId: string,
+  data: { startDate: Date; endDate: Date; filters: object },
+) {
+  const { startDate, endDate, filters = {} } = data;
   const { getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;
-  const params = [start_at, end_at];
-  const { pageviewQuery, sessionQuery, joinSession } = parseFilters(
-    'pageview',
-    null,
-    filters,
-    params,
-  );
+  const params = [startDate, endDate];
+  const { filterQuery, joinSession } = parseFilters(filters, params);
 
   return rawQuery(
     `select sum(t.c) as "pageviews",
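The new getWebsiteStats signature types its rest parameter with a labeled tuple, so the untyped ...args spread into relationalQuery and clickhouseQuery is still checked at the call site. A standalone illustration of that pattern, with hypothetical names not taken from the diff:

// Labeled-tuple rest parameter: the tuple gives `...args` real parameter
// names and types, so forwarding the spread stays type-safe.
type StatsArgs = [websiteId: string, data: { startDate: Date; endDate: Date; filters: object }];

async function fetchStats(
  websiteId: string,
  data: { startDate: Date; endDate: Date; filters: object },
) {
  return { websiteId, ...data };
}

async function getStats(...args: StatsArgs) {
  // args has type StatsArgs, so this spread is verified against fetchStats.
  return fetchStats(...args);
}

// Compiles: both arguments match the tuple.
void getStats('site-1', { startDate: new Date(), endDate: new Date(), filters: {} });

// Would not compile: endDate is missing.
// getStats('site-1', { startDate: new Date(), filters: {} });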
@@ -36,19 +37,22 @@ async function relationalQuery(websiteId, { start_at, end_at, filters = {} }) {
       ${joinSession}
       where website.website_id='${websiteId}'
       and pageview.created_at between $1 and $2
-      ${pageviewQuery}
-      ${sessionQuery}
+      ${filterQuery}
       group by 1, 2
     ) t`,
     params,
   );
 }
 
-async function clickhouseQuery(websiteId, { start_at, end_at, filters = {} }) {
+async function clickhouseQuery(
+  websiteId: string,
+  data: { startDate: Date; endDate: Date; filters: object },
+) {
+  const { startDate, endDate, filters = {} } = data;
   const { rawQuery, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
   const website = await cache.fetchWebsite(websiteId);
   const params = [websiteId, website?.revId || 0];
-  const { pageviewQuery, sessionQuery } = parseFilters(null, filters, params);
+  const { filterQuery } = parseFilters(filters, params);
 
   return rawQuery(
     `select
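Both rewritten call sites now pass parseFilters(filters, params) and read back a single filterQuery (plus joinSession on the relational side) instead of separate pageviewQuery/sessionQuery fragments. The helper itself is not part of this diff; the declaration below is only a hypothetical shape inferred from those call sites.

// Hypothetical shape inferred from the call sites above; the actual helper
// lives in lib/prisma and lib/clickhouse and may differ.
interface ParsedFilters {
  filterQuery: string;   // single SQL fragment covering all active filters
  joinSession?: string;  // extra session join, used by the relational query
}

declare function parseFilters(filters: object, params: unknown[]): ParsedFilters;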
@@ -66,9 +70,8 @@ async function clickhouseQuery(websiteId, { start_at, end_at, filters = {} }) {
     where event_name = ''
       and website_id = $1
       and rev_id = $2
-      and ${getBetweenDates('created_at', start_at, end_at)}
-      ${pageviewQuery}
-      ${sessionQuery}
+      and ${getBetweenDates('created_at', startDate, endDate)}
+      ${filterQuery}
       group by session_id, time_series
     ) t;`,
     params,
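After this change, callers pass a single data object with camelCase startDate/endDate fields rather than snake_case start_at/end_at. A usage sketch; the import path and the filter key are assumptions, not taken from this diff:

// Assumed import path and filter key, shown only to illustrate the new
// { startDate, endDate, filters } argument shape.
import { getWebsiteStats } from 'queries';

async function example() {
  const stats = await getWebsiteStats('some-website-id', {
    startDate: new Date('2023-01-01'),
    endDate: new Date('2023-01-31'),
    filters: { url: '/pricing' }, // hypothetical filter
  });
  console.log(stats);
}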