mirror of https://github.com/umami-software/umami.git (synced 2026-02-21 13:05:36 +01:00)

feat: separate mongoQuery & add mongo filter

parent 4c57ab1388
commit b5b689b156

15 changed files with 858 additions and 710 deletions
@@ -1,5 +1,5 @@
 import clickhouse from 'lib/clickhouse';
-import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
 import prisma from 'lib/prisma';
 import { WebsiteEventDataMetric } from 'lib/types';
 import { loadWebsite } from 'lib/query';
@@ -23,6 +23,7 @@ export async function getEventData(
 ): Promise<WebsiteEventDataMetric[]> {
   return runQuery({
     [PRISMA]: () => relationalQuery(...args),
+    [MONGODB]: () => mongoQuery(...args),
     [CLICKHOUSE]: () => clickhouseQuery(...args),
   });
 }
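The hunk above registers a third handler, keyed by MONGODB, next to the existing PRISMA and CLICKHOUSE entries. lib/db itself is not among the hunks shown here, so the sketch below is only an illustration of how a keyed dispatcher of this shape can work; the getDatabaseType helper and the DATABASE_TYPE environment variable in it are assumptions, not code from this commit.

// Illustrative sketch only -- not the lib/db implementation from this commit.
export const PRISMA = 'prisma';
export const MONGODB = 'mongodb';
export const CLICKHOUSE = 'clickhouse';

type QueryHandlers<T> = Partial<Record<string, () => Promise<T>>>;

// Hypothetical resolver; the real project derives the active database
// type from its connection configuration.
function getDatabaseType(): string {
  return process.env.DATABASE_TYPE || PRISMA;
}

// Runs whichever handler is registered for the active database type.
export async function runQuery<T>(handlers: QueryHandlers<T>): Promise<T> {
  const type = getDatabaseType();
  const handler = handlers[type];

  if (!handler) {
    throw new Error(`No query handler registered for database type: ${type}`);
  }

  return handler();
}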
@@ -47,123 +48,13 @@ async function relationalQuery(
   },
 ) {
   const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
-  const { getDatabaseType, toUuid, rawQuery, getEventDataFilterQuery, getDateQuery, client } =
-    prisma;
-  const db = getDatabaseType();
+  const { toUuid, rawQuery, getEventDataFilterQuery, getDateQuery } = prisma;
   const website = await loadWebsite(websiteId);
   const resetDate = new Date(website?.resetAt || website?.createdAt);
   const params: any = [websiteId, resetDate, startDate, endDate, eventName || ''];

-  if (db === 'mongodb') {
-    let joinAggregation: any = { match: {} };
-    let matchAggregation: any = { match: {} };
-    let eventTypeProjectProperty = '';
-    let urlProjectProperty = '';
-    if (eventName || urlPath) {
-      joinAggregation = {
-        $lookup: {
-          from: 'website_event',
-          localField: 'website_event_id',
-          foreignField: '_id',
-          as: 'result',
-        },
-      };
-      eventTypeProjectProperty = 'event_name: {$arrayElemAt: ["$result.event_name", 0]}';
-    }
-    if (eventName) {
-      matchAggregation = {
-        $match: {
-          'result.event_name': eventName,
-        },
-      };
-    }
-    if (urlPath) {
-      urlProjectProperty = 'url_path: {$arrayElemAt: ["$result.url_path", 0],}';
-    }
-    let timeProjectProperty = '';
-    if (timeSeries) {
-      timeProjectProperty = `t: $dateTrunc: {date: "$created_at",unit: ${timeSeries.unit}, timezone : ${timeSeries.timezone}`;
-    }
-    return await client.websiteEvent.aggregateRaw({
-      pipeline: [
-        {
-          $match: {
-            $expr: {
-              $and: [
-                {
-                  $eq: ['$website_id', websiteId],
-                },
-                {
-                  $gte: [
-                    '$created_at',
-                    {
-                      $dateFromString: {
-                        dateString: resetDate.toISOString(),
-                      },
-                    },
-                  ],
-                },
-                {
-                  $gte: [
-                    '$created_at',
-                    {
-                      $dateFromString: {
-                        dateString: startDate.toISOString(),
-                      },
-                    },
-                  ],
-                },
-                {
-                  $lte: [
-                    '$created_at',
-                    {
-                      $dateFromString: {
-                        dateString: endDate.toISOString(),
-                      },
-                    },
-                  ],
-                },
-              ],
-            },
-          },
-        },
-        joinAggregation,
-        matchAggregation,
-        {
-          $project: {
-            eventTypeProjectProperty,
-            timeProjectProperty,
-            urlProjectProperty,
-          },
-        },
-        {
-          $group: {
-            _id: {
-              url_path: '$url_path',
-              event_name: '$event_name',
-              t: '$t',
-            },
-            x: {
-              $sum: 1,
-            },
-          },
-        },
-        {
-          $project: {
-            url_path: '$_id.url_path',
-            urlPath: '$_id.url_path',
-            event_name: '$_id.event_name',
-            eventName: '$_id.event_name',
-            x: 1,
-            t: '$_id.t',
-            _id: 0,
-          },
-        },
-      ],
-    });
-  } else {
-    return rawQuery(
-      `select
+  return rawQuery(
+    `select
       count(*) x
       ${eventName ? `,event_name eventName` : ''}
      ${urlPath ? `,url_path urlPath` : ''}
@@ -182,9 +73,8 @@ async function relationalQuery(
       ${eventName ? `and eventName = $5` : ''}
       ${getEventDataFilterQuery(filters, params)}
       ${timeSeries ? 'group by t' : ''}`,
-      params,
-    );
-  }
+    params,
+  );
 }

 async function clickhouseQuery(
@@ -231,3 +121,137 @@ async function clickhouseQuery(
     params,
   );
 }
+
+async function mongoQuery(
+  websiteId: string,
+  data: {
+    startDate: Date;
+    endDate: Date;
+    timeSeries?: {
+      unit: string;
+      timezone: string;
+    };
+    eventName: string;
+    urlPath?: string;
+    filters: [
+      {
+        eventKey?: string;
+        eventValue?: string | number | boolean | Date;
+      },
+    ];
+  },
+) {
+  const { startDate, endDate, timeSeries, eventName, urlPath, filters } = data;
+  const { client, parseMongoFilter } = prisma;
+  const website = await loadWebsite(websiteId);
+  const resetDate = new Date(website?.resetAt || website?.createdAt);
+  const mongoFilter = parseMongoFilter(filters);
+
+  let joinAggregation: any = { match: {} };
+  let matchAggregation: any = { match: {} };
+  let eventTypeProjectProperty = '';
+  let urlProjectProperty = '';
+  if (eventName || urlPath) {
+    joinAggregation = {
+      $lookup: {
+        from: 'website_event',
+        localField: 'website_event_id',
+        foreignField: '_id',
+        as: 'result',
+      },
+    };
+    eventTypeProjectProperty = 'event_name: {$arrayElemAt: ["$result.event_name", 0]}';
+  }
+  if (eventName) {
+    matchAggregation = {
+      $match: {
+        'result.event_name': eventName,
+      },
+    };
+  }
+  if (urlPath) {
+    urlProjectProperty = 'url_path: {$arrayElemAt: ["$result.url_path", 0],}';
+  }
+  let timeProjectProperty = '';
+  if (timeSeries) {
+    timeProjectProperty = `t: $dateTrunc: {date: "$created_at",unit: ${timeSeries.unit}, timezone : ${timeSeries.timezone}`;
+  }
+  return await client.websiteEvent.aggregateRaw({
+    pipeline: [
+      mongoFilter,
+      {
+        $match: {
+          $expr: {
+            $and: [
+              {
+                $eq: ['$website_id', websiteId],
+              },
+              {
+                $gte: [
+                  '$created_at',
+                  {
+                    $dateFromString: {
+                      dateString: resetDate.toISOString(),
+                    },
+                  },
+                ],
+              },
+              {
+                $gte: [
+                  '$created_at',
+                  {
+                    $dateFromString: {
+                      dateString: startDate.toISOString(),
+                    },
+                  },
+                ],
+              },
+              {
+                $lte: [
+                  '$created_at',
+                  {
+                    $dateFromString: {
+                      dateString: endDate.toISOString(),
+                    },
+                  },
+                ],
+              },
+            ],
+          },
+        },
+      },
+      joinAggregation,
+      matchAggregation,
+      {
+        $project: {
+          eventTypeProjectProperty,
+          timeProjectProperty,
+          urlProjectProperty,
+        },
+      },
+      {
+        $group: {
+          _id: {
+            url_path: '$url_path',
+            event_name: '$event_name',
+            t: '$t',
+          },
+          x: {
+            $sum: 1,
+          },
+        },
+      },
+      {
+        $project: {
+          url_path: '$_id.url_path',
+          urlPath: '$_id.url_path',
+          event_name: '$_id.event_name',
+          eventName: '$_id.event_name',
+          x: 1,
+          t: '$_id.t',
+          _id: 0,
+        },
+      },
+    ],
+  });
+}
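The new mongoQuery prepends mongoFilter, the result of prisma.parseMongoFilter(filters), to its aggregation pipeline; that helper lives in lib/prisma and is not part of the hunks shown here. The sketch below is a minimal illustration of what a filter stage built from { eventKey, eventValue } pairs could look like, assuming the event data documents expose event_key and event_value fields (both names, and the function itself, are assumptions rather than the project's code):

// Illustrative sketch only -- the real parseMongoFilter in lib/prisma is not shown in this diff.
// Assumes event data documents carry event_key / event_value fields; those names are assumptions.
type EventDataFilter = {
  eventKey?: string;
  eventValue?: string | number | boolean | Date;
};

function parseMongoFilterSketch(filters: EventDataFilter[] = []) {
  const conditions: Record<string, unknown>[] = [];

  for (const { eventKey, eventValue } of filters) {
    if (eventKey !== undefined) {
      conditions.push({ event_key: eventKey });
    }
    if (eventValue !== undefined) {
      conditions.push({ event_value: eventValue });
    }
  }

  // With no filters, return an empty $match that passes every document;
  // otherwise require all conditions to hold.
  return conditions.length > 0 ? { $match: { $and: conditions } } : { $match: {} };
}

Returning an empty $match for the no-filter case matters because mongoFilter is placed unconditionally at the head of the pipeline above.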
@@ -1,7 +1,7 @@
 import { Prisma } from '@prisma/client';
 import { EVENT_DATA_TYPE } from 'lib/constants';
 import { uuid } from 'lib/crypto';
-import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import { CLICKHOUSE, MONGODB, PRISMA, runQuery } from 'lib/db';
 import { flattenJSON } from 'lib/eventData';
 import kafka from 'lib/kafka';
 import prisma from 'lib/prisma';
@@ -18,6 +18,7 @@ export async function saveEventData(args: {
 }) {
   return runQuery({
     [PRISMA]: () => relationalQuery(args),
+    [MONGODB]: () => relationalQuery(args),
     [CLICKHOUSE]: () => clickhouseQuery(args),
   });
 }
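Taken together, the two files let one call site serve all three backends. The example below is only a usage illustration: it assumes getEventData forwards (websiteId, data) to the per-database handlers, as the ...args spread suggests, and the import path and argument values are invented rather than taken from this diff.

// Usage illustration; the import path and the values are assumptions, not code from this commit.
import { getEventData } from 'queries/analytics/eventData/getEventData';

export async function exampleEventDataReport(websiteId: string) {
  // runQuery routes this call to mongoQuery on MongoDB, relationalQuery on a
  // relational database, and clickhouseQuery on ClickHouse.
  return getEventData(websiteId, {
    startDate: new Date('2023-01-01'),
    endDate: new Date('2023-01-31'),
    timeSeries: { unit: 'day', timezone: 'UTC' },
    eventName: 'signup',
    urlPath: '/pricing',
    filters: [{ eventKey: 'plan', eventValue: 'pro' }],
  });
}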