diff --git a/components/pages/event-data/EventDataTable.js b/components/pages/event-data/EventDataTable.js
index 4f938f8a..8260ac35 100644
--- a/components/pages/event-data/EventDataTable.js
+++ b/components/pages/event-data/EventDataTable.js
@@ -7,14 +7,6 @@ export function EventDataTable({ data = [] }) {
const { formatMessage, labels } = useMessages();
const { resolveUrl } = usePageQuery();
- function linkToView(row, cell) {
- return (
-
- {cell}
-
- );
- }
-
if (data.length === 0) {
return <Empty />;
}
@@ -22,10 +14,14 @@ export function EventDataTable({ data = [] }) {
return (
- {row => linkToView(row, row.event)}
+ {row => (
+
+ {row.event}
+
+ )}
- {row => linkToView(row, row.field)}
+ {row => row.field}
{({ total }) => total.toLocaleString()}
diff --git a/components/pages/event-data/EventDataValueTable.js b/components/pages/event-data/EventDataValueTable.js
index fedda654..2637053e 100644
--- a/components/pages/event-data/EventDataValueTable.js
+++ b/components/pages/event-data/EventDataValueTable.js
@@ -5,14 +5,14 @@ import Icons from 'components/icons';
import PageHeader from 'components/layout/PageHeader';
import Empty from 'components/common/Empty';
-export function EventDataTable({ data = [], field, event }) {
+export function EventDataValueTable({ data = [], event }) {
const { formatMessage, labels } = useMessages();
const { resolveUrl } = usePageQuery();
const Title = () => {
return (
<>
-
+
-
- {event} - {field}
-
+ {event}
</>
);
};
@@ -33,6 +31,7 @@ export function EventDataTable({ data = [], field, event }) {
{data.length <= 0 && <Empty />}
{data.length > 0 && (
+
{({ total }) => total.toLocaleString()}
@@ -43,4 +42,4 @@ export function EventDataTable({ data = [], field, event }) {
);
}
-export default EventDataTable;
+export default EventDataValueTable;
diff --git a/components/pages/websites/WebsiteEventData.js b/components/pages/websites/WebsiteEventData.js
index d4ecc27c..7f9a6829 100644
--- a/components/pages/websites/WebsiteEventData.js
+++ b/components/pages/websites/WebsiteEventData.js
@@ -5,20 +5,18 @@ import { EventDataMetricsBar } from 'components/pages/event-data/EventDataMetric
import { useDateRange, useApi, usePageQuery } from 'hooks';
import styles from './WebsiteEventData.module.css';
-function useFields(websiteId, field, event) {
+function useData(websiteId, event) {
const [dateRange] = useDateRange(websiteId);
const { startDate, endDate } = dateRange;
const { get, useQuery } = useApi();
const { data, error, isLoading } = useQuery(
- ['event-data:fields', { websiteId, startDate, endDate, field }],
+ ['event-data:events', { websiteId, startDate, endDate, event }],
() =>
- get('/event-data/fields', {
+ get('/event-data/events', {
websiteId,
startAt: +startDate,
endAt: +endDate,
- field,
event,
- withEventNames: true,
}),
{ enabled: !!(websiteId && startDate && endDate) },
);
@@ -28,15 +26,15 @@ function useFields(websiteId, field, event) {
export default function WebsiteEventData({ websiteId }) {
const {
- query: { view, event },
+ query: { event },
} = usePageQuery();
- const { data } = useFields(websiteId, view, event);
+ const { data } = useData(websiteId, event);
return (
- {!view && }
- {view && }
+ {!event && }
+ {event && }
);
}
diff --git a/db/clickhouse/schema.sql b/db/clickhouse/schema.sql
index dceb0d1e..94b560c3 100644
--- a/db/clickhouse/schema.sql
+++ b/db/clickhouse/schema.sql
@@ -6,7 +6,7 @@ CREATE TABLE umami.website_event
website_id UUID,
session_id UUID,
event_id UUID,
- --session
+ --sessions
hostname LowCardinality(String),
browser LowCardinality(String),
os LowCardinality(String),
@@ -17,14 +17,14 @@ CREATE TABLE umami.website_event
subdivision1 LowCardinality(String),
subdivision2 LowCardinality(String),
city String,
- --pageview
+ --pageviews
url_path String,
url_query String,
referrer_path String,
referrer_query String,
referrer_domain String,
page_title String,
- --event
+ --events
event_type UInt32,
event_name String,
created_at DateTime('UTC'),
@@ -38,7 +38,7 @@ CREATE TABLE umami.website_event_queue (
website_id UUID,
session_id UUID,
event_id UUID,
- --session
+ --sessions
hostname LowCardinality(String),
browser LowCardinality(String),
os LowCardinality(String),
@@ -49,14 +49,14 @@ CREATE TABLE umami.website_event_queue (
subdivision1 LowCardinality(String),
subdivision2 LowCardinality(String),
city String,
- --pageview
+ --pageviews
url_path String,
url_query String,
referrer_path String,
referrer_query String,
referrer_domain String,
page_title String,
- --event
+ --events
event_type UInt32,
event_name String,
created_at DateTime('UTC'),
@@ -66,7 +66,7 @@ CREATE TABLE umami.website_event_queue (
)
ENGINE = Kafka
SETTINGS kafka_broker_list = 'domain:9092,domain:9093,domain:9094', -- input broker list
- kafka_topic_list = 'event',
+ kafka_topic_list = 'events',
kafka_group_name = 'event_consumer_group',
kafka_format = 'JSONEachRow',
kafka_max_block_size = 1048576,
diff --git a/lib/clickhouse.ts b/lib/clickhouse.ts
index eb73d83c..166c0daa 100644
--- a/lib/clickhouse.ts
+++ b/lib/clickhouse.ts
@@ -62,10 +62,6 @@ function getDateFormat(date) {
return `'${dateFormat(date, 'UTC:yyyy-mm-dd HH:MM:ss')}'`;
}
-function getBetweenDates(field, startAt, endAt) {
- return `${field} between ${getDateFormat(startAt)} and ${getDateFormat(endAt)}`;
-}
-
function getEventDataFilterQuery(
filters: {
eventKey?: string;
@@ -150,7 +146,22 @@ function parseFilters(filters: WebsiteMetricFilter = {}, params: any = {}) {
};
}
-async function rawQuery(query, params = {}): Promise {
+function formatField(field, type, value) {
+ switch (type) {
+ case 'date':
+ return getDateFormat(value);
+ default:
+ return field;
+ }
+}
+
+async function rawQuery(sql, params = {}): Promise {
+ const query = sql.replaceAll(/\{\{\w+:\w+}}/g, token => {
+ const [, field, type] = token.match(/\{\{(\w+):(\w+)}}/);
+
+ return formatField(field, type, params[field]);
+ });
+
if (process.env.LOG_QUERY) {
log('QUERY:\n', query);
log('PARAMETERS:\n', params);
@@ -189,7 +200,6 @@ export default {
getDateStringQuery,
getDateQuery,
getDateFormat,
- getBetweenDates,
getFilterQuery,
getFunnelQuery,
getEventDataFilterQuery,
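
Note: the reworked `rawQuery` above now interpolates `{{name:type}}` tokens into the SQL before it is logged or executed. A minimal standalone sketch of that substitution, reusing the `dateFormat` call already present in `lib/clickhouse.ts` (the sample query below is hypothetical, not from the codebase):

```typescript
// Standalone sketch of the {{name:type}} token substitution added to rawQuery.
// getDateFormat mirrors the helper shown in the diff; the sample query is illustrative only.
import dateFormat from 'dateformat';

function getDateFormat(date: Date): string {
  return `'${dateFormat(date, 'UTC:yyyy-mm-dd HH:MM:ss')}'`;
}

function formatField(field: string, type: string, value: unknown): string {
  switch (type) {
    case 'date':
      // Dates are inlined as quoted UTC timestamps.
      return getDateFormat(value as Date);
    default:
      // Unrecognized types fall back to the raw field name.
      return field;
  }
}

function interpolateTokens(sql: string, params: Record<string, unknown> = {}): string {
  return sql.replaceAll(/\{\{\w+:\w+}}/g, token => {
    const match = token.match(/\{\{(\w+):(\w+)}}/);

    if (!match) {
      return token;
    }

    const [, field, type] = match;

    return formatField(field, type, params[field]);
  });
}

// Example: {{startDate:date}} becomes '2023-01-01 00:00:00' (quoted, UTC).
console.log(
  interpolateTokens('select count(*) from website_event where created_at >= {{startDate:date}}', {
    startDate: new Date('2023-01-01T00:00:00Z'),
  }),
);
```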
diff --git a/lib/constants.ts b/lib/constants.ts
index 493cb525..cae654a6 100644
--- a/lib/constants.ts
+++ b/lib/constants.ts
@@ -18,7 +18,7 @@ export const DEFAULT_THEME = 'light';
export const DEFAULT_ANIMATION_DURATION = 300;
export const DEFAULT_DATE_RANGE = '24hour';
export const DEFAULT_WEBSITE_LIMIT = 10;
-export const DEFAULT_CREATED_AT = '2000-01-01';
+export const DEFAULT_RESET_DATE = '2000-01-01';
export const REALTIME_RANGE = 30;
export const REALTIME_INTERVAL = 5000;
diff --git a/lib/crypto.js b/lib/crypto.js
index fcd3e2c0..059e7b66 100644
--- a/lib/crypto.js
+++ b/lib/crypto.js
@@ -1,5 +1,3 @@
-import crypto from 'crypto';
-import { v4, v5 } from 'uuid';
import { startOfMonth } from 'date-fns';
import { hash } from 'next-basics';
@@ -12,13 +10,3 @@ export function salt() {
return hash(secret(), ROTATING_SALT);
}
-
-export function uuid(...args) {
- if (!args.length) return v4();
-
- return v5(hash(...args, salt()), v5.DNS);
-}
-
-export function md5(...args) {
- return crypto.createHash('md5').update(args.join('')).digest('hex');
-}
diff --git a/lib/detect.ts b/lib/detect.ts
index 3278ddc9..9c1e1fa4 100644
--- a/lib/detect.ts
+++ b/lib/detect.ts
@@ -1,5 +1,5 @@
import path from 'path';
-import requestIp from 'request-ip';
+import { getClientIp } from 'request-ip';
import { browserName, detectOS } from 'detect-browser';
import isLocalhost from 'is-localhost-ip';
import maxmind from 'maxmind';
@@ -25,7 +25,7 @@ export function getIpAddress(req) {
return req.headers['cf-connecting-ip'];
}
- return requestIp.getClientIp(req);
+ return getClientIp(req);
}
export function getDevice(screen, os) {
diff --git a/lib/session.ts b/lib/session.ts
index 2725f04c..9bd5ba4d 100644
--- a/lib/session.ts
+++ b/lib/session.ts
@@ -1,6 +1,6 @@
-import { secret, uuid } from 'lib/crypto';
+import { secret } from 'lib/crypto';
import { getClientInfo, getJsonBody } from 'lib/detect';
-import { parseToken } from 'next-basics';
+import { parseToken, uuid } from 'next-basics';
import { CollectRequestBody, NextApiRequestCollect } from 'pages/api/send';
import { createSession } from 'queries';
import { validate } from 'uuid';
@@ -30,7 +30,6 @@ export async function findSession(req: NextApiRequestCollect) {
// Verify payload
const { website: websiteId, hostname, screen, language } = payload;
-
// Check the hostname value for legality to eliminate dirty data
const validHostnameRegex = /^[\w-.]+$/;
if (!validHostnameRegex.test(hostname)) {
diff --git a/lib/sql.ts b/lib/sql.ts
new file mode 100644
index 00000000..325a88ed
--- /dev/null
+++ b/lib/sql.ts
@@ -0,0 +1,9 @@
+export function buildSql(query: string, parameters: object) {
+ const params = { ...parameters };
+
+ const sql = query.replaceAll(/\$[\w_]+/g, name => {
+ return name;
+ });
+
+ return { sql, params };
+}
diff --git a/package.json b/package.json
index 78b15ee2..a4cf8989 100644
--- a/package.json
+++ b/package.json
@@ -90,7 +90,7 @@
"maxmind": "^4.3.6",
"moment-timezone": "^0.5.35",
"next": "13.3.1",
- "next-basics": "^0.31.0",
+ "next-basics": "^0.33.0",
"node-fetch": "^3.2.8",
"npm-run-all": "^4.1.5",
"react": "^18.2.0",
diff --git a/pages/api/event-data/events.ts b/pages/api/event-data/events.ts
new file mode 100644
index 00000000..1d74c3d2
--- /dev/null
+++ b/pages/api/event-data/events.ts
@@ -0,0 +1,39 @@
+import { canViewWebsite } from 'lib/auth';
+import { useCors, useAuth } from 'lib/middleware';
+import { NextApiRequestQueryBody } from 'lib/types';
+import { NextApiResponse } from 'next';
+import { ok, methodNotAllowed, unauthorized } from 'next-basics';
+import { getEventDataEvents } from 'queries';
+
+export interface EventDataFieldsRequestBody {
+ websiteId: string;
+ dateRange: {
+ startDate: string;
+ endDate: string;
+ };
+}
+
+export default async (
+ req: NextApiRequestQueryBody,
+ res: NextApiResponse,
+) => {
+ await useCors(req, res);
+ await useAuth(req, res);
+
+ if (req.method === 'GET') {
+ const { websiteId, startAt, endAt, field, event } = req.query;
+
+ if (!(await canViewWebsite(req.auth, websiteId))) {
+ return unauthorized(res);
+ }
+
+ const data = await getEventDataEvents(websiteId, new Date(+startAt), new Date(+endAt), {
+ field,
+ event,
+ });
+
+ return ok(res, data);
+ }
+
+ return methodNotAllowed(res);
+};
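
For context, the `useData` hook in `WebsiteEventData.js` above is the consumer of this new endpoint. A hedged sketch of an equivalent standalone request (parameter names taken from that hook, response shape inferred from the columns selected in `getEventDataEvents`; the real app calls it through `useApi().get(...)` with auth handled for it):

```typescript
// Illustrative client for GET /api/event-data/events; omits the auth header the real API
// requires via useAuth. The row shape is inferred from the SQL in getEventDataEvents.
interface EventDataEventRow {
  event: string;
  field: string;
  type?: number; // data_type, returned by the ClickHouse query
  value?: string; // string_value, only present when filtering by a single event
  total: number;
}

async function fetchEventDataEvents(
  baseUrl: string,
  websiteId: string,
  startAt: number,
  endAt: number,
  event?: string,
): Promise<EventDataEventRow[]> {
  const params = new URLSearchParams({
    websiteId,
    startAt: String(startAt),
    endAt: String(endAt),
  });

  if (event) {
    params.set('event', event);
  }

  const res = await fetch(`${baseUrl}/api/event-data/events?${params.toString()}`);

  if (!res.ok) {
    throw new Error(`Request failed with status ${res.status}`);
  }

  return res.json();
}
```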
diff --git a/pages/api/event-data/fields.ts b/pages/api/event-data/fields.ts
index 5b0247f6..18b74bc3 100644
--- a/pages/api/event-data/fields.ts
+++ b/pages/api/event-data/fields.ts
@@ -21,20 +21,13 @@ export default async (
await useAuth(req, res);
if (req.method === 'GET') {
- const { websiteId, startAt, endAt, field, event, withEventNames } = req.query;
+ const { websiteId, startAt, endAt, field } = req.query;
if (!(await canViewWebsite(req.auth, websiteId))) {
return unauthorized(res);
}
- const data = await getEventDataFields(
- websiteId,
- new Date(+startAt),
- new Date(+endAt),
- field,
- event,
- withEventNames,
- );
+ const data = await getEventDataFields(websiteId, new Date(+startAt), new Date(+endAt), field);
return ok(res, data);
}
diff --git a/pages/api/reports/funnel.ts b/pages/api/reports/funnel.ts
index 1e121326..33882e03 100644
--- a/pages/api/reports/funnel.ts
+++ b/pages/api/reports/funnel.ts
@@ -3,7 +3,7 @@ import { useCors, useAuth } from 'lib/middleware';
import { NextApiRequestQueryBody } from 'lib/types';
import { NextApiResponse } from 'next';
import { ok, methodNotAllowed, unauthorized } from 'next-basics';
-import { getPageviewFunnel } from 'queries';
+import { getFunnel } from 'queries';
export interface FunnelRequestBody {
websiteId: string;
@@ -41,7 +41,7 @@ export default async (
return unauthorized(res);
}
- const data = await getPageviewFunnel(websiteId, {
+ const data = await getFunnel(websiteId, {
startDate: new Date(startDate),
endDate: new Date(endDate),
urls,
diff --git a/pages/api/reports/index.ts b/pages/api/reports/index.ts
index b2c5da9e..3fe34134 100644
--- a/pages/api/reports/index.ts
+++ b/pages/api/reports/index.ts
@@ -1,8 +1,7 @@
-import { uuid } from 'lib/crypto';
import { useAuth, useCors } from 'lib/middleware';
import { NextApiRequestQueryBody } from 'lib/types';
import { NextApiResponse } from 'next';
-import { methodNotAllowed, ok, unauthorized } from 'next-basics';
+import { methodNotAllowed, ok, unauthorized, uuid } from 'next-basics';
import { createReport, getReports } from 'queries';
import { canViewWebsite } from 'lib/auth';
diff --git a/pages/api/reports/insights.ts b/pages/api/reports/insights.ts
new file mode 100644
index 00000000..dba11953
--- /dev/null
+++ b/pages/api/reports/insights.ts
@@ -0,0 +1,51 @@
+import { canViewWebsite } from 'lib/auth';
+import { useCors, useAuth } from 'lib/middleware';
+import { NextApiRequestQueryBody } from 'lib/types';
+import { NextApiResponse } from 'next';
+import { ok, methodNotAllowed, unauthorized } from 'next-basics';
+import { getInsights } from 'queries';
+
+export interface InsightsRequestBody {
+ websiteId: string;
+ dateRange: {
+ startDate: string;
+ endDate: string;
+ };
+ fields: string[];
+ filters: string[];
+ groups: string[];
+}
+
+export default async (
+ req: NextApiRequestQueryBody,
+ res: NextApiResponse,
+) => {
+ await useCors(req, res);
+ await useAuth(req, res);
+
+ if (req.method === 'POST') {
+ const {
+ websiteId,
+ dateRange: { startDate, endDate },
+ fields,
+ filters,
+ groups,
+ } = req.body;
+
+ if (!(await canViewWebsite(req.auth, websiteId))) {
+ return unauthorized(res);
+ }
+
+ const data = await getInsights(websiteId, {
+ startDate: new Date(startDate),
+ endDate: new Date(endDate),
+ fields,
+ filters,
+ groups,
+ });
+
+ return ok(res, data);
+ }
+
+ return methodNotAllowed(res);
+};
diff --git a/pages/api/teams/index.ts b/pages/api/teams/index.ts
index 453f1ef3..316f3109 100644
--- a/pages/api/teams/index.ts
+++ b/pages/api/teams/index.ts
@@ -1,7 +1,7 @@
import { Team } from '@prisma/client';
import { NextApiRequestQueryBody } from 'lib/types';
import { canCreateTeam } from 'lib/auth';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { useAuth } from 'lib/middleware';
import { NextApiResponse } from 'next';
import { getRandomChars, methodNotAllowed, ok, unauthorized } from 'next-basics';
diff --git a/pages/api/users/index.ts b/pages/api/users/index.ts
index c6103c35..14b4e451 100644
--- a/pages/api/users/index.ts
+++ b/pages/api/users/index.ts
@@ -1,6 +1,6 @@
import { canCreateUser, canViewUsers } from 'lib/auth';
import { ROLES } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { useAuth } from 'lib/middleware';
import { NextApiRequestQueryBody, Role, User } from 'lib/types';
import { NextApiResponse } from 'next';
diff --git a/pages/api/websites/index.ts b/pages/api/websites/index.ts
index 49797d08..41556f8a 100644
--- a/pages/api/websites/index.ts
+++ b/pages/api/websites/index.ts
@@ -1,5 +1,5 @@
import { canCreateWebsite } from 'lib/auth';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { useAuth, useCors } from 'lib/middleware';
import { NextApiRequestQueryBody } from 'lib/types';
import { NextApiResponse } from 'next';
diff --git a/queries/admin/team.ts b/queries/admin/team.ts
index b965119b..6d216639 100644
--- a/queries/admin/team.ts
+++ b/queries/admin/team.ts
@@ -1,7 +1,7 @@
import { Prisma, Team, TeamWebsite } from '@prisma/client';
import prisma from 'lib/prisma';
-import { uuid } from 'lib/crypto';
import { ROLES } from 'lib/constants';
+import { uuid } from 'next-basics';
export async function getTeam(where: Prisma.TeamWhereInput): Promise<Team> {
return prisma.client.team.findFirst({
diff --git a/queries/admin/teamUser.ts b/queries/admin/teamUser.ts
index b1c295be..5bce2926 100644
--- a/queries/admin/teamUser.ts
+++ b/queries/admin/teamUser.ts
@@ -1,5 +1,5 @@
import { Prisma, TeamUser } from '@prisma/client';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import prisma from 'lib/prisma';
export async function getTeamUserById(teamUserId: string): Promise<TeamUser> {
diff --git a/queries/admin/teamWebsite.ts b/queries/admin/teamWebsite.ts
index 0aedc3c7..9ffd07d3 100644
--- a/queries/admin/teamWebsite.ts
+++ b/queries/admin/teamWebsite.ts
@@ -1,6 +1,6 @@
import { Prisma, Team, TeamUser, TeamWebsite, Website } from '@prisma/client';
import { ROLES } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import prisma from 'lib/prisma';
export async function getTeamWebsite(
diff --git a/queries/analytics/eventData/getEventDataEvents.ts b/queries/analytics/eventData/getEventDataEvents.ts
new file mode 100644
index 00000000..bb57f70e
--- /dev/null
+++ b/queries/analytics/eventData/getEventDataEvents.ts
@@ -0,0 +1,119 @@
+import { buildSql } from 'lib/sql';
+import prisma from 'lib/prisma';
+import clickhouse from 'lib/clickhouse';
+import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import { WebsiteEventDataFields } from 'lib/types';
+import { loadWebsite } from 'lib/query';
+import { DEFAULT_RESET_DATE } from 'lib/constants';
+
+export async function getEventDataEvents(
+ ...args: [
+ websiteId: string,
+ startDate: Date,
+ endDate: Date,
+ filters: { field?: string; event?: string },
+ ]
+): Promise<WebsiteEventDataFields[]> {
+ return runQuery({
+ [PRISMA]: () => relationalQuery(...args),
+ [CLICKHOUSE]: () => clickhouseQuery(...args),
+ });
+}
+
+async function relationalQuery(
+ websiteId: string,
+ startDate: Date,
+ endDate: Date,
+ filters: { field?: string; event?: string },
+) {
+ const { toUuid, rawQuery } = prisma;
+ const website = await loadWebsite(websiteId);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
+ const { field, event } = filters;
+
+ if (field) {
+ if (event) {
+ return rawQuery(
+ `select ed.event_key as field,
+ ed.string_value as value,
+ count(ed.*) as total
+ from event_data as ed
+ inner join website_event as we
+ on we.event_id = ed.website_event_id
+ where ed.website_id = $1${toUuid()}
+ and ed.event_key = $2
+ and ed.created_at >= $3
+ and ed.created_at between $4 and $5
+ and we.event_name = $6
+ group by ed.event_key, ed.string_value
+ order by 3 desc, 2 desc, 1 asc
+ `,
+ [websiteId, field, resetDate, startDate, endDate, event] as any,
+ );
+ }
+
+ return rawQuery(
+ `select event_key as field,
+ string_value as value,
+ count(*) as total
+ from event_data
+ where website_id = $1${toUuid()}
+ and event_key = $2
+ and created_at >= $3
+ and created_at between $4 and $5
+ group by event_key, string_value
+ order by 3 desc, 2 desc, 1 asc
+ `,
+ [websiteId, field, resetDate, startDate, endDate] as any,
+ );
+ }
+}
+
+async function clickhouseQuery(
+ websiteId: string,
+ startDate: Date,
+ endDate: Date,
+ filters: { field?: string; event?: string },
+) {
+ const { rawQuery } = clickhouse;
+ const website = await loadWebsite(websiteId);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
+ const { event } = filters;
+
+ if (event) {
+ return rawQuery(
+ `select
+ event_name as event,
+ event_key as field,
+ data_type as type,
+ string_value as value,
+ count(*) as total
+ from event_data
+ where website_id = {websiteId:UUID}
+ and created_at >= {resetDate:DateTime}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ and event_name = {event:String}
+ group by event_key, data_type, string_value, event_name
+ order by 1 asc, 2 asc, 3 asc, 4 desc
+ limit 100`,
+ { websiteId, resetDate, startDate, endDate, event },
+ );
+ }
+
+ return rawQuery(
+ `select
+ event_name as event,
+ event_key as field,
+ data_type as type,
+ count(*) as total
+ from event_data
+ where website_id = {websiteId:UUID}
+ and created_at >= {resetDate:DateTime}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ group by event_key, data_type, event_name
+ order by 1 asc, 2 asc
+ limit 100
+ `,
+ { websiteId, resetDate, startDate, endDate },
+ );
+}
diff --git a/queries/analytics/eventData/getEventDataFields.ts b/queries/analytics/eventData/getEventDataFields.ts
index 92b477ab..d53e87a2 100644
--- a/queries/analytics/eventData/getEventDataFields.ts
+++ b/queries/analytics/eventData/getEventDataFields.ts
@@ -3,17 +3,10 @@ import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { WebsiteEventDataFields } from 'lib/types';
import { loadWebsite } from 'lib/query';
-import { DEFAULT_CREATED_AT } from 'lib/constants';
+import { DEFAULT_RESET_DATE } from 'lib/constants';
export async function getEventDataFields(
- ...args: [
- websiteId: string,
- startDate: Date,
- endDate: Date,
- field?: string,
- event?: string,
- withEventNames?: boolean,
- ]
+ ...args: [websiteId: string, startDate: Date, endDate: Date, field?: string]
): Promise<WebsiteEventDataFields[]> {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
@@ -21,37 +14,12 @@ export async function getEventDataFields(
});
}
-async function relationalQuery(
- websiteId: string,
- startDate: Date,
- endDate: Date,
- field: string,
- event: string,
- withEventNames: boolean,
-) {
+async function relationalQuery(websiteId: string, startDate: Date, endDate: Date, field: string) {
const { toUuid, rawQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
if (field) {
- if (event) {
- return rawQuery(
- `select ed.event_key as field,
- ed.string_value as value,
- count(ed.*) as total
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = $1${toUuid()}
- and ed.event_key = $2
- and ed.created_at >= $3
- and ed.created_at between $4 and $5
- and e.event_name = $6
- group by ed.event_key, ed.string_value
- order by 3 desc, 2 desc, 1 asc
- `,
- [websiteId, field, resetDate, startDate, endDate, event] as any,
- );
- }
return rawQuery(
`select event_key as field,
string_value as value,
@@ -63,30 +31,12 @@ async function relationalQuery(
and created_at between $4 and $5
group by event_key, string_value
order by 3 desc, 2 desc, 1 asc
+ limit 100
`,
[websiteId, field, resetDate, startDate, endDate] as any,
);
}
- if (withEventNames) {
- return rawQuery(
- `select
- ed.event_key as field,
- ed.data_type as type,
- count(ed.*) as total,
- e.event_name as event
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = $1${toUuid()}
- and ed.created_at >= $2
- and ed.created_at between $3 and $4
- group by e.event_name, ed.event_key, ed.data_type
- order by 3 desc, 2 asc, 1 asc
- `,
- [websiteId, resetDate, startDate, endDate] as any,
- );
- }
-
return rawQuery(
`select
event_key as field,
@@ -98,43 +48,18 @@ async function relationalQuery(
and created_at between $3 and $4
group by event_key, data_type
order by 3 desc, 2 asc, 1 asc
+ limit 100
`,
[websiteId, resetDate, startDate, endDate] as any,
);
}
-async function clickhouseQuery(
- websiteId: string,
- startDate: Date,
- endDate: Date,
- field: string,
- event: string,
- withEventNames: boolean,
-) {
- const { rawQuery, getDateFormat, getBetweenDates } = clickhouse;
+async function clickhouseQuery(websiteId: string, startDate: Date, endDate: Date, field: string) {
+ const { rawQuery, getDateFormat } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
if (field) {
- if (event) {
- return rawQuery(
- `select
- ed.event_key as field,
- ed.string_value as value,
- count(ed.*) as total
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = {websiteId:UUID}
- and ed.event_key = {field:String}
- and ed.created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('ed.created_at', startDate, endDate)}
- and e.event_name = {event:String}
- group by event_key, string_value
- order by 3 desc, 2 desc, 1 asc
- `,
- { websiteId, field, event },
- );
- }
return rawQuery(
`select
event_key as field,
@@ -144,33 +69,15 @@ async function clickhouseQuery(
where website_id = {websiteId:UUID}
and event_key = {field:String}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
group by event_key, string_value
order by 3 desc, 2 desc, 1 asc
+ limit 100
`,
{ websiteId, field },
);
}
- if (withEventNames) {
- return rawQuery(
- `select
- ed.event_key as field,
- ed.data_type as type,
- count(ed.*) as total,
- e.event_name as event
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = {websiteId:UUID}
- and ed.created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('ed.created_at', startDate, endDate)}
- group by e.event_name, ed.event_key, ed.data_type
- order by 3 desc, 2 asc, 1 asc
- `,
- [websiteId, resetDate, startDate, endDate] as any,
- );
- }
-
return rawQuery(
`select
event_key as field,
@@ -179,9 +86,10 @@ async function clickhouseQuery(
from event_data
where website_id = {websiteId:UUID}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
group by event_key, data_type
order by 3 desc, 2 asc, 1 asc
+ limit 100
`,
{ websiteId },
);
diff --git a/queries/analytics/eventData/getEventDataUsage.ts b/queries/analytics/eventData/getEventDataUsage.ts
index 5d470c3c..0abca05e 100644
--- a/queries/analytics/eventData/getEventDataUsage.ts
+++ b/queries/analytics/eventData/getEventDataUsage.ts
@@ -9,7 +9,7 @@ export function getEventDataUsage(...args: [websiteIds: string[], startDate: Dat
}
function relationalQuery(websiteIds: string[], startDate: Date, endDate: Date) {
- throw new Error('Not Implemented');
+ throw new Error('Not implemented.');
}
function clickhouseQuery(websiteIds: string[], startDate: Date, endDate: Date) {
diff --git a/queries/analytics/eventData/saveEventData.ts b/queries/analytics/eventData/saveEventData.ts
index 44ba2fce..30a6655e 100644
--- a/queries/analytics/eventData/saveEventData.ts
+++ b/queries/analytics/eventData/saveEventData.ts
@@ -1,6 +1,6 @@
import { Prisma } from '@prisma/client';
import { DATA_TYPE } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { flattenJSON } from 'lib/dynamicData';
import kafka from 'lib/kafka';
diff --git a/queries/analytics/event/getEventMetrics.ts b/queries/analytics/events/getEventMetrics.ts
similarity index 86%
rename from queries/analytics/event/getEventMetrics.ts
rename to queries/analytics/events/getEventMetrics.ts
index ea05dae4..ca104ded 100644
--- a/queries/analytics/event/getEventMetrics.ts
+++ b/queries/analytics/events/getEventMetrics.ts
@@ -2,7 +2,7 @@ import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { WebsiteEventMetric } from 'lib/types';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
export async function getEventMetrics(
@@ -47,7 +47,7 @@ async function relationalQuery(
) {
const { toUuid, rawQuery, getDateQuery, getFilterQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params: any = [websiteId, resetDate, startDate, endDate];
const filterQuery = getFilterQuery(filters, params);
@@ -87,9 +87,9 @@ async function clickhouseQuery(
};
},
) {
- const { rawQuery, getDateQuery, getDateFormat, getBetweenDates, getFilterQuery } = clickhouse;
+ const { rawQuery, getDateQuery, getDateFormat, getFilterQuery } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params = { websiteId };
return rawQuery(
@@ -101,7 +101,7 @@ async function clickhouseQuery(
where website_id = {websiteId:UUID}
and event_type = ${EVENT_TYPE.customEvent}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
${getFilterQuery(filters, params)}
group by x, t
order by t`,
diff --git a/queries/analytics/event/getEventUsage.ts b/queries/analytics/events/getEventUsage.ts
similarity index 95%
rename from queries/analytics/event/getEventUsage.ts
rename to queries/analytics/events/getEventUsage.ts
index 1465264c..78616e04 100644
--- a/queries/analytics/event/getEventUsage.ts
+++ b/queries/analytics/events/getEventUsage.ts
@@ -9,7 +9,7 @@ export function getEventUsage(...args: [websiteIds: string[], startDate: Date, e
}
function relationalQuery(websiteIds: string[], startDate: Date, endDate: Date) {
- throw new Error('Not Implemented');
+ throw new Error('Not implemented.');
}
function clickhouseQuery(websiteIds: string[], startDate: Date, endDate: Date) {
diff --git a/queries/analytics/event/getEvents.ts b/queries/analytics/events/getEvents.ts
similarity index 100%
rename from queries/analytics/event/getEvents.ts
rename to queries/analytics/events/getEvents.ts
diff --git a/queries/analytics/event/saveEvent.ts b/queries/analytics/events/saveEvent.ts
similarity index 99%
rename from queries/analytics/event/saveEvent.ts
rename to queries/analytics/events/saveEvent.ts
index 51087a59..cfefc326 100644
--- a/queries/analytics/event/saveEvent.ts
+++ b/queries/analytics/events/saveEvent.ts
@@ -2,7 +2,7 @@ import { EVENT_NAME_LENGTH, URL_LENGTH, EVENT_TYPE } from 'lib/constants';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import kafka from 'lib/kafka';
import prisma from 'lib/prisma';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { saveEventData } from 'queries/analytics/eventData/saveEventData';
export async function saveEvent(args: {
diff --git a/queries/analytics/pageview/getPageviewMetrics.ts b/queries/analytics/pageviews/getPageviewMetrics.ts
similarity index 84%
rename from queries/analytics/pageview/getPageviewMetrics.ts
rename to queries/analytics/pageviews/getPageviewMetrics.ts
index b2f815f2..37f39e52 100644
--- a/queries/analytics/pageview/getPageviewMetrics.ts
+++ b/queries/analytics/pageviews/getPageviewMetrics.ts
@@ -1,7 +1,7 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
export async function getPageviewMetrics(
@@ -33,7 +33,7 @@ async function relationalQuery(
const { startDate, endDate, filters = {}, column } = criteria;
const { rawQuery, parseFilters, toUuid } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params: any = [
websiteId,
resetDate,
@@ -45,7 +45,8 @@ async function relationalQuery(
let excludeDomain = '';
if (column === 'referrer_domain') {
- excludeDomain = 'and (website_event.referrer_domain != $6 or website_event.referrer_domain is null)';
+ excludeDomain =
+ 'and (website_event.referrer_domain != $6 or website_event.referrer_domain is null)';
params.push(website.domain);
}
@@ -78,9 +79,9 @@ async function clickhouseQuery(
},
) {
const { startDate, endDate, filters = {}, column } = criteria;
- const { rawQuery, getDateFormat, parseFilters, getBetweenDates } = clickhouse;
+ const { rawQuery, getDateFormat, parseFilters } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params = {
websiteId,
eventType: column === 'event_name' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
@@ -102,7 +103,7 @@ async function clickhouseQuery(
where website_id = {websiteId:UUID}
and event_type = {eventType:UInt32}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
${excludeDomain}
${filterQuery}
group by x
diff --git a/queries/analytics/pageview/getPageviewStats.ts b/queries/analytics/pageviews/getPageviewStats.ts
similarity index 86%
rename from queries/analytics/pageview/getPageviewStats.ts
rename to queries/analytics/pageviews/getPageviewStats.ts
index 989a6d4d..a83cfcd1 100644
--- a/queries/analytics/pageview/getPageviewStats.ts
+++ b/queries/analytics/pageviews/getPageviewStats.ts
@@ -1,7 +1,7 @@
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
export async function getPageviewStats(
@@ -47,7 +47,7 @@ async function relationalQuery(
} = criteria;
const { toUuid, getDateQuery, parseFilters, rawQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params: any = [websiteId, resetDate, startDate, endDate];
const { filterQuery, joinSession } = parseFilters(filters, params);
@@ -86,16 +86,9 @@ async function clickhouseQuery(
count = '*',
filters = {},
} = criteria;
- const {
- parseFilters,
- getDateFormat,
- rawQuery,
- getDateStringQuery,
- getDateQuery,
- getBetweenDates,
- } = clickhouse;
+ const { parseFilters, getDateFormat, rawQuery, getDateStringQuery, getDateQuery } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params = { websiteId };
const { filterQuery } = parseFilters(filters, params);
@@ -111,7 +104,7 @@ async function clickhouseQuery(
where website_id = {websiteId:UUID}
and event_type = ${EVENT_TYPE.pageView}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
${filterQuery}
group by t) g
order by t`,
diff --git a/queries/analytics/pageview/getPageviewFunnel.ts b/queries/analytics/reports/getFunnel.ts
similarity index 86%
rename from queries/analytics/pageview/getPageviewFunnel.ts
rename to queries/analytics/reports/getFunnel.ts
index bfd921c1..d7786477 100644
--- a/queries/analytics/pageview/getPageviewFunnel.ts
+++ b/queries/analytics/reports/getFunnel.ts
@@ -2,7 +2,7 @@ import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
-export async function getPageviewFunnel(
+export async function getFunnel(
...args: [
websiteId: string,
criteria: {
@@ -76,14 +76,8 @@ async function clickhouseQuery(
}[]
> {
const { windowMinutes, startDate, endDate, urls } = criteria;
- const { rawQuery, getBetweenDates, getFunnelQuery } = clickhouse;
- const { columnsQuery, conditionQuery, urlParams } = getFunnelQuery(urls);
-
- const params = {
- websiteId,
- window: windowMinutes * 60,
- ...urlParams,
- };
+ const { rawQuery, getFunnelQuery } = clickhouse;
+ const { columnsQuery, urlParams } = getFunnelQuery(urls);
return rawQuery<{ level: number; count: number }[]>(
`
@@ -98,13 +92,19 @@ async function clickhouseQuery(
) AS level
FROM website_event
WHERE website_id = {websiteId:UUID}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ AND created_at BETWEEN {startDate:DateTime} AND {endDate:DateTime}
GROUP BY 1
)
GROUP BY level
ORDER BY level ASC;
`,
- params,
+ {
+ websiteId,
+ startDate,
+ endDate,
+ window: windowMinutes * 60,
+ ...urlParams,
+ },
).then(results => {
return urls.map((a, i) => ({
x: a,
diff --git a/queries/analytics/reports/getInsights.ts b/queries/analytics/reports/getInsights.ts
new file mode 100644
index 00000000..1d8970ed
--- /dev/null
+++ b/queries/analytics/reports/getInsights.ts
@@ -0,0 +1,42 @@
+import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import prisma from 'lib/prisma';
+import clickhouse from 'lib/clickhouse';
+
+export interface GetInsightsCriteria {
+ startDate: Date;
+ endDate: Date;
+ fields: string[];
+ filters: string[];
+ groups: string[];
+}
+
+export async function getInsights(...args: [websiteId: string, criteria: GetInsightsCriteria]) {
+ return runQuery({
+ [PRISMA]: () => relationalQuery(...args),
+ [CLICKHOUSE]: () => clickhouseQuery(...args),
+ });
+}
+
+async function relationalQuery(
+ websiteId: string,
+ criteria: GetInsightsCriteria,
+): Promise<
+ {
+ x: string;
+ y: number;
+ }[]
+> {
+ return null;
+}
+
+async function clickhouseQuery(
+ websiteId: string,
+ criteria: GetInsightsCriteria,
+): Promise<
+ {
+ x: string;
+ y: number;
+ }[]
+> {
+ return null;
+}
diff --git a/queries/analytics/session/createSession.ts b/queries/analytics/sessions/createSession.ts
similarity index 100%
rename from queries/analytics/session/createSession.ts
rename to queries/analytics/sessions/createSession.ts
diff --git a/queries/analytics/session/getSession.ts b/queries/analytics/sessions/getSession.ts
similarity index 100%
rename from queries/analytics/session/getSession.ts
rename to queries/analytics/sessions/getSession.ts
diff --git a/queries/analytics/session/getSessionMetrics.ts b/queries/analytics/sessions/getSessionMetrics.ts
similarity index 85%
rename from queries/analytics/session/getSessionMetrics.ts
rename to queries/analytics/sessions/getSessionMetrics.ts
index ef8c79c5..60fde41b 100644
--- a/queries/analytics/session/getSessionMetrics.ts
+++ b/queries/analytics/sessions/getSessionMetrics.ts
@@ -1,7 +1,7 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
export async function getSessionMetrics(
@@ -21,7 +21,7 @@ async function relationalQuery(
criteria: { startDate: Date; endDate: Date; column: string; filters: object },
) {
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const { startDate, endDate, column, filters = {} } = criteria;
const { toUuid, parseFilters, rawQuery } = prisma;
const params: any = [websiteId, resetDate, startDate, endDate];
@@ -53,9 +53,9 @@ async function clickhouseQuery(
data: { startDate: Date; endDate: Date; column: string; filters: object },
) {
const { startDate, endDate, column, filters = {} } = data;
- const { getDateFormat, parseFilters, getBetweenDates, rawQuery } = clickhouse;
+ const { getDateFormat, parseFilters, rawQuery } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params = { websiteId };
const { filterQuery } = parseFilters(filters, params);
@@ -65,7 +65,7 @@ async function clickhouseQuery(
where website_id = {websiteId:UUID}
and event_type = ${EVENT_TYPE.pageView}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
${filterQuery}
group by x
order by y desc
diff --git a/queries/analytics/session/getSessions.ts b/queries/analytics/sessions/getSessions.ts
similarity index 100%
rename from queries/analytics/session/getSessions.ts
rename to queries/analytics/sessions/getSessions.ts
diff --git a/queries/analytics/session/saveSessionData.ts b/queries/analytics/sessions/saveSessionData.ts
similarity index 96%
rename from queries/analytics/session/saveSessionData.ts
rename to queries/analytics/sessions/saveSessionData.ts
index 192053f1..246813e7 100644
--- a/queries/analytics/session/saveSessionData.ts
+++ b/queries/analytics/sessions/saveSessionData.ts
@@ -1,5 +1,5 @@
import { DATA_TYPE } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { flattenJSON } from 'lib/dynamicData';
import prisma from 'lib/prisma';
import { DynamicData } from 'lib/types';
diff --git a/queries/analytics/stats/getRealtimeData.ts b/queries/analytics/stats/getRealtimeData.ts
index 8c215215..2b8f1212 100644
--- a/queries/analytics/stats/getRealtimeData.ts
+++ b/queries/analytics/stats/getRealtimeData.ts
@@ -1,6 +1,5 @@
-import { md5 } from 'lib/crypto';
-import { getSessions } from '../session/getSessions';
-import { getEvents } from '../event/getEvents';
+import { md5 } from 'next-basics';
+import { getSessions, getEvents } from 'queries';
import { EVENT_TYPE } from 'lib/constants';
export async function getRealtimeData(websiteId, time) {
@@ -20,7 +19,7 @@ export async function getRealtimeData(websiteId, time) {
};
return {
- pageviews: decorate('pageview', pageviews),
+ pageviews: decorate('pageviews', pageviews),
sessions: decorate('session', sessions),
events: decorate('event', events),
timestamp: Date.now(),
diff --git a/queries/analytics/stats/getWebsiteStats.ts b/queries/analytics/stats/getWebsiteStats.ts
index 5d5a1f9a..a3034879 100644
--- a/queries/analytics/stats/getWebsiteStats.ts
+++ b/queries/analytics/stats/getWebsiteStats.ts
@@ -1,7 +1,7 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
import { loadWebsite } from 'lib/query';
export async function getWebsiteStats(
@@ -23,7 +23,7 @@ async function relationalQuery(
const { startDate, endDate, filters = {} } = criteria;
const { toUuid, getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params: any = [websiteId, resetDate, startDate, endDate];
const { filterQuery, joinSession } = parseFilters(filters, params);
@@ -57,9 +57,9 @@ async function clickhouseQuery(
criteria: { startDate: Date; endDate: Date; filters: object },
) {
const { startDate, endDate, filters = {} } = criteria;
- const { rawQuery, getDateFormat, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
+ const { rawQuery, getDateFormat, getDateQuery, parseFilters } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
+ const resetDate = new Date(website?.resetAt || DEFAULT_RESET_DATE);
const params = { websiteId };
const { filterQuery } = parseFilters(filters, params);
@@ -79,7 +79,7 @@ async function clickhouseQuery(
where event_type = ${EVENT_TYPE.pageView}
and website_id = {websiteId:UUID}
and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between ${getDateFormat(startDate)} and ${getDateFormat(endDate)}
${filterQuery}
group by session_id, time_series
) t;`,
diff --git a/queries/index.js b/queries/index.js
index b0e4e00b..334180a4 100644
--- a/queries/index.js
+++ b/queries/index.js
@@ -3,19 +3,21 @@ export * from './admin/teamUser';
export * from './admin/user';
export * from './admin/report';
export * from './admin/website';
-export * from './analytics/event/getEventMetrics';
-export * from './analytics/event/getEventUsage';
-export * from './analytics/event/getEvents';
+export * from './analytics/events/getEventMetrics';
+export * from './analytics/events/getEventUsage';
+export * from './analytics/events/getEvents';
+export * from './analytics/eventData/getEventDataEvents';
export * from './analytics/eventData/getEventDataFields';
export * from './analytics/eventData/getEventDataUsage';
-export * from './analytics/event/saveEvent';
-export * from './analytics/pageview/getPageviewFunnel';
-export * from './analytics/pageview/getPageviewMetrics';
-export * from './analytics/pageview/getPageviewStats';
-export * from './analytics/session/createSession';
-export * from './analytics/session/getSession';
-export * from './analytics/session/getSessionMetrics';
-export * from './analytics/session/getSessions';
+export * from './analytics/events/saveEvent';
+export * from './analytics/reports/getFunnel';
+export * from './analytics/reports/getInsights';
+export * from './analytics/pageviews/getPageviewMetrics';
+export * from './analytics/pageviews/getPageviewStats';
+export * from './analytics/sessions/createSession';
+export * from './analytics/sessions/getSession';
+export * from './analytics/sessions/getSessionMetrics';
+export * from './analytics/sessions/getSessions';
export * from './analytics/stats/getActiveVisitors';
export * from './analytics/stats/getRealtimeData';
export * from './analytics/stats/getWebsiteStats';
diff --git a/scripts/telemetry.js b/scripts/telemetry.js
index fe9ab6e7..24cd40c9 100644
--- a/scripts/telemetry.js
+++ b/scripts/telemetry.js
@@ -1,40 +1,24 @@
-const fs = require('fs-extra');
-const path = require('path');
const os = require('os');
const isCI = require('is-ci');
const pkg = require('../package.json');
-const dest = path.resolve(__dirname, '../.next/cache/umami.json');
const url = 'https://api.umami.is/v1/telemetry';
-async function sendTelemetry(action) {
- let json = {};
-
- try {
- json = await fs.readJSON(dest);
- } catch {
- // Ignore
- }
-
- try {
- await fs.writeJSON(dest, { version: pkg.version });
- } catch {
- // Ignore
- }
-
+async function sendTelemetry(type) {
const { default: isDocker } = await import('is-docker');
const { default: fetch } = await import('node-fetch');
- const payload = {
- action,
- version: pkg.version,
- node: process.version,
- platform: os.platform(),
- arch: os.arch(),
- os: `${os.type()} (${os.version()})`,
- isDocker: isDocker(),
- isCi: isCI,
- prevVersion: json.version,
+ const data = {
+ type,
+ payload: {
+ version: pkg.version,
+ node: process.version,
+ platform: os.platform(),
+ arch: os.arch(),
+ os: `${os.type()} (${os.version()})`,
+ isDocker: isDocker(),
+ isCi: isCI,
+ },
};
try {
@@ -44,7 +28,7 @@ async function sendTelemetry(action) {
headers: {
'Content-Type': 'application/json',
},
- body: JSON.stringify(payload),
+ body: JSON.stringify(data),
});
} catch {
// Ignore
diff --git a/yarn.lock b/yarn.lock
index db55eeca..ec66b4ac 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -6260,14 +6260,15 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
-next-basics@^0.31.0:
- version "0.31.0"
- resolved "https://registry.npmjs.org/next-basics/-/next-basics-0.31.0.tgz"
- integrity sha512-uAum7v4DQ37IdqKdmLTFLMyN+ir7FNMeMHQcTd1RQ14sm/fO/tntadlsXulwj/A5xs3gmnW/gfGkLX7VnNjreg==
+next-basics@^0.33.0:
+ version "0.33.0"
+ resolved "https://registry.yarnpkg.com/next-basics/-/next-basics-0.33.0.tgz#da4736dbaa6b8461ae1fe40de4cde20cbe7744e6"
+ integrity sha512-pKynp6/zMboQk/mlzyOZgfFsYShKpieu3XoQM5EmTL311mP2KkcW59hiVHloLR7W6zaaf/ONWzi0L1iw05Qfag==
dependencies:
bcryptjs "^2.4.3"
jsonwebtoken "^9.0.0"
pure-rand "^6.0.2"
+ uuid "^9.0.0"
next@13.3.1:
version "13.3.1"
@@ -8967,6 +8968,11 @@ uuid@^8.3.2:
resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+uuid@^9.0.0:
+ version "9.0.0"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5"
+ integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==
+
v8-compile-cache-lib@^3.0.1:
version "3.0.1"
resolved "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz"