diff --git a/.github/stale.yml b/.github/stale.yml
deleted file mode 100644
index 2dc5b675..00000000
--- a/.github/stale.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Number of days of inactivity before an issue becomes stale
-daysUntilStale: 60
-# Number of days of inactivity before a stale issue is closed
-daysUntilClose: 7
-# Issues with these labels will never be considered stale
-exemptLabels:
- - pinned
- - security
- - enhancement
- - bug
-# Label to use when marking an issue as stale
-staleLabel: wontfix
-# Comment to post when marking an issue as stale. Set to `false` to disable
-markComment: >
- This issue has been automatically marked as stale because it has not had
- recent activity. It will be closed if no further activity occurs. Thank you
- for your contributions.
-# Comment to post when closing a stale issue. Set to `false` to disable
-closeComment: false
diff --git a/.github/workflows/stale-issues.yml b/.github/workflows/stale-issues.yml
new file mode 100644
index 00000000..bf2505b1
--- /dev/null
+++ b/.github/workflows/stale-issues.yml
@@ -0,0 +1,22 @@
+name: Close stale issues
+on:
+ schedule:
+ - cron: '30 1 * * *'
+
+jobs:
+ stale:
+ runs-on: ubuntu-latest
+ permissions:
+ issues: write
+ pull-requests: write
+ steps:
+ - uses: actions/stale@v8
+ with:
+ days-before-issue-stale: 60
+ days-before-issue-close: 7
+ stale-issue-label: 'stale'
+ stale-issue-message: 'This issue is stale because it has been open for 60 days with no activity.'
+ close-issue-message: 'This issue was closed because it has been inactive for 7 days since being marked as stale.'
+ days-before-pr-stale: -1
+ days-before-pr-close: -1
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/components/input/WebsiteDateFilter.js b/components/input/WebsiteDateFilter.js
index 91721974..47e6f016 100644
--- a/components/input/WebsiteDateFilter.js
+++ b/components/input/WebsiteDateFilter.js
@@ -1,25 +1,13 @@
-import useApi from 'hooks/useApi';
import useDateRange from 'hooks/useDateRange';
import DateFilter from './DateFilter';
import styles from './WebsiteDateFilter.module.css';
export default function WebsiteDateFilter({ websiteId }) {
- const { get } = useApi();
const [dateRange, setDateRange] = useDateRange(websiteId);
const { value, startDate, endDate } = dateRange;
const handleChange = async value => {
- if (value === 'all' && websiteId) {
- const data = await get(`/websites/${websiteId}`);
-
- if (data) {
- const start = new Date(data.createdAt).getTime();
- const end = Date.now();
- setDateRange(`range:${start}:${end}`);
- }
- } else if (value !== 'all') {
- setDateRange(value);
- }
+ setDateRange(value);
};
return (
diff --git a/components/pages/event-data/EventDataTable.js b/components/pages/event-data/EventDataTable.js
index 4f938f8a..8260ac35 100644
--- a/components/pages/event-data/EventDataTable.js
+++ b/components/pages/event-data/EventDataTable.js
@@ -7,14 +7,6 @@ export function EventDataTable({ data = [] }) {
const { formatMessage, labels } = useMessages();
const { resolveUrl } = usePageQuery();
- function linkToView(row, cell) {
- return (
-
- {cell}
-
- );
- }
-
if (data.length === 0) {
return ;
}
@@ -22,10 +14,14 @@ export function EventDataTable({ data = [] }) {
return (
- {row => linkToView(row, row.event)}
+ {row => (
+
+ {row.event}
+
+ )}
- {row => linkToView(row, row.field)}
+ {row => row.field}
{({ total }) => total.toLocaleString()}
diff --git a/components/pages/event-data/EventDataValueTable.js b/components/pages/event-data/EventDataValueTable.js
index fedda654..2637053e 100644
--- a/components/pages/event-data/EventDataValueTable.js
+++ b/components/pages/event-data/EventDataValueTable.js
@@ -5,14 +5,14 @@ import Icons from 'components/icons';
import PageHeader from 'components/layout/PageHeader';
import Empty from 'components/common/Empty';
-export function EventDataTable({ data = [], field, event }) {
+export function EventDataValueTable({ data = [], event }) {
const { formatMessage, labels } = useMessages();
const { resolveUrl } = usePageQuery();
const Title = () => {
return (
<>
-
+
-
- {event} - {field}
-
+ {event}
</>
);
};
@@ -33,6 +31,7 @@ export function EventDataTable({ data = [], field, event }) {
{data.length <= 0 && }
{data.length > 0 && (
+
{({ total }) => total.toLocaleString()}
@@ -43,4 +42,4 @@ export function EventDataTable({ data = [], field, event }) {
);
}
-export default EventDataTable;
+export default EventDataValueTable;
diff --git a/components/pages/realtime/RealtimePage.js b/components/pages/realtime/RealtimePage.js
index 2d2eceba..dea89f58 100644
--- a/components/pages/realtime/RealtimePage.js
+++ b/components/pages/realtime/RealtimePage.js
@@ -94,7 +94,7 @@ export function RealtimePage({ websiteId }) {
-
+
diff --git a/components/pages/websites/WebsiteChart.js b/components/pages/websites/WebsiteChart.js
index 12b8ab4b..a4d94bd1 100644
--- a/components/pages/websites/WebsiteChart.js
+++ b/components/pages/websites/WebsiteChart.js
@@ -45,15 +45,7 @@ export function WebsiteChart({ websiteId }) {
return { pageviews: [], sessions: [] };
}, [data, startDate, endDate, unit, modified]);
- return (
-
- );
+ return ;
}
export default WebsiteChart;
diff --git a/components/pages/websites/WebsiteEventData.js b/components/pages/websites/WebsiteEventData.js
index d4ecc27c..7f9a6829 100644
--- a/components/pages/websites/WebsiteEventData.js
+++ b/components/pages/websites/WebsiteEventData.js
@@ -5,20 +5,18 @@ import { EventDataMetricsBar } from 'components/pages/event-data/EventDataMetric
import { useDateRange, useApi, usePageQuery } from 'hooks';
import styles from './WebsiteEventData.module.css';
-function useFields(websiteId, field, event) {
+function useData(websiteId, event) {
const [dateRange] = useDateRange(websiteId);
const { startDate, endDate } = dateRange;
const { get, useQuery } = useApi();
const { data, error, isLoading } = useQuery(
- ['event-data:fields', { websiteId, startDate, endDate, field }],
+ ['event-data:events', { websiteId, startDate, endDate, event }],
() =>
- get('/event-data/fields', {
+ get('/event-data/events', {
websiteId,
startAt: +startDate,
endAt: +endDate,
- field,
event,
- withEventNames: true,
}),
{ enabled: !!(websiteId && startDate && endDate) },
);
@@ -28,15 +26,15 @@ function useFields(websiteId, field, event) {
export default function WebsiteEventData({ websiteId }) {
const {
- query: { view, event },
+ query: { event },
} = usePageQuery();
- const { data } = useFields(websiteId, view, event);
+ const { data } = useData(websiteId, event);
return (
- {!view && }
- {view && }
+ {!event && }
+ {event && }
);
}
diff --git a/db/clickhouse/schema.sql b/db/clickhouse/schema.sql
index dceb0d1e..94b560c3 100644
--- a/db/clickhouse/schema.sql
+++ b/db/clickhouse/schema.sql
@@ -6,7 +6,7 @@ CREATE TABLE umami.website_event
website_id UUID,
session_id UUID,
event_id UUID,
- --session
+ --sessions
hostname LowCardinality(String),
browser LowCardinality(String),
os LowCardinality(String),
@@ -17,14 +17,14 @@ CREATE TABLE umami.website_event
subdivision1 LowCardinality(String),
subdivision2 LowCardinality(String),
city String,
- --pageview
+ --pageviews
url_path String,
url_query String,
referrer_path String,
referrer_query String,
referrer_domain String,
page_title String,
- --event
+ --events
event_type UInt32,
event_name String,
created_at DateTime('UTC'),
@@ -38,7 +38,7 @@ CREATE TABLE umami.website_event_queue (
website_id UUID,
session_id UUID,
event_id UUID,
- --session
+ --sessions
hostname LowCardinality(String),
browser LowCardinality(String),
os LowCardinality(String),
@@ -49,14 +49,14 @@ CREATE TABLE umami.website_event_queue (
subdivision1 LowCardinality(String),
subdivision2 LowCardinality(String),
city String,
- --pageview
+ --pageviews
url_path String,
url_query String,
referrer_path String,
referrer_query String,
referrer_domain String,
page_title String,
- --event
+ --events
event_type UInt32,
event_name String,
created_at DateTime('UTC'),
@@ -66,7 +66,7 @@ CREATE TABLE umami.website_event_queue (
)
ENGINE = Kafka
SETTINGS kafka_broker_list = 'domain:9092,domain:9093,domain:9094', -- input broker list
- kafka_topic_list = 'event',
+ kafka_topic_list = 'events',
kafka_group_name = 'event_consumer_group',
kafka_format = 'JSONEachRow',
kafka_max_block_size = 1048576,
diff --git a/lib/auth.ts b/lib/auth.ts
index bf01a1ab..cfd5c4ce 100644
--- a/lib/auth.ts
+++ b/lib/auth.ts
@@ -13,7 +13,7 @@ import {
import { getTeamUser } from 'queries';
import { getTeamWebsite, getTeamWebsiteByTeamMemberId } from 'queries/admin/teamWebsite';
import { validate } from 'uuid';
-import { loadWebsite } from './query';
+import { loadWebsite } from './load';
import { Auth } from './types';
const log = debug('umami:auth');
diff --git a/lib/clickhouse.ts b/lib/clickhouse.ts
index eb73d83c..10722fff 100644
--- a/lib/clickhouse.ts
+++ b/lib/clickhouse.ts
@@ -2,7 +2,6 @@ import { ClickHouse } from 'clickhouse';
import dateFormat from 'dateformat';
import debug from 'debug';
import { CLICKHOUSE } from 'lib/db';
-import { getDynamicDataType } from './dynamicData';
import { WebsiteMetricFilter } from './types';
import { FILTER_COLUMNS } from './constants';
@@ -62,49 +61,6 @@ function getDateFormat(date) {
return `'${dateFormat(date, 'UTC:yyyy-mm-dd HH:MM:ss')}'`;
}
-function getBetweenDates(field, startAt, endAt) {
- return `${field} between ${getDateFormat(startAt)} and ${getDateFormat(endAt)}`;
-}
-
-function getEventDataFilterQuery(
- filters: {
- eventKey?: string;
- eventValue?: string | number | boolean | Date;
- }[] = [],
- params: any,
-) {
- const query = filters.reduce((ac, cv, i) => {
- const type = getDynamicDataType(cv.eventValue);
-
- let value = cv.eventValue;
-
- ac.push(`and (event_key = {eventKey${i}:String}`);
-
- switch (type) {
- case 'number':
- ac.push(`and number_value = {eventValue${i}:UInt64})`);
- break;
- case 'string':
- ac.push(`and string_value = {eventValue${i}:String})`);
- break;
- case 'boolean':
- ac.push(`and string_value = {eventValue${i}:String})`);
- value = cv ? 'true' : 'false';
- break;
- case 'date':
- ac.push(`and date_value = {eventValue${i}:DateTime('UTC')})`);
- break;
- }
-
- params[`eventKey${i}`] = cv.eventKey;
- params[`eventValue${i}`] = value;
-
- return ac;
- }, []);
-
- return query.join('\n');
-}
-
function getFilterQuery(filters = {}, params = {}) {
const query = Object.keys(filters).reduce((arr, key) => {
const filter = filters[key];
@@ -150,7 +106,7 @@ function parseFilters(filters: WebsiteMetricFilter = {}, params: any = {}) {
};
}
-async function rawQuery(query, params = {}): Promise {
+async function rawQuery(query: string, params: object = {}): Promise {
if (process.env.LOG_QUERY) {
log('QUERY:\n', query);
log('PARAMETERS:\n', params);
@@ -189,10 +145,8 @@ export default {
getDateStringQuery,
getDateQuery,
getDateFormat,
- getBetweenDates,
getFilterQuery,
getFunnelQuery,
- getEventDataFilterQuery,
parseFilters,
findUnique,
findFirst,
diff --git a/lib/constants.ts b/lib/constants.ts
index 493cb525..cae654a6 100644
--- a/lib/constants.ts
+++ b/lib/constants.ts
@@ -18,7 +18,7 @@ export const DEFAULT_THEME = 'light';
export const DEFAULT_ANIMATION_DURATION = 300;
export const DEFAULT_DATE_RANGE = '24hour';
export const DEFAULT_WEBSITE_LIMIT = 10;
-export const DEFAULT_CREATED_AT = '2000-01-01';
+export const DEFAULT_RESET_DATE = '2000-01-01';
export const REALTIME_RANGE = 30;
export const REALTIME_INTERVAL = 5000;
diff --git a/lib/crypto.js b/lib/crypto.js
index fcd3e2c0..059e7b66 100644
--- a/lib/crypto.js
+++ b/lib/crypto.js
@@ -1,5 +1,3 @@
-import crypto from 'crypto';
-import { v4, v5 } from 'uuid';
import { startOfMonth } from 'date-fns';
import { hash } from 'next-basics';
@@ -12,13 +10,3 @@ export function salt() {
return hash(secret(), ROTATING_SALT);
}
-
-export function uuid(...args) {
- if (!args.length) return v4();
-
- return v5(hash(...args, salt()), v5.DNS);
-}
-
-export function md5(...args) {
- return crypto.createHash('md5').update(args.join('')).digest('hex');
-}
diff --git a/lib/date.js b/lib/date.js
index 526354b3..bf5dd90a 100644
--- a/lib/date.js
+++ b/lib/date.js
@@ -26,10 +26,20 @@ import {
differenceInCalendarMonths,
differenceInCalendarYears,
format,
- parseISO,
+ max,
+ min,
+ isDate,
} from 'date-fns';
import { getDateLocale } from 'lib/lang';
+const dateFuncs = {
+ minute: [differenceInMinutes, addMinutes, startOfMinute],
+ hour: [differenceInHours, addHours, startOfHour],
+ day: [differenceInCalendarDays, addDays, startOfDay],
+ month: [differenceInCalendarMonths, addMonths, startOfMonth],
+ year: [differenceInCalendarYears, addYears, startOfYear],
+};
+
export function getTimezone() {
return moment.tz.guess();
}
@@ -43,11 +53,19 @@ export function parseDateRange(value, locale = 'en-US') {
return value;
}
- if (value?.startsWith?.('range')) {
- const [, startAt, endAt] = value.split(':');
+ if (value === 'all') {
+ return {
+ startDate: new Date(0),
+ endDate: new Date(1),
+ value,
+ };
+ }
- const startDate = new Date(+startAt);
- const endDate = new Date(+endAt);
+ if (value?.startsWith?.('range')) {
+ const [, startTime, endTime] = value.split(':');
+
+ const startDate = new Date(+startTime);
+ const endDate = new Date(+endTime);
return {
...getDateRangeValues(startDate, endDate),
@@ -148,17 +166,34 @@ export function parseDateRange(value, locale = 'en-US') {
}
}
-export function getDateRangeValues(startDate, endDate) {
- let unit = 'year';
- if (differenceInHours(endDate, startDate) <= 48) {
- unit = 'hour';
+export function getAllowedUnits(startDate, endDate) {
+ const units = ['minute', 'hour', 'day', 'month', 'year'];
+ const minUnit = getMinimumUnit(startDate, endDate);
+ const index = units.indexOf(minUnit);
+
+ return index >= 0 ? units.splice(index) : [];
+}
+
+export function getMinimumUnit(startDate, endDate) {
+ if (differenceInMinutes(endDate, startDate) <= 60) {
+ return 'minute';
+ } else if (differenceInHours(endDate, startDate) <= 48) {
+ return 'hour';
} else if (differenceInCalendarDays(endDate, startDate) <= 90) {
- unit = 'day';
+ return 'day';
} else if (differenceInCalendarMonths(endDate, startDate) <= 24) {
- unit = 'month';
+ return 'month';
}
- return { startDate: startOfDay(startDate), endDate: endOfDay(endDate), unit };
+ return 'year';
+}
+
+export function getDateRangeValues(startDate, endDate) {
+ return {
+ startDate: startOfDay(startDate),
+ endDate: endOfDay(endDate),
+ unit: getMinimumUnit(startDate, endDate),
+ };
}
export function getDateFromString(str) {
@@ -174,14 +209,6 @@ export function getDateFromString(str) {
return new Date(year, month - 1, day);
}
-const dateFuncs = {
- minute: [differenceInMinutes, addMinutes, startOfMinute],
- hour: [differenceInHours, addHours, startOfHour],
- day: [differenceInCalendarDays, addDays, startOfDay],
- month: [differenceInCalendarMonths, addMonths, startOfMonth],
- year: [differenceInCalendarYears, addYears, startOfYear],
-};
-
export function getDateArray(data, startDate, endDate, unit) {
const arr = [];
const [diff, add, normalize] = dateFuncs[unit];
@@ -227,3 +254,11 @@ export function dateFormat(date, str, locale = 'en-US') {
locale: getDateLocale(locale),
});
}
+
+export function maxDate(...args) {
+ return max(args.filter(n => isDate(n)));
+}
+
+export function minDate(...args) {
+ return min(args.filter(n => isDate(n)));
+}
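
The reworked date helpers above are the basis for the unit handling elsewhere in this diff: `getMinimumUnit` picks the finest sensible unit for a span, `getAllowedUnits` returns that unit plus every coarser one, and `maxDate`/`minDate` silently drop non-Date arguments, which is why callers can pass a possibly-null `website.resetAt`. A minimal sketch of how they behave, assuming the repo's `lib/*` path aliases; the sample dates are illustrative:

```js
import { getMinimumUnit, getAllowedUnits, maxDate } from 'lib/date';

const startDate = new Date('2023-05-01T00:00:00Z');
const endDate = new Date('2023-05-02T12:00:00Z'); // 36 hours apart

getMinimumUnit(startDate, endDate); // 'hour' (span is <= 48 hours)
getAllowedUnits(startDate, endDate); // ['hour', 'day', 'month', 'year']

// maxDate() filters out anything that is not a Date, so a null resetAt
// simply leaves the original start date in effect.
maxDate(startDate, null); // → the same instant as startDate
```
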
diff --git a/lib/db.js b/lib/db.js
index 19e46a3d..750cdec0 100644
--- a/lib/db.js
+++ b/lib/db.js
@@ -35,3 +35,7 @@ export async function runQuery(queries) {
return queries[CLICKHOUSE]();
}
}
+
+export function notImplemented() {
+ throw new Error('Not implemented.');
+}
diff --git a/lib/detect.ts b/lib/detect.ts
index 3278ddc9..9c1e1fa4 100644
--- a/lib/detect.ts
+++ b/lib/detect.ts
@@ -1,5 +1,5 @@
import path from 'path';
-import requestIp from 'request-ip';
+import { getClientIp } from 'request-ip';
import { browserName, detectOS } from 'detect-browser';
import isLocalhost from 'is-localhost-ip';
import maxmind from 'maxmind';
@@ -25,7 +25,7 @@ export function getIpAddress(req) {
return req.headers['cf-connecting-ip'];
}
- return requestIp.getClientIp(req);
+ return getClientIp(req);
}
export function getDevice(screen, os) {
diff --git a/lib/kafka.ts b/lib/kafka.ts
index 8f5bb87f..10326888 100644
--- a/lib/kafka.ts
+++ b/lib/kafka.ts
@@ -61,7 +61,7 @@ async function getProducer(): Promise {
return producer;
}
-function getDateFormat(date, format?): string {
+function getDateFormat(date: Date, format?: string): string {
return dateFormat(date, format ? format : 'UTC:yyyy-mm-dd HH:MM:ss');
}
diff --git a/lib/load.ts b/lib/load.ts
new file mode 100644
index 00000000..4ce18b09
--- /dev/null
+++ b/lib/load.ts
@@ -0,0 +1,51 @@
+import cache from 'lib/cache';
+import { getWebsite, getSession, getUser } from 'queries';
+import { User, Website, Session } from '@prisma/client';
+
+export async function loadWebsite(websiteId: string): Promise {
+ let website;
+
+ if (cache.enabled) {
+ website = await cache.fetchWebsite(websiteId);
+ } else {
+ website = await getWebsite({ id: websiteId });
+ }
+
+ if (!website || website.deletedAt) {
+ return null;
+ }
+
+ return website;
+}
+
+export async function loadSession(sessionId: string): Promise {
+ let session;
+
+ if (cache.enabled) {
+ session = await cache.fetchSession(sessionId);
+ } else {
+ session = await getSession({ id: sessionId });
+ }
+
+ if (!session) {
+ return null;
+ }
+
+ return session;
+}
+
+export async function loadUser(userId: string): Promise {
+ let user;
+
+ if (cache.enabled) {
+ user = await cache.fetchUser(userId);
+ } else {
+ user = await getUser({ id: userId });
+ }
+
+ if (!user || user.deletedAt) {
+ return null;
+ }
+
+ return user;
+}
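
`lib/load.ts` is the renamed home of the loaders that used to live in `lib/query.ts`: each one reads from the cache when it is enabled, falls back to the database otherwise, and returns `null` when the row is missing (or soft-deleted, for websites and users). The rewritten queries later in this diff pair `loadWebsite` with `maxDate` so a website reset (`resetAt`) clips the requested window instead of relying on the old `DEFAULT_CREATED_AT` fallback. A sketch of that pattern; `effectiveRange` is a made-up helper name for illustration only:

```js
import { loadWebsite } from 'lib/load';
import { maxDate } from 'lib/date';

// Clamp the requested start date to the website's resetAt, mirroring what
// getEventDataFields / getEventMetrics now do inline.
async function effectiveRange(websiteId, startDate, endDate) {
  const website = await loadWebsite(websiteId);

  return { startDate: maxDate(startDate, website.resetAt), endDate };
}
```

If `resetAt` is null, `maxDate` ignores it and the original `startDate` is used unchanged.
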
diff --git a/lib/middleware.ts b/lib/middleware.ts
index 1fd13b09..4185f80b 100644
--- a/lib/middleware.ts
+++ b/lib/middleware.ts
@@ -73,5 +73,6 @@ export const useAuth = createMiddleware(async (req, res, next) => {
}
(req as any).auth = { user, token, shareToken, authKey };
+
next();
});
diff --git a/lib/prisma.ts b/lib/prisma.ts
index 875f5897..b02f69f7 100644
--- a/lib/prisma.ts
+++ b/lib/prisma.ts
@@ -1,7 +1,6 @@
import prisma from '@umami/prisma-client';
import moment from 'moment-timezone';
import { MYSQL, POSTGRESQL, getDatabaseType } from 'lib/db';
-import { getDynamicDataType } from './dynamicData';
import { FILTER_COLUMNS } from './constants';
const MYSQL_DATE_FORMATS = {
@@ -20,20 +19,8 @@ const POSTGRESQL_DATE_FORMATS = {
year: 'YYYY-01-01',
};
-function toUuid(): string {
- const db = getDatabaseType(process.env.DATABASE_URL);
-
- if (db === POSTGRESQL) {
- return '::uuid';
- }
-
- if (db === MYSQL) {
- return '';
- }
-}
-
function getAddMinutesQuery(field: string, minutes: number) {
- const db = getDatabaseType(process.env.DATABASE_URL);
+ const db = getDatabaseType();
if (db === POSTGRESQL) {
return `${field} + interval '${minutes} minute'`;
@@ -45,7 +32,7 @@ function getAddMinutesQuery(field: string, minutes: number) {
}
function getDateQuery(field: string, unit: string, timezone?: string): string {
- const db = getDatabaseType(process.env.DATABASE_URL);
+ const db = getDatabaseType();
if (db === POSTGRESQL) {
if (timezone) {
@@ -65,8 +52,8 @@ function getDateQuery(field: string, unit: string, timezone?: string): string {
}
}
-function getTimestampInterval(field: string): string {
- const db = getDatabaseType(process.env.DATABASE_URL);
+function getTimestampIntervalQuery(field: string): string {
+ const db = getDatabaseType();
if (db === POSTGRESQL) {
return `floor(extract(epoch from max(${field}) - min(${field})))`;
@@ -77,47 +64,6 @@ function getTimestampInterval(field: string): string {
}
}
-function getEventDataFilterQuery(
- filters: {
- eventKey?: string;
- eventValue?: string | number | boolean | Date;
- }[],
- params: any[],
-) {
- const query = filters.reduce((ac, cv) => {
- const type = getDynamicDataType(cv.eventValue);
-
- let value = cv.eventValue;
-
- ac.push(`and (event_key = $${params.length + 1}`);
- params.push(cv.eventKey);
-
- switch (type) {
- case 'number':
- ac.push(`and number_value = $${params.length + 1})`);
- params.push(value);
- break;
- case 'string':
- ac.push(`and string_value = $${params.length + 1})`);
- params.push(decodeURIComponent(cv.eventValue as string));
- break;
- case 'boolean':
- ac.push(`and string_value = $${params.length + 1})`);
- params.push(decodeURIComponent(cv.eventValue as string));
- value = cv ? 'true' : 'false';
- break;
- case 'date':
- ac.push(`and date_value = $${params.length + 1})`);
- params.push(cv.eventValue);
- break;
- }
-
- return ac;
- }, []);
-
- return query.join('\n');
-}
-
function getFilterQuery(filters = {}, params = []): string {
const query = Object.keys(filters).reduce((arr, key) => {
const filter = filters[key];
@@ -163,7 +109,7 @@ function getFunnelQuery(
and l0.referrer_path = $${i + initParamLength}
and l0.url_path = $${levelNumber + initParamLength}
and created_at between $2 and $3
- and website_id = $1${toUuid()}
+ and website_id = $1
)`;
}
@@ -197,27 +143,32 @@ function parseFilters(
};
}
-async function rawQuery(query: string, params: never[] = []): Promise {
- const db = getDatabaseType(process.env.DATABASE_URL);
+async function rawQuery(sql: string, data: object): Promise {
+ const db = getDatabaseType();
+ const params = [];
if (db !== POSTGRESQL && db !== MYSQL) {
return Promise.reject(new Error('Unknown database.'));
}
- const sql = db === MYSQL ? query.replace(/\$[0-9]+/g, '?') : query;
+ const query = sql?.replaceAll(/\{\{\s*(\w+)(::\w+)?\s*}}/g, (...args) => {
+ const [, name, type] = args;
- return prisma.rawQuery(sql, params);
+ params.push(data[name]);
+
+ return db === MYSQL ? '?' : `$${params.length}${type ?? ''}`;
+ });
+
+ return prisma.rawQuery(query, params);
}
export default {
...prisma,
getAddMinutesQuery,
getDateQuery,
- getTimestampInterval,
+ getTimestampIntervalQuery,
getFilterQuery,
getFunnelQuery,
- getEventDataFilterQuery,
- toUuid,
parseFilters,
rawQuery,
};
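
The new `rawQuery` in `lib/prisma.ts` replaces positional `$1`-style placeholders with named `{{name}}` templates: an optional suffix such as `{{websiteId::uuid}}` becomes a PostgreSQL cast and is dropped on MySQL, and values are pulled from the `data` object in the order the placeholders appear, which is also why the old `toUuid()` helper could be deleted. A standalone sketch of that substitution (not the actual export, just a mirror of the `replaceAll` above):

```js
// Mirrors the template substitution in lib/prisma.ts rawQuery().
function bindParams(sql, data, db) {
  const params = [];

  const query = sql.replaceAll(/\{\{\s*(\w+)(::\w+)?\s*}}/g, (...args) => {
    const [, name, type] = args;

    params.push(data[name]);

    return db === 'mysql' ? '?' : `$${params.length}${type ?? ''}`;
  });

  return { query, params };
}

const { query, params } = bindParams(
  'select * from website_event where website_id = {{websiteId::uuid}} and created_at >= {{startDate}}',
  { websiteId: 'example-website-id', startDate: new Date() },
  'postgresql',
);
// query  => '... where website_id = $1::uuid and created_at >= $2'
// params => ['example-website-id', <startDate value>]
```
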
diff --git a/lib/query.ts b/lib/query.ts
index 4ce18b09..09b77df8 100644
--- a/lib/query.ts
+++ b/lib/query.ts
@@ -1,51 +1,28 @@
-import cache from 'lib/cache';
-import { getWebsite, getSession, getUser } from 'queries';
-import { User, Website, Session } from '@prisma/client';
+import { NextApiRequest } from 'next';
+import { getAllowedUnits, getMinimumUnit } from './date';
+import { getWebsiteDateRange } from '../queries';
-export async function loadWebsite(websiteId: string): Promise {
- let website;
+export async function parseDateRangeQuery(req: NextApiRequest) {
+ const { id: websiteId, startAt, endAt, unit } = req.query;
- if (cache.enabled) {
- website = await cache.fetchWebsite(websiteId);
- } else {
- website = await getWebsite({ id: websiteId });
+ // All-time
+ if (+startAt === 0 && +endAt === 1) {
+ const { min, max } = await getWebsiteDateRange(websiteId as string);
+
+ return {
+ startDate: min,
+ endDate: max,
+ unit: getMinimumUnit(min, max),
+ };
}
- if (!website || website.deletedAt) {
- return null;
- }
+ const startDate = new Date(+startAt);
+ const endDate = new Date(+endAt);
+ const minUnit = getMinimumUnit(startDate, endDate);
- return website;
-}
-
-export async function loadSession(sessionId: string): Promise {
- let session;
-
- if (cache.enabled) {
- session = await cache.fetchSession(sessionId);
- } else {
- session = await getSession({ id: sessionId });
- }
-
- if (!session) {
- return null;
- }
-
- return session;
-}
-
-export async function loadUser(userId: string): Promise {
- let user;
-
- if (cache.enabled) {
- user = await cache.fetchUser(userId);
- } else {
- user = await getUser({ id: userId });
- }
-
- if (!user || user.deletedAt) {
- return null;
- }
-
- return user;
+ return {
+ startDate,
+ endDate,
+ unit: (getAllowedUnits(startDate, endDate).includes(unit as string) ? unit : minUnit) as string,
+ };
}
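
`parseDateRangeQuery` is the server-side counterpart of the `'all'` branch added to `parseDateRange` in `lib/date.js`: an all-time selection now produces the sentinel range `startAt=0&endAt=1`, and the API resolves that sentinel to the website's real first/last event dates via `getWebsiteDateRange`. A small illustration of the client-side half, assuming the repo's path aliases:

```js
import { parseDateRange } from 'lib/date';

// 'all' no longer needs an extra /websites/{id} fetch on the client
// (see the WebsiteDateFilter change above); it becomes a sentinel range.
const range = parseDateRange('all');
// => { startDate: new Date(0), endDate: new Date(1), value: 'all' }

// The request then goes out with startAt=0&endAt=1, which parseDateRangeQuery
// detects (+startAt === 0 && +endAt === 1) and swaps for the website's actual
// min/max event dates, choosing the unit with getMinimumUnit.
```
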
diff --git a/lib/session.ts b/lib/session.ts
index 2725f04c..1f693bbd 100644
--- a/lib/session.ts
+++ b/lib/session.ts
@@ -1,11 +1,11 @@
-import { secret, uuid } from 'lib/crypto';
+import { secret } from 'lib/crypto';
import { getClientInfo, getJsonBody } from 'lib/detect';
-import { parseToken } from 'next-basics';
+import { parseToken, uuid } from 'next-basics';
import { CollectRequestBody, NextApiRequestCollect } from 'pages/api/send';
import { createSession } from 'queries';
import { validate } from 'uuid';
import cache from './cache';
-import { loadSession, loadWebsite } from './query';
+import { loadSession, loadWebsite } from './load';
export async function findSession(req: NextApiRequestCollect) {
const { payload } = getJsonBody(req);
@@ -30,7 +30,6 @@ export async function findSession(req: NextApiRequestCollect) {
// Verify payload
const { website: websiteId, hostname, screen, language } = payload;
-
// Check the hostname value for legality to eliminate dirty data
const validHostnameRegex = /^[\w-.]+$/;
if (!validHostnameRegex.test(hostname)) {
diff --git a/lib/sql.ts b/lib/sql.ts
new file mode 100644
index 00000000..e69de29b
diff --git a/package.json b/package.json
index 78b15ee2..a4cf8989 100644
--- a/package.json
+++ b/package.json
@@ -90,7 +90,7 @@
"maxmind": "^4.3.6",
"moment-timezone": "^0.5.35",
"next": "13.3.1",
- "next-basics": "^0.31.0",
+ "next-basics": "^0.33.0",
"node-fetch": "^3.2.8",
"npm-run-all": "^4.1.5",
"react": "^18.2.0",
diff --git a/pages/api/event-data/events.ts b/pages/api/event-data/events.ts
new file mode 100644
index 00000000..1d74c3d2
--- /dev/null
+++ b/pages/api/event-data/events.ts
@@ -0,0 +1,39 @@
+import { canViewWebsite } from 'lib/auth';
+import { useCors, useAuth } from 'lib/middleware';
+import { NextApiRequestQueryBody } from 'lib/types';
+import { NextApiResponse } from 'next';
+import { ok, methodNotAllowed, unauthorized } from 'next-basics';
+import { getEventDataEvents } from 'queries';
+
+export interface EventDataFieldsRequestBody {
+ websiteId: string;
+ dateRange: {
+ startDate: string;
+ endDate: string;
+ };
+}
+
+export default async (
+ req: NextApiRequestQueryBody,
+ res: NextApiResponse,
+) => {
+ await useCors(req, res);
+ await useAuth(req, res);
+
+ if (req.method === 'GET') {
+ const { websiteId, startAt, endAt, field, event } = req.query;
+
+ if (!(await canViewWebsite(req.auth, websiteId))) {
+ return unauthorized(res);
+ }
+
+ const data = await getEventDataEvents(websiteId, new Date(+startAt), new Date(+endAt), {
+ field,
+ event,
+ });
+
+ return ok(res, data);
+ }
+
+ return methodNotAllowed(res);
+};
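
This new `/api/event-data/events` route is what `WebsiteEventData.js` now calls instead of `/event-data/fields?withEventNames=true`. A hedged sketch of the request shape, matching the query parameters the handler reads; the ids and dates are placeholders, and a real request must also be authenticated since the handler runs `useAuth`:

```js
// Placeholders for illustration; in the app this goes through
// useApi().get('/event-data/events', {...}) as shown in WebsiteEventData.js.
const websiteId = 'your-website-id';
const startAt = Date.now() - 24 * 60 * 60 * 1000; // last 24 hours
const endAt = Date.now();

const res = await fetch(
  `/api/event-data/events?websiteId=${websiteId}&startAt=${startAt}&endAt=${endAt}&event=signup`,
);
const data = await res.json();
// Omitting the optional `event` parameter returns counts grouped by event name.
```
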
diff --git a/pages/api/event-data/fields.ts b/pages/api/event-data/fields.ts
index 5b0247f6..18b74bc3 100644
--- a/pages/api/event-data/fields.ts
+++ b/pages/api/event-data/fields.ts
@@ -21,20 +21,13 @@ export default async (
await useAuth(req, res);
if (req.method === 'GET') {
- const { websiteId, startAt, endAt, field, event, withEventNames } = req.query;
+ const { websiteId, startAt, endAt, field } = req.query;
if (!(await canViewWebsite(req.auth, websiteId))) {
return unauthorized(res);
}
- const data = await getEventDataFields(
- websiteId,
- new Date(+startAt),
- new Date(+endAt),
- field,
- event,
- withEventNames,
- );
+ const data = await getEventDataFields(websiteId, new Date(+startAt), new Date(+endAt), field);
return ok(res, data);
}
diff --git a/pages/api/reports/funnel.ts b/pages/api/reports/funnel.ts
index 1e121326..33882e03 100644
--- a/pages/api/reports/funnel.ts
+++ b/pages/api/reports/funnel.ts
@@ -3,7 +3,7 @@ import { useCors, useAuth } from 'lib/middleware';
import { NextApiRequestQueryBody } from 'lib/types';
import { NextApiResponse } from 'next';
import { ok, methodNotAllowed, unauthorized } from 'next-basics';
-import { getPageviewFunnel } from 'queries';
+import { getFunnel } from 'queries';
export interface FunnelRequestBody {
websiteId: string;
@@ -41,7 +41,7 @@ export default async (
return unauthorized(res);
}
- const data = await getPageviewFunnel(websiteId, {
+ const data = await getFunnel(websiteId, {
startDate: new Date(startDate),
endDate: new Date(endDate),
urls,
diff --git a/pages/api/reports/index.ts b/pages/api/reports/index.ts
index b2c5da9e..3fe34134 100644
--- a/pages/api/reports/index.ts
+++ b/pages/api/reports/index.ts
@@ -1,8 +1,7 @@
-import { uuid } from 'lib/crypto';
import { useAuth, useCors } from 'lib/middleware';
import { NextApiRequestQueryBody } from 'lib/types';
import { NextApiResponse } from 'next';
-import { methodNotAllowed, ok, unauthorized } from 'next-basics';
+import { methodNotAllowed, ok, unauthorized, uuid } from 'next-basics';
import { createReport, getReports } from 'queries';
import { canViewWebsite } from 'lib/auth';
diff --git a/pages/api/reports/insights.ts b/pages/api/reports/insights.ts
new file mode 100644
index 00000000..dba11953
--- /dev/null
+++ b/pages/api/reports/insights.ts
@@ -0,0 +1,51 @@
+import { canViewWebsite } from 'lib/auth';
+import { useCors, useAuth } from 'lib/middleware';
+import { NextApiRequestQueryBody } from 'lib/types';
+import { NextApiResponse } from 'next';
+import { ok, methodNotAllowed, unauthorized } from 'next-basics';
+import { getInsights } from 'queries';
+
+export interface InsightsRequestBody {
+ websiteId: string;
+ dateRange: {
+ startDate: string;
+ endDate: string;
+ };
+ fields: string[];
+ filters: string[];
+ groups: string[];
+}
+
+export default async (
+ req: NextApiRequestQueryBody,
+ res: NextApiResponse,
+) => {
+ await useCors(req, res);
+ await useAuth(req, res);
+
+ if (req.method === 'POST') {
+ const {
+ websiteId,
+ dateRange: { startDate, endDate },
+ fields,
+ filters,
+ groups,
+ } = req.body;
+
+ if (!(await canViewWebsite(req.auth, websiteId))) {
+ return unauthorized(res);
+ }
+
+ const data = await getInsights(websiteId, {
+ startDate: new Date(startDate),
+ endDate: new Date(endDate),
+ fields,
+ filters,
+ groups,
+ });
+
+ return ok(res, data);
+ }
+
+ return methodNotAllowed(res);
+};
diff --git a/pages/api/send.ts b/pages/api/send.ts
index e241e5de..8c88c509 100644
--- a/pages/api/send.ts
+++ b/pages/api/send.ts
@@ -1,7 +1,7 @@
import isbot from 'isbot';
import ipaddr from 'ipaddr.js';
import { createToken, ok, send, badRequest, forbidden } from 'next-basics';
-import { saveEvent } from 'queries';
+import { saveEvent, saveSessionData } from 'queries';
import { useCors, useSession } from 'lib/middleware';
import { getJsonBody, getIpAddress } from 'lib/detect';
import { secret } from 'lib/crypto';
@@ -9,7 +9,6 @@ import { NextApiRequest, NextApiResponse } from 'next';
import { Resolver } from 'dns/promises';
import { CollectionType } from 'lib/types';
import { COLLECTION_TYPE } from 'lib/constants';
-import { saveSessionData } from 'queries/analytics/session/saveSessionData';
export interface CollectRequestBody {
payload: {
diff --git a/pages/api/teams/index.ts b/pages/api/teams/index.ts
index 453f1ef3..316f3109 100644
--- a/pages/api/teams/index.ts
+++ b/pages/api/teams/index.ts
@@ -1,7 +1,7 @@
import { Team } from '@prisma/client';
import { NextApiRequestQueryBody } from 'lib/types';
import { canCreateTeam } from 'lib/auth';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { useAuth } from 'lib/middleware';
import { NextApiResponse } from 'next';
import { getRandomChars, methodNotAllowed, ok, unauthorized } from 'next-basics';
diff --git a/pages/api/users/index.ts b/pages/api/users/index.ts
index c6103c35..14b4e451 100644
--- a/pages/api/users/index.ts
+++ b/pages/api/users/index.ts
@@ -1,6 +1,6 @@
import { canCreateUser, canViewUsers } from 'lib/auth';
import { ROLES } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { useAuth } from 'lib/middleware';
import { NextApiRequestQueryBody, Role, User } from 'lib/types';
import { NextApiResponse } from 'next';
diff --git a/pages/api/websites/[id]/events.ts b/pages/api/websites/[id]/events.ts
index 12473da0..b9e3ac71 100644
--- a/pages/api/websites/[id]/events.ts
+++ b/pages/api/websites/[id]/events.ts
@@ -5,6 +5,7 @@ import moment from 'moment-timezone';
import { NextApiResponse } from 'next';
import { badRequest, methodNotAllowed, ok, unauthorized } from 'next-basics';
import { getEventMetrics } from 'queries';
+import { parseDateRangeQuery } from 'lib/query';
const unitTypes = ['year', 'month', 'hour', 'day'];
@@ -25,7 +26,8 @@ export default async (
await useCors(req, res);
await useAuth(req, res);
- const { id: websiteId, startAt, endAt, unit, timezone, url, eventName } = req.query;
+ const { id: websiteId, timezone, url, eventName } = req.query;
+ const { startDate, endDate, unit } = await parseDateRangeQuery(req);
if (req.method === 'GET') {
if (!(await canViewWebsite(req.auth, websiteId))) {
@@ -35,8 +37,6 @@ export default async (
if (!moment.tz.zone(timezone) || !unitTypes.includes(unit)) {
return badRequest(res);
}
- const startDate = new Date(+startAt);
- const endDate = new Date(+endAt);
const events = await getEventMetrics(websiteId, {
startDate,
diff --git a/pages/api/websites/[id]/metrics.ts b/pages/api/websites/[id]/metrics.ts
index 5cf818a0..4c6fb270 100644
--- a/pages/api/websites/[id]/metrics.ts
+++ b/pages/api/websites/[id]/metrics.ts
@@ -5,6 +5,7 @@ import { canViewWebsite } from 'lib/auth';
import { useAuth, useCors } from 'lib/middleware';
import { SESSION_COLUMNS, EVENT_COLUMNS, FILTER_COLUMNS } from 'lib/constants';
import { getPageviewMetrics, getSessionMetrics } from 'queries';
+import { parseDateRangeQuery } from 'lib/query';
export interface WebsiteMetricsRequestQuery {
id: string;
@@ -34,8 +35,6 @@ export default async (
const {
id: websiteId,
type,
- startAt,
- endAt,
url,
referrer,
title,
@@ -54,8 +53,7 @@ export default async (
return unauthorized(res);
}
- const startDate = new Date(+startAt);
- const endDate = new Date(+endAt);
+ const { startDate, endDate } = await parseDateRangeQuery(req);
if (SESSION_COLUMNS.includes(type)) {
const column = FILTER_COLUMNS[type] || type;
diff --git a/pages/api/websites/[id]/pageviews.ts b/pages/api/websites/[id]/pageviews.ts
index 9dfd2264..453c6733 100644
--- a/pages/api/websites/[id]/pageviews.ts
+++ b/pages/api/websites/[id]/pageviews.ts
@@ -5,8 +5,7 @@ import { NextApiRequestQueryBody, WebsitePageviews } from 'lib/types';
import { canViewWebsite } from 'lib/auth';
import { useAuth, useCors } from 'lib/middleware';
import { getPageviewStats } from 'queries';
-
-const unitTypes = ['year', 'month', 'hour', 'day'];
+import { parseDateRangeQuery } from 'lib/query';
export interface WebsitePageviewRequestQuery {
id: string;
@@ -34,9 +33,6 @@ export default async (
const {
id: websiteId,
- startAt,
- endAt,
- unit,
timezone,
url,
referrer,
@@ -54,10 +50,9 @@ export default async (
return unauthorized(res);
}
- const startDate = new Date(+startAt);
- const endDate = new Date(+endAt);
+ const { startDate, endDate, unit } = await parseDateRangeQuery(req);
- if (!moment.tz.zone(timezone) || !unitTypes.includes(unit)) {
+ if (!moment.tz.zone(timezone)) {
return badRequest(res);
}
diff --git a/pages/api/websites/[id]/stats.ts b/pages/api/websites/[id]/stats.ts
index 1e2f2292..34347fe5 100644
--- a/pages/api/websites/[id]/stats.ts
+++ b/pages/api/websites/[id]/stats.ts
@@ -1,8 +1,10 @@
+import { addMinutes, differenceInMinutes } from 'date-fns';
+import { NextApiResponse } from 'next';
+import { methodNotAllowed, ok, unauthorized } from 'next-basics';
import { canViewWebsite } from 'lib/auth';
import { useAuth, useCors } from 'lib/middleware';
import { NextApiRequestQueryBody, WebsiteStats } from 'lib/types';
-import { NextApiResponse } from 'next';
-import { methodNotAllowed, ok, unauthorized } from 'next-basics';
+import { parseDateRangeQuery } from 'lib/query';
import { getWebsiteStats } from 'queries';
export interface WebsiteStatsRequestQuery {
@@ -31,8 +33,6 @@ export default async (
const {
id: websiteId,
- startAt,
- endAt,
url,
referrer,
title,
@@ -51,12 +51,10 @@ export default async (
return unauthorized(res);
}
- const startDate = new Date(+startAt);
- const endDate = new Date(+endAt);
-
- const distance = endAt - startAt;
- const prevStartDate = new Date(+startAt - distance);
- const prevEndDate = new Date(+endAt - distance);
+ const { startDate, endDate } = await parseDateRangeQuery(req);
+ const diff = differenceInMinutes(endDate, startDate);
+ const prevStartDate = addMinutes(startDate, -diff);
+ const prevEndDate = addMinutes(endDate, -diff);
const metrics = await getWebsiteStats(websiteId, {
startDate,
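
With the date range now parsed up front, the stats endpoint derives its comparison window from the resolved dates rather than from the raw `startAt`/`endAt` millisecond values: the previous period spans the same number of minutes immediately before `startDate`. A worked example with illustrative dates:

```js
import { addMinutes, differenceInMinutes } from 'date-fns';

// A 24-hour window is compared against the 24 hours directly before it.
const startDate = new Date('2023-05-02T00:00:00Z');
const endDate = new Date('2023-05-03T00:00:00Z');

const diff = differenceInMinutes(endDate, startDate); // 1440
const prevStartDate = addMinutes(startDate, -diff); // 2023-05-01T00:00:00Z
const prevEndDate = addMinutes(endDate, -diff); // 2023-05-02T00:00:00Z
```
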
diff --git a/pages/api/websites/index.ts b/pages/api/websites/index.ts
index 49797d08..41556f8a 100644
--- a/pages/api/websites/index.ts
+++ b/pages/api/websites/index.ts
@@ -1,5 +1,5 @@
import { canCreateWebsite } from 'lib/auth';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { useAuth, useCors } from 'lib/middleware';
import { NextApiRequestQueryBody } from 'lib/types';
import { NextApiResponse } from 'next';
diff --git a/queries/admin/team.ts b/queries/admin/team.ts
index b965119b..6d216639 100644
--- a/queries/admin/team.ts
+++ b/queries/admin/team.ts
@@ -1,7 +1,7 @@
import { Prisma, Team, TeamWebsite } from '@prisma/client';
import prisma from 'lib/prisma';
-import { uuid } from 'lib/crypto';
import { ROLES } from 'lib/constants';
+import { uuid } from 'next-basics';
export async function getTeam(where: Prisma.TeamWhereInput): Promise {
return prisma.client.team.findFirst({
diff --git a/queries/admin/teamUser.ts b/queries/admin/teamUser.ts
index b1c295be..5bce2926 100644
--- a/queries/admin/teamUser.ts
+++ b/queries/admin/teamUser.ts
@@ -1,5 +1,5 @@
import { Prisma, TeamUser } from '@prisma/client';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import prisma from 'lib/prisma';
export async function getTeamUserById(teamUserId: string): Promise {
diff --git a/queries/admin/teamWebsite.ts b/queries/admin/teamWebsite.ts
index 0aedc3c7..9ffd07d3 100644
--- a/queries/admin/teamWebsite.ts
+++ b/queries/admin/teamWebsite.ts
@@ -1,6 +1,6 @@
import { Prisma, Team, TeamUser, TeamWebsite, Website } from '@prisma/client';
import { ROLES } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import prisma from 'lib/prisma';
export async function getTeamWebsite(
diff --git a/queries/analytics/eventData/getEventDataEvents.ts b/queries/analytics/eventData/getEventDataEvents.ts
new file mode 100644
index 00000000..7a926eaf
--- /dev/null
+++ b/queries/analytics/eventData/getEventDataEvents.ts
@@ -0,0 +1,120 @@
+import prisma from 'lib/prisma';
+import clickhouse from 'lib/clickhouse';
+import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import { WebsiteEventDataFields } from 'lib/types';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
+
+export async function getEventDataEvents(
+ ...args: [
+ websiteId: string,
+ startDate: Date,
+ endDate: Date,
+ filters: { field?: string; event?: string },
+ ]
+): Promise {
+ return runQuery({
+ [PRISMA]: () => relationalQuery(...args),
+ [CLICKHOUSE]: () => clickhouseQuery(...args),
+ });
+}
+
+async function relationalQuery(
+ websiteId: string,
+ startDate: Date,
+ endDate: Date,
+ filters: { field?: string; event?: string },
+) {
+ const { rawQuery } = prisma;
+ const website = await loadWebsite(websiteId);
+ const { field, event } = filters;
+
+ if (event) {
+ return rawQuery(
+ `
+ select
+ we.event_name as event,
+ ed.event_key as field,
+ ed.string_value as value,
+ count(ed.*) as total
+ from event_data as ed
+ inner join website_event as we
+ on we.event_id = ed.website_event_id
+ where ed.website_id = {{websiteId:uuid}}
+ and ed.event_key = {{field}}
+ and ed.created_at between {{startDate}} and {{endDate}}
+ and we.event_name = {{event}}
+ group by ed.event_key, ed.string_value
+ order by 3 desc, 2 desc, 1 asc
+ `,
+ { ...filters, websiteId, startDate: maxDate(startDate, website.resetAt), endDate },
+ );
+ }
+ return rawQuery(
+ `
+ select
+ we.event_name as event,
+ ed.event_key as field,
+ ed.string_value as value,
+ count(ed.*) as total
+ from event_data as ed
+ inner join website_event as we
+ on we.event_id = ed.website_event_id
+ where ed.website_id = {{websiteId::uuid}}
+ and ed.event_key = {{field}}
+ and ed.created_at between {{startDate}} and {{endDate}}
+ group by we.event_name, ed.event_key, ed.string_value
+ order by 3 desc, 2 desc, 1 asc
+ `,
+ { websiteId, field, startDate: maxDate(startDate, website.resetAt), endDate },
+ );
+}
+
+async function clickhouseQuery(
+ websiteId: string,
+ startDate: Date,
+ endDate: Date,
+ filters: { field?: string; event?: string },
+) {
+ const { rawQuery } = clickhouse;
+ const website = await loadWebsite(websiteId);
+ const { event } = filters;
+
+ if (event) {
+ return rawQuery(
+ `
+ select
+ event_name as event,
+ event_key as field,
+ data_type as type,
+ string_value as value,
+ count(*) as total
+ from event_data
+ where website_id = {websiteId:UUID}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ and event_name = {event:String}
+ group by event_key, data_type, string_value, event_name
+ order by 1 asc, 2 asc, 3 asc, 4 desc
+ limit 100
+ `,
+ { ...filters, websiteId, startDate: maxDate(startDate, website.resetAt), endDate },
+ );
+ }
+
+ return rawQuery(
+ `
+ select
+ event_name as event,
+ event_key as field,
+ data_type as type,
+ count(*) as total
+ from event_data
+ where website_id = {websiteId:UUID}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ group by event_key, data_type, event_name
+ order by 1 asc, 2 asc
+ limit 100
+ `,
+ { websiteId, startDate: maxDate(startDate, website.resetAt), endDate },
+ );
+}
diff --git a/queries/analytics/eventData/getEventDataFields.ts b/queries/analytics/eventData/getEventDataFields.ts
index 92b477ab..516c58d0 100644
--- a/queries/analytics/eventData/getEventDataFields.ts
+++ b/queries/analytics/eventData/getEventDataFields.ts
@@ -2,18 +2,11 @@ import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { WebsiteEventDataFields } from 'lib/types';
-import { loadWebsite } from 'lib/query';
-import { DEFAULT_CREATED_AT } from 'lib/constants';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
export async function getEventDataFields(
- ...args: [
- websiteId: string,
- startDate: Date,
- endDate: Date,
- field?: string,
- event?: string,
- withEventNames?: boolean,
- ]
+ ...args: [websiteId: string, startDate: Date, endDate: Date, field?: string]
): Promise {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
@@ -21,168 +14,82 @@ export async function getEventDataFields(
});
}
-async function relationalQuery(
- websiteId: string,
- startDate: Date,
- endDate: Date,
- field: string,
- event: string,
- withEventNames: boolean,
-) {
- const { toUuid, rawQuery } = prisma;
+async function relationalQuery(websiteId: string, startDate: Date, endDate: Date, field: string) {
+ const { rawQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
if (field) {
- if (event) {
- return rawQuery(
- `select ed.event_key as field,
- ed.string_value as value,
- count(ed.*) as total
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = $1${toUuid()}
- and ed.event_key = $2
- and ed.created_at >= $3
- and ed.created_at between $4 and $5
- and e.event_name = $6
- group by ed.event_key, ed.string_value
- order by 3 desc, 2 desc, 1 asc
- `,
- [websiteId, field, resetDate, startDate, endDate, event] as any,
- );
- }
return rawQuery(
- `select event_key as field,
- string_value as value,
- count(*) as total
- from event_data
- where website_id = $1${toUuid()}
- and event_key = $2
- and created_at >= $3
- and created_at between $4 and $5
- group by event_key, string_value
- order by 3 desc, 2 desc, 1 asc
- `,
- [websiteId, field, resetDate, startDate, endDate] as any,
- );
- }
-
- if (withEventNames) {
- return rawQuery(
- `select
- ed.event_key as field,
- ed.data_type as type,
- count(ed.*) as total,
- e.event_name as event
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = $1${toUuid()}
- and ed.created_at >= $2
- and ed.created_at between $3 and $4
- group by e.event_name, ed.event_key, ed.data_type
- order by 3 desc, 2 asc, 1 asc
- `,
- [websiteId, resetDate, startDate, endDate] as any,
- );
- }
-
- return rawQuery(
- `select
- event_key as field,
- data_type as type,
- count(*) as total
- from event_data
- where website_id = $1${toUuid()}
- and created_at >= $2
- and created_at between $3 and $4
- group by event_key, data_type
- order by 3 desc, 2 asc, 1 asc
- `,
- [websiteId, resetDate, startDate, endDate] as any,
- );
-}
-
-async function clickhouseQuery(
- websiteId: string,
- startDate: Date,
- endDate: Date,
- field: string,
- event: string,
- withEventNames: boolean,
-) {
- const { rawQuery, getDateFormat, getBetweenDates } = clickhouse;
- const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
-
- if (field) {
- if (event) {
- return rawQuery(
- `select
- ed.event_key as field,
- ed.string_value as value,
- count(ed.*) as total
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = {websiteId:UUID}
- and ed.event_key = {field:String}
- and ed.created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('ed.created_at', startDate, endDate)}
- and e.event_name = {event:String}
- group by event_key, string_value
- order by 3 desc, 2 desc, 1 asc
- `,
- { websiteId, field, event },
- );
- }
- return rawQuery(
- `select
+ `
+ select
event_key as field,
string_value as value,
count(*) as total
- from event_data
- where website_id = {websiteId:UUID}
- and event_key = {field:String}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
- group by event_key, string_value
- order by 3 desc, 2 desc, 1 asc
- `,
- { websiteId, field },
- );
- }
-
- if (withEventNames) {
- return rawQuery(
- `select
- ed.event_key as field,
- ed.data_type as type,
- count(ed.*) as total,
- e.event_name as event
- from event_data as ed
- join website_event as e on e.event_id = ed.website_event_id
- where ed.website_id = {websiteId:UUID}
- and ed.created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('ed.created_at', startDate, endDate)}
- group by e.event_name, ed.event_key, ed.data_type
- order by 3 desc, 2 asc, 1 asc
+ from event_data
+ where website_id = {{websiteId::uuid}}
+ and event_key = {{field}}
+ and created_at between {{startDate}} and {{endDate}}
+ group by event_key, string_value
+ order by 3 desc, 2 desc, 1 asc
+ limit 100
`,
- [websiteId, resetDate, startDate, endDate] as any,
+ { websiteId, field, startDate: maxDate(startDate, website.resetAt), endDate },
);
}
return rawQuery(
- `select
- event_key as field,
- data_type as type,
- count(*) as total
- from event_data
- where website_id = {websiteId:UUID}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
- group by event_key, data_type
- order by 3 desc, 2 asc, 1 asc
+ `
+ select
+ event_key as field,
+ data_type as type,
+ count(*) as total
+ from event_data
+ where website_id = {{websiteId::uuid}}
+ and created_at between {{startDate}} and {{endDate}}
+ group by event_key, data_type
+ order by 3 desc, 2 asc, 1 asc
+ limit 100
`,
- { websiteId },
+ { websiteId, startDate: maxDate(startDate, website.resetAt), endDate },
+ );
+}
+
+async function clickhouseQuery(websiteId: string, startDate: Date, endDate: Date, field: string) {
+ const { rawQuery } = clickhouse;
+ const website = await loadWebsite(websiteId);
+
+ if (field) {
+ return rawQuery(
+ `
+ select
+ event_key as field,
+ string_value as value,
+ count(*) as total
+ from event_data
+ where website_id = {websiteId:UUID}
+ and event_key = {field:String}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ group by event_key, string_value
+ order by 3 desc, 2 desc, 1 asc
+ limit 100
+ `,
+ { websiteId, field, startDate: maxDate(startDate, website.resetAt), endDate },
+ );
+ }
+
+ return rawQuery(
+ `
+ select
+ event_key as field,
+ data_type as type,
+ count(*) as total
+ from event_data
+ where website_id = {websiteId:UUID}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ group by event_key, data_type
+ order by 3 desc, 2 asc, 1 asc
+ limit 100
+ `,
+ { websiteId, startDate: maxDate(startDate, website.resetAt), endDate },
);
}
diff --git a/queries/analytics/eventData/getEventDataUsage.ts b/queries/analytics/eventData/getEventDataUsage.ts
index 5d470c3c..78bf7c91 100644
--- a/queries/analytics/eventData/getEventDataUsage.ts
+++ b/queries/analytics/eventData/getEventDataUsage.ts
@@ -1,28 +1,26 @@
import clickhouse from 'lib/clickhouse';
-import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import { CLICKHOUSE, PRISMA, runQuery, notImplemented } from 'lib/db';
export function getEventDataUsage(...args: [websiteIds: string[], startDate: Date, endDate: Date]) {
return runQuery({
- [PRISMA]: () => relationalQuery(...args),
+ [PRISMA]: notImplemented,
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
-function relationalQuery(websiteIds: string[], startDate: Date, endDate: Date) {
- throw new Error('Not Implemented');
-}
-
function clickhouseQuery(websiteIds: string[], startDate: Date, endDate: Date) {
const { rawQuery } = clickhouse;
return rawQuery(
- `select
- website_id as websiteId,
- count(*) as count
+ `
+ select
+ website_id as websiteId,
+ count(*) as count
from event_data
where created_at between {startDate:DateTime64} and {endDate:DateTime64}
- and website_id in {websiteIds:Array(UUID)}
- group by website_id`,
+ and website_id in {websiteIds:Array(UUID)}
+ group by website_id
+ `,
{
websiteIds,
startDate,
diff --git a/queries/analytics/eventData/saveEventData.ts b/queries/analytics/eventData/saveEventData.ts
index 44ba2fce..50765229 100644
--- a/queries/analytics/eventData/saveEventData.ts
+++ b/queries/analytics/eventData/saveEventData.ts
@@ -1,6 +1,6 @@
import { Prisma } from '@prisma/client';
import { DATA_TYPE } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import { flattenJSON } from 'lib/dynamicData';
import kafka from 'lib/kafka';
@@ -31,7 +31,7 @@ async function relationalQuery(data: {
const jsonKeys = flattenJSON(eventData);
- //id, websiteEventId, eventStringValue
+ // id, websiteEventId, eventStringValue
const flattendData = jsonKeys.map(a => ({
id: uuid(),
websiteEventId: eventId,
diff --git a/queries/analytics/event/getEventMetrics.ts b/queries/analytics/events/getEventMetrics.ts
similarity index 61%
rename from queries/analytics/event/getEventMetrics.ts
rename to queries/analytics/events/getEventMetrics.ts
index ea05dae4..37044d1b 100644
--- a/queries/analytics/event/getEventMetrics.ts
+++ b/queries/analytics/events/getEventMetrics.ts
@@ -2,8 +2,9 @@ import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
import { WebsiteEventMetric } from 'lib/types';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
-import { loadWebsite } from 'lib/query';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
export async function getEventMetrics(
...args: [
@@ -45,26 +46,31 @@ async function relationalQuery(
};
},
) {
- const { toUuid, rawQuery, getDateQuery, getFilterQuery } = prisma;
+ const { rawQuery, getDateQuery, getFilterQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params: any = [websiteId, resetDate, startDate, endDate];
- const filterQuery = getFilterQuery(filters, params);
+ const filterQuery = getFilterQuery(filters);
return rawQuery(
- `select
+ `
+ select
event_name x,
${getDateQuery('created_at', unit, timezone)} t,
count(*) y
from website_event
- where website_id = $1${toUuid()}
- and created_at >= $2
- and created_at between $3 and $4
- and event_type = ${EVENT_TYPE.customEvent}
+ where website_id = {{websiteId::uuid}}
+ and created_at between {{startDate}} and {{endDate}}
+ and event_type = {{eventType}}
${filterQuery}
group by 1, 2
- order by 2`,
- params,
+ order by 2
+ `,
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.customEvent,
+ },
);
}
@@ -87,24 +93,30 @@ async function clickhouseQuery(
};
},
) {
- const { rawQuery, getDateQuery, getDateFormat, getBetweenDates, getFilterQuery } = clickhouse;
+ const { rawQuery, getDateQuery, getFilterQuery } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params = { websiteId };
+ const filterQuery = getFilterQuery(filters);
return rawQuery(
- `select
+ `
+ select
event_name x,
${getDateQuery('created_at', unit, timezone)} t,
count(*) y
from website_event
where website_id = {websiteId:UUID}
- and event_type = ${EVENT_TYPE.customEvent}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
- ${getFilterQuery(filters, params)}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ and event_type = {eventType:UInt32}
+ ${filterQuery}
group by x, t
- order by t`,
- params,
+ order by t
+ `,
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.customEvent,
+ },
);
}
diff --git a/queries/analytics/event/getEventUsage.ts b/queries/analytics/events/getEventUsage.ts
similarity index 51%
rename from queries/analytics/event/getEventUsage.ts
rename to queries/analytics/events/getEventUsage.ts
index 1465264c..e8ed975f 100644
--- a/queries/analytics/event/getEventUsage.ts
+++ b/queries/analytics/events/getEventUsage.ts
@@ -1,28 +1,26 @@
import clickhouse from 'lib/clickhouse';
-import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import { CLICKHOUSE, PRISMA, runQuery, notImplemented } from 'lib/db';
export function getEventUsage(...args: [websiteIds: string[], startDate: Date, endDate: Date]) {
return runQuery({
- [PRISMA]: () => relationalQuery(...args),
+ [PRISMA]: notImplemented,
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
-function relationalQuery(websiteIds: string[], startDate: Date, endDate: Date) {
- throw new Error('Not Implemented');
-}
-
function clickhouseQuery(websiteIds: string[], startDate: Date, endDate: Date) {
const { rawQuery } = clickhouse;
return rawQuery(
- `select
- website_id as websiteId,
- count(*) as count
+ `
+ select
+ website_id as websiteId,
+ count(*) as count
from website_event
- where created_at between {startDate:DateTime64} and {endDate:DateTime64}
- and website_id in {websiteIds:Array(UUID)}
- group by website_id`,
+ where website_id in {websiteIds:Array(UUID)}
+ and created_at between {startDate:DateTime64} and {endDate:DateTime64}
+ group by website_id
+ `,
{
websiteIds,
startDate,
diff --git a/queries/analytics/event/getEvents.ts b/queries/analytics/events/getEvents.ts
similarity index 63%
rename from queries/analytics/event/getEvents.ts
rename to queries/analytics/events/getEvents.ts
index 8197019d..17528d66 100644
--- a/queries/analytics/event/getEvents.ts
+++ b/queries/analytics/events/getEvents.ts
@@ -2,30 +2,31 @@ import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
-export function getEvents(...args: [websiteId: string, startAt: Date, eventType: number]) {
+export function getEvents(...args: [websiteId: string, startDate: Date, eventType: number]) {
return runQuery({
[PRISMA]: () => relationalQuery(...args),
[CLICKHOUSE]: () => clickhouseQuery(...args),
});
}
-function relationalQuery(websiteId: string, startAt: Date, eventType: number) {
+function relationalQuery(websiteId: string, startDate: Date, eventType: number) {
return prisma.client.websiteEvent.findMany({
where: {
websiteId,
eventType,
createdAt: {
- gte: startAt,
+ gte: startDate,
},
},
});
}
-function clickhouseQuery(websiteId: string, startAt: Date, eventType: number) {
+function clickhouseQuery(websiteId: string, startDate: Date, eventType: number) {
const { rawQuery } = clickhouse;
return rawQuery(
- `select
+ `
+ select
event_id as id,
website_id as websiteId,
session_id as sessionId,
@@ -35,12 +36,13 @@ function clickhouseQuery(websiteId: string, startAt: Date, eventType: number) {
referrer_domain as referrerDomain,
event_name as eventName
from website_event
- where event_type = {eventType:UInt32}
- and website_id = {websiteId:UUID}
- and created_at >= {startAt:DateTime('UTC')}`,
+ where website_id = {websiteId:UUID}
+ and created_at >= {startDate:DateTime}
+ and event_type = {eventType:UInt32}
+ `,
{
websiteId,
- startAt,
+ startDate,
eventType,
},
);
diff --git a/queries/analytics/event/saveEvent.ts b/queries/analytics/events/saveEvent.ts
similarity index 99%
rename from queries/analytics/event/saveEvent.ts
rename to queries/analytics/events/saveEvent.ts
index 51087a59..cfefc326 100644
--- a/queries/analytics/event/saveEvent.ts
+++ b/queries/analytics/events/saveEvent.ts
@@ -2,7 +2,7 @@ import { EVENT_NAME_LENGTH, URL_LENGTH, EVENT_TYPE } from 'lib/constants';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import kafka from 'lib/kafka';
import prisma from 'lib/prisma';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { saveEventData } from 'queries/analytics/eventData/saveEventData';
export async function saveEvent(args: {
diff --git a/queries/analytics/pageview/getPageviewStats.ts b/queries/analytics/pageview/getPageviewStats.ts
deleted file mode 100644
index 989a6d4d..00000000
--- a/queries/analytics/pageview/getPageviewStats.ts
+++ /dev/null
@@ -1,120 +0,0 @@
-import clickhouse from 'lib/clickhouse';
-import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
-import prisma from 'lib/prisma';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
-import { loadWebsite } from 'lib/query';
-
-export async function getPageviewStats(
- ...args: [
- websiteId: string,
- criteria: {
- startDate: Date;
- endDate: Date;
- timezone?: string;
- unit?: string;
- count?: string;
- filters: object;
- sessionKey?: string;
- },
- ]
-) {
- return runQuery({
- [PRISMA]: () => relationalQuery(...args),
- [CLICKHOUSE]: () => clickhouseQuery(...args),
- });
-}
-
-async function relationalQuery(
- websiteId: string,
- criteria: {
- startDate: Date;
- endDate: Date;
- timezone?: string;
- unit?: string;
- count?: string;
- filters: object;
- sessionKey?: string;
- },
-) {
- const {
- startDate,
- endDate,
- timezone = 'utc',
- unit = 'day',
- count = '*',
- filters = {},
- sessionKey = 'session_id',
- } = criteria;
- const { toUuid, getDateQuery, parseFilters, rawQuery } = prisma;
- const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params: any = [websiteId, resetDate, startDate, endDate];
- const { filterQuery, joinSession } = parseFilters(filters, params);
-
- return rawQuery(
- `select ${getDateQuery('website_event.created_at', unit, timezone)} x,
- count(${count !== '*' ? `${count}${sessionKey}` : count}) y
- from website_event
- ${joinSession}
- where website_event.website_id = $1${toUuid()}
- and website_event.created_at >= $2
- and website_event.created_at between $3 and $4
- and event_type = ${EVENT_TYPE.pageView}
- ${filterQuery}
- group by 1`,
- params,
- );
-}
-
-async function clickhouseQuery(
- websiteId: string,
- criteria: {
- startDate: Date;
- endDate: Date;
- timezone?: string;
- unit?: string;
- count?: string;
- filters: object;
- sessionKey?: string;
- },
-) {
- const {
- startDate,
- endDate,
- timezone = 'UTC',
- unit = 'day',
- count = '*',
- filters = {},
- } = criteria;
- const {
- parseFilters,
- getDateFormat,
- rawQuery,
- getDateStringQuery,
- getDateQuery,
- getBetweenDates,
- } = clickhouse;
- const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params = { websiteId };
- const { filterQuery } = parseFilters(filters, params);
-
- return rawQuery(
- `select
- ${getDateStringQuery('g.t', unit)} as x,
- g.y as y
- from
- (select
- ${getDateQuery('created_at', unit, timezone)} t,
- count(${count !== '*' ? 'distinct session_id' : count}) y
- from website_event
- where website_id = {websiteId:UUID}
- and event_type = ${EVENT_TYPE.pageView}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
- ${filterQuery}
- group by t) g
- order by t`,
- params,
- );
-}
diff --git a/queries/analytics/pageview/getPageviewMetrics.ts b/queries/analytics/pageviews/getPageviewMetrics.ts
similarity index 62%
rename from queries/analytics/pageview/getPageviewMetrics.ts
rename to queries/analytics/pageviews/getPageviewMetrics.ts
index b2f815f2..677de980 100644
--- a/queries/analytics/pageview/getPageviewMetrics.ts
+++ b/queries/analytics/pageviews/getPageviewMetrics.ts
@@ -1,8 +1,9 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
-import { loadWebsite } from 'lib/query';
+import { EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
export async function getPageviewMetrics(
...args: [
@@ -31,39 +32,40 @@ async function relationalQuery(
},
) {
const { startDate, endDate, filters = {}, column } = criteria;
- const { rawQuery, parseFilters, toUuid } = prisma;
+ const { rawQuery, parseFilters } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params: any = [
+ const params: any = {
websiteId,
- resetDate,
- startDate,
+ startDate: maxDate(startDate, website.resetAt),
endDate,
- column === 'event_name' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
- ];
+ eventType: column === 'event_name' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
+ };
let excludeDomain = '';
if (column === 'referrer_domain') {
- excludeDomain = 'and (website_event.referrer_domain != $6 or website_event.referrer_domain is null)';
- params.push(website.domain);
+ excludeDomain =
+ 'and (website_event.referrer_domain != {{domain}} or website_event.referrer_domain is null)';
+
+ params.domain = website.domain;
}
- const { filterQuery, joinSession } = parseFilters(filters, params);
+ const { filterQuery, joinSession } = parseFilters(filters);
return rawQuery(
- `select ${column} x, count(*) y
+ `
+ select ${column} x, count(*) y
from website_event
${joinSession}
- where website_event.website_id = $1${toUuid()}
- and website_event.created_at >= $2
- and website_event.created_at between $3 and $4
- and event_type = $5
+ where website_event.website_id = {{websiteId::uuid}}
+ and website_event.created_at between {{startDate}} and {{endDate}}
+ and event_type = {{eventType}}
${excludeDomain}
${filterQuery}
group by 1
order by 2 desc
- limit 100`,
+ limit 100
+ `,
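+    // The {{name}} placeholders are filled from matching keys in the params object.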
params,
);
}
@@ -78,11 +80,12 @@ async function clickhouseQuery(
},
) {
const { startDate, endDate, filters = {}, column } = criteria;
- const { rawQuery, getDateFormat, parseFilters, getBetweenDates } = clickhouse;
+ const { rawQuery, parseFilters } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
const params = {
websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
eventType: column === 'event_name' ? EVENT_TYPE.customEvent : EVENT_TYPE.pageView,
domain: undefined,
};
@@ -97,17 +100,18 @@ async function clickhouseQuery(
const { filterQuery } = parseFilters(filters, params);
return rawQuery(
- `select ${column} x, count(*) y
+ `
+ select ${column} x, count(*) y
from website_event
where website_id = {websiteId:UUID}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
and event_type = {eventType:UInt32}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
${excludeDomain}
${filterQuery}
group by x
order by y desc
- limit 100`,
+ limit 100
+ `,
params,
);
}
diff --git a/queries/analytics/pageviews/getPageviewStats.ts b/queries/analytics/pageviews/getPageviewStats.ts
new file mode 100644
index 00000000..31b0ebdd
--- /dev/null
+++ b/queries/analytics/pageviews/getPageviewStats.ts
@@ -0,0 +1,103 @@
+import clickhouse from 'lib/clickhouse';
+import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import prisma from 'lib/prisma';
+import { EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
+
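+// Criteria shared by the Prisma and ClickHouse implementations below.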
+export interface PageviewStatsCriteria {
+ startDate: Date;
+ endDate: Date;
+ timezone?: string;
+ unit?: string;
+ count?: string;
+ filters: object;
+ sessionKey?: string;
+}
+
+export async function getPageviewStats(
+ ...args: [websiteId: string, criteria: PageviewStatsCriteria]
+) {
+ return runQuery({
+ [PRISMA]: () => relationalQuery(...args),
+ [CLICKHOUSE]: () => clickhouseQuery(...args),
+ });
+}
+
+async function relationalQuery(websiteId: string, criteria: PageviewStatsCriteria) {
+ const {
+ startDate,
+ endDate,
+ timezone = 'utc',
+ unit = 'day',
+ count = '*',
+ filters = {},
+ sessionKey = 'session_id',
+ } = criteria;
+ const { getDateQuery, parseFilters, rawQuery } = prisma;
+ const website = await loadWebsite(websiteId);
+ const { filterQuery, joinSession } = parseFilters(filters);
+
+ return rawQuery(
+ `
+ select
+ ${getDateQuery('website_event.created_at', unit, timezone)} x,
+ count(${count !== '*' ? `${count}${sessionKey}` : count}) y
+ from website_event
+ ${joinSession}
+ where website_event.website_id = {{websiteId::uuid}}
+ and website_event.created_at between {{startDate}} and {{endDate}}
+ and event_type = {{eventType}}
+ ${filterQuery}
+ group by 1
+ `,
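+    // Filters are spread into the params so any placeholders in filterQuery can resolve to their values.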
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.pageView,
+ },
+ );
+}
+
+async function clickhouseQuery(websiteId: string, criteria: PageviewStatsCriteria) {
+ const {
+ startDate,
+ endDate,
+ timezone = 'UTC',
+ unit = 'day',
+ count = '*',
+ filters = {},
+ } = criteria;
+ const { parseFilters, rawQuery, getDateStringQuery, getDateQuery } = clickhouse;
+ const website = await loadWebsite(websiteId);
+ const { filterQuery } = parseFilters(filters);
+
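+  // A count other than '*' counts distinct sessions (unique visitors) instead of raw pageviews.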
+ return rawQuery(
+ `
+ select
+ ${getDateStringQuery('g.t', unit)} as x,
+ g.y as y
+ from (
+ select
+ ${getDateQuery('created_at', unit, timezone)} as t,
+ count(${count !== '*' ? 'distinct session_id' : count}) as y
+ from website_event
+ where website_id = {websiteId:UUID}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ and event_type = {eventType:UInt32}
+ ${filterQuery}
+ group by t
+ ) as g
+ order by t
+ `,
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.pageView,
+ },
+ );
+}
diff --git a/queries/analytics/pageview/getPageviewFunnel.ts b/queries/analytics/reports/getFunnel.ts
similarity index 78%
rename from queries/analytics/pageview/getPageviewFunnel.ts
rename to queries/analytics/reports/getFunnel.ts
index bfd921c1..1dde1a13 100644
--- a/queries/analytics/pageview/getPageviewFunnel.ts
+++ b/queries/analytics/reports/getFunnel.ts
@@ -2,7 +2,7 @@ import clickhouse from 'lib/clickhouse';
import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
import prisma from 'lib/prisma';
-export async function getPageviewFunnel(
+export async function getFunnel(
...args: [
websiteId: string,
criteria: {
@@ -34,18 +34,16 @@ async function relationalQuery(
}[]
> {
const { windowMinutes, startDate, endDate, urls } = criteria;
- const { rawQuery, getFunnelQuery, toUuid } = prisma;
+ const { rawQuery, getFunnelQuery } = prisma;
const { levelQuery, sumQuery, urlFilterQuery } = getFunnelQuery(urls, windowMinutes);
- const params: any = [websiteId, startDate, endDate, ...urls];
-
return rawQuery(
`WITH level0 AS (
select distinct session_id, url_path, referrer_path, created_at
from website_event
where url_path in (${urlFilterQuery})
- and website_id = $1${toUuid()}
- and created_at between $2 and $3
+ and website_id = {{websiteId::uuid}}
+ and created_at between {{startDate}} and {{endDate}}
),level1 AS (
select distinct session_id, url_path as level_1_url, created_at as level_1_created_at
from level0
@@ -55,7 +53,7 @@ async function relationalQuery(
SELECT ${sumQuery}
from level${urls.length};
`,
- params,
+ { websiteId, startDate, endDate, ...urls },
).then((a: { [key: string]: number }) => {
return urls.map((b, i) => ({ x: b, y: a[0][`level${i + 1}`] || 0 }));
});
@@ -76,14 +74,8 @@ async function clickhouseQuery(
}[]
> {
const { windowMinutes, startDate, endDate, urls } = criteria;
- const { rawQuery, getBetweenDates, getFunnelQuery } = clickhouse;
- const { columnsQuery, conditionQuery, urlParams } = getFunnelQuery(urls);
-
- const params = {
- websiteId,
- window: windowMinutes * 60,
- ...urlParams,
- };
+ const { rawQuery, getFunnelQuery } = clickhouse;
+ const { columnsQuery, urlParams } = getFunnelQuery(urls);
return rawQuery<{ level: number; count: number }[]>(
`
@@ -98,13 +90,19 @@ async function clickhouseQuery(
) AS level
FROM website_event
WHERE website_id = {websiteId:UUID}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ AND created_at BETWEEN {startDate:DateTime} AND {endDate:DateTime}
GROUP BY 1
)
GROUP BY level
ORDER BY level ASC;
`,
- params,
+ {
+ websiteId,
+ startDate,
+ endDate,
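+      // The funnel window is converted from minutes to seconds for ClickHouse.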
+ window: windowMinutes * 60,
+ ...urlParams,
+ },
).then(results => {
return urls.map((a, i) => ({
x: a,
diff --git a/queries/analytics/reports/getInsights.ts b/queries/analytics/reports/getInsights.ts
new file mode 100644
index 00000000..1d8970ed
--- /dev/null
+++ b/queries/analytics/reports/getInsights.ts
@@ -0,0 +1,42 @@
+import { CLICKHOUSE, PRISMA, runQuery } from 'lib/db';
+import prisma from 'lib/prisma';
+import clickhouse from 'lib/clickhouse';
+
+export interface GetInsightsCriteria {
+ startDate: Date;
+ endDate: Date;
+ fields: string[];
+ filters: string[];
+ groups: string[];
+}
+
+export async function getInsights(...args: [websiteId: string, criteria: GetInsightsCriteria]) {
+ return runQuery({
+ [PRISMA]: () => relationalQuery(...args),
+ [CLICKHOUSE]: () => clickhouseQuery(...args),
+ });
+}
+
+async function relationalQuery(
+ websiteId: string,
+ criteria: GetInsightsCriteria,
+): Promise<
+ {
+ x: string;
+ y: number;
+ }[]
+> {
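+  // Stub: insights are not yet implemented for relational databases.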
+ return null;
+}
+
+async function clickhouseQuery(
+ websiteId: string,
+ criteria: GetInsightsCriteria,
+): Promise<
+ {
+ x: string;
+ y: number;
+ }[]
+> {
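+  // Stub: the ClickHouse implementation is also still pending.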
+ return null;
+}
diff --git a/queries/analytics/session/createSession.ts b/queries/analytics/sessions/createSession.ts
similarity index 100%
rename from queries/analytics/session/createSession.ts
rename to queries/analytics/sessions/createSession.ts
diff --git a/queries/analytics/session/getSession.ts b/queries/analytics/sessions/getSession.ts
similarity index 100%
rename from queries/analytics/session/getSession.ts
rename to queries/analytics/sessions/getSession.ts
diff --git a/queries/analytics/session/getSessionMetrics.ts b/queries/analytics/sessions/getSessionMetrics.ts
similarity index 58%
rename from queries/analytics/session/getSessionMetrics.ts
rename to queries/analytics/sessions/getSessionMetrics.ts
index ef8c79c5..e037176b 100644
--- a/queries/analytics/session/getSessionMetrics.ts
+++ b/queries/analytics/sessions/getSessionMetrics.ts
@@ -1,8 +1,9 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
-import { loadWebsite } from 'lib/query';
+import { DEFAULT_RESET_DATE, EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
export async function getSessionMetrics(
...args: [
@@ -21,11 +22,9 @@ async function relationalQuery(
criteria: { startDate: Date; endDate: Date; column: string; filters: object },
) {
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
const { startDate, endDate, column, filters = {} } = criteria;
- const { toUuid, parseFilters, rawQuery } = prisma;
- const params: any = [websiteId, resetDate, startDate, endDate];
- const { filterQuery, joinSession } = parseFilters(filters, params);
+ const { parseFilters, rawQuery } = prisma;
+ const { filterQuery, joinSession } = parseFilters(filters);
return rawQuery(
`select ${column} x, count(*) y
@@ -36,15 +35,14 @@ async function relationalQuery(
join website
on website_event.website_id = website.website_id
${joinSession}
- where website.website_id = $1${toUuid()}
- and website_event.created_at >= $2
- and website_event.created_at between $3 and $4
+ where website.website_id = {{websiteId::uuid}}
+ and website_event.created_at between {{startDate}} and {{endDate}}
${filterQuery}
)
group by 1
order by 2 desc
limit 100`,
- params,
+ { ...filters, websiteId, startDate: maxDate(startDate, website.resetAt), endDate },
);
}
@@ -53,23 +51,29 @@ async function clickhouseQuery(
data: { startDate: Date; endDate: Date; column: string; filters: object },
) {
const { startDate, endDate, column, filters = {} } = data;
- const { getDateFormat, parseFilters, getBetweenDates, rawQuery } = clickhouse;
+ const { parseFilters, rawQuery } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params = { websiteId };
- const { filterQuery } = parseFilters(filters, params);
+ const { filterQuery } = parseFilters(filters);
return rawQuery(
- `select ${column} x, count(distinct session_id) y
+ `
+ select
+ ${column} x, count(distinct session_id) y
from website_event as x
where website_id = {websiteId:UUID}
- and event_type = ${EVENT_TYPE.pageView}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ and event_type = {eventType:UInt32}
${filterQuery}
group by x
order by y desc
- limit 100`,
- params,
+ limit 100
+ `,
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.pageView,
+ },
);
}
diff --git a/queries/analytics/session/getSessions.ts b/queries/analytics/sessions/getSessions.ts
similarity index 77%
rename from queries/analytics/session/getSessions.ts
rename to queries/analytics/sessions/getSessions.ts
index a4fbb501..6936f902 100644
--- a/queries/analytics/session/getSessions.ts
+++ b/queries/analytics/sessions/getSessions.ts
@@ -9,22 +9,23 @@ export async function getSessions(...args: [websiteId: string, startAt: Date]) {
});
}
-async function relationalQuery(websiteId: string, startAt: Date) {
+async function relationalQuery(websiteId: string, startDate: Date) {
return prisma.client.session.findMany({
where: {
websiteId,
createdAt: {
- gte: startAt,
+ gte: startDate,
},
},
});
}
-async function clickhouseQuery(websiteId: string, startAt: Date) {
+async function clickhouseQuery(websiteId: string, startDate: Date) {
const { rawQuery } = clickhouse;
return rawQuery(
- `select distinct
+ `
+ select distinct
session_id as id,
website_id as websiteId,
created_at as createdAt,
@@ -41,10 +42,11 @@ async function clickhouseQuery(websiteId: string, startAt: Date) {
city
from website_event
where website_id = {websiteId:UUID}
- and created_at >= {startAt:DateTime('UTC')}`,
+ and created_at >= {startDate:DateTime}
+ `,
{
websiteId,
- startAt,
+ startDate,
},
);
}
diff --git a/queries/analytics/session/saveSessionData.ts b/queries/analytics/sessions/saveSessionData.ts
similarity index 96%
rename from queries/analytics/session/saveSessionData.ts
rename to queries/analytics/sessions/saveSessionData.ts
index 192053f1..246813e7 100644
--- a/queries/analytics/session/saveSessionData.ts
+++ b/queries/analytics/sessions/saveSessionData.ts
@@ -1,5 +1,5 @@
import { DATA_TYPE } from 'lib/constants';
-import { uuid } from 'lib/crypto';
+import { uuid } from 'next-basics';
import { flattenJSON } from 'lib/dynamicData';
import prisma from 'lib/prisma';
import { DynamicData } from 'lib/types';
diff --git a/queries/analytics/stats/getActiveVisitors.ts b/queries/analytics/stats/getActiveVisitors.ts
index 89f092c1..8dcfd7c9 100644
--- a/queries/analytics/stats/getActiveVisitors.ts
+++ b/queries/analytics/stats/getActiveVisitors.ts
@@ -11,31 +11,32 @@ export async function getActiveVisitors(...args: [websiteId: string]) {
}
async function relationalQuery(websiteId: string) {
- const { toUuid, rawQuery } = prisma;
-
- const date = subMinutes(new Date(), 5);
- const params: any = [websiteId, date];
+ const { rawQuery } = prisma;
return rawQuery(
- `select count(distinct session_id) x
+ `
+ select count(distinct session_id) x
from website_event
- join website
+ join website
on website_event.website_id = website.website_id
- where website.website_id = $1${toUuid()}
- and website_event.created_at >= $2`,
- params,
+ where website.website_id = {{websiteId::uuid}}
+ and website_event.created_at >= {{startAt}}
+ `,
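+    // "Active" means a session with at least one event in the last five minutes.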
+ { websiteId, startAt: subMinutes(new Date(), 5) },
);
}
async function clickhouseQuery(websiteId: string) {
const { rawQuery } = clickhouse;
- const params = { websiteId, startAt: subMinutes(new Date(), 5) };
return rawQuery(
- `select count(distinct session_id) x
+ `
+ select
+ count(distinct session_id) x
from website_event
where website_id = {websiteId:UUID}
- and created_at >= {startAt:DateTime('UTC')}`,
- params,
+ and created_at >= {startAt:DateTime}
+ `,
+ { websiteId, startAt: subMinutes(new Date(), 5) },
);
}
diff --git a/queries/analytics/stats/getRealtimeData.ts b/queries/analytics/stats/getRealtimeData.ts
index 8c215215..2b8f1212 100644
--- a/queries/analytics/stats/getRealtimeData.ts
+++ b/queries/analytics/stats/getRealtimeData.ts
@@ -1,6 +1,5 @@
-import { md5 } from 'lib/crypto';
-import { getSessions } from '../session/getSessions';
-import { getEvents } from '../event/getEvents';
+import { md5 } from 'next-basics';
+import { getSessions, getEvents } from 'queries';
import { EVENT_TYPE } from 'lib/constants';
export async function getRealtimeData(websiteId, time) {
@@ -20,7 +19,7 @@ export async function getRealtimeData(websiteId, time) {
};
return {
- pageviews: decorate('pageview', pageviews),
+ pageviews: decorate('pageviews', pageviews),
sessions: decorate('session', sessions),
events: decorate('event', events),
timestamp: Date.now(),
diff --git a/queries/analytics/stats/getWebsiteDateRange.ts b/queries/analytics/stats/getWebsiteDateRange.ts
new file mode 100644
index 00000000..1f94c398
--- /dev/null
+++ b/queries/analytics/stats/getWebsiteDateRange.ts
@@ -0,0 +1,47 @@
+import prisma from 'lib/prisma';
+import clickhouse from 'lib/clickhouse';
+import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
+import { loadWebsite } from 'lib/load';
+import { DEFAULT_RESET_DATE } from 'lib/constants';
+import { maxDate } from 'lib/date';
+
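+// Returns the oldest and newest website_event timestamps, bounded below by the default reset date and the website's own resetAt.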
+export async function getWebsiteDateRange(...args: [websiteId: string]) {
+ return runQuery({
+ [PRISMA]: () => relationalQuery(...args),
+ [CLICKHOUSE]: () => clickhouseQuery(...args),
+ });
+}
+
+async function relationalQuery(websiteId: string) {
+ const { rawQuery } = prisma;
+ const website = await loadWebsite(websiteId);
+
+ return rawQuery(
+ `
+ select
+ min(created_at) as min,
+ max(created_at) as max
+ from website_event
+ where website_id = {{websiteId::uuid}}
+ and created_at >= {{startDate}}
+ `,
+ { websiteId, startDate: maxDate(new Date(DEFAULT_RESET_DATE), new Date(website.resetAt)) },
+ );
+}
+
+async function clickhouseQuery(websiteId: string) {
+ const { rawQuery } = clickhouse;
+ const website = await loadWebsite(websiteId);
+
+ return rawQuery(
+ `
+ select
+ min(created_at) as min,
+ max(created_at) as max
+ from website_event
+ where website_id = {websiteId:UUID}
+ and created_at >= {startDate:DateTime}
+ `,
+ { websiteId, startDate: maxDate(new Date(DEFAULT_RESET_DATE), new Date(website.resetAt)) },
+ );
+}
diff --git a/queries/analytics/stats/getWebsiteStats.ts b/queries/analytics/stats/getWebsiteStats.ts
index 5d5a1f9a..f44abafa 100644
--- a/queries/analytics/stats/getWebsiteStats.ts
+++ b/queries/analytics/stats/getWebsiteStats.ts
@@ -1,8 +1,9 @@
import prisma from 'lib/prisma';
import clickhouse from 'lib/clickhouse';
import { runQuery, CLICKHOUSE, PRISMA } from 'lib/db';
-import { DEFAULT_CREATED_AT, EVENT_TYPE } from 'lib/constants';
-import { loadWebsite } from 'lib/query';
+import { EVENT_TYPE } from 'lib/constants';
+import { loadWebsite } from 'lib/load';
+import { maxDate } from 'lib/date';
export async function getWebsiteStats(
...args: [
@@ -21,34 +22,41 @@ async function relationalQuery(
criteria: { startDate: Date; endDate: Date; filters: object },
) {
const { startDate, endDate, filters = {} } = criteria;
- const { toUuid, getDateQuery, getTimestampInterval, parseFilters, rawQuery } = prisma;
+ const { getDateQuery, getTimestampIntervalQuery, parseFilters, rawQuery } = prisma;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params: any = [websiteId, resetDate, startDate, endDate];
- const { filterQuery, joinSession } = parseFilters(filters, params);
+ const { filterQuery, joinSession } = parseFilters(filters);
return rawQuery(
- `select sum(t.c) as "pageviews",
- count(distinct t.session_id) as "uniques",
- sum(case when t.c = 1 then 1 else 0 end) as "bounces",
- sum(t.time) as "totaltime"
- from (
- select website_event.session_id,
- ${getDateQuery('website_event.created_at', 'hour')},
- count(*) c,
- ${getTimestampInterval('website_event.created_at')} as "time"
- from website_event
- join website
- on website_event.website_id = website.website_id
- ${joinSession}
- where event_type = ${EVENT_TYPE.pageView}
- and website.website_id = $1${toUuid()}
- and website_event.created_at >= $2
- and website_event.created_at between $3 and $4
- ${filterQuery}
- group by 1, 2
- ) t`,
- params,
+ `
+ select
+ sum(t.c) as "pageviews",
+ count(distinct t.session_id) as "uniques",
+ sum(case when t.c = 1 then 1 else 0 end) as "bounces",
+ sum(t.time) as "totaltime"
+ from (
+ select
+ website_event.session_id,
+ ${getDateQuery('website_event.created_at', 'hour')},
+ count(*) as c,
+ ${getTimestampIntervalQuery('website_event.created_at')} as "time"
+ from website_event
+ join website
+ on website_event.website_id = website.website_id
+ ${joinSession}
+ where event_type = {{eventType}}
+ and website.website_id = {{websiteId::uuid}}
+ and website_event.created_at between {{startDate}} and {{endDate}}
+ ${filterQuery}
+ group by 1, 2
+ ) as t
+ `,
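+      // A bounce is a session/hour bucket that recorded exactly one pageview (t.c = 1).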
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.pageView,
+ },
);
}
@@ -57,32 +65,38 @@ async function clickhouseQuery(
criteria: { startDate: Date; endDate: Date; filters: object },
) {
const { startDate, endDate, filters = {} } = criteria;
- const { rawQuery, getDateFormat, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
+ const { rawQuery, getDateQuery, parseFilters } = clickhouse;
const website = await loadWebsite(websiteId);
- const resetDate = new Date(website?.resetAt || DEFAULT_CREATED_AT);
- const params = { websiteId };
- const { filterQuery } = parseFilters(filters, params);
+ const { filterQuery } = parseFilters(filters);
return rawQuery(
- `select
- sum(t.c) as "pageviews",
- count(distinct t.session_id) as "uniques",
- sum(if(t.c = 1, 1, 0)) as "bounces",
- sum(if(max_time < min_time + interval 1 hour, max_time-min_time, 0)) as "totaltime"
- from (
- select session_id,
- ${getDateQuery('created_at', 'day')} time_series,
- count(*) c,
- min(created_at) min_time,
- max(created_at) max_time
- from website_event
- where event_type = ${EVENT_TYPE.pageView}
- and website_id = {websiteId:UUID}
- and created_at >= ${getDateFormat(resetDate)}
- and ${getBetweenDates('created_at', startDate, endDate)}
- ${filterQuery}
- group by session_id, time_series
- ) t;`,
- params,
+ `
+ select
+ sum(t.c) as "pageviews",
+ count(distinct t.session_id) as "uniques",
+ sum(if(t.c = 1, 1, 0)) as "bounces",
+ sum(if(max_time < min_time + interval 1 hour, max_time-min_time, 0)) as "totaltime"
+ from (
+ select
+ session_id,
+ ${getDateQuery('created_at', 'day')} time_series,
+ count(*) c,
+ min(created_at) min_time,
+ max(created_at) max_time
+ from website_event
+ where website_id = {websiteId:UUID}
+ and created_at between {startDate:DateTime} and {endDate:DateTime}
+ and event_type = {eventType:UInt32}
+ ${filterQuery}
+ group by session_id, time_series
+ ) as t;
+ `,
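+      // totaltime only counts buckets whose first and last events are less than an hour apart.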
+ {
+ ...filters,
+ websiteId,
+ startDate: maxDate(startDate, website.resetAt),
+ endDate,
+ eventType: EVENT_TYPE.pageView,
+ },
);
}
diff --git a/queries/index.js b/queries/index.js
index b0e4e00b..f86551c4 100644
--- a/queries/index.js
+++ b/queries/index.js
@@ -3,19 +3,23 @@ export * from './admin/teamUser';
export * from './admin/user';
export * from './admin/report';
export * from './admin/website';
-export * from './analytics/event/getEventMetrics';
-export * from './analytics/event/getEventUsage';
-export * from './analytics/event/getEvents';
+export * from './analytics/events/getEventMetrics';
+export * from './analytics/events/getEventUsage';
+export * from './analytics/events/getEvents';
+export * from './analytics/eventData/getEventDataEvents';
export * from './analytics/eventData/getEventDataFields';
export * from './analytics/eventData/getEventDataUsage';
-export * from './analytics/event/saveEvent';
-export * from './analytics/pageview/getPageviewFunnel';
-export * from './analytics/pageview/getPageviewMetrics';
-export * from './analytics/pageview/getPageviewStats';
-export * from './analytics/session/createSession';
-export * from './analytics/session/getSession';
-export * from './analytics/session/getSessionMetrics';
-export * from './analytics/session/getSessions';
+export * from './analytics/events/saveEvent';
+export * from './analytics/reports/getFunnel';
+export * from './analytics/reports/getInsights';
+export * from './analytics/pageviews/getPageviewMetrics';
+export * from './analytics/pageviews/getPageviewStats';
+export * from './analytics/sessions/createSession';
+export * from './analytics/sessions/getSession';
+export * from './analytics/sessions/getSessionMetrics';
+export * from './analytics/sessions/getSessions';
+export * from './analytics/sessions/saveSessionData';
export * from './analytics/stats/getActiveVisitors';
export * from './analytics/stats/getRealtimeData';
+export * from './analytics/stats/getWebsiteDateRange';
export * from './analytics/stats/getWebsiteStats';
diff --git a/scripts/telemetry.js b/scripts/telemetry.js
index fe9ab6e7..24cd40c9 100644
--- a/scripts/telemetry.js
+++ b/scripts/telemetry.js
@@ -1,40 +1,24 @@
-const fs = require('fs-extra');
-const path = require('path');
const os = require('os');
const isCI = require('is-ci');
const pkg = require('../package.json');
-const dest = path.resolve(__dirname, '../.next/cache/umami.json');
const url = 'https://api.umami.is/v1/telemetry';
-async function sendTelemetry(action) {
- let json = {};
-
- try {
- json = await fs.readJSON(dest);
- } catch {
- // Ignore
- }
-
- try {
- await fs.writeJSON(dest, { version: pkg.version });
- } catch {
- // Ignore
- }
-
+async function sendTelemetry(type) {
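+  // Reports the running version and host environment (node, platform, arch, docker/CI flags) for the given event type.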
const { default: isDocker } = await import('is-docker');
const { default: fetch } = await import('node-fetch');
- const payload = {
- action,
- version: pkg.version,
- node: process.version,
- platform: os.platform(),
- arch: os.arch(),
- os: `${os.type()} (${os.version()})`,
- isDocker: isDocker(),
- isCi: isCI,
- prevVersion: json.version,
+ const data = {
+ type,
+ payload: {
+ version: pkg.version,
+ node: process.version,
+ platform: os.platform(),
+ arch: os.arch(),
+ os: `${os.type()} (${os.version()})`,
+ isDocker: isDocker(),
+ isCi: isCI,
+ },
};
try {
@@ -44,7 +28,7 @@ async function sendTelemetry(action) {
headers: {
'Content-Type': 'application/json',
},
- body: JSON.stringify(payload),
+ body: JSON.stringify(data),
});
} catch {
// Ignore
diff --git a/yarn.lock b/yarn.lock
index db55eeca..ec66b4ac 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -6260,14 +6260,15 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
-next-basics@^0.31.0:
- version "0.31.0"
- resolved "https://registry.npmjs.org/next-basics/-/next-basics-0.31.0.tgz"
- integrity sha512-uAum7v4DQ37IdqKdmLTFLMyN+ir7FNMeMHQcTd1RQ14sm/fO/tntadlsXulwj/A5xs3gmnW/gfGkLX7VnNjreg==
+next-basics@^0.33.0:
+ version "0.33.0"
+ resolved "https://registry.yarnpkg.com/next-basics/-/next-basics-0.33.0.tgz#da4736dbaa6b8461ae1fe40de4cde20cbe7744e6"
+ integrity sha512-pKynp6/zMboQk/mlzyOZgfFsYShKpieu3XoQM5EmTL311mP2KkcW59hiVHloLR7W6zaaf/ONWzi0L1iw05Qfag==
dependencies:
bcryptjs "^2.4.3"
jsonwebtoken "^9.0.0"
pure-rand "^6.0.2"
+ uuid "^9.0.0"
next@13.3.1:
version "13.3.1"
@@ -8967,6 +8968,11 @@ uuid@^8.3.2:
resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
+uuid@^9.0.0:
+ version "9.0.0"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5"
+ integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==
+
v8-compile-cache-lib@^3.0.1:
version "3.0.1"
resolved "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz"