update api to new CH columns

This commit is contained in:
Francis Cao 2022-10-08 16:12:33 -07:00
parent 36edbe2f4c
commit 96add409b6
19 changed files with 74 additions and 48 deletions

View file

@@ -36,7 +36,7 @@ async function relationalQuery(
}
async function clickhouseQuery(
website_id,
website_uuid,
start_at,
end_at,
timezone = 'UTC',
@@ -44,7 +44,7 @@ async function clickhouseQuery(
filters = {},
) {
const { rawQuery, getDateQuery, getBetweenDates, getFilterQuery } = clickhouse;
const params = [website_id];
const params = [website_uuid];
return rawQuery(
`select

View file

@@ -25,19 +25,23 @@ function relationalQuery(websites, start_at) {
}
function clickhouseQuery(websites, start_at) {
const { rawQuery, getDateFormat } = clickhouse;
const { rawQuery, getDateFormat, getCommaSeparatedStringFormat } = clickhouse;
return rawQuery(
`select
event_uuid,
event_id,
website_id,
session_uuid,
session_id,
created_at,
url,
event_name
from event
where event_name != ''
and ${websites && websites.length > 0 ? `website_id in (${websites.join(',')})` : '0 = 0'}
and ${
websites && websites.length > 0
? `website_id in (${getCommaSeparatedStringFormat(websites, websites.website_uuid)})`
: '0 = 0'
}
and created_at >= ${getDateFormat(start_at)}`,
);
}

View file

@@ -32,14 +32,14 @@ async function relationalQuery(website_id, { session_id, url, event_name, event_
}
async function clickhouseQuery(
website_id,
website_uuid,
{ session: { country, ...sessionArgs }, event_uuid, url, event_name, event_data },
) {
const { getDateFormat, sendMessage } = kafka;
const params = {
event_uuid,
website_id,
website_uuid,
created_at: getDateFormat(new Date()),
url: url?.substring(0, URL_LENGTH),
event_name: event_name?.substring(0, EVENT_NAME_LENGTH),

View file

@@ -34,9 +34,9 @@ async function relationalQuery(website_id, { startDate, endDate, column, table,
);
}
async function clickhouseQuery(website_id, { startDate, endDate, column, filters = {} }) {
async function clickhouseQuery(website_uuid, { startDate, endDate, column, filters = {} }) {
const { rawQuery, parseFilters, getBetweenDates } = clickhouse;
const params = [website_id];
const params = [website_uuid];
const { pageviewQuery, sessionQuery, eventQuery } = parseFilters(column, filters, params);
return rawQuery(

View file

@@ -45,11 +45,11 @@ async function relationalQuery(
}
async function clickhouseQuery(
website_id,
website_uuid,
{ start_at, end_at, timezone = 'UTC', unit = 'day', count = '*', filters = {} },
) {
const { parseFilters, rawQuery, getDateStringQuery, getDateQuery, getBetweenDates } = clickhouse;
const params = [website_id];
const params = [website_uuid];
const { pageviewQuery, sessionQuery } = parseFilters(null, filters, params);
return rawQuery(
@@ -59,7 +59,7 @@ async function clickhouseQuery(
from
(select
${getDateQuery('created_at', unit, timezone)} t,
count(${count !== '*' ? 'distinct session_uuid' : count}) y
count(${count !== '*' ? 'distinct session_id' : count}) y
from event
where event_name = ''
and website_id= $1

View file

@@ -25,15 +25,21 @@ async function relationalQuery(websites, start_at) {
}
async function clickhouseQuery(websites, start_at) {
const { getCommaSeparatedStringFormat } = clickhouse;
return clickhouse.rawQuery(
`select
website_id,
session_uuid,
session_id,
created_at,
url
from event
where event_name = ''
and ${websites && websites.length > 0 ? `website_id in (${websites.join(',')})` : '0 = 0'}
and ${
websites && websites.length > 0
? `website_id in (${getCommaSeparatedStringFormat(websites, websites.website_uuid)})`
: '0 = 0'
}
and created_at >= ${clickhouse.getDateFormat(start_at)}`,
);
}

View file

@@ -22,12 +22,12 @@ async function relationalQuery(website_id, { session: { session_id }, url, refer
}
async function clickhouseQuery(
website_id,
website_uuid,
{ session: { country, ...sessionArgs }, url, referrer },
) {
const { getDateFormat, sendMessage } = kafka;
const params = {
website_id: website_id,
website_id: website_uuid,
created_at: getDateFormat(new Date()),
url: url?.substring(0, URL_LENGTH),
referrer: referrer?.substring(0, URL_LENGTH),

View file

@@ -39,14 +39,14 @@ async function relationalQuery(website_id, data) {
}
async function clickhouseQuery(
website_id,
website_uuid,
{ session_uuid, hostname, browser, os, screen, language, country, device },
) {
const { getDateFormat, sendMessage } = kafka;
const params = {
session_uuid,
website_id,
website_uuid,
created_at: getDateFormat(new Date()),
hostname,
browser,

View file

@@ -23,12 +23,12 @@ async function relationalQuery(website_id) {
);
}
async function clickhouseQuery(website_id) {
async function clickhouseQuery(website_uuid) {
const { rawQuery, getDateFormat } = clickhouse;
const params = [website_id];
const params = [website_uuid];
return rawQuery(
`select count(distinct session_uuid) x
`select count(distinct session_id) x
from event
where website_id = $1
and created_at >= ${getDateFormat(subMinutes(new Date(), 5))}`,

View file

@@ -41,19 +41,19 @@ async function relationalQuery(website_id, { start_at, end_at, filters = {} }) {
);
}
async function clickhouseQuery(website_id, { start_at, end_at, filters = {} }) {
async function clickhouseQuery(website_uuid, { start_at, end_at, filters = {} }) {
const { rawQuery, getDateQuery, getBetweenDates, parseFilters } = clickhouse;
const params = [website_id];
const params = [website_uuid];
const { pageviewQuery, sessionQuery } = parseFilters(null, filters, params);
return rawQuery(
`select
sum(t.c) as "pageviews",
count(distinct t.session_uuid) as "uniques",
count(distinct t.session_id) as "uniques",
sum(if(t.c = 1, 1, 0)) as "bounces",
sum(if(max_time < min_time + interval 1 hour, max_time-min_time, 0)) as "totaltime"
from (
select session_uuid,
select session_id,
${getDateQuery('created_at', 'day')} time_series,
count(*) c,
min(created_at) min_time,
@@ -64,7 +64,7 @@ async function clickhouseQuery(website_id, { start_at, end_at, filters = {} }) {
and ${getBetweenDates('created_at', start_at, end_at)}
${pageviewQuery}
${sessionQuery}
group by session_uuid, time_series
group by session_id, time_series
) t;`,
params,
);