Merge branch 'dev' of https://github.com/umami-software/umami into analytics

commit 4e24013a77
Author: Francis Cao
Date:   2024-12-16 08:43:14 -08:00

6 changed files with 26 additions and 16 deletions

@@ -1,5 +1,6 @@
 generator client {
-  provider = "prisma-client-js"
+  provider      = "prisma-client-js"
+  binaryTargets = ["native", "linux-musl-openssl-3.0.x", "linux-musl-arm64-openssl-3.0.x"]
 }

 datasource db {
@@ -19,10 +20,10 @@ model User {
   updatedAt DateTime? @updatedAt @map("updated_at") @db.Timestamp(0)
   deletedAt DateTime? @map("deleted_at") @db.Timestamp(0)
-  websiteUser Website[] @relation("user")
-  websiteCreateUser Website[] @relation("createUser")
-  teamUser TeamUser[]
-  report Report[]
+  websiteUser Website[] @relation("user")
+  websiteCreateUser Website[] @relation("createUser")
+  teamUser TeamUser[]
+  report Report[]

   @@map("user")
 }
@@ -176,8 +177,8 @@ model Team {
   updatedAt DateTime? @updatedAt @map("updated_at") @db.Timestamp(0)
   deletedAt DateTime? @map("deleted_at") @db.Timestamp(0)
-  website Website[]
-  teamUser TeamUser[]
+  website Website[]
+  teamUser TeamUser[]

   @@index([accessCode])
   @@map("team")

@@ -1,6 +1,6 @@
 generator client {
   provider      = "prisma-client-js"
-  binaryTargets = ["native", "linux-musl-openssl-3.0.x"]
+  binaryTargets = ["native", "linux-musl-openssl-3.0.x", "linux-musl-arm64-openssl-3.0.x"]
 }

 datasource db {

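Both schema files end up with the same generator block: the new linux-musl-arm64-openssl-3.0.x entry makes `prisma generate` also bundle the query engine for arm64 musl (Alpine) containers, such as the Docker image running on Apple silicon. As a quick way to see which target a container resolves to, a minimal sketch (not part of the commit; the mapping to the target name is an assumption based on Prisma's naming scheme):

    // check-target.ts -- hypothetical helper, not in the repo.
    // Prints platform and arch so you can match them to a Prisma binaryTarget,
    // e.g. "linux arm64" inside an Alpine image -> linux-musl-arm64-openssl-3.0.x.
    import * as os from 'node:os';

    console.log(os.platform(), os.arch());
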
@@ -1,6 +1,6 @@
 {
   "name": "umami",
-  "version": "2.15.0",
+  "version": "2.15.1",
   "description": "A simple, fast, privacy-focused alternative to Google Analytics.",
   "author": "Umami Software, Inc. <hello@umami.is>",
   "license": "MIT",

@@ -23,5 +23,12 @@ if (!process.env.SKIP_DB_CHECK && !process.env.DATABASE_TYPE) {
 }

 if (process.env.CLOUD_MODE) {
-  checkMissing(['CLOUD_URL', 'KAFKA_BROKER', 'KAFKA_URL', 'REDIS_URL']);
+  checkMissing([
+    'CLOUD_URL',
+    'KAFKA_BROKER',
+    'KAFKA_URL',
+    'REDIS_URL',
+    'KAFKA_SASL_MECHANISM',
+    'KAFKA_SSL',
+  ]);
 }

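`checkMissing` itself is defined earlier in this script; a minimal sketch of the fail-fast behavior it presumably implements (the name and call shape come from the diff, the body is an assumption):

    // Hypothetical sketch of checkMissing -- the real implementation may differ.
    // Exits the process when any required environment variable is unset.
    function checkMissing(vars: string[]): void {
      const missing = vars.filter(name => !process.env[name]);
      if (missing.length > 0) {
        console.error(`Missing environment variables: ${missing.join(', ')}`);
        process.exit(1);
      }
    }

With this change, cloud-mode startup also fails fast when KAFKA_SASL_MECHANISM or KAFKA_SSL is undefined, matching the new reads in the Kafka client below.
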
@@ -1,5 +1,5 @@
 import debug from 'debug';
-import { Kafka, Mechanism, Producer, RecordMetadata, SASLOptions, logLevel } from 'kafkajs';
+import { Kafka, Producer, RecordMetadata, SASLOptions, logLevel } from 'kafkajs';
 import { KAFKA, KAFKA_PRODUCER } from 'lib/db';
 import * as tls from 'tls';
@@ -12,13 +12,15 @@ const enabled = Boolean(process.env.KAFKA_URL && process.env.KAFKA_BROKER);
 function getClient() {
   const { username, password } = new URL(process.env.KAFKA_URL);
   const brokers = process.env.KAFKA_BROKER.split(',');
-  const ssl: { ssl?: tls.ConnectionOptions | boolean; sasl?: SASLOptions | Mechanism } =
+  const sslEnabled = process.env.KAFKA_SSL.toLowerCase() === 'true';
+  const mechanism = process.env.KAFKA_SASL_MECHANISM as 'plain' | 'scram-sha-256' | 'scram-sha-512';
+  const ssl: { ssl?: tls.ConnectionOptions | boolean; sasl?: SASLOptions } =
     username && password
       ? {
-          ssl: true,
+          ssl: sslEnabled,
           sasl: {
-            mechanism: 'scram-sha-256',
+            mechanism,
             username,
             password,
           },

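Net effect: TLS and the SASL mechanism were hard-coded to `true` and 'scram-sha-256'; they are now driven by KAFKA_SSL and KAFKA_SASL_MECHANISM. A hedged sketch of the kafkajs client this configuration produces (env values are illustrative, not defaults):

    // Illustrative env for the new code path:
    //   KAFKA_URL=kafka://myuser:mypass@host     (credentials are parsed from the URL)
    //   KAFKA_BROKER=broker1:9092,broker2:9092
    //   KAFKA_SSL=true
    //   KAFKA_SASL_MECHANISM=scram-sha-512
    import { Kafka } from 'kafkajs';

    const { username, password } = new URL(process.env.KAFKA_URL!);
    const kafka = new Kafka({
      brokers: process.env.KAFKA_BROKER!.split(','),
      ssl: process.env.KAFKA_SSL!.toLowerCase() === 'true',
      sasl: {
        mechanism: 'scram-sha-512', // read from KAFKA_SASL_MECHANISM in the real code
        username,
        password,
      },
    });

One caveat: `process.env.KAFKA_SSL.toLowerCase()` in the new code throws if KAFKA_SSL is unset, which is presumably why the env check above now requires it.
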
@@ -75,7 +75,7 @@ function getDateSQL(field: string, unit: string, timezone?: string): string {
   if (db === MYSQL) {
     if (timezone) {
-      const tz = formatInTimeZone(new Date(), timezone, 'yyyy-MM-dd HH:mm:ss');
+      const tz = formatInTimeZone(new Date(), timezone, 'xxx');
       return `date_format(convert_tz(${field},'+00:00','${tz}'), '${MYSQL_DATE_FORMATS[unit]}')`;
     }
     return `date_format(${field}, '${MYSQL_DATE_FORMATS[unit]}')`;
@@ -90,7 +90,7 @@ function getDateWeeklySQL(field: string, timezone?: string) {
   }
   if (db === MYSQL) {
-    const tz = formatInTimeZone(new Date(), timezone, 'yyyy-MM-dd HH:mm:ss');
+    const tz = formatInTimeZone(new Date(), timezone, 'xxx');
     return `date_format(convert_tz(${field},'+00:00','${tz}'), '%w:%H')`;
   }
 }
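This last change is the behavioral fix in the release: MySQL's convert_tz needs a named time zone or a UTC offset as its target, but the old code handed it a fully formatted local timestamp. The date-fns token 'xxx' formats only the ISO-8601 offset. A quick illustration with date-fns-tz:

    import { formatInTimeZone } from 'date-fns-tz';

    const now = new Date('2024-12-16T16:43:14Z');

    // Old format string: a full local timestamp -- not a valid convert_tz target.
    formatInTimeZone(now, 'Asia/Tokyo', 'yyyy-MM-dd HH:mm:ss'); // "2024-12-17 01:43:14"

    // New format string: just the UTC offset, which is exactly what
    // convert_tz(field, '+00:00', tz) expects.
    formatInTimeZone(now, 'Asia/Tokyo', 'xxx'); // "+09:00"

Since the offset is computed from `new Date()` at query time, it reflects the zone's current DST state rather than the state at each row's timestamp, the usual trade-off of offset-based conversion.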