Mirror of https://github.com/umami-software/umami.git, synced 2025-12-08 05:12:36 +01:00.
Database refactoring.
This commit is contained in:
parent
bb184dc2cc
commit
467c7f289f
37 changed files with 566 additions and 591 deletions
82
lib/kafka.js
82
lib/kafka.js
|
|
@ -1,57 +1,53 @@
|
|||
import { Kafka, logLevel } from 'kafkajs';
|
||||
import dateFormat from 'dateformat';
|
||||
import debug from 'debug';
|
||||
import { KAFKA, KAFKA_PRODUCER } from 'lib/db';
|
||||
|
||||
export function getClient() {
|
||||
if (!process.env.KAFKA_URL) {
|
||||
const log = debug('kafka');
|
||||
|
||||
/**
 * Builds a Kafka client from environment configuration.
 *
 * KAFKA_URL optionally carries SASL credentials in its userinfo part
 * (e.g. kafka://user:pass@host); KAFKA_BROKER is a comma-separated
 * broker list. Both must be set for a client to be created.
 *
 * @returns {Kafka|null} a configured client, or null when Kafka is not configured
 */
function getClient() {
  if (!process.env.KAFKA_URL || !process.env.KAFKA_BROKER) {
    return null;
  }

  const { username, password } = new URL(process.env.KAFKA_URL);
  const brokers = process.env.KAFKA_BROKER.split(',');

  // URL.username / URL.password are percent-encoded per the WHATWG URL
  // spec; decode them so credentials containing '@', '%', ':' etc.
  // reach the SASL handshake unmangled.
  const ssl =
    username && password
      ? {
          ssl: true,
          sasl: {
            mechanism: 'plain',
            username: decodeURIComponent(username),
            password: decodeURIComponent(password),
          },
        }
      : {};

  const client = new Kafka({
    clientId: 'umami',
    brokers,
    connectionTimeout: 3000,
    logLevel: logLevel.ERROR,
    ...ssl,
  });

  // Cache on `global` outside production so dev hot-reloads reuse one client.
  if (process.env.NODE_ENV !== 'production') {
    global[KAFKA] = client;
  }

  return client;
}
|
||||
|
||||
/**
 * Creates and connects a producer on the module-level Kafka client.
 *
 * Outside production the connected producer is cached on `global` under
 * KAFKA_PRODUCER so hot-reloads do not open additional connections.
 *
 * @returns {Promise<import('kafkajs').Producer>} a connected producer
 */
async function getProducer() {
  const newProducer = kafka.producer();

  await newProducer.connect();

  if (process.env.NODE_ENV !== 'production') {
    global[KAFKA_PRODUCER] = newProducer;
  }

  return newProducer;
}
|
||||
|
||||
|
|
@ -60,7 +56,7 @@ function getDateFormat(date) {
|
|||
}
|
||||
|
||||
async function sendMessage(params, topic) {
|
||||
await kafkaProducer.send({
|
||||
await producer.send({
|
||||
topic,
|
||||
messages: [
|
||||
{
|
||||
|
|
@ -72,7 +68,19 @@ async function sendMessage(params, topic) {
|
|||
});
|
||||
}
|
||||
|
||||
// Initialization — resolve the shared client and producer once per process,
// reusing the dev-mode `global` caches populated by getClient()/getProducer().
let kafka;
let producer;

(async () => {
  kafka = global[KAFKA] || getClient();

  // getClient() returns null when KAFKA_URL / KAFKA_BROKER are unset, and
  // getProducer() dereferences the client (`kafka.producer()`); only attempt
  // a producer when a client actually exists, to avoid a TypeError at startup.
  if (kafka) {
    producer = global[KAFKA_PRODUCER] || (await getProducer());
  }
})();

// NOTE(review): this export evaluates while the async IIFE may still be
// pending, so `producer` can be undefined at import time — confirm callers
// tolerate that (or read it lazily) before relying on it.
export default {
  client: kafka,
  producer: producer,
  log,
  getDateFormat,
  sendMessage,
};
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue