Reformatted files with biome.

Mike Cao 2025-11-22 22:42:42 -08:00
parent d51f0641a6
commit fa8d8055df
558 changed files with 2108 additions and 2379 deletions

View file

@@ -3,8 +3,8 @@ import { ROLE_PERMISSIONS, ROLES, SHARE_TOKEN_HEADER } from '@/lib/constants';
 import { secret } from '@/lib/crypto';
 import { getRandomChars } from '@/lib/generate';
 import { createSecureToken, parseSecureToken, parseToken } from '@/lib/jwt';
-import { ensureArray } from '@/lib/utils';
 import redis from '@/lib/redis';
+import { ensureArray } from '@/lib/utils';
 import { getUser } from '@/queries/prisma/user';
 const log = debug('umami:auth');
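Most of the hunks in this commit are Biome's import organizer at work: import statements are sorted by module source, which is why `@/lib/redis` now precedes `@/lib/utils` above. A minimal illustration of the resulting order, built only from modules that appear in this diff:

```ts
// Biome sorts import statements by source; named specifiers inside a
// brace list are sorted alphabetically as well (see the storage import).
import redis from '@/lib/redis'; // 'redis' sorts before 'storage'
import { getItem, removeItem, setItem } from '@/lib/storage';
import { ensureArray } from '@/lib/utils';
```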

View file

@@ -1,4 +1,4 @@
-import { getItem, setItem, removeItem } from '@/lib/storage';
+import { getItem, removeItem, setItem } from '@/lib/storage';
 import { AUTH_TOKEN } from './constants';
 export function getClientAuthToken() {

View file

@@ -1,4 +1,4 @@
-import crypto from 'crypto';
+import crypto from 'node:crypto';
 import { v4, v5 } from 'uuid';
 const ALGORITHM = 'aes-256-gcm';
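The `node:` prefix here is Biome's `useNodejsImportProtocol` rule. A short sketch of why the explicit scheme is preferred:

```ts
// 'node:crypto' unambiguously targets the Node.js builtin and can never
// be shadowed by an npm package that happens to be named 'crypto'.
import crypto from 'node:crypto';

// Behaves identically to the bare 'crypto' import it replaces.
const iv = crypto.randomBytes(16);
```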

View file

@@ -1,5 +1,5 @@
 import { DATA_TYPE, DATETIME_REGEX } from './constants';
-import { DynamicDataType } from './types';
+import type { DynamicDataType } from './types';
 export function flattenJSON(
   eventData: Record<string, any>,
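`import type` marks an import as type-only, so it is erased from the emitted JavaScript. A minimal sketch reusing the module paths from this hunk:

```ts
// Erased at compile time: './types' is never loaded at runtime, so this
// import cannot introduce side effects or require cycles.
import type { DynamicDataType } from './types';

// A value import like this one survives into the emitted output.
import { DATA_TYPE, DATETIME_REGEX } from './constants';
```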

View file

@@ -1,5 +1,3 @@
-import { getDateLocale } from '@/lib/lang';
-import { DateRange } from '@/lib/types';
 import {
   addDays,
   addHours,
@@ -40,6 +38,8 @@ import {
   subYears,
 } from 'date-fns';
 import { utcToZonedTime } from 'date-fns-tz';
+import { getDateLocale } from '@/lib/lang';
+import type { DateRange } from '@/lib/types';
 export const TIME_UNIT = {
   minute: 'minute',
@@ -371,5 +371,5 @@ export function getMonthDateRangeValue(date: Date) {
 }
 export function isInvalidDate(date: any) {
-  return date instanceof Date && isNaN(date.getTime());
+  return date instanceof Date && Number.isNaN(date.getTime());
 }
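The `isNaN` → `Number.isNaN` swap is behavior-preserving here, since `getTime()` already returns a number, but the two functions differ on non-numeric input:

```ts
isNaN('oops');        // true  — the argument is coerced to a number first
Number.isNaN('oops'); // false — true only for the literal NaN value
Number.isNaN(NaN);    // true
Number.isNaN(new Date('nope').getTime()); // true — an invalid Date yields NaN
```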

View file

@@ -5,12 +5,12 @@ export const KAFKA = 'kafka';
 export const KAFKA_PRODUCER = 'kafka-producer';
 // Fixes issue with converting bigint values
-BigInt.prototype['toJSON'] = function () {
+BigInt.prototype.toJSON = function () {
   return Number(this);
 };
 export function getDatabaseType(url = process.env.DATABASE_URL) {
-  const type = url && url.split(':')[0];
+  const type = url?.split(':')[0];
   if (type === 'postgres') {
     return POSTGRESQL;
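Two distinct fixes in this hunk. The `BigInt.prototype.toJSON` patch exists because `JSON.stringify` has no default BigInt serialization, and the optional chain is equivalent to the `&&` guard for string inputs:

```ts
// Without the toJSON patch: TypeError: Do not know how to serialize a BigInt.
// With it, BigInt values are coerced to Number (lossy beyond
// Number.MAX_SAFE_INTEGER).
JSON.stringify({ rows: 42n });

// For url: string | undefined, both forms yield undefined when url is
// missing; ?. just avoids repeating the operand.
declare const url: string | undefined;
const type = url?.split(':')[0];
```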

View file

@@ -1,11 +1,11 @@
 import path from 'node:path';
-import { UAParser } from 'ua-parser-js';
 import { browserName, detectOS } from 'detect-browser';
-import isLocalhost from 'is-localhost-ip';
 import ipaddr from 'ipaddr.js';
+import isLocalhost from 'is-localhost-ip';
 import maxmind from 'maxmind';
+import { UAParser } from 'ua-parser-js';
+import { getIpAddress, stripPort } from '@/lib/ip';
 import { safeDecodeURIComponent } from '@/lib/url';
-import { stripPort, getIpAddress } from '@/lib/ip';
 const MAXMIND = 'maxmind';
@@ -145,6 +145,8 @@ export function hasBlockedIp(clientIp: string) {
         return true;
       }
     }
+    return false;
+  });
 }

View file

@@ -77,7 +77,7 @@ export function stringToColor(str: string) {
   let color = '#';
   for (let i = 0; i < 3; i++) {
     const value = (hash >> (i * 8)) & 0xff;
-    color += ('00' + value.toString(16)).slice(-2);
+    color += `00${value.toString(16)}`.slice(-2);
   }
   return color;
 }
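The template-literal rewrite keeps the original zero-padding trick: prepend `'00'`, then take the last two characters. `padStart` expresses the same intent more directly, shown here only as a comparison:

```ts
const byteToHex = (value: number) => `00${value.toString(16)}`.slice(-2);
const byteToHexAlt = (value: number) => value.toString(16).padStart(2, '0');

byteToHex(5);      // '05'
byteToHex(255);    // 'ff'
byteToHexAlt(5);   // '05'
```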

View file

@@ -1,8 +1,8 @@
-import { serializeError } from 'serialize-error';
+import type * as tls from 'node:tls';
 import debug from 'debug';
-import { Kafka, Producer, RecordMetadata, SASLOptions, logLevel } from 'kafkajs';
+import { Kafka, logLevel, type Producer, type RecordMetadata, type SASLOptions } from 'kafkajs';
+import { serializeError } from 'serialize-error';
 import { KAFKA, KAFKA_PRODUCER } from '@/lib/db';
-import * as tls from 'tls';
 const log = debug('umami:kafka');
 const CONNECT_TIMEOUT = 5000;
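Here Biome converted the kafkajs specifiers that only appear in type positions to inline `type` modifiers (TypeScript 4.5+), and the `tls` namespace to a fully type-only import:

```ts
// Kafka and logLevel are runtime values; the type-only specifiers are
// erased from the emitted JavaScript.
import { Kafka, logLevel, type Producer, type SASLOptions } from 'kafkajs';

// A type-only namespace import: no require('node:tls') is emitted.
import type * as tls from 'node:tls';
```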

View file

@@ -1,23 +1,24 @@
 import {
   arSA,
   be,
-  bn,
   bg,
+  bn,
   bs,
+  ca,
   cs,
-  sk,
   da,
   de,
   el,
-  enUS,
   enGB,
+  enUS,
   es,
+  faIR,
   fi,
   fr,
-  faIR,
   he,
   hi,
   hr,
+  hu,
   id,
   it,
   ja,
@@ -33,18 +34,17 @@ import {
   ptBR,
   ro,
   ru,
+  sk,
   sl,
   sv,
   ta,
   th,
   tr,
   uk,
+  uz,
+  vi,
   zhCN,
   zhTW,
-  ca,
-  hu,
-  vi,
-  uz,
 } from 'date-fns/locale';

 export const languages = {
View file

@@ -1,4 +1,4 @@
-import { Website, Session } from '@/generated/prisma/client';
+import type { Session, Website } from '@/generated/prisma/client';
 import redis from '@/lib/redis';
 import { getWebsite } from '@/queries/prisma';
 import { getWebsiteSession } from '@/queries/sql';

View file

@@ -1,5 +1,5 @@
 import { FILTER_COLUMNS, OPERATORS } from '@/lib/constants';
-import { Filter, QueryFilters, QueryOptions } from '@/lib/types';
+import type { Filter, QueryFilters, QueryOptions } from '@/lib/types';
 export function parseFilterValue(param: any) {
   if (typeof param === 'string') {

View file

@@ -1,16 +1,16 @@
 import {
   Children,
   cloneElement,
-  FC,
+  type FC,
   Fragment,
   isValidElement,
-  ReactElement,
-  ReactNode,
+  type ReactElement,
+  type ReactNode,
 } from 'react';
 export function getFragmentChildren(children: ReactNode) {
   return (children as ReactElement)?.type === Fragment
-    ? (children as ReactElement).props['children']
+    ? (children as ReactElement).props.children
     : children;
 }
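`props['children']` and `props.children` are identical lookups; Biome's `useLiteralKeys` rule prefers dot notation whenever the key is a valid identifier:

```ts
const props: { children?: unknown } = { children: 'hello' };

// Same property, same result — dot notation is simply the idiomatic form.
console.log(props['children'] === props.children); // true
```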

View file

@@ -16,7 +16,7 @@ export async function parseRequest(
   const url = new URL(request.url);
   let query = Object.fromEntries(url.searchParams);
   let body = await getJsonBody(request);
-  let error: () => void | undefined;
+  let error: () => undefined | undefined;
   let auth = null;
   if (schema) {
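One precedence subtlety worth flagging in this hunk: in a function type, a union written after `=>` binds to the return type, so `() => void | undefined` is a function returning `void | undefined`, not an optional function. If an optionally-assigned callback was intended (the neighboring `let auth = null` hints at that, though this is a guess), parentheses would be needed:

```ts
type ReturnsVoidOrUndefined = () => void | undefined; // what the code says
type OptionalCallback = (() => void) | undefined;     // a function, or undefined
```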

View file

@@ -116,8 +116,8 @@ export const goalReportSchema = z.object({
     property: z.string().optional(),
   })
   .refine(data => {
-    if (data['type'] === 'event' && data['property']) {
-      return data['operator'] && data['property'];
+    if (data.type === 'event' && data.property) {
+      return data.operator && data.property;
     }
     return true;
   }),

View file

@@ -1,6 +1,6 @@
-import { UseQueryOptions } from '@tanstack/react-query';
-import { DATA_TYPE, ROLES, OPERATORS } from './constants';
-import { TIME_UNIT } from './date';
+import type { UseQueryOptions } from '@tanstack/react-query';
+import type { DATA_TYPE, OPERATORS, ROLES } from './constants';
+import type { TIME_UNIT } from './date';
 export type ObjectValues<T> = T[keyof T];
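Note that `DATA_TYPE`, `OPERATORS`, and `ROLES` are runtime constants, yet they can still be imported with `import type` because this module only references them in type positions (via `typeof`). A sketch under that assumption, with `DataType` as a made-up alias for illustration:

```ts
import type { DATA_TYPE } from './constants';

export type ObjectValues<T> = T[keyof T];

// typeof is a type position, so the type-only import suffices here.
export type DataType = ObjectValues<typeof DATA_TYPE>;
```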