Compare commits

...

17 commits

Author SHA1 Message Date
Mike Cao    227201a73c  Merge pull request #3706 from metaloozee/3703 (fix: Redirect loop on auth failure)  2025-11-08 11:32:02 -08:00
metaloozee  1879c161ee  fix: Redirect loop on auth failure  2025-11-09 00:22:06 +05:30
Mike Cao    6ba9c1c40c  New docker workflow.  2025-11-07 22:41:10 -08:00
Mike Cao    de6515139e  Fixed Docker permissions.  2025-11-07 18:17:51 -08:00
Mike Cao    e3ca002d77  Fixed tags in build.  2025-11-07 14:35:05 -08:00
Mike Cao    8119dae3c3  Updated GH workflow.  2025-11-07 13:59:50 -08:00
Mike Cao    6ee93f7ac9  Updated README and cd.yml.  2025-11-07 12:21:17 -08:00
Mike Cao    3e9ca8761e  Removed workflow script.  2025-11-07 09:15:01 -08:00
Mike Cao    d2f512cae7  Don't publish .sig files.  2025-11-07 09:14:19 -08:00
Mike Cao    df3ca02e8b  Always push latest for Docker.  2025-11-07 08:52:16 -08:00
Mike Cao    a90b788138  Updated cd script.  2025-11-07 00:09:53 -08:00
Mike Cao    dd6556968c  Updated image tag.  2025-11-06 23:58:12 -08:00
Mike Cao    04a05bbf26  Added workflow input.  2025-11-06 23:35:14 -08:00
Mike Cao    437c9603db  Fixed build.  2025-11-06 22:58:26 -08:00
Mike Cao    03ed5349f4  Merge branch 'dev'  2025-11-06 22:50:30 -08:00
Mike Cao    4272bb4c4d  Removed db types from docker build.  2025-11-06 22:48:34 -08:00
Mike Cao    6135ef9dd2  Fixed test.  2025-11-06 22:24:08 -08:00
8 changed files with 79 additions and 143 deletions

View file

@@ -1,58 +0,0 @@
name: Create docker images (manual)
on:
  workflow_dispatch:
    inputs:
      version:
        type: string
        description: Version
        required: true
jobs:
  build:
    name: Build, push, and deploy
    runs-on: ubuntu-latest
    strategy:
      matrix:
        db-type: [postgresql]
    steps:
      - uses: actions/checkout@v3
      - name: Extract version parts from input
        id: extract_version
        run: |
          echo "version=$(echo ${{ github.event.inputs.version }})" >> $GITHUB_ENV
          echo "major=$(echo ${{ github.event.inputs.version }} | cut -d. -f1)" >> $GITHUB_ENV
          echo "minor=$(echo ${{ github.event.inputs.version }} | cut -d. -f2)" >> $GITHUB_ENV
      - name: Generate tags
        id: generate_tags
        run: |
          echo "tag_major=$(echo ${{ matrix.db-type }}-${{ env.major }})" >> $GITHUB_ENV
          echo "tag_minor=$(echo ${{ matrix.db-type }}-${{ env.major }}.${{ env.minor }})" >> $GITHUB_ENV
          echo "tag_patch=$(echo ${{ matrix.db-type }}-${{ env.version }})" >> $GITHUB_ENV
          echo "tag_latest=$(echo ${{ matrix.db-type }}-latest)" >> $GITHUB_ENV
      - uses: mr-smithers-excellent/docker-build-push@v6
        name: Build & push Docker image to ghcr.io for ${{ matrix.db-type }}
        with:
          image: umami
          tags: ${{ env.tag_major }}, ${{ env.tag_minor }}, ${{ env.tag_patch }}, ${{ env.tag_latest }}
          buildArgs: DATABASE_TYPE=${{ matrix.db-type }}
          registry: ghcr.io
          multiPlatform: true
          platform: linux/amd64,linux/arm64
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - uses: mr-smithers-excellent/docker-build-push@v6
        name: Build & push Docker image to docker.io for ${{ matrix.db-type }}
        with:
          image: umamisoftware/umami
          tags: ${{ env.tag_major }}, ${{ env.tag_minor }}, ${{ env.tag_patch }}, ${{ env.tag_latest }}
          buildArgs: DATABASE_TYPE=${{ matrix.db-type }}
          registry: docker.io
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
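
The manual image workflow deleted above is effectively replaced by the workflow_dispatch version input added to cd.yml in the next file. A minimal sketch of triggering that dispatch with the GitHub CLI (assuming the release workflow file is named cd.yml and `gh` is authenticated against the repository):

```bash
# Dispatch the release workflow manually with an explicit version
# (the leading "v" is optional; the workflow strips it).
gh workflow run cd.yml -f version=3.0.0

# Or dispatch without a version to build from the current branch ref.
gh workflow run cd.yml
```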

View file

@@ -5,6 +5,11 @@ on:
    tags:
      - 'v*.*.*'
  workflow_dispatch:
    inputs:
      version:
        description: 'Optional image version (e.g. 3.0.0, v3.0.0, or 3.0.0-beta.1)'
        required: false
        default: ''
jobs:
  build:
@@ -13,22 +18,20 @@ jobs:
    permissions:
      contents: read
      packages: write
      id-token: write
    strategy:
      matrix:
        db-type: [postgresql]
    steps:
      - uses: actions/checkout@v5
      # Install cosign (for image signing)
      - name: Install cosign
        uses: sigstore/cosign-installer@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Log into GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Log into Docker Hub
        if: github.repository == 'umami-software/umami'
        uses: docker/login-action@v3
@@ -37,44 +40,61 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log into GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Compute version tags
        id: compute
        run: |
          INPUT="${{ github.event.inputs.version }}"
          REF_TYPE="${{ github.ref_type }}"
          REF_NAME="${{ github.ref_name }}"
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            umamisoftware/umami,enable=${{ github.repository == 'umami-software/umami' }}
            ghcr.io/${{ github.repository }}
          flavor: |
            latest=auto
            prefix=${{ matrix.db-type }}-
          tags: |
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
          # Determine version source
          if [[ -n "$INPUT" ]]; then
            VERSION="${INPUT#v}"
          elif [[ "$REF_TYPE" == "tag" ]]; then
            VERSION="${REF_NAME#v}"
          else
            VERSION=""
          fi
          TAGS=""
          if [[ -n "$VERSION" ]]; then
            MAJOR=$(echo "$VERSION" | cut -d. -f1)
            MINOR=$(echo "$VERSION" | cut -d. -f2)
            if [[ "$VERSION" == *-* ]]; then
              # prerelease: only version tag
              TAGS="$VERSION"
            else
              # stable release: version + hierarchy + latest
              TAGS="$VERSION,${MAJOR}.${MINOR},${MAJOR},postgresql-latest,latest"
            fi
          else
            # Non-tag build (e.g. from main branch)
            TAGS="${REF_NAME}"
          fi
          echo "tags=$TAGS" >> $GITHUB_OUTPUT
          echo "Computed tags: $TAGS"
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          build-args: DATABASE_TYPE=${{ matrix.db-type }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
        run: |
          TAGS="${{ steps.compute.outputs.tags }}"
      # Sign the published image digest
      - name: Sign the published Docker image
        env:
          TAGS: ${{ steps.meta.outputs.tags }}
          DIGEST: ${{ steps.build-and-push.outputs.digest }}
        run: echo "${TAGS}" | xargs -I {} cosign sign --yes "{}@${DIGEST}"
          # Set image targets conditionally
          if [[ "${{ github.repository }}" == "umami-software/umami" ]]; then
            IMAGES=("umamisoftware/umami" "ghcr.io/${{ github.repository }}")
          else
            IMAGES=("ghcr.io/${{ github.repository }}")
          fi
          for IMAGE in "${IMAGES[@]}"; do
            echo "Building and pushing $IMAGE with tags: $TAGS"
            docker buildx build \
              --platform linux/amd64,linux/arm64 \
              --push \
              $(echo "$TAGS" | tr ',' '\n' | sed "s|^|--tag ${IMAGE}:|") \
              --cache-from type=gha \
              --cache-to type=gha,mode=max \
              .
          done
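
For a quick local sanity check of the tag logic added above, here is a rough bash sketch; `compute_tags` is an illustrative helper name, not something in the repository, and the real step reads the version from the workflow_dispatch input or the pushed tag:

```bash
#!/usr/bin/env bash
# Mirror of the "Compute version tags" logic for a few sample inputs.
compute_tags() {
  local VERSION="${1#v}"              # strip an optional leading "v"
  if [[ "$VERSION" == *-* ]]; then
    echo "$VERSION"                   # prerelease: single tag only
  else
    local MAJOR MINOR
    MAJOR=$(echo "$VERSION" | cut -d. -f1)
    MINOR=$(echo "$VERSION" | cut -d. -f2)
    echo "$VERSION,${MAJOR}.${MINOR},${MAJOR},postgresql-latest,latest"
  fi
}

compute_tags v3.0.1        # -> 3.0.1,3.0,3,postgresql-latest,latest
compute_tags 3.1.0-beta.2  # -> 3.1.0-beta.2
```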

View file

@@ -1,22 +0,0 @@
name: Delete untagged GHCR images
on:
  workflow_dispatch: # Run manually from the Actions tab
jobs:
  cleanup:
    name: Delete all untagged images
    runs-on: ubuntu-latest
    permissions:
      packages: write
      contents: read
    steps:
      - name: Delete untagged GHCR images
        uses: actions/delete-package-versions@v5
        with:
          package-name: "umami" # 👈 change if your GHCR package name differs
          package-type: "container"
          delete-only-untagged-versions: true
          min-versions-to-keep: 0
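
If untagged-image cleanup is still wanted after deleting this workflow, a rough manual equivalent using the GitHub REST API via `gh` is sketched below. It assumes an org-owned container package named umami; the org, package name, and jq filter are illustrative, not taken from the repository:

```bash
# List versions of the container package and delete those with no tags.
ORG=umami-software
PACKAGE=umami

gh api "orgs/$ORG/packages/container/$PACKAGE/versions" --paginate \
  --jq '.[] | select(.metadata.container.tags | length == 0) | .id' |
while read -r version_id; do
  echo "Deleting untagged version $version_id"
  gh api -X DELETE "orgs/$ORG/packages/container/$PACKAGE/versions/$version_id"
done
```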

View file

@@ -89,7 +89,7 @@ docker compose up -d
Alternatively, to pull just the Umami Docker image with PostgreSQL support:
```bash
docker pull docker.umami.is/umami-software/umami:postgresql-latest
docker pull docker.umami.is/umami-software/umami:latest
```
---
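
Building on the pull command above, a minimal way to run the pulled image (a sketch only; DATABASE_URL and APP_SECRET are the usual Umami settings, and the connection string shown here is just an example):

```bash
docker run -d --name umami -p 3000:3000 \
  -e DATABASE_URL="postgresql://umami:password@db-host:5432/umami" \
  -e APP_SECRET="replace-with-a-random-string" \
  docker.umami.is/umami-software/umami:latest
```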

View file

@@ -1,7 +1,7 @@
---
services:
  umami:
    image: ghcr.io/umami-software/umami:postgresql-latest
    image: ghcr.io/umami-software/umami:latest
    ports:
      - "3000:3000"
    environment:

View file

@@ -9,18 +9,14 @@ import { MobileNav } from '@/app/(main)/MobileNav';
export function App({ children }) {
  const { user, isLoading, error } = useLoginQuery();
  const config = useConfig();
  const { pathname, router } = useNavigation();
  const { pathname } = useNavigation();
  if (isLoading || !config) {
    return <Loading placement="absolute" />;
  }
  if (error) {
    if (process.env.cloudMode) {
      window.location.href = '/login';
    } else {
      router.push('/login');
    }
    window.location.href = '/login';
    return null;
  }

View file

@@ -13,7 +13,7 @@ export function LogoutPage() {
  async function logout() {
    await post('/auth/logout');
    router.push('/login');
    window.location.href = '/login';
  }
  removeClientAuthToken();

View file

@@ -1,4 +1,4 @@
import * as detect from '../detect';
import { getIpAddress } from '../ip';
const IP = '127.0.0.1';
const BAD_IP = '127.127.127.127';
@@ -6,23 +6,23 @@ const BAD_IP = '127.127.127.127';
test('getIpAddress: Custom header', () => {
  process.env.CLIENT_IP_HEADER = 'x-custom-ip-header';
  expect(detect.getIpAddress(new Headers({ 'x-custom-ip-header': IP }))).toEqual(IP);
  expect(getIpAddress(new Headers({ 'x-custom-ip-header': IP }))).toEqual(IP);
});
test('getIpAddress: CloudFlare header', () => {
  expect(detect.getIpAddress(new Headers({ 'cf-connecting-ip': IP }))).toEqual(IP);
  expect(getIpAddress(new Headers({ 'cf-connecting-ip': IP }))).toEqual(IP);
});
test('getIpAddress: Standard header', () => {
  expect(detect.getIpAddress(new Headers({ 'x-forwarded-for': IP }))).toEqual(IP);
  expect(getIpAddress(new Headers({ 'x-forwarded-for': IP }))).toEqual(IP);
});
test('getIpAddress: CloudFlare header is lower priority than standard header', () => {
  expect(
    detect.getIpAddress(new Headers({ 'cf-connecting-ip': BAD_IP, 'x-forwarded-for': IP })),
  ).toEqual(IP);
  expect(getIpAddress(new Headers({ 'cf-connecting-ip': BAD_IP, 'x-forwarded-for': IP }))).toEqual(
    IP,
  );
});
test('getIpAddress: No header', () => {
  expect(detect.getIpAddress(new Headers())).toEqual(null);
  expect(getIpAddress(new Headers())).toEqual(null);
});