🔀 Merge branch 'master' of github.com:lissy93/dashy into ricardodemauro/master

This commit is contained in:
Alicia Sykes 2026-03-09 14:41:38 +00:00
commit b9ce363f18
44 changed files with 6894 additions and 6749 deletions

View file

@ -4,7 +4,7 @@ So that your PR can be handled effectively, please populate the following fields
-->
**Category**:
> One of: Bugfix / Feature / Code style update / Refactoring Only / Build related changes / Documentation / Other (please specify)
> For example: Bugfix / Feature / Docs / Dependency Updates / Localization / Widget / Theme
**Overview**
> Briefly outline your new changes...
@ -15,13 +15,4 @@ So that your PR can be handled effectively, please populate the following fields
> If you've added any new build scripts, environmental variables, config file options, dependency or devDependency, please outline here
**Screenshot** _(if applicable)_
> If you've introduced any significant UI changes, please include a screenshot
**Code Quality Checklist** _(Please complete)_
- [ ] All changes are backwards compatible
- [ ] All lint checks and tests are passing
- [ ] There are no (new) build warnings or errors
- [ ] _(If a new config option is added)_ Attribute is outlined in the schema and documented
- [ ] _(If a new dependency is added)_ Package is essential, and has been checked out for security or performance
- [ ] _(If significant change)_ Bumps version in package.json
> If you've introduced any significant UI changes, widget or theme please include a screenshot

View file

@ -1,23 +0,0 @@
# When a '/rebase' comment is added to a PR, it will be rebased from the main branch
name: 🏗️ Automatic PR Rebase
on:
  issue_comment:
    types: [created]
jobs:
  rebase:
    name: Rebase
    # Only run for '/rebase' comments on pull requests, made by a repo member
    if: >
      github.event.issue.pull_request != ''
      && contains(github.event.comment.body, '/rebase')
      && github.event.comment.author_association == 'MEMBER'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        # v2 runs on the deprecated Node 12 runtime; v4 is the supported release
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
          fetch-depth: 0 # full history is required in order to rebase
      - name: Rebase
        uses: cirrus-actions/rebase@1.4
        env:
          GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View file

@ -1,18 +0,0 @@
# Builds and publishes Dashy's documentation website
name: 📝 Build Docs Site
on:
  workflow_dispatch:
  push:
    branches: [ WEBSITE/docs-site-source ]
jobs:
  deploy:
    runs-on: ubuntu-latest
    if: github.ref == 'refs/heads/WEBSITE/docs-site-source'
    steps:
      # Pin to a release tag; @master is a mutable ref and a supply-chain risk
      - uses: actions/checkout@v4
      - uses: redkubes/docusaurus-deploy-action@v1
        with:
          source-folder: ./
          git-user: ${{ github.actor }}
          git-password: ${{ secrets.GITHUB_TOKEN }}
          deployment-branch: gh-pages

108
.github/workflows/bump-and-tag.yml vendored Normal file
View file

@ -0,0 +1,108 @@
# When a PR is merged into master, this workflow handles versioning:
# - If code files changed but version wasn't bumped: auto-increments patch version
# - Creates and pushes a git tag for the new version
# - The tag then triggers Docker publishing and release drafting
name: 🔖 Auto Version & Tag
on:
pull_request_target:
types: [closed]
branches: [master]
# Serialise runs so two merged PRs cannot race on the bump commit / tag push
concurrency:
group: auto-version-and-tag
cancel-in-progress: false
permissions:
contents: read
pull-requests: read
jobs:
version-and-tag:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
steps:
- name: Check PR for code changes and version bump 📂
id: check_pr
uses: actions/github-script@v7
with:
script: |
const { owner, repo } = context.repo;
const pull_number = context.payload.pull_request.number;
const files = await github.paginate(
github.rest.pulls.listFiles, { owner, repo, pull_number }
);
// Paths that count as shippable code — changes here warrant a new version/tag
const codePatterns = [
/^src\//, /^services\//, /^public\//, /^Dockerfile$/, /^[^/]+\.js$/,
];
const codeChanged = files.some(f =>
codePatterns.some(p => p.test(f.filename))
);
const pkgChanged = files.some(f => f.filename === 'package.json');
if (!codeChanged && !pkgChanged) {
core.info('No code or package.json changes, skipping');
core.setOutput('needs_bump', 'false');
core.setOutput('needs_tag', 'false');
return;
}
// Did the PR itself already bump package.json's version? Compare the
// version on the merge commit against its first parent (pre-merge master)
let versionBumped = false;
if (pkgChanged) {
const mergeSha = context.payload.pull_request.merge_commit_sha;
const { data: mergeCommit } = await github.rest.git.getCommit({
owner, repo, commit_sha: mergeSha,
});
const parentSha = mergeCommit.parents[0].sha;
const getVersion = async (ref) => {
const { data } = await github.rest.repos.getContent({
owner, repo, path: 'package.json', ref,
});
return JSON.parse(Buffer.from(data.content, 'base64').toString()).version;
};
const [prevVersion, mergeVersion] = await Promise.all([
getVersion(parentSha), getVersion(mergeSha),
]);
versionBumped = prevVersion !== mergeVersion;
core.info(`Version: ${prevVersion} → ${mergeVersion}`);
}
// Bump only when code changed without a matching version bump; tag on either
const needsBump = codeChanged && !versionBumped;
const needsTag = codeChanged || versionBumped;
core.info(`Needs bump: ${needsBump}, Needs tag: ${needsTag}`);
core.setOutput('needs_bump', needsBump.toString());
core.setOutput('needs_tag', needsTag.toString());
# The remaining steps only run when a tag (and possibly a bump commit) is needed
- name: Checkout repository 🛎️
if: steps.check_pr.outputs.needs_tag == 'true'
uses: actions/checkout@v4
with:
# Bot token (when present) so the pushed commit/tag can trigger other workflows
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- name: Configure git identity 👤
if: steps.check_pr.outputs.needs_tag == 'true'
run: |
git config user.name "Liss-Bot"
git config user.email "liss-bot@d0h.co"
# Auto-increment the patch version when code changed but the PR didn't bump it
- name: Bump patch version ⬆️
if: steps.check_pr.outputs.needs_bump == 'true'
run: |
npm version patch --no-git-tag-version
git add package.json
git commit -m "⬆️ Bump version to $(node -p "require('./package.json').version")"
git push
- name: Create and push tag 🏷️
if: steps.check_pr.outputs.needs_tag == 'true'
run: |
VERSION=$(node -p "require('./package.json').version")
git fetch --tags --force
# Idempotent: a re-run (or manual tag) must not fail on an existing tag
if git rev-parse "refs/tags/$VERSION" >/dev/null 2>&1; then
echo "Tag $VERSION already exists, skipping"
exit 0
fi
git tag -a "$VERSION" -m "Release v$VERSION"
git push origin "$VERSION"

View file

@ -1,16 +0,0 @@
# Detect and label pull requests that have merge conflicts
# Runs on each push to master, since new commits there can put open PRs into conflict
name: 🏗️ Check Merge Conflicts
on:
push:
branches:
- master
jobs:
check-conflicts:
runs-on: ubuntu-latest
steps:
# NOTE(review): @master is a mutable ref — consider pinning to a tag or commit SHA
- uses: mschilde/auto-label-merge-conflicts@master
with:
# Label applied to (and removed from) PRs as their mergeability changes
CONFLICT_LABEL_NAME: "🚫 Merge Conflicts"
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
# Retry while GitHub is still computing PR mergeability: 5 attempts, 5s apart
MAX_RETRIES: 5
WAIT_MS: 5000

View file

@ -1,55 +0,0 @@
# When Dashy's version in package.json is updated
# this workflow will create a new tag
# And then publish it to the repository
name: 🏗️ Tag on Version Change
on:
  workflow_dispatch:
  push:
    branches:
      - master
    paths:
      - 'package.json'
jobs:
  tag-if-version-updated:
    runs-on: ubuntu-latest
    steps:
      - name: Check Out Repository 🛎️
        uses: actions/checkout@v4 # v2 runs on the deprecated Node 12 runtime
        with:
          fetch-depth: 0 # full history, so `git rev-list --tags` can see prior tags
      - name: Set Up Python 🐍
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'
      - name: Extract Version from package.json 🔢
        id: package_version
        run: |
          import json, os
          with open('package.json', 'r') as f:
              version = json.load(f)['version']
          # ::set-output is deprecated and disabled; write to $GITHUB_OUTPUT instead
          with open(os.environ['GITHUB_OUTPUT'], 'a') as out:
              out.write(f"VERSION={version}\n")
        shell: python
      - name: Get Latest Tag 🏷️
        id: latest_tag
        run: |
          git fetch --tags
          latest_tag=$(git describe --tags `git rev-list --tags --max-count=1` 2>/dev/null)
          # Fall back to '0' when the repo has no tags yet
          echo "TAG=${latest_tag:-0}" >> "$GITHUB_OUTPUT"
      - name: Create and Push Tag ⤴️
        # Only tag when the version differs from the newest existing tag
        if: steps.package_version.outputs.VERSION != steps.latest_tag.outputs.TAG && steps.latest_tag.outputs.TAG != '0'
        run: |
          git config --local user.email "liss-bot@d0h.co"
          git config --local user.name "Liss-Bot"
          git tag -a ${{ steps.package_version.outputs.VERSION }} -m "Release v${{ steps.package_version.outputs.VERSION }}"
          git push origin ${{ steps.package_version.outputs.VERSION }}
        env:
          GIT_AUTHOR_NAME: Liss-Bot
          GIT_AUTHOR_EMAIL: liss-bot@d0h.co
          GIT_COMMITTER_NAME: Liss-Bot
          GIT_COMMITTER_EMAIL: liss-bot@d0h.co
          GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View file

@ -1,7 +1,10 @@
# Shows changes to any yarn.lock in PR comment
# Useful for easily understanding dependency changes and consequences
name: 💡 Show Dependency Changes
on: [pull_request]
on:
pull_request:
paths:
- 'yarn.lock'
jobs:
check:
runs-on: ubuntu-latest
@ -15,3 +18,4 @@ jobs:
collapsibleThreshold: '25'
failOnDowngrade: 'false'
path: 'yarn.lock'
updateComment: 'true'

View file

@ -3,15 +3,7 @@ name: 🐳 Build + Publish Multi-Platform Image
on:
workflow_dispatch:
push:
branches: ['master']
tags: ['*.*']
paths:
- '**.js'
- 'src/**'
- 'public/**'
- 'services/**'
- '.github/workflows/docker-build-publish.yml'
- 'Dockerfile'
tags: ['*.*.*']
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@ -28,26 +20,21 @@ jobs:
docker:
runs-on: ubuntu-latest
permissions: { contents: read, packages: write }
if: "!contains(github.event.head_commit.message, '[ci-skip]')"
steps:
- name: 🛎️ Checkout Repo
uses: actions/checkout@v3
- name: 🔖 Get App Version
uses: tyankatsu0105/read-package-version-actions@v1
id: package-version
uses: actions/checkout@v4
- name: 🗂️ Make Docker Meta
id: meta
uses: docker/metadata-action@v3
uses: docker/metadata-action@v5
with:
images: |
${{ env.DH_IMAGE }}
ghcr.io/${{ env.GH_IMAGE }}
tags: |
type=ref,event=tag
type=semver,pattern={{version}},enable=false
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}.x
type=raw,value=latest
flavor: |
@ -61,42 +48,31 @@ jobs:
org.opencontainers.image.licenses=MIT
- name: 🔧 Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
with:
platforms: linux/amd64,linux/arm64,linux/arm/v7
- name: 🔧 Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
use-buildkit: true
buildkit-daemon-opts: "--oci-worker-no-process-sandbox"
- name: 👀 Inspect builder
run: |
echo "Name: ${{ steps.buildx.outputs.name }}"
echo "Endpoint: ${{ steps.buildx.outputs.endpoint }}"
echo "Status: ${{ steps.buildx.outputs.status }}"
echo "Flags: ${{ steps.buildx.outputs.flags }}"
echo "Platforms: ${{ steps.buildx.outputs.platforms }}"
uses: docker/setup-buildx-action@v3
- name: 🔑 Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: 🔑 Login to GitHub Container Registry
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: 🚦 Check Registry Status
uses: crazy-max/ghaction-docker-status@v1
uses: crazy-max/ghaction-docker-status@v3
- name: ⚒️ Build and push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile

View file

@ -3,8 +3,7 @@ name: 🏗️ Draft New Release
on:
push:
tags:
- '^[0-9]+\.[0-9]+\.[0-9]+$'
- '**'
- '*.*.*'
workflow_dispatch:
inputs:
tag:
@ -16,21 +15,43 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code 🛎️
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
fetch-depth: 0 # We need all history for generating release notes
fetch-depth: 0
- name: Check if major or minor version changed 🔍
id: version_check
run: |
CURRENT_TAG="${{ github.event.inputs.tag || github.ref_name }}"
git fetch --tags --force
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -v "^${CURRENT_TAG}$" | head -1)
if [ -z "$PREVIOUS_TAG" ]; then
echo "No previous tag found, creating release"
echo "should_release=true" >> $GITHUB_OUTPUT
exit 0
fi
CURRENT_MM=$(echo "$CURRENT_TAG" | sed 's/^v//; s/\([0-9]*\.[0-9]*\)\..*/\1/')
PREVIOUS_MM=$(echo "$PREVIOUS_TAG" | sed 's/^v//; s/\([0-9]*\.[0-9]*\)\..*/\1/')
if [ "$CURRENT_MM" = "$PREVIOUS_MM" ]; then
echo "Patch-only bump ($PREVIOUS_TAG -> $CURRENT_TAG), skipping release"
echo "should_release=false" >> $GITHUB_OUTPUT
else
echo "Major or minor bump ($PREVIOUS_TAG -> $CURRENT_TAG), creating release"
echo "should_release=true" >> $GITHUB_OUTPUT
fi
- name: Create Draft Release 📝
if: steps.version_check.outputs.should_release == 'true' || github.event_name == 'workflow_dispatch'
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ github.event.inputs.tag || github.ref_name }}
release_name: Release ${{ github.event.inputs.tag || github.ref_name }}
name: Release ${{ github.event.inputs.tag || github.ref_name }}
draft: true
prerelease: false
generate_release_notes: true
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- name: Output new release URL ↗️
run: 'echo "Draft release URL: ${{ steps.create_release.outputs.html_url }}"'
if: steps.create_release.outputs.url
run: 'echo "Draft release URL: ${{ steps.create_release.outputs.url }}"'

View file

@ -1,38 +0,0 @@
# Adds a comment to new PRs, showing the compressed size and size difference of new code
# And also labels the PR based on the number of lines changed
name: 🌈 Check PR Size
on: [pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4 # v2 runs on the deprecated Node 12 runtime
      # Find and comment with compressed size
      - name: Get Compressed Size
        uses: preactjs/compressed-size-action@v2
        with:
          repo-token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
          pattern: './dist/**/*.{js,css,html}'
          strip-hash: '\\b\\w{8}\\.'
          exclude: '{./dist/manifest.json,**/*.map,**/node_modules/**}'
          minimum-change-threshold: 100
      # Check number of lines of code added
      - name: Label based on Lines of Code
        uses: codelytv/pr-size-labeler@v1
        with:
          GITHUB_TOKEN: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
          xs_max_size: '10'
          s_max_size: '100'
          m_max_size: '500'
          l_max_size: '1000'
          s_label: '🟩 PR - Small'
          m_label: '🟨 PR - Medium'
          l_label: '🟧 PR - Large'
          xl_label: '🟥 PR - XL'
          fail_if_xl: 'false'
          message_if_xl: >
            It looks like this PR is very large (over 1000 lines).
            Try to avoid addressing multiple issues in a single PR, and
            in the future consider breaking large tasks down into smaller steps.
            This is to make reviewing, testing, reverting and general quality management easier.

View file

@ -1,27 +0,0 @@
# Replies with a random looks-good GIF, when a PR is reviewed with a LGTM comment
name: 💡 Random LGTM GIF
on:
issue_comment: { types: [created] }
pull_request_review: { types: [submitted] }
jobs:
post:
runs-on: ubuntu-latest
if: (!contains(github.actor, '[bot]')) # Exclude bot comment
steps:
# Pick one GIF URL at random from the list below
- uses: ddradar/choose-random-action@v1
id: act
with:
contents: |
https://media4.giphy.com/media/11ISwbgCxEzMyY/giphy.gif
https://media4.giphy.com/media/SgwPtMD47PV04/giphy.gif
https://media1.giphy.com/media/3orifdxwbvVLfS3CrS/giphy.gif
https://media4.giphy.com/media/RPwrO4b46mOdy/giphy.gif
https://media2.giphy.com/media/3o7abGQa0aRJUurpII/giphy.gif
https://media3.giphy.com/media/ZZrDTGSJXlHW9Y2khu/giphy.gif
https://media3.giphy.com/media/5DQdk5oZzNgGc/giphy.gif
https://media4.giphy.com/media/3o7abB06u9bNzA8lu8/giphy.gif
https://media4.giphy.com/media/l2JJrEx9aRsjNruhi/giphy.gif
# Post the selected GIF as a reply to the LGTM comment/review
- uses: ddradar/lgtm-action@v1
with:
image-url: ${{ steps.act.outputs.selected }}
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}

View file

@ -1,17 +0,0 @@
# When an issue is closed, any labels marking it as pending a response
# (awaiting user / awaiting maintainer / stale) are removed, since they
# no longer apply to a closed issue
name: 🎯 Remove Pending Labels on Close
on:
issues:
types: [closed]
jobs:
remove-labels:
runs-on: ubuntu-latest
steps:
- name: Remove Labels when Closed
uses: actions-cool/issues-helper@v2
with:
actions: remove-labels
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: '🚏 Awaiting User Response,⚰️ Stale,👤 Awaiting Maintainer Response'

View file

@ -1,42 +0,0 @@
# When a new comment is added to an issue, if it had the Stale or Awaiting User Response labels,
# then those labels will be removed, providing it was not a maintainer who added the comment.
name: 🎯 Add/ Remove Awaiting Response Labels
on:
issue_comment:
types: [created]
jobs:
# A non-maintainer replied: the issue is no longer stale / awaiting the user
remove-stale:
runs-on: ubuntu-latest
if: ${{ github.event.comment.author_association != 'COLLABORATOR' && github.event.comment.author_association != 'OWNER' }}
steps:
- name: Remove Stale labels when Updated
uses: actions-cool/issues-helper@v2
with:
actions: remove-labels
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: '🚏 Awaiting User Response,⚰️ Stale'
# A non-maintainer commented on an open issue (not a PR): mark as awaiting maintainer
add-awaiting-author:
runs-on: ubuntu-latest
if: ${{!github.event.issue.pull_request && github.event.comment.author_association != 'COLLABORATOR' && github.event.comment.author_association != 'OWNER' && github.event.issue.state == 'open' }}
steps:
- name: Add Awaiting Author labels when Updated
uses: actions-cool/issues-helper@v2
with:
actions: add-labels
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: '👤 Awaiting Maintainer Response'
# The owner replied: a maintainer response is no longer pending
remove-awaiting-author:
runs-on: ubuntu-latest
if: ${{ github.event.comment.author_association == 'OWNER' }}
steps:
- name: Remove Awaiting Author labels when Updated
uses: actions-cool/issues-helper@v2
with:
actions: remove-labels
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: '👤 Awaiting Maintainer Response'

66
.github/workflows/manual-tag.yml vendored Normal file
View file

@ -0,0 +1,66 @@
# Manual fallback for creating a tag with optional version bump.
# The automated flow is handled by bump-and-tag.yml on PR merge.
name: 🏷️ Tag on Version Change
on:
workflow_dispatch:
inputs:
version:
description: 'Version to tag (e.g. 3.2.0). Leave empty to auto-bump patch.'
required: false
# Serialise manual runs so two dispatches cannot race on the bump commit / tag
concurrency:
group: manual-tag-version
cancel-in-progress: false
jobs:
tag-version:
runs-on: ubuntu-latest
steps:
- name: Check Out Repository 🛎️
uses: actions/checkout@v4
with:
# Bot token (when present) so the pushed commit/tag can trigger other workflows
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Configure Git Identity 🤖
run: |
git config user.name "Liss-Bot"
git config user.email "liss-bot@d0h.co"
- name: Determine and Apply Version 🔢
id: version
env:
INPUT_VERSION: ${{ github.event.inputs.version }}
run: |
CURRENT=$(node -p "require('./package.json').version")
if [ -n "$INPUT_VERSION" ]; then
# Use the explicitly requested version (strip an optional leading 'v')
TARGET="${INPUT_VERSION#v}"
else
# No version given: auto-bump the patch component
npm version patch --no-git-tag-version > /dev/null
TARGET=$(node -p "require('./package.json').version")
fi
# Only commit when package.json actually needs to change
if [ "$TARGET" != "$CURRENT" ]; then
npm version "$TARGET" --no-git-tag-version --allow-same-version
git add package.json
git commit -m "🔖 Bump version to $TARGET [skip ci]"
git push
echo "Committed version bump to $TARGET"
else
echo "package.json already at $CURRENT, skipping commit"
fi
echo "TARGET=$TARGET" >> $GITHUB_OUTPUT
- name: Create and Push Tag ⤴️
env:
TAG: ${{ steps.version.outputs.TARGET }}
run: |
git fetch --tags --force
# Idempotent: skip if the tag already exists (e.g. a re-run of this workflow)
if git rev-parse "refs/tags/$TAG" >/dev/null 2>&1; then
echo "Tag $TAG already exists, skipping"
else
git tag -a "$TAG" -m "Release v$TAG"
git push origin "$TAG"
echo "Created and pushed tag $TAG"
fi

115
.github/workflows/pr-quality-check.yml vendored Normal file
View file

@ -0,0 +1,115 @@
# Runs lint, unit tests, a production build, a Docker smoke test and a
# dependency security audit against every PR targeting master or develop
name: 🔍 PR Quality Check
on:
pull_request:
branches: ['master', 'develop']
paths-ignore:
- '**.md'
# Cancel superseded runs when new commits are pushed to the same ref
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
lint:
name: 📝 Lint Code
runs-on: ubuntu-latest
steps:
- name: 🛎️ Checkout Code
uses: actions/checkout@v4
- name: 🔧 Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'yarn'
- name: 📦 Install Dependencies
run: yarn install --frozen-lockfile
- name: 🔍 Run ESLint
run: yarn lint
test:
name: 🧪 Run Tests
runs-on: ubuntu-latest
steps:
- name: 🛎️ Checkout Code
uses: actions/checkout@v4
- name: 🔧 Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'yarn'
- name: 📦 Install Dependencies
run: yarn install --frozen-lockfile
- name: 🧪 Run Tests
run: yarn test
build:
name: 🏗️ Build Application
runs-on: ubuntu-latest
steps:
- name: 🛎️ Checkout Code
uses: actions/checkout@v4
- name: 🔧 Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'yarn'
- name: 📦 Install Dependencies
run: yarn install --frozen-lockfile
- name: 🏗️ Build Project
run: yarn build
env:
# NOTE(review): presumably required for the legacy webpack build on Node 17+ — confirm
NODE_OPTIONS: --openssl-legacy-provider
- name: ✅ Verify Build Output
run: |
# Sanity-check that the build actually produced a dist bundle
if [ ! -d "dist" ]; then
echo "❌ Build failed: dist directory not created"
exit 1
fi
if [ ! -f "dist/index.html" ]; then
echo "❌ Build failed: index.html not found"
exit 1
fi
echo "✅ Build successful"
# Advisory job: continue-on-error means a failure does not block the PR
docker-smoke:
name: 🐳 Docker Smoke Test
runs-on: ubuntu-latest
continue-on-error: true
steps:
- name: 🛎️ Checkout Code
uses: actions/checkout@v4
- name: 🐳 Build & Test Docker Image
run: sh tests/docker-smoke-test.sh
timeout-minutes: 10
# Advisory job: continue-on-error means a failure does not block the PR
security:
name: 🔒 Security Audit
runs-on: ubuntu-latest
continue-on-error: true
steps:
- name: 🛎️ Checkout Code
uses: actions/checkout@v4
- name: 🔧 Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'yarn'
- name: 📦 Install Dependencies
run: yarn install --frozen-lockfile
- name: 🔒 Run Security Audit
run: yarn audit --level high

View file

@ -16,35 +16,49 @@ on:
# Jobs to be run:
# 1. Checkout master branch
# 2. Checkout website source code branch
# 3. Install Python
# 4. Copy /docs from master to website branch
# 5. Run the script which processes documentation
# 6. Commit and push updated docs to the website source code branch
# 3. Install Python and Node.js
# 4. Install website dependencies (Docusaurus)
# 5. Copy /docs from master to website branch
# 6. Run the script which processes documentation and tests the build
# 7. Commit and push updated docs to the website source code branch
jobs:
update-docs:
runs-on: ubuntu-latest
steps:
- name: Checkout master branch 🛎️
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
path: 'master-docs'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- name: Checkout WEBSITE/docs-site-source branch 🛎️
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
ref: 'WEBSITE/docs-site-source'
path: 'website-docs'
token: ${{ secrets.BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- name: Install Python 🐍
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Setup Node.js 🟢
uses: actions/setup-node@v4
with:
node-version: '18'
- name: Install website dependencies 📦
working-directory: website-docs
run: yarn install --frozen-lockfile
- name: Run script to update documentation 🪄
working-directory: website-docs
env:
NODE_OPTIONS: '--openssl-legacy-provider'
run: |
cp -r ../master-docs/docs ./
python ./do-markdown-magic.py
python ./do-doc-updaty-magic.py
- name: Commit changes 🚀
run: |

1
.yarnrc Normal file
View file

@ -0,0 +1 @@
--ignore-engines true

View file

@ -1 +0,0 @@
nodeLinker: node-modules

View file

@ -11,25 +11,26 @@
---
<p align="center">
<sup>Dashy is kindly sponsored by <a href="https://www.lambdatest.com/?utm_source=dashy&utm_medium=sponsor">LambdaTest</a> - Browser Testing</sup><br>
<a href="https://www.lambdatest.com/?utm_source=dashy&utm_medium=sponsor" target="_blank">
<img src="https://www.lambdatest.com/blue-logo.png" width="400" alt="LambdaTest" />
<sup>Dashy is kindly sponsored by <a href="https://ssdnodes.com/dashy">SSD Nodes</a> - Affordable VPS hosting for self-hosters</sup><br>
<a href="https://ssdnodes.com/dashy" target="_blank">
<img src="https://pixelflare.cc/alicia/sponsors/ssd-nodes-sponsorship.png" width="400" alt="SSD Nodes" />
</a>
</p>
<p align="center">
<br>
<sup>Dashy is kindly sponsored by <a href="https://umbrel.com?ref=dashy">Umbrel</a> - the personal home cloud and OS for self-hosting</sup><br>
<a href="https://umbrel.com?ref=dashy">
<img width="400" src="https://github.com/Lissy93/dashy/blob/WEBSITE/docs-site-source/static/umbrel-banner.jpg?raw=true" />
<img width="400" src="https://cdn.as93.net/sponsors/umbrel-banner.jpg" />
</a>
</p>
> [!NOTE]
> Version [3.0.0](https://github.com/Lissy93/dashy/releases/tag/3.0.0) has been released, and requires some changes to your setup, see [#1529](https://github.com/Lissy93/dashy/discussions/1529) for details.
<p align="center">
<sup>Dashy is kindly sponsored by <a href="https://www.testmuai.com/?utm_medium=sponsor&utm_source=dashy">TestMu AI</a> - The world's first full-stack Agentic AI Quality Engineering platform</sup><br>
<a href="https://www.testmuai.com/?utm_medium=sponsor&utm_source=dashy" target="_blank">
<img src="https://cdn.as93.net/sponsors/test-mu-ai.png" width="400" alt="TestMu AI" />
</a>
</p>
<details>
<summary><b>Table of Contents</b></summary>
@ -433,6 +434,7 @@ Dashy supports multiple languages and locales. When available, your language sho
- 🇷🇺 **Russian**: `ru` -Contributed by **[@sasetz](https://github.com/sasetz)**
- 🇸🇰 **Slovak**: `sk` - Contributed by **[@Smexhy](https://github.com/Smexhy)**
- 🇸🇮 **Slovenian**: `sl` - Contributed by **[@UrekD](https://github.com/UrekD)**
- 🇰🇬 **Kyrgyz**: `ky` - Contributed by **[@noblepower1337](https://github.com/noblepower1337)**
- 🇪🇸 **Spanish**: `es` - Contributed by **[@lu4t](https://github.com/lu4t)**
- 🇸🇪 **Swedish**: `sv` - Contributed by **[@BOZG](https://github.com/BOZG)**
- 🇹🇼 **Traditional Chinese**: `zh-TW` - Contributed by **[@stanly0726](https://github.com/stanly0726)**

View file

@ -101,7 +101,6 @@ The following is a complete example of a [`docker-compose.yml`](https://github.c
```yaml
---
version: "3.8"
services:
dashy:
# To build from source, replace 'image: lissy93/dashy' with 'build: .'

View file

@ -5,6 +5,7 @@
- [Deployment Process](#deployment-process)
- [Git Strategy](#git-strategy)
- [Automated Workflows](#automated-workflows)
- [Release Pipeline](#release-pipeline)
## Release Schedule
@ -22,24 +23,17 @@ All changes and new features are submitted as pull requests, which can then be t
When a PR is opened:
- The feature branch is built, and deployed as a Netlify instance. This can be accessed at: `https://deploy-preview-[pr-number]--dashy-dev.netlify.app`, and this URL as well as a link to the build logs are added as a comment on the PR by Netlify bot
- Depending on what files were modified, the bot may also add a comment to remind the author of useful info
- A series of checks will run on the new code, using GH Actions, and prevent merging if they fail. This includes: linting, testing, code quality and complexity checking, security scanning and a spell check
- If a new dependency was added, liss-bot will comment with a summary of those changes, as well as the cost of the module, version, and any security concerns. If the bundle size has increased, this will also be added as a comment
- A series of CI checks run against the new code (lint, test, build, Docker smoke test, security audit). The PR cannot be merged if any required check fails
- If `yarn.lock` was modified, Liss-Bot adds a comment summarising which packages changed
After the PR is merged:
After the PR is merged into master:
- The app is built, and deployed to: <https://dev.dashy.to>
- A new tag in GitHub is created, using the apps version number (from the package.json)
- The Docker container is built, and published under the `:latest` tag on DockerHub and GHCR
- If code files changed and the version in package.json wasn't already bumped, the patch version is auto-incremented and committed ([bump-and-tag.yml](https://github.com/Lissy93/dashy/blob/master/.github/workflows/bump-and-tag.yml))
- A git tag is created and pushed for the new version
- The tag push triggers Docker image builds for `linux/amd64`, `linux/arm64` and `linux/arm/v7`, published to both DockerHub and GHCR ([docker-build-publish.yml](https://github.com/Lissy93/dashy/blob/master/.github/workflows/docker-build-publish.yml))
- If the tag is a major or minor version bump, a draft GitHub release is created with auto-generated release notes ([draft-release.yml](https://github.com/Lissy93/dashy/blob/master/.github/workflows/draft-release.yml)). Patch-only bumps skip the release
When a new major version is released:
- A new GitHub release is created and published, under the new version's tag, with info from the changelog
- The container is built, and published under a new tag on DockerHub, called `:release-[version]`
- An announcement is opened in GitHub discussions, outlining the main changes, where users can comment and ask questions
[![Netlify Status](https://api.netlify.com/api/v1/badges/3a0216c3-1ed0-40f5-ad90-ff68b1c96c09/deploy-status)](https://app.netlify.com/sites/dashy-dev/deploys)
Manual tagging is also available via the [manual-tag.yml](https://github.com/Lissy93/dashy/blob/master/.github/workflows/manual-tag.yml) workflow. You can either provide a specific version (e.g. `3.2.0`) or leave it empty to auto-bump the patch version. This is useful if the automated flow didn't trigger or you need to cut a release outside the normal PR flow.
---
@ -115,52 +109,68 @@ When you submit your PR, include the required info, by filling out the PR templa
Dashy makes heavy use of [GitHub Actions](https://github.com/features/actions) to fully automate the checking, testing, building and deploying of the project, as well as administration tasks like management of issues, tags, releases and documentation. The following section outlines each workflow, along with a link to the action file, current status and short description. A lot of these automations were made possible using community actions contributed to GH marketplace by some amazing people.
### Code Processing
### CI Checks
These run on every pull request targeting master. All required checks must pass before merging.
Action | Description
--- | ---
**Code Linter**<br/>[![code-linter.yml](https://github.com/Lissy93/dashy/actions/workflows/code-linter.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/code-linter.yml) | After a pull request is created, all new code changes will be linted, and the CI will fail with a helpful message if the code has any formatting inconsistencies
**Code Spell Check**<br/>[![code-spell-check.yml](https://github.com/Lissy93/dashy/actions/workflows/code-spell-check.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/code-spell-check.yml) | After a PR is submitted, all auto-fixable spelling errors will be detected, then Liss-Bot will create a separate PR to propose the fixes
**Dependency Update Summary** <br/>[![dependency-updates-summary.yml](https://github.com/Lissy93/dashy/actions/workflows/dependency-updates-summary.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/dependency-updates-summary.yml) | After a PR is submitted, if any of the dependencies are modified, then Liss-Bot will add a comment, explaining which packages have been added, removed, updated or downgraded, as well as other helpful info
**Get Size** <br/>[![get-size.yml](https://github.com/Lissy93/dashy/actions/workflows/get-size.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/get-size.yml) | Adds comment to PR if the size of the built + bundled application has changed compared to the previous version
**Security Scan** <br/>[![security-scanning.yml](https://github.com/Lissy93/dashy/actions/workflows/security-scanning.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/security-scanning.yml) | Uses Snyk to scan the code and dependencies after a PR. Will add a comment and cause the build to fail if a new vulnerability or potential issue is present
**PR Quality Check** <br/>[![pr-quality-check.yml](https://github.com/Lissy93/dashy/actions/workflows/pr-quality-check.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/pr-quality-check.yml) | Runs lint, unit tests, a full build and Docker smoke test against every PR. Also runs a security audit on dependencies
**Dependency Update Summary** <br/>[![dependency-updates-summary.yml](https://github.com/Lissy93/dashy/actions/workflows/dependency-updates-summary.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/dependency-updates-summary.yml) | When yarn.lock is modified in a PR, Liss-Bot comments with a summary of which packages changed
### Releases
Action | Description
--- | ---
**Create Tag** <br/>[![auto-tag-pr.yml](https://github.com/Lissy93/dashy/actions/workflows/auto-tag-pr.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/auto-tag-pr.yml) | Whenever the version indicated in package.json is updated, a new GitHub tag will be created for that point in time
**Build App** <br/>[![build-app.yml](https://github.com/Lissy93/dashy/actions/workflows/build-app.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/build-app.yml) | After changes are merged into the master branch, the app will be built, with output pushed to the `dev-demo` branch
**Cache Artifacts** <br/>[![cache-artifacts.yml](https://github.com/Lissy93/dashy/actions/workflows/cache-artifacts.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/cache-artifacts.yml) | After build, returned files will be cached for future actions for that commit
**Docker Publish** <br/>[![docker-publish.yml](https://github.com/Lissy93/dashy/actions/workflows/docker-publish.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/docker-publish.yml) | After PR is merged, the multi-architecture Docker container will be built, and then published to GHCR
**Auto Version & Tag** <br/>[![bump-and-tag.yml](https://github.com/Lissy93/dashy/actions/workflows/bump-and-tag.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/bump-and-tag.yml) | When a PR with code changes is merged into master, auto-bumps the patch version (if not already bumped) and creates a git tag
**Manual Tag** <br/>[![manual-tag.yml](https://github.com/Lissy93/dashy/actions/workflows/manual-tag.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/manual-tag.yml) | Manual dispatch workflow. Provide a version to tag, or leave empty to auto-bump patch. Updates package.json, commits and creates the tag
**Docker Publish** <br/>[![docker-build-publish.yml](https://github.com/Lissy93/dashy/actions/workflows/docker-build-publish.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/docker-build-publish.yml) | Triggered by tag pushes. Builds multi-arch Docker images and publishes to DockerHub and GHCR with semver tags
**Draft Release** <br/>[![draft-release.yml](https://github.com/Lissy93/dashy/actions/workflows/draft-release.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/draft-release.yml) | Triggered by tag pushes. Creates a draft GitHub release with auto-generated notes for major or minor version bumps. Patch-only bumps are skipped
### Issue Management
Action | Description
--- | ---
**Close Incomplete Issues** <br/>[![close-incomplete-issues.yml](https://github.com/Lissy93/dashy/actions/workflows/close-incomplete-issues.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/close-incomplete-issues.yml) | Issues which do not match any of the issue templates will be closed, and a comment posted explaining why
**Close Stale Issues** <br/>[![close-stale-issues.yml](https://github.com/Lissy93/dashy/actions/workflows/close-stale-issues.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/close-stale-issues.yml) | Issues which have not been updated for 6 weeks will have a comment posted to them. If the author does not reply within the next week, then the issue will be marked as stale and closed. The original author may still reopen the issue at any time
**Close Potential Spam Issues** <br/>[![issue-spam-control.yml](https://github.com/Lissy93/dashy/actions/workflows/issue-spam-control.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/issue-spam-control.yml) | Auto-closes issues, and adds a comment if it was submitted by a user who hasn't yet interacted with the repo, is new to GitHub and has not starred the repository. The comment will advise them to check their issue is complete, and then allow them to reopen it
**Issue Translator** <br/>[![issue-translator.yml](https://github.com/Lissy93/dashy/actions/workflows/issue-translator.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/issue-translator.yml) | Auto-translates any comments and issues that were written in any language other than English, and posts the translation as a comment below
**Label Sponsors** <br/>[![label-sponsors.yml](https://github.com/Lissy93/dashy/actions/workflows/label-sponsors.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/label-sponsors.yml) | Adds a special label to any issues or pull requests raised by users who are sponsoring the project via GitHub, so that they can get priority support
**LGTM Comment**<br/>[![lgtm-comment.yml](https://github.com/Lissy93/dashy/actions/workflows/lgtm-comment.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/lgtm-comment.yml) | When a PR review contains the words LGTM (looks good to me), the Liss-Bot will reply with a random celebratory or thumbs up GIF, just as a bit of fun
**Mind your Language** <br/>[![mind-your-language.yml](https://github.com/Lissy93/dashy/actions/workflows/mind-your-language.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/mind-your-language.yml) | Replies to any comment (on issue or PR) that contains profanities, offensive or inappropriate language with a polite note reminding the user of the code of conduct
**Release Notifier** <br/>[![release-commenter.yml](https://github.com/Lissy93/dashy/actions/workflows/release-commenter.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/release-commenter.yml) | Once a release has been published which fixes an issue, a comment will be added to the relevant issues informing the user who raised it that it was fixed in the current release
**Update Issue after Merge** <br/>[![update-issue-after-pr.yml](https://github.com/Lissy93/dashy/actions/workflows/update-issue-after-pr.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/update-issue-after-pr.yml) | After a PR which fixes an issue is merged, Liss-Bot will add a comment to said issue based on the git commit message
**Auto Add Comment Based on Tag** <br/>[![add-comment-from-tag.yml](https://github.com/Lissy93/dashy/actions/workflows/add-comment-from-tag.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/add-comment-from-tag.yml) | Will add comment with useful info to certain issues, based on the tag applied
**Close Stale Issues** <br/>[![close-stale-issues.yml](https://github.com/Lissy93/dashy/actions/workflows/close-stale-issues.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/close-stale-issues.yml) | Issues which have not been updated for a long time will have a comment posted to them. If the author does not reply, the issue will be marked as stale and closed. Also handles issues awaiting user response and pings the maintainer when needed
### PR Management
### Documentation
Action | Description
--- | ---
**PR Commenter** <br/>[![pr-commenter.yml](https://github.com/Lissy93/dashy/actions/workflows/pr-commenter.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/pr-commenter.yml) | Adds comment with helpful info to pull requests, based on which files have been changed
**Issue from Todo Code** <br/>[![raise-issue-from-todo.yml](https://github.com/Lissy93/dashy/actions/workflows/raise-issue-from-todo.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/raise-issue-from-todo.yml) | When a `todo` note is found in the code after a PR, then Liss-Bot will automatically raise an issue, so that the todo can be addressed/ implemented. The issue will be closed once the todo has been implemented or removed
**Wiki Sync** <br/>[![wiki-sync.yml](https://github.com/Lissy93/dashy/actions/workflows/wiki-sync.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/wiki-sync.yml) | Publishes the repository wiki from the markdown files in the docs directory. Runs weekly and on manual dispatch
**Update Docs Site** <br/>[![update-docs-site.yml](https://github.com/Lissy93/dashy/actions/workflows/update-docs-site.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/update-docs-site.yml) | When docs change on master, copies them to the website branch and processes them for the docs site
**Build Docs Site** <br/>[![build-docs-site.yml](https://github.com/Lissy93/dashy/actions/workflows/build-docs-site.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/build-docs-site.yml) | Builds and deploys the documentation website from the WEBSITE/docs-site-source branch
### Documentation & Reports
### Other
Action | Description
--- | ---
**Generate Credits** <br/>[![generate-credits.yml](https://github.com/Lissy93/dashy/actions/workflows/generate-credits.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/generate-credits.yml) | Generates a report, including contributors, collaborators, sponsors, bots and helpful users. Will then insert a markdown table with thanks to these GitHub users and links to their profiles into the Credits page, as well as a summary of sponsors and top contributors into the main readme
**Wiki Sync** <br/>[![wiki-sync.yml](https://github.com/Lissy93/dashy/actions/workflows/wiki-sync.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/wiki-sync.yml) | Generates and publishes the repository's wiki page using the markdown files within the docs directory
**Mirror to Codeberg** <br/>[![mirror.yml](https://github.com/Lissy93/dashy/actions/workflows/mirror.yml/badge.svg)](https://github.com/Lissy93/dashy/actions/workflows/mirror.yml) | Pushes a copy of the repo to Codeberg weekly and on manual dispatch
---
## Release Pipeline
```mermaid
flowchart TD
A[PR opened] --> B[CI checks run\nlint, test, build, Docker smoke, security]
B --> C{Checks pass?}
C -- No --> D[PR blocked]
C -- Yes --> R[Maintainers review]
R --> E[PR merged into master]
E --> F{Code files changed?}
F -- No --> G[No action]
F -- Yes --> H{Version already\nbumped in PR?}
H -- Yes --> I[Use existing version]
H -- No --> J[Auto bump patch version]
J --> I
I --> K[Create git tag]
K --> L[Docker build + publish\namd64, arm64, arm/v7]
K --> M{Major or minor bump?}
M -- Yes --> N[Draft GitHub release]
M -- No --> O[Skip release]
```
---

View file

@ -1,6 +1,6 @@
{
"name": "dashy",
"version": "3.1.1",
"version": "3.1.3",
"license": "MIT",
"main": "server",
"author": "Alicia Sykes <alicia@omg.lol> (https://aliciasykes.com)",
@ -9,6 +9,10 @@
"dev": "NODE_OPTIONS=--openssl-legacy-provider vue-cli-service serve",
"build": "NODE_OPTIONS=--openssl-legacy-provider vue-cli-service build",
"lint": "vue-cli-service lint",
"test": "vitest run",
"test:watch": "vitest",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"pm2-start": "npx pm2 start server.js",
"build-watch": "vue-cli-service build --watch --mode production",
"build-and-start": "NODE_OPTIONS=--openssl-legacy-provider npm-run-all --parallel build start",
@ -17,7 +21,7 @@
"dependency-audit": "npx improved-yarn-audit --ignore-dev-deps"
},
"dependencies": {
"@babel/core": "^7.0.0",
"@babel/core": "^7.26.0",
"@formschema/native": "^2.0.0-beta.6",
"@sentry/tracing": "^7.102.1",
"@sentry/vue": "^7.102.1",
@ -25,7 +29,8 @@
"axios": "^1.12.0",
"connect-history-api-fallback": "^1.6.0",
"crypto-js": "^4.2.0",
"express": "^4.17.2",
"dompurify": "^3.0.8",
"express": "^4.21.0",
"express-basic-auth": "^1.2.1",
"frappe-charts": "^1.6.2",
"js-yaml": "^4.1.0",
@ -50,22 +55,27 @@
"vuex": "^3.6.2"
},
"devDependencies": {
"@babel/preset-env": "^7.17.10",
"@vue/cli-plugin-babel": "^4.5.15",
"@vue/cli-plugin-eslint": "^4.5.15",
"@vue/cli-plugin-pwa": "^4.5.15",
"@babel/preset-env": "^7.26.0",
"@vitest/ui": "^1.6.0",
"@vue/cli-plugin-babel": "^5.0.8",
"@vue/cli-plugin-eslint": "^5.0.8",
"@vue/cli-plugin-pwa": "^5.0.8",
"@vue/cli-plugin-typescript": "^5.0.8",
"@vue/cli-service": "^4.5.19",
"@vue/cli-service": "^5.0.8",
"@babel/eslint-parser": "^7.25.0",
"@vue/eslint-config-standard": "^4.0.0",
"babel-eslint": "^10.0.1",
"@vue/test-utils": "^1.3.6",
"copy-webpack-plugin": "6.4.0",
"eslint": "^6.8.0",
"eslint": "^7.32.0",
"eslint-config-airbnb": "^18.0.1",
"eslint-plugin-vue": "^7.9.0",
"happy-dom": "^17.4.0",
"npm-run-all": "^4.1.5",
"sass": "^1.38.0",
"sass-loader": "^7.1.0",
"sass": "^1.77.0",
"sass-loader": "^12.0.0",
"typescript": "^5.4.4",
"vite-plugin-vue2": "^2.0.3",
"vitest": "^1.6.0",
"vue-cli-plugin-yaml": "^1.0.2",
"vue-svg-loader": "^0.16.0",
"vue-template-compiler": "^2.7.0"
@ -90,12 +100,26 @@
"no-else-return": 0
},
"parserOptions": {
"parser": "babel-eslint"
}
"parser": "@babel/eslint-parser"
},
"overrides": [
{
"files": [
"tests/**",
"vitest.config.js"
],
"rules": {
"import/no-extraneous-dependencies": "off",
"no-undef": "off",
"global-require": "off",
"no-unused-vars": "off"
}
}
]
},
"babel": {
"presets": [
"@vue/app",
"@vue/cli-plugin-babel/preset",
"@babel/preset-env"
]
},
@ -107,5 +131,19 @@
"browserslist": [
"> 1%",
"last 2 versions"
]
],
"resolutions": {
"ejs": "^3.1.10",
"loader-utils": "^2.0.4",
"minimatch": "^3.1.2",
"braces": "^3.0.3",
"micromatch": "^4.0.8",
"serialize-javascript": "^6.0.2",
"node-forge": "^1.3.1",
"nth-check": "^2.1.1",
"ip": "^2.0.1",
"postcss": "^8.4.31",
"tar": "^6.2.1"
},
"packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"
}

View file

@ -12,6 +12,7 @@
<link rel="icon" type="image/png" sizes="16x16" href="web-icons/favicon-16x16.png">
<link rel="icon" type="image/png" href="/favicon.ico" />
<link rel="stylesheet" type="text/css" href="/loading-screen.css" />
<link rel="stylesheet" type="text/css" href="/theme-fonts.css" media="print" onload="this.media='all'" />
<!-- Default Page Title -->
<title>Dashy</title>
</head>

44
public/theme-fonts.css Normal file
View file

@ -0,0 +1,44 @@
/* Optional fonts for specific themes, served statically from public/
   (not bundled into dist). Loaded in index.html via a media="print"
   stylesheet swap, so the downloads don't block first paint. */

/* Body text in Matrix and Hacker themes. Credit to the late Vernon Adams, RIP */
@font-face {
  font-family: 'Cutive Mono';
  src: url('/fonts/CutiveMono-Regular.ttf');
}
/* Heading text in Material and Material Dark. Credit to Vernon Adams */
@font-face {
  font-family: 'Francois One';
  src: url('/fonts/FrancoisOne-Regular.ttf');
}
/* Heading text in Colorful theme. Credit to Cyreal */
@font-face {
  font-family: 'Podkova';
  src: url('/fonts/Podkova-Medium.ttf');
}
/* Standard body text in material original. Credit to Christian Robertson */
@font-face {
  font-family: 'Roboto';
  src: url('/fonts/Roboto-Light.ttf');
}
/* Heading text in Jam, Bee and Tiger themes. Credit to Haley Fiege */
@font-face {
  font-family: 'Sniglet';
  src: url('/fonts/Sniglet-Regular.ttf');
}
/* Heading text in Matrix and Hacker themes. Credit to Peter Hull */
@font-face {
  font-family: 'VT323';
  src: url('/fonts/VT323-Regular.ttf');
}
/* Used by cyberpunk theme. Credit to Astigmatic */
@font-face {
  font-family: 'Audiowide';
  src: url('/fonts/Audiowide-Regular.ttf');
}
/* Used by Dracula, Lissy themes. Credit to Jonny Pinhorn */
@font-face {
  font-family: 'Shrikhand';
  src: url('/fonts/Shrikhand-Regular.ttf');
}
/* Seven-segment-style face, previously declared in the Clock widget's styles */
@font-face {
  font-family: 'Digital';
  src: url('/fonts/Digital-Regular.ttf');
}
/* Weather-widget icon font. Based on open-weather-icons,
   MIT licensed - Copyright (c) 2017 Ivan Vilanculo */
@font-face {
  font-family: 'OpenWeatherIcons';
  src: url('/widget-resources/WeatherIcons.woff2');
  font-style: normal;
  font-weight: 400;
}

View file

@ -379,7 +379,8 @@
"post-code": "Post Code",
"location": "Location",
"timezone": "Timezone",
"organization": "Organization"
"organization": "Organization",
"forwarded-port": "Forwarded Port"
},
"nextcloud": {
"active": "active",

View file

@ -85,10 +85,6 @@ export default {
</script>
<style scoped lang="scss">
@font-face {
font-family: 'Digital';
src: url('/fonts/Digital-Regular.ttf');
}
.clock {
padding: 0.5rem 0;

View file

@ -4,6 +4,10 @@
<span class="lbl">{{ $t('widgets.gluetun-status.vpn-ip') }}</span>
<span class="val">{{ public_ip }}</span>
</div>
<div class="ip-row" v-if="forwarded_port">
<span class="lbl">{{ $t('widgets.gluetun-status.forwarded-port') }}</span>
<span class="val">{{ forwarded_port }}</span>
</div>
<div class="ip-row" v-if="country">
<span class="lbl">{{ $t('widgets.gluetun-status.country') }}</span>
<span class="val">{{ country }}</span>
@ -50,6 +54,7 @@ export default {
organization: null,
postal_code: null,
timezone: null,
forwarded_port: null,
};
},
computed: {
@ -65,6 +70,12 @@ export default {
/* Make GET request to Gluetun publicip API endpoint */
fetchData() {
this.makeRequest(`${this.hostname}/v1/publicip/ip`).then(this.processData);
this.makeRequest(`${this.hostname}/v1/portforward`)
.then(this.processPortData)
.catch(() => {
this.forwarded_port = null;
});
},
/* Assign data variables to the returned data */
processData(ipInfo) {
@ -78,6 +89,12 @@ export default {
this.postal_code = fields.includes('postal_code') ? ipInfo.postal_code : null;
this.timezone = fields.includes('timezone') ? ipInfo.timezone : null;
},
processPortData(portInfo) {
const fields = this.visibleFields.split(',');
this.forwarded_port = fields.includes('forwarded_port')
? portInfo.port
: null;
},
},
};
</script>

View file

@ -1,17 +1,31 @@
<template>
<div class="rss-wrapper">
<!-- Feed Meta Info -->
<a class="meta-container" v-if="meta" :href="meta.link" :title="meta.description">
<component
:is="meta && meta.link ? 'a' : 'div'"
class="meta-container"
v-if="meta"
:href="meta.link || undefined"
:title="meta.description"
target="_blank"
rel="noopener noreferrer"
>
<img class="feed-icon" :src="meta.image" v-if="meta.image" alt="Feed Image" />
<div class="feed-text">
<p class="feed-title">{{ meta.title }}</p>
<p class="feed-author" v-if="meta.author">By {{ meta.author }}</p>
</div>
</a>
</component>
<!-- Feed Content -->
<div class="post-wrapper" v-if="posts">
<div class="post-row" v-for="(post, indx) in posts" :key="indx">
<a class="post-top" :href="post.link">
<component
:is="post.link ? 'a' : 'div'"
class="post-top"
:href="post.link || undefined"
target="_blank"
rel="noopener noreferrer"
>
<img class="post-img" :src="post.image" v-if="post.image" alt="Post Image">
<div class="post-title-wrap">
<p class="post-title">{{ post.title }}</p>
@ -19,9 +33,15 @@
{{ post.date | formatDate }} {{ post.author | formatAuthor }}
</p>
</div>
</a>
</component>
<div class="post-body" v-html="post.description"></div>
<a class="continue-reading-btn" :href="post.link">
<a
class="continue-reading-btn"
v-if="post.link"
:href="post.link"
target="_blank"
rel="noopener noreferrer"
>
{{ $t('widgets.general.open-link') }}
</a>
</div>
@ -34,6 +54,7 @@
import * as Parser from 'rss-parser';
import WidgetMixin from '@/mixins/WidgetMixin';
import { widgetApiEndpoints } from '@/utils/defaults';
import { sanitizeRssItem, sanitizeRssMeta } from '@/utils/Sanitizer';
export default {
mixins: [WidgetMixin],
@ -68,7 +89,7 @@ export default {
return 'pubDate';
},
orderDirection() {
const usersChoice = this.options.orderBy;
const usersChoice = this.options.orderDirection;
if (usersChoice && (usersChoice === 'desc' || usersChoice === 'asc')) return usersChoice;
return 'desc';
},
@ -87,9 +108,13 @@ export default {
},
filters: {
formatDate(timestamp) {
if (!timestamp) return '';
const date = new Date(timestamp);
// Check if date is valid
if (Number.isNaN(date.getTime())) return '';
const localFormat = navigator.language;
const dateFormat = { weekday: 'short', day: 'numeric', month: 'short' };
return new Date(timestamp).toLocaleDateString(localFormat, dateFormat);
return date.toLocaleDateString(localFormat, dateFormat);
},
formatAuthor(author) {
return author ? `by ${author}` : '';
@ -107,23 +132,23 @@ export default {
const {
link, title, items, author, description, image,
} = await parser.parseString(data);
this.meta = {
this.meta = sanitizeRssMeta({
title,
link,
author,
description,
image,
};
});
this.processItems(items);
} else {
const { feed, items } = data;
this.meta = {
this.meta = sanitizeRssMeta({
title: feed.title,
link: feed.link,
author: feed.author,
description: feed.description,
image: feed.image,
};
});
this.processItems(items);
}
},
@ -134,13 +159,14 @@ export default {
length = Math.min(length, this.limit);
}
for (let i = 0; length > i; i += 1) {
const sanitized = sanitizeRssItem(items[i]);
posts.push({
title: items[i].title,
description: items[i].description,
image: items[i].thumbnail,
author: items[i].author,
date: items[i].pubDate,
link: items[i].link,
title: sanitized.title,
description: sanitized.description,
image: sanitized.thumbnail,
author: sanitized.author,
date: sanitized.pubDate,
link: sanitized.link,
});
}
this.posts = posts;

View file

@ -41,37 +41,3 @@ html {
font-weight: normal;
}
}
/* Optional fonts for specific themes */
/* These fonts are loaded from ./public so not bundled within dist */
@font-face { // Used by body text in Matrix and Hacker themes. Credit to the late Vernon Adams, RIP
font-family: 'Cutive Mono';
src: url('/fonts/CutiveMono-Regular.ttf');
}
@font-face { // Heading text in Material and Material Dark. Credit to Vernon Adams
font-family: 'Francois One';
src: url('/fonts/FrancoisOne-Regular.ttf');
}
@font-face { // Heading text in Colorful theme. Credit to Cyreal
font-family: 'Podkova';
src: url('/fonts/Podkova-Medium.ttf');
}
@font-face { // Standard body text in material original. Credit to Christian Robertson
font-family: 'Roboto';
src: url('/fonts/Roboto-Light.ttf');
}
@font-face { // Heading text in Jam, Bee and Tiger themes. Credit to Haley Fiege
font-family: 'Sniglet';
src: url('/fonts/Sniglet-Regular.ttf');
}
@font-face { // Used by heading text in Matrix and Hacker themes. Credit to Peter Hull
font-family: 'VT323';
src: url('/fonts/VT323-Regular.ttf');
}
@font-face { // Used by cyberpunk theme. Credit to Astigmatic
font-family: 'Audiowide';
src: url('/fonts/Audiowide-Regular.ttf');
}
@font-face { // Used by Dracula, Lissy themes. Credit to Jonny Pinhorn
font-family: 'Shrikhand';
src: url('/fonts/Shrikhand-Regular.ttf');
}

View file

@ -4,12 +4,7 @@ Based on https://github.com/isneezy/open-weather-icons
Licensed under MIT - Copyright (c) 2017 Ivan Vilanculo
**/
@font-face {
font-family: 'OpenWeatherIcons';
src: url('/widget-resources/WeatherIcons.woff2');
font-style: normal;
font-weight: 400;
}
/* OpenWeatherIcons font-face is loaded via public/theme-fonts.css */
i.owi {
display: inline-block;

132
src/utils/Sanitizer.js Normal file
View file

@ -0,0 +1,132 @@
/**
* Sanitization Utilities
* Used by RSS feed widgets, preventing XSS
*/
import DOMPurify from 'dompurify';
import ErrorHandler from '@/utils/ErrorHandler';
// DOMPurify settings for rich HTML bodies (e.g. RSS post descriptions).
// Allows a conservative subset of formatting/layout markup while stripping
// anything executable. KEEP_CONTENT preserves the text inside removed tags;
// SAFE_FOR_TEMPLATES additionally neutralises template expressions.
const HTML_SANITIZE_CONFIG = {
  ALLOWED_TAGS: [
    'a', 'p', 'br', 'strong', 'em', 'b', 'i', 'u', 's', 'strike',
    'ul', 'ol', 'li', 'blockquote', 'pre', 'code',
    'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
    'img', 'figure', 'figcaption',
    'span', 'div', 'hr',
    'table', 'thead', 'tbody', 'tr', 'th', 'td',
  ],
  ALLOWED_ATTR: [
    'href', 'src', 'alt', 'title', 'class', 'id',
    'width', 'height', 'target', 'rel',
  ],
  // Permitted URI schemes in href/src: http(s), ftp(s), mailto, tel, etc.
  ALLOWED_URI_REGEXP: /^(?:(?:(?:f|ht)tps?|mailto|tel|callto|sms|cid|xmpp):|[^a-z]|[a-z+.-]+(?:[^a-z+.-:]|$))/i,
  // Explicitly forbidden, on top of not being in the allow-list above
  FORBID_TAGS: ['script', 'style', 'iframe', 'object', 'embed', 'base', 'form', 'input', 'button'],
  KEEP_CONTENT: true,
  SAFE_FOR_TEMPLATES: true,
};

// DOMPurify configuration for text-only sanitization: no tags survive,
// but their inner text is kept (used for titles, authors, dates)
const TEXT_SANITIZE_CONFIG = {
  ALLOWED_TAGS: [],
  KEEP_CONTENT: true,
};
/**
 * Runs untrusted feed HTML through DOMPurify, keeping only the tags and
 * attributes whitelisted in HTML_SANITIZE_CONFIG.
 * @param {string} html - Raw HTML content from the feed
 * @returns {string} Markup safe to render with v-html, or '' for
 *          non-string input or if sanitization fails
 */
export const sanitizeHtml = (html) => {
  if (typeof html !== 'string' || html === '') return '';
  try {
    return DOMPurify.sanitize(html, HTML_SANITIZE_CONFIG);
  } catch (error) {
    ErrorHandler('HTML sanitization error', error);
    return '';
  }
};
/**
 * Validates a URL from an RSS feed, accepting only http/https schemes.
 * Rejects javascript:, data: and every other protocol outright.
 * @param {string} url - The URL to validate
 * @returns {string|null} The trimmed URL if valid, otherwise null
 */
export const sanitizeUrl = (url) => {
  if (typeof url !== 'string' || !url) return null;
  const candidate = url.trim();
  if (candidate === '') return null;
  let parsed;
  try {
    parsed = new URL(candidate);
  } catch (error) {
    // Not an absolute, parseable URL
    return null;
  }
  return ['http:', 'https:'].includes(parsed.protocol) ? candidate : null;
};
/**
 * Strips every HTML tag from a string, keeping only its text content.
 * Intended for titles, author names, and other plain-text feed fields.
 * @param {string} text - The (possibly HTML-laden) text to clean
 * @returns {string} Plain text, or '' for non-string input or on failure
 */
export const sanitizeText = (text) => {
  if (typeof text !== 'string' || text === '') return '';
  try {
    return DOMPurify.sanitize(text, TEXT_SANITIZE_CONFIG);
  } catch (error) {
    ErrorHandler('Text sanitization error', error);
    return '';
  }
};
/**
 * Applies the appropriate sanitizer to every field of one RSS item:
 * text fields are stripped of markup, the body is HTML-purified, and
 * URLs are validated to http/https.
 * @param {object} item - Raw RSS item (title, description/content, link, ...)
 * @returns {object} Sanitized item, or {} for invalid input / on failure
 */
export const sanitizeRssItem = (item) => {
  if (!item || typeof item !== 'object') return {};
  try {
    // Different feed formats expose the body, date and image under
    // different keys — take the first one present
    const body = item.description || item.content || item.contentSnippet || '';
    const published = item.pubDate || item.isoDate || '';
    const thumb = item.thumbnail || item.enclosure?.url;
    return {
      title: sanitizeText(item.title || ''),
      description: sanitizeHtml(body),
      link: sanitizeUrl(item.link),
      author: sanitizeText(item.author || ''),
      pubDate: sanitizeText(published),
      thumbnail: sanitizeUrl(thumb),
    };
  } catch (error) {
    ErrorHandler('RSS item sanitization error', error);
    return {};
  }
};
/**
 * Sanitizes RSS feed-level metadata. Text fields are stripped of all
 * markup; link and image must be valid http/https URLs or become null.
 * @param {object} meta - Feed metadata (title, link, description, author, image)
 * @returns {object} Sanitized metadata, or {} for invalid input / on failure
 */
export const sanitizeRssMeta = (meta) => {
  if (!meta || typeof meta !== 'object') return {};
  try {
    // Helper: treat missing/null fields as empty strings before stripping
    const text = (value) => sanitizeText(value || '');
    return {
      title: text(meta.title),
      description: text(meta.description),
      link: sanitizeUrl(meta.link),
      author: text(meta.author),
      image: sanitizeUrl(meta.image),
    };
  } catch (error) {
    ErrorHandler('RSS metadata sanitization error', error);
    return {};
  }
};

View file

@ -325,6 +325,8 @@ module.exports = {
mode: 'production',
manifestCrossorigin: 'use-credentials',
iconPaths: {
faviconSVG: null,
appleTouchIcon: null,
favicon64: './web-icons/favicon-64x64.png',
favicon32: './web-icons/favicon-32x32.png',
favicon16: './web-icons/favicon-16x16.png',

View file

@ -0,0 +1,726 @@
import {
describe, it, expect, beforeEach, afterEach, vi,
} from 'vitest';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import Item from '@/components/LinkItems/Item.vue';
import router from '@/router';
// Mock modules with side effects or heavy dependencies so Item.vue mounts
// in isolation: network (axios), navigation (router), error reporting, and
// a raw SVG import that has no loader configured under vitest
vi.mock('axios', () => ({ default: { get: vi.fn(() => Promise.resolve({ data: {} })) } }));
vi.mock('@/router', () => ({ default: { push: vi.fn() } }));
vi.mock('@/utils/ErrorHandler', () => ({ default: vi.fn() }));
vi.mock('@/assets/interface-icons/interactive-editor-edit-mode.svg', () => ({
  default: { template: '<span />' },
}));

// Shared Vue constructor with Vuex installed and no-op registrations for
// the custom directives, so templates referencing them compile cleanly
const localVue = createLocalVue();
localVue.use(Vuex);
localVue.directive('tooltip', {});
localVue.directive('longPress', {});
localVue.directive('clickOutside', {});
/**
 * Factory for mounting Item.vue under test. Builds a fresh Vuex store per
 * call so tests cannot leak state into each other, and shallow-mounts the
 * component with all child components stubbed.
 * Accepted overrides: item (item prop), props (extra propsData),
 * appConfig (store config), storeState (raw state), mutations (extra
 * mutation spies), iconSize / parentSection (getter return values),
 * mocks (extra instance globals).
 * @returns {{wrapper: object, store: object, mutations: object}} The mounted
 *          wrapper, the store, and the mutation spies for asserting commits
 */
function mountItem(overrides = {}) {
  // Sensible default item; tests override to probe specific fields
  const item = overrides.item || {
    id: 'test-1',
    title: 'Test Item',
    description: 'A test description',
    url: 'https://example.com',
    icon: 'fas fa-rocket',
  };
  // Mutation spies — returned to callers so commits can be asserted
  const mutations = {
    SET_MODAL_OPEN: vi.fn(),
    REMOVE_ITEM: vi.fn(),
    ...(overrides.mutations || {}),
  };
  const storeState = {
    editMode: false,
    config: { appConfig: overrides.appConfig || {} },
    ...(overrides.storeState || {}),
  };
  const store = new Vuex.Store({
    state: storeState,
    getters: {
      appConfig: (state) => state.config.appConfig,
      iconSize: () => overrides.iconSize || 'medium',
      getParentSectionOfItem: () => () => overrides.parentSection || { name: 'Default' },
    },
    mutations,
  });
  const wrapper = shallowMount(Item, {
    localVue,
    store,
    propsData: { item, ...(overrides.props || {}) },
    // Instance-level globals the component touches at runtime
    mocks: {
      $modal: { show: vi.fn(), hide: vi.fn() },
      $toasted: { show: vi.fn() },
      $t: (key) => key, // i18n stub: echo the translation key back
      ...(overrides.mocks || {}),
    },
    // Stub children so only Item.vue's own logic is exercised
    stubs: {
      Icon: true,
      ItemOpenMethodIcon: true,
      StatusIndicator: true,
      ContextMenu: true,
      MoveItemTo: true,
      EditItem: true,
      EditModeIcon: true,
    },
  });
  return { wrapper, store, mutations };
}
// Spies shared across tests, re-created before every case
let openSpy;
let clipboardSpy;

beforeEach(() => {
  // Stub window.open so link-launch tests don't open real windows
  openSpy = vi.spyOn(window, 'open').mockImplementation(() => {});
  // navigator.clipboard is normally read-only — redefine it with a spy
  clipboardSpy = vi.fn(() => Promise.resolve());
  Object.defineProperty(navigator, 'clipboard', {
    value: { writeText: clipboardSpy },
    writable: true,
    configurable: true,
  });
  // localStorage here appears to be a mock (mockReset implies vi.fn),
  // presumably provided by global test setup — clear call history
  localStorage.getItem.mockReset();
  localStorage.setItem.mockReset();
});

afterEach(() => {
  vi.restoreAllMocks();
});
// itemIcon resolution order: explicit item.icon → appConfig.defaultIcon → falsy
describe('Computed: itemIcon', () => {
  it('returns item.icon when set', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', icon: 'my-icon',
      },
    });
    expect(wrapper.vm.itemIcon).toBe('my-icon');
  });
  it('falls back to appConfig.defaultIcon', () => {
    const { wrapper } = mountItem({
      item: { id: '1', title: 'X', url: '#' },
      appConfig: { defaultIcon: 'default-icon' },
    });
    expect(wrapper.vm.itemIcon).toBe('default-icon');
  });
  it('returns falsy when neither is set', () => {
    const { wrapper } = mountItem({ item: { id: '1', title: 'X', url: '#' } });
    expect(wrapper.vm.itemIcon).toBeFalsy();
  });
});
// size resolution order: valid itemSize prop → store iconSize getter
// (the factory's getter defaults to 'medium' when no override is given)
describe('Computed: size', () => {
  it('returns valid itemSize prop', () => {
    const { wrapper } = mountItem({ props: { itemSize: 'large' } });
    expect(wrapper.vm.size).toBe('large');
  });
  it('ignores invalid itemSize and falls back to store', () => {
    const { wrapper } = mountItem({ props: { itemSize: 'bogus' }, iconSize: 'small' });
    expect(wrapper.vm.size).toBe('small');
  });
  it('falls back to store iconSize getter', () => {
    const { wrapper } = mountItem({ iconSize: 'small' });
    expect(wrapper.vm.size).toBe('small');
  });
  it('defaults to medium', () => {
    const { wrapper } = mountItem();
    expect(wrapper.vm.size).toBe('medium');
  });
});
describe('Computed: makeColumnCount', () => {
it.each([
[300, 1], [400, 2], [600, 3], [800, 4], [1100, 5], [1500, 0],
])('sectionWidth %i → %i columns', (width, expected) => {
const { wrapper } = mountItem({ props: { sectionWidth: width } });
expect(wrapper.vm.makeColumnCount).toBe(expected);
});
it('uses sectionDisplayData.itemCountX when set', () => {
const { wrapper } = mountItem({
props: { sectionWidth: 300, sectionDisplayData: { itemCountX: 7 } },
});
expect(wrapper.vm.makeColumnCount).toBe(7);
});
});
describe('Computed: makeClassList', () => {
it('includes size-{size}', () => {
const { wrapper } = mountItem({ props: { itemSize: 'small' } });
expect(wrapper.vm.makeClassList).toContain('size-small');
});
it('includes "short" when no icon', () => {
const { wrapper } = mountItem({ item: { id: '1', title: 'X', url: '#' } });
expect(wrapper.vm.makeClassList).toContain('short');
});
it('includes "add-new" when isAddNew', () => {
const { wrapper } = mountItem({ props: { isAddNew: true } });
expect(wrapper.vm.makeClassList).toContain('add-new');
});
it('includes "is-edit-mode" when editMode is true', () => {
const { wrapper } = mountItem({ storeState: { editMode: true, config: { appConfig: {} } } });
expect(wrapper.vm.makeClassList).toContain('is-edit-mode');
});
});
describe('Computed: enableStatusCheck', () => {
it('item.statusCheck boolean overrides appConfig', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', statusCheck: false,
},
appConfig: { statusCheck: true },
});
expect(wrapper.vm.enableStatusCheck).toBe(false);
});
it('falls back to appConfig.statusCheck', () => {
const { wrapper } = mountItem({
item: { id: '1', title: 'X', url: '#' },
appConfig: { statusCheck: true },
});
expect(wrapper.vm.enableStatusCheck).toBe(true);
});
it('defaults to false', () => {
const { wrapper } = mountItem({ item: { id: '1', title: 'X', url: '#' } });
expect(wrapper.vm.enableStatusCheck).toBe(false);
});
});
describe('Computed: statusCheckInterval', () => {
it('reads from item', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', statusCheckInterval: 30,
},
});
expect(wrapper.vm.statusCheckInterval).toBe(30);
});
it('falls back to appConfig', () => {
const { wrapper } = mountItem({
item: { id: '1', title: 'X', url: '#' },
appConfig: { statusCheckInterval: 15 },
});
expect(wrapper.vm.statusCheckInterval).toBe(15);
});
it('clamps to max 60', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', statusCheckInterval: 120,
},
});
expect(wrapper.vm.statusCheckInterval).toBe(60);
});
it('clamps values less than 1 to 0', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', statusCheckInterval: 0.5,
},
});
expect(wrapper.vm.statusCheckInterval).toBe(0);
});
});
describe('Computed: accumulatedTarget', () => {
it('uses item.target first', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', target: 'workspace',
},
});
expect(wrapper.vm.accumulatedTarget).toBe('workspace');
});
it('falls back to appConfig.defaultOpeningMethod', () => {
const { wrapper } = mountItem({
item: { id: '1', title: 'X', url: '#' },
appConfig: { defaultOpeningMethod: 'sametab' },
});
expect(wrapper.vm.accumulatedTarget).toBe('sametab');
});
it('defaults to "newtab"', () => {
const { wrapper } = mountItem({ item: { id: '1', title: 'X', url: '#' } });
expect(wrapper.vm.accumulatedTarget).toBe('newtab');
});
});
describe('Computed: anchorTarget', () => {
it.each([
['sametab', '_self'],
['newtab', '_blank'],
['parent', '_parent'],
['top', '_top'],
['modal', undefined],
])('target "%s" → %s', (target, expected) => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', target,
},
});
expect(wrapper.vm.anchorTarget).toBe(expected);
});
it('returns _self in edit mode', () => {
const { wrapper } = mountItem({
storeState: { editMode: true, config: { appConfig: {} } },
});
expect(wrapper.vm.anchorTarget).toBe('_self');
});
});
describe('Computed: hyperLinkHref', () => {
it('returns "#" in edit mode', () => {
const { wrapper } = mountItem({
storeState: { editMode: true, config: { appConfig: {} } },
});
expect(wrapper.vm.hyperLinkHref).toBe('#');
});
it.each(['modal', 'workspace', 'clipboard'])('returns "#" for %s target', (target) => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: 'https://x.com', target,
},
});
expect(wrapper.vm.hyperLinkHref).toBe('#');
});
it('returns URL for normal targets', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: 'https://x.com', target: 'newtab',
},
});
expect(wrapper.vm.hyperLinkHref).toBe('https://x.com');
});
});
describe('Computed: unicodeOpeningIcon', () => {
it.each([
['newtab', '"\\f360"'],
['sametab', '"\\f24d"'],
['parent', '"\\f3bf"'],
['top', '"\\f102"'],
['modal', '"\\f2d0"'],
['workspace', '"\\f0b1"'],
['clipboard', '"\\f0ea"'],
])('target "%s" → correct icon', (target, expected) => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', target,
},
});
expect(wrapper.vm.unicodeOpeningIcon).toBe(expected);
});
it('returns default icon for unknown target', () => {
const { wrapper } = mountItem({
item: {
id: '1', title: 'X', url: '#', target: 'unknown',
},
});
expect(wrapper.vm.unicodeOpeningIcon).toBe('"\\f054"');
});
});
// Unit tests for the shortUrl template filter (hostname extraction).
describe('Filter: shortUrl', () => {
  const toShort = (input) => Item.filters.shortUrl(input);
  it('extracts hostname from URL', () => {
    expect(toShort('https://www.example.com/path?q=1')).toBe('www.example.com');
  });
  it('handles IP addresses', () => {
    expect(toShort('192.168.1.1')).toBe('192.168.1.1');
  });
  it('returns empty string for falsy input', () => {
    // null, undefined and '' must all normalise to an empty string
    [null, undefined, ''].forEach((input) => {
      expect(toShort(input)).toBe('');
    });
  });
  it('returns empty string for invalid input', () => {
    expect(toShort('not-a-url')).toBe('');
  });
});
// getTooltipOptions: builds the v-tooltip config from item metadata.
describe('Methods: getTooltipOptions', () => {
  it('returns empty object when no description or provider', () => {
    const { wrapper } = mountItem({ item: { id: '1', title: 'X', url: '#' } });
    expect(wrapper.vm.getTooltipOptions()).toEqual({});
  });
  it('includes description and provider in content', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', description: 'Desc', provider: 'Prov',
      },
    });
    const { content } = wrapper.vm.getTooltipOptions();
    expect(content).toContain('Desc');
    expect(content).toContain('Prov');
  });
  it('includes hotkey in content', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', description: 'D', hotkey: 3,
      },
    });
    const { content } = wrapper.vm.getTooltipOptions();
    // Hotkey is rendered quoted inside the tooltip text
    expect(content).toContain("'3'");
  });
  it('shows edit text in edit mode', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', description: 'D',
      },
      storeState: { editMode: true, config: { appConfig: {} } },
    });
    // $t is mocked to echo the key, so the i18n key itself is asserted
    expect(wrapper.vm.getTooltipOptions().content).toBe(
      'interactive-editor.edit-section.edit-tooltip',
    );
  });
  it('placement is "left" when statusResponse exists', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', description: 'D',
      },
    });
    wrapper.vm.statusResponse = { message: 'ok' };
    expect(wrapper.vm.getTooltipOptions().placement).toBe('left');
  });
});
// Edit-item modal lifecycle: open/close must keep local state, the $modal
// plugin and the store's SET_MODAL_OPEN mutation in sync.
describe('Methods: openItemSettings / closeEditMenu', () => {
  it('openItemSettings sets editMenuOpen, shows modal, commits SET_MODAL_OPEN', () => {
    const { wrapper, mutations } = mountItem();
    wrapper.vm.openItemSettings();
    expect(wrapper.vm.editMenuOpen).toBe(true);
    expect(wrapper.vm.$modal.show).toHaveBeenCalledWith('EDIT_ITEM');
    expect(mutations.SET_MODAL_OPEN).toHaveBeenCalledWith(expect.anything(), true);
  });
  it('closeEditMenu clears editMenuOpen, hides modal, commits SET_MODAL_OPEN(false)', () => {
    const { wrapper, mutations } = mountItem();
    wrapper.vm.editMenuOpen = true;
    wrapper.vm.closeEditMenu();
    expect(wrapper.vm.editMenuOpen).toBe(false);
    expect(wrapper.vm.$modal.hide).toHaveBeenCalledWith('EDIT_ITEM');
    expect(mutations.SET_MODAL_OPEN).toHaveBeenCalledWith(expect.anything(), false);
  });
});
// Deleting an item must commit REMOVE_ITEM with the item's id and the
// name of its parent section (resolved via the getParentSectionOfItem getter).
describe('Methods: openDeleteItem', () => {
  it('commits REMOVE_ITEM with correct payload', () => {
    const mounted = mountItem({ parentSection: { name: 'MySection' } });
    mounted.wrapper.vm.openDeleteItem();
    const expectedPayload = { itemId: 'test-1', sectionName: 'MySection' };
    expect(mounted.mutations.REMOVE_ITEM)
      .toHaveBeenCalledWith(expect.anything(), expectedPayload);
  });
});
// itemClicked: the main click handler — behaviour depends on edit mode,
// modifier keys and the item's configured opening method.
describe('Methods: itemClicked', () => {
  // Minimal click-event factory; pass overrides for modifier keys etc.
  const event = (extra = {}) => ({
    preventDefault: vi.fn(), ctrlKey: false, altKey: false, ...extra,
  });
  it('in edit mode: preventDefault + openItemSettings', () => {
    const { wrapper } = mountItem({ storeState: { editMode: true, config: { appConfig: {} } } });
    const e = event();
    const spy = vi.spyOn(wrapper.vm, 'openItemSettings');
    wrapper.vm.itemClicked(e);
    expect(e.preventDefault).toHaveBeenCalled();
    expect(spy).toHaveBeenCalled();
  });
  it('ctrl key: opens in new tab', () => {
    const { wrapper } = mountItem();
    wrapper.vm.itemClicked(event({ ctrlKey: true }));
    expect(openSpy).toHaveBeenCalledWith('https://example.com', '_blank');
  });
  it('alt key: emits triggerModal', () => {
    const { wrapper } = mountItem();
    wrapper.vm.itemClicked(event({ altKey: true }));
    expect(wrapper.emitted().triggerModal).toBeTruthy();
  });
  it('target modal: emits triggerModal', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', target: 'modal',
      },
    });
    wrapper.vm.itemClicked(event());
    expect(wrapper.emitted().triggerModal).toBeTruthy();
  });
  it('target workspace: calls router.push', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', target: 'workspace',
      },
    });
    wrapper.vm.itemClicked(event());
    expect(router.push).toHaveBeenCalledWith({ name: 'workspace', query: { url: 'https://x.com' } });
  });
  it('target clipboard: calls copyToClipboard', () => {
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', target: 'clipboard',
      },
    });
    const spy = vi.spyOn(wrapper.vm, 'copyToClipboard');
    wrapper.vm.itemClicked(event());
    expect(spy).toHaveBeenCalledWith('https://x.com');
  });
  it('always emits itemClicked', () => {
    const { wrapper } = mountItem();
    wrapper.vm.itemClicked(event());
    expect(wrapper.emitted().itemClicked).toBeTruthy();
  });
  it('skips smart-sort when disableSmartSort is set', () => {
    const { wrapper } = mountItem({ appConfig: { disableSmartSort: true } });
    const spy = vi.spyOn(wrapper.vm, 'incrementMostUsedCount');
    wrapper.vm.itemClicked(event());
    expect(spy).not.toHaveBeenCalled();
  });
});
// launchItem: programmatic launch (used by the context menu) — dispatches
// on the opening method and falls back to the item's own URL.
describe('Methods: launchItem', () => {
  it.each([
    ['newtab', '_blank'],
    ['sametab', '_self'],
  ])('%s calls window.open with %s', (method, target) => {
    const { wrapper } = mountItem();
    wrapper.vm.launchItem(method, 'https://test.com');
    expect(openSpy).toHaveBeenCalledWith('https://test.com', target);
  });
  it('modal emits triggerModal', () => {
    const { wrapper } = mountItem();
    wrapper.vm.launchItem('modal', 'https://test.com');
    expect(wrapper.emitted().triggerModal[0]).toEqual(['https://test.com']);
  });
  it('workspace calls router.push', () => {
    const { wrapper } = mountItem();
    wrapper.vm.launchItem('workspace', 'https://test.com');
    expect(router.push).toHaveBeenCalledWith({ name: 'workspace', query: { url: 'https://test.com' } });
  });
  it('clipboard calls copyToClipboard', () => {
    const { wrapper } = mountItem();
    const spy = vi.spyOn(wrapper.vm, 'copyToClipboard');
    wrapper.vm.launchItem('clipboard', 'https://test.com');
    expect(spy).toHaveBeenCalledWith('https://test.com');
  });
  it('closes context menu', () => {
    const { wrapper } = mountItem();
    wrapper.vm.contextMenuOpen = true;
    wrapper.vm.launchItem('newtab');
    expect(wrapper.vm.contextMenuOpen).toBe(false);
  });
  it('falls back to item.url when no link arg', () => {
    const { wrapper } = mountItem({
      item: { id: '1', title: 'X', url: 'https://fallback.com' },
    });
    wrapper.vm.launchItem('newtab');
    expect(openSpy).toHaveBeenCalledWith('https://fallback.com', '_blank');
  });
});
// Right-click context menu: open stores the click position, close resets state.
describe('Methods: openContextMenu / closeContextMenu', () => {
  it('toggles contextMenuOpen and sets position', () => {
    const { wrapper } = mountItem();
    const fakeClick = { clientX: 100, clientY: 200 };
    wrapper.vm.openContextMenu(fakeClick);
    expect(wrapper.vm.contextMenuOpen).toBe(true);
    // Menu position is the click coordinate offset by the page scroll
    expect(wrapper.vm.contextPos.posX).toBe(fakeClick.clientX + window.pageXOffset);
    expect(wrapper.vm.contextPos.posY).toBe(fakeClick.clientY + window.pageYOffset);
  });
  it('closeContextMenu sets false', () => {
    const { wrapper } = mountItem();
    wrapper.vm.contextMenuOpen = true;
    wrapper.vm.closeContextMenu();
    expect(wrapper.vm.contextMenuOpen).toBe(false);
  });
});
// copyToClipboard: happy path uses the Clipboard API + success toast; when
// the API is unavailable an error is logged and an error toast shown.
describe('Methods: copyToClipboard', () => {
  it('calls navigator.clipboard.writeText and shows toast', () => {
    const { wrapper } = mountItem();
    wrapper.vm.copyToClipboard('hello');
    expect(clipboardSpy).toHaveBeenCalledWith('hello');
    expect(wrapper.vm.$toasted.show).toHaveBeenCalled();
  });
  it('shows error when clipboard unavailable', async () => {
    const ErrorHandler = (await import('@/utils/ErrorHandler')).default;
    // Simulate an environment with no Clipboard API
    Object.defineProperty(navigator, 'clipboard', {
      value: undefined, writable: true, configurable: true,
    });
    const { wrapper } = mountItem();
    wrapper.vm.copyToClipboard('hello');
    expect(ErrorHandler).toHaveBeenCalled();
    expect(wrapper.vm.$toasted.show).toHaveBeenCalledWith(
      'Unable to copy, see log',
      expect.objectContaining({ className: 'toast-error' }),
    );
  });
});
// Smart-sort bookkeeping persisted to localStorage: a most-used counter and
// a last-used timestamp per item id.
describe('Methods: incrementMostUsedCount / incrementLastUsedCount', () => {
  it('increments existing count', () => {
    localStorage.getItem.mockReturnValue(JSON.stringify({ 'item-1': 5 }));
    const { wrapper } = mountItem();
    wrapper.vm.incrementMostUsedCount('item-1');
    const saved = JSON.parse(localStorage.setItem.mock.calls[0][1]);
    expect(saved['item-1']).toBe(6);
  });
  it('initializes new items to 1', () => {
    localStorage.getItem.mockReturnValue('{}');
    const { wrapper } = mountItem();
    wrapper.vm.incrementMostUsedCount('new-item');
    const saved = JSON.parse(localStorage.setItem.mock.calls[0][1]);
    expect(saved['new-item']).toBe(1);
  });
  it('writes last-used timestamp', () => {
    localStorage.getItem.mockReturnValue('{}');
    const { wrapper } = mountItem();
    const before = Date.now();
    wrapper.vm.incrementLastUsedCount('item-1');
    const saved = JSON.parse(localStorage.setItem.mock.calls[0][1]);
    expect(saved['item-1']).toBeGreaterThanOrEqual(before);
  });
});
// mounted: status checking kicks off on mount when enabled, and a polling
// interval is registered when statusCheckInterval > 0.
describe('Lifecycle: mounted', () => {
  it('calls checkWebsiteStatus when enableStatusCheck is true', () => {
    // checkWebsiteStatus comes from the component's first mixin
    const spy = vi.spyOn(Item.mixins[0].methods, 'checkWebsiteStatus');
    mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', statusCheck: true,
      },
    });
    expect(spy).toHaveBeenCalled();
    spy.mockRestore();
  });
  it('sets up interval when statusCheckInterval > 0', () => {
    vi.useFakeTimers();
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', statusCheck: true, statusCheckInterval: 5,
      },
    });
    expect(wrapper.vm.intervalId).toBeDefined();
    vi.useRealTimers();
  });
  it('does nothing when statusCheck disabled', () => {
    const spy = vi.spyOn(Item.mixins[0].methods, 'checkWebsiteStatus');
    mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', statusCheck: false,
      },
    });
    expect(spy).not.toHaveBeenCalled();
    spy.mockRestore();
  });
});
// beforeDestroy: the polling interval must be cleaned up to avoid leaks.
describe('Lifecycle: beforeDestroy', () => {
  it('clears interval if intervalId exists', () => {
    vi.useFakeTimers();
    const clearSpy = vi.spyOn(global, 'clearInterval');
    const { wrapper } = mountItem({
      item: {
        id: '1', title: 'X', url: 'https://x.com', statusCheck: true, statusCheckInterval: 5,
      },
    });
    const { intervalId } = wrapper.vm;
    wrapper.destroy();
    expect(clearSpy).toHaveBeenCalledWith(intervalId);
    vi.useRealTimers();
  });
});
// Rendered-output checks: title/description text, wrapper classes, and
// conditional child components (status indicator, edit-mode icon).
describe('Template rendering', () => {
  it('renders item title and description', () => {
    const { wrapper } = mountItem();
    expect(wrapper.find('.text').text()).toBe('Test Item');
    expect(wrapper.find('.description').text()).toBe('A test description');
  });
  it('has correct wrapper classes', () => {
    const { wrapper } = mountItem({ props: { itemSize: 'large', sectionWidth: 800 } });
    const div = wrapper.find('.item-wrapper');
    expect(div.classes()).toContain('wrap-size-large');
    expect(div.classes()).toContain('span-4');
  });
  it('shows StatusIndicator only when enableStatusCheck', () => {
    const { wrapper: off } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', statusCheck: false,
      },
    });
    expect(off.find('statusindicator-stub').exists()).toBe(false);
    const { wrapper: on } = mountItem({
      item: {
        id: '1', title: 'X', url: '#', statusCheck: true,
      },
    });
    expect(on.find('statusindicator-stub').exists()).toBe(true);
  });
  it('shows EditModeIcon only in edit mode', () => {
    const { wrapper: normal } = mountItem();
    expect(normal.find('editmodeicon-stub').exists()).toBe(false);
    const { wrapper: editing } = mountItem({
      storeState: { editMode: true, config: { appConfig: {} } },
    });
    expect(editing.find('editmodeicon-stub').exists()).toBe(true);
  });
  it('sets correct id on anchor', () => {
    const { wrapper } = mountItem();
    expect(wrapper.find('a.item').attributes('id')).toBe('link-test-1');
  });
});

65
tests/docker-smoke-test.sh Executable file
View file

@ -0,0 +1,65 @@
#!/bin/sh
# Docker smoke test: builds the Dashy image, starts a container on a random
# host port, waits for it to come up, then verifies key endpoints respond
# with expected content. Exits non-zero (with container logs) on any failure.
set -e
IMAGE="dashy-smoke-test"
CONTAINER="dashy-smoke-$$"
# Always remove the container and image, even on failure.
cleanup() {
  echo "Cleaning up..."
  docker rm -f "$CONTAINER" 2>/dev/null || true
  docker rmi -f "$IMAGE" 2>/dev/null || true
}
trap cleanup EXIT
# Build the Docker image
echo "Building Docker image..."
docker build -t "$IMAGE" .
# Run container with a random host port mapped to 8080
echo "Starting container..."
docker run -d --name "$CONTAINER" -P "$IMAGE"
# Discover the host port Docker assigned to container port 8080.
# `docker port` may print multiple lines (IPv4 and IPv6 bindings); take the
# first and strip everything up to the LAST colon, so IPv6 output such as
# ":::32768" parses correctly (cut -d: -f2 would yield an empty string).
PORT=$(docker port "$CONTAINER" 8080/tcp | head -n 1 | sed 's/.*://')
if [ -z "$PORT" ]; then
  echo "FAIL: could not determine mapped host port"
  docker logs "$CONTAINER"
  exit 1
fi
echo "Container exposed on port $PORT"
# Wait for the container to become healthy (poll every 2s, max ~90s)
echo "Waiting for container to be ready..."
for i in $(seq 1 45); do
  if curl -sf "http://localhost:$PORT" > /dev/null 2>&1; then
    echo "Container is ready (after ~$((i * 2))s)"
    break
  fi
  if [ "$i" -eq 45 ]; then
    echo "FAIL: container did not become ready within 90s"
    echo "--- Container logs ---"
    docker logs "$CONTAINER"
    exit 1
  fi
  sleep 2
done
# Print a failure message plus container logs, then abort.
fail() {
  echo "FAIL: $1"
  echo "--- Container logs ---"
  docker logs "$CONTAINER"
  exit 1
}
# check URL [EXPECTED]: fetch URL, require HTTP success; if EXPECTED is
# given, the response body must also contain it.
check() {
  URL="$1"
  EXPECTED="$2"
  RESP=$(curl -sf "$URL") || fail "$URL returned non-200"
  if [ -n "$EXPECTED" ]; then
    echo "$RESP" | grep -q "$EXPECTED" || fail "$URL missing '$EXPECTED'"
  fi
  echo "OK: $URL"
}
# Check key endpoints
check "http://localhost:$PORT/" "<title>"
check "http://localhost:$PORT/conf.yml" "pageInfo"
check "http://localhost:$PORT/system-info" "meta"
echo "All smoke tests passed"

23
tests/fixtures/valid-config.yml vendored Normal file
View file

@ -0,0 +1,23 @@
---
# Valid test configuration for Dashy
pageInfo:
title: Test Dashboard
description: A test configuration
appConfig:
theme: default
language: en
sections:
- name: Test Section
icon: fas fa-rocket
items:
- title: Test Item 1
description: First test item
url: https://example.com
icon: favicon
- title: Test Item 2
description: Second test item
url: https://example.org
icon: fas fa-star

56
tests/setup.js Normal file
View file

@ -0,0 +1,56 @@
/**
 * Global test setup file
 * This file is run before all tests to configure the testing environment
 * Note: `vi` is available without an import because vitest.config.js sets
 * `globals: true` — assumed; confirm if this file is reused elsewhere.
 */
import Vue from 'vue';
// Suppress Vue warnings in tests
Vue.config.silent = true;
// Suppress noisy console methods in test output
// Vue dev mode prints info messages (devtools, production tips) that clutter results
global.console = {
  ...console,
  info: vi.fn(),
  // Uncomment to suppress console.log in tests
  // log: vi.fn(),
  // Uncomment to suppress console.debug in tests
  // debug: vi.fn(),
  // Keep warnings and errors visible
  warn: console.warn,
  error: console.error,
};
// Mock localStorage for tests
// Tests reset and inspect these vi.fn() mocks directly (mockReset, mock.calls).
const localStorageMock = {
  getItem: vi.fn(),
  setItem: vi.fn(),
  removeItem: vi.fn(),
  clear: vi.fn(),
};
global.localStorage = localStorageMock;
// Mock sessionStorage for tests
const sessionStorageMock = {
  getItem: vi.fn(),
  setItem: vi.fn(),
  removeItem: vi.fn(),
  clear: vi.fn(),
};
global.sessionStorage = sessionStorageMock;
// Mock window.matchMedia (for responsive design tests)
// Always reports non-matching; both legacy (addListener) and modern
// (addEventListener) callback registration styles are stubbed.
Object.defineProperty(window, 'matchMedia', {
  writable: true,
  value: vi.fn().mockImplementation(query => ({
    matches: false,
    media: query,
    onchange: null,
    addListener: vi.fn(),
    removeListener: vi.fn(),
    addEventListener: vi.fn(),
    removeEventListener: vi.fn(),
    dispatchEvent: vi.fn(),
  })),
});

View file

@ -0,0 +1,128 @@
import { describe, it, expect } from 'vitest';
import {
makePageName,
makePageSlug,
formatConfigPath,
componentVisibility,
getCustomKeyShortcuts,
} from '@/utils/ConfigHelpers';
// makePageName: normalises a display name into a URL-safe slug segment.
describe('ConfigHelpers - makePageName', () => {
  it('converts page name to lowercase', () => {
    expect(makePageName('My Page')).toBe('my-page');
  });
  it('replaces spaces with hyphens', () => {
    expect(makePageName('Multiple Word Page')).toBe('multiple-word-page');
  });
  it('removes .yml extension', () => {
    expect(makePageName('config.yml')).toBe('config');
  });
  it('removes special characters', () => {
    expect(makePageName('Page!@#$Name')).toBe('pagename');
  });
  // Missing/empty names all fall back to the 'unnamed-page' placeholder.
  it('handles undefined input', () => {
    expect(makePageName(undefined)).toBe('unnamed-page');
  });
  it('handles null input', () => {
    expect(makePageName(null)).toBe('unnamed-page');
  });
  it('handles empty string', () => {
    expect(makePageName('')).toBe('unnamed-page');
  });
});
// makePageSlug: combines a route prefix and a normalised page name.
describe('ConfigHelpers - makePageSlug', () => {
  it('creates correct slug format', () => {
    expect(makePageSlug('My Page', 'home')).toBe('/home/my-page');
  });
  it('handles page names with special chars', () => {
    expect(makePageSlug('Config! Page', 'admin')).toBe('/admin/config-page');
  });
});
// formatConfigPath: absolute URLs pass through, relative paths get a
// leading slash so they resolve against the site root.
describe('ConfigHelpers - formatConfigPath', () => {
  it('leaves http URLs unchanged', () => {
    const url = 'https://example.com/config.yml';
    expect(formatConfigPath(url)).toBe(url);
  });
  it('adds leading slash to relative paths', () => {
    expect(formatConfigPath('config.yml')).toBe('/config.yml');
  });
  it('keeps absolute paths unchanged', () => {
    expect(formatConfigPath('/config.yml')).toBe('/config.yml');
  });
});
// componentVisibility: maps appConfig.hideComponents flags to per-component
// booleans, defaulting everything to visible.
describe('ConfigHelpers - componentVisibility', () => {
  it('returns all visible by default when no config', () => {
    const result = componentVisibility({});
    expect(result.pageTitle).toBe(true);
    expect(result.navigation).toBe(true);
    expect(result.searchBar).toBe(true);
    expect(result.settings).toBe(true);
    expect(result.footer).toBe(true);
  });
  it('hides components based on config', () => {
    const appConfig = {
      hideComponents: {
        hideHeading: true,
        hideNav: true,
      },
    };
    const result = componentVisibility(appConfig);
    expect(result.pageTitle).toBe(false);
    expect(result.navigation).toBe(false);
    expect(result.searchBar).toBe(true);
  });
  it('handles partial config correctly', () => {
    const appConfig = {
      hideComponents: {
        hideFooter: true,
      },
    };
    const result = componentVisibility(appConfig);
    expect(result.footer).toBe(false);
    expect(result.pageTitle).toBe(true);
  });
});
// getCustomKeyShortcuts: collects {hotkey, url} pairs across all sections,
// skipping items without a hotkey.
describe('ConfigHelpers - getCustomKeyShortcuts', () => {
  it('extracts hotkeys from sections', () => {
    const sections = [
      {
        items: [
          { hotkey: 1, url: 'https://example.com' },
          { url: 'https://example.org' },
        ],
      },
    ];
    const result = getCustomKeyShortcuts(sections);
    expect(result).toHaveLength(1);
    expect(result[0]).toEqual({ hotkey: 1, url: 'https://example.com' });
  });
  it('returns empty array when no hotkeys', () => {
    const sections = [{ items: [{ url: 'https://example.com' }] }];
    expect(getCustomKeyShortcuts(sections)).toEqual([]);
  });
  it('flattens hotkeys from multiple sections', () => {
    const sections = [
      { items: [{ hotkey: 1, url: 'https://a.com' }] },
      { items: [{ hotkey: 2, url: 'https://b.com' }] },
    ];
    const result = getCustomKeyShortcuts(sections);
    expect(result).toHaveLength(2);
  });
});

View file

@ -0,0 +1,75 @@
import {
describe, it, expect, beforeEach,
} from 'vitest';
import path from 'path';
// Validates test configs against the project's JSON schema with Ajv.
// Refactor: the Ajv/schema require() calls and validator construction were
// duplicated in every test — hoisted into a shared helper (a fresh Ajv per
// call so ajv.errors never leaks between tests). Also removes the unused
// `configValidator` path variable.
describe('Config Validator', () => {
  const Ajv = require('ajv');
  const schema = require('../../src/utils/ConfigSchema.json');
  // Run a config through the schema; returns validity plus any Ajv errors.
  const validate = (config) => {
    const ajv = new Ajv({ strict: false, allowUnionTypes: true, allErrors: true });
    return { valid: ajv.validate(schema, config), errors: ajv.errors };
  };
  beforeEach(() => {
    // Ensure no stale flag from a previous validator run influences tests
    delete process.env.VUE_APP_CONFIG_VALID;
  });
  it('validates a correct config file', () => {
    const validConfig = {
      pageInfo: { title: 'Test' },
      appConfig: {},
      sections: [{ name: 'Test', items: [{ title: 'Item', url: 'https://example.com' }] }],
    };
    expect(validate(validConfig).valid).toBe(true);
  });
  it('rejects config with invalid structure', () => {
    const invalidConfig = {
      pageInfo: { title: 'Test' },
      sections: 'not an array',
    };
    const { valid, errors } = validate(invalidConfig);
    expect(valid).toBe(false);
    expect(errors).toBeTruthy();
  });
  it('requires sections to be an array', () => {
    const config = {
      pageInfo: { title: 'Test' },
      sections: {},
    };
    expect(validate(config).valid).toBe(false);
  });
  it('allows items with just title', () => {
    const config = {
      pageInfo: { title: 'Test' },
      sections: [
        {
          name: 'Test Section',
          items: [{ title: 'Item', url: 'https://example.com' }],
        },
      ],
    };
    expect(validate(config).valid).toBe(true);
  });
});

View file

@ -0,0 +1,24 @@
import { describe, it, expect } from 'vitest';
// Tests for the global error/info handler utilities and their exports.
describe('ErrorHandler', () => {
  const loadModule = () => import('@/utils/ErrorHandler');
  it('exports InfoKeys constants', async () => {
    const { InfoKeys } = await loadModule();
    const expectedKeys = {
      AUTH: 'Authentication',
      CLOUD_BACKUP: 'Cloud Backup & Restore',
      EDITOR: 'Interactive Editor',
      RAW_EDITOR: 'Raw Config Editor',
      VISUAL: 'Layout & Styles',
    };
    Object.entries(expectedKeys).forEach(([key, label]) => {
      expect(InfoKeys[key]).toBe(label);
    });
  });
  it('exports handler functions', async () => {
    const handlers = await loadModule();
    ['ErrorHandler', 'InfoHandler', 'WarningInfoHandler'].forEach((name) => {
      expect(typeof handlers[name]).toBe('function');
    });
  });
  it('ErrorHandler can be called without throwing', async () => {
    const { ErrorHandler } = await loadModule();
    expect(() => ErrorHandler('Test error')).not.toThrow();
  });
});

View file

@ -0,0 +1,17 @@
import { describe, it, expect } from 'vitest';
import fs from 'fs';
import path from 'path';
// Sanity checks for the Node healthcheck service script on disk.
describe('Healthcheck Service', () => {
  const healthcheckPath = path.resolve(__dirname, '../../services/healthcheck.js');
  it('healthcheck script exists', () => {
    expect(fs.existsSync(healthcheckPath)).toBe(true);
  });
  it('healthcheck file has correct structure', () => {
    const source = fs.readFileSync(healthcheckPath, 'utf8');
    // The script should define a healthCheck routine that issues an HTTP request
    expect(source).toContain('healthCheck');
    expect(source).toContain('http.request');
  });
});

115
tests/unit/smoke.test.js Normal file
View file

@ -0,0 +1,115 @@
/**
* Smoke Tests
* Basic tests to verify that the testing infrastructure is working correctly
* and that core functionality is operational
*/
import { describe, it, expect } from 'vitest';
import fs from 'fs';
import path from 'path';
import yaml from 'js-yaml';
// Trivial assertions proving the runner, matchers and setup file all work.
describe('Smoke Tests - Testing Infrastructure', () => {
  it('should run a basic test', () => {
    expect(true).toBe(true);
  });
  it('should perform basic math', () => {
    expect(2 + 2).toBe(4);
  });
  it('should handle strings correctly', () => {
    expect('dashy').toMatch(/dash/);
  });
});
// Existence/shape checks on key repo files, resolved relative to this test.
describe('Smoke Tests - Project Files', () => {
  it('should have a package.json file', () => {
    const packageJsonPath = path.resolve(__dirname, '../../package.json');
    expect(fs.existsSync(packageJsonPath)).toBe(true);
  });
  it('should have a valid package.json', () => {
    const packageJsonPath = path.resolve(__dirname, '../../package.json');
    const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
    expect(packageJson.name).toBe('dashy');
    expect(packageJson.version).toBeDefined();
    expect(packageJson.license).toBe('MIT');
  });
  it('should have a server.js file', () => {
    const serverPath = path.resolve(__dirname, '../../server.js');
    expect(fs.existsSync(serverPath)).toBe(true);
  });
  it('should have src directory', () => {
    const srcPath = path.resolve(__dirname, '../../src');
    expect(fs.existsSync(srcPath)).toBe(true);
  });
});
// Parses the shared YAML fixture and asserts the expected Dashy config shape.
describe('Smoke Tests - Config Loading', () => {
  it('should parse a valid YAML config file', () => {
    const configPath = path.resolve(__dirname, '../fixtures/valid-config.yml');
    const configContent = fs.readFileSync(configPath, 'utf8');
    const config = yaml.load(configContent);
    expect(config).toBeDefined();
    expect(config.pageInfo).toBeDefined();
    expect(config.pageInfo.title).toBe('Test Dashboard');
  });
  it('should have required config structure', () => {
    const configPath = path.resolve(__dirname, '../fixtures/valid-config.yml');
    const configContent = fs.readFileSync(configPath, 'utf8');
    const config = yaml.load(configContent);
    // Check required top-level properties
    expect(config).toHaveProperty('pageInfo');
    expect(config).toHaveProperty('appConfig');
    expect(config).toHaveProperty('sections');
    // Check sections structure
    expect(Array.isArray(config.sections)).toBe(true);
    expect(config.sections.length).toBeGreaterThan(0);
    // Check first section has items
    const firstSection = config.sections[0];
    expect(firstSection).toHaveProperty('name');
    expect(firstSection).toHaveProperty('items');
    expect(Array.isArray(firstSection.items)).toBe(true);
  });
  it('should validate item structure in config', () => {
    const configPath = path.resolve(__dirname, '../fixtures/valid-config.yml');
    const configContent = fs.readFileSync(configPath, 'utf8');
    const config = yaml.load(configContent);
    const firstItem = config.sections[0].items[0];
    // Each item should have required properties
    expect(firstItem).toHaveProperty('title');
    expect(firstItem).toHaveProperty('url');
    // URL should be valid
    expect(firstItem.url).toMatch(/^https?:\/\//);
  });
});
// Confirms the modules this suite itself depends on resolve correctly.
describe('Smoke Tests - Core Dependencies', () => {
  it('should load yaml parser', () => {
    expect(yaml).toBeDefined();
    expect(typeof yaml.load).toBe('function');
  });
  it('should load fs module', () => {
    expect(fs).toBeDefined();
    expect(typeof fs.readFileSync).toBe('function');
  });
  it('should have config schema file', () => {
    const schemaPath = path.resolve(__dirname, '../../src/utils/ConfigSchema.json');
    expect(fs.existsSync(schemaPath)).toBe(true);
  });
});

43
vitest.config.js Normal file
View file

@ -0,0 +1,43 @@
import { defineConfig } from 'vitest/config';
import { createVuePlugin } from 'vite-plugin-vue2';
import path from 'path';
// Vitest configuration for the Vue 2 codebase (vite-plugin-vue2 handles SFCs).
export default defineConfig({
  plugins: [createVuePlugin()],
  test: {
    // Use happy-dom for faster DOM simulation
    environment: 'happy-dom',
    // Make test functions available globally (describe, it, expect, etc.)
    globals: true,
    // Setup file for global test configuration
    setupFiles: ['./tests/setup.js'],
    // Include patterns
    include: ['tests/**/*.{test,spec}.{js,ts}', 'src/**/*.{test,spec}.{js,ts}'],
    // Coverage configuration
    coverage: {
      provider: 'v8',
      reporter: ['text', 'json', 'html'],
      // Exclude non-source directories from coverage reports
      exclude: [
        'node_modules/',
        'tests/',
        '*.config.js',
        'dist/',
        '.github/',
        'docs/',
      ],
    },
  },
  resolve: {
    extensions: ['.mjs', '.js', '.ts', '.jsx', '.tsx', '.json', '.vue'],
    // Match the alias configuration from vue.config.js
    alias: {
      '@': path.resolve(__dirname, './src'),
      vue: 'vue/dist/vue.esm.js', // Use the full build for tests
    },
  },
});

View file

@ -62,6 +62,15 @@ const configureWebpack = {
},
],
},
resolve: {
fallback: {
http: false,
https: false,
url: false,
timers: false,
stream: false,
},
},
performance: {
maxEntrypointSize: 10000000,
maxAssetSize: 10000000,
@ -69,18 +78,19 @@ const configureWebpack = {
};
// Development server config
const userDataDir = path.join(__dirname, process.env.USER_DATA_DIR || 'user-data');
const devServer = {
contentBase: [
path.join(__dirname, 'public'),
path.join(__dirname, process.env.USER_DATA_DIR || 'user-data'),
],
watchContentBase: true,
publicPath: '/',
static: {
directory: path.join(__dirname, 'public'),
},
watchFiles: {
paths: [userDataDir],
},
};
// Application pages
const pages = {
dashy: {
index: {
entry: 'src/main.js',
filename: 'index.html',
},

11201
yarn.lock

File diff suppressed because it is too large Load diff