diff --git a/.github/workflows/docs-metadata-guard.yml b/.github/workflows/docs-metadata-guard.yml
new file mode 100644
index 0000000000..a606e43ab6
--- /dev/null
+++ b/.github/workflows/docs-metadata-guard.yml
@@ -0,0 +1,35 @@
+# Validates docs frontmatter metadata (title, date, description, tags) on PRs.
+name: Docs Metadata Guard
+
+on:
+  pull_request:
+    paths:
+      - "data/docs/**"
+      - "next.config.js"
+      - "scripts/check-docs-metadata.js"
+      - "tests/docs-metadata.test.js"
+      - "tests/fixtures/**"
+      - "package.json"
+      - ".github/workflows/docs-metadata-guard.yml"
+
+jobs:
+  check-metadata:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v5
+        with:
+          # Full history is needed so check-docs-metadata.js can compute
+          # `git merge-base` against the PR base branch (GITHUB_BASE_REF).
+          fetch-depth: 0
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v6
+        with:
+          node-version: "20"
+
+      - name: Install dependencies
+        run: yarn install --frozen-lockfile --non-interactive
+
+      - name: Run docs metadata tests
+        run: yarn test:docs-metadata
+
+      - name: Validate docs metadata
+        run: yarn check:docs-metadata
diff --git a/.husky/pre-commit b/.husky/pre-commit
index 6fdfd4957d..bd56716569 100755
--- a/.husky/pre-commit
+++ b/.husky/pre-commit
@@ -1,5 +1,4 @@
#!/usr/bin/env sh
-
set -e
echo 'husky (pre-commit): running lint-staged'
@@ -7,9 +6,18 @@ yarn lint-staged
STAGED_FILES=$(git diff --cached --name-only)
+# Check for docs redirect changes
if printf '%s\n' "$STAGED_FILES" | grep -E '^(data/docs/.*\.mdx|next\.config\.js|scripts/check-doc-redirects\.js)$' >/dev/null; then
echo 'husky (pre-commit): verifying docs redirects'
yarn check:doc-redirects
else
echo 'husky (pre-commit): skipping docs redirect check (no relevant changes staged)'
fi
+
+# Check for docs metadata
+if printf '%s\n' "$STAGED_FILES" | grep -E '^data/docs/.*\.mdx$' >/dev/null; then
+ echo 'husky (pre-commit): validating docs metadata'
+ HUSKY_PRE_COMMIT=true yarn check:docs-metadata
+else
+ echo 'husky (pre-commit): skipping docs metadata check (no documentation changes staged)'
+fi
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6ed49f88c2..fa4933ea1a 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -39,9 +39,11 @@ Thanks for helping improve SigNoz documentation. Clear, complete docs are critic
- Pre-commit behavior
- Runs `lint-staged` on staged files. ESLint and Prettier fix typical JS/TS/MD/MDX formatting and lint issues.
- When changes include docs or redirect-related files (`data/docs/**/*.mdx`, `next.config.js`, or `scripts/check-doc-redirects.js`), it runs `yarn check:doc-redirects` to ensure renamed/moved docs have permanent redirects.
+  - When changes include docs (`data/docs/**/*.mdx`), it runs `yarn check:docs-metadata` to ensure metadata such as date, description, tags, and title is complete and correct.
- Fixing failures
- Lint/format: run `yarn lint` or re-stage after auto-fixes from Prettier/ESLint.
- Redirects: run `yarn check:doc-redirects` locally to see missing entries, then add a permanent redirect in `next.config.js` under `async redirects()`. Re-stage and commit.
+ - Metadata: run `yarn check:docs-metadata` locally to see missing/invalid entries, then update the metadata in the `.mdx` file. Re-stage and commit.
- Optional: `yarn test:doc-redirects` runs a small test for redirect rules.
- Hooks path
- The repo uses Husky v9 defaults (`core.hooksPath=.husky`). If your local Git still points elsewhere (e.g., `.husky/_` from older setups), run `git config core.hooksPath .husky` or re-run `yarn install` to refresh hooks.
@@ -54,6 +56,10 @@ Thanks for helping improve SigNoz documentation. Clear, complete docs are critic
- Triggers on PRs that touch `data/docs/**`, `next.config.js`, `scripts/check-doc-redirects.js`, tests, or `package.json`.
- Runs `yarn test:doc-redirects` and `yarn check:doc-redirects`.
- Fails if redirects are missing/invalid or tests fail. Fix by adding permanent redirects in `next.config.js` and re-running locally.
+- Docs Metadata Guard
+ - Triggers on PRs that touch `data/docs/**`, `next.config.js`, `scripts/check-docs-metadata.js`, tests, or `package.json`.
+ - Runs `yarn test:docs-metadata` and `yarn check:docs-metadata`.
+  - Fails if title, date, or description is missing/invalid, and warns if tags are missing from MDX files. Fix by adding the relevant metadata in the MDX file and re-running locally.
- Add to Onboarding (label-driven)
- When a PR is labeled `add-to-onboarding`, this job checks that the PR includes docs changes. If none are found, the job fails with a message.
- If docs are present, it auto-creates an onboarding issue listing changed docs and comments on the PR with a link.
@@ -163,6 +169,7 @@ Every doc should be skimmable and actionable.
```
- Use descriptive anchor text that makes the link destination clear. Avoid generic phrases like "here" or "link" and do not paste raw URLs into the body text.
+
- ✅ `Learn from the [Temporal Golang sample repository](https://github.com/SigNoz/temporal-golang-opentelemetry/tree/main)`
- ❌ `See (link)` or `Refer to https://github.com/...`
diff --git a/data/docs/mastra-observability.mdx b/data/docs/mastra-observability.mdx
index 97edffeec7..810937f531 100644
--- a/data/docs/mastra-observability.mdx
+++ b/data/docs/mastra-observability.mdx
@@ -143,4 +143,4 @@ When you click on a trace in SigNoz, you'll see a detailed view of the trace, in
You can also check out our custom Mastra dashboard [here](https://signoz.io/docs/dashboards/dashboard-templates/mastra-dashboard/) which provides specialized visualizations for monitoring your Masrta usage in applications. The dashboard includes pre-built charts specifically tailored for LLM usage, along with import instructions to get started quickly.
-
\ No newline at end of file
+
diff --git a/package.json b/package.json
index 67a2fd4047..feca4f19d8 100644
--- a/package.json
+++ b/package.json
@@ -11,6 +11,8 @@
"lint": "next lint --fix --dir pages --dir app --dir components --dir lib --dir layouts --dir scripts",
"check:doc-redirects": "node scripts/check-doc-redirects.js",
"test:doc-redirects": "node --test tests/doc-redirects.test.js",
+ "check:docs-metadata": "node scripts/check-docs-metadata.js",
+ "test:docs-metadata": "node --test tests/docs-metadata.test.js",
"prepare": "husky"
},
"dependencies": {
diff --git a/scripts/check-docs-metadata.js b/scripts/check-docs-metadata.js
new file mode 100644
index 0000000000..710f66938a
--- /dev/null
+++ b/scripts/check-docs-metadata.js
@@ -0,0 +1,313 @@
+#!/usr/bin/env node
+
+const { execSync } = require('child_process')
+const fs = require('fs')
+
+/**
+ * Runs a shell command and returns its trimmed stdout.
+ * On failure it logs the error and terminates the process with exit code 1 —
+ * callers can NOT catch failures from this helper.
+ */
+function run(command) {
+  try {
+    return execSync(command, { encoding: 'utf8' }).trim()
+  } catch (error) {
+    console.error(`Failed to execute: ${command}`)
+    console.error(error.message)
+    process.exit(1)
+  }
+}
+
+function getChangedDocFiles(baseRef) {
+ let mergeBase
+ try {
+ mergeBase = run(`git merge-base HEAD ${baseRef}`)
+ } catch (error) {
+ if (baseRef !== 'origin/main') {
+ mergeBase = run('git merge-base HEAD origin/main')
+ } else {
+ throw error
+ }
+ }
+
+ const docPattern = /^data\/docs\/.*\.mdx$/
+ const changedFiles = new Set()
+
+ // Get committed changes
+ try {
+ const committedDiff = execSync(`git diff --name-only --diff-filter=ACMR ${mergeBase} HEAD`, {
+ encoding: 'utf8',
+ })
+ committedDiff
+ .split('\n')
+ .filter((file) => docPattern.test(file))
+ .forEach((file) => changedFiles.add(file))
+ } catch (error) {
+ console.error('Unable to read git diff for docs changes.')
+ console.error(error.message)
+ process.exit(1)
+ }
+
+ // Get working tree changes
+ try {
+ const workingDiff = execSync('git diff --name-only --diff-filter=ACMR HEAD', {
+ encoding: 'utf8',
+ })
+ workingDiff
+ .split('\n')
+ .filter((file) => docPattern.test(file))
+ .forEach((file) => changedFiles.add(file))
+ } catch (error) {
+ console.error('Unable to read local git diff for docs changes.')
+ console.error(error.message)
+ process.exit(1)
+ }
+
+ return Array.from(changedFiles).filter(Boolean)
+}
+
+function getGitAuthorDate(filePath) {
+ try {
+ const dateString = execSync(`git log -2 --pretty=format:%as -- ${filePath}`, {
+ encoding: 'utf8',
+ }).trim()
+ return dateString || null
+ } catch (error) {
+ return null
+ }
+}
+
+/**
+ * Returns staged .mdx files under data/docs/ (Added/Copied/Modified/Renamed).
+ * Used by the husky pre-commit hook (HUSKY_PRE_COMMIT=true path in main()).
+ * Exits the process if git fails.
+ */
+function getStagedDocFiles() {
+  try {
+    const stagedFiles = execSync('git diff --cached --name-only --diff-filter=ACMR', {
+      encoding: 'utf8',
+    })
+    const docPattern = /^data\/docs\/.*\.mdx$/
+    return stagedFiles
+      .split('\n')
+      .filter((file) => docPattern.test(file))
+      .filter(Boolean)
+  } catch (error) {
+    console.error('Unable to read staged files.')
+    console.error(error.message)
+    process.exit(1)
+  }
+}
+
+/**
+ * Returns the raw text between the first pair of `---` delimiter lines in
+ * filePath, '' when no frontmatter is found, or null when the file cannot
+ * be read.
+ * NOTE(review): the opening `---` is not required to be the first line, so a
+ * horizontal rule later in a frontmatter-less file would be mis-detected as
+ * the start of frontmatter — confirm docs always begin with `---`.
+ */
+function extractFrontmatter(filePath) {
+  try {
+    const content = fs.readFileSync(filePath, 'utf8')
+    const lines = content.split('\n')
+    let inFrontmatter = false
+    let frontmatterLines = []
+    let delimiterCount = 0
+
+    for (const line of lines) {
+      if (line.trim() === '---') {
+        delimiterCount++
+        if (delimiterCount === 1) {
+          inFrontmatter = true
+          continue
+        }
+        // Second delimiter closes the frontmatter; stop scanning.
+        if (delimiterCount === 2) {
+          break
+        }
+      }
+      if (inFrontmatter && delimiterCount === 1) {
+        frontmatterLines.push(line)
+      }
+    }
+
+    return frontmatterLines.join('\n')
+  } catch (error) {
+    return null
+  }
+}
+
+/**
+ * Validates the frontmatter of a single MDX file.
+ * Returns { errors, warnings }: errors (missing/empty title or description,
+ * missing/invalid date, unreadable file) fail the check; warnings (tags
+ * issues, frontmatter date older than the last commit date) do not.
+ */
+function validateMetadata(filePath) {
+  const errors = []
+  const warnings = []
+
+  // Check if file exists
+  if (!fs.existsSync(filePath)) {
+    errors.push('file not found')
+    return { errors, warnings }
+  }
+
+  // Extract frontmatter
+  const frontmatter = extractFrontmatter(filePath)
+  if (frontmatter === null) {
+    errors.push('cannot read file')
+    return { errors, warnings }
+  }
+
+  const lines = frontmatter.split('\n')
+  const fieldMap = new Map()
+
+  // Parse frontmatter fields.
+  // Only single-line `key: value` pairs are captured; multi-line YAML values
+  // (block lists, folded strings) are not parsed by this regex.
+  for (const line of lines) {
+    const match = line.match(/^(\w+):\s*(.*)$/)
+    if (match) {
+      fieldMap.set(match[1], match[2].trim())
+    }
+  }
+
+  // Validate tags field (warning only).
+  // NOTE(review): assumes inline array syntax (tags: ["a", "b"]); a YAML
+  // block-style list would falsely warn "tags must be an array" — confirm
+  // docs use inline tags.
+  if (!fieldMap.has('tags')) {
+    warnings.push('missing tags')
+  } else {
+    const tagsValue = fieldMap.get('tags')
+    if (!tagsValue.includes('[')) {
+      warnings.push('tags must be an array')
+    } else if (/^\[\s*\]$/.test(tagsValue)) {
+      warnings.push('tags array cannot be empty')
+    }
+  }
+
+  // Validate date field (required)
+  if (!fieldMap.has('date')) {
+    errors.push('missing date')
+  } else {
+    // Strip surrounding quotes so both `date: 2024-01-15` and
+    // `date: "2024-01-15"` are accepted.
+    const dateValue = fieldMap.get('date').replace(/['"]/g, '').trim()
+    const datePattern = /^\d{4}-\d{2}-\d{2}$/
+    if (!datePattern.test(dateValue)) {
+      errors.push('invalid date format - use YYYY-MM-DD')
+    } else {
+      // Check if date is valid (out-of-range ISO dates parse as Invalid Date)
+      const date = new Date(dateValue)
+      if (isNaN(date.getTime())) {
+        errors.push('invalid date value')
+      } else {
+        // Allow dates up to 7 days in the future.
+        // NOTE(review): 'YYYY-MM-DD' parses as UTC midnight while `today` is
+        // local time; the 7-day allowance absorbs any timezone skew.
+        const today = new Date()
+        today.setHours(0, 0, 0, 0)
+
+        const maxFutureDate = new Date(today)
+        maxFutureDate.setDate(maxFutureDate.getDate() + 7)
+
+        if (date > maxFutureDate) {
+          errors.push('date cannot be more than 7 days in the future')
+        }
+      }
+    }
+  }
+
+  // Compare frontmatter date with git commit date (warning only — a stale
+  // date is suspicious but not fatal; new files have no history and skip this).
+  if (fieldMap.has('date')) {
+    const frontmatterDate = fieldMap.get('date').replace(/['"]/g, '').trim()
+    const gitDate = getGitAuthorDate(filePath)
+
+    if (gitDate) {
+      const frontDate = new Date(frontmatterDate)
+      const commitDate = new Date(gitDate)
+
+      if (frontDate < commitDate) {
+        warnings.push(
+          `frontmatter date (${frontmatterDate}) is before git commit date (${gitDate})`
+        )
+      }
+    }
+  }
+
+  // Validate title field (required)
+  if (!fieldMap.has('title')) {
+    errors.push('missing title')
+  } else {
+    // '""' / "''" catch a quoted-but-empty value surviving the earlier trim.
+    const titleValue = fieldMap.get('title').trim()
+    if (!titleValue || titleValue === '""' || titleValue === "''") {
+      errors.push('title cannot be empty')
+    }
+  }
+
+  // Validate description field (required)
+  if (!fieldMap.has('description')) {
+    errors.push('missing description')
+  } else {
+    const descriptionValue = fieldMap.get('description').trim()
+    if (!descriptionValue || descriptionValue === '""' || descriptionValue === "''") {
+      errors.push('description cannot be empty')
+    }
+  }
+
+  return { errors, warnings }
+}
+
+/**
+ * Entry point: picks the file set (staged files under the husky pre-commit
+ * hook, otherwise files changed vs the PR base branch), validates each file,
+ * prints per-file results plus a summary, and exits 1 on any metadata error.
+ * Warnings (e.g. missing tags) are reported but never fail the run.
+ */
+function main() {
+  const isPreCommit = process.env.HUSKY_PRE_COMMIT === 'true'
+  // In CI, GITHUB_BASE_REF holds the PR target branch; locally fall back to
+  // DEFAULT_BRANCH or origin/main.
+  const baseBranch = process.env.GITHUB_BASE_REF
+    ? `origin/${process.env.GITHUB_BASE_REF}`
+    : process.env.DEFAULT_BRANCH || 'origin/main'
+
+  // Get changed files
+  const changedFiles = isPreCommit ? getStagedDocFiles() : getChangedDocFiles(baseBranch)
+
+  if (changedFiles.length === 0) {
+    console.log('No documentation files to check')
+    return
+  }
+
+  console.log(`Checking ${changedFiles.length} documentation file(s) for required metadata...\n`)
+
+  const invalidFiles = []
+  const warningFiles = []
+  let allValid = true
+
+  for (const file of changedFiles) {
+    const { errors, warnings } = validateMetadata(file)
+
+    if (errors.length > 0) {
+      console.error(`❌ ${file}: ${errors.join('; ')}`)
+      invalidFiles.push({ file, issues: errors })
+      allValid = false
+    }
+
+    if (warnings.length > 0) {
+      console.warn(`⚠️  ${file}: ${warnings.join('; ')}`)
+      warningFiles.push({ file, issues: warnings })
+    }
+
+    if (errors.length === 0 && warnings.length === 0) {
+      console.log(`✅ ${file}`)
+    }
+  }
+
+  console.log('')
+
+  // Display summary
+  if (warningFiles.length > 0) {
+    console.warn('Documentation metadata warnings:')
+    warningFiles.forEach(({ file, issues }) => {
+      console.warn(`  • ${file}: ${issues.join('; ')}`)
+    })
+    console.warn('\nConsider adding tags to improve documentation discoverability.\n')
+  }
+
+  if (!allValid) {
+    console.error('Documentation metadata validation failed:')
+    invalidFiles.forEach(({ file, issues }) => {
+      console.error(`  • ${file}: ${issues.join('; ')}`)
+    })
+    console.error('\nRequired fields:')
+    console.error('  - date: Date in YYYY-MM-DD format')
+    console.error('  - title: Non-empty title field')
+    console.error('  - description: Non-empty description field')
+    console.error('  - tags: Array of tags (recommended)')
+    console.error('\nExample:')
+    console.error('---')
+    console.error('title: My Documentation Page')
+    console.error(`date: ${new Date().toISOString().split('T')[0]}`)
+    console.error('description: A brief description of this page for SEO')
+    console.error('tags: ["SigNoz Cloud", "Self-Host"]')
+    console.error('---\n')
+    process.exit(1)
+  }
+
+  console.log('✅ All documentation files have valid metadata\n')
+}
+
+// Exported for unit tests (tests/docs-metadata.test.js); main() runs only
+// when the script is invoked directly (yarn check:docs-metadata or the
+// pre-commit hook), not when required as a module.
+module.exports = {
+  getChangedDocFiles,
+  getStagedDocFiles,
+  extractFrontmatter,
+  validateMetadata,
+  main,
+}
+
+if (require.main === module) {
+  main()
+}
diff --git a/tests/docs-metadata.test.js b/tests/docs-metadata.test.js
new file mode 100644
index 0000000000..50cb01cc6d
--- /dev/null
+++ b/tests/docs-metadata.test.js
@@ -0,0 +1,281 @@
+const { describe, it, beforeEach, afterEach } = require('node:test')
+const assert = require('node:assert')
+const { extractFrontmatter, validateMetadata } = require('../scripts/check-docs-metadata')
+const fs = require('fs')
+const path = require('path')
+const os = require('os')
+
+describe('check-docs-metadata', () => {
+ let tempDir
+
+ beforeEach(() => {
+ tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docs-metadata-test-'))
+ })
+
+ afterEach(() => {
+ if (tempDir) {
+ fs.rmSync(tempDir, { recursive: true, force: true })
+ }
+ })
+
+  // Writes `content` to a file in the per-test temp directory (created in
+  // beforeEach, removed in afterEach) and returns its absolute path.
+  function createTestFile(filename, content) {
+    const filePath = path.join(tempDir, filename)
+    fs.writeFileSync(filePath, content, 'utf8')
+    return filePath
+  }
+
+ describe('extractFrontmatter', () => {
+ it('should extract frontmatter from valid MDX file', () => {
+ const content = `---
+title: Test Document
+date: 2024-01-15
+description: A test document for validation
+tags: ["test", "example"]
+---
+
+# Content here
+`
+ const filePath = createTestFile('test.mdx', content)
+ const frontmatter = extractFrontmatter(filePath)
+
+ assert.ok(frontmatter.includes('title: Test Document'))
+ assert.ok(frontmatter.includes('date: 2024-01-15'))
+ assert.ok(frontmatter.includes('description: A test document for validation'))
+ assert.ok(frontmatter.includes('tags: ["test", "example"]'))
+ })
+
+ it('should return empty string for file without frontmatter', () => {
+ const content = `# Just content, no frontmatter`
+ const filePath = createTestFile('no-frontmatter.mdx', content)
+ const frontmatter = extractFrontmatter(filePath)
+
+ assert.strictEqual(frontmatter, '')
+ })
+
+ it('should return null for non-existent file', () => {
+ const frontmatter = extractFrontmatter('/non/existent/file.mdx')
+ assert.strictEqual(frontmatter, null)
+ })
+ })
+
+ describe('validateMetadata', () => {
+ it('should pass validation for valid metadata', () => {
+ const content = `---
+title: Valid Document
+date: 2024-01-15
+description: A valid document with all required fields
+tags: ["SigNoz Cloud", "Self-Host"]
+---
+
+# Content
+`
+ const filePath = createTestFile('valid.mdx', content)
+ const { errors, warnings } = validateMetadata(filePath)
+
+ assert.strictEqual(errors.length, 0)
+ assert.strictEqual(warnings.length, 0)
+ })
+
+ it('should warn when tags are missing', () => {
+ const content = `---
+title: No Tags Document
+date: 2024-01-15
+description: A document without tags
+---
+
+# Content
+`
+ const filePath = createTestFile('no-tags.mdx', content)
+ const { errors, warnings } = validateMetadata(filePath)
+
+ assert.strictEqual(errors.length, 0)
+ assert.ok(warnings.includes('missing tags'))
+ })
+
+ it('should error when date is missing', () => {
+ const content = `---
+title: No Date Document
+description: A document without a date
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('no-date.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.includes('missing date'))
+ })
+
+ it('should error when title is missing', () => {
+ const content = `---
+date: 2024-01-15
+description: A document without a title
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('no-title.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.includes('missing title'))
+ })
+
+ it('should error when description is missing', () => {
+ const content = `---
+title: No Description Document
+date: 2024-01-15
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('no-description.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.includes('missing description'))
+ })
+
+ it('should error when description is empty', () => {
+ const content = `---
+title: Empty Description Document
+date: 2024-01-15
+description: ""
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('empty-description.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.includes('description cannot be empty'))
+ })
+
+ it('should error for invalid date format', () => {
+ const content = `---
+title: Invalid Date Format
+date: 01/15/2024
+description: A document with invalid date format
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('invalid-date.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.includes('invalid date format - use YYYY-MM-DD'))
+ })
+
+ it('should error for dates more than 7 days in the future', () => {
+ const futureDate = new Date()
+ futureDate.setDate(futureDate.getDate() + 10) // 10 days in the future
+ const futureDateStr = futureDate.toISOString().split('T')[0]
+
+ const content = `---
+title: Future Date Document
+date: ${futureDateStr}
+description: A document with a future date
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('future-date.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.includes('date cannot be more than 7 days in the future'))
+ })
+
+ it('should allow dates up to 7 days in the future', () => {
+ const futureDate = new Date()
+ futureDate.setDate(futureDate.getDate() + 5) // 5 days in the future
+ const futureDateStr = futureDate.toISOString().split('T')[0]
+
+ const content = `---
+title: Near Future Date Document
+date: ${futureDateStr}
+description: A document with a near future date
+tags: ["test"]
+---
+
+# Content
+`
+ const filePath = createTestFile('near-future-date.mdx', content)
+ const { errors, warnings } = validateMetadata(filePath)
+
+ assert.strictEqual(errors.length, 0)
+ assert.strictEqual(warnings.length, 0)
+ })
+
+ it('should warn when tags is not an array', () => {
+ const content = `---
+title: Wrong Tags Format
+date: 2024-01-15
+description: A document with wrong tags format
+tags: test
+---
+
+# Content
+`
+ const filePath = createTestFile('wrong-tags.mdx', content)
+ const { errors, warnings } = validateMetadata(filePath)
+
+ assert.strictEqual(errors.length, 0)
+ assert.ok(warnings.includes('tags must be an array'))
+ })
+
+ it('should warn when tags array is empty', () => {
+ const content = `---
+title: Empty Tags
+date: 2024-01-15
+description: A document with empty tags array
+tags: []
+---
+
+# Content
+`
+ const filePath = createTestFile('empty-tags.mdx', content)
+ const { errors, warnings } = validateMetadata(filePath)
+
+ assert.strictEqual(errors.length, 0)
+ assert.ok(warnings.includes('tags array cannot be empty'))
+ })
+
+ it('should handle files with no frontmatter', () => {
+ const content = `# Just content`
+ const filePath = createTestFile('no-frontmatter.mdx', content)
+ const { errors } = validateMetadata(filePath)
+
+ assert.ok(errors.length > 0)
+ assert.ok(errors.includes('missing date'))
+ assert.ok(errors.includes('missing title'))
+ assert.ok(errors.includes('missing description'))
+ })
+
+ it('should error for non-existent file', () => {
+ const { errors } = validateMetadata('/non/existent/file.mdx')
+
+ assert.ok(errors.includes('file not found'))
+ })
+
+ it('should handle multiple errors and warnings', () => {
+ const content = `---
+date: invalid-date
+---
+
+# Content
+`
+ const filePath = createTestFile('multiple-issues.mdx', content)
+ const { errors, warnings } = validateMetadata(filePath)
+
+ assert.ok(errors.length > 1)
+ assert.ok(warnings.includes('missing tags'))
+ assert.ok(errors.includes('missing title'))
+ assert.ok(errors.includes('missing description'))
+ assert.ok(errors.includes('invalid date format - use YYYY-MM-DD'))
+ })
+ })
+})