diff --git a/.github/BRANCH_PROTECTION.md b/.github/BRANCH_PROTECTION.md
new file mode 100644
index 0000000..f9a5ce9
--- /dev/null
+++ b/.github/BRANCH_PROTECTION.md
@@ -0,0 +1,84 @@
+# Branch Protection Configuration
+
+This document outlines the recommended branch protection settings for the `main` branch.
+
+## Setting Up Branch Protection
+
+Navigate to: Settings → Branches → Add rule
+
+### Branch name pattern
+- `main`
+
+### Protect matching branches
+
+#### ✅ Required Settings
+
+1. **Require a pull request before merging**
+ - ✅ Require approvals: 1
+ - ✅ Dismiss stale pull request approvals when new commits are pushed
+ - ✅ Require review from CODEOWNERS (if applicable)
+
+2. **Require status checks to pass before merging**
+ - ✅ Require branches to be up to date before merging
+ - **Required status checks:**
+ - `validate` (from PR Validation workflow)
+
+3. **Require conversation resolution before merging**
+ - ✅ Enable this to ensure all PR comments are addressed
+
+4. **Additional restrictions**
+ - ✅ Do not allow bypassing the above settings
+ - ✅ Restrict who can push to matching branches (optional, but recommended)
+
+#### ⚠️ Optional but Recommended
+
+1. **Require signed commits**
+ - Ensures commits are verified
+
+2. **Include administrators**
+ - Apply rules to admin users as well
+
+3. **Restrict who can dismiss pull request reviews**
+ - Limit to repository administrators
+
+## Automated Setup via GitHub CLI
+
+You can also configure branch protection using the GitHub CLI:
+
+```bash
+# Install GitHub CLI if not already installed
+# brew install gh (macOS)
+# or see: https://cli.github.com/
+
+# Authenticate
+gh auth login
+
+# Set up branch protection
+gh api repos/:owner/:repo/branches/main/protection \
+ --method PUT \
+ --field required_status_checks='{"strict":true,"contexts":["validate"]}' \
+ --field enforce_admins=true \
+ --field required_pull_request_reviews='{"dismiss_stale_reviews":true,"require_code_owner_reviews":false,"required_approving_review_count":1}' \
+ --field restrictions=null
+```
+
+## Verifying Protection
+
+After setup, the main branch should show a 🔒 icon in the GitHub UI, indicating it's protected.
+
+Test the protection by:
+1. Attempting to push directly to main (should fail)
+2. Creating a PR and verifying checks run
+3. Ensuring merge is blocked until checks pass
+
+## Status Checks
+
+The following GitHub Actions workflows provide status checks:
+
+### Currently Implemented
+- **PR Validation** (`validate`): Runs linting, type checking, and build verification
+
+### Planned Additions
+- **Link Checker**: Validates internal and external links
+- **Accessibility Tests**: Automated a11y testing
+- **Performance Budget**: Lighthouse CI checks
\ No newline at end of file
diff --git a/.github/CI_CD_DOCUMENTATION.md b/.github/CI_CD_DOCUMENTATION.md
new file mode 100644
index 0000000..13c7822
--- /dev/null
+++ b/.github/CI_CD_DOCUMENTATION.md
@@ -0,0 +1,154 @@
+# CI/CD and Branch Protection Documentation
+
+## Overview
+
+This repository implements comprehensive quality gates to ensure code quality and prevent broken deployments to GitHub Pages.
+
+## 🛡️ Protection Mechanisms
+
+### 1. Pull Request Validation
+
+**File:** `.github/workflows/pr-validation.yml`
+
+All pull requests to `main` undergo automated validation:
+
+- **Linting** - Code style and quality checks via ESLint
+- **Type Checking** - TypeScript validation via `astro check`
+- **Build Verification** - Ensures the site builds successfully
+- **Link Validation** - Checks all internal links are valid
+
+The workflow provides automatic PR comments with validation results.
+
+### 2. Link Validation
+
+**File:** `scripts/validate-links.js`
+
+Custom Node.js script that:
+- Parses all generated HTML files in `dist/`
+- Extracts internal links (href and src attributes)
+- Validates each link resolves to an actual file or route
+- Reports broken links with their source locations
+
+**Usage:**
+```bash
+npm run validate:links # Run link validation
+npm run validate:all # Run all validation (lint, build, links)
+```
+
+### 3. Deployment Workflow
+
+**File:** `.github/workflows/deploy.yml`
+
+Automatically deploys to GitHub Pages when changes are pushed to `main`:
+- Builds the site with `npm run build`
+- Uploads artifacts to GitHub Pages
+- Only runs after all PR checks have passed (when branch protection is enabled)
+
+## 🔐 Branch Protection Setup
+
+### Required Configuration
+
+Follow the instructions in `.github/BRANCH_PROTECTION.md` to enable branch protection.
+
+**Key Settings:**
+- Require pull request reviews before merging
+- Require status checks to pass (specifically the `validate` check)
+- Dismiss stale reviews when new commits are pushed
+- Require branches to be up to date before merging
+
+### Setting Up Protection
+
+#### Via GitHub UI:
+1. Go to Settings → Branches
+2. Add rule for `main` branch
+3. Configure settings per `BRANCH_PROTECTION.md`
+
+#### Via GitHub CLI:
+```bash
+gh api repos/:owner/:repo/branches/main/protection \
+ --method PUT \
+ --field required_status_checks='{"strict":true,"contexts":["validate"]}' \
+ --field enforce_admins=true \
+ --field required_pull_request_reviews='{"dismiss_stale_reviews":true,"required_approving_review_count":1}'
+```
+
+## 📋 Development Workflow
+
+### 1. Create Feature Branch
+```bash
+git checkout -b feature/my-feature
+```
+
+### 2. Make Changes
+Edit files, add content, modify styles, etc.
+
+### 3. Test Locally
+```bash
+npm run validate:all # Run all checks locally
+```
+
+### 4. Create Pull Request
+```bash
+git push origin feature/my-feature
+# Create PR via GitHub UI or CLI
+```
+
+### 5. Automated Validation
+PR validation workflow automatically runs and reports results.
+
+### 6. Merge
+Once checks pass and PR is approved, merge to main.
+
+### 7. Automatic Deployment
+Changes are automatically deployed to GitHub Pages.
+
+## 🚀 Future Enhancements
+
+### Phase 2: Enhanced Link Validation
+- [ ] External link checking with smart caching
+- [ ] Anchor link validation (#fragments)
+- [ ] Asset optimization checks
+
+### Phase 3: Quality Improvements
+- [ ] Accessibility testing (axe-core)
+- [ ] SEO validation
+- [ ] Performance budgets (Lighthouse CI)
+- [ ] Content validation (frontmatter requirements)
+
+### Phase 4: Advanced Features
+- [ ] Preview deployments for PRs
+- [ ] Visual regression testing
+- [ ] Spell checking
+- [ ] Security scanning
+
+## 🛠️ Troubleshooting
+
+### PR Checks Failing
+
+1. **Linting Errors:**
+ ```bash
+ npm run lint:fix # Auto-fix linting issues
+ ```
+
+2. **Build Errors:**
+ ```bash
+ npm run build # Test build locally
+ ```
+
+3. **Link Validation Errors:**
+ ```bash
+ npm run validate:links # Check which links are broken
+ ```
+
+### Branch Protection Not Working
+
+- Ensure you have admin access to the repository
+- Verify the `validate` status check name matches the workflow job name
+- Check that branch protection rules are enabled in Settings → Branches
+
+## 📚 Related Documentation
+
+- [Branch Protection Setup](./BRANCH_PROTECTION.md)
+- [PR Validation Workflow](./workflows/pr-validation.yml)
+- [Deployment Workflow](./workflows/deploy.yml)
+- [Link Validation Script](../scripts/validate-links.js)
\ No newline at end of file
diff --git a/.github/workflows/pr-validation.yml b/.github/workflows/pr-validation.yml
new file mode 100644
index 0000000..625eb68
--- /dev/null
+++ b/.github/workflows/pr-validation.yml
@@ -0,0 +1,67 @@
+name: PR Validation
+
+on:
+ pull_request:
+ branches: [main]
+ types: [opened, synchronize, reopened]
+
+permissions:
+ contents: read
+ pull-requests: write
+
+jobs:
+ validate:
+ name: Validate PR
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Setup Node
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ cache: 'npm'
+
+ - name: Install Dependencies
+ run: npm ci
+
+ - name: Run Linting
+ run: npm run lint
+ continue-on-error: false
+
+ - name: Type Check and Build
+ run: npm run build
+ continue-on-error: false
+
+ - name: Check Build Output
+ run: |
+ if [ ! -d "dist" ]; then
+ echo "Error: Build output directory 'dist' not found"
+ exit 1
+ fi
+ echo "Build successful - dist directory created"
+ echo "Files generated: $(find dist -type f | wc -l)"
+
+ - name: Validate Internal Links
+ run: npm run validate:links
+ continue-on-error: false
+
+ - name: Report Status
+ if: always()
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const status = '${{ job.status }}';
+ const icon = status === 'success' ? '✅' : '❌';
+ const message = status === 'success'
+ ? 'All checks passed! Ready for review.'
+ : 'Some checks failed. Please review the errors above.';
+
+ github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: `## PR Validation ${icon}\n\n${message}\n\n### Checks Performed:\n- ✓ Linting\n- ✓ Type checking\n- ✓ Build verification\n- ✓ Internal link validation`
+ });
\ No newline at end of file
diff --git a/package.json b/package.json
index dbb10c4..300bb53 100644
--- a/package.json
+++ b/package.json
@@ -10,7 +10,9 @@
"preview:network": "astro preview --host",
"astro": "astro",
"lint": "eslint .",
- "lint:fix": "eslint . --fix"
+ "lint:fix": "eslint . --fix",
+ "validate:links": "node scripts/validate-links.js",
+ "validate:all": "npm run lint && npm run build && npm run validate:links"
},
"dependencies": {
"@astrojs/check": "^0.9.4",
diff --git a/scripts/validate-links.js b/scripts/validate-links.js
new file mode 100755
index 0000000..aa7b1bd
--- /dev/null
+++ b/scripts/validate-links.js
@@ -0,0 +1,320 @@
+#!/usr/bin/env node
+
+import fs from "fs/promises";
+import fsSync from "fs";
+import path from "path";
+import { fileURLToPath } from "url";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const DIST_DIR = path.join(__dirname, "..", "dist");
+const IGNORED_PATTERNS = [
+ /^mailto:/, // Email links
+ /^tel:/, // Phone links
+ /^https?:\/\//, // External links (handle separately if needed)
+ /^\/\//, // Protocol-relative URLs
+];
+
+class LinkValidator {
+ constructor() {
+ this.allLinks = new Map(); // link -> Set of pages containing it
+ this.brokenLinks = new Map(); // link -> Set of pages containing it
+ this.fragmentLinks = new Map(); // fragment link -> Set of pages containing it
+ this.brokenFragments = new Map(); // broken fragment -> Set of pages containing it
+ this.validPaths = new Set();
+ this.htmlFiles = [];
+ this.pageIds = new Map(); // page path -> Set of IDs on that page
+ }
+
+ async findHtmlFiles(dir) {
+ const entries = await fs.readdir(dir, { withFileTypes: true });
+
+ for (const entry of entries) {
+ const fullPath = path.join(dir, entry.name);
+
+ if (entry.isDirectory()) {
+ await this.findHtmlFiles(fullPath);
+ } else if (entry.name.endsWith(".html")) {
+ this.htmlFiles.push(fullPath);
+ // Add this HTML file as a valid path
+ const relativePath = path.relative(DIST_DIR, fullPath);
+ this.validPaths.add("/" + relativePath);
+ this.validPaths.add("/" + relativePath.replace(/index\.html$/, ""));
+ this.validPaths.add("/" + relativePath.replace(/\.html$/, ""));
+ }
+ }
+ }
+
+ extractLinks(html) {
+ const links = new Set();
+ const fragmentLinks = new Set();
+
+ // Match href attributes
+ const hrefRegex = /href=["']([^"']+)["']/gi;
+ let match;
+
+ while ((match = hrefRegex.exec(html)) !== null) {
+ const link = match[1];
+
+ // Handle fragment-only links separately
+ if (link.startsWith("#")) {
+ fragmentLinks.add(link);
+ continue;
+ }
+
+ // Skip ignored patterns
+ if (IGNORED_PATTERNS.some(pattern => pattern.test(link))) {
+ continue;
+ }
+
+ // Process internal links (including those with fragments)
+ if (link.startsWith("/")) {
+ links.add(link);
+ }
+ }
+
+ // Match src attributes for images, scripts, etc.
+ const srcRegex = /src=["']([^"']+)["']/gi;
+ while ((match = srcRegex.exec(html)) !== null) {
+ const link = match[1];
+
+ if (IGNORED_PATTERNS.some(pattern => pattern.test(link))) {
+ continue;
+ }
+
+ if (link.startsWith("/")) {
+ links.add(link);
+ }
+ }
+
+ return { links, fragmentLinks };
+ }
+
+ extractIds(html) {
+ const ids = new Set();
+
+ // Match id attributes in HTML elements (not in comments)
+ // First, remove HTML comments to avoid false matches
+    const htmlWithoutComments = html.replace(/<!--[\s\S]*?-->/g, "");
+
+ // Match id attributes in any element
+ const idRegex = /\sid=["']([^"']+)["']/gi;
+ let match;
+
+ while ((match = idRegex.exec(htmlWithoutComments)) !== null) {
+ ids.add(match[1]);
+ }
+
+ return ids;
+ }
+
+ async validateFile(filePath) {
+ const content = await fs.readFile(filePath, "utf-8");
+ const { links, fragmentLinks } = this.extractLinks(content);
+ const ids = this.extractIds(content);
+ const relativePath = path.relative(DIST_DIR, filePath);
+
+ // Store IDs for this page
+ this.pageIds.set("/" + relativePath.replace(/index\.html$/, ""), ids);
+ this.pageIds.set("/" + relativePath, ids);
+
+ // Validate regular links
+ for (const link of links) {
+ // Split link and fragment
+ const [linkPath, fragment] = link.split("#");
+
+ if (!this.allLinks.has(linkPath)) {
+ this.allLinks.set(linkPath, new Set());
+ }
+ this.allLinks.get(linkPath).add(relativePath);
+
+ // Check if link path is valid
+ if (!this.isValidLink(linkPath)) {
+ if (!this.brokenLinks.has(linkPath)) {
+ this.brokenLinks.set(linkPath, new Set());
+ }
+ this.brokenLinks.get(linkPath).add(relativePath);
+ } else if (fragment) {
+ // If the path is valid but has a fragment, validate the fragment later
+ const fullLink = link;
+ if (!this.fragmentLinks.has(fullLink)) {
+ this.fragmentLinks.set(fullLink, new Set());
+ }
+ this.fragmentLinks.get(fullLink).add(relativePath);
+ }
+ }
+
+ // Store fragment-only links for validation
+ for (const fragmentLink of fragmentLinks) {
+ if (!this.fragmentLinks.has(fragmentLink)) {
+ this.fragmentLinks.set(fragmentLink, new Set());
+ }
+ this.fragmentLinks.get(fragmentLink).add(relativePath);
+ }
+ }
+
+ isValidLink(link) {
+ // Check if the exact path exists
+ if (this.validPaths.has(link)) {
+ return true;
+ }
+
+ // Check if it's a directory that might have an index.html
+ if (!link.endsWith("/") && this.validPaths.has(link + "/")) {
+ return true;
+ }
+
+ // Check if adding .html makes it valid
+ if (!link.endsWith(".html") && this.validPaths.has(link + ".html")) {
+ return true;
+ }
+
+ // Check if adding /index.html makes it valid
+ if (this.validPaths.has(link + "/index.html")) {
+ return true;
+ }
+
+ // Check if it's a static asset
+ const assetPath = path.join(DIST_DIR, link.slice(1)); // Remove leading /
+ try {
+ const stats = fsSync.statSync(assetPath);
+ return stats.isFile() || stats.isDirectory();
+ } catch {
+ return false;
+ }
+ }
+
+ validateFragments() {
+ // Validate fragment links
+ for (const [link, pages] of this.fragmentLinks) {
+ if (link.startsWith("#")) {
+ // Fragment-only link - check if ID exists on the same page
+ const fragmentId = link.slice(1);
+ for (const page of pages) {
+ const pagePath = "/" + page.replace(/index\.html$/, "");
+ const pageIdSet = this.pageIds.get(pagePath) || this.pageIds.get("/" + page);
+ if (!pageIdSet || !pageIdSet.has(fragmentId)) {
+ // Create a unique key for this fragment on this specific page
+ const brokenKey = `${link} (on ${page})`;
+ if (!this.brokenFragments.has(brokenKey)) {
+ this.brokenFragments.set(brokenKey, new Set());
+ }
+ this.brokenFragments.get(brokenKey).add(page);
+ }
+ }
+ } else {
+ // Link with fragment - check if ID exists on the target page
+ const [targetPath, fragmentId] = link.split("#");
+ const targetPageIds = this.pageIds.get(targetPath) ||
+ this.pageIds.get(targetPath + "/") ||
+ this.pageIds.get(targetPath + "/index.html") ||
+ this.pageIds.get(targetPath + ".html");
+
+ if (!targetPageIds || !targetPageIds.has(fragmentId)) {
+ if (!this.brokenFragments.has(link)) {
+ this.brokenFragments.set(link, new Set());
+ }
+ for (const page of pages) {
+ this.brokenFragments.get(link).add(page);
+ }
+ }
+ }
+ }
+ }
+
+ async validate() {
+ console.log("🔍 Starting link validation...\n");
+
+ // Check if dist directory exists
+ try {
+ await fs.access(DIST_DIR);
+ } catch {
+ console.error("❌ Error: dist directory not found. Please run \"npm run build\" first.");
+ process.exit(1);
+ }
+
+ // Find all HTML files
+ await this.findHtmlFiles(DIST_DIR);
+ console.log(`📁 Found ${this.htmlFiles.length} HTML files\n`);
+
+ // Validate each file
+ for (const file of this.htmlFiles) {
+ await this.validateFile(file);
+ }
+
+ // Validate fragments after all IDs have been collected
+ this.validateFragments();
+
+ // Report results
+ this.reportResults();
+ }
+
+ reportResults() {
+ const totalLinks = this.allLinks.size;
+ const brokenCount = this.brokenLinks.size;
+ const totalFragments = this.fragmentLinks.size;
+ const brokenFragmentCount = this.brokenFragments.size;
+
+ console.log(`📊 Link Validation Results`);
+ console.log(`${"=".repeat(50)}`);
+ console.log(`Total unique internal links: ${totalLinks}`);
+ console.log(`Valid links: ${totalLinks - brokenCount}`);
+ console.log(`Broken links: ${brokenCount}`);
+ console.log(`\nTotal fragment links: ${totalFragments}`);
+ console.log(`Valid fragments: ${totalFragments - brokenFragmentCount}`);
+ console.log(`Broken fragments: ${brokenFragmentCount}\n`);
+
+ let hasErrors = false;
+
+ if (brokenCount > 0) {
+ hasErrors = true;
+ console.log("❌ Broken Links Found:\n");
+
+ for (const [link, pages] of this.brokenLinks) {
+ console.log(` ${link}`);
+ const pageList = Array.from(pages).slice(0, 3);
+ for (const page of pageList) {
+ console.log(` → Found in: ${page}`);
+ }
+ if (pages.size > 3) {
+ console.log(` → And ${pages.size - 3} more files...`);
+ }
+ console.log();
+ }
+ }
+
+ if (brokenFragmentCount > 0) {
+ hasErrors = true;
+ console.log("❌ Broken Fragment Links Found:\n");
+
+ for (const [link, pages] of this.brokenFragments) {
+ // Extract the original link from the key (remove " (on page)" suffix if present)
+ const displayLink = link.includes(" (on ") ? link.split(" (on ")[0] : link;
+ console.log(` ${displayLink}`);
+ const pageList = Array.from(pages).slice(0, 3);
+ for (const page of pageList) {
+ console.log(` → Found in: ${page}`);
+ }
+ if (pages.size > 3) {
+ console.log(` → And ${pages.size - 3} more files...`);
+ }
+ console.log();
+ }
+ }
+
+ if (hasErrors) {
+ process.exit(1);
+ } else {
+ console.log("✅ All internal links and fragments are valid!");
+ process.exit(0);
+ }
+ }
+}
+
+// Run validation
+const validator = new LinkValidator();
+validator.validate().catch(error => {
+ console.error("❌ Validation error:", error);
+ process.exit(1);
+});
\ No newline at end of file
diff --git a/src/lib/markdown.ts b/src/lib/markdown.ts
index a154184..8a41fa6 100644
--- a/src/lib/markdown.ts
+++ b/src/lib/markdown.ts
@@ -1,29 +1,29 @@
export function renderInlineMarkdown(text: string): string {
- if (!text) return '';
+ if (!text) return "";
// Process inline markdown patterns
let html = text
// Escape HTML entities first
-    .replace(/&/g, '&amp;')
-    .replace(/</g, '&lt;')
-    .replace(/>/g, '&gt;')
-    .replace(/"/g, '&quot;')
-    .replace(/'/g, '&#39;')
+    .replace(/&/g, "&amp;")
+    .replace(/</g, "&lt;")
+    .replace(/>/g, "&gt;")
+    .replace(/"/g, "&quot;")
+    .replace(/'/g, "&#39;")
// Process markdown patterns
// Bold: **text** or __text__
-    .replace(/\*\*([^*]+)\*\*/g, '<strong>$1</strong>')
-    .replace(/__([^_]+)__/g, '<strong>$1</strong>')
+    .replace(/\*\*([^*]+)\*\*/g, "<strong>$1</strong>")
+    .replace(/__([^_]+)__/g, "<strong>$1</strong>")
// Italic: *text* or _text_ (but not part of bold)
-    .replace(/(?<!\*)\*([^*]+)\*(?!\*)/g, '<em>$1</em>')
-    .replace(/(?<!_)_([^_]+)_(?!_)/g, '<em>$1</em>')
+    .replace(/(?<!\*)\*([^*]+)\*(?!\*)/g, "<em>$1</em>")
+    .replace(/(?<!_)_([^_]+)_(?!_)/g, "<em>$1</em>")
// Code: `text`
-    .replace(/`([^`]+)`/g, '<code>$1</code>')
+    .replace(/`([^`]+)`/g, "<code>$1</code>")
// Strikethrough: ~~text~~
-    .replace(/~~([^~]+)~~/g, '<del>$1</del>');
+    .replace(/~~([^~]+)~~/g, "<del>$1</del>");
return html;
}
\ No newline at end of file