Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
134 changes: 134 additions & 0 deletions .github/workflows/continuous-integration.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
name: Continuous Integration

# One pipeline serves both pull requests and pushes to main; each job gates
# itself on the triggering event via its `if` condition.
on:
  pull_request:
  push:
    branches:
      - main

jobs:
  # Runs every test/*.sh script for pull requests.
  pr-tests:
    if: ${{ github.event_name == 'pull_request' }}
    runs-on: ubuntu-latest
    name: Run Tests
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      # chmod is defensive; the scripts are invoked via `bash` below anyway.
      - name: Ensure test scripts are executable
        run: |
          if compgen -G "test/*.sh" > /dev/null; then
            chmod +x test/*.sh
          fi

      - name: Execute pull request tests
        run: |
          set -euo pipefail
          if compgen -G "test/*.sh" > /dev/null; then
            for test_script in test/*.sh; do
              echo "Running ${test_script}"
              bash "${test_script}"
            done
          else
            echo "No tests found in ./test"
          fi

  # Summary job; only reached when pr-tests succeeds (via `needs`).
  pr-report:
    if: ${{ github.event_name == 'pull_request' }}
    runs-on: ubuntu-latest
    name: Report Tests Statuses
    needs:
      - pr-tests
    steps:
      - name: Summarize pull request test results
        run: echo "Pull request tests completed successfully."

  # Assembles the static site into build/ and publishes it as an artifact.
  main-build:
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
    runs-on: ubuntu-latest
    name: Build and Upload Artifacts
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Build static site
        run: |
          set -euo pipefail
          mkdir -p build
          cp index.html build/
          cp style.css build/
          cp landing.js build/

      - name: Upload static site artifact
        uses: actions/upload-artifact@v4
        with:
          name: static-site
          path: build

  main-report-build:
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
    runs-on: ubuntu-latest
    name: Report Build Status
    needs:
      - main-build
    steps:
      - name: Report build outcome
        run: echo "Static site build completed successfully."

  # Runs every tests/*.sh script after a successful main-branch build.
  main-tests:
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
    runs-on: ubuntu-latest
    name: Run Tests
    needs:
      - main-build
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      # chmod is defensive; the scripts are invoked via `bash` below anyway.
      - name: Ensure test scripts are executable
        run: |
          if compgen -G "tests/*.sh" > /dev/null; then
            chmod +x tests/*.sh
          fi

      - name: Execute main branch tests
        run: |
          set -euo pipefail
          if compgen -G "tests/*.sh" > /dev/null; then
            for test_script in tests/*.sh; do
              echo "Running ${test_script}"
              bash "${test_script}"
            done
          else
            echo "No tests found in ./tests"
          fi

  main-report-tests:
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
    runs-on: ubuntu-latest
    name: Report Tests Statuses
    needs:
      - main-tests
    steps:
      - name: Summarize main branch test results
        run: echo "Main branch tests completed successfully."

  # NOTE(review): placeholder only — no actual Pages deployment is configured yet.
  deploy-pages:
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
    runs-on: ubuntu-latest
    name: Deploy to Pages
    needs:
      - main-build
    steps:
      - name: Deploy placeholder
        run: echo "Deploying static site to GitHub Pages (placeholder)."
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Talk to Unity in Plain English

![Main Branch Workflow Status](https://github.com/Unity-Lab-AI/Talk-to-Unity/actions/workflows/main.yml/badge.svg?branch=main)
![Pull Request Workflow Status](https://github.com/Unity-Lab-AI/Talk-to-Unity/actions/workflows/pull-request.yml/badge.svg)
[![Main Pipeline Status](https://github.com/Unity-Lab-AI/Talk/actions/workflows/continuous-integration.yml/badge.svg?branch=main)](https://github.com/Unity-Lab-AI/Talk/actions/workflows/continuous-integration.yml)
[![Pull Request Checks](https://github.com/Unity-Lab-AI/Talk/actions/workflows/continuous-integration.yml/badge.svg?event=pull_request)](https://github.com/Unity-Lab-AI/Talk/actions/workflows/continuous-integration.yml)

Talk to Unity is a single web page that acts like a friendly concierge. The landing screen double-checks that your browser has everything it needs (secure connection, microphone, speech tools). Once every light turns green, a voice assistant named **Unity** wakes up so you can talk out loud and hear it answer back.

Expand Down
Empty file added test/__init__.py
Empty file.
5 changes: 5 additions & 0 deletions test/site_structure_test.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Pull-request smoke-test entry point: discovers unittest modules under ./test.
set -e
set -u
set -o pipefail

echo "Running Talk to Unity smoke tests for pull requests..."
python -m unittest discover -s test -p "test_*.py" -v
114 changes: 114 additions & 0 deletions test/test_landing_page_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
"""Smoke tests for validating the landing page metadata used in pull requests."""

from __future__ import annotations

from html.parser import HTMLParser
from pathlib import Path
import unittest


class _HeadStructureParser(HTMLParser):
"""Minimal HTML parser that records metadata inside the document head."""

def __init__(self) -> None:
super().__init__()
self._in_head = False
self._in_title = False
self._in_noscript = False
self._current_title: list[str] = []
self.titles: list[str] = []
self.meta_tags: list[dict[str, str]] = []
self.scripts: list[dict[str, str]] = []
self.noscript_styles: list[dict[str, str]] = []

def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
attr_map = {name: value or "" for name, value in attrs}
if tag == "head":
self._in_head = True
elif tag == "title" and self._in_head:
self._in_title = True
self._current_title.clear()

if self._in_head:
if tag == "meta":
self.meta_tags.append(attr_map)
elif tag == "script":
self.scripts.append(attr_map)
elif tag == "noscript":
self._in_noscript = True

if self._in_noscript and tag == "link" and attr_map.get("rel") == "stylesheet":
self.noscript_styles.append(attr_map)

def handle_endtag(self, tag: str) -> None:
if tag == "head":
self._in_head = False
elif tag == "title" and self._in_title:
title = "".join(self._current_title).strip()
if title:
self.titles.append(title)
self._in_title = False
elif tag == "noscript" and self._in_noscript:
self._in_noscript = False

def handle_data(self, data: str) -> None:
if self._in_title:
self._current_title.append(data)


class LandingPageHeadTests(unittest.TestCase):
    """Checks the metadata embedded in ``index.html`` against expectations."""

    @classmethod
    def setUpClass(cls) -> None:
        # Parse the landing page a single time and share it across all tests.
        cls.index_html = Path("index.html").read_text(encoding="utf-8")
        head_parser = _HeadStructureParser()
        head_parser.feed(cls.index_html)
        cls.parser = head_parser

    def test_document_title_mentions_unity_voice_lab(self) -> None:
        """The page title should advertise the Unity Voice Lab system check."""

        titles = self.parser.titles
        self.assertGreater(len(titles), 0, "No <title> element was parsed from the head.")
        matches = [title for title in titles if "Unity Voice Lab" in title]
        self.assertTrue(
            bool(matches),
            f"Expected 'Unity Voice Lab' in titles, found {self.parser.titles}",
        )

    def test_viewport_meta_is_present(self) -> None:
        """Mobile viewport metadata keeps the layout responsive."""

        viewport_metas = [
            meta for meta in self.parser.meta_tags if meta.get("name") == "viewport"
        ]
        self.assertEqual(len(viewport_metas), 1, "The responsive viewport <meta> tag is missing or duplicated.")
        self.assertIn("width=device-width", viewport_metas[0].get("content", ""))

    def test_required_scripts_are_loaded_in_head(self) -> None:
        """Critical JavaScript bundles must be referenced before the body."""

        script_sources = {script.get("src", "") for script in self.parser.scripts}
        for required_source in ("landing.js?v=20240606", "AI/app.js"):
            self.assertIn(required_source, script_sources)

    def test_noscript_stylesheet_fallbacks_are_available(self) -> None:
        """Users without JavaScript still need usable styling."""

        fallback_count = len(self.parser.noscript_styles)
        self.assertGreaterEqual(
            fallback_count,
            2,
            "Expected the <noscript> block to include at least two stylesheet fallbacks.",
        )

    def test_body_has_accessibility_state(self) -> None:
        """The body element should advertise the landing state for assistive tech."""

        self.assertRegex(
            self.index_html,
            r"<body[^>]*data-app-state=\"landing\"",
            "The landing body state attribute is missing.",
        )


# Allow running this module directly (e.g. `python test/test_landing_page_metadata.py`)
# in addition to unittest discovery.
if __name__ == "__main__":  # pragma: no cover - convenience for local runs
    unittest.main(verbosity=2)
Empty file added tests/__init__.py
Empty file.
5 changes: 5 additions & 0 deletions tests/content_integrity_test.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Main-branch regression entry point: discovers unittest modules under ./tests.
set -e
set -u
set -o pipefail

echo "Running comprehensive Talk to Unity regression tests..."
python -m unittest discover -s tests -p "test_*.py" -v
111 changes: 111 additions & 0 deletions tests/test_landing_page_structure.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
"""Regression tests for validating the structure and scripts of Talk to Unity."""

from __future__ import annotations

from html.parser import HTMLParser
from pathlib import Path
import re
import unittest


class _DependencyParser(HTMLParser):
"""Collects dependency checklist items and status containers from the landing page."""

def __init__(self) -> None:
super().__init__()
self.dependencies: list[dict[str, str]] = []
self.status_regions: list[dict[str, str]] = []
self._current_dependency: dict[str, str] | None = None

def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
attr_map = {name: value or "" for name, value in attrs}
class_attr = attr_map.get("class", "")

if tag == "li" and "dependency-item" in class_attr:
self.dependencies.append(attr_map)
self._current_dependency = attr_map
elif tag == "div" and attr_map.get("role") == "status":
self.status_regions.append(attr_map)

if tag == "span" and "dependency-status" in class_attr and self._current_dependency is not None:
# Record that we saw a visible status element for the dependency
self._current_dependency.setdefault("has_status_element", "true")

def handle_endtag(self, tag: str) -> None:
if tag == "li":
self._current_dependency = None


class LandingPageDependencyTests(unittest.TestCase):
    """Verifies the dependency checklist markup carries the required semantics."""

    @classmethod
    def setUpClass(cls) -> None:
        # Parse the landing page once and share the result across tests.
        cls.index_html = Path("index.html").read_text(encoding="utf-8")
        checklist_parser = _DependencyParser()
        checklist_parser.feed(cls.index_html)
        cls.parser = checklist_parser

    def test_expected_dependency_items_present(self) -> None:
        """The landing checklist should cover the four major readiness items."""

        found_ids = {entry.get("data-dependency", "") for entry in self.parser.dependencies}
        expected_ids = {"secure-context", "speech-recognition", "speech-synthesis", "microphone"}
        self.assertSetEqual(
            found_ids,
            expected_ids,
            "Unexpected dependency checklist items detected.",
        )

    def test_dependency_items_define_user_friendly_status(self) -> None:
        """Each checklist item needs both success and failure messaging."""

        for dependency in self.parser.dependencies:
            with self.subTest(dependency=dependency.get("data-dependency")):
                for messaging_attr in ("data-pass-status", "data-fail-status"):
                    self.assertIn(messaging_attr, dependency)
                self.assertIn("has_status_element", dependency, "Missing visible status span for dependency.")

    def test_status_regions_are_accessible(self) -> None:
        """Status messaging should be exposed to assistive technologies."""

        roles = [region.get("role") for region in self.parser.status_regions]
        self.assertGreaterEqual(len(roles), 1, "No live status region detected in the layout.")


class LandingJavaScriptStructureTests(unittest.TestCase):
    """Source-level checks for key behaviors expected in ``landing.js``."""

    @classmethod
    def setUpClass(cls) -> None:
        # Read the script once; every test inspects the same source text.
        cls.source = Path("landing.js").read_text(encoding="utf-8")

    def test_dependency_checks_cover_required_fields(self) -> None:
        """The dependencyChecks array should enumerate the major readiness checks."""

        required_ids = ("secure-context", "speech-recognition", "speech-synthesis", "microphone")
        for identifier in required_ids:
            self.assertIn(f"id: '{identifier}'", self.source)

    def test_bootstrap_sets_up_event_handlers(self) -> None:
        """Landing bootstrap should register DOM events for launch and recheck flows."""

        expected_snippets = (
            "document.addEventListener('DOMContentLoaded', bootstrapLandingExperience);",
            "launchButton?.addEventListener('click', handleLaunchButtonClick);",
            "recheckButton?.addEventListener('click', handleRecheckClick);",
        )
        for snippet in expected_snippets:
            self.assertIn(snippet, self.source)

    def test_launch_event_dispatch_includes_custom_event(self) -> None:
        """The landing page should dispatch a rich custom event for the app shell."""

        launch_event_pattern = re.compile(r"CustomEvent\('[\w-]+:launch'", re.MULTILINE)
        self.assertRegex(self.source, launch_event_pattern)

    def test_resolve_app_launch_url_targets_ai_bundle(self) -> None:
        """The launch URL resolver should always land on the AI bundle entry point."""

        self.assertIn("return new URL('./AI/index.html', base || window.location.href).toString();", self.source)


# Allow running this module directly (e.g. `python tests/test_landing_page_structure.py`)
# in addition to unittest discovery.
if __name__ == "__main__":  # pragma: no cover
    unittest.main(verbosity=2)