5 changes: 4 additions & 1 deletion .custom_wordlist.txt
@@ -14,6 +14,7 @@ backend
backends
balancer
balancers
+Canonical's
Charmcraft
Charmhub
CIdP
@@ -135,6 +136,7 @@ SHA
SSL
stateful
stdin
+stdout
subcommand
subdirectories
subnet
@@ -174,4 +176,5 @@ wordlist
xetex
xindy
yaml
-YAML
+YAML
+Makefiles
20 changes: 0 additions & 20 deletions .github/workflows/automatic-doc-checks.yml

This file was deleted.

16 changes: 8 additions & 8 deletions .sphinx/get_vale_conf.py
@@ -31,12 +31,12 @@
def clone_repo_and_copy_paths(file_source_dest, overwrite=False):
"""
Clone the repository to a temporary directory and copy required files

Args:
file_source_dest: dictionary of file paths to copy from the repository,
and their destination paths
overwrite: boolean flag to overwrite existing files in the destination

Returns:
bool: True if all files were copied successfully, False otherwise
"""
@@ -52,8 +52,8 @@ def clone_repo_and_copy_paths(file_source_dest, overwrite=False):

try:
result = subprocess.run(
-            clone_cmd,
-            capture_output=True,
+            clone_cmd,
+            capture_output=True,
text=True,
check=True
)
@@ -73,7 +73,7 @@ def clone_repo_and_copy_paths(file_source_dest, overwrite=False):
continue

if not copy_files_to_path(source_path, dest, overwrite):
-                    is_copy_success = False
+                is_copy_success = False
logging.error("Failed to copy %s to %s", source_path, dest)

# Clean up temporary directory
@@ -85,12 +85,12 @@ def clone_repo_and_copy_paths(file_source_dest, overwrite=False):
def copy_files_to_path(source_path, dest_path, overwrite=False):
"""
Copy a file or directory from source to destination

Args:
source_path: Path to the source file or directory
dest_path: Path to the destination
overwrite: Boolean flag to overwrite existing files in the destination

Returns:
bool: True if copy was successful, False otherwise
"""
@@ -138,7 +138,7 @@ def main():
# Parse command line arguments, default to overwrite_enabled = True
overwrite_enabled = not parse_arguments().no_overwrite

-    # Download into /tmp through git clone
+    # Download into /tmp through git clone
if not clone_repo_and_copy_paths(vale_files_dict, overwrite=overwrite_enabled):
logging.error("Failed to download files from repository")
return 1
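
For context, clone_repo_and_copy_paths expects a mapping of repository paths to local
destination paths, as described in its docstring. A minimal usage sketch with invented
paths (the real vale_files_dict is defined elsewhere in the script and does not appear
in this diff):

    # Hypothetical call; the source and destination paths are invented for illustration.
    vale_files = {
        "styles/config/vocabularies": ".sphinx/styles/config/vocabularies",
        ".vale.ini": ".vale.ini",
    }
    if not clone_repo_and_copy_paths(vale_files, overwrite=True):
        logging.error("Failed to download files from repository")
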
94 changes: 94 additions & 0 deletions .sphinx/metrics/build_metrics.py
@@ -0,0 +1,94 @@
#!/usr/bin/python3

import sys
import argparse
from pathlib import Path
from html.parser import HTMLParser
from urllib.parse import urlsplit


class MetricsParser(HTMLParser):
def __init__(self):
super().__init__()
self.int_link_count = 0
self.ext_link_count = 0
self.fragment_count = 0
self.image_count = 0
self.in_object = 0

@property
def link_count(self):
return self.fragment_count + self.int_link_count + self.ext_link_count

def read(self, file):
"""
Read *file* (a file-like object with a ``read`` method returning
strings) a chunk at a time, feeding each chunk to the parser.
"""
# Ensure the parser state is reset before each file (just in case
# there's an erroneous dangling <object>)
self.reset()
self.in_object = 0
buf = ''
while True:
# Parse 1MB chunks at a time
buf = file.read(1024**2)
if not buf:
break
self.feed(buf)

def handle_starttag(self, tag, attrs):
"""
Count <a>, <img>, and <object> tags to determine the number of internal
and external links, and the number of images.
"""
attrs = dict(attrs)
if tag == 'a' and 'href' in attrs:
# If there's no href, it's an anchor; if there's no hostname
# (netloc) or path, it's just a fragment link within the page
url = urlsplit(attrs['href'])
if url.netloc:
self.ext_link_count += 1
elif url.path:
self.int_link_count += 1
else:
self.fragment_count += 1
elif tag == 'object':
# <object> tags are a bit complex as they nest to offer fallbacks
# and may contain an <img> fallback. We only want to count the
# outer-most <object> in this case
if self.in_object == 0:
self.image_count += 1
self.in_object += 1
elif tag == 'img' and self.in_object == 0:
self.image_count += 1

def handle_endtag(self, tag):
if tag == 'object':
# Never let in_object be negative
self.in_object = max(0, self.in_object - 1)


def main(args=None):
parser = argparse.ArgumentParser()
parser.add_argument(
'build_dir', metavar='build-dir', nargs='?', default='.',
help="The directory to scan for HTML files")
config = parser.parse_args(args)

parser = MetricsParser()
for path in Path(config.build_dir).rglob('*.html'):
with path.open('r', encoding='utf-8', errors='replace') as f:
parser.read(f)

print('Summarising metrics for build files (.html)...')
print(f'\tlinks: {parser.link_count} ('
f'{parser.fragment_count} #frag…, '
f'{parser.int_link_count} /int…, '
f'{parser.ext_link_count} https://ext…'
')')
print(f'\timages: {parser.image_count}')


if __name__ == '__main__':
sys.exit(main())
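
A minimal sketch of how the parser classifies links and images, assuming
build_metrics.py is importable as a module; the HTML snippet is invented for
illustration and is not part of this PR:

    # Hypothetical example showing the counting rules of MetricsParser.
    import io
    from build_metrics import MetricsParser  # assumes the module is on sys.path

    html = (
        '<a href="https://example.com/page/">external</a>'
        '<a href="../howto/index.html">internal</a>'
        '<a href="#section">fragment</a>'
        '<img src="diagram.png">'
        '<object data="chart.svg"><img src="chart.png"></object>'
    )

    parser = MetricsParser()
    parser.read(io.StringIO(html))
    # ext_link_count == 1, int_link_count == 1, fragment_count == 1
    # image_count == 2 (the <img> fallback inside <object> is not double-counted)
    print(parser.link_count, parser.image_count)  # 3 2
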
66 changes: 66 additions & 0 deletions .sphinx/metrics/source_metrics.sh
@@ -0,0 +1,66 @@
#!/bin/bash
# shellcheck disable=all

VENV=".sphinx/venv/bin/activate"

files=0
words=0
readabilityWords=0
readabilitySentences=0
readabilitySyllables=0
readabilityAverage=0
readable=true

# measure number of files (.rst and .md), excluding those in .sphinx dir
files=$(find . -type d -path './.sphinx' -prune -o -type f \( -name '*.md' -o -name '*.rst' \) -print | wc -l)

# calculate metrics only if source files are present
if [ "$files" -eq 0 ]; then
echo "There are no source files to calculate metrics"
else
# measure raw total number of words, excluding those in .sphinx dir
words=$(find . -type d -path './.sphinx' -prune -o \( -name '*.md' -o -name '*.rst' \) -exec cat {} + | wc -w)

# calculate readability for markdown source files
echo "Activating virtual environment to run vale..."
source "${VENV}"

for file in *.md *.rst; do
if [ -f "$file" ]; then
readabilityWords=$(vale ls-metrics "$file" | grep '"words"' | sed 's/[^0-9]*//g')
readabilitySentences=$(vale ls-metrics "$file" | grep '"sentences"' | sed 's/[^0-9]*//g')
readabilitySyllables=$(vale ls-metrics "$file" | grep '"syllables"' | sed 's/[^0-9]*//g')
fi
done

echo "Deactivating virtual environment..."
deactivate

# calculate mean number of words
if [ "$files" -ge 1 ]; then
meanval=$((readabilityWords / files))
else
meanval=$readabilityWords
fi

readabilityAverage=$(echo "scale=2; 0.39 * ($readabilityWords / $readabilitySentences) + (11.8 * ($readabilitySyllables / $readabilityWords)) - 15.59" | bc)

# cast average to int for comparison
readabilityAverageInt=$(echo "$readabilityAverage / 1" | bc)

# value below 8 is considered readable
if [ "$readabilityAverageInt" -lt 8 ]; then
readable=true
else
readable=false
fi

# summarise latest metrics
echo "Summarising metrics for source files (.md, .rst)..."
echo -e "\ttotal files: $files"
echo -e "\ttotal words (raw): $words"
echo -e "\ttotal words (prose): $readabilityWords"
echo -e "\taverage word count: $meanval"
echo -e "\treadability: $readabilityAverage"
echo -e "\treadable: $readable"
fi
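
The readability value the script computes with bc is the Flesch-Kincaid grade-level
formula. A rough Python equivalent of the same calculation, using placeholder counts
rather than real vale ls-metrics output:

    # Rough Python equivalent of the bc readability calculation above;
    # the counts are placeholder values, not real vale output.
    words, sentences, syllables = 1200, 80, 1900
    grade = 0.39 * (words / sentences) + 11.8 * (syllables / words) - 15.59
    readable = grade < 8  # the script treats a grade below 8 as readable
    print(f"readability: {grade:.2f}, readable: {readable}")
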
8 changes: 5 additions & 3 deletions .sphinx/update_sp.py
@@ -18,7 +18,9 @@
from requests.exceptions import RequestException
from packaging.version import parse as parse_version

-SPHINX_DIR = os.path.join(os.getcwd(), ".sphinx")
+SPHINX_DIR = os.path.abspath(os.path.dirname(__file__))
+DOCS_DIR = os.path.abspath(os.path.join(SPHINX_DIR, '..'))
+REQUIREMENTS = os.path.join(DOCS_DIR, "requirements.txt")
SPHINX_UPDATE_DIR = os.path.join(SPHINX_DIR, "update")
GITHUB_REPO = "canonical/sphinx-docs-starter-pack"
GITHUB_API_BASE = f"https://api.github.com/repos/{GITHUB_REPO}"
@@ -103,7 +105,7 @@ def main():
# Check requirements are the same
new_requirements = []
try:
with open("requirements.txt", "r") as file:
with open(REQUIREMENTS, "r") as file:
logging.debug("Checking requirements")

local_reqs = set(file.read().splitlines()) - {""}
@@ -206,7 +208,7 @@ def update_static_files():
# Writes return value for parent function
if new_file_list != []:
# Provides more information on new files
with open("NEWFILES.txt", "w") as f:
with open(f"{SPHINX_DIR}/NEWFILES.txt", "w") as f:
for entry in new_file_list:
f.write(f"{entry}\n")
logging.debug("Some downloaded files are new")
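
The effect of the new constants is that paths are resolved relative to the script's own
location rather than the current working directory. Roughly, for a hypothetical checkout
at /home/user/docs:

    # Illustrative only: how the new constants resolve when the script lives at
    # /home/user/docs/.sphinx/update_sp.py (a hypothetical location).
    import os
    script_path = "/home/user/docs/.sphinx/update_sp.py"        # stands in for __file__
    sphinx_dir = os.path.abspath(os.path.dirname(script_path))  # /home/user/docs/.sphinx
    docs_dir = os.path.abspath(os.path.join(sphinx_dir, ".."))  # /home/user/docs
    requirements = os.path.join(docs_dir, "requirements.txt")   # /home/user/docs/requirements.txt
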
2 changes: 1 addition & 1 deletion .sphinx/version
@@ -1 +1 @@
-1.2.0
+1.3.0