Initial Robot U site prototype
This commit is contained in:
commit
fe19f200d7
27 changed files with 3677 additions and 0 deletions
515
live_prototype.py
Normal file
515
live_prototype.py
Normal file
|
|
@ -0,0 +1,515 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from calendar_feeds import CalendarFeed, CalendarFeedError, fetch_calendar_feed
|
||||
from forgejo_client import ForgejoClient, ForgejoClientError
|
||||
from settings import Settings
|
||||
|
||||
|
||||
async def build_live_prototype_payload(settings: Settings) -> dict[str, object]:
    """Assemble the full homepage payload from live Forgejo and calendar data.

    Degrades gracefully: configuration gaps and API failures are collected into
    ``warnings`` and surfaced through ``_empty_payload`` instead of raising.

    Args:
        settings: Backend configuration (Forgejo base URL/token, calendar feeds).

    Returns:
        A dict with ``hero``, ``source_of_truth``, ``featured_courses``,
        ``recent_posts``, ``upcoming_events``, ``recent_discussions`` and
        ``implementation_notes`` keys, ready for the frontend.
    """
    warnings: list[str] = []
    source_cards = [
        {
            "title": "Forgejo base URL",
            "description": settings.forgejo_base_url,
        },
        {
            "title": "Access mode",
            "description": (
                "Server token configured for live API reads."
                if settings.forgejo_token
                else "Instance API requires auth. Set FORGEJO_TOKEN for live repo discovery."
            ),
        },
    ]

    # Calendar feeds are independent of Forgejo; feed failures only add warnings.
    calendar_feeds = await _load_calendar_feeds(settings, warnings)
    if settings.calendar_feed_urls:
        source_cards.append(
            {
                "title": "Calendar feeds",
                "description": f"{len(calendar_feeds)} configured feed(s)",
            },
        )

    async with ForgejoClient(settings) as client:
        # OIDC discovery works without a token; treat failure as non-fatal.
        try:
            oidc = await client.fetch_openid_configuration()
        except ForgejoClientError as error:
            warnings.append(str(error))
            oidc = {}

        issuer = oidc.get("issuer", "Unavailable")
        source_cards.append(
            {
                "title": "OIDC issuer",
                "description": str(issuer),
            },
        )

        # Without a token the instance blocks repo/issue reads: return the stub.
        if not settings.forgejo_token:
            warnings.append(
                "aksal.cloud blocks anonymous API calls, so the prototype needs FORGEJO_TOKEN "
                "before it can load repos or issues.",
            )
            source_cards.append(
                {
                    "title": "Discovery state",
                    "description": "Waiting for FORGEJO_TOKEN to enable live repo and issue reads.",
                },
            )
            return _empty_payload(
                source_cards=source_cards,
                warnings=warnings,
                hero_summary=(
                    "Connected to aksal.cloud for identity and OIDC discovery, but live repo content "
                    "is gated until a Forgejo API token is configured on the backend."
                ),
            )

        # Three independent reads run concurrently; any client error downgrades
        # the whole page to the empty payload with an explanatory warning.
        try:
            current_user, repos, issues = await asyncio.gather(
                client.fetch_current_user(),
                client.search_repositories(),
                client.search_recent_issues(),
            )
        except ForgejoClientError as error:
            warnings.append(str(error))
            source_cards.append(
                {
                    "title": "Discovery state",
                    "description": "Forgejo connection exists, but live repo discovery failed.",
                },
            )
            return _empty_payload(
                source_cards=source_cards,
                warnings=warnings,
                hero_summary=(
                    "The backend reached aksal.cloud, but the configured token could not complete "
                    "the repo discovery flow."
                ),
            )

        # Summarize public, non-fork repos only; _summarize_repo returns None
        # for repos without /lessons/ or /blogs/ content.
        # NOTE(review): _summarize_repo can still raise ForgejoClientError for
        # sub-directory listings, which would propagate out of this gather —
        # confirm that is intended.
        repo_summaries = await asyncio.gather(
            *[
                _summarize_repo(client, repo)
                for repo in repos
                if not repo.get("fork") and not repo.get("private")
            ],
        )
        content_repos = [summary for summary in repo_summaries if summary is not None]
        course_repos = [summary for summary in content_repos if summary["lesson_count"] > 0]
        post_repos = [summary for summary in content_repos if summary["blog_count"] > 0]

        source_cards.append(
            {
                "title": "Signed-in API identity",
                "description": str(current_user.get("login", "Unknown user")),
            },
        )
        source_cards.append(
            {
                "title": "Discovery state",
                "description": (
                    f"Detected {len(course_repos)} course repos, {len(post_repos)} post repos, "
                    f"and {len(issues)} recent issues."
                ),
            },
        )

        return {
            "hero": {
                "eyebrow": "Live Forgejo integration",
                "title": "Robot U is reading from your aksal.cloud Forgejo instance.",
                "summary": (
                    "This prototype now uses the real Forgejo base URL, OIDC metadata, visible repos, "
                    "and recent issues available to the configured backend token."
                ),
                "highlights": [
                    "Repo discovery filters to public, non-fork repositories only",
                    "Course repos are detected from /lessons/, post repos from /blogs/",
                    "Recent discussions are loaded from live Forgejo issues",
                ],
            },
            "source_of_truth": source_cards,
            # Cap both content strips at six cards.
            "featured_courses": [_course_card(summary) for summary in course_repos[:6]],
            "recent_posts": [_post_card(summary) for summary in post_repos[:6]],
            "upcoming_events": _event_cards(calendar_feeds, settings.calendar_event_limit),
            "recent_discussions": await asyncio.gather(
                *[_discussion_card(client, issue) for issue in issues],
            ),
            "implementation_notes": [
                "Live repo discovery is now driven by the Forgejo API instead of mock content.",
                "Issues shown here are real Forgejo issues visible to the configured token.",
                *warnings,
            ],
        }
|
||||
|
||||
|
||||
async def _summarize_repo(
    client: ForgejoClient,
    repo: dict[str, Any],
) -> dict[str, Any] | None:
    """Inspect a repo's tree and summarize its course/blog content.

    Returns None for repos that are malformed, unreadable, or contain neither a
    top-level ``lessons/`` nor ``blogs/`` directory.

    Args:
        client: Authenticated Forgejo API client.
        repo: Raw repository dict from the Forgejo search API.
    """
    owner = repo.get("owner", {})
    owner_login = owner.get("login")
    repo_name = repo.get("name")
    if not isinstance(owner_login, str) or not isinstance(repo_name, str):
        return None

    try:
        root_entries = await client.list_directory(owner_login, repo_name)
    except ForgejoClientError:
        # Unreadable repo root: silently drop this repo from the summaries.
        return None

    # Top-level directory names determine whether this is a course/blog repo.
    entry_names = {
        entry.get("name")
        for entry in root_entries
        if entry.get("type") == "dir" and isinstance(entry.get("name"), str)
    }
    has_lessons = "lessons" in entry_names
    has_blogs = "blogs" in entry_names
    if not has_lessons and not has_blogs:
        return None

    chapter_count = 0
    lesson_count = 0
    course_outline: list[dict[str, object]] = []
    if has_lessons:
        # NOTE(review): unlike the root listing, these list_directory calls are
        # not wrapped in try/except, so a failure propagates to the caller —
        # confirm that is intended.
        lesson_entries = await client.list_directory(owner_login, repo_name, "lessons")
        chapter_dirs = _sorted_dir_entries(lesson_entries)
        chapter_count = len(chapter_dirs)
        # Fetch all chapter directories concurrently.
        chapter_entry_lists = await asyncio.gather(
            *[
                client.list_directory(owner_login, repo_name, f"lessons/{entry['name']}")
                for entry in chapter_dirs
                if isinstance(entry.get("name"), str)
            ],
        )
        # Each sub-directory of a chapter counts as one lesson.
        lesson_count = sum(
            1
            for chapter_entries in chapter_entry_lists
            for entry in chapter_entries
            if entry.get("type") == "dir"
        )
        for chapter_dir, chapter_entries in zip(chapter_dirs, chapter_entry_lists, strict=False):
            chapter_name = str(chapter_dir.get("name", ""))
            lesson_dirs = _sorted_dir_entries(chapter_entries)
            # Summarize every lesson in this chapter concurrently.
            lesson_summaries = await asyncio.gather(
                *[
                    _summarize_lesson(
                        client,
                        owner_login,
                        repo_name,
                        chapter_name,
                        str(lesson_dir.get("name", "")),
                    )
                    for lesson_dir in lesson_dirs
                ],
            )
            course_outline.append(
                {
                    "slug": chapter_name,
                    "title": _display_name(chapter_name),
                    "lessons": lesson_summaries,
                },
            )

    blog_count = 0
    if has_blogs:
        blog_entries = await client.list_directory(owner_login, repo_name, "blogs")
        blog_count = sum(1 for entry in blog_entries if entry.get("type") == "dir")

    return {
        "name": repo_name,
        "owner": owner_login,
        "full_name": repo.get("full_name", f"{owner_login}/{repo_name}"),
        "html_url": repo.get("html_url", ""),
        "description": repo.get("description") or "No repository description yet.",
        "lesson_count": lesson_count,
        "chapter_count": chapter_count,
        "blog_count": blog_count,
        "updated_at": repo.get("updated_at", ""),
        "course_outline": course_outline,
    }
|
||||
|
||||
|
||||
def _course_card(summary: dict[str, Any]) -> dict[str, object]:
|
||||
return {
|
||||
"title": summary["name"],
|
||||
"owner": summary["owner"],
|
||||
"name": summary["name"],
|
||||
"repo": summary["full_name"],
|
||||
"html_url": summary["html_url"],
|
||||
"lessons": summary["lesson_count"],
|
||||
"chapters": summary["chapter_count"],
|
||||
"summary": summary["description"],
|
||||
"status": "Live course repo",
|
||||
"outline": summary["course_outline"],
|
||||
}
|
||||
|
||||
|
||||
def _post_card(summary: dict[str, Any]) -> dict[str, object]:
|
||||
post_count = int(summary["blog_count"])
|
||||
label = "1 post folder detected" if post_count == 1 else f"{post_count} post folders detected"
|
||||
return {
|
||||
"title": summary["name"],
|
||||
"repo": summary["full_name"],
|
||||
"kind": "Repo with /blogs/",
|
||||
"summary": f"{label}. {summary['description']}",
|
||||
}
|
||||
|
||||
|
||||
def _event_cards(calendar_feeds: list[CalendarFeed], limit: int) -> list[dict[str, object]]:
|
||||
upcoming_events = sorted(
|
||||
[event for feed in calendar_feeds for event in feed.events],
|
||||
key=lambda event: event.starts_at,
|
||||
)[:limit]
|
||||
return [
|
||||
{
|
||||
"title": event.title,
|
||||
"when": _format_event_datetime(event.starts_at),
|
||||
"source": event.source,
|
||||
"mode": event.mode,
|
||||
}
|
||||
for event in upcoming_events
|
||||
]
|
||||
|
||||
|
||||
async def _discussion_card(client: ForgejoClient, issue: dict[str, Any]) -> dict[str, object]:
|
||||
repository = issue.get("repository") or {}
|
||||
owner = repository.get("owner", "")
|
||||
full_name = repository.get("full_name", "Unknown repo")
|
||||
comments = issue.get("comments", 0)
|
||||
issue_number = int(issue.get("number", 0))
|
||||
issue_author = issue.get("user") or {}
|
||||
labels = [
|
||||
label.get("name")
|
||||
for label in issue.get("labels", [])
|
||||
if isinstance(label, dict) and isinstance(label.get("name"), str)
|
||||
]
|
||||
comment_items: list[dict[str, object]] = []
|
||||
if isinstance(owner, str) and isinstance(repository.get("name"), str) and issue_number > 0:
|
||||
try:
|
||||
comment_items = [
|
||||
_discussion_reply(comment)
|
||||
for comment in await client.list_issue_comments(
|
||||
owner,
|
||||
repository["name"],
|
||||
issue_number,
|
||||
)
|
||||
]
|
||||
except ForgejoClientError:
|
||||
comment_items = []
|
||||
|
||||
body = str(issue.get("body", "") or "").strip()
|
||||
if not body:
|
||||
body = "No issue description yet. Right now the conversation starts in the replies."
|
||||
|
||||
return {
|
||||
"id": int(issue.get("id", 0)),
|
||||
"title": issue.get("title", "Untitled issue"),
|
||||
"repo": full_name,
|
||||
"replies": comments,
|
||||
"context": "Live Forgejo issue",
|
||||
"author": issue_author.get("login", "Unknown author"),
|
||||
"author_avatar_url": issue_author.get("avatar_url", ""),
|
||||
"state": issue.get("state", "open"),
|
||||
"body": body,
|
||||
"number": issue_number,
|
||||
"updated_at": issue.get("updated_at", ""),
|
||||
"html_url": issue.get("html_url", ""),
|
||||
"labels": [label for label in labels if isinstance(label, str)],
|
||||
"comments": comment_items,
|
||||
}
|
||||
|
||||
|
||||
def _discussion_reply(comment: dict[str, Any]) -> dict[str, object]:
|
||||
author = comment.get("user") or {}
|
||||
body = str(comment.get("body", "") or "").strip()
|
||||
if not body:
|
||||
body = "No comment body provided."
|
||||
|
||||
return {
|
||||
"id": int(comment.get("id", 0)),
|
||||
"author": author.get("login", "Unknown author"),
|
||||
"avatar_url": author.get("avatar_url", ""),
|
||||
"body": body,
|
||||
"created_at": comment.get("created_at", ""),
|
||||
"html_url": comment.get("html_url", ""),
|
||||
}
|
||||
|
||||
|
||||
def _empty_payload(
|
||||
*,
|
||||
source_cards: list[dict[str, str]],
|
||||
warnings: list[str],
|
||||
hero_summary: str,
|
||||
) -> dict[str, object]:
|
||||
return {
|
||||
"hero": {
|
||||
"eyebrow": "Forgejo connection status",
|
||||
"title": "Robot U is configured for aksal.cloud.",
|
||||
"summary": hero_summary,
|
||||
"highlights": [
|
||||
"Forgejo remains the source of truth for content and discussions",
|
||||
"The prototype now targets aksal.cloud by default",
|
||||
"Live repo discovery unlocks as soon as a backend token is configured",
|
||||
],
|
||||
},
|
||||
"source_of_truth": source_cards,
|
||||
"featured_courses": [],
|
||||
"recent_posts": [],
|
||||
"upcoming_events": [],
|
||||
"recent_discussions": [],
|
||||
"implementation_notes": warnings
|
||||
or ["Live repo discovery is ready, but no Forgejo token has been configured yet."],
|
||||
}
|
||||
|
||||
|
||||
async def _summarize_lesson(
    client: ForgejoClient,
    owner: str,
    repo: str,
    chapter_name: str,
    lesson_name: str,
) -> dict[str, object]:
    """Summarize one lesson folder from its first markdown file.

    Falls back to ``_empty_lesson`` whenever the folder listing, the markdown
    lookup, or the file fetch fails, so one broken lesson never aborts the
    enclosing repo summary.

    Args:
        client: Authenticated Forgejo API client.
        owner: Repository owner login.
        repo: Repository name.
        chapter_name: Chapter directory name under ``lessons/``.
        lesson_name: Lesson directory name under the chapter.
    """
    lesson_path = f"lessons/{chapter_name}/{lesson_name}"
    fallback_title = _display_name(lesson_name)

    try:
        lesson_entries = await client.list_directory(owner, repo, lesson_path)
    except ForgejoClientError:
        return _empty_lesson(lesson_name, fallback_title, lesson_path)

    # The alphabetically-first .md file is treated as the lesson's main document.
    markdown_files = sorted(
        [
            entry
            for entry in lesson_entries
            if entry.get("type") == "file"
            and isinstance(entry.get("name"), str)
            and str(entry.get("name", "")).lower().endswith(".md")
        ],
        key=lambda entry: str(entry["name"]),
    )
    if not markdown_files:
        return _empty_lesson(lesson_name, fallback_title, lesson_path)

    markdown_name = str(markdown_files[0]["name"])
    markdown_path = f"{lesson_path}/{markdown_name}"

    try:
        file_payload = await client.get_file_content(owner, repo, markdown_path)
    except ForgejoClientError:
        # Keep the listing-derived path/link so the UI can still point at the file.
        return _empty_lesson(
            lesson_name,
            fallback_title,
            lesson_path,
            file_path=markdown_path,
            html_url=str(markdown_files[0].get("html_url", "")),
        )

    metadata, body = _parse_frontmatter(str(file_payload.get("content", "")))
    return {
        "slug": lesson_name,
        # Frontmatter title wins; otherwise derive a title from the file name.
        "title": str(metadata.get("title") or _display_name(markdown_name) or fallback_title),
        "summary": str(metadata.get("summary") or ""),
        "path": lesson_path,
        "file_path": str(file_payload.get("path", markdown_path)),
        "html_url": str(file_payload.get("html_url", "")),
        "body": body,
    }
|
||||
|
||||
|
||||
def _sorted_dir_entries(entries: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
||||
return sorted(
|
||||
[
|
||||
entry
|
||||
for entry in entries
|
||||
if entry.get("type") == "dir" and isinstance(entry.get("name"), str)
|
||||
],
|
||||
key=lambda entry: str(entry["name"]),
|
||||
)
|
||||
|
||||
|
||||
def _display_name(value: str) -> str:
|
||||
cleaned = value.strip().rsplit(".", 1)[0]
|
||||
cleaned = cleaned.replace("_", " ").replace("-", " ")
|
||||
cleaned = " ".join(cleaned.split())
|
||||
cleaned = cleaned.lstrip("0123456789 ").strip()
|
||||
return cleaned.title() or value
|
||||
|
||||
|
||||
async def _load_calendar_feeds(settings: Settings, warnings: list[str]) -> list[CalendarFeed]:
|
||||
if not settings.calendar_feed_urls:
|
||||
return []
|
||||
|
||||
results = await asyncio.gather(
|
||||
*[
|
||||
fetch_calendar_feed(url, settings.forgejo_request_timeout_seconds)
|
||||
for url in settings.calendar_feed_urls
|
||||
],
|
||||
return_exceptions=True,
|
||||
)
|
||||
|
||||
feeds: list[CalendarFeed] = []
|
||||
for url, result in zip(settings.calendar_feed_urls, results, strict=False):
|
||||
if isinstance(result, CalendarFeed):
|
||||
feeds.append(result)
|
||||
continue
|
||||
if isinstance(result, CalendarFeedError):
|
||||
warnings.append(str(result))
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
warnings.append(f"Calendar feed failed for {url}: {result}")
|
||||
|
||||
return feeds
|
||||
|
||||
|
||||
def _format_event_datetime(value: Any) -> str:
|
||||
return value.strftime("%b %-d, %-I:%M %p UTC")
|
||||
|
||||
|
||||
def _empty_lesson(
|
||||
lesson_name: str,
|
||||
title: str,
|
||||
lesson_path: str,
|
||||
*,
|
||||
file_path: str = "",
|
||||
html_url: str = "",
|
||||
) -> dict[str, object]:
|
||||
return {
|
||||
"slug": lesson_name,
|
||||
"title": title,
|
||||
"summary": "",
|
||||
"path": lesson_path,
|
||||
"file_path": file_path,
|
||||
"html_url": html_url,
|
||||
"body": "",
|
||||
}
|
||||
|
||||
|
||||
def _parse_frontmatter(markdown: str) -> tuple[dict[str, str], str]:
|
||||
if not markdown.startswith("---\n"):
|
||||
return {}, markdown.strip()
|
||||
|
||||
lines = markdown.splitlines()
|
||||
if not lines or lines[0].strip() != "---":
|
||||
return {}, markdown.strip()
|
||||
|
||||
metadata: dict[str, str] = {}
|
||||
for index, line in enumerate(lines[1:], start=1):
|
||||
if line.strip() == "---":
|
||||
body = "\n".join(lines[index + 1 :]).strip()
|
||||
return metadata, body
|
||||
if ":" not in line:
|
||||
continue
|
||||
|
||||
key, raw_value = line.split(":", 1)
|
||||
key = key.strip()
|
||||
value = raw_value.strip().strip("\"'")
|
||||
if key and value:
|
||||
metadata[key] = value
|
||||
|
||||
return {}, markdown.strip()
|
||||
Loading…
Add table
Add a link
Reference in a new issue