Merge pull request 'feat/display' (#9) from feat/display into main

Reviewed-on: #9
This commit was merged in pull request #9.
This commit is contained in:
2026-03-15 06:25:57 +00:00
2 changed files with 181 additions and 63 deletions

133
engine/fetch.py Normal file
View File

@@ -0,0 +1,133 @@
"""
RSS feed fetching, Project Gutenberg parsing, and headline caching.
Depends on: config, sources, filter, terminal.
"""
import re
import json
import pathlib
import urllib.request
from datetime import datetime
import feedparser
from engine import config
from engine.sources import FEEDS, POETRY_SOURCES
from engine.filter import strip_tags, skip
from engine.terminal import boot_ln
# ─── SINGLE FEED ──────────────────────────────────────────
def fetch_feed(url):
    """Download and parse a single RSS/Atom feed.

    Parameters:
        url: feed URL to download.

    Returns:
        A feedparser result object, or None if the download fails for
        any reason (best-effort: callers treat None as a dark source).
    """
    try:
        req = urllib.request.Request(url, headers={"User-Agent": "mainline/0.1"})
        # `with` ensures the HTTP response is closed even if parsing
        # raises — the original leaked the connection.
        with urllib.request.urlopen(req, timeout=config.FEED_TIMEOUT) as resp:
            return feedparser.parse(resp.read())
    except Exception:
        return None
# ─── ALL RSS FEEDS ────────────────────────────────────────
def fetch_all():
    """Fetch every configured RSS feed and collect headline items.

    Returns:
        (items, linked, failed) where items is a list of
        (title, source, "HH:MM") tuples and linked/failed count sources.
    """
    items = []
    linked = failed = 0
    for src, url in FEEDS.items():
        feed = fetch_feed(url)
        # A feed is "dark" when the fetch failed outright or the parse
        # was bozo with nothing usable in it.
        if feed is None or (feed.bozo and not feed.entries):
            boot_ln(src, "DARK", False)
            failed += 1
            continue
        accepted = 0
        for entry in feed.entries:
            title = strip_tags(entry.get("title", ""))
            if not title or skip(title):
                continue
            published = entry.get("published_parsed") or entry.get("updated_parsed")
            # Malformed time tuples fall back to the placeholder stamp.
            try:
                stamp = datetime(*published[:6]).strftime("%H:%M") if published else "——:——"
            except Exception:
                stamp = "——:——"
            items.append((title, src, stamp))
            accepted += 1
        if accepted:
            boot_ln(src, f"LINKED [{accepted}]", True)
            linked += 1
        else:
            boot_ln(src, "EMPTY", False)
            failed += 1
    return items, linked, failed
# ─── PROJECT GUTENBERG ────────────────────────────────────
def _fetch_gutenberg(url, label):
    """Download and parse stanzas/passages from a Project Gutenberg text.

    Parameters:
        url: URL of the plain-text Gutenberg file.
        label: source label attached to each returned item.

    Returns:
        A list of (text, label, "") tuples, or [] on any failure
        (best-effort: a broken source simply contributes nothing).
    """
    try:
        req = urllib.request.Request(url, headers={"User-Agent": "mainline/0.1"})
        # `with` ensures the HTTP response is closed — the original
        # leaked the connection.
        with urllib.request.urlopen(req, timeout=15) as resp:
            raw = resp.read()
        # Normalize line endings so the blank-line splitting below works.
        text = raw.decode('utf-8', errors='replace').replace('\r\n', '\n').replace('\r', '\n')
        # Strip PG boilerplate header/footer markers.
        m = re.search(r'\*\*\*\s*START OF[^\n]*\n', text)
        if m:
            text = text[m.end():]
        m = re.search(r'\*\*\*\s*END OF', text)
        if m:
            text = text[:m.start()]
        # Split on blank lines into stanzas/passages
        blocks = re.split(r'\n{2,}', text.strip())
        items = []
        for blk in blocks:
            blk = ' '.join(blk.split())  # flatten to one line
            if len(blk) < 20 or len(blk) > 280:
                continue  # drop fragments and over-long passages
            if blk.isupper():  # skip all-caps headers
                continue
            if re.match(r'^[IVXLCDM]+\.?\s*$', blk):  # roman numerals
                continue
            items.append((blk, label, ''))
        return items
    except Exception:
        return []
def fetch_poetry():
    """Fetch all poetry/literature sources.

    Returns:
        (items, linked, failed) — same shape as fetch_all().
    """
    items = []
    linked = failed = 0
    for label, url in POETRY_SOURCES.items():
        passages = _fetch_gutenberg(url, label)
        if not passages:
            # Source unreachable or yielded nothing usable.
            boot_ln(label, "DARK", False)
            failed += 1
            continue
        boot_ln(label, f"LOADED [{len(passages)}]", True)
        items.extend(passages)
        linked += 1
    return items, linked, failed
# ─── CACHE ────────────────────────────────────────────────
# Cache files live in the project root, one level above this package.
_CACHE_DIR = pathlib.Path(__file__).resolve().parent.parent
def _cache_path():
    """Return the cache file path for the current display mode."""
    filename = f".mainline_cache_{config.MODE}.json"
    return _CACHE_DIR / filename
def load_cache():
    """Load cached items from disk if available.

    Returns:
        A non-empty list of item tuples, or None when the cache file is
        missing, empty, or unreadable.
    """
    path = _cache_path()
    if not path.exists():
        return None
    # Any parse/shape problem means "no usable cache".
    try:
        payload = json.loads(path.read_text())
        cached = [tuple(entry) for entry in payload["items"]]
    except Exception:
        return None
    return cached or None
def save_cache(items):
    """Save fetched items to disk for fast subsequent runs.

    Best-effort: any failure (permissions, unserializable items) is
    swallowed so a cache problem never breaks the run.
    """
    try:
        target = _cache_path()
        target.write_text(json.dumps({"items": items}))
    except Exception:
        pass

View File

@@ -848,6 +848,7 @@ def stream(items):
sh = h - fh
# ── Check for ntfy message ────────────────────────
msg_h = 0 # rows consumed by message zone at top
msg_active = False
with _ntfy_lock:
if _ntfy_message is not None:
@@ -857,10 +858,9 @@ def stream(items):
else:
_ntfy_message = None # expired
buf = []
if msg_active:
# ── MESSAGE state: freeze scroll, render message ──
buf = []
# Render message text with OTF font (cached across frames)
# ── Message zone: pinned to top, scroll continues below ──
display_text = m_body or m_title or "(empty)"
display_text = re.sub(r"\s+", " ", display_text.upper())
cache_key = (display_text, w)
@@ -870,50 +870,33 @@ def stream(items):
else:
msg_rows = _msg_cache[1]
msg_rows = _lr_gradient(msg_rows, (time.monotonic() * GRAD_SPEED) % 1.0)
# Center vertically in scroll zone
total_h = len(msg_rows) + 4 # +4 for border + meta + padding
y_off = max(0, (sh - total_h) // 2)
for r in range(sh):
ri = r - y_off
if ri == 0 or ri == total_h - 1:
# Border lines
bar = "" * (w - 4)
buf.append(f"\033[{r+1};1H {MSG_BORDER}{bar}{RST}\033[K")
elif 1 <= ri <= len(msg_rows):
ln = _vis_trunc(msg_rows[ri - 1], w)
buf.append(f"\033[{r+1};1H {ln}{RST}\033[K")
elif ri == len(msg_rows) + 1:
# Title line (if present and different from body)
if m_title and m_title != m_body:
meta = f" {MSG_META}\u2591 {m_title}{RST}"
else:
meta = ""
buf.append(f"\033[{r+1};1H{meta}\033[K")
elif ri == len(msg_rows) + 2:
# Source + timestamp
elapsed_s = int(time.monotonic() - m_ts)
remaining = max(0, MESSAGE_DISPLAY_SECS - elapsed_s)
ts_str = datetime.now().strftime("%H:%M:%S")
meta = f" {MSG_META}\u2591 ntfy \u00b7 {ts_str} \u00b7 {remaining}s{RST}"
buf.append(f"\033[{r+1};1H{meta}\033[K")
else:
# Sparse noise outside the message
if random.random() < 0.06:
buf.append(f"\033[{r+1};1H{noise(w)}")
else:
buf.append(f"\033[{r+1};1H\033[K")
# Firehose keeps running during messages
if FIREHOSE and fh > 0:
for fr in range(fh):
fline = _firehose_line(items, w)
buf.append(f"\033[{sh + fr + 1};1H{fline}\033[K")
sys.stdout.buffer.write("".join(buf).encode())
sys.stdout.flush()
elapsed = time.monotonic() - t0
time.sleep(max(0, _FRAME_DT - elapsed))
continue
# Layout: rendered text + meta + border
elapsed_s = int(time.monotonic() - m_ts)
remaining = max(0, MESSAGE_DISPLAY_SECS - elapsed_s)
ts_str = datetime.now().strftime("%H:%M:%S")
row_idx = 0
for mr in msg_rows:
ln = _vis_trunc(mr, w)
buf.append(f"\033[{row_idx+1};1H {ln}{RST}\033[K")
row_idx += 1
# Meta line: title (if distinct) + source + countdown
meta_parts = []
if m_title and m_title != m_body:
meta_parts.append(m_title)
meta_parts.append(f"ntfy \u00b7 {ts_str} \u00b7 {remaining}s")
meta = " " + " \u00b7 ".join(meta_parts) if len(meta_parts) > 1 else " " + meta_parts[0]
buf.append(f"\033[{row_idx+1};1H{MSG_META}{meta}{RST}\033[K")
row_idx += 1
# Border — constant boundary between message and scroll
bar = "\u2500" * (w - 4)
buf.append(f"\033[{row_idx+1};1H {MSG_BORDER}{bar}{RST}\033[K")
row_idx += 1
msg_h = row_idx
# ── SCROLL state: normal headline rendering ───────
# Effective scroll zone: below message, above firehose
scroll_h = sh - msg_h
# ── Scroll: headline rendering (always runs) ──────
# Advance scroll on schedule
scroll_accum += _FRAME_DT
while scroll_accum >= scroll_interval:
@@ -935,15 +918,16 @@ def stream(items):
if k < cam:
del noise_cache[k]
# Draw scroll zone
top_zone = max(1, int(sh * 0.25))
bot_zone = max(1, int(sh * 0.10))
# Draw scroll zone (below message zone, above firehose)
top_zone = max(1, int(scroll_h * 0.25))
bot_zone = max(1, int(scroll_h * 0.10))
grad_offset = (time.monotonic() * GRAD_SPEED) % 1.0
buf = []
for r in range(sh):
scroll_buf_start = len(buf) # track where scroll rows start in buf
for r in range(scroll_h):
scr_row = msg_h + r + 1 # 1-indexed ANSI screen row
cy = cam + r
top_f = min(1.0, r / top_zone)
bot_f = min(1.0, (sh - 1 - r) / bot_zone)
top_f = min(1.0, r / top_zone) if top_zone > 0 else 1.0
bot_f = min(1.0, (scroll_h - 1 - r) / bot_zone) if bot_zone > 0 else 1.0
row_fade = min(top_f, bot_f)
drawn = False
for content, hc, by, midx in active:
@@ -958,11 +942,11 @@ def stream(items):
if row_fade < 1.0:
ln = _fade_line(ln, row_fade)
if cr == midx:
buf.append(f"\033[{r+1};1H{W_COOL}{ln}{RST}\033[K")
buf.append(f"\033[{scr_row};1H{W_COOL}{ln}{RST}\033[K")
elif ln.strip():
buf.append(f"\033[{r+1};1H{ln}{RST}\033[K")
buf.append(f"\033[{scr_row};1H{ln}{RST}\033[K")
else:
buf.append(f"\033[{r+1};1H\033[K")
buf.append(f"\033[{scr_row};1H\033[K")
drawn = True
break
if not drawn:
@@ -970,9 +954,9 @@ def stream(items):
if row_fade < 1.0 and n:
n = _fade_line(n, row_fade)
if n:
buf.append(f"\033[{r+1};1H{n}")
buf.append(f"\033[{scr_row};1H{n}")
else:
buf.append(f"\033[{r+1};1H\033[K")
buf.append(f"\033[{scr_row};1H\033[K")
# Draw firehose zone
if FIREHOSE and fh > 0:
@@ -984,11 +968,12 @@ def stream(items):
mic_excess = max(0.0, _mic_db - MIC_THRESHOLD_DB)
glitch_prob = 0.32 + min(0.9, mic_excess * 0.16)
n_hits = 4 + int(mic_excess / 2)
g_limit = sh if FIREHOSE else len(buf)
if random.random() < glitch_prob and g_limit > 0:
for _ in range(min(n_hits, g_limit)):
gi = random.randint(0, g_limit - 1)
buf[gi] = f"\033[{gi+1};1H{glitch_bar(w)}"
scroll_buf_len = len(buf) - scroll_buf_start
if random.random() < glitch_prob and scroll_buf_len > 0:
for _ in range(min(n_hits, scroll_buf_len)):
gi = random.randint(0, scroll_buf_len - 1)
scr_row = msg_h + gi + 1
buf[scroll_buf_start + gi] = f"\033[{scr_row};1H{glitch_bar(w)}"
sys.stdout.buffer.write("".join(buf).encode())
sys.stdout.flush()