Compare commits
7 Commits
feat/mod_p
...
3571e2780b
| Author | SHA1 | Date | |
|---|---|---|---|
| 3571e2780b | |||
| dfd902fb90 | |||
| 424332e065 | |||
| 2e69cad984 | |||
| 7274f57bbb | |||
| c857d7bd81 | |||
| 6a5a73fd88 |
133
engine/fetch.py
Normal file
133
engine/fetch.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
"""
|
||||||
|
RSS feed fetching, Project Gutenberg parsing, and headline caching.
|
||||||
|
Depends on: config, sources, filter, terminal.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import json
|
||||||
|
import pathlib
|
||||||
|
import urllib.request
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import feedparser
|
||||||
|
|
||||||
|
from engine import config
|
||||||
|
from engine.sources import FEEDS, POETRY_SOURCES
|
||||||
|
from engine.filter import strip_tags, skip
|
||||||
|
from engine.terminal import boot_ln
|
||||||
|
|
||||||
|
# ─── SINGLE FEED ──────────────────────────────────────────
|
||||||
|
def fetch_feed(url):
    """Fetch *url* and parse it with feedparser.

    Returns the parsed feed object, or None on any failure (network
    error, bad URL, parser crash) — callers treat None as "source dark".
    """
    request_headers = {"User-Agent": "mainline/0.1"}
    try:
        request = urllib.request.Request(url, headers=request_headers)
        response = urllib.request.urlopen(request, timeout=config.FEED_TIMEOUT)
        return feedparser.parse(response.read())
    except Exception:
        # Deliberately broad: a dead feed must never take the app down.
        return None
|
||||||
|
|
||||||
|
|
||||||
|
# ─── ALL RSS FEEDS ────────────────────────────────────────
|
||||||
|
def fetch_all():
    """Pull every RSS feed in FEEDS and collect displayable headlines.

    Returns (items, linked, failed) where items is a list of
    (title, source, "HH:MM") tuples, linked counts sources that yielded
    at least one headline, and failed counts dark/empty sources.
    """
    collected = []
    ok_count = 0
    bad_count = 0
    for name, feed_url in FEEDS.items():
        parsed = fetch_feed(feed_url)
        # bozo with zero entries means the payload was unusable.
        if parsed is None or (parsed.bozo and not parsed.entries):
            boot_ln(name, "DARK", False)
            bad_count += 1
            continue
        kept = 0
        for entry in parsed.entries:
            headline = strip_tags(entry.get("title", ""))
            if not headline or skip(headline):
                continue
            stamp = entry.get("published_parsed") or entry.get("updated_parsed")
            try:
                when = datetime(*stamp[:6]).strftime("%H:%M") if stamp else "——:——"
            except Exception:
                # Malformed time tuples fall back to the placeholder clock.
                when = "——:——"
            collected.append((headline, name, when))
            kept += 1
        if kept:
            boot_ln(name, f"LINKED [{kept}]", True)
            ok_count += 1
        else:
            boot_ln(name, "EMPTY", False)
            bad_count += 1
    return collected, ok_count, bad_count
|
||||||
|
|
||||||
|
|
||||||
|
# ─── PROJECT GUTENBERG ────────────────────────────────────
|
||||||
|
def _fetch_gutenberg(url, label):
|
||||||
|
"""Download and parse stanzas/passages from a Project Gutenberg text."""
|
||||||
|
try:
|
||||||
|
req = urllib.request.Request(url, headers={"User-Agent": "mainline/0.1"})
|
||||||
|
resp = urllib.request.urlopen(req, timeout=15)
|
||||||
|
text = resp.read().decode('utf-8', errors='replace').replace('\r\n', '\n').replace('\r', '\n')
|
||||||
|
# Strip PG boilerplate
|
||||||
|
m = re.search(r'\*\*\*\s*START OF[^\n]*\n', text)
|
||||||
|
if m:
|
||||||
|
text = text[m.end():]
|
||||||
|
m = re.search(r'\*\*\*\s*END OF', text)
|
||||||
|
if m:
|
||||||
|
text = text[:m.start()]
|
||||||
|
# Split on blank lines into stanzas/passages
|
||||||
|
blocks = re.split(r'\n{2,}', text.strip())
|
||||||
|
items = []
|
||||||
|
for blk in blocks:
|
||||||
|
blk = ' '.join(blk.split()) # flatten to one line
|
||||||
|
if len(blk) < 20 or len(blk) > 280:
|
||||||
|
continue
|
||||||
|
if blk.isupper(): # skip all-caps headers
|
||||||
|
continue
|
||||||
|
if re.match(r'^[IVXLCDM]+\.?\s*$', blk): # roman numerals
|
||||||
|
continue
|
||||||
|
items.append((blk, label, ''))
|
||||||
|
return items
|
||||||
|
except Exception:
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_poetry():
    """Fetch every poetry/literature source in POETRY_SOURCES.

    Returns (items, linked, failed) mirroring fetch_all()'s contract.
    """
    collected = []
    ok_count = 0
    bad_count = 0
    for name, source_url in POETRY_SOURCES.items():
        passages = _fetch_gutenberg(source_url, name)
        if not passages:
            boot_ln(name, "DARK", False)
            bad_count += 1
        else:
            boot_ln(name, f"LOADED [{len(passages)}]", True)
            collected.extend(passages)
            ok_count += 1
    return collected, ok_count, bad_count
|
||||||
|
|
||||||
|
|
||||||
|
# ─── CACHE ────────────────────────────────────────────────
|
||||||
|
# Cache files live two directories above this module — presumably the
# project root, one level above the engine/ package. TODO confirm layout.
_CACHE_DIR = pathlib.Path(__file__).resolve().parent.parent
|
||||||
|
|
||||||
|
|
||||||
|
def _cache_path():
    """Return the mode-specific JSON cache file path under _CACHE_DIR."""
    filename = f".mainline_cache_{config.MODE}.json"
    return _CACHE_DIR / filename
|
||||||
|
|
||||||
|
|
||||||
|
def load_cache():
    """Load cached items from disk if available.

    Returns a non-empty list of tuples, or None when the cache file is
    missing, unreadable, malformed, or empty.
    """
    cache_file = _cache_path()
    if not cache_file.exists():
        return None
    try:
        payload = json.loads(cache_file.read_text())
        entries = [tuple(entry) for entry in payload["items"]]
    except Exception:
        # Corrupt cache is treated the same as no cache.
        return None
    return entries or None
|
||||||
|
|
||||||
|
|
||||||
|
def save_cache(items):
    """Persist fetched items to disk so subsequent runs start fast.

    Best-effort: serialization or write failures are silently ignored —
    the cache is an optimization, never a requirement.
    """
    try:
        blob = json.dumps({"items": items})
        _cache_path().write_text(blob)
    except Exception:
        pass
|
||||||
134
mainline.py
134
mainline.py
@@ -148,6 +148,7 @@ _FONT_PATH = "/Users/genejohnson/Documents/CS Bishop Drawn/CSBishopDrawn-Italic.
|
|||||||
_FONT_OBJ = None
|
_FONT_OBJ = None
|
||||||
_FONT_SZ = 60
|
_FONT_SZ = 60
|
||||||
_RENDER_H = 8 # terminal rows per rendered text line
|
_RENDER_H = 8 # terminal rows per rendered text line
|
||||||
|
_SSAA = 4 # super-sampling factor: render at _SSAA× then downsample
|
||||||
|
|
||||||
# Non-Latin scripts → macOS system fonts
|
# Non-Latin scripts → macOS system fonts
|
||||||
_SCRIPT_FONTS = {
|
_SCRIPT_FONTS = {
|
||||||
@@ -502,9 +503,10 @@ def _save_cache(items):
|
|||||||
|
|
||||||
|
|
||||||
# ─── STREAM ───────────────────────────────────────────────
|
# ─── STREAM ───────────────────────────────────────────────
|
||||||
_SCROLL_DUR = 3.75 # seconds per headline
|
_SCROLL_DUR = 5.625 # seconds per headline (2/3 original speed)
|
||||||
_FRAME_DT = 0.05 # 50ms base frame rate (20 FPS)
|
_FRAME_DT = 0.05 # 50ms base frame rate (20 FPS)
|
||||||
FIREHOSE_H = 12 # firehose zone height (terminal rows)
|
FIREHOSE_H = 12 # firehose zone height (terminal rows)
|
||||||
|
GRAD_SPEED = 0.08 # gradient traversal speed (cycles/sec, ~12s full sweep)
|
||||||
_mic_db = -99.0 # current mic level, written by background thread
|
_mic_db = -99.0 # current mic level, written by background thread
|
||||||
_mic_stream = None
|
_mic_stream = None
|
||||||
|
|
||||||
@@ -578,8 +580,11 @@ def _render_line(text, font=None):
|
|||||||
draw = ImageDraw.Draw(img)
|
draw = ImageDraw.Draw(img)
|
||||||
draw.text((-bbox[0] + pad, -bbox[1] + pad), text, fill=255, font=font)
|
draw.text((-bbox[0] + pad, -bbox[1] + pad), text, fill=255, font=font)
|
||||||
pix_h = _RENDER_H * 2
|
pix_h = _RENDER_H * 2
|
||||||
scale = pix_h / max(img_h, 1)
|
hi_h = pix_h * _SSAA
|
||||||
new_w = max(1, int(img_w * scale))
|
scale = hi_h / max(img_h, 1)
|
||||||
|
new_w_hi = max(1, int(img_w * scale))
|
||||||
|
img = img.resize((new_w_hi, hi_h), Image.Resampling.LANCZOS)
|
||||||
|
new_w = max(1, int(new_w_hi / _SSAA))
|
||||||
img = img.resize((new_w, pix_h), Image.Resampling.LANCZOS)
|
img = img.resize((new_w, pix_h), Image.Resampling.LANCZOS)
|
||||||
data = img.tobytes()
|
data = img.tobytes()
|
||||||
thr = 80
|
thr = 80
|
||||||
@@ -636,8 +641,8 @@ def _big_wrap(text, max_w, font=None):
|
|||||||
return out
|
return out
|
||||||
|
|
||||||
|
|
||||||
def _lr_gradient(rows):
|
def _lr_gradient(rows, offset=0.0):
|
||||||
"""Color each non-space block character with a left-to-right gradient."""
|
"""Color each non-space block character with a shifting left-to-right gradient."""
|
||||||
n = len(_GRAD_COLS)
|
n = len(_GRAD_COLS)
|
||||||
max_x = max((len(r.rstrip()) for r in rows if r.strip()), default=1)
|
max_x = max((len(r.rstrip()) for r in rows if r.strip()), default=1)
|
||||||
out = []
|
out = []
|
||||||
@@ -650,7 +655,8 @@ def _lr_gradient(rows):
|
|||||||
if ch == ' ':
|
if ch == ' ':
|
||||||
buf.append(' ')
|
buf.append(' ')
|
||||||
else:
|
else:
|
||||||
idx = min(round(x / max(max_x - 1, 1) * (n - 1)), n - 1)
|
shifted = (x / max(max_x - 1, 1) + offset) % 1.0
|
||||||
|
idx = min(round(shifted * (n - 1)), n - 1)
|
||||||
buf.append(f"{_GRAD_COLS[idx]}{ch}\033[0m")
|
buf.append(f"{_GRAD_COLS[idx]}{ch}\033[0m")
|
||||||
out.append("".join(buf))
|
out.append("".join(buf))
|
||||||
return out
|
return out
|
||||||
@@ -730,7 +736,6 @@ def _make_block(title, src, ts, w):
|
|||||||
("\u201d",'"'), ("\u2013","-"), ("\u2014","-")]:
|
("\u201d",'"'), ("\u2013","-"), ("\u2014","-")]:
|
||||||
title_up = title_up.replace(old, new)
|
title_up = title_up.replace(old, new)
|
||||||
big_rows = _big_wrap(title_up, w - 4, lang_font)
|
big_rows = _big_wrap(title_up, w - 4, lang_font)
|
||||||
big_rows = _lr_gradient(big_rows)
|
|
||||||
hc = random.choice([
|
hc = random.choice([
|
||||||
"\033[38;5;46m", # matrix green
|
"\033[38;5;46m", # matrix green
|
||||||
"\033[38;5;34m", # dark green
|
"\033[38;5;34m", # dark green
|
||||||
@@ -843,6 +848,7 @@ def stream(items):
|
|||||||
sh = h - fh
|
sh = h - fh
|
||||||
|
|
||||||
# ── Check for ntfy message ────────────────────────
|
# ── Check for ntfy message ────────────────────────
|
||||||
|
msg_h = 0 # rows consumed by message zone at top
|
||||||
msg_active = False
|
msg_active = False
|
||||||
with _ntfy_lock:
|
with _ntfy_lock:
|
||||||
if _ntfy_message is not None:
|
if _ntfy_message is not None:
|
||||||
@@ -852,63 +858,45 @@ def stream(items):
|
|||||||
else:
|
else:
|
||||||
_ntfy_message = None # expired
|
_ntfy_message = None # expired
|
||||||
|
|
||||||
if msg_active:
|
|
||||||
# ── MESSAGE state: freeze scroll, render message ──
|
|
||||||
buf = []
|
buf = []
|
||||||
# Render message text with OTF font (cached across frames)
|
if msg_active:
|
||||||
|
# ── Message zone: pinned to top, scroll continues below ──
|
||||||
display_text = m_body or m_title or "(empty)"
|
display_text = m_body or m_title or "(empty)"
|
||||||
display_text = re.sub(r"\s+", " ", display_text.upper())
|
display_text = re.sub(r"\s+", " ", display_text.upper())
|
||||||
cache_key = (display_text, w)
|
cache_key = (display_text, w)
|
||||||
if _msg_cache[0] != cache_key:
|
if _msg_cache[0] != cache_key:
|
||||||
msg_rows = _big_wrap(display_text, w - 4)
|
msg_rows = _big_wrap(display_text, w - 4)
|
||||||
msg_rows = _lr_gradient(msg_rows)
|
|
||||||
_msg_cache = (cache_key, msg_rows)
|
_msg_cache = (cache_key, msg_rows)
|
||||||
else:
|
else:
|
||||||
msg_rows = _msg_cache[1]
|
msg_rows = _msg_cache[1]
|
||||||
# Center vertically in scroll zone
|
msg_rows = _lr_gradient(msg_rows, (time.monotonic() * GRAD_SPEED) % 1.0)
|
||||||
total_h = len(msg_rows) + 4 # +4 for border + meta + padding
|
# Layout: rendered text + meta + border
|
||||||
y_off = max(0, (sh - total_h) // 2)
|
|
||||||
for r in range(sh):
|
|
||||||
ri = r - y_off
|
|
||||||
if ri == 0 or ri == total_h - 1:
|
|
||||||
# Border lines
|
|
||||||
bar = "─" * (w - 4)
|
|
||||||
buf.append(f"\033[{r+1};1H {MSG_BORDER}{bar}{RST}\033[K")
|
|
||||||
elif 1 <= ri <= len(msg_rows):
|
|
||||||
ln = _vis_trunc(msg_rows[ri - 1], w)
|
|
||||||
buf.append(f"\033[{r+1};1H {ln}{RST}\033[K")
|
|
||||||
elif ri == len(msg_rows) + 1:
|
|
||||||
# Title line (if present and different from body)
|
|
||||||
if m_title and m_title != m_body:
|
|
||||||
meta = f" {MSG_META}\u2591 {m_title}{RST}"
|
|
||||||
else:
|
|
||||||
meta = ""
|
|
||||||
buf.append(f"\033[{r+1};1H{meta}\033[K")
|
|
||||||
elif ri == len(msg_rows) + 2:
|
|
||||||
# Source + timestamp
|
|
||||||
elapsed_s = int(time.monotonic() - m_ts)
|
elapsed_s = int(time.monotonic() - m_ts)
|
||||||
remaining = max(0, MESSAGE_DISPLAY_SECS - elapsed_s)
|
remaining = max(0, MESSAGE_DISPLAY_SECS - elapsed_s)
|
||||||
ts_str = datetime.now().strftime("%H:%M:%S")
|
ts_str = datetime.now().strftime("%H:%M:%S")
|
||||||
meta = f" {MSG_META}\u2591 ntfy \u00b7 {ts_str} \u00b7 {remaining}s{RST}"
|
row_idx = 0
|
||||||
buf.append(f"\033[{r+1};1H{meta}\033[K")
|
for mr in msg_rows:
|
||||||
else:
|
ln = _vis_trunc(mr, w)
|
||||||
# Sparse noise outside the message
|
buf.append(f"\033[{row_idx+1};1H {ln}{RST}\033[K")
|
||||||
if random.random() < 0.06:
|
row_idx += 1
|
||||||
buf.append(f"\033[{r+1};1H{noise(w)}")
|
# Meta line: title (if distinct) + source + countdown
|
||||||
else:
|
meta_parts = []
|
||||||
buf.append(f"\033[{r+1};1H\033[K")
|
if m_title and m_title != m_body:
|
||||||
# Firehose keeps running during messages
|
meta_parts.append(m_title)
|
||||||
if FIREHOSE and fh > 0:
|
meta_parts.append(f"ntfy \u00b7 {ts_str} \u00b7 {remaining}s")
|
||||||
for fr in range(fh):
|
meta = " " + " \u00b7 ".join(meta_parts) if len(meta_parts) > 1 else " " + meta_parts[0]
|
||||||
fline = _firehose_line(items, w)
|
buf.append(f"\033[{row_idx+1};1H{MSG_META}{meta}{RST}\033[K")
|
||||||
buf.append(f"\033[{sh + fr + 1};1H{fline}\033[K")
|
row_idx += 1
|
||||||
sys.stdout.buffer.write("".join(buf).encode())
|
# Border — constant boundary between message and scroll
|
||||||
sys.stdout.flush()
|
bar = "\u2500" * (w - 4)
|
||||||
elapsed = time.monotonic() - t0
|
buf.append(f"\033[{row_idx+1};1H {MSG_BORDER}{bar}{RST}\033[K")
|
||||||
time.sleep(max(0, _FRAME_DT - elapsed))
|
row_idx += 1
|
||||||
continue
|
msg_h = row_idx
|
||||||
|
|
||||||
# ── SCROLL state: normal headline rendering ───────
|
# Effective scroll zone: below message, above firehose
|
||||||
|
scroll_h = sh - msg_h
|
||||||
|
|
||||||
|
# ── Scroll: headline rendering (always runs) ──────
|
||||||
# Advance scroll on schedule
|
# Advance scroll on schedule
|
||||||
scroll_accum += _FRAME_DT
|
scroll_accum += _FRAME_DT
|
||||||
while scroll_accum >= scroll_interval:
|
while scroll_accum >= scroll_interval:
|
||||||
@@ -930,28 +918,35 @@ def stream(items):
|
|||||||
if k < cam:
|
if k < cam:
|
||||||
del noise_cache[k]
|
del noise_cache[k]
|
||||||
|
|
||||||
# Draw scroll zone
|
# Draw scroll zone (below message zone, above firehose)
|
||||||
top_zone = max(1, int(sh * 0.25))
|
top_zone = max(1, int(scroll_h * 0.25))
|
||||||
bot_zone = max(1, int(sh * 0.10))
|
bot_zone = max(1, int(scroll_h * 0.10))
|
||||||
buf = []
|
grad_offset = (time.monotonic() * GRAD_SPEED) % 1.0
|
||||||
for r in range(sh):
|
scroll_buf_start = len(buf) # track where scroll rows start in buf
|
||||||
|
for r in range(scroll_h):
|
||||||
|
scr_row = msg_h + r + 1 # 1-indexed ANSI screen row
|
||||||
cy = cam + r
|
cy = cam + r
|
||||||
top_f = min(1.0, r / top_zone)
|
top_f = min(1.0, r / top_zone) if top_zone > 0 else 1.0
|
||||||
bot_f = min(1.0, (sh - 1 - r) / bot_zone)
|
bot_f = min(1.0, (scroll_h - 1 - r) / bot_zone) if bot_zone > 0 else 1.0
|
||||||
row_fade = min(top_f, bot_f)
|
row_fade = min(top_f, bot_f)
|
||||||
drawn = False
|
drawn = False
|
||||||
for content, hc, by, midx in active:
|
for content, hc, by, midx in active:
|
||||||
cr = cy - by
|
cr = cy - by
|
||||||
if 0 <= cr < len(content):
|
if 0 <= cr < len(content):
|
||||||
ln = _vis_trunc(content[cr], w)
|
raw = content[cr]
|
||||||
|
if cr != midx:
|
||||||
|
colored = _lr_gradient([raw], grad_offset)[0]
|
||||||
|
else:
|
||||||
|
colored = raw
|
||||||
|
ln = _vis_trunc(colored, w)
|
||||||
if row_fade < 1.0:
|
if row_fade < 1.0:
|
||||||
ln = _fade_line(ln, row_fade)
|
ln = _fade_line(ln, row_fade)
|
||||||
if cr == midx:
|
if cr == midx:
|
||||||
buf.append(f"\033[{r+1};1H{W_COOL}{ln}{RST}\033[K")
|
buf.append(f"\033[{scr_row};1H{W_COOL}{ln}{RST}\033[K")
|
||||||
elif ln.strip():
|
elif ln.strip():
|
||||||
buf.append(f"\033[{r+1};1H{hc}{ln}{RST}\033[K")
|
buf.append(f"\033[{scr_row};1H{ln}{RST}\033[K")
|
||||||
else:
|
else:
|
||||||
buf.append(f"\033[{r+1};1H\033[K")
|
buf.append(f"\033[{scr_row};1H\033[K")
|
||||||
drawn = True
|
drawn = True
|
||||||
break
|
break
|
||||||
if not drawn:
|
if not drawn:
|
||||||
@@ -959,9 +954,9 @@ def stream(items):
|
|||||||
if row_fade < 1.0 and n:
|
if row_fade < 1.0 and n:
|
||||||
n = _fade_line(n, row_fade)
|
n = _fade_line(n, row_fade)
|
||||||
if n:
|
if n:
|
||||||
buf.append(f"\033[{r+1};1H{n}")
|
buf.append(f"\033[{scr_row};1H{n}")
|
||||||
else:
|
else:
|
||||||
buf.append(f"\033[{r+1};1H\033[K")
|
buf.append(f"\033[{scr_row};1H\033[K")
|
||||||
|
|
||||||
# Draw firehose zone
|
# Draw firehose zone
|
||||||
if FIREHOSE and fh > 0:
|
if FIREHOSE and fh > 0:
|
||||||
@@ -973,11 +968,12 @@ def stream(items):
|
|||||||
mic_excess = max(0.0, _mic_db - MIC_THRESHOLD_DB)
|
mic_excess = max(0.0, _mic_db - MIC_THRESHOLD_DB)
|
||||||
glitch_prob = 0.32 + min(0.9, mic_excess * 0.16)
|
glitch_prob = 0.32 + min(0.9, mic_excess * 0.16)
|
||||||
n_hits = 4 + int(mic_excess / 2)
|
n_hits = 4 + int(mic_excess / 2)
|
||||||
g_limit = sh if FIREHOSE else len(buf)
|
scroll_buf_len = len(buf) - scroll_buf_start
|
||||||
if random.random() < glitch_prob and g_limit > 0:
|
if random.random() < glitch_prob and scroll_buf_len > 0:
|
||||||
for _ in range(min(n_hits, g_limit)):
|
for _ in range(min(n_hits, scroll_buf_len)):
|
||||||
gi = random.randint(0, g_limit - 1)
|
gi = random.randint(0, scroll_buf_len - 1)
|
||||||
buf[gi] = f"\033[{gi+1};1H{glitch_bar(w)}"
|
scr_row = msg_h + gi + 1
|
||||||
|
buf[scroll_buf_start + gi] = f"\033[{scr_row};1H{glitch_bar(w)}"
|
||||||
|
|
||||||
sys.stdout.buffer.write("".join(buf).encode())
|
sys.stdout.buffer.write("".join(buf).encode())
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|||||||
Reference in New Issue
Block a user