From 7d0ce178eb59b56c0f31c0cdc9f7e1088f01b165 Mon Sep 17 00:00:00 2001
From: Felix Pankratz
Date: Tue, 29 Aug 2023 11:23:32 +0200
Subject: [PATCH] add autotruncate

---
 hn.py | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/hn.py b/hn.py
index d6af7c5..04062d5 100755
--- a/hn.py
+++ b/hn.py
@@ -1,4 +1,9 @@
 #!/usr/bin/env python3
+
+#TODO: make sure things dont explode no matter what terminal size
+# -> prevent linebreaks in a single story
+# -> only load as many stories as fit
+
 import requests
 from bs4 import BeautifulSoup as Soup
 import curses
@@ -20,15 +25,18 @@ class Story:
 
 def main(stdscr):
     stdscr.clear()
+    height, width = stdscr.getmaxyx()
+    num_stories = curses.LINES - 2 # headline and detail
     # Query Hacker News API for top stories and their titles/links
     url = 'https://hacker-news.firebaseio.com/v0/topstories.json'
     r = requests.get(url)
     if not r.ok:
         raise Exception('Error fetching data from Hacker News API')
-    ids = r.json()[:15] # Show only the first ten stories
+    ids = r.json()[:10]
     stories = []
     for idx, i in enumerate(ids):
         stdscr.clear()
+        stdscr.addstr(f'{num_stories}\n')
         stdscr.addstr(f'[{spinner_states[idx%4]}] Getting stories...')
         stdscr.refresh()
         story_url = f'https://hacker-news.firebaseio.com/v0/item/{i}.json'
@@ -46,7 +54,15 @@ def main(stdscr):
     stdscr.addstr('Hacker News Top Stories:\n')
     for i, story in enumerate(stories):
         prefix = '>>> ' if i == current_pos else '    '
-        text = f'{prefix}{story.title} ({story.link})\n'
+        #text = f'{prefix}{story.title} ({story.link})\n'
+        # calculate length of line
+        text = f'{prefix} ()\n'
+        chars_available = width - len(text)
+        max_title_len = (chars_available//3)*2
+        max_url_len = chars_available//3
+
+        text = f'{prefix}{(story.title[:max_title_len-1] + "…") if len(story.title) > max_title_len else story.title} ({story.link[:max_url_len-1] + "…" if len(story.link) > max_url_len else story.link})\n'
+
         stdscr.addstr(text)
         if i == current_pos:
             detail = f' by {story.author} | {story.comments} comments | {story.votes} points\n'