#!/usr/bin/env python3
"""Browse the top Hacker News stories in a curses UI.

Arrow keys move the selection, Enter opens the selected story in the
default web browser, and 'q' quits.
"""
import curses
import webbrowser  # BUGFIX: was missing, so opening a link raised NameError

import requests


def main(stdscr):
    """Fetch the top ten HN stories and run the interactive picker.

    Args:
        stdscr: the curses standard window supplied by curses.wrapper().

    Raises:
        Exception: if the top-stories endpoint does not return 2xx.
    """
    stdscr.clear()

    # Query the Hacker News Firebase API for the IDs of the top stories.
    url = 'https://hacker-news.firebaseio.com/v0/topstories.json'
    r = requests.get(url, timeout=10)
    if not r.ok:
        raise Exception('Error fetching data from Hacker News API')
    ids = r.json()[:10]  # Show only the first ten stories
    stories = []
    for story_id in ids:
        story_url = f'https://hacker-news.firebaseio.com/v0/item/{story_id}.json'
        item = requests.get(story_url, timeout=10).json()
        # "Ask HN"-style items have no external URL; fall back to a placeholder.
        stories.append((item['title'], item.get('url', 'No link')))

    # Event loop: redraw the list, then react to a single keypress.
    current_pos = 0
    while True:
        # BUGFIX: without clearing, every redraw appended another copy of the
        # list below the previous one instead of repainting in place.
        stdscr.clear()
        stdscr.addstr('\n\nHacker News Top Stories:\n')
        for i, (title, link) in enumerate(stories):
            prefix = '>>> ' if i == current_pos else '    '
            stdscr.addstr(f'{prefix}{i+1}: {title} ({link})\n')

        stdscr.refresh()
        c = stdscr.getch()
        if c == ord('q'):  # Quit
            break
        elif c == curses.KEY_UP:
            # Wrap from the first entry around to the last.
            current_pos -= 1
            if current_pos < 0:
                current_pos = len(stories) - 1
        elif c == curses.KEY_DOWN:
            current_pos += 1
            if current_pos >= len(stories):
                current_pos = 0
        elif c == curses.KEY_ENTER or c == 10:  # Enter (curses code or '\n')
            title, link = stories[current_pos]
            # BUGFIX: print() is invisible/garbling while curses owns the
            # terminal -- draw the status message on the curses screen instead.
            stdscr.addstr(f'\nOpening link: {link}\n')
            stdscr.refresh()
            webbrowser.open(link)


curses.wrapper(main)
#!/usr/bin/env python3
"""Browse the top Hacker News stories in a curses UI.

Arrow keys move the selection, Enter opens the selected story in the
default web browser, and 'q' quits.
"""
import curses
import webbrowser

import requests


def main(stdscr):
    """Fetch the top ten HN stories and run the interactive picker.

    Args:
        stdscr: the curses standard window supplied by curses.wrapper().

    Raises:
        Exception: if the top-stories endpoint does not return 2xx.
    """
    stdscr.clear()

    # Query the Hacker News Firebase API for the IDs of the top stories.
    url = 'https://hacker-news.firebaseio.com/v0/topstories.json'
    r = requests.get(url, timeout=10)
    if not r.ok:
        raise Exception('Error fetching data from Hacker News API')
    ids = r.json()[:10]  # Show only the first ten stories
    stories = []
    for story_id in ids:
        story_url = f'https://hacker-news.firebaseio.com/v0/item/{story_id}.json'
        item = requests.get(story_url, timeout=10).json()
        # "Ask HN"-style items have no external URL; fall back to a placeholder.
        stories.append((item['title'], item.get('url', 'No link')))

    # Event loop: repaint the list, then react to a single keypress.
    current_pos = 0
    while True:
        stdscr.clear()
        stdscr.addstr('\n\nHacker News Top Stories:\n')
        for i, (title, link) in enumerate(stories):
            prefix = '>>> ' if i == current_pos else '    '
            stdscr.addstr(f'{prefix}{i+1}: {title} ({link})\n')

        stdscr.refresh()
        c = stdscr.getch()
        if c == ord('q'):  # Quit
            break
        elif c == curses.KEY_UP:
            # Wrap from the first entry around to the last.
            current_pos -= 1
            if current_pos < 0:
                current_pos = len(stories) - 1
        elif c == curses.KEY_DOWN:
            current_pos += 1
            if current_pos >= len(stories):
                current_pos = 0
        elif c == curses.KEY_ENTER or c == 10:  # Enter (curses code or '\n')
            title, link = stories[current_pos]
            # BUGFIX: print() goes nowhere useful while curses owns the
            # terminal -- draw the status message on the curses screen instead.
            stdscr.addstr(f'\nOpening link: {link}\n')
            stdscr.refresh()
            webbrowser.open(link)


curses.wrapper(main)