You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

51 lines
1.6 KiB
Python

#!/usr/bin/env python3
import curses
import webbrowser

import requests
from bs4 import BeautifulSoup as Soup
def main(stdscr):
    """Interactive Hacker News top-stories browser for a curses terminal.

    Fetches the first ten top-story IDs from the Hacker News Firebase API,
    resolves each to a (title, url) pair, and displays them as a list.

    Controls: UP/DOWN move the selection (wrapping at either end),
    ENTER opens the selected story in the default web browser, 'q' quits.

    Args:
        stdscr: the curses standard screen, supplied by curses.wrapper().

    Raises:
        Exception: if the top-stories request does not return 2xx.
    """
    stdscr.clear()
    # Query Hacker News API for the IDs of the current top stories.
    url = 'https://hacker-news.firebaseio.com/v0/topstories.json'
    r = requests.get(url)
    if not r.ok:
        raise Exception('Error fetching data from Hacker News API')
    ids = r.json()[:10]  # Show only the first ten stories
    stories = []
    for i in ids:
        story_url = f'https://hacker-news.firebaseio.com/v0/item/{i}.json'
        s = requests.get(story_url).json()
        # Some items (e.g. "Ask HN" posts) carry no 'url' field; fall back
        # to a placeholder instead of catching KeyError (the original
        # handler re-read s['title'] and would re-raise on a missing title).
        stories.append((s['title'], s.get('url', 'No link')))
    # Display list of stories in terminal window with arrow key navigation.
    current_pos = 0
    while True:
        # Redraw from scratch every iteration; without this the list is
        # appended again on each keypress and the highlight never moves.
        stdscr.erase()
        stdscr.addstr('\n\nHacker News Top Stories:\n')
        for i, (title, link) in enumerate(stories):
            prefix = '>>> ' if i == current_pos else '    '
            stdscr.addstr(f'{prefix}{i+1}: {title} ({link})\n')
        stdscr.refresh()
        c = stdscr.getch()
        if c == ord('q'):  # Quit
            break
        elif c == curses.KEY_UP:
            # Modulo arithmetic wraps from the first entry to the last.
            current_pos = (current_pos - 1) % len(stories)
        elif c == curses.KEY_DOWN:
            current_pos = (current_pos + 1) % len(stories)
        elif c == curses.KEY_ENTER or c == 10:
            _, link = stories[current_pos]
            # No print() here: stdout writes corrupt an active curses screen.
            webbrowser.open(link)
# Entry-point guard: lets the module be imported (e.g. for testing)
# without immediately taking over the terminal.
if __name__ == '__main__':
    curses.wrapper(main)