#!/usr/bin/env python3
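"""Browse the current Hacker News top stories from the terminal.

Fetches the top 15 story IDs from the official Firebase API
(https://hacker-news.firebaseio.com/v0/) and shows them in a curses list:
Up/Down moves the selection, Enter opens the story link in a browser,
'c' opens the comment thread, and 'q' quits.

Requires the third-party 'requests' package.
"""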
import requests
import curses
import webbrowser
from dataclasses import dataclass


@dataclass
class Story:
    id: int
    title: str
    link: str
    author: str
    votes: int

def main(stdscr):
    stdscr.clear()
    # Query the Hacker News API for the IDs of the current top stories
    url = 'https://hacker-news.firebaseio.com/v0/topstories.json'
    r = requests.get(url)
    if not r.ok:
        raise Exception('Error fetching data from Hacker News API')
    ids = r.json()[:15]  # Show only the first 15 stories
    stories = []
    # Fetch each story item and keep the fields the UI displays
    for i in ids:
        story_url = f'https://hacker-news.firebaseio.com/v0/item/{i}.json'
        s = requests.get(story_url).json()
        try:
            stories.append(Story(s['id'], s['title'], s['url'], s['by'], s['score']))
        except KeyError:
            # Some items (e.g. Ask HN posts) carry no 'url' field
            stories.append(Story(s['id'], s['title'], 'No URL', s['by'], s['score']))
    # Display the list of stories with arrow-key navigation
    current_pos = 0
    while True:
        stdscr.clear()
        stdscr.addstr('\n\nHacker News Top Stories:\n')
        for i, story in enumerate(stories):
            prefix = '>>> ' if i == current_pos else '    '
            text = f'{prefix}{story.title} ({story.link})\n'
            stdscr.addstr(text)
            if i == current_pos:
                detail = f'    by {story.author} | {story.votes} points\n'
                stdscr.addstr(detail)
        stdscr.refresh()
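        # Key bindings: 'q' quits, Up/Down move the selection (wrapping at
        # the ends), 'c' opens the story's comment page on Hacker News,
        # and Enter opens the story link itself.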
        c = stdscr.getch()
        if c == ord('q'):  # Quit
            break
        elif c == curses.KEY_UP:
            current_pos -= 1
            if current_pos < 0:  # Wrap around to the last story
                current_pos = len(stories) - 1
        elif c == curses.KEY_DOWN:
            current_pos += 1
            if current_pos >= len(stories):  # Wrap around to the first story
                current_pos = 0
        elif c == ord('c'):
            webbrowser.open(f'https://news.ycombinator.com/item?id={stories[current_pos].id}')
        elif c == curses.KEY_ENTER or c == 10:  # Enter key (10 = '\n')
            webbrowser.open(stories[current_pos].link)


if __name__ == '__main__':
    curses.wrapper(main)