Second AI attempt, and my own fixes.
This commit is contained in:
parent
25f49e344f
commit
6b967dd33e
50
ai2.py
Normal file
50
ai2.py
Normal file
@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env python3
|
||||
import requests
|
||||
from bs4 import BeautifulSoup as Soup
|
||||
import curses
|
||||
|
||||
def main(stdscr):
    """Interactive Hacker News top-stories browser.

    Fetches the top ten stories from the Hacker News Firebase API and
    renders them as a selectable curses list.  Up/Down arrows move the
    selection (wrapping at either end), Enter opens the highlighted story
    in the default web browser, and 'q' quits.

    Args:
        stdscr: the curses standard screen, supplied by curses.wrapper().

    Raises:
        RuntimeError: if the top-stories request fails.
    """
    # BUG FIX: webbrowser was used below but never imported anywhere in
    # this file; a function-local import keeps the fix self-contained.
    import webbrowser

    stdscr.clear()

    # Query Hacker News API for top stories and their titles/links
    url = 'https://hacker-news.firebaseio.com/v0/topstories.json'
    r = requests.get(url)
    if not r.ok:
        raise RuntimeError('Error fetching data from Hacker News API')
    ids = r.json()[:10]  # Show only the first ten stories
    stories = []
    for i in ids:
        story_url = f'https://hacker-news.firebaseio.com/v0/item/{i}.json'
        s = requests.get(story_url).json()
        # Some items (e.g. text-only "Ask HN" posts) have no 'url' field;
        # fall back to a placeholder instead of raising KeyError.
        stories.append((s['title'], s.get('url', 'No link')))

    # Display list of stories in terminal window with arrow key navigation
    current_pos = 0
    while True:
        # BUG FIX: without clearing on every iteration the list is drawn
        # again below the previous copy on each keypress, until addstr()
        # runs off the bottom of the window and raises curses.error.
        stdscr.clear()
        stdscr.addstr('\n\nHacker News Top Stories:\n')
        for i, (title, link) in enumerate(stories):
            # Four-space filler keeps unselected rows aligned with '>>> '.
            prefix = '>>> ' if i == current_pos else '    '
            stdscr.addstr(f'{prefix}{i+1}: {title} ({link})\n')

        stdscr.refresh()
        c = stdscr.getch()
        if c == ord('q'):  # Quit
            break
        elif c == curses.KEY_UP:
            current_pos -= 1
            if current_pos < 0:
                current_pos = len(stories) - 1  # wrap to bottom
        elif c == curses.KEY_DOWN:
            current_pos += 1
            if current_pos >= len(stories):
                current_pos = 0  # wrap to top
        elif c == curses.KEY_ENTER or c == 10:  # 10 == '\n' from most terminals
            title, link = stories[current_pos]
            print(f'\nOpening link: {link}\n')
            webbrowser.open(link)
|
||||
|
||||
# Guard the entry point so importing this module does not take over the
# terminal; curses.wrapper handles terminal setup/teardown around main().
if __name__ == '__main__':
    curses.wrapper(main)
|
52
hn.py
Normal file
52
hn.py
Normal file
@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
|
||||
import requests
|
||||
from bs4 import BeautifulSoup as Soup
|
||||
import curses
|
||||
import webbrowser
|
||||
|
||||
def main(stdscr):
    """Curses front-end for the Hacker News top-stories feed.

    Downloads the current top ten stories from the Hacker News Firebase
    API, shows them as a highlighted list, and lets the user navigate
    with the arrow keys (wrapping at either end), open the selected
    story with Enter, or leave with 'q'.

    Args:
        stdscr: the curses standard screen, supplied by curses.wrapper().

    Raises:
        Exception: if the top-stories request fails.
    """
    stdscr.clear()

    # Query Hacker News API for top stories and their titles/links.
    response = requests.get(
        'https://hacker-news.firebaseio.com/v0/topstories.json'
    )
    if not response.ok:
        raise Exception('Error fetching data from Hacker News API')

    stories = []
    # Show only the first ten stories.
    for story_id in response.json()[:10]:
        item = requests.get(
            f'https://hacker-news.firebaseio.com/v0/item/{story_id}.json'
        ).json()
        try:
            entry = (item['title'], item['url'])
        except KeyError:
            # Text-only posts carry no 'url' field.
            entry = (item['title'], 'No link')
        stories.append(entry)

    # Display list of stories in terminal window with arrow key navigation.
    selected = 0
    while True:
        stdscr.clear()
        stdscr.addstr('\n\nHacker News Top Stories:\n')
        for idx, (title, link) in enumerate(stories):
            marker = '>>> ' if idx == selected else '    '
            stdscr.addstr(f'{marker}{idx+1}: {title} ({link})\n')
        stdscr.refresh()

        key = stdscr.getch()
        if key == ord('q'):  # Quit
            break
        if key == curses.KEY_UP:
            selected -= 1
            if selected < 0:
                selected = len(stories) - 1
        elif key == curses.KEY_DOWN:
            selected += 1
            if selected >= len(stories):
                selected = 0
        elif key == curses.KEY_ENTER or key == 10:
            title, link = stories[selected]
            print(f'\nOpening link: {link}\n')
            webbrowser.open(link)
|
||||
|
||||
# Guard the entry point so importing this module does not take over the
# terminal; curses.wrapper handles terminal setup/teardown around main().
if __name__ == '__main__':
    curses.wrapper(main)
|
Loading…
Reference in New Issue
Block a user