Adding tests and some minor tweaks to structure.
@@ -8,6 +8,7 @@ from datetime import datetime
from timeit import default_timer as timer

import six
from bs4 import BeautifulSoup
from kitchen.text.display import wrap

from . import exceptions
@@ -317,6 +318,22 @@ class Content(object):
            out.extend(lines)
        return out

    @staticmethod
    def extract_links(html):
        """
        Extract a list of hyperlinks from an HTML document.
        """
        links = []
        soup = BeautifulSoup(html, 'html.parser')
        for link in soup.findAll('a'):
            href = link.get('href')
            if not href:
                continue
            if href.startswith('/'):
                href = 'https://www.reddit.com' + href
            links.append({'text': link.text, 'href': href})
        return links


class SubmissionContent(Content):
    """

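A rough usage sketch of the new Content.extract_links() helper (the import
path and HTML snippet below are illustrative, not part of the commit):

    from rtv.content import Content  # assumed module path

    html = "<a href='/r/python'>Python</a> <a>No href</a>"
    # Relative hrefs are resolved against reddit.com, and anchors without an
    # href attribute are skipped, so this returns:
    # [{'text': 'Python', 'href': 'https://www.reddit.com/r/python'}]
    links = Content.extract_links(html)
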
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from bs4 import BeautifulSoup
import re
import time

@@ -139,12 +138,11 @@ class SubmissionPage(Page):
    @SubmissionController.register(Command('SUBMISSION_OPEN_IN_BROWSER'))
    def open_link(self):
        """
        Open the selected link
        Open the link contained in the selected item.

        Prompt user to choose which link to open if additional links
        are mentioned at the item.
        If there is more than one link contained in the item, prompt the user
        to choose which link to open.
        """

        data = self.get_selected_item()
        if data['type'] == 'Submission':
            opened_link = self.prompt_and_open_link(data)
@@ -156,29 +154,25 @@ class SubmissionPage(Page):
            self.term.flash()

    def prompt_and_open_link(self, data):
        links = [{'text': 'Permalink', 'href': data['permalink']}]
        if data['html']:
            links += self.get_links_in_html(data['html'])
        if len(links) > 1:
            link = self.term.prompt_user_to_select_link(links)
        elif 'url_full' in data and data['url_full']:
        url_full = data.get('url_full')
        if url_full and url_full != data['permalink']:
            # The item is a link-only submission that won't contain text
            link = data['url_full']
        else:
            link = data['permalink']
            extracted_links = self.content.extract_links(data['html'])
            if not extracted_links:
                # Only one selection to choose from, so just pick it
                link = data['permalink']
            else:
                # Let the user decide which link to open
                links = [{'text': 'Permalink', 'href': data['permalink']}]
                links += extracted_links
                link = self.term.prompt_user_to_select_link(links)

        if link is not None:
            self.term.open_link(link)
        return link

    def get_links_in_html(self, html):
        links = []
        soup = BeautifulSoup(html)
        for link in soup.findAll('a'):
            link = {'text': link.text, 'href': link.get('href')}
            if link['href'].startswith('/'):
                link['href'] = 'https://www.reddit.com' + link['href']
            links.append(link)
        return links

    @SubmissionController.register(Command('SUBMISSION_OPEN_IN_PAGER'))
    def open_pager(self):
        """

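For reviewers, a condensed summary of the reworked prompt_and_open_link()
decision flow (the sample dict below is illustrative; only the field names
come from the diff):

    # 1. Link-style submission: url_full is present and differs from the
    #    permalink, so it is opened directly without prompting.
    # 2. Otherwise the links are pulled out of the HTML body with
    #    Content.extract_links(); if none are found, the permalink is opened.
    # 3. If links were found, a 'Permalink' entry is prepended and the user
    #    picks one via term.prompt_user_to_select_link().
    data = {'permalink': 'https://www.reddit.com/comments/abc123',
            'url_full': 'https://example.com/article',
            'html': None}
    # Case 1 applies here, so https://example.com/article would be opened.
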
@@ -355,9 +355,16 @@ class Terminal(object):
        return ch

    def prompt_user_to_select_link(self, links):
        """
        Prompt the user to select a link from a list to open.

        Return the link that was selected, or ``None`` if no link was selected.
        """
        link_pages = self.get_link_pages(links)
        for link_page in link_pages:
            text = self.get_link_page_text(link_page)
        for n, link_page in enumerate(link_pages, start=1):
            text = 'Select a link to open (page {} of {}):\n\n'
            text = text.format(n, len(link_pages))
            text += self.get_link_page_text(link_page)
            if link_page is not link_pages[-1]:
                text += '[9] next page...'

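For orientation, the prompt assembled by the loop above would read roughly as
follows for the first of two pages (the link texts and URLs are made up):

    Select a link to open (page 1 of 2):

    [1] [Permalink](https://www.reddit.com/comments/abc123)
    [2] [Example Article](https://example.com/article)
    ...
    [8] [Eighth Link](https://example.com/eighth)
    [9] next page...
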
@@ -371,7 +378,14 @@ class Terminal(object):
                return None
            return link_page[choice - 1]['href']

    def get_link_pages(self, links):
    @staticmethod
    def get_link_pages(links):
        """
        Given a list of links, separate them into pages that can be displayed
        to the user and navigated using the 1-9 number keys. The last page
        can contain up to 9 links, and all other pages can contain up to 8
        links.
        """
        link_pages = []
        i = 0
        while i < len(links):
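
A worked example of the paging rule described in the docstring above, assuming
the behaviour exercised by the new tests (purely illustrative):

    # Keys 1-8 select a link and key 9 pages forward, so only the final page
    # can use all nine slots. Twelve links therefore split as 8 + 4:
    Terminal.get_link_pages(list(range(12)))
    # -> [[0, 1, 2, 3, 4, 5, 6, 7], [8, 9, 10, 11]]
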
@@ -385,13 +399,16 @@ class Terminal(object):
            link_pages.append(link_page)
        return link_pages

    def get_link_page_text(self, link_page):
        text = 'Open link:\n'
    @staticmethod
    def get_link_page_text(link_page):
        """
        Construct the dialog box to display a list of links to the user.
        """
        text = ''
        for i, link in enumerate(link_page):
            capped_link_text = (link['text'] if len(link['text']) <= 20
                                else link['text'][:19] + '…')
            text += '[{}] [{}]({})\n'.format(
                i + 1, capped_link_text, link['href'])
            text += '[{}] [{}]({})\n'.format(i + 1, capped_link_text, link['href'])
        return text

    def open_link(self, url):

@@ -617,3 +617,17 @@ def test_content_rate_limit(reddit, oauth, refresh_token):
    # Even though the headers were returned, the rate limiting should
    # still not be triggering a delay for the next request
    assert reddit.handler.next_request_timestamp is None


def test_content_extract_links():

    # Should handle relative & absolute links, should ignore empty links.
    html = """
        <a href='/'>Home Page</a>
        <a href='https://www.github.com'>Github</a>
        <a>Blank</a>
    """
    assert Content.extract_links(html) == [
        {'href': 'https://www.reddit.com/', 'text': 'Home Page'},
        {'href': 'https://www.github.com', 'text': 'Github'}
    ]

@@ -5,6 +5,7 @@ import re
import os
import curses
import codecs
from textwrap import dedent

import six
import pytest
@@ -187,7 +188,7 @@ def test_terminal_add_line(terminal, stdscr, use_ascii):


@pytest.mark.parametrize('use_ascii', [True, False])
def test_show_notification(terminal, stdscr, use_ascii):
def test_terminal_show_notification(terminal, stdscr, use_ascii):

    terminal.config['ascii'] = use_ascii

@@ -216,7 +217,7 @@ def test_show_notification(terminal, stdscr, use_ascii):


@pytest.mark.parametrize('use_ascii', [True, False])
def test_text_input(terminal, stdscr, use_ascii):
def test_terminal_text_input(terminal, stdscr, use_ascii):

    terminal.config['ascii'] = use_ascii
    stdscr.nlines = 1
@@ -237,7 +238,7 @@ def test_text_input(terminal, stdscr, use_ascii):


@pytest.mark.parametrize('use_ascii', [True, False])
def test_prompt_input(terminal, stdscr, use_ascii):
def test_terminal_prompt_input(terminal, stdscr, use_ascii):

    terminal.config['ascii'] = use_ascii
    window = stdscr.derwin()
@@ -259,7 +260,7 @@ def test_prompt_input(terminal, stdscr, use_ascii):
    assert terminal.prompt_input('hi', key=True) is None


def test_prompt_y_or_n(terminal, stdscr):
def test_terminal_prompt_y_or_n(terminal, stdscr):

    stdscr.getch.side_effect = [ord('y'), ord('N'), terminal.ESCAPE, ord('a')]
    text = 'hi'.encode('ascii')
@@ -286,7 +287,7 @@ def test_prompt_y_or_n(terminal, stdscr):


@pytest.mark.parametrize('use_ascii', [True, False])
def test_open_editor(terminal, use_ascii):
def test_terminal_open_editor(terminal, use_ascii):

    terminal.config['ascii'] = use_ascii

@@ -311,7 +312,7 @@ def test_open_editor(terminal, use_ascii):
    assert not os.path.isfile(data['filename'])


def test_open_editor_error(terminal):
def test_terminal_open_editor_error(terminal):

    with mock.patch('subprocess.Popen', autospec=True) as Popen, \
            mock.patch.object(terminal, 'show_notification'):
@@ -356,7 +357,7 @@ def test_open_editor_error(terminal):
    os.remove(data['filename'])


def test_open_link_mailcap(terminal):
def test_terminal_open_link_mailcap(terminal):

    url = 'http://www.test.com'

@@ -389,7 +390,7 @@ def test_open_link_mailcap(terminal):
        terminal.open_browser.reset_mock()


def test_open_link_subprocess(terminal):
def test_terminal_open_link_subprocess(terminal):

    url = 'http://www.test.com'
    terminal.config['enable_media'] = True
@@ -471,7 +472,7 @@ def test_open_link_subprocess(terminal):
        assert get_error()


def test_open_browser_display(terminal):
def test_terminal_open_browser_display(terminal):

    terminal._display = True
    with mock.patch('webbrowser.open_new_tab', autospec=True) as open_new_tab:
@@ -486,7 +487,7 @@ def test_open_browser_display(terminal):
    assert not curses.doupdate.called


def test_open_browser_display_no_response(terminal):
def test_terminal_open_browser_display_no_response(terminal):

    terminal._display = True
    with mock.patch('rtv.terminal.Process', autospec=True) as Process:
@@ -495,7 +496,7 @@ def test_open_browser_display_no_response(terminal):
    assert isinstance(terminal.loader.exception, BrowserError)


def test_open_browser_no_display(terminal):
def test_terminal_open_browser_no_display(terminal):

    terminal._display = False
    with mock.patch('webbrowser.open_new_tab', autospec=True) as open_new_tab:
@@ -507,7 +508,7 @@ def test_open_browser_no_display(terminal):
    assert curses.doupdate.called


def test_open_pager(terminal, stdscr):
def test_terminal_open_pager(terminal, stdscr):

    data = "Hello World! ❤"

@@ -530,7 +531,7 @@ def test_open_pager(terminal, stdscr):
        assert stdscr.addstr.called_with(0, 0, message)


def test_open_urlview(terminal, stdscr):
def test_terminal_open_urlview(terminal, stdscr):

    data = "Hello World! ❤"

@@ -557,7 +558,7 @@ def test_open_urlview(terminal, stdscr):
        assert stdscr.addstr.called_with(0, 0, message)


def test_strip_textpad(terminal):
def test_terminal_strip_textpad(terminal):

    assert terminal.strip_textpad(None) is None
    assert terminal.strip_textpad(' foo ') == ' foo'
@@ -567,7 +568,7 @@ def test_strip_textpad(terminal):
        'alpha bravocharlie delta\n echo\n\nfoxtrot')


def test_add_space(terminal, stdscr):
def test_terminal_add_space(terminal, stdscr):

    stdscr.x, stdscr.y = 10, 20
    terminal.add_space(stdscr)
@@ -581,7 +582,7 @@ def test_add_space(terminal, stdscr):
    assert not stdscr.addstr.called


def test_attr(terminal):
def test_terminal_attr(terminal):

    assert terminal.attr('CursorBlock') == 0
    assert terminal.attr('@CursorBlock') == curses.A_REVERSE
@@ -592,7 +593,7 @@ def test_attr(terminal):
    assert terminal.attr('NeutralVote') == curses.A_BOLD


def test_check_theme(terminal):
def test_terminal_check_theme(terminal):

    monochrome = Theme(use_color=False)
    default = Theme()
@@ -625,7 +626,7 @@ def test_check_theme(terminal):
    assert not terminal.check_theme(color256)


def test_set_theme(terminal, stdscr):
def test_terminal_set_theme(terminal, stdscr):

    # Default with color enabled
    stdscr.reset_mock()
@@ -660,7 +661,7 @@ def test_set_theme(terminal, stdscr):
    assert terminal.theme.display_string == 'molokai (preset)'


def test_set_theme_no_colors(terminal, stdscr):
def test_terminal_set_theme_no_colors(terminal, stdscr):

    # Monochrome should be forced if the terminal doesn't support color
    with mock.patch('curses.has_colors') as has_colors:
@@ -673,7 +674,7 @@ def test_set_theme_no_colors(terminal, stdscr):
    assert not terminal.theme.use_color


def test_strip_instructions(terminal):
def test_terminal_strip_instructions(terminal):

    # These templates only contain instructions, so they should be empty
    assert terminal.strip_instructions(SUBMISSION_FILE) == ''
@@ -699,3 +700,69 @@ def test_strip_instructions(terminal):
    # Another edge case
    text = '<!--{0} instructions {0}--><!-- An HTML comment -->'.format(TOKEN)
    assert terminal.strip_instructions(text) == '<!-- An HTML comment -->'


def test_terminal_get_link_pages(terminal):

    # When there are no links passed in, there should be no pages generated
    assert terminal.get_link_pages([]) == []

    # A single link should generate a single page
    assert terminal.get_link_pages([1]) == [[1]]

    # Up to 9 links should fit on a single page
    link_pages = terminal.get_link_pages([1, 2, 3, 4, 5, 6, 7, 8, 9])
    assert link_pages == [[1, 2, 3, 4, 5, 6, 7, 8, 9]]

    # When the 10th link is added, the 9th link should now wrap to a new page
    link_pages = terminal.get_link_pages([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
    assert link_pages == [[1, 2, 3, 4, 5, 6, 7, 8], [9, 10]]


def test_terminal_get_link_page_text(terminal):

    link_page = [
        {'href': 'https://www.reddit.com', 'text': 'Reddit Homepage'},
        {'href': 'https://www.duckduckgo.com', 'text': 'Search Engine'},
        {
            'href': 'https://github.com/michael-lazar/rtv',
            'text': 'This project\'s homepage'
        }
    ]

    text = terminal.get_link_page_text(link_page)
    assert text == dedent("""\
        [1] [Reddit Homepage](https://www.reddit.com)
        [2] [Search Engine](https://www.duckduckgo.com)
        [3] [This project's home…](https://github.com/michael-lazar/rtv)
    """)


def test_terminal_prompt_user_to_select_link(terminal, stdscr):

    # Select the 2nd link on the 2nd page
    links = [
        {'href': 'www.website-1.com', 'text': 'Website 1'},
        {'href': 'www.website-2.com', 'text': 'Website 2'},
        {'href': 'www.website-3.com', 'text': 'Website 3'},
        {'href': 'www.website-4.com', 'text': 'Website 4'},
        {'href': 'www.website-5.com', 'text': 'Website 5'},
        {'href': 'www.website-6.com', 'text': 'Website 6'},
        {'href': 'www.website-7.com', 'text': 'Website 7'},
        {'href': 'www.website-8.com', 'text': 'Website 8'},
        {'href': 'www.website-9.com', 'text': 'Website 9'},
        {'href': 'www.website-10.com', 'text': 'Website 10'},
        {'href': 'www.website-11.com', 'text': 'Website 11'},
        {'href': 'www.website-12.com', 'text': 'Website 12'},
    ]
    stdscr.getch.side_effect = [ord('9'), ord('2')]
    href = terminal.prompt_user_to_select_link(links)
    assert href == 'www.website-10.com'

    # Select a link that doesn't exist
    links = [
        {'href': 'www.website-1.com', 'text': 'Website 1'},
        {'href': 'www.website-2.com', 'text': 'Website 2'},
    ]
    stdscr.getch.side_effect = [ord('3')]
    assert terminal.prompt_user_to_select_link(links) is None