From 68d5d99f5749035f3f1047ac7a00a802d3138776 Mon Sep 17 00:00:00 2001
From: Obosob
Date: Thu, 30 Jul 2015 09:01:20 +0100
Subject: [PATCH 1/6] Handle internal reddit links internally, including x-posts

---
 rtv/content.py   | 13 +++++++++++--
 rtv/subreddit.py |  7 ++++---
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/rtv/content.py b/rtv/content.py
index adb1e44..65b7c49 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -2,6 +2,7 @@
 import logging
 import praw
 import requests
+import re
 
 from .exceptions import SubmissionError, SubredditError, AccountError
 from .helpers import humanize_timestamp, wrap_text, strip_subreddit_url
@@ -105,7 +106,8 @@ class BaseContent(object):
         displayed through the terminal.
         """
 
-        is_selfpost = lambda s: s.startswith('http://www.reddit.com/r/')
+        reddit_link = re.compile("https?://(np.)?redd(it.com|.it)/r/.*")
+        reddit_link_no_host = re.compile("/r/.*")
         author = getattr(sub, 'author', '[deleted]')
         name = getattr(author, 'name', '[deleted]')
         flair = getattr(sub, 'link_flair_text', '')
@@ -123,7 +125,14 @@ class BaseContent(object):
         data['subreddit'] = strip_subreddit_url(sub.permalink)
         data['flair'] = flair
         data['url_full'] = sub.url
-        data['url'] = ('selfpost' if is_selfpost(sub.url) else sub.url)
+
+        if reddit_link.match(sub.url):
+            stripped_url = reddit_link_no_host.search(sub.url).group()
+            stripped_comments = reddit_link_no_host.search(sub.permalink).group()
+            data['url'] = ('selfpost' if stripped_url == stripped_comments
+                           else 'x-post via {}'.format(strip_subreddit_url(sub.url)) )
+        else:
+            data['url'] = sub.url
         data['likes'] = sub.likes
         data['gold'] = sub.gilded > 0
         data['nsfw'] = sub.over_18
diff --git a/rtv/subreddit.py b/rtv/subreddit.py
index c12f107..03501ac 100644
--- a/rtv/subreddit.py
+++ b/rtv/subreddit.py
@@ -108,10 +108,11 @@ class SubredditPage(BasePage):
         "Open a link with the webbrowser"
 
         data = self.content.get(self.nav.absolute_index)
-        if data['url'] == 'selfpost':
-            self.open_submission()
+        url = data['url_full']
+        if data['url'] != url:
+            page = SubmissionPage(self.stdscr, self.reddit, url=url)
+            page.loop()
         else:
-            url = data['url_full']
             open_browser(url)
             global history
             history.add(url)

From a916820866783986a08c77fa644c57706ddfc0bf Mon Sep 17 00:00:00 2001
From: Obosob
Date: Thu, 30 Jul 2015 11:43:13 +0100
Subject: [PATCH 2/6] squashed a teensy bug

---
 rtv/content.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rtv/content.py b/rtv/content.py
index 65b7c49..27f2efb 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -106,7 +106,7 @@ class BaseContent(object):
         displayed through the terminal.
         """
 
-        reddit_link = re.compile("https?://(np.)?redd(it.com|.it)/r/.*")
+        reddit_link = re.compile("https?://(www.)?(np.)?redd(it.com|.it)/r/.*")
         reddit_link_no_host = re.compile("/r/.*")
         author = getattr(sub, 'author', '[deleted]')
         name = getattr(author, 'name', '[deleted]')

From 763174fea6a89ef814473184c3c3a516d96a993d Mon Sep 17 00:00:00 2001
From: Obosob
Date: Thu, 30 Jul 2015 12:49:25 +0100
Subject: [PATCH 3/6] always force https urls

---
 rtv/content.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rtv/content.py b/rtv/content.py
index 27f2efb..62fe0d7 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -163,7 +163,7 @@ class SubmissionContent(BaseContent):
 
         try:
             with loader():
-                submission = reddit.get_submission(url, comment_sort='hot')
+                submission = reddit.get_submission(url.replace("http:","https:"), comment_sort='hot')
         except praw.errors.APIException:
             raise SubmissionError(url)
 

From f4ea094e933ff01f919005e71d621f0e14c993e5 Mon Sep 17 00:00:00 2001
From: Obosob
Date: Thu, 30 Jul 2015 16:46:47 +0100
Subject: [PATCH 4/6] address Brobin's comments on #126

---
 rtv/content.py   | 2 +-
 rtv/subreddit.py | 9 ++++++---
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/rtv/content.py b/rtv/content.py
index 62fe0d7..2bdb6b3 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -130,7 +130,7 @@ class BaseContent(object):
             stripped_url = reddit_link_no_host.search(sub.url).group()
             stripped_comments = reddit_link_no_host.search(sub.permalink).group()
             data['url'] = ('selfpost' if stripped_url == stripped_comments
-                           else 'x-post via {}'.format(strip_subreddit_url(sub.url)) )
+                           else 'x-post')
         else:
             data['url'] = sub.url
         data['likes'] = sub.likes
diff --git a/rtv/subreddit.py b/rtv/subreddit.py
index 03501ac..c27b45f 100644
--- a/rtv/subreddit.py
+++ b/rtv/subreddit.py
@@ -9,7 +9,7 @@ from .exceptions import SubredditError, AccountError
 from .page import BasePage, Navigator, BaseController
 from .submission import SubmissionPage
 from .content import SubredditContent
-from .helpers import open_browser, open_editor
+from .helpers import open_browser, open_editor, strip_subreddit_url
 from .docs import SUBMISSION_FILE
 from .history import load_history, save_history
 from .curses_helpers import (Color, LoadScreen, add_line, get_arrow, get_gold,
@@ -109,7 +109,7 @@ class SubredditPage(BasePage):
 
         data = self.content.get(self.nav.absolute_index)
         url = data['url_full']
-        if data['url'] != url:
+        if data['url'] in ['x-post', 'selfpost']:
             page = SubmissionPage(self.stdscr, self.reddit, url=url)
             page.loop()
         else:
@@ -178,7 +178,10 @@
             seen = (data['url_full'] in history)
             link_color = Color.MAGENTA if seen else Color.BLUE
             attr = curses.A_UNDERLINE | link_color
-            add_line(win, u'{url}'.format(**data), row, 1, attr)
+            if data['url'] == 'x-post':
+                add_line(win, u'x-post via {}'.format(strip_subreddit_url(data['url_full'])), row, 1, attr)
+            else:
+                add_line(win, u'{url}'.format(**data), row, 1, attr)
 
         row = n_title + offset + 1
         if row in valid_rows:

From aeff01ad00f9a7c4a0f2cbcaf2bf11b1ebdee2cc Mon Sep 17 00:00:00 2001
From: obosob
Date: Thu, 30 Jul 2015 20:45:23 +0000
Subject: [PATCH 5/6] Escape regex . (dot), they need to be literal

---
 rtv/content.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rtv/content.py b/rtv/content.py
index 2bdb6b3..cdaccef 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -106,7 +106,7 @@ class BaseContent(object):
         displayed through the terminal.
         """
 
-        reddit_link = re.compile("https?://(www.)?(np.)?redd(it.com|.it)/r/.*")
+        reddit_link = re.compile("https?://(www\.)?(np\.)?redd(it\.com|\.it)/r/.*")
         reddit_link_no_host = re.compile("/r/.*")
         author = getattr(sub, 'author', '[deleted]')
         name = getattr(author, 'name', '[deleted]')

From 42d40cf6fc82f5ebb1dbbefea56ed142ae826657 Mon Sep 17 00:00:00 2001
From: Obosob
Date: Fri, 31 Jul 2015 09:18:52 +0100
Subject: [PATCH 6/6] Make selfpost/x-post logic easier to follow

---
 rtv/content.py   | 18 +++++++++++-------
 rtv/subreddit.py |  7 ++-----
 2 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/rtv/content.py b/rtv/content.py
index cdaccef..c0d397f 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -107,7 +107,6 @@ class BaseContent(object):
         """
 
         reddit_link = re.compile("https?://(www\.)?(np\.)?redd(it\.com|\.it)/r/.*")
-        reddit_link_no_host = re.compile("/r/.*")
         author = getattr(sub, 'author', '[deleted]')
         name = getattr(author, 'name', '[deleted]')
         flair = getattr(sub, 'link_flair_text', '')
@@ -126,13 +125,18 @@ class BaseContent(object):
         data['flair'] = flair
         data['url_full'] = sub.url
 
-        if reddit_link.match(sub.url):
-            stripped_url = reddit_link_no_host.search(sub.url).group()
-            stripped_comments = reddit_link_no_host.search(sub.permalink).group()
-            data['url'] = ('selfpost' if stripped_url == stripped_comments
-                           else 'x-post')
+        if data['permalink'].split('/r/')[-1] == data['url_full'].split('/r/')[-1]:
+            data['url_type'] = 'selfpost'
+            data['url'] = 'selfpost'
+
+        elif reddit_link.match(data['url_full']):
+            data['url_type'] = 'x-post'
+            data['url'] = 'x-post via {}'.format(strip_subreddit_url(data['url_full']))
+
         else:
-            data['url'] = sub.url
+            data['url_type'] = 'external'
+            data['url'] = data['url_full']
+
         data['likes'] = sub.likes
         data['gold'] = sub.gilded > 0
         data['nsfw'] = sub.over_18
diff --git a/rtv/subreddit.py b/rtv/subreddit.py
index c27b45f..342bea2 100644
--- a/rtv/subreddit.py
+++ b/rtv/subreddit.py
@@ -109,7 +109,7 @@ class SubredditPage(BasePage):
 
         data = self.content.get(self.nav.absolute_index)
         url = data['url_full']
-        if data['url'] in ['x-post', 'selfpost']:
+        if data['url_type'] in ['x-post', 'selfpost']:
            page = SubmissionPage(self.stdscr, self.reddit, url=url)
            page.loop()
         else:
@@ -178,10 +178,7 @@
             seen = (data['url_full'] in history)
             link_color = Color.MAGENTA if seen else Color.BLUE
             attr = curses.A_UNDERLINE | link_color
-            if data['url'] == 'x-post':
-                add_line(win, u'x-post via {}'.format(strip_subreddit_url(data['url_full'])), row, 1, attr)
-            else:
-                add_line(win, u'{url}'.format(**data), row, 1, attr)
+            add_line(win, u'{url}'.format(**data), row, 1, attr)
 
         row = n_title + offset + 1
         if row in valid_rows: