PyLint and PEP8 formatting

Author: Michael Lazar
Date: 2018-04-02 17:54:15 -04:00
Commit: 100515769c (parent ec951cf163)
19 changed files with 103 additions and 85 deletions

View File

@@ -59,7 +59,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use"--disable=all --enable=classes
 # --disable=W"
-disable=execfile-builtin,zip-builtin-not-iterating,range-builtin-not-iterating,hex-method,old-division,file-builtin,long-builtin,input-builtin,no-absolute-import,invalid-name,delslice-method,suppressed-message,coerce-builtin,buffer-builtin,import-star-module-level,round-builtin,old-ne-operator,apply-builtin,missing-final-newline,basestring-builtin,xrange-builtin,getslice-method,filter-builtin-not-iterating,map-builtin-not-iterating,raw_input-builtin,indexing-exception,dict-iter-method,metaclass-assignment,setslice-method,next-method-called,intern-builtin,using-cmp-argument,missing-docstring,oct-method,backtick,print-statement,reload-builtin,long-suffix,old-raise-syntax,unicode-builtin,nonzero-method,old-octal-literal,cmp-method,useless-suppression,dict-view-method,parameter-unpacking,unpacking-in-except,coerce-method,unichr-builtin,raising-string,cmp-builtin,reduce-builtin,standarderror-builtin
+disable=execfile-builtin,zip-builtin-not-iterating,range-builtin-not-iterating,hex-method,old-division,file-builtin,long-builtin,input-builtin,no-absolute-import,invalid-name,delslice-method,suppressed-message,coerce-builtin,buffer-builtin,import-star-module-level,round-builtin,old-ne-operator,apply-builtin,missing-final-newline,basestring-builtin,xrange-builtin,getslice-method,filter-builtin-not-iterating,map-builtin-not-iterating,raw_input-builtin,indexing-exception,dict-iter-method,metaclass-assignment,setslice-method,next-method-called,intern-builtin,using-cmp-argument,missing-docstring,oct-method,backtick,print-statement,reload-builtin,long-suffix,old-raise-syntax,unicode-builtin,nonzero-method,old-octal-literal,cmp-method,useless-suppression,dict-view-method,parameter-unpacking,unpacking-in-except,coerce-method,unichr-builtin,raising-string,cmp-builtin,reduce-builtin,standarderror-builtin,no-else-return,too-many-locals,too-many-statements,too-few-public-methods,too-many-public-methods,too-many-instance-attributes
 [REPORTS]
@@ -337,7 +337,7 @@ exclude-protected=_asdict,_fields,_replace,_source,_make
 [DESIGN]
 # Maximum number of arguments for function / method
-max-args=5
+max-args=7
 # Argument names that match this expression will be ignored. Default to name
 # with leading underscore

View File

@@ -24,7 +24,6 @@ _____/ /_/ \___/____/|__/ \___//_/
 (RTV)
 """
 from __future__ import unicode_literals
 from .__version__ import __version__

View File

@@ -1,4 +1,6 @@
 # -*- coding: utf-8 -*-
+# pylint: disable=wrong-import-position
 from __future__ import unicode_literals
 from __future__ import print_function
@@ -44,9 +46,9 @@ from .subreddit_page import SubredditPage
 from .exceptions import ConfigError, SubredditError
 from .__version__ import __version__
 _logger = logging.getLogger(__name__)
 # Pycharm debugging note:
 # You can use pycharm to debug a curses application by launching rtv in a
 # console window (python -m rtv) and using pycharm to attach to the remote
@@ -62,7 +64,7 @@ def main():
     logging.captureWarnings(True)
     if six.PY3:
         # These ones get triggered even when capturing warnings is turned on
-        warnings.simplefilter('ignore', ResourceWarning) #pylint:disable=E0602
+        warnings.simplefilter('ignore', ResourceWarning)  # pylint:disable=E0602
     # Set the terminal title
     if os.getenv('DISPLAY'):
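The new file-level pragma addresses pylint's wrong-import-position check (C0413), which fires whenever an import appears after other module-level statements, a common situation when a script has to run setup code before importing the rest of the application. A hypothetical illustration of the pattern the check complains about:

    # Hypothetical example, not rtv code: C0413 (wrong-import-position) is
    # reported for any import that follows module-level executable code.
    import logging

    logging.basicConfig(level=logging.DEBUG)  # module-level statement

    import json  # C0413: import after code, unless the check is disabled

    print(json.dumps({'configured': True}))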

View File

@@ -13,7 +13,6 @@ from six.moves import configparser
 from . import docs, __version__
 from .objects import KeyMap
 PACKAGE = os.path.dirname(__file__)
 HOME = os.path.expanduser('~')
 TEMPLATES = os.path.join(PACKAGE, 'templates')
@@ -30,7 +29,6 @@ THEMES = os.path.join(XDG_CONFIG_HOME, 'rtv', 'themes')
 def build_parser():
     parser = argparse.ArgumentParser(
         prog='rtv', description=docs.SUMMARY,
         epilog=docs.CONTROLS,
@@ -79,7 +77,7 @@ def build_parser():
         '--enable-media', dest='enable_media', action='store_const', const=True,
         help='Open external links using programs defined in the mailcap config')
     parser.add_argument(
-        '-V', '--version', action='version', version='rtv '+__version__)
+        '-V', '--version', action='version', version='rtv ' + __version__)
     parser.add_argument(
         '--no-flash', dest='flash', action='store_const', const=False,
         help='Disable screen flashing')

View File

@@ -747,7 +747,7 @@ class SubscriptionContent(Content):
             name = 'Popular Subreddits'
             items = reddit.get_popular_subreddits(limit=None)
         else:
-            raise exceptions.SubscriptionError('Invalid type %s', content_type)
+            raise exceptions.SubscriptionError('Invalid type %s' % content_type)
         return cls(name, items, loader)
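This hunk also changes behavior slightly: the old call passed the format string and the value as two separate constructor arguments, so the placeholder was never filled in, while the new code interpolates the value into the message first. A small sketch of the difference, using a stand-in for rtv's exceptions.SubscriptionError:

    # Stand-in exception class for illustration only.
    class SubscriptionError(Exception):
        pass

    print(SubscriptionError('Invalid type %s', 'popular'))   # ('Invalid type %s', 'popular')
    print(SubscriptionError('Invalid type %s' % 'popular'))  # Invalid type popular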

View File

@@ -56,11 +56,14 @@ class OpenGraphMIMEParser(BaseMIMEParser):
         page = requests.get(url)
         soup = BeautifulSoup(page.content, 'html.parser')
         for og_type in ['video', 'image']:
-            tag = soup.find('meta',
-                attrs={'property':'og:' + og_type + ':secure_url'}) or \
-                soup.find('meta', attrs={'property': 'og:' + og_type})
+            prop = 'og:' + og_type + ':secure_url'
+            tag = soup.find('meta', attrs={'property': prop})
+            if not tag:
+                prop = 'og:' + og_type
+                tag = soup.find('meta', attrs={'property': prop})
             if tag:
                 return BaseMIMEParser.get_mimetype(tag.get('content'))
         return url, None
@@ -148,8 +151,8 @@ class RedditVideoMIMEParser(BaseMIMEParser):
             rep = sorted(reps, reverse=True,
                          key=lambda t: int(t.get('bandwidth')))[0]
             return url + '/' + rep.find('baseurl').text, 'video/mp4'
-        else:
-            return request_url, 'video/x-youtube'
+        return request_url, 'video/x-youtube'
 class ImgurApiMIMEParser(BaseMIMEParser):
@@ -291,8 +294,8 @@ class ImgurScrapeAlbumMIMEParser(BaseMIMEParser):
         if urls:
             return " ".join(urls), 'image/x-imgur-album'
-        else:
-            return url, None
+        return url, None
 class InstagramMIMEParser(OpenGraphMIMEParser):
@@ -324,8 +327,8 @@ class TwitchMIMEParser(BaseMIMEParser):
         suffix = '-preview.jpg'
         if thumbnail.endswith(suffix):
             return thumbnail.replace(suffix, '.mp4'), 'video/mp4'
-        else:
-            return url, None
+        return url, None
 class OddshotMIMEParser(OpenGraphMIMEParser):
@@ -348,8 +351,8 @@ class VidmeMIMEParser(BaseMIMEParser):
         resp = requests.get('https://api.vid.me/videoByUrl?url=' + url)
         if resp.status_code == 200 and resp.json()['status']:
             return resp.json()['video']['complete_url'], 'video/mp4'
-        else:
-            return url, None
+        return url, None
 class LiveleakMIMEParser(BaseMIMEParser):
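The four preceding hunks all apply the same cleanup: when the if branch ends in a return, the trailing else: is redundant and the fallback can be dedented (the pattern pylint reports as no-else-return, R1705). A hypothetical before/after sketch:

    # Hypothetical example, not rtv code.
    def mimetype_old(status):
        if status == 200:
            return 'video/mp4'
        else:              # R1705: unnecessary "else" after "return"
            return None

    def mimetype_new(status):
        if status == 200:
            return 'video/mp4'
        return None

    assert mimetype_old(404) is None
    assert mimetype_new(404) is None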
@@ -371,24 +374,28 @@ class LiveleakMIMEParser(BaseMIMEParser):
         urls = []
         videos = soup.find_all('video')
         for vid in videos:
-            source = vid.find('source', attr={'res': 'HD'}) \
-                or vid.find('source')
+            source = vid.find('source', attr={'res': 'HD'})
+            source = source or vid.find('source')
             if source:
                 urls.append((source.get('src'), source.get('type')))
         # TODO: Handle pages with multiple videos
         if urls:
             return urls[0]
-        else:
-            iframe = soup.find_all(lambda t: t.name == 'iframe' and
-                                   'youtube.com' in t['src'])
-            if iframe:
-                return YoutubeMIMEParser.get_mimetype(iframe[0]['src'].strip('/'))
-            else:
-                return url, None
+        def filter_iframe(t):
+            return t.name == 'iframe' and 'youtube.com' in t['src']
+        iframe = soup.find_all(filter_iframe)
+        if iframe:
+            return YoutubeMIMEParser.get_mimetype(iframe[0]['src'].strip('/'))
+        return url, None
 class ClippitUserMIMEParser(BaseMIMEParser):
     """
+    Clippit uses a video player container
     """
     pattern = re.compile(r'https?://(www\.)?clippituser\.tv/c/.+$')
@@ -447,8 +454,8 @@ class FlickrMIMEParser(OpenGraphMIMEParser):
     """
     Flickr uses the Open Graph protocol
     """
-    pattern = re.compile(r'https?://(www\.)?flickr\.com/photos/[^/]+/[^/]+/?$')
     # TODO: handle albums/photosets (https://www.flickr.com/services/api)
+    pattern = re.compile(r'https?://(www\.)?flickr\.com/photos/[^/]+/[^/]+/?$')
 class WorldStarHipHopMIMEParser(BaseMIMEParser):
@@ -466,18 +473,21 @@ class WorldStarHipHopMIMEParser(BaseMIMEParser):
         page = requests.get(url)
         soup = BeautifulSoup(page.content, 'html.parser')
-        source = soup.find_all(lambda t: t.name == 'source' and
-                               t['src'] and t['type'] == 'video/mp4')
+        def filter_source(t):
+            return t.name == 'source' and t['src'] and t['type'] == 'video/mp4'
+        source = soup.find_all(filter_source)
         if source:
             return source[0]['src'], 'video/mp4'
-        else:
-            iframe = soup.find_all(lambda t: t.name == 'iframe' and
-                                   'youtube.com' in t['src'])
-            if iframe:
-                return YoutubeMIMEParser.get_mimetype(iframe[0]['src'])
-            else:
-                return url, None
+        def filter_iframe(t):
+            return t.name == 'iframe' and 'youtube.com' in t['src']
+        iframe = soup.find_all(filter_iframe)
+        if iframe:
+            return YoutubeMIMEParser.get_mimetype(iframe[0]['src'])
+        return url, None
 # Parsers should be listed in the order they will be checked
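The last two hunks also replace multi-line lambdas passed to BeautifulSoup's find_all() with named predicate functions. find_all() accepts any callable that takes a tag and returns a boolean, so the refactor is behavior-preserving; a self-contained sketch with made-up HTML:

    # Hypothetical example, not rtv code; requires beautifulsoup4.
    from bs4 import BeautifulSoup

    html = '<iframe src="https://www.youtube.com/embed/abc123"></iframe>'
    soup = BeautifulSoup(html, 'html.parser')

    def filter_iframe(tag):
        # Named predicate passed to find_all() instead of a multi-line lambda
        return tag.name == 'iframe' and 'youtube.com' in tag.get('src', '')

    print([tag['src'] for tag in soup.find_all(filter_iframe)])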

View File

@@ -9,7 +9,7 @@ import codecs
 import logging
 import threading
-#pylint: disable=import-error
+# pylint: disable=import-error
 from six.moves.urllib.parse import urlparse, parse_qs
 from six.moves.BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
@@ -18,7 +18,6 @@ from .config import TEMPLATES
 from .exceptions import InvalidRefreshToken
 from .packages.praw.errors import HTTPException, OAuthException
 _logger = logging.getLogger(__name__)
 INDEX = os.path.join(TEMPLATES, 'index.html')
@@ -82,11 +81,11 @@ class OAuthHandler(BaseHTTPRequestHandler):
         thread.daemon = True
         thread.start()
-    def log_message(self, format, *args):
+    def log_message(self, fmt, *args):
         """
         Redirect logging to our own handler instead of stdout
         """
-        _logger.debug(format, *args)
+        _logger.debug(fmt, *args)
     def build_body(self, template_file=INDEX):
         """

View File

@@ -289,7 +289,7 @@ class LoadScreen(object):
             # Break up the designated sleep interval into smaller
             # chunks so we can more responsively check for interrupts.
-            for _ in range(int(interval/0.01)):
+            for _ in range(int(interval / 0.01)):
                 # Pressing escape triggers a keyboard interrupt
                 if self._terminal.getch() == self._terminal.ESCAPE:
                     os.kill(os.getpid(), signal.SIGINT)
@@ -457,12 +457,12 @@ class Navigator(object):
             valid = True
         else:
             # flip to the direction of movement
-            if ((direction > 0) & (self.inverted is True))\
+            if ((direction > 0) & (self.inverted is True)) \
                     | ((direction < 0) & (self.inverted is False)):
-                self.page_index += (self.step * (n_windows-1))
+                self.page_index += (self.step * (n_windows - 1))
                 self.inverted = not self.inverted
                 self.cursor_index \
-                    = (n_windows-(direction < 0)) - self.cursor_index
+                    = (n_windows - (direction < 0)) - self.cursor_index
                 valid = False
         adj = 0

View File

@@ -7,8 +7,8 @@ Reference:
 https://github.com/kennethreitz/requests/blob/master/requests/packages/__init__.py
 """
 from __future__ import absolute_import
-import sys
+import sys
 __praw_hash__ = '1e82eb0f8690a2acbdc15d030130dc50507eb4ba'
 __praw_bundled__ = True
@@ -18,6 +18,7 @@ try:
     from . import praw
 except ImportError:
     import praw
 if not praw.__version__.startswith('3.'):
     raise RuntimeError('Invalid PRAW version ({0}) detected, '
                        'rtv requires PRAW version 3'.format(praw.__version__))

View File

@@ -23,11 +23,12 @@ def logged_in(f):
     """
     Decorator for Page methods that require the user to be authenticated.
     """
     @wraps(f)
     def wrapped_method(self, *args, **kwargs):
         if not self.reddit.is_oauth_session():
             self.term.show_notification('Not logged in')
-            return
+            return None
         return f(self, *args, **kwargs)
     return wrapped_method
@@ -58,7 +59,7 @@ class Page(object):
     def refresh_content(self, order=None, name=None):
         raise NotImplementedError
-    def _draw_item(self, window, data, inverted):
+    def _draw_item(self, win, data, inverted):
         raise NotImplementedError
     def get_selected_item(self):
@@ -501,7 +502,8 @@ class Page(object):
             # if the content will fill up the page, given that it is dependent
             # on the size of the terminal.
             self.nav.flip((len(self._subwindows) - 1))
-            return self._draw_content()
+            self._draw_content()
+            return
         if self.nav.cursor_index >= len(self._subwindows):
             # Don't allow the cursor to go over the number of subwindows
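The return-statement changes in this file follow the check pylint calls inconsistent-return-statements (R1710): within one function, either every exit returns an explicit value or none of them does. A bare return next to return f(...) mixes the two, and return self._draw_content() reads as a value return even though the callee returns None, which is why it is split into a call followed by a bare return. A hypothetical sketch:

    # Hypothetical example, not rtv code.
    def lookup_old(table, key):
        if key not in table:
            return               # bare return ...
        return table[key]        # ... mixed with a value return -> R1710

    def lookup_new(table, key):
        if key not in table:
            return None          # explicit None keeps the exits consistent
        return table[key]

    print(lookup_new({'a': 1}, 'a'))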

View File

@@ -356,7 +356,7 @@ class SubmissionPage(Page):
             self.term.add_space(win)
             self.term.add_line(win, '[saved]', attr=attr)
-        for row, text in enumerate(split_body, start=offset+1):
+        for row, text in enumerate(split_body, start=offset + 1):
             attr = self.term.attr('CommentText')
             if row in valid_rows:
                 self.term.add_line(win, text, row, 1, attr=attr)

View File

@@ -320,7 +320,7 @@ class Terminal(object):
         # Cut off the lines of the message that don't fit on the screen
         box_width = min(box_width, n_cols)
         box_height = min(box_height, n_rows)
-        message = message[:box_height-2]
+        message = message[:box_height - 2]
         s_row = (n_rows - box_height) // 2 + v_offset
         s_col = (n_cols - box_width) // 2 + h_offset
@@ -377,13 +377,15 @@ class Terminal(object):
         """
         if not self.config['enable_media']:
-            return self.open_browser(url)
+            self.open_browser(url)
+            return
         try:
             with self.loader('Checking link', catch_exception=False):
                 command, entry = self.get_mailcap_entry(url)
         except exceptions.MailcapEntryNotFound:
-            return self.open_browser(url)
+            self.open_browser(url)
+            return
         _logger.info('Executing command: %s', command)
         needs_terminal = 'needsterminal' in entry
@@ -733,7 +735,7 @@ class Terminal(object):
         n_rows, n_cols = self.stdscr.getmaxyx()
         v_offset, h_offset = self.stdscr.getbegyx()
         ch, attr = str(' '), self.attr('Prompt')
-        prompt = self.clean(prompt, n_cols-1)
+        prompt = self.clean(prompt, n_cols - 1)
         # Create a new window to draw the text at the bottom of the screen,
         # so we can erase it when we're done.
@@ -811,7 +813,7 @@ class Terminal(object):
         # Prune empty lines at the bottom of the textbox.
         for item in stack[::-1]:
-            if len(item) == 0:
+            if not item:
                 stack.pop()
             else:
                 break
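The final hunk swaps a len(...) == 0 test for a truthiness check, in line with pylint's len-as-condition check (C1801) and PEP 8's advice that empty sequences are already false. A short sketch of the same trailing-line pruning idea:

    # Hypothetical example, not rtv code.
    lines = ['first', 'second', '', '']

    # Prune empty strings from the end of the list.
    while lines and not lines[-1]:   # instead of: len(lines[-1]) == 0
        lines.pop()

    print(lines)  # ['first', 'second']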

View File

@@ -1,3 +1,5 @@
+# pylint: disable=bad-whitespace
 import os
 import codecs
 import curses
@@ -167,20 +169,20 @@ class Theme(object):
         # Create the "Selected" versions of elements, which are prefixed with
         # the @ symbol. For example, "@CommentText" represents how comment
         # text is formatted when it is highlighted by the cursor.
-        for name in self.DEFAULT_THEME['normal']:
-            dest = '@{0}'.format(name)
-            self._set_fallback(elements, name, 'Selected', dest)
-        for name in self.DEFAULT_THEME['cursor']:
-            dest = '@{0}'.format(name)
-            self._set_fallback(elements, name, 'SelectedCursor', dest)
+        for key in self.DEFAULT_THEME['normal']:
+            dest = '@{0}'.format(key)
+            self._set_fallback(elements, key, 'Selected', dest)
+        for key in self.DEFAULT_THEME['cursor']:
+            dest = '@{0}'.format(key)
+            self._set_fallback(elements, key, 'SelectedCursor', dest)
         # Fill in the ``None`` values for all of the elements with normal text
-        for name in self.DEFAULT_THEME['normal']:
-            self._set_fallback(elements, name, 'Normal')
-        for name in self.DEFAULT_THEME['cursor']:
-            self._set_fallback(elements, name, 'Normal')
-        for name in self.DEFAULT_THEME['page']:
-            self._set_fallback(elements, name, 'Normal')
+        for key in self.DEFAULT_THEME['normal']:
+            self._set_fallback(elements, key, 'Normal')
+        for key in self.DEFAULT_THEME['cursor']:
+            self._set_fallback(elements, key, 'Normal')
+        for key in self.DEFAULT_THEME['page']:
+            self._set_fallback(elements, key, 'Normal')
         self.elements = elements
@@ -554,5 +556,3 @@ class ThemeList(object):
     def previous(self, theme):
         return self._step(theme, -1)

View File

@@ -25,6 +25,7 @@ Thanks to the following people for their contributions to this project.
 """
 def main():
     logging.captureWarnings(True)
@@ -53,5 +54,6 @@ def main():
     with open(FILENAME, 'wb') as fp:
         fp.write(text)
 if __name__ == '__main__':
     main()

View File

@@ -79,5 +79,6 @@ def main():
     with open(os.path.join(ROOT, 'rtv.1'), 'w') as fp:
         fp.write(out)
 if __name__ == '__main__':
     main()

View File

@@ -278,5 +278,6 @@ def main():
     theme = theme_list.next(theme)
     theme = theme_list.previous(theme)
 sys.exit(main())

View File

@@ -190,6 +190,7 @@ def test_content_flatten_comments_3(reddit):
             self.id = comment_id
             self.parent_id = parent_id
             self.replies = []
         def __repr__(self):
             return '%s (%s)' % (self.id, self.parent_id)