PyLint and PEP8 formatting
@@ -59,7 +59,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use"--disable=all --enable=classes
 # --disable=W"
-disable=execfile-builtin,zip-builtin-not-iterating,range-builtin-not-iterating,hex-method,old-division,file-builtin,long-builtin,input-builtin,no-absolute-import,invalid-name,delslice-method,suppressed-message,coerce-builtin,buffer-builtin,import-star-module-level,round-builtin,old-ne-operator,apply-builtin,missing-final-newline,basestring-builtin,xrange-builtin,getslice-method,filter-builtin-not-iterating,map-builtin-not-iterating,raw_input-builtin,indexing-exception,dict-iter-method,metaclass-assignment,setslice-method,next-method-called,intern-builtin,using-cmp-argument,missing-docstring,oct-method,backtick,print-statement,reload-builtin,long-suffix,old-raise-syntax,unicode-builtin,nonzero-method,old-octal-literal,cmp-method,useless-suppression,dict-view-method,parameter-unpacking,unpacking-in-except,coerce-method,unichr-builtin,raising-string,cmp-builtin,reduce-builtin,standarderror-builtin
+disable=execfile-builtin,zip-builtin-not-iterating,range-builtin-not-iterating,hex-method,old-division,file-builtin,long-builtin,input-builtin,no-absolute-import,invalid-name,delslice-method,suppressed-message,coerce-builtin,buffer-builtin,import-star-module-level,round-builtin,old-ne-operator,apply-builtin,missing-final-newline,basestring-builtin,xrange-builtin,getslice-method,filter-builtin-not-iterating,map-builtin-not-iterating,raw_input-builtin,indexing-exception,dict-iter-method,metaclass-assignment,setslice-method,next-method-called,intern-builtin,using-cmp-argument,missing-docstring,oct-method,backtick,print-statement,reload-builtin,long-suffix,old-raise-syntax,unicode-builtin,nonzero-method,old-octal-literal,cmp-method,useless-suppression,dict-view-method,parameter-unpacking,unpacking-in-except,coerce-method,unichr-builtin,raising-string,cmp-builtin,reduce-builtin,standarderror-builtin,no-else-return,too-many-locals,too-many-statements,too-few-public-methods,too-many-public-methods,too-many-instance-attributes
 
 
 [REPORTS]

@@ -337,7 +337,7 @@ exclude-protected=_asdict,_fields,_replace,_source,_make
 [DESIGN]
 
 # Maximum number of arguments for function / method
-max-args=5
+max-args=7
 
 # Argument names that match this expression will be ignored. Default to name
 # with leading underscore

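The checks switched off project-wide above can also be suppressed per file or per line, which is the mechanism this commit uses for wrong-import-position, import-error, and bad-whitespace in the source hunks below. A minimal sketch of the two suppression scopes; the module and names here are illustrative and not part of rtv:

# Illustrative module, not rtv code.  A file-wide pragma, like the one this
# commit adds at the top of rtv/theme.py:
# pylint: disable=missing-docstring

# A line-level pragma: the trailing comment applies only to this statement.
x = 3  # pylint: disable=invalid-name


def double(value):
    return value * 2
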
@@ -24,7 +24,6 @@ _____/ /_/ \___/____/|__/ \___//_/
 (RTV)
 """
 
-
 from __future__ import unicode_literals
 
 from .__version__ import __version__

@@ -1,4 +1,6 @@
 # -*- coding: utf-8 -*-
+# pylint: disable=wrong-import-position
+
 from __future__ import unicode_literals
 from __future__ import print_function
 

@@ -44,9 +46,9 @@ from .subreddit_page import SubredditPage
 from .exceptions import ConfigError, SubredditError
 from .__version__ import __version__
 
 
 _logger = logging.getLogger(__name__)
 
 
 # Pycharm debugging note:
 # You can use pycharm to debug a curses application by launching rtv in a
 # console window (python -m rtv) and using pycharm to attach to the remote

@@ -62,7 +64,7 @@ def main():
     logging.captureWarnings(True)
     if six.PY3:
         # These ones get triggered even when capturing warnings is turned on
-        warnings.simplefilter('ignore', ResourceWarning) #pylint:disable=E0602
+        warnings.simplefilter('ignore', ResourceWarning) # pylint:disable=E0602
 
     # Set the terminal title
     if os.getenv('DISPLAY'):

@@ -13,7 +13,6 @@ from six.moves import configparser
 from . import docs, __version__
 from .objects import KeyMap
 
-
 PACKAGE = os.path.dirname(__file__)
 HOME = os.path.expanduser('~')
 TEMPLATES = os.path.join(PACKAGE, 'templates')

@@ -30,7 +29,6 @@ THEMES = os.path.join(XDG_CONFIG_HOME, 'rtv', 'themes')
 
 
 def build_parser():
-
     parser = argparse.ArgumentParser(
         prog='rtv', description=docs.SUMMARY,
         epilog=docs.CONTROLS,

@@ -79,7 +77,7 @@ def build_parser():
         '--enable-media', dest='enable_media', action='store_const', const=True,
         help='Open external links using programs defined in the mailcap config')
     parser.add_argument(
-        '-V', '--version', action='version', version='rtv '+__version__)
+        '-V', '--version', action='version', version='rtv ' + __version__)
     parser.add_argument(
         '--no-flash', dest='flash', action='store_const', const=False,
         help='Disable screen flashing')

@@ -747,7 +747,7 @@ class SubscriptionContent(Content):
             name = 'Popular Subreddits'
             items = reddit.get_popular_subreddits(limit=None)
         else:
-            raise exceptions.SubscriptionError('Invalid type %s', content_type)
+            raise exceptions.SubscriptionError('Invalid type %s' % content_type)
 
         return cls(name, items, loader)
 

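The change above is more than formatting: passing ('Invalid type %s', content_type) hands the exception two separate arguments and never interpolates the placeholder, because lazy %-style arguments are a logging convention, not an Exception feature. A small illustration with a stand-in error class (not rtv's actual SubscriptionError):

class SubscriptionError(Exception):
    """Hypothetical stand-in used only for this example."""


content_type = 'podcast'

# Logging-style lazy arguments are stored as-is, never formatted:
print(str(SubscriptionError('Invalid type %s', content_type)))
# -> ('Invalid type %s', 'podcast')

# Eager %-formatting produces the intended message:
print(str(SubscriptionError('Invalid type %s' % content_type)))
# -> Invalid type podcast
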
@@ -898,7 +898,7 @@ class RequestHeaderRateLimiter(DefaultHandler):
     def request(self, _cache_key, _cache_ignore, _cache_timeout, **kwargs):
         """
         This is a wrapper function that handles the caching of the request.
-
+
         See DefaultHandler.with_cache for reference.
         """
         if _cache_key:

@@ -56,11 +56,14 @@ class OpenGraphMIMEParser(BaseMIMEParser):
         page = requests.get(url)
         soup = BeautifulSoup(page.content, 'html.parser')
         for og_type in ['video', 'image']:
-            tag = soup.find('meta',
-                            attrs={'property':'og:' + og_type + ':secure_url'}) or \
-                  soup.find('meta', attrs={'property': 'og:' + og_type})
+            prop = 'og:' + og_type + ':secure_url'
+            tag = soup.find('meta', attrs={'property': prop})
+            if not tag:
+                prop = 'og:' + og_type
+                tag = soup.find('meta', attrs={'property': prop})
             if tag:
                 return BaseMIMEParser.get_mimetype(tag.get('content'))
+
         return url, None
 
 

@@ -148,8 +151,8 @@ class RedditVideoMIMEParser(BaseMIMEParser):
             rep = sorted(reps, reverse=True,
                          key=lambda t: int(t.get('bandwidth')))[0]
             return url + '/' + rep.find('baseurl').text, 'video/mp4'
-        else:
-            return request_url, 'video/x-youtube'
+
+        return request_url, 'video/x-youtube'
 
 
 class ImgurApiMIMEParser(BaseMIMEParser):

@@ -291,8 +294,8 @@ class ImgurScrapeAlbumMIMEParser(BaseMIMEParser):
 
         if urls:
             return " ".join(urls), 'image/x-imgur-album'
-        else:
-            return url, None
+
+        return url, None
 
 
 class InstagramMIMEParser(OpenGraphMIMEParser):

@@ -324,8 +327,8 @@ class TwitchMIMEParser(BaseMIMEParser):
         suffix = '-preview.jpg'
         if thumbnail.endswith(suffix):
             return thumbnail.replace(suffix, '.mp4'), 'video/mp4'
-        else:
-            return url, None
+
+        return url, None
 
 
 class OddshotMIMEParser(OpenGraphMIMEParser):

@@ -348,8 +351,8 @@ class VidmeMIMEParser(BaseMIMEParser):
         resp = requests.get('https://api.vid.me/videoByUrl?url=' + url)
         if resp.status_code == 200 and resp.json()['status']:
             return resp.json()['video']['complete_url'], 'video/mp4'
-        else:
-            return url, None
+
+        return url, None
 
 
 class LiveleakMIMEParser(BaseMIMEParser):

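The parsers above all receive the same treatment: once a branch ends in return, the trailing else: is redundant, so the fallback return is dedented to the end of the method (pylint calls this pattern no-else-return). A generic sketch of the shape, not taken from rtv:

import requests


def get_video_url(api_url):
    """Return (url, mimetype), falling back to (api_url, None)."""
    resp = requests.get(api_url)
    if resp.status_code == 200:
        return resp.url, 'video/mp4'

    # No else needed: execution only reaches this point when the early
    # return above was not taken, and one indentation level disappears.
    return api_url, None
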
@@ -371,24 +374,28 @@ class LiveleakMIMEParser(BaseMIMEParser):
         urls = []
         videos = soup.find_all('video')
         for vid in videos:
-            source = vid.find('source', attr={'res': 'HD'}) \
-                     or vid.find('source')
+            source = vid.find('source', attr={'res': 'HD'})
+            source = source or vid.find('source')
             if source:
                 urls.append((source.get('src'), source.get('type')))
 
         # TODO: Handle pages with multiple videos
         if urls:
             return urls[0]
-        else:
-            iframe = soup.find_all(lambda t: t.name == 'iframe' and
-                                   'youtube.com' in t['src'])
-            if iframe:
-                return YoutubeMIMEParser.get_mimetype(iframe[0]['src'].strip('/'))
-            else:
-                return url, None
+
+        def filter_iframe(t):
+            return t.name == 'iframe' and 'youtube.com' in t['src']
+
+        iframe = soup.find_all(filter_iframe)
+        if iframe:
+            return YoutubeMIMEParser.get_mimetype(iframe[0]['src'].strip('/'))
+
+        return url, None
+
 
 class ClippitUserMIMEParser(BaseMIMEParser):
     """
     Clippit uses a video player container
     """
     pattern = re.compile(r'https?://(www\.)?clippituser\.tv/c/.+$')
+

@@ -447,8 +454,8 @@ class FlickrMIMEParser(OpenGraphMIMEParser):
     """
     Flickr uses the Open Graph protocol
     """
-    pattern = re.compile(r'https?://(www\.)?flickr\.com/photos/[^/]+/[^/]+/?$')
     # TODO: handle albums/photosets (https://www.flickr.com/services/api)
+    pattern = re.compile(r'https?://(www\.)?flickr\.com/photos/[^/]+/[^/]+/?$')
 
 
 class WorldStarHipHopMIMEParser(BaseMIMEParser):

@@ -466,18 +473,21 @@ class WorldStarHipHopMIMEParser(BaseMIMEParser):
         page = requests.get(url)
         soup = BeautifulSoup(page.content, 'html.parser')
 
-        source = soup.find_all(lambda t: t.name == 'source' and
-                               t['src'] and t['type'] == 'video/mp4')
+        def filter_source(t):
+            return t.name == 'source' and t['src'] and t['type'] == 'video/mp4'
+
+        source = soup.find_all(filter_source)
         if source:
             return source[0]['src'], 'video/mp4'
-        else:
-            iframe = soup.find_all(lambda t: t.name == 'iframe' and
-                                   'youtube.com' in t['src'])
-            if iframe:
-                return YoutubeMIMEParser.get_mimetype(iframe[0]['src'])
-            else:
-                return url, None
 
+        def filter_iframe(t):
+            return t.name == 'iframe' and 'youtube.com' in t['src']
+
+        iframe = soup.find_all(filter_iframe)
+        if iframe:
+            return YoutubeMIMEParser.get_mimetype(iframe[0]['src'])
+
+        return url, None
 
 
 # Parsers should be listed in the order they will be checked

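In the Liveleak and WorldStarHipHop parsers the multi-line lambdas passed to soup.find_all are pulled out into small named predicates; the behaviour is identical, but the filter gets a readable name and fits the line-length limit. A short, self-contained illustration (the HTML snippet is invented for the example):

from bs4 import BeautifulSoup

HTML = '<p>intro</p><iframe src="https://www.youtube.com/embed/abc"></iframe>'
soup = BeautifulSoup(HTML, 'html.parser')


def filter_iframe(tag):
    # The same predicate the commit extracts from the old inline lambda.
    return tag.name == 'iframe' and 'youtube.com' in tag['src']


print([tag['src'] for tag in soup.find_all(filter_iframe)])
# -> ['https://www.youtube.com/embed/abc']
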
@@ -9,7 +9,7 @@ import codecs
 import logging
 import threading
 
-#pylint: disable=import-error
+# pylint: disable=import-error
 from six.moves.urllib.parse import urlparse, parse_qs
 from six.moves.BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
 

@@ -18,7 +18,6 @@ from .config import TEMPLATES
 from .exceptions import InvalidRefreshToken
 from .packages.praw.errors import HTTPException, OAuthException
 
-
 _logger = logging.getLogger(__name__)
 
 INDEX = os.path.join(TEMPLATES, 'index.html')

@@ -82,11 +81,11 @@ class OAuthHandler(BaseHTTPRequestHandler):
         thread.daemon = True
         thread.start()
 
-    def log_message(self, format, *args):
+    def log_message(self, fmt, *args):
         """
         Redirect logging to our own handler instead of stdout
         """
-        _logger.debug(format, *args)
+        _logger.debug(fmt, *args)
 
     def build_body(self, template_file=INDEX):
         """

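Renaming the log_message parameter from format to fmt removes the shadowing of the format() builtin (pylint's redefined-builtin warning) without changing how BaseHTTPRequestHandler calls the override. A toy stand-in, not the rtv implementation, showing what the shadowing would have cost:

def describe(fmt, *args):
    """With a parameter named 'format', the builtin below would be hidden."""
    rounded = format(0.123456, '.2f')  # the builtin is still reachable
    return fmt % args + ' (rounded sample: %s)' % rounded


print(describe('handled %s in %d ms', 'GET /front', 12))
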
@@ -289,7 +289,7 @@ class LoadScreen(object):
 
             # Break up the designated sleep interval into smaller
             # chunks so we can more responsively check for interrupts.
-            for _ in range(int(interval/0.01)):
+            for _ in range(int(interval / 0.01)):
                 # Pressing escape triggers a keyboard interrupt
                 if self._terminal.getch() == self._terminal.ESCAPE:
                     os.kill(os.getpid(), signal.SIGINT)

@@ -457,12 +457,12 @@ class Navigator(object):
             valid = True
         else:
             # flip to the direction of movement
-            if ((direction > 0) & (self.inverted is True))\
-                    | ((direction < 0) & (self.inverted is False)):
-                self.page_index += (self.step * (n_windows-1))
+            if ((direction > 0) & (self.inverted is True)) \
+                    | ((direction < 0) & (self.inverted is False)):
+                self.page_index += (self.step * (n_windows - 1))
                 self.inverted = not self.inverted
                 self.cursor_index \
-                    = (n_windows-(direction < 0)) - self.cursor_index
+                    = (n_windows - (direction < 0)) - self.cursor_index
 
             valid = False
             adj = 0

@@ -7,8 +7,8 @@ Reference:
 https://github.com/kennethreitz/requests/blob/master/requests/packages/__init__.py
 """
 from __future__ import absolute_import
-import sys
 
+import sys
 
 __praw_hash__ = '1e82eb0f8690a2acbdc15d030130dc50507eb4ba'
 __praw_bundled__ = True

@@ -18,6 +18,7 @@ try:
     from . import praw
 except ImportError:
     import praw
+
 if not praw.__version__.startswith('3.'):
     raise RuntimeError('Invalid PRAW version ({0}) detected, '
                        'rtv requires PRAW version 3'.format(praw.__version__))

@@ -23,11 +23,12 @@ def logged_in(f):
     """
     Decorator for Page methods that require the user to be authenticated.
     """
 
     @wraps(f)
     def wrapped_method(self, *args, **kwargs):
         if not self.reddit.is_oauth_session():
             self.term.show_notification('Not logged in')
-            return
+            return None
         return f(self, *args, **kwargs)
     return wrapped_method
+

@@ -58,7 +59,7 @@ class Page(object):
     def refresh_content(self, order=None, name=None):
         raise NotImplementedError
 
-    def _draw_item(self, window, data, inverted):
+    def _draw_item(self, win, data, inverted):
         raise NotImplementedError
 
     def get_selected_item(self):

@@ -501,7 +502,8 @@ class Page(object):
             # if the content will fill up the page, given that it is dependent
             # on the size of the terminal.
             self.nav.flip((len(self._subwindows) - 1))
-            return self._draw_content()
+            self._draw_content()
+            return
 
         if self.nav.cursor_index >= len(self._subwindows):
             # Don't allow the cursor to go over the number of subwindows

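Both changes to the Page class above make return values explicit: the decorator's guard clause now returns None rather than a bare return, and return self._draw_content() is split into a call followed by a plain return, so a method whose other paths return nothing no longer appears to return a value. A toy sketch of the guard-clause pattern (not rtv's logged_in decorator):

def guarded(logged_in, action):
    """Run action() only for authenticated sessions."""
    if not logged_in:
        # Explicit None keeps every exit point returning a value.
        return None
    return action()


print(guarded(False, lambda: 'posted'))  # None
print(guarded(True, lambda: 'posted'))   # posted
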
@@ -137,7 +137,7 @@ class SubmissionPage(Page):
     @SubmissionController.register(Command('SUBMISSION_OPEN_IN_BROWSER'))
     def open_link(self):
         """
-        Open the selected item with the web browser
+        Open the selected item with the web browser
         """
 
         data = self.get_selected_item()

@@ -356,7 +356,7 @@ class SubmissionPage(Page):
             self.term.add_space(win)
             self.term.add_line(win, '[saved]', attr=attr)
 
-        for row, text in enumerate(split_body, start=offset+1):
+        for row, text in enumerate(split_body, start=offset + 1):
             attr = self.term.attr('CommentText')
             if row in valid_rows:
                 self.term.add_line(win, text, row, 1, attr=attr)

@@ -320,7 +320,7 @@ class Terminal(object):
         # Cut off the lines of the message that don't fit on the screen
         box_width = min(box_width, n_cols)
         box_height = min(box_height, n_rows)
-        message = message[:box_height-2]
+        message = message[:box_height - 2]
 
         s_row = (n_rows - box_height) // 2 + v_offset
         s_col = (n_cols - box_width) // 2 + h_offset

@@ -377,13 +377,15 @@ class Terminal(object):
         """
 
         if not self.config['enable_media']:
-            return self.open_browser(url)
+            self.open_browser(url)
+            return
 
         try:
             with self.loader('Checking link', catch_exception=False):
                 command, entry = self.get_mailcap_entry(url)
         except exceptions.MailcapEntryNotFound:
-            return self.open_browser(url)
+            self.open_browser(url)
+            return
 
         _logger.info('Executing command: %s', command)
         needs_terminal = 'needsterminal' in entry

@@ -733,7 +735,7 @@ class Terminal(object):
         n_rows, n_cols = self.stdscr.getmaxyx()
         v_offset, h_offset = self.stdscr.getbegyx()
         ch, attr = str(' '), self.attr('Prompt')
-        prompt = self.clean(prompt, n_cols-1)
+        prompt = self.clean(prompt, n_cols - 1)
 
         # Create a new window to draw the text at the bottom of the screen,
         # so we can erase it when we're done.

@@ -811,7 +813,7 @@ class Terminal(object):
 
         # Prune empty lines at the bottom of the textbox.
         for item in stack[::-1]:
-            if len(item) == 0:
+            if item:
                 stack.pop()
             else:
                 break

@@ -874,7 +876,7 @@ class Terminal(object):
         Check that the terminal supports the provided theme, and applies
         the theme to the terminal if possible.
 
-        If the terminal doesn't support the theme, this falls back to the
+        If the terminal doesn't support the theme, this falls back to the
         default theme. The default theme only requires 8 colors so it
         should be compatible with any terminal that supports basic colors.
         """

rtv/theme.py

@@ -1,3 +1,5 @@
+# pylint: disable=bad-whitespace
+
 import os
 import codecs
 import curses

@@ -167,20 +169,20 @@ class Theme(object):
         # Create the "Selected" versions of elements, which are prefixed with
         # the @ symbol. For example, "@CommentText" represents how comment
         # text is formatted when it is highlighted by the cursor.
-        for name in self.DEFAULT_THEME['normal']:
-            dest = '@{0}'.format(name)
-            self._set_fallback(elements, name, 'Selected', dest)
-        for name in self.DEFAULT_THEME['cursor']:
-            dest = '@{0}'.format(name)
-            self._set_fallback(elements, name, 'SelectedCursor', dest)
+        for key in self.DEFAULT_THEME['normal']:
+            dest = '@{0}'.format(key)
+            self._set_fallback(elements, key, 'Selected', dest)
+        for key in self.DEFAULT_THEME['cursor']:
+            dest = '@{0}'.format(key)
+            self._set_fallback(elements, key, 'SelectedCursor', dest)
 
         # Fill in the ``None`` values for all of the elements with normal text
-        for name in self.DEFAULT_THEME['normal']:
-            self._set_fallback(elements, name, 'Normal')
-        for name in self.DEFAULT_THEME['cursor']:
-            self._set_fallback(elements, name, 'Normal')
-        for name in self.DEFAULT_THEME['page']:
-            self._set_fallback(elements, name, 'Normal')
+        for key in self.DEFAULT_THEME['normal']:
+            self._set_fallback(elements, key, 'Normal')
+        for key in self.DEFAULT_THEME['cursor']:
+            self._set_fallback(elements, key, 'Normal')
+        for key in self.DEFAULT_THEME['page']:
+            self._set_fallback(elements, key, 'Normal')
 
         self.elements = elements
 

@@ -215,7 +217,7 @@ class Theme(object):
         """
         Bind the theme's colors to curses's internal color pair map.
 
-        This method must be called once (after curses has been initialized)
+        This method must be called once (after curses has been initialized)
         before any element attributes can be accessed. Color codes and other
         special attributes will be mixed bitwise into a single value that
         can be passed into curses draw functions.

@@ -242,7 +244,7 @@ class Theme(object):
 
     def get(self, element, selected=False):
         """
-        Returns the curses attribute code for the given element.
+        Returns the curses attribute code for the given element.
         """
         if self._attribute_map is None:
             raise RuntimeError('Attempted to access theme attribute before '

@@ -314,7 +316,7 @@ class Theme(object):
     def print_themes(cls, path=THEMES):
         """
         Prints a human-readable summary of the installed themes to stdout.
-
+
         This is intended to be used as a command-line utility, outside of the
         main curses display loop.
         """

@@ -377,7 +379,7 @@ class Theme(object):
     def from_file(cls, filename, source):
         """
         Load a theme from the specified configuration file.
-
+
         Parameters:
             filename: The name of the filename to load.
             source: A description of where the theme was loaded from.

@@ -415,7 +417,7 @@ class Theme(object):
     def _parse_line(cls, element, line, filename=None):
         """
         Parse a single line from a theme file.
-
+
         Format:
             <element>: <foreground> <background> <attributes>
         """

@@ -484,14 +486,14 @@ class Theme(object):
     def rgb_to_ansi(color):
         """
         Converts hex RGB to the 6x6x6 xterm color space
-
+
         Args:
             color (str): RGB color string in the format "#RRGGBB"
-
+
         Returns:
             str: ansi color string in the format "ansi_n", where n
                 is between 16 and 230
-
+
         Reference:
             https://github.com/chadj2/bash-ui/blob/master/COLORS.md
         """

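For reference, the 6x6x6 cube conversion that the docstring above describes maps each 8-bit channel onto six levels and indexes into the xterm palette starting at code 16. The sketch below uses the common approximation and is not a copy of rtv's rgb_to_ansi:

def rgb_to_ansi(color):
    """Approximate '#RRGGBB' with an xterm 256-color cube entry."""
    red, green, blue = (int(color[i:i + 2], 16) for i in (1, 3, 5))
    # Scale 0-255 down to 0-5, then step through the cube: 36 per red level,
    # 6 per green level, 1 per blue level, offset by the 16 base colors.
    r6, g6, b6 = (channel * 6 // 256 for channel in (red, green, blue))
    return 'ansi_{0}'.format(16 + 36 * r6 + 6 * g6 + b6)


print(rgb_to_ansi('#ff0000'))  # ansi_196, pure red in the xterm cube
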
@@ -554,5 +556,3 @@ class ThemeList(object):
 
     def previous(self, theme):
         return self._step(theme, -1)
-
-

@@ -25,6 +25,7 @@ Thanks to the following people for their contributions to this project.
 
 """
 
+
 def main():
 
     logging.captureWarnings(True)

@@ -53,5 +54,6 @@ def main():
     with open(FILENAME, 'wb') as fp:
         fp.write(text)
 
+
 if __name__ == '__main__':
     main()

@@ -79,5 +79,6 @@ def main():
     with open(os.path.join(ROOT, 'rtv.1'), 'w') as fp:
         fp.write(out)
 
+
 if __name__ == '__main__':
     main()

@@ -278,5 +278,6 @@ def main():
         theme = theme_list.next(theme)
         theme = theme_list.previous(theme)
 
+
 sys.exit(main())
 

@@ -39,4 +39,4 @@ def test_copy():
     assert Popen.call_args[0][0] == ['pbcopy', 'w']
     p.communicate.assert_called_with(input='test'.encode('utf-8'))
     copy_osx('test ❤')
-    p.communicate.assert_called_with(input='test ❤'.encode('utf-8'))
+    p.communicate.assert_called_with(input='test ❤'.encode('utf-8'))

@@ -190,6 +190,7 @@ def test_content_flatten_comments_3(reddit):
             self.id = comment_id
             self.parent_id = parent_id
             self.replies = []
+
         def __repr__(self):
             return '%s (%s)' % (self.id, self.parent_id)
 

@@ -181,4 +181,4 @@ def test_oauth_authorize(oauth, reddit, stdscr, refresh_token):
     oauth.reddit.get_access_information.side_effect = exception
     oauth.authorize()
     assert isinstance(oauth.term.loader.exception, OAuthException)
-    assert not oauth.config.save_refresh_token.called
+    assert not oauth.config.save_refresh_token.called