diff --git a/rtv/__main__.py b/rtv/__main__.py
index 9bd9ba7..9a2759b 100644
--- a/rtv/__main__.py
+++ b/rtv/__main__.py
@@ -9,10 +9,10 @@ import logging
import warnings
import six
-import praw
import requests
from . import docs
+from .packages import praw
from .config import Config, copy_default_config, copy_default_mailcap
from .oauth import OAuthHelper
from .terminal import Terminal
diff --git a/rtv/content.py b/rtv/content.py
index f417a7a..3551410 100644
--- a/rtv/content.py
+++ b/rtv/content.py
@@ -6,11 +6,11 @@ import logging
from datetime import datetime
import six
-import praw
-from praw.errors import InvalidSubreddit
from kitchen.text.display import wrap
from . import exceptions
+from .packages import praw
+from .packages.praw.errors import InvalidSubreddit
_logger = logging.getLogger(__name__)
diff --git a/rtv/objects.py b/rtv/objects.py
index eb030b5..0ac79eb 100644
--- a/rtv/objects.py
+++ b/rtv/objects.py
@@ -14,10 +14,10 @@ import curses.ascii
from contextlib import contextmanager
import six
-import praw
import requests
from . import exceptions
+from .packages import praw
_logger = logging.getLogger(__name__)
diff --git a/rtv/packages/__init__.py b/rtv/packages/__init__.py
new file mode 100644
index 0000000..65807c6
--- /dev/null
+++ b/rtv/packages/__init__.py
@@ -0,0 +1,23 @@
+"""
+This stub allows the end-user to fallback to their system installation of praw
+if the bundled package is missing. This technique was inspired by the requests
+library and how it handles dependencies.
+
+Reference:
+ https://github.com/kennethreitz/requests/blob/master/requests/packages/__init__.py
+"""
+from __future__ import absolute_import
+import sys
+
+
+__praw_hash__ = 'a632ff005fc09e74a8d3d276adc10aa92638962c'
+
+
+try:
+ from . import praw
+except ImportError:
+ import praw
+ if not praw.__version__.startswith('3.'):
+ msg = 'Invalid PRAW version {0}, exiting'.format(praw.__version__)
+ raise RuntimeError(msg)
+ sys.modules['%s.praw' % __name__] = praw
diff --git a/rtv/packages/praw/__init__.py b/rtv/packages/praw/__init__.py
new file mode 100644
index 0000000..0f6fee1
--- /dev/null
+++ b/rtv/packages/praw/__init__.py
@@ -0,0 +1,2794 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Python Reddit API Wrapper.
+
+PRAW, an acronym for "Python Reddit API Wrapper", is a python package that
+allows for simple access to reddit's API. PRAW aims to be as easy to use as
+possible and is designed to follow all of reddit's API rules. You have to give
+a useragent, everything else is handled by PRAW so you needn't worry about
+violating them.
+
+More information about PRAW can be found at https://github.com/praw-dev/praw
+"""
+
+from __future__ import print_function, unicode_literals
+
+import json
+import os
+import platform
+import re
+import six
+import sys
+from praw import decorators, errors
+from praw.handlers import DefaultHandler
+from praw.helpers import chunk_sequence, normalize_url
+from praw.internal import (_image_type, _prepare_request,
+ _raise_redirect_exceptions,
+ _raise_response_exceptions,
+ _to_reddit_list, _warn_pyopenssl)
+from praw.settings import CONFIG
+from requests import Session
+from requests.compat import urljoin
+from requests.utils import to_native_string
+from requests import Request
+# pylint: disable=F0401
+from six.moves import html_entities, http_cookiejar
+from six.moves.urllib.parse import parse_qs, urlparse, urlunparse
+# pylint: enable=F0401
+from warnings import warn_explicit
+
+
+__version__ = '3.6.1'
+
+
+class Config(object): # pylint: disable=R0903
+ """A class containing the configuration for a reddit site."""
+
+ API_PATHS = {'accept_mod_invite': 'api/accept_moderator_invite',
+ 'access_token_url': 'api/v1/access_token/',
+ 'approve': 'api/approve/',
+ 'authorize': 'api/v1/authorize/',
+ 'banned': 'r/{subreddit}/about/banned/',
+ 'blocked': 'prefs/blocked/',
+ 'by_id': 'by_id/',
+ 'captcha': 'captcha/',
+ 'clearflairtemplates': 'api/clearflairtemplates/',
+ 'collapse_message': 'api/collapse_message/',
+ 'comment': 'api/comment/',
+ 'comment_replies': 'message/comments/',
+ 'comments': 'comments/',
+ 'compose': 'api/compose/',
+ 'contest_mode': 'api/set_contest_mode/',
+ 'contributors': 'r/{subreddit}/about/contributors/',
+ 'controversial': 'controversial/',
+ 'default_subreddits': 'subreddits/default/',
+ 'del': 'api/del/',
+ 'deleteflair': 'api/deleteflair',
+ 'delete_redditor': 'api/delete_user',
+ 'delete_sr_header': 'r/{subreddit}/api/delete_sr_header',
+ 'delete_sr_image': 'r/{subreddit}/api/delete_sr_img',
+ 'distinguish': 'api/distinguish/',
+ 'domain': 'domain/{domain}/',
+ 'duplicates': 'duplicates/{submissionid}/',
+ 'edit': 'api/editusertext/',
+ 'edited': 'r/{subreddit}/about/edited/',
+ 'flair': 'api/flair/',
+ 'flairconfig': 'api/flairconfig/',
+ 'flaircsv': 'api/flaircsv/',
+ 'flairlist': 'r/{subreddit}/api/flairlist/',
+ 'flairselector': 'api/flairselector/',
+ 'flairtemplate': 'api/flairtemplate/',
+ 'friend': 'api/friend/',
+ 'friend_v1': 'api/v1/me/friends/{user}',
+ 'friends': 'prefs/friends/',
+ 'gild_thing': 'api/v1/gold/gild/{fullname}/',
+ 'gild_user': 'api/v1/gold/give/{username}/',
+ 'help': 'help/',
+ 'hide': 'api/hide/',
+ 'ignore_reports': 'api/ignore_reports/',
+ 'inbox': 'message/inbox/',
+ 'info': 'api/info/',
+ 'leavecontributor': 'api/leavecontributor',
+ 'leavemoderator': 'api/leavemoderator',
+ 'lock': 'api/lock/',
+ 'login': 'api/login/',
+ 'me': 'api/v1/me',
+ 'mentions': 'message/mentions',
+ 'message': 'message/messages/{messageid}/',
+ 'messages': 'message/messages/',
+ 'moderators': 'r/{subreddit}/about/moderators/',
+ 'modlog': 'r/{subreddit}/about/log/',
+ 'modqueue': 'r/{subreddit}/about/modqueue/',
+ 'mod_mail': 'r/{subreddit}/message/moderator/',
+ 'morechildren': 'api/morechildren/',
+ 'my_con_subreddits': 'subreddits/mine/contributor/',
+ 'my_mod_subreddits': 'subreddits/mine/moderator/',
+ 'my_multis': 'api/multi/mine/',
+ 'my_subreddits': 'subreddits/mine/subscriber/',
+ 'new': 'new/',
+ 'new_subreddits': 'subreddits/new/',
+ 'marknsfw': 'api/marknsfw/',
+ 'multireddit': 'user/{user}/m/{multi}/',
+ 'multireddit_add': ('api/multi/user/{user}/m/{multi}/r/'
+ '{subreddit}'),
+ 'multireddit_about': 'api/multi/user/{user}/m/{multi}/',
+ 'multireddit_copy': 'api/multi/copy/',
+ 'multireddit_mine': 'me/m/{multi}/',
+ 'multireddit_rename': 'api/multi/rename/',
+ 'multireddit_user': 'api/multi/user/{user}/',
+ 'mute_sender': 'api/mute_message_author/',
+ 'muted': 'r/{subreddit}/about/muted/',
+ 'popular_subreddits': 'subreddits/popular/',
+ 'post_replies': 'message/selfreply/',
+ 'read_message': 'api/read_message/',
+ 'reddit_url': '/',
+ 'register': 'api/register/',
+ 'remove': 'api/remove/',
+ 'report': 'api/report/',
+ 'reports': 'r/{subreddit}/about/reports/',
+ 'rising': 'rising/',
+ 'rules': 'r/{subreddit}/about/rules/',
+ 'save': 'api/save/',
+ 'saved': 'saved/',
+ 'search': 'r/{subreddit}/search/',
+ 'search_reddit_names': 'api/search_reddit_names/',
+ 'select_flair': 'api/selectflair/',
+ 'sent': 'message/sent/',
+ 'sticky': 'r/{subreddit}/about/sticky/',
+ 'sticky_submission': 'api/set_subreddit_sticky/',
+ 'site_admin': 'api/site_admin/',
+ 'spam': 'r/{subreddit}/about/spam/',
+ 'stylesheet': 'r/{subreddit}/about/stylesheet/',
+ 'submit': 'api/submit/',
+ 'sub_comments_gilded': 'r/{subreddit}/comments/gilded/',
+ 'sub_recommendations': 'api/recommend/sr/{subreddits}',
+ 'subreddit': 'r/{subreddit}/',
+ 'subreddit_about': 'r/{subreddit}/about/',
+ 'subreddit_comments': 'r/{subreddit}/comments/',
+ 'subreddit_css': 'api/subreddit_stylesheet/',
+ 'subreddit_random': 'r/{subreddit}/random/',
+ 'subreddit_settings': 'r/{subreddit}/about/edit/',
+ 'subreddit_traffic': 'r/{subreddit}/about/traffic/',
+ 'subscribe': 'api/subscribe/',
+ 'suggested_sort': 'api/set_suggested_sort/',
+ 'top': 'top/',
+ 'uncollapse_message': 'api/uncollapse_message/',
+ 'unfriend': 'api/unfriend/',
+ 'unhide': 'api/unhide/',
+ 'unlock': 'api/unlock/',
+ 'unmarknsfw': 'api/unmarknsfw/',
+ 'unmoderated': 'r/{subreddit}/about/unmoderated/',
+ 'unmute_sender': 'api/unmute_message_author/',
+ 'unignore_reports': 'api/unignore_reports/',
+ 'unread': 'message/unread/',
+ 'unread_message': 'api/unread_message/',
+ 'unsave': 'api/unsave/',
+ 'upload_image': 'api/upload_sr_img',
+ 'user': 'user/{user}/',
+ 'user_about': 'user/{user}/about/',
+ 'username_available': 'api/username_available/',
+ 'vote': 'api/vote/',
+ 'wiki_edit': 'api/wiki/edit/',
+ 'wiki_page': 'r/{subreddit}/wiki/{page}', # No /
+ 'wiki_page_editor': ('r/{subreddit}/api/wiki/alloweditor/'
+ '{method}'),
+ 'wiki_page_settings': 'r/{subreddit}/wiki/settings/{page}',
+ 'wiki_pages': 'r/{subreddit}/wiki/pages/',
+ 'wiki_banned': 'r/{subreddit}/about/wikibanned/',
+ 'wiki_contributors': 'r/{subreddit}/about/wikicontributors/'
+ }
+ WWW_PATHS = set(['authorize'])
+
+ @staticmethod
+ def ua_string(praw_info):
+ """Return the user-agent string.
+
+ The user-agent string contains PRAW version and platform version info.
+
+ """
+ if os.environ.get('SERVER_SOFTWARE') is not None:
+ # Google App Engine information
+ # https://developers.google.com/appengine/docs/python/
+ info = os.environ.get('SERVER_SOFTWARE')
+ else:
+ # Standard platform information
+ info = platform.platform(True).encode('ascii', 'ignore')
+
+ return '{0} PRAW/{1} Python/{2} {3}'.format(
+ praw_info, __version__, sys.version.split()[0], info)
+
+ def __init__(self, site_name, **kwargs):
+ """Initialize PRAW's configuration."""
+ def config_boolean(item):
+ return item and item.lower() in ('1', 'yes', 'true', 'on')
+
+ obj = dict(CONFIG.items(site_name))
+ # Overwrite configuration file settings with those given during
+ # instantiation of the Reddit instance.
+ for key, value in kwargs.items():
+ obj[key] = value
+
+ self.api_url = 'https://' + obj['api_domain']
+ self.permalink_url = 'https://' + obj['permalink_domain']
+ self.oauth_url = ('https://' if config_boolean(obj['oauth_https'])
+ else 'http://') + obj['oauth_domain']
+ self.api_request_delay = float(obj['api_request_delay'])
+ self.by_kind = {obj['comment_kind']: objects.Comment,
+ obj['message_kind']: objects.Message,
+ obj['redditor_kind']: objects.Redditor,
+ obj['submission_kind']: objects.Submission,
+ obj['subreddit_kind']: objects.Subreddit,
+ 'LabeledMulti': objects.Multireddit,
+ 'modaction': objects.ModAction,
+ 'more': objects.MoreComments,
+ 'wikipage': objects.WikiPage,
+ 'wikipagelisting': objects.WikiPageListing,
+ 'UserList': objects.UserList}
+ self.by_object = dict((value, key) for (key, value) in
+ six.iteritems(self.by_kind))
+ self.by_object[objects.LoggedInRedditor] = obj['redditor_kind']
+ self.cache_timeout = float(obj['cache_timeout'])
+ self.check_for_updates = config_boolean(obj['check_for_updates'])
+ self.domain = obj['permalink_domain']
+ self.output_chars_limit = int(obj['output_chars_limit'])
+ self.log_requests = int(obj['log_requests'])
+ self.http_proxy = (obj.get('http_proxy') or os.getenv('http_proxy') or
+ None)
+ self.https_proxy = (obj.get('https_proxy') or
+ os.getenv('https_proxy') or None)
+ # We use `get(...) or None` because `get` may return an empty string
+
+ self.validate_certs = config_boolean(obj.get('validate_certs'))
+
+ self.client_id = obj.get('oauth_client_id') or None
+ self.client_secret = obj.get('oauth_client_secret') or None
+ self.redirect_uri = obj.get('oauth_redirect_uri') or None
+ self.grant_type = obj.get('oauth_grant_type') or None
+ self.refresh_token = obj.get('oauth_refresh_token') or None
+ self.store_json_result = config_boolean(obj.get('store_json_result'))
+
+ if 'short_domain' in obj and obj['short_domain']:
+ self._short_domain = 'http://' + obj['short_domain']
+ else:
+ self._short_domain = None
+ self.timeout = float(obj['timeout'])
+ try:
+ self.user = obj['user'] if obj['user'] else None
+ self.pswd = obj['pswd']
+ except KeyError:
+ self.user = self.pswd = None
+
+ def __getitem__(self, key):
+ """Return the URL for key."""
+ prefix = self.permalink_url if key in self.WWW_PATHS else self.api_url
+ return urljoin(prefix, self.API_PATHS[key])
+
+ @property
+ def short_domain(self):
+ """Return the short domain of the reddit server.
+
+ Used to generate the shortlink. For reddit.com the short_domain is
+ redd.it.
+
+ """
+ if self._short_domain:
+ return self._short_domain
+ else:
+ raise errors.ClientException('No short domain specified.')
+
+
+class BaseReddit(object):
+ """A base class that allows access to reddit's API.
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ RETRY_CODES = [502, 503, 504]
+ update_checked = False
+ openssl_warned = False
+
+ def __init__(self, user_agent, site_name=None, handler=None,
+ disable_update_check=False, **kwargs):
+ """Initialize our connection with a reddit server.
+
+ The user_agent is how your application identifies itself. Read the
+ official API guidelines for user_agents
+ https://github.com/reddit/reddit/wiki/API. Applications using default
+ user_agents such as "Python/urllib" are drastically limited.
+
+ site_name allows you to specify which reddit you want to connect to.
+ The installation defaults are reddit.com, if you only need to connect
+ to reddit.com then you can safely ignore this. If you want to connect
+ to another reddit, set site_name to the name of that reddit. This must
+ match with an entry in praw.ini. If site_name is None, then the site
+ name will be looked for in the environment variable REDDIT_SITE. If it
+ is not found there, the default site name reddit matching reddit.com
+ will be used.
+
+ disable_update_check allows you to prevent an update check from
+ occurring in spite of the check_for_updates setting in praw.ini.
+
+ All additional parameters specified via kwargs will be used to
+ initialize the Config object. This can be used to specify configuration
+ settings during instantiation of the Reddit instance. See
+ https://praw.readthedocs.io/en/latest/pages/configuration_files.html
+ for more details.
+
+ """
+ if not user_agent or not isinstance(user_agent, six.string_types):
+ raise TypeError('user_agent must be a non-empty string.')
+ if 'bot' in user_agent.lower():
+ warn_explicit(
+ 'The keyword `bot` in your user_agent may be problematic.',
+ UserWarning, '', 0)
+
+ self.config = Config(site_name or os.getenv('REDDIT_SITE') or 'reddit',
+ **kwargs)
+ self.handler = handler or DefaultHandler()
+ self.http = Session()
+ self.http.headers['User-Agent'] = self.config.ua_string(user_agent)
+ self.http.validate_certs = self.config.validate_certs
+
+ # This `Session` object is only used to store request information that
+ # is used to make prepared requests. It _should_ never be used to make
+ # a direct request, thus we raise an exception when it is used.
+
+ def _req_error(*_, **__):
+ raise errors.ClientException('Do not make direct requests.')
+ self.http.request = _req_error
+
+ if self.config.http_proxy or self.config.https_proxy:
+ self.http.proxies = {}
+ if self.config.http_proxy:
+ self.http.proxies['http'] = self.config.http_proxy
+ if self.config.https_proxy:
+ self.http.proxies['https'] = self.config.https_proxy
+ self.modhash = None
+
+ # Check for updates if permitted and this is the first Reddit instance
+ # if not disable_update_check and not BaseReddit.update_checked \
+ # and self.config.check_for_updates:
+ # update_check(__name__, __version__)
+ # BaseReddit.update_checked = True
+
+ # Warn against a potentially incompatible version of pyOpenSSL
+ if not BaseReddit.openssl_warned and self.config.validate_certs:
+ _warn_pyopenssl()
+ BaseReddit.openssl_warned = True
+
+ # Initial values
+ self._use_oauth = False
+
+ def _request(self, url, params=None, data=None, files=None, auth=None,
+ timeout=None, raw_response=False, retry_on_error=True,
+ method=None):
+ """Given a page url and a dict of params, open and return the page.
+
+ :param url: the url to grab content from.
+ :param params: a dictionary containing the GET data to put in the url
+ :param data: a dictionary containing the extra data to submit
+ :param files: a dictionary specifying the files to upload
+ :param auth: Add the HTTP authentication headers (see requests)
+ :param timeout: Specifies the maximum time that the actual HTTP request
+ can take.
+ :param raw_response: return the response object rather than the
+ response body
+ :param retry_on_error: if True retry the request, if it fails, for up
+ to 3 attempts
+ :returns: either the response body or the response object
+
+ """
+ def build_key_items(url, params, data, auth, files, method):
+ request = _prepare_request(self, url, params, data, auth, files,
+ method)
+
+ # Prepare extra arguments
+ key_items = []
+ oauth = request.headers.get('Authorization', None)
+ for key_value in (params, data, request.cookies, auth, oauth):
+ if isinstance(key_value, dict):
+ key_items.append(tuple(key_value.items()))
+ elif isinstance(key_value, http_cookiejar.CookieJar):
+ key_items.append(tuple(key_value.get_dict().items()))
+ else:
+ key_items.append(key_value)
+ kwargs = {'_rate_domain': self.config.domain,
+ '_rate_delay': int(self.config.api_request_delay),
+ '_cache_ignore': bool(files) or raw_response,
+ '_cache_timeout': int(self.config.cache_timeout)}
+
+ return (request, key_items, kwargs)
+
+ def decode(match):
+ return six.unichr(html_entities.name2codepoint[match.group(1)])
+
+ def handle_redirect():
+ response = None
+ url = request.url
+ while url: # Manually handle 302 redirects
+ request.url = url
+ kwargs['_cache_key'] = (normalize_url(request.url),
+ tuple(key_items))
+ response = self.handler.request(
+ request=request.prepare(),
+ proxies=self.http.proxies,
+ timeout=timeout,
+ verify=self.http.validate_certs, **kwargs)
+
+ if self.config.log_requests >= 2:
+ msg = 'status: {0}\n'.format(response.status_code)
+ sys.stderr.write(msg)
+ url = _raise_redirect_exceptions(response)
+ assert url != request.url
+ return response
+
+ timeout = self.config.timeout if timeout is None else timeout
+ request, key_items, kwargs = build_key_items(url, params, data,
+ auth, files, method)
+
+ tempauth = self._use_oauth
+ remaining_attempts = 3 if retry_on_error else 1
+ attempt_oauth_refresh = bool(self.refresh_token)
+ while True:
+ try:
+ self._use_oauth = self.is_oauth_session()
+ response = handle_redirect()
+ _raise_response_exceptions(response)
+ self.http.cookies.update(response.cookies)
+ if raw_response:
+ return response
+ else:
+ return re.sub('&([^;]+);', decode, response.text)
+ except errors.OAuthInvalidToken as error:
+ if not attempt_oauth_refresh:
+ raise
+ attempt_oauth_refresh = False
+ self._use_oauth = False
+ self.refresh_access_information()
+ self._use_oauth = tempauth
+ request, key_items, kwargs = build_key_items(url, params,
+ data, auth, files,
+ method)
+ except errors.HTTPException as error:
+ remaining_attempts -= 1
+ # pylint: disable=W0212
+ if error._raw.status_code not in self.RETRY_CODES or \
+ remaining_attempts == 0:
+ raise
+ finally:
+ self._use_oauth = tempauth
+
+ def _json_reddit_objecter(self, json_data):
+ """Return an appropriate RedditObject from json_data when possible."""
+ try:
+ object_class = self.config.by_kind[json_data['kind']]
+ except KeyError:
+ if 'json' in json_data:
+ if len(json_data) != 1:
+ msg = 'Unknown object type: {0}'.format(json_data)
+ warn_explicit(msg, UserWarning, '', 0)
+ return json_data['json']
+ else:
+ return object_class.from_api_response(self, json_data['data'])
+ return json_data
+
+ def evict(self, urls):
+ """Evict url(s) from the cache.
+
+ :param urls: An iterable containing normalized urls.
+ :returns: The number of items removed from the cache.
+
+ """
+ if isinstance(urls, six.string_types):
+ urls = (urls,)
+ return self.handler.evict(urls)
+
+ @decorators.oauth_generator
+ def get_content(self, url, params=None, limit=0, place_holder=None,
+ root_field='data', thing_field='children',
+ after_field='after', object_filter=None, **kwargs):
+ """A generator method to return reddit content from a URL.
+
+ Starts at the initial url, and fetches content using the `after`
+ JSON data until `limit` entries have been fetched, or the
+ `place_holder` has been reached.
+
+ :param url: the url to start fetching content from
+ :param params: dictionary containing extra GET data to put in the url
+ :param limit: the number of content entries to fetch. If limit <= 0,
+ fetch the default for your account (25 for unauthenticated
+ users). If limit is None, then fetch as many entries as possible
+ (reddit returns at most 100 per request, however, PRAW will
+ automatically make additional requests as necessary).
+ :param place_holder: if not None, the method will fetch `limit`
+ content, stopping if it finds content with `id` equal to
+ `place_holder`. The place_holder item is the last item to be
+ yielded from this generator. Note that the use of `place_holder` is
+ not 100% reliable as the place holder item may no longer exist due
+ to being removed or deleted.
+ :param root_field: indicates the field in the json response that holds
+ the data. Most objects use 'data', however some (flairlist) don't
+ have the 'data' object. Use None for the root object.
+ :param thing_field: indicates the field under the root_field which
+ contains the list of things. Most objects use 'children'.
+ :param after_field: indicates the field which holds the after item
+ element
+ :param object_filter: if set to an integer value, fetch content from
+ the corresponding list index in the JSON response. For example
+ the JSON response for submission duplicates is a list of objects,
+ and the object we want to fetch from is at index 1. So we set
+ object_filter=1 to filter out the other useless list elements.
+ :type place_holder: a string corresponding to a reddit base36 id
+ without prefix, e.g. 'asdfasdf'
+ :returns: a list of reddit content, of type Subreddit, Comment,
+ Submission or user flair.
+
+ """
+ _use_oauth = kwargs.get('_use_oauth', self.is_oauth_session())
+
+ objects_found = 0
+ params = params or {}
+ fetch_all = fetch_once = False
+ if limit is None:
+ fetch_all = True
+ params['limit'] = 1024 # Just use a big number
+ elif limit > 0:
+ params['limit'] = limit
+ else:
+ fetch_once = True
+
+ if hasattr(self, '_url_update'):
+ url = self._url_update(url) # pylint: disable=E1101
+
+ # While we still need to fetch more content to reach our limit, do so.
+ while fetch_once or fetch_all or objects_found < limit:
+ if _use_oauth: # Set the necessary _use_oauth value
+ assert self._use_oauth is False
+ self._use_oauth = _use_oauth
+ try:
+ page_data = self.request_json(url, params=params)
+ if object_filter:
+ page_data = page_data[object_filter]
+ finally: # Restore _use_oauth value
+ if _use_oauth:
+ self._use_oauth = False
+ fetch_once = False
+ root = page_data.get(root_field, page_data)
+ for thing in root[thing_field]:
+ yield thing
+ objects_found += 1
+ # Terminate when we've reached the limit, or place holder
+ if objects_found == limit or (place_holder and
+ thing.id == place_holder):
+ return
+ # Set/update the 'after' parameter for the next iteration
+ if root.get(after_field):
+ # We use `root.get` to also test if the value evaluates to True
+ params['after'] = root[after_field]
+ else:
+ return
+
+ @decorators.raise_api_exceptions
+ def request(self, url, params=None, data=None, retry_on_error=True,
+ method=None):
+ """Make a HTTP request and return the response.
+
+ :param url: the url to grab content from.
+ :param params: a dictionary containing the GET data to put in the url
+ :param data: a dictionary containing the extra data to submit
+ :param retry_on_error: if True retry the request, if it fails, for up
+ to 3 attempts
+ :param method: The HTTP method to use in the request.
+ :returns: The HTTP response.
+ """
+ return self._request(url, params, data, raw_response=True,
+ retry_on_error=retry_on_error, method=method)
+
+ @decorators.raise_api_exceptions
+ def request_json(self, url, params=None, data=None, as_objects=True,
+ retry_on_error=True, method=None):
+ """Get the JSON processed from a page.
+
+ :param url: the url to grab content from.
+ :param params: a dictionary containing the GET data to put in the url
+ :param data: a dictionary containing the extra data to submit
+ :param as_objects: if True return reddit objects else raw json dict.
+ :param retry_on_error: if True retry the request, if it fails, for up
+ to 3 attempts
+ :returns: JSON processed page
+
+ """
+ if not url.endswith('.json'):
+ url += '.json'
+ response = self._request(url, params, data, method=method,
+ retry_on_error=retry_on_error)
+ hook = self._json_reddit_objecter if as_objects else None
+ # Request url just needs to be available for the objecter to use
+ self._request_url = url # pylint: disable=W0201
+
+ if response == '':
+ # Some of the v1 urls don't return anything, even when they're
+ # successful.
+ return response
+
+ data = json.loads(response, object_hook=hook)
+ delattr(self, '_request_url')
+ # Update the modhash
+ if isinstance(data, dict) and 'data' in data \
+ and 'modhash' in data['data']:
+ self.modhash = data['data']['modhash']
+ return data
+
+
+class OAuth2Reddit(BaseReddit):
+ """Provides functionality for obtaining reddit OAuth2 access tokens.
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize an OAuth2Reddit instance."""
+ super(OAuth2Reddit, self).__init__(*args, **kwargs)
+ self.client_id = self.config.client_id
+ self.client_secret = self.config.client_secret
+ self.redirect_uri = self.config.redirect_uri
+
+ def _handle_oauth_request(self, data):
+ auth = (self.client_id, self.client_secret)
+ url = self.config['access_token_url']
+ response = self._request(url, auth=auth, data=data, raw_response=True)
+ if not response.ok:
+ msg = 'Unexpected OAuthReturn: {0}'.format(response.status_code)
+ raise errors.OAuthException(msg, url)
+ retval = response.json()
+ if 'error' in retval:
+ error = retval['error']
+ if error == 'invalid_grant':
+ raise errors.OAuthInvalidGrant(error, url)
+ raise errors.OAuthException(retval['error'], url)
+ return retval
+
+ @decorators.require_oauth
+ def get_access_information(self, code):
+ """Return the access information for an OAuth2 authorization grant.
+
+ :param code: the code received in the request from the OAuth2 server
+ :returns: A dictionary with the key/value pairs for ``access_token``,
+ ``refresh_token`` and ``scope``. The ``refresh_token`` value will
+ be None when the OAuth2 grant is not refreshable. The ``scope``
+ value will be a set containing the scopes the tokens are valid for.
+
+ """
+ if self.config.grant_type == 'password':
+ data = {'grant_type': 'password',
+ 'username': self.config.user,
+ 'password': self.config.pswd}
+ else:
+ data = {'code': code, 'grant_type': 'authorization_code',
+ 'redirect_uri': self.redirect_uri}
+ retval = self._handle_oauth_request(data)
+ return {'access_token': retval['access_token'],
+ 'refresh_token': retval.get('refresh_token'),
+ 'scope': set(retval['scope'].split(' '))}
+
+ @decorators.require_oauth
+ def get_authorize_url(self, state, scope='identity', refreshable=False):
+ """Return the URL to send the user to for OAuth2 authorization.
+
+ :param state: a unique string of your choice that represents this
+ individual client
+ :param scope: the reddit scope to ask permissions for. Multiple scopes
+ can be enabled by passing in a container of strings.
+ :param refreshable: when True, a permanent "refreshable" token is
+ issued
+
+ """
+ params = {'client_id': self.client_id, 'response_type': 'code',
+ 'redirect_uri': self.redirect_uri, 'state': state,
+ 'scope': _to_reddit_list(scope)}
+ params['duration'] = 'permanent' if refreshable else 'temporary'
+ request = Request('GET', self.config['authorize'], params=params)
+ return request.prepare().url
+
+ @property
+ def has_oauth_app_info(self):
+ """Return True when OAuth credentials are associated with the instance.
+
+ The necessary credentials are: ``client_id``, ``client_secret`` and
+ ``redirect_uri``.
+
+ """
+ return all((self.client_id is not None,
+ self.client_secret is not None,
+ self.redirect_uri is not None))
+
+ @decorators.require_oauth
+ def refresh_access_information(self, refresh_token):
+ """Return updated access information for an OAuth2 authorization grant.
+
+ :param refresh_token: the refresh token used to obtain the updated
+ information
+ :returns: A dictionary with the key/value pairs for access_token,
+        refresh_token and scope. The refresh_token value will be None when
+ the OAuth2 grant is not refreshable. The scope value will be a set
+ containing the scopes the tokens are valid for.
+
+ Password grants aren't refreshable, so use `get_access_information()`
+ again, instead.
+ """
+ if self.config.grant_type == 'password':
+ data = {'grant_type': 'password',
+ 'username': self.config.user,
+ 'password': self.config.pswd}
+ else:
+ data = {'grant_type': 'refresh_token',
+ 'redirect_uri': self.redirect_uri,
+ 'refresh_token': refresh_token}
+ retval = self._handle_oauth_request(data)
+ return {'access_token': retval['access_token'],
+ 'refresh_token': refresh_token,
+ 'scope': set(retval['scope'].split(' '))}
+
+ def set_oauth_app_info(self, client_id, client_secret, redirect_uri):
+ """Set the app information to use with OAuth2.
+
+ This function need only be called if your praw.ini site configuration
+ does not already contain the necessary information.
+
+ Go to https://www.reddit.com/prefs/apps/ to discover the appropriate
+ values for your application.
+
+ :param client_id: the client_id of your application
+ :param client_secret: the client_secret of your application
+ :param redirect_uri: the redirect_uri of your application
+
+ """
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.redirect_uri = redirect_uri
+
+
+class UnauthenticatedReddit(BaseReddit):
+ """This mixin provides bindings for basic functions of reddit's API.
+
+ None of these functions require authenticated access to reddit's API.
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize an UnauthenticatedReddit instance."""
+ super(UnauthenticatedReddit, self).__init__(*args, **kwargs)
+ # initialize to 1 instead of 0, because 0 does not reliably make
+ # new requests.
+ self._unique_count = 1
+
+ def create_redditor(self, user_name, password, email=''):
+ """Register a new user.
+
+ :returns: The json response from the server.
+
+ """
+ data = {'email': email,
+ 'passwd': password,
+ 'passwd2': password,
+ 'user': user_name}
+ return self.request_json(self.config['register'], data=data)
+
+ def default_subreddits(self, *args, **kwargs):
+ """Return a get_content generator for the default subreddits.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ url = self.config['default_subreddits']
+ return self.get_content(url, *args, **kwargs)
+
+ @decorators.restrict_access(scope='read')
+ def get_comments(self, subreddit, gilded_only=False, *args, **kwargs):
+ """Return a get_content generator for comments in the given subreddit.
+
+ :param gilded_only: If True only return gilded comments.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ key = 'sub_comments_gilded' if gilded_only else 'subreddit_comments'
+ url = self.config[key].format(subreddit=six.text_type(subreddit))
+ return self.get_content(url, *args, **kwargs)
+
+ @decorators.restrict_access(scope='read')
+ def get_controversial(self, *args, **kwargs):
+ """Return a get_content generator for controversial submissions.
+
+ Corresponds to submissions provided by
+ ``https://www.reddit.com/controversial/`` for the session.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ return self.get_content(self.config['controversial'], *args, **kwargs)
+
+ @decorators.restrict_access(scope='read')
+ def get_domain_listing(self, domain, sort='hot', period=None, *args,
+ **kwargs):
+ """Return a get_content generator for submissions by domain.
+
+ Corresponds to the submissions provided by
+ ``https://www.reddit.com/domain/{domain}``.
+
+ :param domain: The domain to generate a submission listing for.
+ :param sort: When provided must be one of 'hot', 'new', 'rising',
+ 'controversial', or 'top'. Defaults to 'hot'.
+ :param period: When sort is either 'controversial', or 'top' the period
+ can be either None (for account default), 'all', 'year', 'month',
+ 'week', 'day', or 'hour'.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ # Verify arguments
+ if sort not in ('controversial', 'hot', 'new', 'rising', 'top'):
+ raise TypeError('Invalid sort parameter.')
+ if period not in (None, 'all', 'day', 'hour', 'month', 'week', 'year'):
+ raise TypeError('Invalid period parameter.')
+ if sort not in ('controversial', 'top') and period:
+ raise TypeError('Period cannot be set for that sort argument.')
+
+ url = self.config['domain'].format(domain=domain)
+ if sort != 'hot':
+ url += sort
+ if period: # Set or overwrite params 't' parameter
+ kwargs.setdefault('params', {})['t'] = period
+ return self.get_content(url, *args, **kwargs)
+
+ @decorators.restrict_access(scope='modflair')
+ def get_flair(self, subreddit, redditor, **params):
+ """Return the flair for a user on the given subreddit.
+
+ :param subreddit: Can be either a Subreddit object or the name of a
+ subreddit.
+ :param redditor: Can be either a Redditor object or the name of a
+ redditor.
+ :returns: None if the user doesn't exist, otherwise a dictionary
+ containing the keys `flair_css_class`, `flair_text`, and `user`.
+
+ """
+ name = six.text_type(redditor)
+ params.update(name=name)
+ url = self.config['flairlist'].format(
+ subreddit=six.text_type(subreddit))
+ data = self.request_json(url, params=params)
+ if not data['users'] or \
+ data['users'][0]['user'].lower() != name.lower():
+ return None
+ return data['users'][0]
+
+ @decorators.restrict_access(scope='read')
+ def get_front_page(self, *args, **kwargs):
+ """Return a get_content generator for the front page submissions.
+
+ Corresponds to the submissions provided by ``https://www.reddit.com/``
+ for the session.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ return self.get_content(self.config['reddit_url'], *args, **kwargs)
+
+ @decorators.restrict_access(scope='read', generator_called=True)
+ def get_info(self, url=None, thing_id=None, *args, **kwargs):
+ """Look up existing items by thing_id (fullname) or url.
+
+ :param url: A url to lookup.
+ :param thing_id: A single thing_id, or a list of thing_ids. A thing_id
+ can be any one of Comment (``t1_``), Link (``t3_``), or Subreddit
+ (``t5_``) to lookup by fullname.
+ :returns: When a single ``thing_id`` is provided, return the
+ corresponding thing object, or ``None`` if not found. When a list
+ of ``thing_id``s or a ``url`` is provided return a list of thing
+ objects (up to ``limit``). ``None`` is returned if all of the
+ thing_ids or the URL is invalid.
+
+ The additional parameters are passed into :meth:`.get_content` after
+ the `params` parameter is extracted and used to update the dictionary
+ of url parameters this function sends. Note: the `url` parameter
+ cannot be altered.
+
+ Also, if using thing_id, the `limit` parameter passed to
+ :meth:`.get_content` is used to slice the list of retrieved things
+ before returning it to the user, for when `limit > 100` and
+ `(limit % 100) > 0`, to ensure a maximum of `limit` things are
+ returned.
+
+ """
+ if bool(url) == bool(thing_id):
+ raise TypeError('Only one of url or thing_id is required!')
+
+ # In these cases, we will have a list of things to return.
+ # Otherwise, it will just be one item.
+ if isinstance(thing_id, six.string_types) and ',' in thing_id:
+ thing_id = thing_id.split(',')
+ return_list = bool(url) or not isinstance(thing_id, six.string_types)
+
+ if url:
+ param_groups = [{'url': url}]
+ else:
+ if isinstance(thing_id, six.string_types):
+ thing_id = [thing_id]
+ id_chunks = chunk_sequence(thing_id, 100)
+ param_groups = [{'id': ','.join(id_chunk)} for
+ id_chunk in id_chunks]
+
+ items = []
+ update_with = kwargs.pop('params', {})
+ for param_group in param_groups:
+ param_group.update(update_with)
+ kwargs['params'] = param_group
+ chunk = self.get_content(self.config['info'], *args, **kwargs)
+ items.extend(list(chunk))
+
+ # if using ids, manually set the limit
+ if kwargs.get('limit'):
+ items = items[:kwargs['limit']]
+
+ if return_list:
+ return items if items else None
+ elif items:
+ return items[0]
+ else:
+ return None
+
+ @decorators.restrict_access(scope='read')
+ def get_moderators(self, subreddit, **kwargs):
+ """Return the list of moderators for the given subreddit."""
+ url = self.config['moderators'].format(
+ subreddit=six.text_type(subreddit))
+ return self.request_json(url, **kwargs)
+
+ @decorators.restrict_access(scope='read')
+ def get_new(self, *args, **kwargs):
+ """Return a get_content generator for new submissions.
+
+ Corresponds to the submissions provided by
+ ``https://www.reddit.com/new/`` for the session.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ return self.get_content(self.config['new'], *args, **kwargs)
+
+ def get_new_subreddits(self, *args, **kwargs):
+ """Return a get_content generator for the newest subreddits.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ url = self.config['new_subreddits']
+ return self.get_content(url, *args, **kwargs)
+
+ def get_popular_subreddits(self, *args, **kwargs):
+ """Return a get_content generator for the most active subreddits.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ url = self.config['popular_subreddits']
+ return self.get_content(url, *args, **kwargs)
+
+ def get_random_subreddit(self, nsfw=False):
+ """Return a random Subreddit object.
+
+ :param nsfw: When true, return a random NSFW Subreddit object. Calling
+ in this manner will set the 'over18' cookie for the duration of the
+ PRAW session.
+
+ """
+ path = 'random'
+ if nsfw:
+ self.http.cookies.set('over18', '1')
+ path = 'randnsfw'
+ url = self.config['subreddit'].format(subreddit=path)
+ response = self._request(url, params={'unique': self._unique_count},
+ raw_response=True)
+ self._unique_count += 1
+ return self.get_subreddit(response.url.rsplit('/', 2)[-2])
+
+ def get_random_submission(self, subreddit='all'):
+ """Return a random Submission object.
+
+ :param subreddit: Limit the submission to the specified
+ subreddit(s). Default: all
+
+ """
+ url = self.config['subreddit_random'].format(
+ subreddit=six.text_type(subreddit))
+ try:
+ item = self.request_json(url,
+ params={'unique': self._unique_count})
+ self._unique_count += 1 # Avoid network-level caching
+ return objects.Submission.from_json(item)
+ except errors.RedirectException as exc:
+ self._unique_count += 1
+ return self.get_submission(exc.response_url)
+ raise errors.ClientException('Expected exception not raised.')
+
+ def get_redditor(self, user_name, *args, **kwargs):
+ """Return a Redditor instance for the user_name specified.
+
+ The additional parameters are passed directly into the
+ :class:`.Redditor` constructor.
+
+ """
+ return objects.Redditor(self, user_name, *args, **kwargs)
+
+ @decorators.restrict_access(scope='read')
+ def get_rising(self, *args, **kwargs):
+ """Return a get_content generator for rising submissions.
+
+ Corresponds to the submissions provided by
+ ``https://www.reddit.com/rising/`` for the session.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ return self.get_content(self.config['rising'], *args, **kwargs)
+
+ @decorators.restrict_access(scope='read')
+ def get_rules(self, subreddit, bottom=False):
+ """Return the json dictionary containing rules for a subreddit.
+
+ :param subreddit: The subreddit whose rules we will return.
+
+ """
+ url = self.config['rules'].format(subreddit=six.text_type(subreddit))
+ return self.request_json(url)
+
+ @decorators.restrict_access(scope='read')
+ def get_sticky(self, subreddit, bottom=False):
+ """Return a Submission object for the sticky of the subreddit.
+
+ :param bottom: Get the top or bottom sticky. If the subreddit has only
+ a single sticky, it is considered the top one.
+
+ """
+ url = self.config['sticky'].format(subreddit=six.text_type(subreddit))
+ param = {'num': 2} if bottom else None
+ return objects.Submission.from_json(self.request_json(url,
+ params=param))
+
+ def get_submission(self, url=None, submission_id=None, comment_limit=0,
+ comment_sort=None, params=None):
+ """Return a Submission object for the given url or submission_id.
+
+ :param comment_limit: The desired number of comments to fetch. If <= 0
+ fetch the default number for the session's user. If None, fetch the
+ maximum possible.
+ :param comment_sort: The sort order for retrieved comments. When None
+ use the default for the session's user.
+ :param params: Dictionary containing extra GET data to put in the url.
+
+ """
+ if bool(url) == bool(submission_id):
+ raise TypeError('One (and only one) of id or url is required!')
+ if submission_id:
+ url = urljoin(self.config['comments'], submission_id)
+ return objects.Submission.from_url(self, url,
+ comment_limit=comment_limit,
+ comment_sort=comment_sort,
+ params=params)
+
+ def get_submissions(self, fullnames, *args, **kwargs):
+ """Generate Submission objects for each item provided in `fullnames`.
+
+ A submission fullname looks like `t3_<base36_id>`. Submissions are
+ yielded in the same order they appear in `fullnames`.
+
+ Up to 100 items are batched at a time -- this happens transparently.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` and `limit` parameters cannot be
+ altered.
+
+ """
+ fullnames = fullnames[:]
+ while fullnames:
+ cur = fullnames[:100]
+ fullnames[:100] = []
+ url = self.config['by_id'] + ','.join(cur)
+ for item in self.get_content(url, limit=len(cur), *args, **kwargs):
+ yield item
+
+ def get_subreddit(self, subreddit_name, *args, **kwargs):
+ """Return a Subreddit object for the subreddit_name specified.
+
+ The additional parameters are passed directly into the
+ :class:`.Subreddit` constructor.
+
+ """
+ sr_name_lower = subreddit_name.lower()
+ if sr_name_lower == 'random':
+ return self.get_random_subreddit()
+ elif sr_name_lower == 'randnsfw':
+ return self.get_random_subreddit(nsfw=True)
+ return objects.Subreddit(self, subreddit_name, *args, **kwargs)
+
+ def get_subreddit_recommendations(self, subreddits, omit=None):
+ """Return a list of recommended subreddits as Subreddit objects.
+
+ Subreddits with activity less than a certain threshold, will not have
+ any recommendations due to lack of data.
+
+ :param subreddits: A list of subreddits (either names or Subreddit
+ objects) to base the recommendations on.
+ :param omit: A list of subreddits (either names or Subreddit
+ objects) that will be filtered out of the result.
+
+ """
+ params = {'omit': _to_reddit_list(omit or [])}
+ url = self.config['sub_recommendations'].format(
+ subreddits=_to_reddit_list(subreddits))
+ result = self.request_json(url, params=params)
+ return [objects.Subreddit(self, sub['sr_name']) for sub in result]
+
+ @decorators.restrict_access(scope='read')
+ def get_top(self, *args, **kwargs):
+ """Return a get_content generator for top submissions.
+
+ Corresponds to the submissions provided by
+ ``https://www.reddit.com/top/`` for the session.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ return self.get_content(self.config['top'], *args, **kwargs)
+
+ # There exists a `modtraffic` scope, but it is unused.
+ @decorators.restrict_access(scope='modconfig')
+ def get_traffic(self, subreddit):
+ """Return the json dictionary containing traffic stats for a subreddit.
+
+ :param subreddit: The subreddit whose /about/traffic page we will
+ collect.
+
+ """
+ url = self.config['subreddit_traffic'].format(
+ subreddit=six.text_type(subreddit))
+ return self.request_json(url)
+
+ @decorators.restrict_access(scope='wikiread', login=False)
+ def get_wiki_page(self, subreddit, page):
+ """Return a WikiPage object for the subreddit and page provided."""
+ return objects.WikiPage(self, six.text_type(subreddit), page.lower())
+
+ @decorators.restrict_access(scope='wikiread', login=False)
+ def get_wiki_pages(self, subreddit):
+ """Return a list of WikiPage objects for the subreddit."""
+ url = self.config['wiki_pages'].format(
+ subreddit=six.text_type(subreddit))
+ return self.request_json(url)
+
+ def is_username_available(self, username):
+ """Return True if username is valid and available, otherwise False."""
+ params = {'user': username}
+ try:
+ result = self.request_json(self.config['username_available'],
+ params=params)
+ except errors.BadUsername:
+ return False
+ return result
+
+ def search(self, query, subreddit=None, sort=None, syntax=None,
+ period=None, *args, **kwargs):
+ """Return a generator for submissions that match the search query.
+
+ :param query: The query string to search for. If query is a URL only
+ submissions which link to that URL will be returned.
+ :param subreddit: Limit search results to the subreddit if provided.
+ :param sort: The sort order of the results.
+ :param syntax: The syntax of the search query.
+ :param period: The time period of the results.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ See https://www.reddit.com/wiki/search for more information on how to
+ build a search query.
+
+ """
+ params = {'q': query}
+ if 'params' in kwargs:
+ params.update(kwargs['params'])
+ kwargs.pop('params')
+ if sort:
+ params['sort'] = sort
+ if syntax:
+ params['syntax'] = syntax
+ if period:
+ params['t'] = period
+ if subreddit:
+ params['restrict_sr'] = 'on'
+ subreddit = six.text_type(subreddit)
+ else:
+ subreddit = 'all'
+ url = self.config['search'].format(subreddit=subreddit)
+
+ depth = 2
+ while depth > 0:
+ depth -= 1
+ try:
+ for item in self.get_content(url, params=params, *args,
+ **kwargs):
+ yield item
+ break
+ except errors.RedirectException as exc:
+ parsed = urlparse(exc.response_url)
+ params = dict((k, ",".join(v)) for k, v in
+ parse_qs(parsed.query).items())
+ url = urlunparse(parsed[:3] + ("", "", ""))
+ # Handle redirects from URL searches
+ if 'already_submitted' in params:
+ yield self.get_submission(url)
+ break
+
+ def search_reddit_names(self, query):
+ """Return subreddits whose display name contains the query."""
+ data = {'query': query}
+ results = self.request_json(self.config['search_reddit_names'],
+ data=data)
+ return [self.get_subreddit(name) for name in results['names']]
+
+
+class AuthenticatedReddit(OAuth2Reddit, UnauthenticatedReddit):
+ """This class adds the methods necessary for authenticating with reddit.
+
+ Authentication can either be login based
+ (through :meth:`~praw.__init__.AuthenticatedReddit.login`), or OAuth2 based
+ (via :meth:`~praw.__init__.AuthenticatedReddit.set_access_credentials`).
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize an AuthenticatedReddit instance."""
+ super(AuthenticatedReddit, self).__init__(*args, **kwargs)
+ # Add variable to distinguish between authentication type
+ # * None means unauthenticated
+ # * True mean login authenticated
+ # * set(...) means OAuth authenticated with the scopes in the set
+ self._authentication = None
+ self.access_token = None
+ self.refresh_token = self.config.refresh_token or None
+ self.user = None
+
+ def __str__(self):
+ """Return a string representation of the AuthenticatedReddit."""
+ if isinstance(self._authentication, set):
+ return 'OAuth2 reddit session (scopes: {0})'.format(
+ ', '.join(self._authentication))
+ elif self._authentication:
+ return 'LoggedIn reddit session (user: {0})'.format(self.user)
+ else:
+ return 'Unauthenticated reddit session'
+
+ def _url_update(self, url):
+ # When getting posts from a multireddit owned by the authenticated
+ # Redditor, we are redirected to me/m/multi/. Handle that now
+ # instead of catching later.
+ if re.search('user/.*/m/.*', url):
+ redditor = url.split('/')[-4]
+ if self.user and self.user.name.lower() == redditor.lower():
+ url = url.replace("user/"+redditor, 'me')
+ return url
+
+ @decorators.restrict_access(scope='modself', mod=False)
+ def accept_moderator_invite(self, subreddit):
+ """Accept a moderator invite to the given subreddit.
+
+ Callable upon an instance of Subreddit with no arguments.
+
+ :returns: The json response from the server.
+
+ """
+ data = {'r': six.text_type(subreddit)}
+ # Clear moderated subreddits and cache
+ self.user._mod_subs = None # pylint: disable=W0212
+ self.evict(self.config['my_mod_subreddits'])
+ return self.request_json(self.config['accept_mod_invite'], data=data)
+
+ def clear_authentication(self):
+ """Clear any existing authentication on the reddit object.
+
+ This function is implicitly called on `login` and
+ `set_access_credentials`.
+
+ """
+ self._authentication = None
+ self.access_token = None
+ self.refresh_token = None
+ self.http.cookies.clear()
+ self.user = None
+
+ def delete(self, password, message=""):
+ """Delete the currently authenticated redditor.
+
+ WARNING!
+
+ This action is IRREVERSIBLE. Use only if you're okay with NEVER
+ accessing this reddit account again.
+
+ :param password: password for currently authenticated account
+ :param message: optional 'reason for deletion' message.
+ :returns: json response from the server.
+
+ """
+ data = {'user': self.user.name,
+ 'passwd': password,
+ 'delete_message': message,
+ 'confirm': True}
+ return self.request_json(self.config['delete_redditor'], data=data)
+
+ @decorators.restrict_access(scope='wikiedit')
+ def edit_wiki_page(self, subreddit, page, content, reason=''):
+ """Create or edit a wiki page with title `page` for `subreddit`.
+
+ :returns: The json response from the server.
+
+ """
+ data = {'content': content,
+ 'page': page,
+ 'r': six.text_type(subreddit),
+ 'reason': reason}
+ evict = self.config['wiki_page'].format(
+ subreddit=six.text_type(subreddit), page=page.lower())
+ self.evict(evict)
+ return self.request_json(self.config['wiki_edit'], data=data)
+
+ def get_access_information(self, code, # pylint: disable=W0221
+ update_session=True):
+ """Return the access information for an OAuth2 authorization grant.
+
+ :param code: the code received in the request from the OAuth2 server
+ :param update_session: Update the current session with the retrieved
+ token(s).
+ :returns: A dictionary with the key/value pairs for access_token,
+ refresh_token and scope. The refresh_token value will be None when
+ the OAuth2 grant is not refreshable.
+
+ """
+ retval = super(AuthenticatedReddit, self).get_access_information(code)
+ if update_session:
+ self.set_access_credentials(**retval)
+ return retval
+
+ @decorators.restrict_access(scope='flair')
+ def get_flair_choices(self, subreddit, link=None):
+ """Return available flair choices and current flair.
+
+ :param link: If link is given, return the flair options for this
+ submission. Not normally given directly, but instead set by calling
+ the flair_choices method for Submission objects.
+ Use the default for the session's user.
+
+ :returns: A dictionary with 2 keys. 'current' containing current flair
+ settings for the authenticated user and 'choices' containing a list
+ of possible flair choices.
+
+ """
+ data = {'r': six.text_type(subreddit), 'link': link}
+ return self.request_json(self.config['flairselector'], data=data)
+
+ @decorators.restrict_access(scope='read', login=True)
+ def get_friends(self, **params):
+ """Return a UserList of Redditors with whom the user is friends."""
+ url = self.config['friends']
+ return self.request_json(url, params=params)[0]
+
+ @decorators.restrict_access(scope='identity', oauth_only=True)
+ def get_me(self):
+ """Return a LoggedInRedditor object.
+
+ Note: This function is only intended to be used with an 'identity'
+ providing OAuth2 grant.
+ """
+ response = self.request_json(self.config['me'])
+ user = objects.Redditor(self, response['name'], response)
+ user.__class__ = objects.LoggedInRedditor
+ return user
+
+ def has_scope(self, scope):
+ """Return True if OAuth2 authorized for the passed in scope(s)."""
+ if not self.is_oauth_session():
+ return False
+ if '*' in self._authentication:
+ return True
+ if isinstance(scope, six.string_types):
+ scope = [scope]
+ return all(s in self._authentication for s in scope)
+
+ def is_logged_in(self):
+ """Return True when the session is authenticated via username/password.
+
+ Username and passwords are provided via
+ :meth:`~praw.__init__.AuthenticatedReddit.login`.
+
+ """
+ return self._authentication is True
+
+ def is_oauth_session(self):
+ """Return True when the current session is an OAuth2 session."""
+ return isinstance(self._authentication, set)
+
+ @decorators.deprecated('reddit intends to disable password-based '
+ 'authentication of API clients sometime in the '
+ 'near future. As a result this method will be '
+ 'removed in a future major version of PRAW.\n\n'
+ 'For more information please see:\n\n'
+ '* Original reddit deprecation notice: '
+ 'https://www.reddit.com/comments/2ujhkr/\n\n'
+ '* Updated delayed deprecation notice: '
+ 'https://www.reddit.com/comments/37e2mv/\n\n'
+ 'Pass ``disable_warning=True`` to ``login`` to '
+ 'disable this warning.')
+ def login(self, username=None, password=None, **kwargs):
+ """Login to a reddit site.
+
+ **DEPRECATED**. Will be removed in a future version of PRAW.
+
+ https://www.reddit.com/comments/2ujhkr/
+ https://www.reddit.com/comments/37e2mv/
+
+ Look for username first in parameter, then praw.ini and finally if both
+ were empty get it from stdin. Look for password in parameter, then
+ praw.ini (but only if username matches that in praw.ini) and finally
+ if they both are empty get it with getpass. Add the variables ``user``
+ (username) and ``pswd`` (password) to your praw.ini file to allow for
+ auto-login.
+
+ A successful login will overwrite any existing authentication.
+
+ """
+ if password and not username:
+ raise Exception('Username must be provided when password is.')
+ user = username or self.config.user
+ if not user:
+ sys.stdout.write('Username: ')
+ sys.stdout.flush()
+ user = sys.stdin.readline().strip()
+ pswd = None
+ else:
+ pswd = password or self.config.pswd
+ if not pswd:
+ import getpass
+ pswd = getpass.getpass('Password for {0}: '.format(user)
+ .encode('ascii', 'ignore'))
+
+ data = {'passwd': pswd,
+ 'user': user}
+ self.clear_authentication()
+ self.request_json(self.config['login'], data=data)
+ # Update authentication settings
+ self._authentication = True
+ self.user = self.get_redditor(user)
+ self.user.__class__ = objects.LoggedInRedditor
+
+ def refresh_access_information(self, # pylint: disable=W0221
+ refresh_token=None,
+ update_session=True):
+ """Return updated access information for an OAuth2 authorization grant.
+
+ :param refresh_token: The refresh token used to obtain the updated
+ information. When not provided, use the stored refresh_token.
+ :param update_session: Update the session with the returned data.
+ :returns: A dictionary with the key/value pairs for ``access_token``,
+ ``refresh_token`` and ``scope``. The ``refresh_token`` value will
+ be None when the OAuth2 grant is not refreshable. The ``scope``
+ value will be a set containing the scopes the tokens are valid for.
+
+ """
+ response = super(AuthenticatedReddit, self).refresh_access_information(
+ refresh_token=refresh_token or self.refresh_token)
+ if update_session:
+ self.set_access_credentials(**response)
+ return response
+
+ @decorators.restrict_access(scope='flair')
+ def select_flair(self, item, flair_template_id='', flair_text=''):
+ """Select user flair or link flair on subreddits.
+
+ This can only be used for assigning your own name flair or link flair
+ on your own submissions. For assigning other's flairs using moderator
+ access, see :meth:`~praw.__init__.ModFlairMixin.set_flair`.
+
+ :param item: A string, Subreddit object (for user flair), or
+ Submission object (for link flair). If ``item`` is a string it
+ will be treated as the name of a Subreddit.
+ :param flair_template_id: The id for the desired flair template. Use
+ the :meth:`~praw.objects.Subreddit.get_flair_choices` and
+ :meth:`~praw.objects.Submission.get_flair_choices` methods to find
+ the ids for the available user and link flair choices.
+ :param flair_text: A string containing the custom flair text.
+ Used on subreddits that allow it.
+
+ :returns: The json response from the server.
+
+ """
+ data = {'flair_template_id': flair_template_id or '',
+ 'text': flair_text or ''}
+ if isinstance(item, objects.Submission):
+ # Link flair
+ data['link'] = item.fullname
+ evict = item.permalink
+ else:
+ # User flair
+ data['name'] = self.user.name
+ data['r'] = six.text_type(item)
+ evict = self.config['flairlist'].format(
+ subreddit=six.text_type(item))
+ response = self.request_json(self.config['select_flair'], data=data)
+ self.evict(evict)
+ return response
+
+ @decorators.require_oauth
+ def set_access_credentials(self, scope, access_token, refresh_token=None,
+ update_user=True):
+ """Set the credentials used for OAuth2 authentication.
+
+ Calling this function will overwrite any currently existing access
+ credentials.
+
+ :param scope: A set of reddit scopes the tokens provide access to
+ :param access_token: the access token of the authentication
+ :param refresh_token: the refresh token of the authentication
+ :param update_user: Whether or not to set the user attribute for
+ identity scopes
+
+ """
+ if isinstance(scope, (list, tuple)):
+ scope = set(scope)
+ elif isinstance(scope, six.string_types):
+ scope = set(scope.split())
+ if not isinstance(scope, set):
+ raise TypeError('`scope` parameter must be a set')
+ self.clear_authentication()
+ # Update authentication settings
+ self._authentication = scope
+ self.access_token = access_token
+ self.refresh_token = refresh_token
+ # Update the user object
+ if update_user and ('identity' in scope or '*' in scope):
+ self.user = self.get_me()
+
+
+class ModConfigMixin(AuthenticatedReddit):
+ """Adds methods requiring the 'modconfig' scope (or mod access).
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ @decorators.restrict_access(scope='modconfig', mod=False)
+ @decorators.require_captcha
+ def create_subreddit(self, name, title, description='', language='en',
+ subreddit_type='public', content_options='any',
+ over_18=False, default_set=True, show_media=False,
+ domain='', wikimode='disabled', captcha=None,
+ **kwargs):
+ """Create a new subreddit.
+
+ :returns: The json response from the server.
+
+ This function may result in a captcha challenge. PRAW will
+ automatically prompt you for a response. See :ref:`handling-captchas`
+ if you want to manually handle captchas.
+
+ """
+ data = {'name': name,
+ 'title': title,
+ 'description': description,
+ 'lang': language,
+ 'type': subreddit_type,
+ 'link_type': content_options,
+ 'over_18': 'on' if over_18 else 'off',
+ 'allow_top': 'on' if default_set else 'off',
+ 'show_media': 'on' if show_media else 'off',
+ 'wikimode': wikimode,
+ 'domain': domain}
+ if captcha:
+ data.update(captcha)
+ return self.request_json(self.config['site_admin'], data=data)
+
+ @decorators.restrict_access(scope='modconfig')
+ def delete_image(self, subreddit, name=None, header=False):
+ """Delete an image from the subreddit.
+
+ :param name: The name of the image if removing a CSS image.
+ :param header: When true, delete the subreddit header.
+ :returns: The json response from the server.
+
+ """
+ subreddit = six.text_type(subreddit)
+ if name and header:
+ raise TypeError('Both name and header cannot be set.')
+ elif name:
+ data = {'img_name': name}
+ url = self.config['delete_sr_image']
+ self.evict(self.config['stylesheet'].format(subreddit=subreddit))
+ else:
+ data = True
+ url = self.config['delete_sr_header']
+ url = url.format(subreddit=subreddit)
+ return self.request_json(url, data=data)
+
+ @decorators.restrict_access(scope='modconfig')
+ def get_settings(self, subreddit, **params):
+ """Return the settings for the given subreddit."""
+ url = self.config['subreddit_settings'].format(
+ subreddit=six.text_type(subreddit))
+ return self.request_json(url, params=params)['data']
+
    @decorators.restrict_access(scope='modconfig')
    def set_settings(self, subreddit, title, public_description='',
                     description='', language='en', subreddit_type='public',
                     content_options='any', over_18=False, default_set=True,
                     show_media=False, domain='', domain_css=False,
                     domain_sidebar=False, header_hover_text='',
                     wikimode='disabled', wiki_edit_age=30,
                     wiki_edit_karma=100,
                     submit_link_label='', submit_text_label='',
                     exclude_banned_modqueue=False, comment_score_hide_mins=0,
                     public_traffic=False, collapse_deleted_comments=False,
                     spam_comments='low', spam_links='high',
                     spam_selfposts='high', submit_text='',
                     hide_ads=False, suggested_comment_sort='',
                     key_color='',
                     **kwargs):
        """Set the settings for the given subreddit.

        :param subreddit: Must be a subreddit object (its ``fullname``
            attribute is read).
        :returns: The json response from the server.

        Extra keyword arguments are forwarded to the API verbatim (with a
        warning), so new site settings can be used before they are added
        here explicitly.

        """
        # Translate keyword arguments to the field names the site_admin
        # endpoint expects.  Note the renames: default_set -> 'allow_top',
        # language -> 'lang', content_options -> 'link_type',
        # header_hover_text -> 'header-title', subreddit_type -> 'type'.
        data = {'sr': subreddit.fullname,
                'allow_top': default_set,
                'comment_score_hide_mins': comment_score_hide_mins,
                'collapse_deleted_comments': collapse_deleted_comments,
                'description': description,
                'domain': domain or '',
                'domain_css': domain_css,
                'domain_sidebar': domain_sidebar,
                'exclude_banned_modqueue': exclude_banned_modqueue,
                'header-title': header_hover_text or '',
                'hide_ads': hide_ads,
                'key_color': key_color,
                'lang': language,
                'link_type': content_options,
                'over_18': over_18,
                'public_description': public_description,
                'public_traffic': public_traffic,
                'show_media': show_media,
                'submit_link_label': submit_link_label or '',
                'submit_text': submit_text,
                'submit_text_label': submit_text_label or '',
                'suggested_comment_sort': suggested_comment_sort or '',
                'spam_comments': spam_comments,
                'spam_links': spam_links,
                'spam_selfposts': spam_selfposts,
                'title': title,
                'type': subreddit_type,
                'wiki_edit_age': six.text_type(wiki_edit_age),
                'wiki_edit_karma': six.text_type(wiki_edit_karma),
                'wikimode': wikimode}

        if kwargs:
            # Warn so that typo'd setting names do not fail silently.
            msg = 'Extra settings fields: {0}'.format(kwargs.keys())
            warn_explicit(msg, UserWarning, '', 0)
            data.update(kwargs)
        # Invalidate the cached settings for this subreddit before saving.
        evict = self.config['subreddit_settings'].format(
            subreddit=six.text_type(subreddit))
        self.evict(evict)
        return self.request_json(self.config['site_admin'], data=data)
+
+ @decorators.restrict_access(scope='modconfig')
+ def set_stylesheet(self, subreddit, stylesheet):
+ """Set stylesheet for the given subreddit.
+
+ :returns: The json response from the server.
+
+ """
+ subreddit = six.text_type(subreddit)
+ data = {'r': subreddit,
+ 'stylesheet_contents': stylesheet,
+ 'op': 'save'} # Options: save / preview
+ self.evict(self.config['stylesheet'].format(subreddit=subreddit))
+ return self.request_json(self.config['subreddit_css'], data=data)
+
    @decorators.restrict_access(scope='modconfig')
    def upload_image(self, subreddit, image_path, name=None,
                     header=False, upload_as=None):
        """Upload an image to the subreddit.

        :param subreddit: The name or Subreddit object to upload to.
        :param image_path: A path to the jpg or png image you want to upload.
        :param name: The name to provide the image. When None the name will be
            filename less any extension. Mutually exclusive with ``header``.
        :param header: When True, upload the image as the subreddit header.
        :param upload_as: Must be `'jpg'`, `'png'` or `None`. When None, this
            will match the format of the image itself. In all cases where both
            this value and the image format is not png, reddit will also
            convert the image mode to RGBA. reddit optimizes the image
            according to this value.
        :returns: A link to the uploaded image. Raises an exception otherwise.
        :raises: TypeError when both ``name`` and ``header`` are given, or
            ``upload_as`` is not one of the accepted values.

        """
        if name and header:
            raise TypeError('Both name and header cannot be set.')
        if upload_as not in (None, 'png', 'jpg'):
            raise TypeError("upload_as must be 'jpg', 'png', or None.")
        with open(image_path, 'rb') as image:
            # Sniff the on-disk image format unless the caller forced one.
            image_type = upload_as or _image_type(image)
            data = {'r': six.text_type(subreddit), 'img_type': image_type}
            if header:
                data['header'] = 1
            else:
                if not name:
                    # Default to the file's basename without its extension.
                    name = os.path.splitext(os.path.basename(image.name))[0]
                data['name'] = name

            # Multipart upload of the open file handle.
            # NOTE(review): retry_on_error=False — presumably because the
            # file stream cannot be rewound for a resend; confirm upstream.
            response = json.loads(self._request(
                self.config['upload_image'], data=data, files={'file': image},
                method=to_native_string('POST'), retry_on_error=False))

        if response['errors']:
            raise errors.APIException(response['errors'], None)
        return response['img_src']
+
+ def update_settings(self, subreddit, **kwargs):
+ """Update only the given settings for the given subreddit.
+
+ The settings to update must be given by keyword and match one of the
+ parameter names in `set_settings`.
+
+ :returns: The json response from the server.
+
+ """
+ settings = self.get_settings(subreddit)
+ settings.update(kwargs)
+ del settings['subreddit_id']
+ return self.set_settings(subreddit, **settings)
+
+
class ModFlairMixin(AuthenticatedReddit):
    """Adds methods requiring the 'modflair' scope (or mod access).

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    @decorators.restrict_access(scope='modflair')
    def add_flair_template(self, subreddit, text='', css_class='',
                           text_editable=False, is_link=False):
        """Add a flair template to the given subreddit.

        :param subreddit: The name or Subreddit object to add the template to.
        :param text: The template's flair text.
        :param css_class: The template's CSS class.
        :param text_editable: When True, users may edit the template's text.
        :param is_link: When True, add a link (submission) flair template
            instead of a user flair template.
        :returns: The json response from the server.

        """
        data = {'r': six.text_type(subreddit),
                'text': text,
                'css_class': css_class,
                'text_editable': six.text_type(text_editable),
                'flair_type': 'LINK_FLAIR' if is_link else 'USER_FLAIR'}
        return self.request_json(self.config['flairtemplate'], data=data)

    @decorators.restrict_access(scope='modflair')
    def clear_flair_templates(self, subreddit, is_link=False):
        """Clear flair templates for the given subreddit.

        :param is_link: When True, clear link (submission) flair templates
            instead of user flair templates.
        :returns: The json response from the server.

        """
        data = {'r': six.text_type(subreddit),
                'flair_type': 'LINK_FLAIR' if is_link else 'USER_FLAIR'}
        return self.request_json(self.config['clearflairtemplates'], data=data)

    @decorators.restrict_access(scope='modflair')
    def configure_flair(self, subreddit, flair_enabled=False,
                        flair_position='right',
                        flair_self_assign=False,
                        link_flair_enabled=False,
                        link_flair_position='left',
                        link_flair_self_assign=False):
        """Configure the flair setting for the given subreddit.

        :returns: The json response from the server.

        """
        # The endpoint expects 'on'/'off' strings rather than booleans.
        flair_enabled = 'on' if flair_enabled else 'off'
        flair_self_assign = 'on' if flair_self_assign else 'off'
        # Disabled link flair is signalled by an empty position string.
        if not link_flair_enabled:
            link_flair_position = ''
        link_flair_self_assign = 'on' if link_flair_self_assign else 'off'
        data = {'r': six.text_type(subreddit),
                'flair_enabled': flair_enabled,
                'flair_position': flair_position,
                'flair_self_assign_enabled': flair_self_assign,
                'link_flair_position': link_flair_position,
                'link_flair_self_assign_enabled': link_flair_self_assign}
        return self.request_json(self.config['flairconfig'], data=data)

    @decorators.restrict_access(scope='modflair')
    def delete_flair(self, subreddit, user):
        """Delete the flair for the given user on the given subreddit.

        :param user: A redditor name or Redditor object.
        :returns: The json response from the server.

        """
        data = {'r': six.text_type(subreddit),
                'name': six.text_type(user)}
        return self.request_json(self.config['deleteflair'], data=data)

    @decorators.restrict_access(scope='modflair')
    def get_flair_list(self, subreddit, *args, **kwargs):
        """Return a get_content generator of flair mappings.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the flair list for.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url`, `root_field`, `thing_field`, and
        `after_field` parameters cannot be altered.

        """
        url = self.config['flairlist'].format(
            subreddit=six.text_type(subreddit))
        # The flairlist response nests its items under 'users' and pages
        # with a 'next' token, unlike the standard listing layout.
        return self.get_content(url, *args, root_field=None,
                                thing_field='users', after_field='next',
                                **kwargs)

    @decorators.restrict_access(scope='modflair')
    def set_flair(self, subreddit, item, flair_text='', flair_css_class=''):
        """Set flair for the user in the given subreddit.

        `item` can be a string, Redditor object, or Submission object.
        If `item` is a string it will be treated as the name of a Redditor.

        This method can only be called by a subreddit moderator with flair
        permissions. To set flair on yourself or your own links use
        :meth:`~praw.__init__.AuthenticatedReddit.select_flair`.

        :returns: The json response from the server.

        """
        data = {'r': six.text_type(subreddit),
                'text': flair_text or '',
                'css_class': flair_css_class or ''}
        # Submissions are flaired by fullname; anything else is treated as
        # a redditor name.  Pick the matching cache entry to invalidate.
        if isinstance(item, objects.Submission):
            data['link'] = item.fullname
            evict = item.permalink
        else:
            data['name'] = six.text_type(item)
            evict = self.config['flairlist'].format(
                subreddit=six.text_type(subreddit))
        response = self.request_json(self.config['flair'], data=data)
        self.evict(evict)
        return response

    @decorators.restrict_access(scope='modflair')
    def set_flair_csv(self, subreddit, flair_mapping):
        """Set flair for a group of users in the given subreddit.

        flair_mapping should be a list of dictionaries with the following keys:
        `user`: the user name,
        `flair_text`: the flair text for the user (optional),
        `flair_css_class`: the flair css class for the user (optional)

        :returns: The json response from the server.

        """
        if not flair_mapping:
            raise errors.ClientException('flair_mapping must be set')
        item_order = ['user', 'flair_text', 'flair_css_class']
        lines = []
        for mapping in flair_mapping:
            if 'user' not in mapping:
                raise errors.ClientException('flair_mapping must '
                                             'contain `user` key')
            lines.append(','.join([mapping.get(x, '') for x in item_order]))
        # The endpoint accepts at most 100 CSV rows per request, so submit
        # the mapping in batches and collect all responses.
        response = []
        while len(lines):
            data = {'r': six.text_type(subreddit),
                    'flair_csv': '\n'.join(lines[:100])}
            response.extend(self.request_json(self.config['flaircsv'],
                                              data=data))
            lines = lines[100:]
        evict = self.config['flairlist'].format(
            subreddit=six.text_type(subreddit))
        self.evict(evict)
        return response
+
+
class ModLogMixin(AuthenticatedReddit):
    """Adds methods requiring the 'modlog' scope (or mod access).

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    @decorators.restrict_access(scope='modlog')
    def get_mod_log(self, subreddit, mod=None, action=None, *args, **kwargs):
        """Return a get_content generator for moderation log items.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the modlog for.
        :param mod: If given, only return the actions made by this moderator.
            Both a moderator name or Redditor object can be used here.
        :param action: If given, only return entries for the specified action.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        # Fold the optional filters into the request's query parameters.
        query = kwargs.setdefault('params', {})
        for key, value in (('mod', mod), ('type', action)):
            if value is not None:
                query[key] = six.text_type(value)
        log_url = self.config['modlog'].format(
            subreddit=six.text_type(subreddit))
        return self.get_content(log_url, *args, **kwargs)
+
+
class ModOnlyMixin(AuthenticatedReddit):
    """Adds methods requiring the logged in moderator access.

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    def _get_userlist(self, url, user_only, *args, **kwargs):
        """Yield users (or user records) from a userlist endpoint.

        :param url: The userlist endpoint to page through.
        :param user_only: When True, yield bare Redditor objects; otherwise
            yield the raw data dict with the Redditor under key 'name'.

        """
        content = self.get_content(url, *args, **kwargs)
        for data in content:
            # fetch=False avoids a network round-trip per user; the id is
            # the portion of the fullname after the 'tN_' type prefix.
            user = objects.Redditor(self, data['name'], fetch=False)
            user.id = data['id'].split('_')[1]
            if user_only:
                yield user
            else:
                data['name'] = user
                yield data

    @decorators.restrict_access(scope='read', mod=True)
    def get_banned(self, subreddit, user_only=True, *args, **kwargs):
        """Return a get_content generator of banned users for the subreddit.

        :param subreddit: The subreddit to get the banned user list for.
        :param user_only: When False, the generator yields a dictionary of data
            associated with the server response for that user. In such cases,
            the Redditor will be in key 'name' (default: True).

        """
        url = self.config['banned'].format(subreddit=six.text_type(subreddit))
        return self._get_userlist(url, user_only, *args, **kwargs)

    def get_contributors(self, subreddit, *args, **kwargs):
        """
        Return a get_content generator of contributors for the given subreddit.

        If it's a public subreddit, then authentication as a
        moderator of the subreddit is required. For protected/private
        subreddits only access is required. See issue #246.

        """
        # pylint: disable=W0613
        def get_contributors_helper(self, subreddit):
            # It is necessary to have the 'self' argument as it's needed in
            # restrict_access to determine what class the decorator is
            # operating on.
            url = self.config['contributors'].format(
                subreddit=six.text_type(subreddit))
            return self._get_userlist(url, user_only=True, *args, **kwargs)

        # The mod=True requirement only applies to public subreddits, so the
        # restrict_access decorator is applied conditionally at call time
        # rather than statically at definition time.
        if self.is_logged_in():
            if not isinstance(subreddit, objects.Subreddit):
                subreddit = self.get_subreddit(subreddit)
            if subreddit.subreddit_type == "public":
                decorator = decorators.restrict_access(scope='read', mod=True)
                return decorator(get_contributors_helper)(self, subreddit)
        return get_contributors_helper(self, subreddit)

    @decorators.restrict_access(scope='read', mod=True)
    def get_edited(self, subreddit='mod', *args, **kwargs):
        """Return a get_content generator of edited items.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the edited items for. Defaults to `mod` which
            includes items for all the subreddits you moderate.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['edited'].format(subreddit=six.text_type(subreddit))
        return self.get_content(url, *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages', mod=True)
    def get_mod_mail(self, subreddit='mod', *args, **kwargs):
        """Return a get_content generator for moderator messages.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the moderator mail from. Defaults to `mod`
            which includes items for all the subreddits you moderate.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['mod_mail'].format(
            subreddit=six.text_type(subreddit))
        return self.get_content(url, *args, **kwargs)

    @decorators.restrict_access(scope='read', mod=True)
    def get_mod_queue(self, subreddit='mod', *args, **kwargs):
        """Return a get_content generator for the moderator queue.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the modqueue for. Defaults to `mod` which
            includes items for all the subreddits you moderate.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['modqueue'].format(
            subreddit=six.text_type(subreddit))
        return self.get_content(url, *args, **kwargs)

    @decorators.restrict_access(scope='read', mod=True)
    def get_muted(self, subreddit, user_only=True, *args, **kwargs):
        """Return a get_content generator for modmail-muted users.

        :param subreddit: Either a Subreddit object or the name of a subreddit
            to get the list of muted users from.
        :param user_only: When False, yield the raw data dict per user with
            the Redditor under key 'name' (default: True).

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['muted'].format(subreddit=six.text_type(subreddit))
        return self._get_userlist(url, user_only, *args, **kwargs)

    @decorators.restrict_access(scope='read', mod=True)
    def get_reports(self, subreddit='mod', *args, **kwargs):
        """Return a get_content generator of reported items.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the reported items. Defaults to `mod` which
            includes items for all the subreddits you moderate.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['reports'].format(subreddit=six.text_type(subreddit))
        return self.get_content(url, *args, **kwargs)

    @decorators.restrict_access(scope='read', mod=True)
    def get_spam(self, subreddit='mod', *args, **kwargs):
        """Return a get_content generator of spam-filtered items.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the spam-filtered items for. Defaults to `mod`
            which includes items for all the subreddits you moderate.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['spam'].format(subreddit=six.text_type(subreddit))
        return self.get_content(url, *args, **kwargs)

    @decorators.restrict_access('modconfig', mod=False, login=False)
    def get_stylesheet(self, subreddit, **params):
        """Return the stylesheet and images for the given subreddit."""
        url = self.config['stylesheet'].format(
            subreddit=six.text_type(subreddit))
        return self.request_json(url, params=params)['data']

    @decorators.restrict_access(scope='read', mod=True)
    def get_unmoderated(self, subreddit='mod', *args, **kwargs):
        """Return a get_content generator of unmoderated submissions.

        :param subreddit: Either a Subreddit object or the name of the
            subreddit to return the unmoderated submissions for. Defaults to
            `mod` which includes items for all the subreddits you moderate.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        url = self.config['unmoderated'].format(
            subreddit=six.text_type(subreddit))
        return self.get_content(url, *args, **kwargs)

    @decorators.restrict_access(scope='read', mod=True)
    def get_wiki_banned(self, subreddit, *args, **kwargs):
        """Return a get_content generator of users banned from the wiki."""
        url = self.config['wiki_banned'].format(
            subreddit=six.text_type(subreddit))
        return self._get_userlist(url, user_only=True, *args, **kwargs)

    @decorators.restrict_access(scope='read', mod=True)
    def get_wiki_contributors(self, subreddit, *args, **kwargs):
        """Return a get_content generator of wiki contributors.

        The returned users are those who have been approved as a wiki
        contributor by the moderators of the subreddit. Whether or not they've
        actually contributed to the wiki is irrelevant; their approval as wiki
        contributors is all that matters.

        """
        url = self.config['wiki_contributors'].format(
            subreddit=six.text_type(subreddit))
        return self._get_userlist(url, user_only=True, *args, **kwargs)
+
+
class ModSelfMixin(AuthenticatedReddit):
    """Adds methods pertaining to the 'modself' OAuth scope (or login).

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    def leave_contributor(self, subreddit):
        """Abdicate approved submitter status in a subreddit. Use with care.

        :param subreddit: The name of the subreddit to leave `status` from.

        :returns: the json response from the server.
        """
        return self._leave_status(subreddit, self.config['leavecontributor'])

    def leave_moderator(self, subreddit):
        """Abdicate moderator status in a subreddit. Use with care.

        :param subreddit: The name of the subreddit to leave `status` from.

        :returns: the json response from the server.
        """
        # The cached list of moderated subreddits is stale once we leave.
        self.evict(self.config['my_mod_subreddits'])
        return self._leave_status(subreddit, self.config['leavemoderator'])

    @decorators.restrict_access(scope='modself', mod=False)
    def _leave_status(self, subreddit, statusurl):
        """Abdicate status in a subreddit.

        :param subreddit: The name of the subreddit to leave `status` from.
        :param statusurl: The API URL which will be used in the leave request.
            Please use :meth:`leave_contributor` or :meth:`leave_moderator`
            rather than setting this directly.

        :returns: the json response from the server.
        """
        # A name is resolved to a Subreddit object because the endpoint
        # requires the subreddit's fullname, which only the object carries.
        if isinstance(subreddit, six.string_types):
            subreddit = self.get_subreddit(subreddit)
        return self.request_json(statusurl, data={'id': subreddit.fullname})
+
+
class MultiredditMixin(AuthenticatedReddit):
    """Adds methods pertaining to multireddits.

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    # Template for a multireddit path: owner name, then multi name.
    MULTI_PATH = '/user/{0}/m/{1}'

    @decorators.restrict_access(scope='subscribe')
    def copy_multireddit(self, from_redditor, from_name, to_name=None,
                         *args, **kwargs):
        """Copy a multireddit.

        :param from_redditor: The username or Redditor object for the user
            who owns the original multireddit
        :param from_name: The name of the multireddit, belonging to
            from_redditor
        :param to_name: The name to copy the multireddit as. If None, uses
            the name of the original

        The additional parameters are passed directly into
        :meth:`~praw.__init__.BaseReddit.request_json`

        """
        if to_name is None:
            to_name = from_name

        # The copy always lands under the authenticated user's account.
        from_multipath = self.MULTI_PATH.format(from_redditor, from_name)
        to_multipath = self.MULTI_PATH.format(self.user.name, to_name)
        data = {'display_name': to_name,
                'from': from_multipath,
                'to': to_multipath}
        return self.request_json(self.config['multireddit_copy'], data=data,
                                 *args, **kwargs)

    @decorators.restrict_access(scope='subscribe')
    def create_multireddit(self, name, description_md=None, icon_name=None,
                           key_color=None, subreddits=None, visibility=None,
                           weighting_scheme=None, overwrite=False,
                           *args, **kwargs):  # pylint: disable=W0613
        """Create a new multireddit.

        :param name: The name of the new multireddit.
        :param description_md: Optional description for the multireddit,
            formatted in markdown.
        :param icon_name: Optional, choose an icon name from this list: ``art
            and design``, ``ask``, ``books``, ``business``, ``cars``,
            ``comics``, ``cute animals``, ``diy``, ``entertainment``, ``food
            and drink``, ``funny``, ``games``, ``grooming``, ``health``, ``life
            advice``, ``military``, ``models pinup``, ``music``, ``news``,
            ``philosophy``, ``pictures and gifs``, ``science``, ``shopping``,
            ``sports``, ``style``, ``tech``, ``travel``, ``unusual stories``,
            ``video``, or ``None``.
        :param key_color: Optional rgb hex color code of the form `#xxxxxx`.
        :param subreddits: Optional list of subreddit names or Subreddit
            objects to initialize the Multireddit with. You can always
            add more later with
            :meth:`~praw.objects.Multireddit.add_subreddit`.
        :param visibility: Choose a privacy setting from this list:
            ``public``, ``private``, ``hidden``. Defaults to private if blank.
        :param weighting_scheme: Choose a weighting scheme from this list:
            ``classic``, ``fresh``. Defaults to classic if blank.
        :param overwrite: Allow for overwriting / updating multireddits.
            If False, and the multi name already exists, throw 409 error.
            If True, and the multi name already exists, use the given
            properties to update that multi.
            If True, and the multi name does not exist, create it normally.

        :returns: The newly created Multireddit object.

        The additional parameters are passed directly into
        :meth:`~praw.__init__.BaseReddit.request_json`

        """
        url = self.config['multireddit_about'].format(user=self.user.name,
                                                      multi=name)
        if subreddits:
            subreddits = [{'name': six.text_type(sr)} for sr in subreddits]
        # NOTE: the model is built by looking the parameters up via
        # locals(), so these parameter names must not be renamed.
        model = {}
        for key in ('description_md', 'icon_name', 'key_color', 'subreddits',
                    'visibility', 'weighting_scheme'):
            value = locals()[key]
            if value:
                model[key] = value

        # PUT updates an existing multi in place; POST creates a new one.
        method = 'PUT' if overwrite else 'POST'
        return self.request_json(url, data={'model': json.dumps(model)},
                                 method=method, *args, **kwargs)

    @decorators.restrict_access(scope='subscribe')
    def delete_multireddit(self, name, *args, **kwargs):
        """Delete a Multireddit.

        Any additional parameters are passed directly into
        :meth:`~praw.__init__.BaseReddit.request`

        """
        url = self.config['multireddit_about'].format(user=self.user.name,
                                                      multi=name)
        # The DELETE endpoint requires the modhash header; install it for
        # this request only and always remove it afterwards.
        self.http.headers['x-modhash'] = self.modhash
        try:
            self.request(url, data={}, method='DELETE', *args, **kwargs)
        finally:
            del self.http.headers['x-modhash']

    @decorators.restrict_access(scope='subscribe')
    def edit_multireddit(self, *args, **kwargs):
        """Edit a multireddit, or create one if it doesn't already exist.

        See :meth:`create_multireddit` for accepted parameters.

        """
        return self.create_multireddit(*args, overwrite=True, **kwargs)

    def get_multireddit(self, redditor, multi, *args, **kwargs):
        """Return a Multireddit object for the author and name specified.

        :param redditor: The username or Redditor object of the user
            who owns the multireddit.
        :param multi: The name of the multireddit to fetch.

        The additional parameters are passed directly into the
        :class:`.Multireddit` constructor.

        """
        return objects.Multireddit(self, six.text_type(redditor), multi,
                                   *args, **kwargs)

    def get_multireddits(self, redditor, *args, **kwargs):
        """Return a list of multireddits belonging to a redditor.

        :param redditor: The username or Redditor object to find multireddits
            from.
        :returns: The json response from the server

        The additional parameters are passed directly into
        :meth:`~praw.__init__.BaseReddit.request_json`

        If the requested redditor is the current user, all multireddits
        are visible. Otherwise, only public multireddits are returned.

        """
        redditor = six.text_type(redditor)
        url = self.config['multireddit_user'].format(user=redditor)
        return self.request_json(url, *args, **kwargs)

    @decorators.restrict_access(scope='subscribe')
    def rename_multireddit(self, current_name, new_name, *args, **kwargs):
        """Rename a Multireddit.

        :param current_name: The name of the multireddit to rename
        :param new_name: The new name to assign to this multireddit

        The additional parameters are passed directly into
        :meth:`~praw.__init__.BaseReddit.request_json`

        """
        current_path = self.MULTI_PATH.format(self.user.name, current_name)
        new_path = self.MULTI_PATH.format(self.user.name, new_name)
        data = {'from': current_path,
                'to': new_path}
        return self.request_json(self.config['multireddit_rename'], data=data,
                                 *args, **kwargs)
+
+
class MySubredditsMixin(AuthenticatedReddit):
    """Adds methods requiring the 'mysubreddits' scope (or login).

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    @decorators.restrict_access(scope='mysubreddits')
    def get_my_contributions(self, *args, **kwargs):
        """Return a get_content generator of subreddits.

        The generated Subreddits are those where the session's user is an
        approved contributor.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['my_con_subreddits'], *args,
                                **kwargs)

    @decorators.restrict_access(scope='mysubreddits')
    def get_my_moderation(self, *args, **kwargs):
        """Return a get_content generator of subreddits.

        The generated Subreddits are those that the session's user moderates.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['my_mod_subreddits'], *args,
                                **kwargs)

    @decorators.restrict_access(scope='mysubreddits')
    def get_my_multireddits(self):
        """Return a list of the authenticated Redditor's Multireddits."""
        # The multireddit endpoint responds with a bare JSON list, which
        # :meth:`get_content` cannot page through, so fetch it directly.
        return self.request_json(self.config['my_multis'])

    @decorators.restrict_access(scope='mysubreddits')
    def get_my_subreddits(self, *args, **kwargs):
        """Return a get_content generator of subreddits.

        The generated Subreddits are those that the session's user is
        subscribed to.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['my_subreddits'], *args, **kwargs)
+
+
class PrivateMessagesMixin(AuthenticatedReddit):
    """Adds methods requiring the 'privatemessages' scope (or login).

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.

    """

    @decorators.restrict_access(scope='privatemessages')
    def _mark_as_read(self, thing_ids, unread=False):
        """Mark each of the supplied thing_ids as (un)read.

        :param thing_ids: An iterable of fullnames to update.
        :param unread: When True, mark the things unread instead of read.
        :returns: The json response from the server.

        """
        data = {'id': ','.join(thing_ids)}
        key = 'unread_message' if unread else 'read_message'
        response = self.request_json(self.config[key], data=data)
        # All cached inbox-style listings may now show stale read-state.
        self.evict([self.config[x] for x in ['inbox', 'messages',
                                             'mod_mail', 'unread']])
        return response

    @decorators.restrict_access(scope='privatemessages')
    def get_comment_replies(self, *args, **kwargs):
        """Return a get_content generator for inboxed comment replies.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['comment_replies'],
                                *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_inbox(self, *args, **kwargs):
        """Return a get_content generator for inbox (messages and comments).

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['inbox'], *args, **kwargs)

    def get_message(self, message_id, *args, **kwargs):
        """Return a Message object corresponding to the given ID.

        :param message_id: The ID or Fullname for a Message

        The additional parameters are passed directly into
        :meth:`~praw.objects.Message.from_id` of Message, and subsequently into
        :meth:`.request_json`.

        """
        return objects.Message.from_id(self, message_id, *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_messages(self, *args, **kwargs):
        """Return a get_content generator for inbox (messages only).

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['messages'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_post_replies(self, *args, **kwargs):
        """Return a get_content generator for inboxed submission replies.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['post_replies'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_sent(self, *args, **kwargs):
        """Return a get_content generator for sent messages.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['sent'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_unread(self, unset_has_mail=False, update_user=False, *args,
                   **kwargs):
        """Return a get_content generator for unread messages.

        :param unset_has_mail: When True, clear the has_mail flag (orangered)
            for the user.
        :param update_user: If both `unset_has_mail` and `update user` is True,
            set the `has_mail` attribute of the logged-in user to False.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        params = kwargs.setdefault('params', {})
        if unset_has_mail:
            # 'mark=true' tells reddit to clear the orangered indicator.
            params['mark'] = 'true'
            if update_user:  # Update the user object
                # Use setattr to avoid pylint error
                setattr(self.user, 'has_mail', False)
        return self.get_content(self.config['unread'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_mentions(self, *args, **kwargs):
        """Return a get_content generator for username mentions.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        """
        return self.get_content(self.config['mentions'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    @decorators.require_captcha
    def send_message(self, recipient, subject, message, from_sr=None,
                     captcha=None, **kwargs):
        """Send a message to a redditor or a subreddit's moderators (mod mail).

        :param recipient: A Redditor or Subreddit instance to send a message
            to. A string can also be used in which case the string is treated
            as a redditor unless it is prefixed with either '/r/' or '#', in
            which case it will be treated as a subreddit.
        :param subject: The subject of the message to send.
        :param message: The actual message content.
        :param from_sr: A Subreddit instance or string to send the message
            from. When provided, messages are sent from the subreddit rather
            than from the authenticated user. Note that the authenticated user
            must be a moderator of the subreddit and have mail permissions.

        :returns: The json response from the server.

        This function may result in a captcha challenge. PRAW will
        automatically prompt you for a response. See :ref:`handling-captchas`
        if you want to manually handle captchas.

        """
        # Subreddit recipients must be sent as '/r/<name>'; everything else
        # is coerced to text and treated per the rules in the docstring.
        if isinstance(recipient, objects.Subreddit):
            recipient = '/r/{0}'.format(six.text_type(recipient))
        else:
            recipient = six.text_type(recipient)

        data = {'text': message,
                'subject': subject,
                'to': recipient}
        if from_sr:
            data['from_sr'] = six.text_type(from_sr)
        if captcha:
            # Merge the captcha identifier/solution fields into the payload.
            data.update(captcha)
        response = self.request_json(self.config['compose'], data=data,
                                     retry_on_error=False)
        self.evict(self.config['sent'])
        return response
+
+
+class ReportMixin(AuthenticatedReddit):
+ """Adds methods requiring the 'report' scope (or login).
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ @decorators.restrict_access(scope='report')
+ def hide(self, thing_id, _unhide=False):
+ """Hide one or multiple objects in the context of the logged in user.
+
+ :param thing_id: A single fullname or list of fullnames,
+ representing objects which will be hidden.
+ :param _unhide: If True, unhide the object(s) instead. Use
+ :meth:`~praw.__init__.ReportMixin.unhide` rather than setting this
+ manually.
+
+ :returns: The json response from the server.
+
+ """
+ if isinstance(thing_id, six.string_types):
+ thing_id = [thing_id]
+ else:
+ # Guarantee a subscriptable type.
+ thing_id = list(thing_id)
+
+ if len(thing_id) == 0:
+ raise ValueError('No fullnames provided')
+
+ # Will we return a list of server responses, or just one?
+ # TODO: In future versions, change the threshold to 1 to get
+ # list-in-list-out, single-in-single-out behavior. Threshold of 50
+ # is to avoid a breaking change at this time.
+ return_list = len(thing_id) > 50
+
+ id_chunks = chunk_sequence(thing_id, 50)
+ responses = []
+ for id_chunk in id_chunks:
+ id_chunk = ','.join(id_chunk)
+
+ method = 'unhide' if _unhide else 'hide'
+ data = {'id': id_chunk,
+ 'executed': method}
+
+ response = self.request_json(self.config[method], data=data)
+ responses.append(response)
+
+ if self.user is not None:
+ self.evict(urljoin(self.user._url, # pylint: disable=W0212
+ 'hidden'))
+ if return_list:
+ return responses
+ else:
+ return responses[0]
+
+ def unhide(self, thing_id):
+ """Unhide up to 50 objects in the context of the logged in user.
+
+ :param thing_id: A single fullname or list of fullnames,
+ representing objects which will be unhidden.
+
+ :returns: The json response from the server.
+
+ """
+ return self.hide(thing_id, _unhide=True)
+
+
+class SubmitMixin(AuthenticatedReddit):
+ """Adds methods requiring the 'submit' scope (or login).
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ def _add_comment(self, thing_id, text):
+ """Comment on the given thing with the given text.
+
+ :returns: A Comment object for the newly created comment.
+
+ """
+ def add_comment_helper(self, thing_id, text):
+ data = {'thing_id': thing_id,
+ 'text': text}
+ retval = self.request_json(self.config['comment'], data=data,
+ retry_on_error=False)
+ return retval
+
+ if thing_id.startswith(self.config.by_object[objects.Message]):
+ decorator = decorators.restrict_access(scope='privatemessages')
+ else:
+ decorator = decorators.restrict_access(scope='submit')
+ retval = decorator(add_comment_helper)(self, thing_id, text)
+ # REDDIT: reddit's end should only ever return a single comment
+ return retval['data']['things'][0]
+
+ @decorators.restrict_access(scope='submit')
+ @decorators.require_captcha
+ def submit(self, subreddit, title, text=None, url=None, captcha=None,
+ save=None, send_replies=None, resubmit=None, **kwargs):
+ """Submit a new link to the given subreddit.
+
+ Accepts either a Subreddit object or a str containing the subreddit's
+ display name.
+
+ :param resubmit: If True, submit the link even if it has already been
+ submitted.
+ :param save: If True the new Submission will be saved after creation.
+ :param send_replies: If True, inbox replies will be received when
+ people comment on the submission. If set to None, the default of
+ True for text posts and False for link posts will be used.
+
+ :returns: The newly created Submission object if the reddit instance
+ can access it. Otherwise, return the url to the submission.
+
+ This function may result in a captcha challenge. PRAW will
+ automatically prompt you for a response. See :ref:`handling-captchas`
+ if you want to manually handle captchas.
+
+ """
+ if isinstance(text, six.string_types) == bool(url):
+ raise TypeError('One (and only one) of text or url is required!')
+ data = {'sr': six.text_type(subreddit),
+ 'title': title}
+ if text or text == '':
+ data['kind'] = 'self'
+ data['text'] = text
+ else:
+ data['kind'] = 'link'
+ data['url'] = url
+ if captcha:
+ data.update(captcha)
+ if resubmit is not None:
+ data['resubmit'] = resubmit
+ if save is not None:
+ data['save'] = save
+ if send_replies is not None:
+ data['sendreplies'] = send_replies
+ result = self.request_json(self.config['submit'], data=data,
+ retry_on_error=False)
+ url = result['data']['url']
+ # Clear the OAuth setting when attempting to fetch the submission
+ if self._use_oauth:
+ self._use_oauth = False
+ if url.startswith(self.config.oauth_url):
+ url = self.config.api_url + url[len(self.config.oauth_url):]
+ try:
+ return self.get_submission(url)
+ except errors.Forbidden:
+ # While the user may be able to submit to a subreddit,
+ # that does not guarantee they have read access.
+ return url
+
+
+class SubscribeMixin(AuthenticatedReddit):
+ """Adds methods requiring the 'subscribe' scope (or login).
+
+ You should **not** directly instantiate instances of this class. Use
+ :class:`.Reddit` instead.
+
+ """
+
+ @decorators.restrict_access(scope='subscribe')
+ def subscribe(self, subreddit, unsubscribe=False):
+ """Subscribe to the given subreddit.
+
+ :param subreddit: Either the subreddit name or a subreddit object.
+ :param unsubscribe: When True, unsubscribe.
+ :returns: The json response from the server.
+
+ """
+ data = {'action': 'unsub' if unsubscribe else 'sub',
+ 'sr_name': six.text_type(subreddit)}
+ response = self.request_json(self.config['subscribe'], data=data)
+ self.evict(self.config['my_subreddits'])
+ return response
+
+ def unsubscribe(self, subreddit):
+ """Unsubscribe from the given subreddit.
+
+ :param subreddit: Either the subreddit name or a subreddit object.
+ :returns: The json response from the server.
+
+ """
+ return self.subscribe(subreddit, unsubscribe=True)
+
+
+class Reddit(ModConfigMixin, ModFlairMixin, ModLogMixin, ModOnlyMixin,
+ ModSelfMixin, MultiredditMixin, MySubredditsMixin,
+ PrivateMessagesMixin, ReportMixin, SubmitMixin, SubscribeMixin):
+ """Provides access to reddit's API.
+
+ See :class:`.BaseReddit`'s documentation for descriptions of the
+ initialization parameters.
+
+ """
+
+# Prevent recursive import
+from praw import objects # NOQA
diff --git a/rtv/packages/praw/decorator_helpers.py b/rtv/packages/praw/decorator_helpers.py
new file mode 100644
index 0000000..375dca8
--- /dev/null
+++ b/rtv/packages/praw/decorator_helpers.py
@@ -0,0 +1,38 @@
+"""Internal helper functions used by praw.decorators."""
+import inspect
+from requests.compat import urljoin
+import six
+import sys
+
+
+def _get_captcha(reddit_session, captcha_id):
+ """Prompt user for captcha solution and return a prepared result."""
+ url = urljoin(reddit_session.config['captcha'],
+ captcha_id + '.png')
+ sys.stdout.write('Captcha URL: {0}\nCaptcha: '.format(url))
+ sys.stdout.flush()
+ raw = sys.stdin.readline()
+ if not raw: # stdin has reached the end of file
+ # Trigger exception raising next time through. The request is
+        # cached so this will not require an extra request and delay.
+ sys.stdin.close()
+ return None
+ return {'iden': captcha_id, 'captcha': raw.strip()}
+
+
+def _is_mod_of_all(user, subreddit):
+ mod_subs = user.get_cached_moderated_reddits()
+ subs = six.text_type(subreddit).lower().split('+')
+ return all(sub in mod_subs for sub in subs)
+
+
+def _make_func_args(function):
+ if six.PY3 and not hasattr(sys, 'pypy_version_info'):
+ # CPython3 uses inspect.signature(), not inspect.getargspec()
+ # see #551 and #541 for more info
+ func_items = inspect.signature(function).parameters.items()
+ func_args = [name for name, param in func_items
+ if param.kind == param.POSITIONAL_OR_KEYWORD]
+ else:
+ func_args = inspect.getargspec(function).args
+ return func_args
diff --git a/rtv/packages/praw/decorators.py b/rtv/packages/praw/decorators.py
new file mode 100644
index 0000000..6d4fa08
--- /dev/null
+++ b/rtv/packages/praw/decorators.py
@@ -0,0 +1,294 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Decorators.
+
+They mainly do two things: ensure API guidelines are followed and
+prevent unnecessary failed API requests by testing that the call can be made
+first. Also, they can limit the length of output strings and parse json
+response for certain errors.
+"""
+
+from __future__ import print_function, unicode_literals
+
+import decorator
+import six
+import sys
+from functools import wraps
+from praw.decorator_helpers import (
+ _get_captcha,
+ _is_mod_of_all,
+ _make_func_args
+)
+from praw import errors
+from warnings import filterwarnings, warn
+
+
+# Enable deprecation warnings from this module
+filterwarnings('default', category=DeprecationWarning,
+ module='^praw\.decorators$')
+
+
+def alias_function(function, class_name):
+ """Create a RedditContentObject function mapped to a BaseReddit function.
+
+ The BaseReddit classes define the majority of the API's functions. The
+ first argument for many of these functions is the RedditContentObject that
+ they operate on. This factory returns functions appropriate to be called on
+ a RedditContent object that maps to the corresponding BaseReddit function.
+
+ """
+ @wraps(function)
+ def wrapped(self, *args, **kwargs):
+ func_args = _make_func_args(function)
+ if 'subreddit' in func_args and func_args.index('subreddit') != 1:
+ # Only happens for search
+ kwargs['subreddit'] = self
+ return function(self.reddit_session, *args, **kwargs)
+ else:
+ return function(self.reddit_session, self, *args, **kwargs)
+ # Only grab the short-line doc and add a link to the complete doc
+ if wrapped.__doc__ is not None:
+ wrapped.__doc__ = wrapped.__doc__.split('\n', 1)[0]
+ wrapped.__doc__ += ('\n\nSee :meth:`.{0}.{1}` for complete usage. '
+ 'Note that you should exclude the subreddit '
+ 'parameter when calling this convenience method.'
+ .format(class_name, function.__name__))
+ # Don't hide from sphinx as this is a parameter modifying decorator
+ return wrapped
+
+
+def deprecated(msg=''):
+ """Deprecate decorated method."""
+ @decorator.decorator
+ def wrap(function, *args, **kwargs):
+ if not kwargs.pop('disable_warning', False):
+ warn(msg, DeprecationWarning)
+ return function(*args, **kwargs)
+ return wrap
+
+
+@decorator.decorator
+def limit_chars(function, *args, **kwargs):
+ """Truncate the string returned from a function and return the result."""
+ output_chars_limit = args[0].reddit_session.config.output_chars_limit
+ output_string = function(*args, **kwargs)
+ if -1 < output_chars_limit < len(output_string):
+ output_string = output_string[:output_chars_limit - 3] + '...'
+ return output_string
+
+
+@decorator.decorator
+def oauth_generator(function, *args, **kwargs):
+ """Set the _use_oauth keyword argument to True when appropriate.
+
+ This is needed because generator functions may be called at anytime, and
+ PRAW relies on the Reddit._use_oauth value at original call time to know
+ when to make OAuth requests.
+
+ Returned data is not modified.
+
+ """
+ if getattr(args[0], '_use_oauth', False):
+ kwargs['_use_oauth'] = True
+ return function(*args, **kwargs)
+
+
+@decorator.decorator
+def raise_api_exceptions(function, *args, **kwargs):
+ """Raise client side exception(s) when present in the API response.
+
+ Returned data is not modified.
+
+ """
+ try:
+ return_value = function(*args, **kwargs)
+ except errors.HTTPException as exc:
+ if exc._raw.status_code != 400: # pylint: disable=W0212
+ raise # Unhandled HTTPErrors
+ try: # Attempt to convert v1 errors into older format (for now)
+ data = exc._raw.json() # pylint: disable=W0212
+ assert len(data) == 2
+ return_value = {'errors': [(data['reason'],
+ data['explanation'], '')]}
+ except Exception:
+ raise exc
+ if isinstance(return_value, dict):
+ if return_value.get('error') == 304: # Not modified exception
+ raise errors.NotModified(return_value)
+ elif return_value.get('errors'):
+ error_list = []
+ for error_type, msg, value in return_value['errors']:
+ if error_type in errors.ERROR_MAPPING:
+ if error_type == 'RATELIMIT':
+ args[0].evict(args[1])
+ error_class = errors.ERROR_MAPPING[error_type]
+ else:
+ error_class = errors.APIException
+ error_list.append(error_class(error_type, msg, value,
+ return_value))
+ if len(error_list) == 1:
+ raise error_list[0]
+ else:
+ raise errors.ExceptionList(error_list)
+ return return_value
+
+
+@decorator.decorator
+def require_captcha(function, *args, **kwargs):
+ """Return a decorator for methods that require captchas."""
+ raise_captcha_exception = kwargs.pop('raise_captcha_exception', False)
+ captcha_id = None
+
+ # Get a handle to the reddit session
+ if hasattr(args[0], 'reddit_session'):
+ reddit_session = args[0].reddit_session
+ else:
+ reddit_session = args[0]
+
+ while True:
+ try:
+ if captcha_id:
+ captcha_answer = _get_captcha(reddit_session, captcha_id)
+
+ # When the method is being decorated, all of its default
+ # parameters become part of this *args tuple. This means that
+ # *args currently contains a None where the captcha answer
+ # needs to go. If we put the captcha in the **kwargs,
+ # we get a TypeError for having two values of the same param.
+ func_args = _make_func_args(function)
+ if 'captcha' in func_args:
+ captcha_index = func_args.index('captcha')
+ args = list(args)
+ args[captcha_index] = captcha_answer
+ else:
+ kwargs['captcha'] = captcha_answer
+ return function(*args, **kwargs)
+ except errors.InvalidCaptcha as exception:
+ if raise_captcha_exception or \
+ not hasattr(sys.stdin, 'closed') or sys.stdin.closed:
+ raise
+ captcha_id = exception.response['captcha']
+
+
+def restrict_access(scope, mod=None, login=None, oauth_only=False,
+ generator_called=False):
+ """Restrict function access unless the user has the necessary permissions.
+
+ Raises one of the following exceptions when appropriate:
+ * LoginRequired
+ * LoginOrOAuthRequired
+ * the scope attribute will provide the necessary scope name
+ * ModeratorRequired
+ * ModeratorOrOAuthRequired
+ * the scope attribute will provide the necessary scope name
+
+ :param scope: Indicate the scope that is required for the API call. None or
+ False must be passed to indicate that no scope handles the API call.
+ All scopes save for `read` imply login=True. Scopes with 'mod' in their
+ name imply mod=True.
+ :param mod: Indicate that a moderator is required. Implies login=True.
+ :param login: Indicate that a login is required.
+ :param oauth_only: Indicate that only OAuth is supported for the function.
+ :param generator_called: Indicate that the function consists solely of
+ exhausting one or more oauth_generator wrapped generators. This is
+ because the oauth_generator itself will determine whether or not to
+ use the oauth domain.
+
+ Returned data is not modified.
+
+ This decorator assumes that all mod required functions fit one of these
+ categories:
+
+ * have the subreddit as the first argument (Reddit instance functions) or
+ have a subreddit keyword argument
+ * are called upon a subreddit object (Subreddit RedditContentObject)
+ * are called upon a RedditContent object with attribute subreddit
+
+ """
+ if not scope and oauth_only:
+ raise TypeError('`scope` must be set when `oauth_only` is set')
+
+ mod = mod is not False and (mod or scope and 'mod' in scope)
+ login = login is not False and (login or mod or scope and scope != 'read')
+
+ @decorator.decorator
+ def wrap(function, *args, **kwargs):
+ if args[0] is None: # Occurs with (un)friend
+ assert login
+ raise errors.LoginRequired(function.__name__)
+ # This segment of code uses hasattr to determine what instance type
+ # the function was called on. We could use isinstance if we wanted
+ # to import the types at runtime (decorators is used by all the
+ # types).
+ if mod:
+ if hasattr(args[0], 'reddit_session'):
+ # Defer access until necessary for RedditContentObject.
+ # This is because scoped sessions may not require this
+ # attribute to exist, thus it might not be set.
+ from praw.objects import Subreddit
+ subreddit = args[0] if isinstance(args[0], Subreddit) \
+ else False
+ else:
+ subreddit = kwargs.get(
+ 'subreddit', args[1] if len(args) > 1 else None)
+ if subreddit is None: # Try the default value
+ defaults = six.get_function_defaults(function)
+ subreddit = defaults[0] if defaults else None
+ else:
+ subreddit = None
+
+ obj = getattr(args[0], 'reddit_session', args[0])
+ # This function sets _use_oauth for one time use only.
+ # Verify that statement is actually true.
+ assert not obj._use_oauth # pylint: disable=W0212
+
+ if scope and obj.has_scope(scope):
+ obj._use_oauth = not generator_called # pylint: disable=W0212
+ elif oauth_only:
+ raise errors.OAuthScopeRequired(function.__name__, scope)
+ elif login and obj.is_logged_in():
+ if subreddit is False:
+ # Now fetch the subreddit attribute. There is no good
+ # reason for it to not be set during a logged in session.
+ subreddit = args[0].subreddit
+ if mod and not _is_mod_of_all(obj.user, subreddit):
+ if scope:
+ raise errors.ModeratorOrScopeRequired(
+ function.__name__, scope)
+ raise errors.ModeratorRequired(function.__name__)
+ elif login:
+ if scope:
+ raise errors.LoginOrScopeRequired(function.__name__, scope)
+ raise errors.LoginRequired(function.__name__)
+ try:
+ return function(*args, **kwargs)
+ finally:
+ obj._use_oauth = False # pylint: disable=W0212
+ return wrap
+
+
+@decorator.decorator
+def require_oauth(function, *args, **kwargs):
+ """Verify that the OAuth functions can be used prior to use.
+
+ Returned data is not modified.
+
+ """
+ if not args[0].has_oauth_app_info:
+ err_msg = ("The OAuth app config parameters client_id, client_secret "
+ "and redirect_url must be specified to use this function.")
+ raise errors.OAuthAppRequired(err_msg)
+ return function(*args, **kwargs)
diff --git a/rtv/packages/praw/errors.py b/rtv/packages/praw/errors.py
new file mode 100644
index 0000000..4cba0a0
--- /dev/null
+++ b/rtv/packages/praw/errors.py
@@ -0,0 +1,487 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Error classes.
+
+Includes two main exceptions: ClientException, when something goes
+wrong on our end, and APIExeception for when something goes wrong on the
+server side. A number of classes extend these two main exceptions for more
+specific exceptions.
+"""
+
+from __future__ import print_function, unicode_literals
+
+import inspect
+import six
+import sys
+
+
+class PRAWException(Exception):
+ """The base PRAW Exception class.
+
+ Ideally, this can be caught to handle any exception from PRAW.
+
+ """
+
+
+class ClientException(PRAWException):
+ """Base exception class for errors that don't involve the remote API."""
+
+ def __init__(self, message=None):
+ """Construct a ClientException.
+
+ :param message: The error message to display.
+
+ """
+ if not message:
+ message = 'Clientside error'
+ super(ClientException, self).__init__()
+ self.message = message
+
+ def __str__(self):
+ """Return the message of the error."""
+ return self.message
+
+
+class OAuthScopeRequired(ClientException):
+ """Indicates that an OAuth2 scope is required to make the function call.
+
+ The attribute `scope` will contain the name of the necessary scope.
+
+ """
+
+ def __init__(self, function, scope, message=None):
+ """Contruct an OAuthScopeRequiredClientException.
+
+ :param function: The function that requires a scope.
+ :param scope: The scope required for the function.
+ :param message: A custom message to associate with the
+ exception. Default: `function` requires the OAuth2 scope `scope`
+
+ """
+ if not message:
+ message = '`{0}` requires the OAuth2 scope `{1}`'.format(function,
+ scope)
+ super(OAuthScopeRequired, self).__init__(message)
+ self.scope = scope
+
+
+class LoginRequired(ClientException):
+ """Indicates that a logged in session is required.
+
+ This exception is raised on a preemptive basis, whereas NotLoggedIn occurs
+ in response to a lack of credentials on a privileged API call.
+
+ """
+
+ def __init__(self, function, message=None):
+ """Construct a LoginRequired exception.
+
+ :param function: The function that requires login-based authentication.
+ :param message: A custom message to associate with the exception.
+ Default: `function` requires a logged in session
+
+ """
+ if not message:
+ message = '`{0}` requires a logged in session'.format(function)
+ super(LoginRequired, self).__init__(message)
+
+
+class LoginOrScopeRequired(OAuthScopeRequired, LoginRequired):
+ """Indicates that either a logged in session or OAuth2 scope is required.
+
+ The attribute `scope` will contain the name of the necessary scope.
+
+ """
+
+ def __init__(self, function, scope, message=None):
+ """Construct a LoginOrScopeRequired exception.
+
+ :param function: The function that requires authentication.
+ :param scope: The scope that is required if not logged in.
+ :param message: A custom message to associate with the exception.
+ Default: `function` requires a logged in session or the OAuth2
+ scope `scope`
+
+ """
+ if not message:
+ message = ('`{0}` requires a logged in session or the '
+ 'OAuth2 scope `{1}`').format(function, scope)
+ super(LoginOrScopeRequired, self).__init__(function, scope, message)
+
+
+class ModeratorRequired(LoginRequired):
+ """Indicates that a moderator of the subreddit is required."""
+
+ def __init__(self, function):
+ """Construct a ModeratorRequired exception.
+
+ :param function: The function that requires moderator access.
+
+ """
+ message = ('`{0}` requires a moderator '
+ 'of the subreddit').format(function)
+ super(ModeratorRequired, self).__init__(message)
+
+
+class ModeratorOrScopeRequired(LoginOrScopeRequired, ModeratorRequired):
+ """Indicates that a moderator of the sub or OAuth2 scope is required.
+
+ The attribute `scope` will contain the name of the necessary scope.
+
+ """
+
+ def __init__(self, function, scope):
+ """Construct a ModeratorOrScopeRequired exception.
+
+ :param function: The function that requires moderator authentication or
+            a moderator scope.
+ :param scope: The scope that is required if not logged in with
+            moderator access.
+
+ """
+ message = ('`{0}` requires a moderator of the subreddit or the '
+ 'OAuth2 scope `{1}`').format(function, scope)
+ super(ModeratorOrScopeRequired, self).__init__(function, scope,
+ message)
+
+
+class OAuthAppRequired(ClientException):
+ """Raised when an OAuth client cannot be initialized.
+
+ This occurs when any one of the OAuth config values are not set.
+
+ """
+
+
+class HTTPException(PRAWException):
+ """Base class for HTTP related exceptions."""
+
+ def __init__(self, _raw, message=None):
+ """Construct a HTTPException.
+
+ :params _raw: The internal request library response object. This object
+ is mapped to attribute `_raw` whose format may change at any time.
+
+ """
+ if not message:
+ message = 'HTTP error'
+ super(HTTPException, self).__init__()
+ self._raw = _raw
+ self.message = message
+
+ def __str__(self):
+ """Return the message of the error."""
+ return self.message
+
+
+class Forbidden(HTTPException):
+ """Raised when the user does not have permission to the entity."""
+
+
+class NotFound(HTTPException):
+ """Raised when the requested entity is not found."""
+
+
+class InvalidComment(PRAWException):
+ """Indicate that the comment is no longer available on reddit."""
+
+ ERROR_TYPE = 'DELETED_COMMENT'
+
+ def __str__(self):
+ """Return the message of the error."""
+ return self.ERROR_TYPE
+
+
+class InvalidSubmission(PRAWException):
+ """Indicates that the submission is no longer available on reddit."""
+
+ ERROR_TYPE = 'DELETED_LINK'
+
+ def __str__(self):
+ """Return the message of the error."""
+ return self.ERROR_TYPE
+
+
+class InvalidSubreddit(PRAWException):
+ """Indicates that an invalid subreddit name was supplied."""
+
+ ERROR_TYPE = 'SUBREDDIT_NOEXIST'
+
+ def __str__(self):
+ """Return the message of the error."""
+ return self.ERROR_TYPE
+
+
+class RedirectException(PRAWException):
+ """Raised when a redirect response occurs that is not expected."""
+
+ def __init__(self, request_url, response_url, message=None):
+ """Construct a RedirectException.
+
+ :param request_url: The url requested.
+ :param response_url: The url being redirected to.
+ :param message: A custom message to associate with the exception.
+
+ """
+ if not message:
+ message = ('Unexpected redirect '
+ 'from {0} to {1}').format(request_url, response_url)
+ super(RedirectException, self).__init__()
+ self.request_url = request_url
+ self.response_url = response_url
+ self.message = message
+
+ def __str__(self):
+ """Return the message of the error."""
+ return self.message
+
+
+class OAuthException(PRAWException):
+ """Base exception class for OAuth API calls.
+
+ Attribute `message` contains the error message.
+ Attribute `url` contains the url that resulted in the error.
+
+ """
+
+ def __init__(self, message, url):
+ """Construct a OAuthException.
+
+ :param message: The message associated with the exception.
+ :param url: The url that resulted in error.
+
+ """
+ super(OAuthException, self).__init__()
+ self.message = message
+ self.url = url
+
+ def __str__(self):
+ """Return the message along with the url."""
+ return self.message + " on url {0}".format(self.url)
+
+
+class OAuthInsufficientScope(OAuthException):
+ """Raised when the current OAuth scope is not sufficient for the action.
+
+ This indicates the access token is valid, but not for the desired action.
+
+ """
+
+
+class OAuthInvalidGrant(OAuthException):
+ """Raised when the code to retrieve access information is not valid."""
+
+
+class OAuthInvalidToken(OAuthException):
+ """Raised when the current OAuth access token is not valid."""
+
+
+class APIException(PRAWException):
+ """Base exception class for the reddit API error message exceptions.
+
+ All exceptions of this type should have their own subclass.
+
+ """
+
+ def __init__(self, error_type, message, field='', response=None):
+ """Construct an APIException.
+
+ :param error_type: The error type set on reddit's end.
+ :param message: The associated message for the error.
+ :param field: The input field associated with the error, or ''.
+ :param response: The HTTP response that resulted in the exception.
+
+ """
+ super(APIException, self).__init__()
+ self.error_type = error_type
+ self.message = message
+ self.field = field
+ self.response = response
+
+ def __str__(self):
+ """Return a string containing the error message and field."""
+ if hasattr(self, 'ERROR_TYPE'):
+ return '`{0}` on field `{1}`'.format(self.message, self.field)
+ else:
+ return '({0}) `{1}` on field `{2}`'.format(self.error_type,
+ self.message,
+ self.field)
+
+
+class ExceptionList(APIException):
+ """Raised when more than one exception occurred."""
+
+ def __init__(self, errors):
+ """Construct an ExceptionList.
+
+ :param errors: The list of errors.
+
+ """
+ super(ExceptionList, self).__init__(None, None)
+ self.errors = errors
+
+ def __str__(self):
+ """Return a string representation for all the errors."""
+ ret = '\n'
+ for i, error in enumerate(self.errors):
+ ret += '\tError {0}) {1}\n'.format(i, six.text_type(error))
+ return ret
+
+
+class AlreadySubmitted(APIException):
+ """An exception to indicate that a URL was previously submitted."""
+
+ ERROR_TYPE = 'ALREADY_SUB'
+
+
+class AlreadyModerator(APIException):
+ """Used to indicate that a user is already a moderator of a subreddit."""
+
+ ERROR_TYPE = 'ALREADY_MODERATOR'
+
+
+class BadCSS(APIException):
+ """An exception to indicate bad CSS (such as invalid) was used."""
+
+ ERROR_TYPE = 'BAD_CSS'
+
+
+class BadCSSName(APIException):
+ """An exception to indicate a bad CSS name (such as invalid) was used."""
+
+ ERROR_TYPE = 'BAD_CSS_NAME'
+
+
+class BadUsername(APIException):
+ """An exception to indicate an invalid username was used."""
+
+ ERROR_TYPE = 'BAD_USERNAME'
+
+
+class InvalidCaptcha(APIException):
+ """An exception for when an incorrect captcha error is returned."""
+
+ ERROR_TYPE = 'BAD_CAPTCHA'
+
+
+class InvalidEmails(APIException):
+ """An exception for when invalid emails are provided."""
+
+ ERROR_TYPE = 'BAD_EMAILS'
+
+
+class InvalidFlairTarget(APIException):
+ """An exception raised when an invalid user is passed as a flair target."""
+
+ ERROR_TYPE = 'BAD_FLAIR_TARGET'
+
+
+class InvalidInvite(APIException):
+ """Raised when attempting to accept a nonexistent moderator invite."""
+
+ ERROR_TYPE = 'NO_INVITE_FOUND'
+
+
+class InvalidUser(APIException):
+ """An exception for when a user doesn't exist."""
+
+ ERROR_TYPE = 'USER_DOESNT_EXIST'
+
+
+class InvalidUserPass(APIException):
+ """An exception for failed logins."""
+
+ ERROR_TYPE = 'WRONG_PASSWORD'
+
+
+class InsufficientCreddits(APIException):
+ """Raised when there are not enough creddits to complete the action."""
+
+ ERROR_TYPE = 'INSUFFICIENT_CREDDITS'
+
+
+class NotLoggedIn(APIException):
+ """An exception for when a Reddit user isn't logged in."""
+
+ ERROR_TYPE = 'USER_REQUIRED'
+
+
+class NotModified(APIException):
+ """An exception raised when reddit returns {'error': 304}.
+
+ This error indicates that the requested content was not modified and is
+ being requested too frequently. Such an error usually occurs when multiple
+ instances of PRAW are running concurrently or in rapid succession.
+
+ """
+
+ def __init__(self, response):
+ """Construct an instance of the NotModified exception.
+
+ This error does not have an error_type, message, nor field.
+
+ """
+ super(NotModified, self).__init__(None, None, response=response)
+
+ def __str__(self):
+ """Return: That page has not been modified."""
+ return 'That page has not been modified.'
+
+
+class RateLimitExceeded(APIException):
+ """An exception for when something has happened too frequently.
+
+ Contains a `sleep_time` attribute for the number of seconds that must
+ transpire prior to the next request.
+
+ """
+
+ ERROR_TYPE = 'RATELIMIT'
+
+ def __init__(self, error_type, message, field, response):
+ """Construct an instance of the RateLimitExceeded exception.
+
+ The parameters match that of :class:`APIException`.
+
+ The `sleep_time` attribute is extracted from the response object.
+
+ """
+ super(RateLimitExceeded, self).__init__(error_type, message,
+ field, response)
+ self.sleep_time = self.response['ratelimit']
+
+
+class SubredditExists(APIException):
+ """An exception to indicate that a subreddit name is not available."""
+
+ ERROR_TYPE = 'SUBREDDIT_EXISTS'
+
+
+class UsernameExists(APIException):
+ """An exception to indicate that a username is not available."""
+
+ ERROR_TYPE = 'USERNAME_TAKEN'
+
+
+def _build_error_mapping():
+ def predicate(obj):
+ return inspect.isclass(obj) and hasattr(obj, 'ERROR_TYPE')
+
+ tmp = {}
+ for _, obj in inspect.getmembers(sys.modules[__name__], predicate):
+ tmp[obj.ERROR_TYPE] = obj
+ return tmp
+ERROR_MAPPING = _build_error_mapping()
diff --git a/rtv/packages/praw/handlers.py b/rtv/packages/praw/handlers.py
new file mode 100644
index 0000000..fa4df11
--- /dev/null
+++ b/rtv/packages/praw/handlers.py
@@ -0,0 +1,243 @@
+"""Provides classes that handle request dispatching."""
+
+from __future__ import print_function, unicode_literals
+
+import socket
+import sys
+import time
+from functools import wraps
+from praw.errors import ClientException
+from praw.helpers import normalize_url
+from requests import Session
+from six import text_type
+from six.moves import cPickle # pylint: disable=F0401
+from threading import Lock
+from timeit import default_timer as timer
+
+
+class RateLimitHandler(object):
+ """The base handler that provides thread-safe rate limiting enforcement.
+
+ While this handler is threadsafe, PRAW is not thread safe when the same
+ `Reddit` instance is being utilized from multiple threads.
+
+ """
+
+ last_call = {} # Stores a two-item list: [lock, previous_call_time]
+ rl_lock = Lock() # lock used for adding items to last_call
+
+ @staticmethod
+ def rate_limit(function):
+ """Return a decorator that enforces API request limit guidelines.
+
+ We are allowed to make a API request every api_request_delay seconds as
+ specified in praw.ini. This value may differ from reddit to reddit. For
+ reddit.com it is 2. Any function decorated with this will be forced to
+ delay _rate_delay seconds from the calling of the last function
+ decorated with this before executing.
+
+ This decorator must be applied to a RateLimitHandler class method or
+ instance method as it assumes `rl_lock` and `last_call` are available.
+
+ """
+ @wraps(function)
+ def wrapped(cls, _rate_domain, _rate_delay, **kwargs):
+ cls.rl_lock.acquire()
+ lock_last = cls.last_call.setdefault(_rate_domain, [Lock(), 0])
+ with lock_last[0]: # Obtain the domain specific lock
+ cls.rl_lock.release()
+ # Sleep if necessary, then perform the request
+ now = timer()
+ delay = lock_last[1] + _rate_delay - now
+ if delay > 0:
+ now += delay
+ time.sleep(delay)
+ lock_last[1] = now
+ return function(cls, **kwargs)
+ return wrapped
+
+ @classmethod
+ def evict(cls, urls): # pylint: disable=W0613
+ """Method utilized to evict entries for the given urls.
+
+ :param urls: An iterable containing normalized urls.
+ :returns: The number of items removed from the cache.
+
+ By default this method returns False as a cache need not be present.
+
+ """
+ return 0
+
+ def __del__(self):
+ """Cleanup the HTTP session."""
+ if self.http:
+ try:
+ self.http.close()
+ except: # Never fail pylint: disable=W0702
+ pass
+
+ def __init__(self):
+ """Establish the HTTP session."""
+ self.http = Session() # Each instance should have its own session
+
+ def request(self, request, proxies, timeout, verify, **_):
+ """Responsible for dispatching the request and returning the result.
+
+ Network level exceptions should be raised and only
+ ``requests.Response`` should be returned.
+
+ :param request: A ``requests.PreparedRequest`` object containing all
+ the data necessary to perform the request.
+ :param proxies: A dictionary of proxy settings to be utilized for the
+ request.
+ :param timeout: Specifies the maximum time that the actual HTTP request
+ can take.
+ :param verify: Specifies if SSL certificates should be validated.
+
+ ``**_`` should be added to the method call to ignore the extra
+ arguments intended for the cache handler.
+
+ """
+ settings = self.http.merge_environment_settings(
+ request.url, proxies, False, verify, None
+ )
+ return self.http.send(request, timeout=timeout, allow_redirects=False,
+ **settings)
+
+RateLimitHandler.request = RateLimitHandler.rate_limit(
+ RateLimitHandler.request)
+
+
+class DefaultHandler(RateLimitHandler):
+ """Extends the RateLimitHandler to add thread-safe caching support."""
+
+ ca_lock = Lock()
+ cache = {}
+ cache_hit_callback = None
+ timeouts = {}
+
+ @staticmethod
+ def with_cache(function):
+ """Return a decorator that interacts with a handler's cache.
+
+ This decorator must be applied to a DefaultHandler class method or
+ instance method as it assumes `cache`, `ca_lock` and `timeouts` are
+ available.
+
+ """
+ @wraps(function)
+ def wrapped(cls, _cache_key, _cache_ignore, _cache_timeout, **kwargs):
+ def clear_timeouts():
+ """Clear the cache of timed out results."""
+ for key in list(cls.timeouts):
+ if timer() - cls.timeouts[key] > _cache_timeout:
+ del cls.timeouts[key]
+ del cls.cache[key]
+
+ if _cache_ignore:
+ return function(cls, **kwargs)
+ with cls.ca_lock:
+ clear_timeouts()
+ if _cache_key in cls.cache:
+ if cls.cache_hit_callback:
+ cls.cache_hit_callback(_cache_key)
+ return cls.cache[_cache_key]
+ # Releasing the lock before actually making the request allows for
+ # the possibility of more than one thread making the same request
+ # to get through. Without having domain-specific caching (under the
+ # assumption only one request to a domain can be made at a
+ # time), there isn't a better way to handle this.
+ result = function(cls, **kwargs)
+ # The handlers don't call `raise_for_status` so we need to ignore
+ # status codes that will result in an exception that should not be
+ # cached.
+ if result.status_code not in (200, 302):
+ return result
+ with cls.ca_lock:
+ cls.timeouts[_cache_key] = timer()
+ cls.cache[_cache_key] = result
+ return result
+ return wrapped
+
+ @classmethod
+ def clear_cache(cls):
+ """Remove all items from the cache."""
+ with cls.ca_lock:
+ cls.cache = {}
+ cls.timeouts = {}
+
+ @classmethod
+ def evict(cls, urls):
+ """Remove items from cache matching URLs.
+
+ Return the number of items removed.
+
+ """
+ if isinstance(urls, text_type):
+ urls = [urls]
+ urls = set(normalize_url(url) for url in urls)
+ retval = 0
+ with cls.ca_lock:
+ for key in list(cls.cache):
+ if key[0] in urls:
+ retval += 1
+ del cls.cache[key]
+ del cls.timeouts[key]
+ return retval
+DefaultHandler.request = DefaultHandler.with_cache(RateLimitHandler.request)
+
+
+class MultiprocessHandler(object):
+ """A PRAW handler to interact with the PRAW multi-process server."""
+
+ def __init__(self, host='localhost', port=10101):
+ """Construct an instance of the MultiprocessHandler."""
+ self.host = host
+ self.port = port
+
+ def _relay(self, **kwargs):
+ """Send the request through the server and return the HTTP response."""
+ retval = None
+ delay_time = 2 # For connection retries
+ read_attempts = 0 # For reading from socket
+ while retval is None: # Evict can return False
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock_fp = sock.makefile('rwb') # Used for pickle
+ try:
+ sock.connect((self.host, self.port))
+ cPickle.dump(kwargs, sock_fp, cPickle.HIGHEST_PROTOCOL)
+ sock_fp.flush()
+ retval = cPickle.load(sock_fp)
+ except: # pylint: disable=W0702
+ exc_type, exc, _ = sys.exc_info()
+ socket_error = exc_type is socket.error
+ if socket_error and exc.errno == 111: # Connection refused
+ sys.stderr.write('Cannot connect to multiprocess server. I'
+ 's it running? Retrying in {0} seconds.\n'
+ .format(delay_time))
+ time.sleep(delay_time)
+ delay_time = min(64, delay_time * 2)
+ elif exc_type is EOFError or socket_error and exc.errno == 104:
+ # Failure during socket READ
+ if read_attempts >= 3:
+ raise ClientException('Successive failures reading '
+ 'from the multiprocess server.')
+ sys.stderr.write('Lost connection with multiprocess server'
+ ' during read. Trying again.\n')
+ read_attempts += 1
+ else:
+ raise
+ finally:
+ sock_fp.close()
+ sock.close()
+ if isinstance(retval, Exception):
+ raise retval # pylint: disable=E0702
+ return retval
+
+ def evict(self, urls):
+ """Forward the eviction to the server and return its response."""
+ return self._relay(method='evict', urls=urls)
+
+ def request(self, **kwargs):
+ """Forward the request to the server and return its HTTP response."""
+ return self._relay(method='request', **kwargs)
diff --git a/rtv/packages/praw/helpers.py b/rtv/packages/praw/helpers.py
new file mode 100644
index 0000000..45dfea2
--- /dev/null
+++ b/rtv/packages/praw/helpers.py
@@ -0,0 +1,481 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Helper functions.
+
+The functions here provide functionality that is often needed by programs using
+PRAW, but which isn't part of reddit's API.
+"""
+
+from __future__ import unicode_literals
+
+import six
+import sys
+import time
+from collections import deque
+from functools import partial
+from timeit import default_timer as timer
+from praw.errors import HTTPException, PRAWException
+from operator import attrgetter
+
+BACKOFF_START = 4 # Minimum number of seconds to sleep during errors
+KEEP_ITEMS = 128 # On each iteration only remember the first # items
+
+# for conversion between broken reddit timestamps and unix timestamps
+REDDIT_TIMESTAMP_OFFSET = 28800
+
+
+def comment_stream(reddit_session, subreddit, limit=None, verbosity=1):
+ """Indefinitely yield new comments from the provided subreddit.
+
+ Comments are yielded from oldest to newest.
+
+ :param reddit_session: The reddit_session to make requests from. In all the
+ examples this is assigned to the variable ``r``.
+ :param subreddit: Either a subreddit object, or the name of a
+ subreddit. Use `all` to get the comment stream for all comments made to
+ reddit.
+ :param limit: The maximum number of comments to fetch in a single
+ iteration. When None, fetch all available comments (reddit limits this
+ to 1000, or a multiple of 1000 for multi-subreddits). If this number is
+ too small, comments may be missed.
+ :param verbosity: A number that controls the amount of output produced to
+ stderr. <= 0: no output; >= 1: output the total number of comments
+ processed and provide the short-term number of comments processed per
+ second; >= 2: output when additional delays are added in order to avoid
+ subsequent unexpected http errors. >= 3: output debugging information
+ regarding the comment stream. (Default: 1)
+
+ """
+ get_function = partial(reddit_session.get_comments,
+ six.text_type(subreddit))
+ return _stream_generator(get_function, limit, verbosity)
+
+
+def submission_stream(reddit_session, subreddit, limit=None, verbosity=1):
+ """Indefinitely yield new submissions from the provided subreddit.
+
+ Submissions are yielded from oldest to newest.
+
+ :param reddit_session: The reddit_session to make requests from. In all the
+ examples this is assigned to the variable ``r``.
+ :param subreddit: Either a subreddit object, or the name of a
+ subreddit. Use `all` to get the submissions stream for all submissions
+ made to reddit.
+ :param limit: The maximum number of submissions to fetch in a single
+ iteration. When None, fetch all available submissions (reddit limits
+ this to 1000, or a multiple of 1000 for multi-subreddits). If this number
+ is too small, submissions may be missed. Since there isn't a limit to
+ the number of submissions that can be retrieved from r/all, the limit
+ will be set to 1000 when limit is None.
+ :param verbosity: A number that controls the amount of output produced to
+ stderr. <= 0: no output; >= 1: output the total number of submissions
+ processed and provide the short-term number of submissions processed
+ per second; >= 2: output when additional delays are added in order to
+ avoid subsequent unexpected http errors. >= 3: output debugging
+ information regarding the submission stream. (Default: 1)
+
+ """
+ if six.text_type(subreddit).lower() == "all":
+ if limit is None:
+ limit = 1000
+ if not hasattr(subreddit, 'reddit_session'):
+ subreddit = reddit_session.get_subreddit(subreddit)
+ return _stream_generator(subreddit.get_new, limit, verbosity)
+
+
+def valid_redditors(redditors, sub):
+ """Return a verified list of valid Redditor instances.
+
+ :param redditors: A list comprised of Redditor instances and/or strings
+ that are to be verified as actual redditor accounts.
+ :param sub: A Subreddit instance that the authenticated account has
+ flair changing permission on.
+
+ Note: Flair will be unset for all valid redditors in `redditors` on the
+ subreddit `sub`. A valid redditor is defined as a redditor that is
+ registered on reddit.
+
+ """
+ simplified = list(set(six.text_type(x).lower() for x in redditors))
+ return [sub.reddit_session.get_redditor(simplified[i], fetch=False)
+ for (i, resp) in enumerate(sub.set_flair_csv(
+ ({'user': x, 'flair_text': x} for x in simplified)))
+ if resp['ok']]
+
+
+def submissions_between(reddit_session,
+ subreddit,
+ lowest_timestamp=None,
+ highest_timestamp=None,
+ newest_first=True,
+ extra_cloudsearch_fields=None,
+ verbosity=1):
+ """Yield submissions between two timestamps.
+
+ If both ``highest_timestamp`` and ``lowest_timestamp`` are unspecified,
+ yields all submissions in the ``subreddit``.
+
+ Submissions are yielded from newest to oldest (like in the "new" queue).
+
+ :param reddit_session: The reddit_session to make requests from. In all the
+ examples this is assigned to the variable ``r``.
+ :param subreddit: Either a subreddit object, or the name of a
+ subreddit. Use `all` to get the submissions stream for all submissions
+ made to reddit.
+ :param lowest_timestamp: The lower bound for ``created_utc`` attribute of
+ submissions.
+ (Default: subreddit's created_utc or 0 when subreddit == "all").
+ :param highest_timestamp: The upper bound for ``created_utc`` attribute
+ of submissions. (Default: current unix time)
+ NOTE: both highest_timestamp and lowest_timestamp are proper
+ unix timestamps (just like ``created_utc`` attributes)
+ :param newest_first: If set to true, yields submissions
+ from newest to oldest. Otherwise yields submissions
+ from oldest to newest
+ :param extra_cloudsearch_fields: Allows extra filtering of results by
+ parameters like author, self. Full list is available here:
+ https://www.reddit.com/wiki/search
+ :param verbosity: A number that controls the amount of output produced to
+ stderr. <= 0: no output; >= 1: output the total number of submissions
+ processed; >= 2: output debugging information regarding
+ the search queries. (Default: 1)
+ """
+ def debug(msg, level):
+ if verbosity >= level:
+ sys.stderr.write(msg + '\n')
+
+ def format_query_field(k, v):
+ if k in ["nsfw", "self"]:
+ # even though documentation lists "no" and "yes"
+ # as possible values, in reality they don't work
+ if v not in [0, 1, "0", "1"]:
+ raise PRAWException("Invalid value for the extra"
+ "field {}. Only '0' and '1' are"
+ "valid values.".format(k))
+ return "{}:{}".format(k, v)
+ return "{}:'{}'".format(k, v)
+
+ if extra_cloudsearch_fields is None:
+ extra_cloudsearch_fields = {}
+
+ extra_query_part = " ".join(
+ [format_query_field(k, v) for (k, v)
+ in sorted(extra_cloudsearch_fields.items())]
+ )
+
+ if highest_timestamp is None:
+ highest_timestamp = int(time.time()) + REDDIT_TIMESTAMP_OFFSET
+ else:
+ highest_timestamp = int(highest_timestamp) + REDDIT_TIMESTAMP_OFFSET
+
+ if lowest_timestamp is not None:
+ lowest_timestamp = int(lowest_timestamp) + REDDIT_TIMESTAMP_OFFSET
+ elif not isinstance(subreddit, six.string_types):
+ lowest_timestamp = int(subreddit.created)
+ elif subreddit not in ("all", "contrib", "mod", "friend"):
+ lowest_timestamp = int(reddit_session.get_subreddit(subreddit).created)
+ else:
+ lowest_timestamp = 0
+
+ original_highest_timestamp = highest_timestamp
+ original_lowest_timestamp = lowest_timestamp
+
+ # When making timestamp:X..Y queries, reddit misses submissions
+ # inside X..Y range, but they can be found inside Y..Z range
+ # It is not clear what the value of Z should be, but it seems
+ # like the difference is usually about ~1 hour or less
+ # To be sure, let's set the workaround offset to 2 hours
+ out_of_order_submissions_workaround_offset = 7200
+ highest_timestamp += out_of_order_submissions_workaround_offset
+ lowest_timestamp -= out_of_order_submissions_workaround_offset
+
+ # Those parameters work ok, but there may be a better set of parameters
+ window_size = 60 * 60
+ search_limit = 100
+ min_search_results_in_window = 50
+ window_adjustment_ratio = 1.25
+ backoff = BACKOFF_START
+
+ processed_submissions = 0
+ prev_win_increased = False
+ prev_win_decreased = False
+
+ while highest_timestamp >= lowest_timestamp:
+ try:
+ if newest_first:
+ t1 = max(highest_timestamp - window_size, lowest_timestamp)
+ t2 = highest_timestamp
+ else:
+ t1 = lowest_timestamp
+ t2 = min(lowest_timestamp + window_size, highest_timestamp)
+
+ search_query = 'timestamp:{}..{}'.format(t1, t2)
+ if extra_query_part:
+ search_query = "(and {} {})".format(search_query,
+ extra_query_part)
+
+ debug(search_query, 3)
+ search_results = list(reddit_session.search(search_query,
+ subreddit=subreddit,
+ limit=search_limit,
+ syntax='cloudsearch',
+ sort='new'))
+
+ debug("Received {0} search results for query {1}"
+ .format(len(search_results), search_query),
+ 2)
+
+ backoff = BACKOFF_START
+ except HTTPException as exc:
+ debug("{0}. Sleeping for {1} seconds".format(exc, backoff), 2)
+ time.sleep(backoff)
+ backoff *= 2
+ continue
+
+ if len(search_results) >= search_limit:
+ power = 2 if prev_win_decreased else 1
+ window_size = int(window_size / window_adjustment_ratio**power)
+ prev_win_decreased = True
+ debug("Decreasing window size to {0} seconds".format(window_size),
+ 2)
+ # Since it is possible that there are more submissions
+ # in the current window, we have to re-do the request
+ # with reduced window
+ continue
+ else:
+ prev_win_decreased = False
+
+ search_results = [s for s in search_results
+ if original_lowest_timestamp <= s.created and
+ s.created <= original_highest_timestamp]
+
+ for submission in sorted(search_results,
+ key=attrgetter('created_utc', 'id'),
+ reverse=newest_first):
+ yield submission
+
+ processed_submissions += len(search_results)
+ debug('Total processed submissions: {}'
+ .format(processed_submissions), 1)
+
+ if newest_first:
+ highest_timestamp -= (window_size + 1)
+ else:
+ lowest_timestamp += (window_size + 1)
+
+ if len(search_results) < min_search_results_in_window:
+ power = 2 if prev_win_increased else 1
+ window_size = int(window_size * window_adjustment_ratio**power)
+ prev_win_increased = True
+ debug("Increasing window size to {0} seconds"
+ .format(window_size), 2)
+ else:
+ prev_win_increased = False
+
+
+def _stream_generator(get_function, limit=None, verbosity=1):
+ def debug(msg, level):
+ if verbosity >= level:
+ sys.stderr.write(msg + '\n')
+
+ def b36_id(item):
+ return int(item.id, 36)
+
+ seen = BoundedSet(KEEP_ITEMS * 16)
+ before = None
+ count = 0 # Count is incremented to bypass the cache
+ processed = 0
+ backoff = BACKOFF_START
+ while True:
+ items = []
+ sleep = None
+ start = timer()
+ try:
+ i = None
+ params = {'uniq': count}
+ count = (count + 1) % 100
+ if before:
+ params['before'] = before
+ gen = enumerate(get_function(limit=limit, params=params))
+ for i, item in gen:
+ if b36_id(item) in seen:
+ if i == 0:
+ if before is not None:
+ # reddit sent us out of order data -- log it
+ debug('(INFO) {0} already seen with before of {1}'
+ .format(item.fullname, before), 3)
+ before = None
+ break
+ if i == 0: # Always the first item in the generator
+ before = item.fullname
+ if b36_id(item) not in seen:
+ items.append(item)
+ processed += 1
+ if verbosity >= 1 and processed % 100 == 0:
+ sys.stderr.write(' Items: {0} \r'
+ .format(processed))
+ sys.stderr.flush()
+ if i < KEEP_ITEMS:
+ seen.add(b36_id(item))
+ else: # Generator exhausted
+ if i is None: # Generator yielded no items
+ assert before is not None
+ # Try again without before as the before item may be too
+ # old or no longer exist.
+ before = None
+ backoff = BACKOFF_START
+ except HTTPException as exc:
+ sleep = (backoff, '{0}. Sleeping for {{0}} seconds.'.format(exc),
+ 2)
+ backoff *= 2
+ # Provide rate limit
+ if verbosity >= 1:
+ rate = len(items) / (timer() - start)
+ sys.stderr.write(' Items: {0} ({1:.2f} ips) \r'
+ .format(processed, rate))
+ sys.stderr.flush()
+ # Yield items from oldest to newest
+ for item in items[::-1]:
+ yield item
+ # Sleep if necessary
+ if sleep:
+ sleep_time, msg, msg_level = sleep # pylint: disable=W0633
+ debug(msg.format(sleep_time), msg_level)
+ time.sleep(sleep_time)
+
+
+def chunk_sequence(sequence, chunk_length, allow_incomplete=True):
+ """Given a sequence, divide it into sequences of length `chunk_length`.
+
+ :param allow_incomplete: If True, allow final chunk to be shorter if the
+ given sequence is not an exact multiple of `chunk_length`.
+ If False, the incomplete chunk will be discarded.
+ """
+ (complete, leftover) = divmod(len(sequence), chunk_length)
+ if not allow_incomplete:
+ leftover = 0
+
+ chunk_count = complete + min(leftover, 1)
+
+ chunks = []
+ for x in range(chunk_count):
+ left = chunk_length * x
+ right = left + chunk_length
+ chunks.append(sequence[left:right])
+
+ return chunks
+
+
+def convert_id36_to_numeric_id(id36):
+ """Convert strings representing base36 numbers into an integer."""
+ if not isinstance(id36, six.string_types) or id36.count("_") > 0:
+ raise ValueError("must supply base36 string, not fullname (e.g. use "
+ "xxxxx, not t3_xxxxx)")
+ return int(id36, 36)
+
+
+def convert_numeric_id_to_id36(numeric_id):
+ """Convert an integer into its base36 string representation.
+
+ This method has been cleaned up slightly to improve readability. For more
+ info see:
+
+ https://github.com/reddit/reddit/blob/master/r2/r2/lib/utils/_utils.pyx
+
+ https://www.reddit.com/r/redditdev/comments/n624n/submission_ids_question/
+
+ https://en.wikipedia.org/wiki/Base36
+ """
+ # base36 allows negative numbers, but reddit does not
+ if not isinstance(numeric_id, six.integer_types) or numeric_id < 0:
+ raise ValueError("must supply a positive int/long")
+
+ # Alphabet used for base 36 conversion
+ alphabet = '0123456789abcdefghijklmnopqrstuvwxyz'
+ alphabet_len = len(alphabet)
+
+ # Temp assign
+ current_number = numeric_id
+ base36 = []
+
+ # Current_number must be greater than alphabet length to while/divmod
+ if 0 <= current_number < alphabet_len:
+ return alphabet[current_number]
+
+ # Break up into chunks
+ while current_number != 0:
+ current_number, rem = divmod(current_number, alphabet_len)
+ base36.append(alphabet[rem])
+
+ # String is built in reverse order
+ return ''.join(reversed(base36))
+
+
+def flatten_tree(tree, nested_attr='replies', depth_first=False):
+ """Return a flattened version of the passed in tree.
+
+ :param nested_attr: The attribute name that contains the nested items.
+ Defaults to ``replies`` which is suitable for comments.
+ :param depth_first: When true, add to the list in a depth-first manner
+ rather than the default breadth-first manner.
+
+ """
+ stack = deque(tree)
+ extend = stack.extend if depth_first else stack.extendleft
+ retval = []
+ while stack:
+ item = stack.popleft()
+ nested = getattr(item, nested_attr, None)
+ if nested:
+ extend(nested)
+ retval.append(item)
+ return retval
+
+
+def normalize_url(url):
+ """Return url after stripping trailing .json and trailing slashes."""
+ if url.endswith('.json'):
+ url = url[:-5]
+ if url.endswith('/'):
+ url = url[:-1]
+ return url
+
+
+class BoundedSet(object):
+ """A set with a maximum size that evicts the oldest items when necessary.
+
+ This class does not implement the complete set interface.
+
+ """
+
+ def __init__(self, max_items):
+ """Construct an instance of the BoundedSet."""
+ self.max_items = max_items
+ self._fifo = []
+ self._set = set()
+
+ def __contains__(self, item):
+ """Test if the BoundedSet contains item."""
+ return item in self._set
+
+ def add(self, item):
+ """Add an item to the set discarding the oldest item if necessary."""
+ if item in self._set:
+ self._fifo.remove(item)
+ elif len(self._set) == self.max_items:
+ self._set.remove(self._fifo.pop(0))
+ self._fifo.append(item)
+ self._set.add(item)
diff --git a/rtv/packages/praw/internal.py b/rtv/packages/praw/internal.py
new file mode 100644
index 0000000..1e8d36f
--- /dev/null
+++ b/rtv/packages/praw/internal.py
@@ -0,0 +1,271 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW. If not, see .
+
+"""Internal helper functions.
+
+The functions in this module are not to be relied upon by third-parties.
+
+"""
+
+from __future__ import print_function, unicode_literals
+import os
+import re
+import six
+import sys
+from requests import Request, codes, exceptions
+from requests.compat import urljoin
+from praw.decorators import restrict_access
+from praw.errors import (ClientException, HTTPException, Forbidden, NotFound,
+ InvalidSubreddit, OAuthException,
+ OAuthInsufficientScope, OAuthInvalidToken,
+ RedirectException)
+from warnings import warn
+try:
+ from OpenSSL import __version__ as _opensslversion
+ _opensslversionlist = [int(minor) if minor.isdigit() else minor
+ for minor in _opensslversion.split('.')]
+except ImportError:
+ _opensslversionlist = [0, 15]
+
+MIN_PNG_SIZE = 67
+MIN_JPEG_SIZE = 128
+MAX_IMAGE_SIZE = 512000
+JPEG_HEADER = b'\xff\xd8\xff'
+PNG_HEADER = b'\x89\x50\x4e\x47\x0d\x0a\x1a\x0a'
+RE_REDIRECT = re.compile('(rand(om|nsfw))|about/sticky')
+
+
+def _get_redditor_listing(subpath=''):
+ """Return function to generate Redditor listings."""
+ def _listing(self, sort='new', time='all', *args, **kwargs):
+ """Return a get_content generator for some RedditContentObject type.
+
+ :param sort: Specify the sort order of the results if applicable
+ (one of ``'hot'``, ``'new'``, ``'top'``, ``'controversial'``).
+ :param time: Specify the time-period to return submissions if
+ applicable (one of ``'hour'``, ``'day'``, ``'week'``,
+ ``'month'``, ``'year'``, ``'all'``).
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ kwargs.setdefault('params', {})
+ kwargs['params'].setdefault('sort', sort)
+ kwargs['params'].setdefault('t', time)
+ url = urljoin(self._url, subpath) # pylint: disable=W0212
+ return self.reddit_session.get_content(url, *args, **kwargs)
+ return _listing
+
+
+def _get_sorter(subpath='', **defaults):
+ """Return function to generate specific subreddit Submission listings."""
+ @restrict_access(scope='read')
+ def _sorted(self, *args, **kwargs):
+ """Return a get_content generator for some RedditContentObject type.
+
+ The additional parameters are passed directly into
+ :meth:`.get_content`. Note: the `url` parameter cannot be altered.
+
+ """
+ if not kwargs.get('params'):
+ kwargs['params'] = {}
+ for key, value in six.iteritems(defaults):
+ kwargs['params'].setdefault(key, value)
+ url = urljoin(self._url, subpath) # pylint: disable=W0212
+ return self.reddit_session.get_content(url, *args, **kwargs)
+ return _sorted
+
+
+def _image_type(image):
+ size = os.path.getsize(image.name)
+ if size < MIN_PNG_SIZE:
+ raise ClientException('png image is too small.')
+ if size > MAX_IMAGE_SIZE:
+ raise ClientException('`image` is too big. Max: {0} bytes'
+ .format(MAX_IMAGE_SIZE))
+ first_bytes = image.read(MIN_PNG_SIZE)
+ image.seek(0)
+ if first_bytes.startswith(PNG_HEADER):
+ return 'png'
+ elif first_bytes.startswith(JPEG_HEADER):
+ if size < MIN_JPEG_SIZE:
+ raise ClientException('jpeg image is too small.')
+ return 'jpg'
+ raise ClientException('`image` must be either jpg or png.')
+
+
+def _modify_relationship(relationship, unlink=False, is_sub=False):
+ """Return a function for relationship modification.
+
+ Used to support friending (user-to-user), as well as moderating,
+ contributor creating, and banning (user-to-subreddit).
+
+ """
+ # The API uses friend and unfriend to manage all of these relationships.
+ url_key = 'unfriend' if unlink else 'friend'
+
+ if relationship == 'friend':
+ access = {'scope': None, 'login': True}
+ elif relationship == 'moderator':
+ access = {'scope': 'modothers'}
+ elif relationship in ['banned', 'contributor', 'muted']:
+ access = {'scope': 'modcontributors'}
+ elif relationship in ['wikibanned', 'wikicontributor']:
+ access = {'scope': ['modcontributors', 'modwiki']}
+ else:
+ access = {'scope': None, 'mod': True}
+
+ @restrict_access(**access)
+ def do_relationship(thing, user, **kwargs):
+ data = {'name': six.text_type(user),
+ 'type': relationship}
+ data.update(kwargs)
+ if is_sub:
+ data['r'] = six.text_type(thing)
+ else:
+ data['container'] = thing.fullname
+
+ session = thing.reddit_session
+ if relationship == 'moderator':
+ session.evict(session.config['moderators'].format(
+ subreddit=six.text_type(thing)))
+ url = session.config[url_key]
+ return session.request_json(url, data=data)
+ return do_relationship
+
+
+def _prepare_request(reddit_session, url, params, data, auth, files,
+ method=None):
+ """Return a requests Request object that can be "prepared"."""
+ # Requests using OAuth for authorization must switch to using the oauth
+ # domain.
+ if getattr(reddit_session, '_use_oauth', False):
+ bearer = 'bearer {0}'.format(reddit_session.access_token)
+ headers = {'Authorization': bearer}
+ config = reddit_session.config
+ for prefix in (config.api_url, config.permalink_url):
+ if url.startswith(prefix):
+ if config.log_requests >= 1:
+ msg = 'substituting {0} for {1} in url\n'.format(
+ config.oauth_url, prefix)
+ sys.stderr.write(msg)
+ url = config.oauth_url + url[len(prefix):]
+ break
+ else:
+ headers = {}
+ headers.update(reddit_session.http.headers)
+
+ if method:
+ pass
+ elif data or files:
+ method = 'POST'
+ else:
+ method = 'GET'
+
+ # Log the request if logging is enabled
+ if reddit_session.config.log_requests >= 1:
+ sys.stderr.write('{0}: {1}\n'.format(method, url))
+ if reddit_session.config.log_requests >= 2:
+ if params:
+ sys.stderr.write('params: {0}\n'.format(params))
+ if data:
+ sys.stderr.write('data: {0}\n'.format(data))
+ if auth:
+ sys.stderr.write('auth: {0}\n'.format(auth))
+ # Prepare request
+ request = Request(method=method, url=url, headers=headers, params=params,
+ auth=auth, cookies=reddit_session.http.cookies)
+ if method == 'GET':
+ return request
+ # Most POST requests require adding `api_type` and `uh` to the data.
+ if data is True:
+ data = {}
+
+ if isinstance(data, dict):
+ if not auth:
+ data.setdefault('api_type', 'json')
+ if reddit_session.modhash:
+ data.setdefault('uh', reddit_session.modhash)
+ else:
+ request.headers.setdefault('Content-Type', 'application/json')
+
+ request.data = data
+ request.files = files
+ return request
+
+
+def _raise_redirect_exceptions(response):
+ """Return the new url or None if there are no redirects.
+
+ Raise exceptions if appropriate.
+
+ """
+ if response.status_code not in [301, 302, 307]:
+ return None
+ new_url = urljoin(response.url, response.headers['location'])
+ if 'reddits/search' in new_url: # Handle non-existent subreddit
+ subreddit = new_url.rsplit('=', 1)[1]
+ raise InvalidSubreddit('`{0}` is not a valid subreddit'
+ .format(subreddit))
+ elif not RE_REDIRECT.search(response.url):
+ raise RedirectException(response.url, new_url)
+ return new_url
+
+
+def _raise_response_exceptions(response):
+ """Raise specific errors on some status codes."""
+ if not response.ok and 'www-authenticate' in response.headers:
+ msg = response.headers['www-authenticate']
+ if 'insufficient_scope' in msg:
+ raise OAuthInsufficientScope('insufficient_scope', response.url)
+ elif 'invalid_token' in msg:
+ raise OAuthInvalidToken('invalid_token', response.url)
+ else:
+ raise OAuthException(msg, response.url)
+
+ if response.status_code == codes.forbidden: # pylint: disable=E1101
+ raise Forbidden(_raw=response)
+ elif response.status_code == codes.not_found: # pylint: disable=E1101
+ raise NotFound(_raw=response)
+ else:
+ try:
+ response.raise_for_status() # These should all be directly mapped
+ except exceptions.HTTPError as exc:
+ raise HTTPException(_raw=exc.response)
+
+
+def _to_reddit_list(arg):
+ """Return an argument converted to a reddit-formatted list.
+
+ The returned format is a comma deliminated list. Each element is a string
+ representation of an object. Either given as a string or as an object that
+ is then converted to its string representation.
+ """
+ if (isinstance(arg, six.string_types) or not (
+ hasattr(arg, "__getitem__") or hasattr(arg, "__iter__"))):
+ return six.text_type(arg)
+ else:
+ return ','.join(six.text_type(a) for a in arg)
+
+
+def _warn_pyopenssl():
+ """Warn the user against faulty versions of pyOpenSSL."""
+ if _opensslversionlist < [0, 15]: # versions >= 0.15 are fine
+ warn(RuntimeWarning(
+ "pyOpenSSL {0} may be incompatible with praw if validating"
+ "ssl certificates, which is on by default.\nSee https://"
+ "github.com/praw/pull/625 for more information".format(
+ _opensslversion)
+ ))
diff --git a/rtv/packages/praw/multiprocess.py b/rtv/packages/praw/multiprocess.py
new file mode 100644
index 0000000..3d5aad2
--- /dev/null
+++ b/rtv/packages/praw/multiprocess.py
@@ -0,0 +1,102 @@
+"""Provides a request server to be used with the multiprocess handler."""
+
+from __future__ import print_function, unicode_literals
+
+import socket
+import sys
+from optparse import OptionParser
+from praw import __version__
+from praw.handlers import DefaultHandler
+from requests import Session
+from six.moves import cPickle, socketserver # pylint: disable=F0401
+from threading import Lock
+
+
+class ThreadingTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
+    # pylint: disable=R0903,W0232
+    """A TCP server that creates new threads per connection."""
+
+    # Permit quick restarts on the same port (sets SO_REUSEADDR).
+    allow_reuse_address = True
+
+    @staticmethod
+    def handle_error(_, client_addr):
+        """Mute tracebacks of common errors."""
+        exc_type, exc_value, _ = sys.exc_info()
+        # errno 32 (broken pipe): the client disconnected mid-response.
+        if exc_type is socket.error and exc_value[0] == 32:
+            pass
+        elif exc_type is cPickle.UnpicklingError:
+            # Something other than a pickled RPC was sent to our port.
+            sys.stderr.write('Invalid connection from {0}\n'
+                             .format(client_addr[0]))
+        else:
+            # Anything unexpected should still surface.
+            raise
+
+
+class RequestHandler(socketserver.StreamRequestHandler):
+    # pylint: disable=W0232
+    """A class that handles incoming requests.
+
+    Requests to the same domain are cached and rate-limited.
+
+    """
+
+    # Class-level state is shared across all handler threads; the locks
+    # below guard the shared cache and rate-limit bookkeeping.
+    ca_lock = Lock()       # lock around cache and timeouts
+    cache = {}             # caches requests
+    http = Session()       # used to make requests
+    last_call = {}         # Stores a two-item list: [lock, previous_call_time]
+    rl_lock = Lock()       # lock used for adding items to last_call
+    timeouts = {}          # store the time items in cache were entered
+
+    do_evict = DefaultHandler.evict  # Add in the evict method
+
+    @staticmethod
+    def cache_hit_callback(key):
+        """Output when a cache hit occurs."""
+        print('HIT {0} {1}'.format('POST' if key[1][1] else 'GET', key[0]))
+
+    @DefaultHandler.with_cache
+    @DefaultHandler.rate_limit
+    def do_request(self, request, proxies, timeout, **_):
+        """Dispatch the actual request and return the result."""
+        print('{0} {1}'.format(request.method, request.url))
+        response = self.http.send(request, proxies=proxies, timeout=timeout,
+                                  allow_redirects=False)
+        response.raw = None  # Make pickleable
+        return response
+
+    def handle(self):
+        """Parse the RPC, make the call, and pickle up the return value."""
+        data = cPickle.load(self.rfile)  # pylint: disable=E1101
+        # The 'method' key selects which do_<method> handler to invoke; the
+        # remaining keys become its keyword arguments.
+        method = data.pop('method')
+        try:
+            retval = getattr(self, 'do_{0}'.format(method))(**data)
+        except Exception as e:
+            # All exceptions should be passed to the client
+            retval = e
+        cPickle.dump(retval, self.wfile,  # pylint: disable=E1101
+                     cPickle.HIGHEST_PROTOCOL)
+
+
+def run():
+    """The entry point from the praw-multiprocess utility.
+
+    Parses command line options (--addr/--port), starts a threaded TCP
+    server that serves praw requests until interrupted, and exits with
+    status 1 if the socket cannot be bound.
+    """
+    parser = OptionParser(version='%prog {0}'.format(__version__))
+    parser.add_option('-a', '--addr', default='localhost',
+                      help=('The address or host to listen on. Specify -a '
+                            '0.0.0.0 to listen on all addresses. '
+                            'Default: localhost'))
+    parser.add_option('-p', '--port', type='int', default='10101',
+                      help=('The port to listen for requests on. '
+                            'Default: 10101'))
+    options, _ = parser.parse_args()
+    try:
+        server = ThreadingTCPServer((options.addr, options.port),
+                                    RequestHandler)
+    except (socket.error, socket.gaierror) as exc:  # Handle bind errors
+        print(exc)
+        sys.exit(1)
+    print('Listening on {0} port {1}'.format(options.addr, options.port))
+    try:
+        server.serve_forever()  # pylint: disable=E1101
+    except KeyboardInterrupt:
+        # Graceful shutdown on Ctrl-C: close the listening socket and the
+        # shared requests session.
+        server.socket.close()  # pylint: disable=E1101
+        RequestHandler.http.close()
+        print('Goodbye!')
diff --git a/rtv/packages/praw/objects.py b/rtv/packages/praw/objects.py
new file mode 100644
index 0000000..13418bc
--- /dev/null
+++ b/rtv/packages/praw/objects.py
@@ -0,0 +1,2003 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Contains code about objects such as Submissions, Redditors or Comments.
+
+There are two main groups of objects in this file. The first are objects that
+correspond to a Thing or part of a Thing as specified in reddit's API overview,
+https://github.com/reddit/reddit/wiki/API. The second gives functionality that
+extends over multiple Things. An object that extends from Saveable indicates
+that it can be saved and unsaved in the context of a logged in user.
+"""
+
+from __future__ import print_function, unicode_literals
+import six
+from six.moves.urllib.parse import ( # pylint: disable=F0401
+ parse_qs, urlparse, urlunparse)
+from heapq import heappop, heappush
+from json import dumps
+from requests.compat import urljoin
+from warnings import warn, warn_explicit
+from praw import (AuthenticatedReddit as AR, ModConfigMixin as MCMix,
+ ModFlairMixin as MFMix, ModLogMixin as MLMix,
+ ModOnlyMixin as MOMix, ModSelfMixin as MSMix,
+ MultiredditMixin as MultiMix, PrivateMessagesMixin as PMMix,
+ SubmitMixin, SubscribeMixin, UnauthenticatedReddit as UR)
+from praw.decorators import (alias_function, limit_chars, restrict_access,
+ deprecated)
+from praw.errors import ClientException
+from praw.internal import (_get_redditor_listing, _get_sorter,
+ _modify_relationship)
+
+
+REDDITOR_KEYS = ('approved_by', 'author', 'banned_by', 'redditor',
+ 'revision_by')
+
+
+class RedditContentObject(object):
+    """Base class that represents actual reddit objects."""
+
+    @classmethod
+    def from_api_response(cls, reddit_session, json_dict):
+        """Return an instance of the appropriate class from the json_dict."""
+        return cls(reddit_session, json_dict=json_dict)
+
+    def __init__(self, reddit_session, json_dict=None, fetch=True,
+                 info_url=None, underscore_names=None, uniq=None):
+        """Create a new object from the dict of attributes returned by the API.
+
+        The fetch parameter specifies whether to retrieve the object's
+        information from the API (only matters when it isn't provided using
+        json_dict).
+
+        :param reddit_session: The session used to make API requests.
+        :param json_dict: Dict of attributes returned by the API, or None.
+        :param fetch: When True and json_dict is None, fetch the attributes
+            from ``info_url``.
+        :param info_url: URL to fetch this object's data from; defaults to
+            the session's 'info' config entry.
+        :param underscore_names: Names to store with a leading underscore
+            when populating attributes.
+        :param uniq: Token sent as the 'uniq' request parameter when
+            fetching (see ``_get_json_dict``).
+
+        """
+        self._info_url = info_url or reddit_session.config['info']
+        self.reddit_session = reddit_session
+        self._underscore_names = underscore_names
+        self._uniq = uniq
+        # _populate reports whether the object now holds data (or fetched).
+        self._has_fetched = self._populate(json_dict, fetch)
+
+    def __eq__(self, other):
+        """Return whether the other instance equals the current."""
+        # Equality is based solely on the reddit fullname (e.g. 't3_abc').
+        return (isinstance(other, RedditContentObject) and
+                self.fullname == other.fullname)
+
+    def __hash__(self):
+        """Return the hash of the current instance."""
+        return hash(self.fullname)
+
+    def __getattr__(self, attr):
+        """Return the value of the `attr` attribute.
+
+        Lazily fetches the full object from the API the first time a missing
+        attribute is accessed on a not-yet-fetched instance.
+        """
+        # Because this method may perform web requests, there are certain
+        # attributes we must blacklist to prevent accidental requests:
+        # __members__, __methods__: Caused by `dir(obj)` in Python 2.
+        # __setstate__: Caused by Pickle deserialization.
+        blacklist = ('__members__', '__methods__', '__setstate__')
+        if attr not in blacklist and not self._has_fetched:
+            self._has_fetched = self._populate(None, True)
+            return getattr(self, attr)
+        msg = '\'{0}\' has no attribute \'{1}\''.format(type(self), attr)
+        raise AttributeError(msg)
+
+    def __getstate__(self):
+        """Needed for `pickle`.
+
+        Without this, pickle protocol version 0 will make HTTP requests
+        upon serialization, hence slowing it down significantly.
+        """
+        return self.__dict__
+
+    def __ne__(self, other):
+        """Return whether the other instance differs from the current."""
+        return not self == other
+
+    def __reduce_ex__(self, _):
+        """Needed for `pickle`.
+
+        Without this, `pickle` protocol version 2 will make HTTP requests
+        upon serialization, hence slowing it down significantly.
+        """
+        return self.__reduce__()
+
+    def __setattr__(self, name, value):
+        """Set the `name` attribute to `value`."""
+        # Wrap raw API values in their rich types: 'subreddit' becomes a
+        # Subreddit and redditor-like keys become Redditor instances.
+        if value and name == 'subreddit':
+            value = Subreddit(self.reddit_session, value, fetch=False)
+        elif value and name in REDDITOR_KEYS:
+            if isinstance(value, bool):
+                pass
+            elif isinstance(value, dict):
+                value = Redditor(self.reddit_session, json_dict=value['data'])
+            elif not value or value == '[deleted]':
+                # NOTE(review): `not value` is unreachable here given the
+                # enclosing `value and` guard; kept verbatim from upstream.
+                value = None
+            else:
+                value = Redditor(self.reddit_session, value, fetch=False)
+        object.__setattr__(self, name, value)
+
+    def __str__(self):
+        """Return a string representation of the RedditContentObject."""
+        retval = self.__unicode__()
+        if not six.PY3:
+            # Python 2: encode the unicode representation to a byte string.
+            retval = retval.encode('utf-8')
+        return retval
+
+    def _get_json_dict(self):
+        # (disabled for entire function) pylint: disable=W0212
+
+        # OAuth handling needs to be special cased here. For instance, the user
+        # might be calling a method on a Subreddit object that requires first
+        # loading the information about the subreddit. This method should try
+        # to obtain the information in a scope-less manner unless either:
+        # a) The object is a WikiPage and the reddit_session has the `wikiread`
+        #    scope.
+        # b) The object is not a WikiPage and the reddit_session has the
+        #    `read` scope.
+        prev_use_oauth = self.reddit_session._use_oauth
+
+        wiki_page = isinstance(self, WikiPage)
+        scope = self.reddit_session.has_scope
+
+        self.reddit_session._use_oauth = wiki_page and scope('wikiread') or \
+            not wiki_page and scope('read')
+
+        try:
+            params = {'uniq': self._uniq} if self._uniq else {}
+            response = self.reddit_session.request_json(
+                self._info_url, params=params, as_objects=False)
+        finally:
+            # Always restore the session's previous OAuth mode.
+            self.reddit_session._use_oauth = prev_use_oauth
+        return response['data']
+
+    def _populate(self, json_dict, fetch):
+        """Set instance attributes from json_dict, fetching it if needed.
+
+        Returns True when the object holds data (non-empty dict or a fetch
+        was performed); used to initialize ``_has_fetched``.
+        """
+        if json_dict is None:
+            json_dict = self._get_json_dict() if fetch else {}
+
+        # Optionally retain the raw json for the caller's inspection.
+        if self.reddit_session.config.store_json_result is True:
+            self.json_dict = json_dict
+        else:
+            self.json_dict = None
+
+        # TODO: Remove this wikipagelisting hack
+        if isinstance(json_dict, list):
+            json_dict = {'_tmp': json_dict}
+
+        for name, value in six.iteritems(json_dict):
+            if self._underscore_names and name in self._underscore_names:
+                name = '_' + name
+            setattr(self, name, value)
+
+        self._post_populate(fetch)
+        return bool(json_dict) or fetch
+
+    def _post_populate(self, fetch):
+        """Called after populating the attributes of the instance."""
+
+    @property
+    def fullname(self):
+        """Return the object's fullname.
+
+        A fullname is an object's kind mapping like `t3` followed by an
+        underscore and the object's base36 id, e.g., `t1_c5s96e0`.
+
+        """
+        by_object = self.reddit_session.config.by_object
+        return '{0}_{1}'.format(by_object[self.__class__], self.id)
+
+    @property
+    @deprecated('``has_fetched`` will not be a public attribute in PRAW4.')
+    def has_fetched(self):
+        """Return whether the object has been fully fetched from reddit."""
+        return self._has_fetched
+
+
+class Moderatable(RedditContentObject):
+    """Interface for Reddit content objects that can be moderated."""
+
+    @restrict_access(scope='modposts')
+    def approve(self):
+        """Approve object.
+
+        This reverts a removal, resets the report counter, marks it with a
+        green check mark (only visible to other moderators) on the website view
+        and sets the approved_by attribute to the logged in user.
+
+        :returns: The json response from the server.
+
+        """
+        url = self.reddit_session.config['approve']
+        data = {'id': self.fullname}
+        response = self.reddit_session.request_json(url, data=data)
+        # Evict cached listings that this action invalidates.
+        urls = [self.reddit_session.config[x] for x in ['modqueue', 'spam']]
+        if isinstance(self, Submission):
+            urls += self.subreddit._listing_urls  # pylint: disable=W0212
+        self.reddit_session.evict(urls)
+        return response
+
+    @restrict_access(scope='modposts')
+    def distinguish(self, as_made_by='mod', sticky=False):
+        """Distinguish object as made by mod, admin or special.
+
+        Distinguished objects have a different author color. With Reddit
+        Enhancement Suite it is the background color that changes.
+
+        `sticky` argument only used for top-level Comments.
+
+        :returns: The json response from the server.
+
+        """
+        url = self.reddit_session.config['distinguish']
+        # The API expects 'yes' for the common mod case; 'admin'/'special'
+        # pass through unchanged.
+        data = {'id': self.fullname,
+                'how': 'yes' if as_made_by == 'mod' else as_made_by}
+        if isinstance(self, Comment) and self.is_root:
+            data['sticky'] = sticky
+        return self.reddit_session.request_json(url, data=data)
+
+    @restrict_access(scope='modposts')
+    def ignore_reports(self):
+        """Ignore future reports on this object.
+
+        This prevents future reports from causing notifications or appearing
+        in the various moderation listing. The report count will still
+        increment.
+
+        """
+        url = self.reddit_session.config['ignore_reports']
+        data = {'id': self.fullname}
+        return self.reddit_session.request_json(url, data=data)
+
+    @restrict_access(scope='modposts')
+    def remove(self, spam=False):
+        """Remove object. This is the moderator version of delete.
+
+        The object is removed from the subreddit listings and placed into the
+        spam listing. If spam is set to True, then the automatic spam filter
+        will try to remove objects with similar attributes in the future.
+
+        :returns: The json response from the server.
+
+        """
+        url = self.reddit_session.config['remove']
+        data = {'id': self.fullname,
+                'spam': 'True' if spam else 'False'}
+        response = self.reddit_session.request_json(url, data=data)
+        # Evict cached listings that this action invalidates.
+        urls = [self.reddit_session.config[x] for x in ['modqueue', 'spam']]
+        if isinstance(self, Submission) and hasattr(self, 'subreddit'):
+            urls += self.subreddit._listing_urls  # pylint: disable=W0212
+        self.reddit_session.evict(urls)
+        return response
+
+    def undistinguish(self):
+        """Remove mod, admin or special distinguishing on object.
+
+        :returns: The json response from the server.
+
+        """
+        return self.distinguish(as_made_by='no')
+
+    @restrict_access(scope='modposts')
+    def unignore_reports(self):
+        """Remove ignoring of future reports on this object.
+
+        Undoes 'ignore_reports'. Future reports will now cause notifications
+        and appear in the various moderation listings.
+
+        """
+        url = self.reddit_session.config['unignore_reports']
+        data = {'id': self.fullname}
+        return self.reddit_session.request_json(url, data=data)
+
+
+class Editable(RedditContentObject):
+    """Interface for Reddit content objects that can be edited and deleted."""
+
+    @restrict_access(scope='edit')
+    def delete(self):
+        """Delete this object.
+
+        :returns: The json response from the server.
+
+        """
+        url = self.reddit_session.config['del']
+        data = {'id': self.fullname}
+        response = self.reddit_session.request_json(url, data=data)
+        # The user's cached overview no longer reflects reality.
+        self.reddit_session.evict(self.reddit_session.config['user'])
+        return response
+
+    @restrict_access(scope='edit')
+    def edit(self, text):
+        """Replace the body of the object with `text`.
+
+        :param text: The new body text.
+        :returns: The updated object.
+
+        """
+        url = self.reddit_session.config['edit']
+        data = {'thing_id': self.fullname,
+                'text': text}
+        response = self.reddit_session.request_json(url, data=data)
+        self.reddit_session.evict(self.reddit_session.config['user'])
+        # The API returns a listing; the single updated thing is first.
+        return response['data']['things'][0]
+
+
+class Gildable(RedditContentObject):
+    """Interface for RedditContentObjects that can be gilded."""
+
+    @restrict_access(scope='creddits', oauth_only=True)
+    def gild(self, months=None):
+        """Gild the Redditor or author of the content.
+
+        :param months: Specifies the number of months to gild. This parameter
+            is only valid when the instance called upon is of type
+            Redditor. When not provided, the value defaults to 1.
+        :returns: True on success, otherwise raises an exception.
+
+        """
+        if isinstance(self, Redditor):
+            # Gilding a user: months must be within the API's 1-36 range.
+            months = int(months) if months is not None else 1
+            if months < 1:
+                raise TypeError('months must be at least 1')
+            if months > 36:
+                raise TypeError('months must be no more than 36')
+            response = self.reddit_session.request(
+                self.reddit_session.config['gild_user'].format(
+                    username=six.text_type(self)), data={'months': months})
+        elif months is not None:
+            raise TypeError('months is not a valid parameter for {0}'
+                            .format(type(self)))
+        else:
+            # Gilding a thing (comment/submission) by fullname.
+            response = self.reddit_session.request(
+                self.reddit_session.config['gild_thing']
+                .format(fullname=self.fullname), data=True)
+        return response.status_code == 200
+
+
+class Hideable(RedditContentObject):
+    """Interface for objects that can be hidden."""
+
+    def hide(self, _unhide=False):
+        """Hide object in the context of the logged in user.
+
+        :param _unhide: If True, unhide the item instead. Use
+            :meth:`~praw.objects.Hideable.unhide` instead of setting this
+            manually.
+
+        :returns: The json response from the server.
+
+        """
+        # Delegates to the session, which owns the actual API call.
+        return self.reddit_session.hide(self.fullname, _unhide=_unhide)
+
+    def unhide(self):
+        """Unhide object in the context of the logged in user.
+
+        :returns: The json response from the server.
+
+        """
+        return self.hide(_unhide=True)
+
+
+class Inboxable(RedditContentObject):
+    """Interface for objects that appear in the inbox (orangereds)."""
+
+    def mark_as_read(self):
+        """Mark object as read.
+
+        :returns: The json response from the server.
+
+        """
+        return self.reddit_session._mark_as_read([self.fullname])
+
+    def mark_as_unread(self):
+        """Mark object as unread.
+
+        :returns: The json response from the server.
+
+        """
+        return self.reddit_session._mark_as_read([self.fullname], unread=True)
+
+    def reply(self, text):
+        """Reply to object with the specified text.
+
+        :param text: The markdown body of the reply.
+        :returns: A Comment object for the newly created comment (reply).
+
+        """
+        # pylint: disable=W0212
+        response = self.reddit_session._add_comment(self.fullname, text)
+        # pylint: enable=W0212
+        # Evict the cached listings the new reply invalidates: always the
+        # inbox, plus the parent submission or the sent box as appropriate.
+        urls = [self.reddit_session.config['inbox']]
+        if isinstance(self, Comment):
+            urls.append(self.submission._api_link)  # pylint: disable=W0212
+        elif isinstance(self, Message):
+            urls.append(self.reddit_session.config['sent'])
+        self.reddit_session.evict(urls)
+        return response
+
+
+class Messageable(RedditContentObject):
+    """Interface for RedditContentObjects that can be messaged."""
+
+    # Pairs of (method name, mixin class) -- presumably consumed by
+    # machinery elsewhere in the package to attach `send_message` from
+    # PrivateMessagesMixin; confirm against the module that reads _methods.
+    _methods = (('send_message', PMMix),)
+
+
+class Refreshable(RedditContentObject):
+    """Interface for objects that can be refreshed."""
+
+    def refresh(self):
+        """Re-query to update object with latest values. Return the object.
+
+        Any listing, such as the submissions on a subreddits top page, will
+        automatically be refreshed serverside. Refreshing a submission will
+        also refresh all its comments.
+
+        In the rare case of a comment being deleted or removed when it had
+        no replies, a second request will be made, not all information will
+        be updated and a warning will list the attributes that could not be
+        retrieved if there were any.
+
+        """
+        # A fresh unique counter value forces a cache-busting request.
+        unique = self.reddit_session._unique_count  # pylint: disable=W0212
+        self.reddit_session._unique_count += 1  # pylint: disable=W0212
+
+        # Each concrete type knows how to re-fetch itself; build `other`
+        # and then adopt its attribute dict wholesale.
+        if isinstance(self, Redditor):
+            other = Redditor(self.reddit_session, self._case_name, fetch=True,
+                             uniq=unique)
+        elif isinstance(self, Comment):
+            sub = Submission.from_url(self.reddit_session, self.permalink,
+                                      params={'uniq': unique})
+            if sub.comments:
+                other = sub.comments[0]
+            else:
+                # comment is "specially deleted", a reddit inconsistency;
+                # see #519, #524, #535, #537, and #552 it needs to be
+                # retrieved via /api/info, but that's okay since these
+                # specially deleted comments always have the same json
+                # structure. The unique count needs to be updated
+                # in case the comment originally came from /api/info
+                msg = ("Comment {0} was deleted or removed, and had "
+                       "no replies when such happened, so a second "
+                       "request was made to /api/info.".format(self.name))
+                unique = self.reddit_session._unique_count
+                self.reddit_session._unique_count += 1
+                other = self.reddit_session.get_info(thing_id=self.name,
+                                                     params={'uniq': unique})
+                # Report any attributes the fallback could not provide.
+                oldkeys = set(self.__dict__.keys())
+                newkeys = set(other.__dict__.keys())
+                keydiff = ", ".join(oldkeys - newkeys)
+                if keydiff:
+                    msg += "\nCould not retrieve:\n{0}".format(keydiff)
+                self.__dict__.update(other.__dict__)  # pylint: disable=W0201
+                warn(msg, RuntimeWarning)
+                return self
+        elif isinstance(self, Multireddit):
+            other = Multireddit(self.reddit_session, author=self._author,
+                                name=self.name, uniq=unique, fetch=True)
+        elif isinstance(self, Submission):
+            params = self._params.copy()
+            params['uniq'] = unique
+            other = Submission.from_url(self.reddit_session, self.permalink,
+                                        comment_sort=self._comment_sort,
+                                        params=params)
+        elif isinstance(self, Subreddit):
+            other = Subreddit(self.reddit_session, self._case_name, fetch=True,
+                              uniq=unique)
+        elif isinstance(self, WikiPage):
+            other = WikiPage(self.reddit_session,
+                             six.text_type(self.subreddit), self.page,
+                             fetch=True, uniq=unique)
+
+        self.__dict__ = other.__dict__  # pylint: disable=W0201
+        return self
+
+
+class Reportable(RedditContentObject):
+    """Interface for RedditContentObjects that can be reported."""
+
+    @restrict_access(scope='report')
+    def report(self, reason=None):
+        """Report this object to the moderators.
+
+        :param reason: The user-supplied reason for reporting a comment
+            or submission. Default: None (blank reason)
+        :returns: The json response from the server.
+
+        """
+        url = self.reddit_session.config['report']
+        data = {'id': self.fullname}
+        if reason:
+            data['reason'] = reason
+        response = self.reddit_session.request_json(url, data=data)
+        # Reported objects are automatically hidden as well
+        # pylint: disable=W0212
+        self.reddit_session.evict(
+            [self.reddit_session.config['user'],
+             urljoin(self.reddit_session.user._url, 'hidden')])
+        # pylint: enable=W0212
+        return response
+
+
+class Saveable(RedditContentObject):
+    """Interface for RedditContentObjects that can be saved."""
+
+    @restrict_access(scope='save')
+    def save(self, unsave=False):
+        """Save the object.
+
+        :param unsave: When True, unsave instead (used by :meth:`unsave`).
+        :returns: The json response from the server.
+
+        """
+        # Save and unsave share one implementation; only the endpoint and
+        # the 'executed' marker differ.
+        url = self.reddit_session.config['unsave' if unsave else 'save']
+        data = {'id': self.fullname,
+                'executed': 'unsaved' if unsave else 'saved'}
+        response = self.reddit_session.request_json(url, data=data)
+        self.reddit_session.evict(self.reddit_session.config['saved'])
+        return response
+
+    def unsave(self):
+        """Unsave the object.
+
+        :returns: The json response from the server.
+
+        """
+        return self.save(unsave=True)
+
+
+class Voteable(RedditContentObject):
+    """Interface for RedditContentObjects that can be voted on."""
+
+    def clear_vote(self):
+        """Remove the logged in user's vote on the object.
+
+        Running this on an object with no existing vote has no adverse effects.
+
+        Note: votes must be cast by humans. That is, API clients proxying a
+        human's action one-for-one are OK, but bots deciding how to vote on
+        content or amplifying a human's vote are not. See the reddit rules for
+        more details on what constitutes vote cheating.
+
+        Source for note: http://www.reddit.com/dev/api#POST_api_vote
+
+        :returns: The json response from the server.
+
+        """
+        # Direction 0 clears any existing vote.
+        return self.vote()
+
+    def downvote(self):
+        """Downvote object. If there already is a vote, replace it.
+
+        Note: votes must be cast by humans. That is, API clients proxying a
+        human's action one-for-one are OK, but bots deciding how to vote on
+        content or amplifying a human's vote are not. See the reddit rules for
+        more details on what constitutes vote cheating.
+
+        Source for note: http://www.reddit.com/dev/api#POST_api_vote
+
+        :returns: The json response from the server.
+
+        """
+        return self.vote(direction=-1)
+
+    def upvote(self):
+        """Upvote object. If there already is a vote, replace it.
+
+        Note: votes must be cast by humans. That is, API clients proxying a
+        human's action one-for-one are OK, but bots deciding how to vote on
+        content or amplifying a human's vote are not. See the reddit rules for
+        more details on what constitutes vote cheating.
+
+        Source for note: http://www.reddit.com/dev/api#POST_api_vote
+
+        :returns: The json response from the server.
+
+        """
+        return self.vote(direction=1)
+
+    @restrict_access(scope='vote')
+    def vote(self, direction=0):
+        """Vote for the given item in the direction specified.
+
+        Note: votes must be cast by humans. That is, API clients proxying a
+        human's action one-for-one are OK, but bots deciding how to vote on
+        content or amplifying a human's vote are not. See the reddit rules for
+        more details on what constitutes vote cheating.
+
+        Source for note: http://www.reddit.com/dev/api#POST_api_vote
+
+        :param direction: 1 for an upvote, -1 for a downvote, 0 to clear.
+        :returns: The json response from the server.
+
+        """
+        url = self.reddit_session.config['vote']
+        data = {'id': self.fullname,
+                'dir': six.text_type(direction)}
+        if self.reddit_session.user:
+            # The user's liked/disliked listings are now stale.
+            # pylint: disable=W0212
+            urls = [urljoin(self.reddit_session.user._url, 'disliked'),
+                    urljoin(self.reddit_session.user._url, 'liked')]
+            # pylint: enable=W0212
+            self.reddit_session.evict(urls)
+        return self.reddit_session.request_json(url, data=data)
+
+
+class Comment(Editable, Gildable, Inboxable, Moderatable, Refreshable,
+              Reportable, Saveable, Voteable):
+    """A class that represents a reddit comment."""
+
+    def __init__(self, reddit_session, json_dict):
+        """Construct an instance of the Comment object."""
+        super(Comment, self).__init__(reddit_session, json_dict,
+                                      underscore_names=['replies'])
+        # Comments coming from the inbox carry 'was_comment'; their replies
+        # are not included, so they must be fetched on demand.
+        self._has_fetched_replies = not hasattr(self, 'was_comment')
+        if self._replies:
+            self._replies = self._replies['data']['children']
+        elif self._replies == '':  # Comment tree was built and there are none
+            self._replies = []
+        else:
+            self._replies = None
+        self._submission = None
+
+    @limit_chars
+    def __unicode__(self):
+        """Return a string representation of the comment."""
+        return getattr(self, 'body', '[Unloaded Comment]')
+
+    @property
+    def _fast_permalink(self):
+        """Return the short permalink to the comment."""
+        if hasattr(self, 'link_id'):  # from /r or /u comments page
+            sid = self.link_id.split('_')[1]
+        else:  # from user's /message page
+            sid = self.context.split('/')[4]
+        return urljoin(self.reddit_session.config['comments'], '{0}/_/{1}'
+                       .format(sid, self.id))
+
+    def _update_submission(self, submission):
+        """Submission isn't set on __init__ thus we need to update it."""
+        # Register this comment (and recursively its replies) in the
+        # submission's by-id index.
+        submission._comments_by_id[self.name] = self  # pylint: disable=W0212
+        self._submission = submission
+        if self._replies:
+            for reply in self._replies:
+                reply._update_submission(submission)  # pylint: disable=W0212
+
+    @property
+    def is_root(self):
+        """Return True when the comment is a top level comment."""
+        # A root comment's parent_id points at a submission (t3_...) rather
+        # than another comment.
+        sub_prefix = self.reddit_session.config.by_object[Submission]
+        return self.parent_id.startswith(sub_prefix)
+
+    @property
+    def permalink(self):
+        """Return a permalink to the comment."""
+        return urljoin(self.submission.permalink, self.id)
+
+    @property
+    def replies(self):
+        """Return a list of the comment replies to this comment.
+
+        If the comment is not from a submission, :meth:`replies` will
+        always be an empty list unless you call :meth:`refresh()
+        before calling :meth:`replies` due to a limitation in
+        reddit's API.
+
+        """
+        if self._replies is None or not self._has_fetched_replies:
+            response = self.reddit_session.request_json(self._fast_permalink)
+            if response[1]['data']['children']:
+                # pylint: disable=W0212
+                self._replies = response[1]['data']['children'][0]._replies
+            else:
+                # comment is "specially deleted", a reddit inconsistency;
+                # see #519, #524, #535, #537, and #552 it needs to be
+                # retrieved via /api/info, but that's okay since these
+                # specially deleted comments always have the same json
+                # structure.
+                msg = ("Comment {0} was deleted or removed, and had "
+                       "no replies when such happened, so it still "
+                       "has no replies".format(self.name))
+                warn(msg, RuntimeWarning)
+                self._replies = []
+            # pylint: enable=W0212
+            self._has_fetched_replies = True
+            # Set the submission object if it is not set.
+            if not self._submission:
+                self._submission = response[0]['data']['children'][0]
+        return self._replies
+
+    @property
+    def submission(self):
+        """Return the Submission object this comment belongs to."""
+        if not self._submission:  # Comment not from submission
+            self._submission = self.reddit_session.get_submission(
+                url=self._fast_permalink)
+        return self._submission
+
+
+class Message(Inboxable):
+    """A class for private messages."""
+
+    @staticmethod
+    @restrict_access(scope='privatemessages')
+    def from_id(reddit_session, message_id, *args, **kwargs):
+        """Request the url for a Message and return a Message object.
+
+        :param reddit_session: The session to make the request with.
+        :param message_id: The ID of the message to request.
+        :returns: The Message whose id matches, searching the thread root
+            and its replies; implicitly None when no match is found.
+
+        The additional parameters are passed directly into
+        :meth:`.request_json`.
+
+        """
+        # Reduce fullname to ID if necessary
+        message_id = message_id.split('_', 1)[-1]
+        url = reddit_session.config['message'].format(messageid=message_id)
+        message_info = reddit_session.request_json(url, *args, **kwargs)
+        message = message_info['data']['children'][0]
+
+        # Messages are received as a listing such that
+        # the first item is always the thread's root.
+        # The ID requested by the user may be a child.
+        if message.id == message_id:
+            return message
+        for child in message.replies:
+            if child.id == message_id:
+                return child
+
+    def __init__(self, reddit_session, json_dict):
+        """Construct an instance of the Message object."""
+        super(Message, self).__init__(reddit_session, json_dict)
+        # Normalize replies to a plain list of Message objects.
+        if self.replies:  # pylint: disable=E0203
+            self.replies = self.replies['data']['children']
+        else:
+            self.replies = []
+
+    @limit_chars
+    def __unicode__(self):
+        """Return a string representation of the Message."""
+        return 'From: {0}\nSubject: {1}\n\n{2}'.format(self.author,
+                                                       self.subject, self.body)
+
+    @restrict_access(scope='privatemessages')
+    def collapse(self):
+        """Collapse a private message or modmail."""
+        url = self.reddit_session.config['collapse_message']
+        self.reddit_session.request_json(url, data={'id': self.name})
+
+    @restrict_access(scope='modcontributors')
+    def mute_modmail_author(self, _unmute=False):
+        """Mute the sender of this modmail message.
+
+        :param _unmute: Unmute the user instead. Please use
+            :meth:`unmute_modmail_author` instead of setting this directly.
+
+        """
+        path = 'unmute_sender' if _unmute else 'mute_sender'
+        return self.reddit_session.request_json(
+            self.reddit_session.config[path], data={'id': self.fullname})
+
+    @restrict_access(scope='privatemessages')
+    def uncollapse(self):
+        """Uncollapse a private message or modmail."""
+        url = self.reddit_session.config['uncollapse_message']
+        self.reddit_session.request_json(url, data={'id': self.name})
+
+    def unmute_modmail_author(self):
+        """Unmute the sender of this modmail message."""
+        return self.mute_modmail_author(_unmute=True)
+
+
+class MoreComments(RedditContentObject):
+    """A class indicating there are more comments."""
+
+    def __init__(self, reddit_session, json_dict):
+        """Construct an instance of the MoreComments object."""
+        super(MoreComments, self).__init__(reddit_session, json_dict)
+        self.submission = None
+        self._comments = None
+
+    def __lt__(self, other):
+        """Provide a sort order on the MoreComments object."""
+        # To work with heapq a "smaller" item is the one with the most comments
+        # We are intentionally making the biggest element the smallest element
+        # to turn the min-heap implementation in heapq into a max-heap
+        # implementation for Submission.replace_more_comments()
+        return self.count > other.count
+
+    def __unicode__(self):
+        """Return a string representation of the MoreComments object."""
+        return '[More Comments: {0}]'.format(self.count)
+
+    def _continue_comments(self, update):
+        """Handle the 'continue this thread' case by re-fetching the parent.
+
+        Fetches the parent comment's subtree as its own submission view and
+        returns the parent's replies.
+        """
+        assert len(self.children) > 0
+        tmp = self.reddit_session.get_submission(urljoin(
+            self.submission.permalink, self.parent_id.split('_', 1)[1]))
+        assert len(tmp.comments) == 1
+        self._comments = tmp.comments[0].replies
+        if update:
+            for comment in self._comments:
+                # pylint: disable=W0212
+                comment._update_submission(self.submission)
+                # pylint: enable=W0212
+        return self._comments
+
+    def _update_submission(self, submission):
+        """Record the submission this placeholder belongs to."""
+        self.submission = submission
+
+    def comments(self, update=True):
+        """Fetch and return the comments for a single MoreComments object."""
+        if not self._comments:
+            if self.count == 0:  # Handle 'continue this thread' type
+                return self._continue_comments(update)
+            # Only request children not already present on the submission.
+            # pylint: disable=W0212
+            children = [x for x in self.children if 't1_{0}'.format(x)
+                        not in self.submission._comments_by_id]
+            # pylint: enable=W0212
+            if not children:
+                return None
+            data = {'children': ','.join(children),
+                    'link_id': self.submission.fullname,
+                    'r': str(self.submission.subreddit)}
+
+            # pylint: disable=W0212
+            if self.submission._comment_sort:
+                data['where'] = self.submission._comment_sort
+            # pylint: enable=W0212
+            url = self.reddit_session.config['morechildren']
+            response = self.reddit_session.request_json(url, data=data)
+            self._comments = response['data']['things']
+            if update:
+                for comment in self._comments:
+                    # pylint: disable=W0212
+                    comment._update_submission(self.submission)
+                    # pylint: enable=W0212
+        return self._comments
+
+
class Redditor(Gildable, Messageable, Refreshable):
    """A class representing the users of reddit."""

    # Methods mixed in dynamically from the listed mixin classes.
    _methods = (('get_multireddit', MultiMix), ('get_multireddits', MultiMix))

    get_comments = _get_redditor_listing('comments')
    get_overview = _get_redditor_listing('')
    get_submitted = _get_redditor_listing('submitted')

    def __init__(self, reddit_session, user_name=None, json_dict=None,
                 fetch=False, **kwargs):
        """Construct an instance of the Redditor object.

        :param user_name: The redditor's name. When omitted, it is taken
            from ``json_dict['name']``.
        """
        if not user_name:
            user_name = json_dict['name']
        info_url = reddit_session.config['user_about'].format(user=user_name)
        # name is set before calling the parent constructor so that the
        # json_dict 'name' attribute (if available) has precedence
        self._case_name = user_name
        super(Redditor, self).__init__(reddit_session, json_dict,
                                       fetch, info_url, **kwargs)
        self.name = self._case_name
        self._url = reddit_session.config['user'].format(user=self.name)
        # Lazily populated by LoggedInRedditor.get_cached_moderated_reddits.
        self._mod_subs = None

    def __repr__(self):
        """Return a code representation of the Redditor."""
        return 'Redditor(user_name=\'{0}\')'.format(self.name)

    def __unicode__(self):
        """Return a string representation of the Redditor."""
        return self.name

    def _post_populate(self, fetch):
        """Swap in the caller-supplied casing of ``name`` after a fetch."""
        if fetch:
            # Maintain a consistent `name` until the user
            # explicitly calls `redditor.refresh()`
            tmp = self._case_name
            self._case_name = self.name
            self.name = tmp

    @restrict_access(scope='subscribe')
    def friend(self, note=None, _unfriend=False):
        """Friend the user.

        :param note: A personal note about the user. Requires reddit Gold.
        :param _unfriend: Unfriend the user. Please use :meth:`unfriend`
            instead of setting this parameter manually.

        :returns: The json response from the server.

        """
        self.reddit_session.evict(self.reddit_session.config['friends'])

        # Requests through password auth use /api/friend
        # Requests through oauth use /api/v1/me/friends/{username}
        if not self.reddit_session.is_oauth_session():
            modifier = _modify_relationship('friend', unlink=_unfriend)
            data = {'note': note} if note else {}
            return modifier(self.reddit_session.user, self, **data)

        url = self.reddit_session.config['friend_v1'].format(user=self.name)
        # This endpoint wants the data to be a string instead of an actual
        # dictionary, although it is not required to have any content for adds.
        # Unfriending does require the 'id' key.
        if _unfriend:
            data = {'id': self.name}
        else:
            # We cannot send a null or empty note string.
            data = {'note': note} if note else {}
        data = dumps(data)
        method = 'DELETE' if _unfriend else 'PUT'
        return self.reddit_session.request_json(url, data=data, method=method)

    def get_disliked(self, *args, **kwargs):
        """Return a listing of the Submissions the user has downvoted.

        This method points to :meth:`get_downvoted`, as the "disliked" name
        is being phased out.
        """
        return self.get_downvoted(*args, **kwargs)

    def get_downvoted(self, *args, **kwargs):
        """Return a listing of the Submissions the user has downvoted.

        :returns: get_content generator of Submission items.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        As a default, this listing is only accessible by the user. Thereby
        requiring either user/pswd authentication or OAuth authentication with
        the 'history' scope. Users may choose to make their voting record
        public by changing a user preference. In this case, no authentication
        will be needed to access this listing.

        """
        # Sending an OAuth authenticated request for a redditor, who isn't the
        # authenticated user. But who has a public voting record will be
        # successful.
        kwargs['_use_oauth'] = self.reddit_session.is_oauth_session()
        return _get_redditor_listing('downvoted')(self, *args, **kwargs)

    @restrict_access(scope='mysubreddits')
    def get_friend_info(self):
        """Return information about this friend, including personal notes.

        The personal note can be added or overwritten with :meth:friend, but
        only if the user has reddit Gold.

        :returns: The json response from the server.

        """
        url = self.reddit_session.config['friend_v1'].format(user=self.name)
        data = {'id': self.name}
        return self.reddit_session.request_json(url, data=data, method='GET')

    def get_liked(self, *args, **kwargs):
        """Return a listing of the Submissions the user has upvoted.

        This method points to :meth:`get_upvoted`, as the "liked" name
        is being phased out.
        """
        return self.get_upvoted(*args, **kwargs)

    def get_upvoted(self, *args, **kwargs):
        """Return a listing of the Submissions the user has upvoted.

        :returns: get_content generator of Submission items.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` parameter cannot be altered.

        As a default, this listing is only accessible by the user. Thereby
        requiring either user/pswd authentication or OAuth authentication
        with the 'history' scope. Users may choose to make their voting record
        public by changing a user preference. In this case, no authentication
        will be needed to access this listing.

        """
        kwargs['_use_oauth'] = self.reddit_session.is_oauth_session()
        return _get_redditor_listing('upvoted')(self, *args, **kwargs)

    def mark_as_read(self, messages, unread=False):
        """Mark message(s) as read or unread.

        :param messages: A single Inboxable object or an iterable of them.
        :param unread: When True, mark the messages as unread instead.

        :returns: The json response from the server.

        :raises ClientException: If any supplied item is not an Inboxable.

        """
        ids = []
        if isinstance(messages, Inboxable):
            ids.append(messages.fullname)
        elif hasattr(messages, '__iter__'):
            for msg in messages:
                if not isinstance(msg, Inboxable):
                    msg = 'Invalid message type: {0}'.format(type(msg))
                    raise ClientException(msg)
                ids.append(msg.fullname)
        else:
            msg = 'Invalid message type: {0}'.format(type(messages))
            raise ClientException(msg)
        # pylint: disable=W0212
        retval = self.reddit_session._mark_as_read(ids, unread=unread)
        # pylint: enable=W0212
        return retval

    def unfriend(self):
        """Unfriend the user.

        :returns: The json response from the server.

        """
        return self.friend(_unfriend=True)
+
+
class LoggedInRedditor(Redditor):
    """A class representing a currently logged in Redditor."""

    # These listings are private to the account, hence the 'history' scope.
    get_hidden = restrict_access('history')(_get_redditor_listing('hidden'))
    get_saved = restrict_access('history')(_get_redditor_listing('saved'))

    def get_blocked(self):
        """Return a UserList of Redditors with whom the user has blocked."""
        url = self.reddit_session.config['blocked']
        return self.reddit_session.request_json(url)

    def get_cached_moderated_reddits(self):
        """Return a cached dictionary of the user's moderated reddits.

        This list is used internally. Consider using the `get_my_moderation`
        function instead.

        The dict maps the lowercased subreddit name to its Subreddit object,
        plus the special 'mod' pseudo-subreddit.
        """
        if self._mod_subs is None:
            self._mod_subs = {'mod': self.reddit_session.get_subreddit('mod')}
            for sub in self.reddit_session.get_my_moderation(limit=None):
                self._mod_subs[six.text_type(sub).lower()] = sub
        return self._mod_subs

    @deprecated('``get_friends`` has been moved to '
                ':class:`praw.AuthenticatedReddit` and will be removed from '
                ':class:`objects.LoggedInRedditor` in PRAW v4.0.0')
    def get_friends(self, **params):
        """Return a UserList of Redditors with whom the user is friends.

        This method has been moved to :class:`praw.AuthenticatedReddit`.

        """
        return self.reddit_session.get_friends(**params)
+
+
class ModAction(RedditContentObject):
    """A moderator action."""

    def __init__(self, reddit_session, json_dict=None, fetch=False):
        """Construct an instance of the ModAction object."""
        super(ModAction, self).__init__(reddit_session, json_dict, fetch)

    def __unicode__(self):
        """Return a string representation of the moderator action."""
        return 'Action: {0}'.format(self.action)
+
+
class Submission(Editable, Gildable, Hideable, Moderatable, Refreshable,
                 Reportable, Saveable, Voteable):
    """A class for submissions to reddit."""

    _methods = (('select_flair', AR),)

    @staticmethod
    def _extract_more_comments(tree):
        """Return a list of MoreComments objects removed from tree.

        Performs a breadth-first walk of the tree, detaching every
        MoreComments node and collecting them into a heap (largest count
        first, per MoreComments.__lt__).
        """
        more_comments = []
        queue = [(None, x) for x in tree]
        while len(queue) > 0:
            parent, comm = queue.pop(0)
            if isinstance(comm, MoreComments):
                heappush(more_comments, comm)
                if parent:
                    parent.replies.remove(comm)
                else:
                    tree.remove(comm)
            else:
                for item in comm.replies:
                    queue.append((comm, item))
        return more_comments

    @staticmethod
    def from_id(reddit_session, subreddit_id):
        """Return an edit-only submission object based on the id.

        NOTE: despite its name, ``subreddit_id`` is the submission's base36
        id; it is used both as the object's id and to build the permalink.
        """
        pseudo_data = {'id': subreddit_id,
                       'permalink': '/comments/{0}'.format(subreddit_id)}
        return Submission(reddit_session, pseudo_data)

    @staticmethod
    def from_json(json_response):
        """Return a submission object from the json response."""
        submission = json_response[0]['data']['children'][0]
        submission.comments = json_response[1]['data']['children']
        return submission

    @staticmethod
    @restrict_access(scope='read')
    def from_url(reddit_session, url, comment_limit=0, comment_sort=None,
                 comments_only=False, params=None):
        """Request the url and return a Submission object.

        :param reddit_session: The session to make the request with.
        :param url: The url to build the Submission object from.
        :param comment_limit: The desired number of comments to fetch. If <= 0
            fetch the default number for the session's user. If None, fetch the
            maximum possible.
        :param comment_sort: The sort order for retrieved comments. When None
            use the default for the session's user.
        :param comments_only: Return only the list of comments.
        :param params: dictionary containing extra GET data to put in the url.

        """
        if params is None:
            params = {}

        # Fold any query-string parameters on the url into `params`, then
        # strip the url down to scheme/netloc/path.
        parsed = urlparse(url)
        query_pairs = parse_qs(parsed.query)
        get_params = dict((k, ",".join(v)) for k, v in query_pairs.items())
        params.update(get_params)
        url = urlunparse(parsed[:3] + ("", "", ""))
        if comment_limit is None:  # Fetch MAX
            params['limit'] = 2048  # Just use a big number
        elif comment_limit > 0:  # Use value
            params['limit'] = comment_limit
        if comment_sort:
            params['sort'] = comment_sort

        response = reddit_session.request_json(url, params=params)
        if comments_only:
            return response[1]['data']['children']
        submission = Submission.from_json(response)
        submission._comment_sort = comment_sort  # pylint: disable=W0212
        submission._params = params  # pylint: disable=W0212
        return submission

    def __init__(self, reddit_session, json_dict):
        """Construct an instance of the Submission object."""
        super(Submission, self).__init__(reddit_session, json_dict)
        # pylint: disable=E0203
        self._api_link = urljoin(reddit_session.config.api_url, self.permalink)
        # pylint: enable=E0203
        self.permalink = urljoin(reddit_session.config.permalink_url,
                                 self.permalink)
        self._comment_sort = None    # Sort order requested for comments
        self._comments_by_id = {}    # fullname -> Comment lookup table
        self._comments = None        # Lazily fetched comment forest
        self._orphaned = {}          # Comments whose parent isn't loaded yet
        self._replaced_more = False  # replace_more_comments() already ran
        self._params = {}            # Extra GET params used when fetching

    @limit_chars
    def __unicode__(self):
        """Return a string representation of the Submission.

        Note: The representation is truncated to a fixed number of characters.
        """
        title = self.title.replace('\r\n', ' ')
        return six.text_type('{0} :: {1}').format(self.score, title)

    def _insert_comment(self, comment):
        """Place a fetched comment into the tree, reuniting orphans."""
        if comment.name in self._comments_by_id:  # Skip existing comments
            return

        comment._update_submission(self)  # pylint: disable=W0212

        if comment.name in self._orphaned:  # Reunite children with parent
            comment.replies.extend(self._orphaned[comment.name])
            del self._orphaned[comment.name]

        if comment.is_root:
            self._comments.append(comment)
        elif comment.parent_id in self._comments_by_id:
            self._comments_by_id[comment.parent_id].replies.append(comment)
        else:  # Orphan
            if comment.parent_id in self._orphaned:
                self._orphaned[comment.parent_id].append(comment)
            else:
                self._orphaned[comment.parent_id] = [comment]

    def _update_comments(self, comments):
        """Replace the comment forest and attach each root to self."""
        self._comments = comments
        for comment in self._comments:
            comment._update_submission(self)  # pylint: disable=W0212

    def add_comment(self, text):
        """Comment on the submission using the specified text.

        :returns: A Comment object for the newly created comment.

        """
        # pylint: disable=W0212
        response = self.reddit_session._add_comment(self.fullname, text)
        # pylint: enable=W0212
        # Evict the cached submission page so the new comment shows up.
        self.reddit_session.evict(self._api_link)  # pylint: disable=W0212
        return response

    @property
    def comments(self):  # pylint: disable=E0202
        """Return forest of comments, with top-level comments as tree roots.

        May contain instances of MoreComment objects. To easily replace these
        objects with Comment objects, use the replace_more_comments method then
        fetch this attribute. Use comment replies to walk down the tree. To get
        an unnested, flat list of comments from this attribute use
        helpers.flatten_tree.

        """
        if self._comments is None:
            self.comments = Submission.from_url(  # pylint: disable=W0212
                self.reddit_session, self._api_link, comments_only=True)
        return self._comments

    @comments.setter  # NOQA
    def comments(self, new_comments):  # pylint: disable=E0202
        """Update the list of comments with the provided nested list."""
        self._update_comments(new_comments)
        self._orphaned = {}

    def get_duplicates(self, *args, **kwargs):
        """Return a get_content generator for the submission's duplicates.

        :returns: get_content generator iterating over Submission objects.

        The additional parameters are passed directly into
        :meth:`.get_content`. Note: the `url` and `object_filter` parameters
        cannot be altered.

        """
        url = self.reddit_session.config['duplicates'].format(
            submissionid=self.id)
        return self.reddit_session.get_content(url, *args, object_filter=1,
                                               **kwargs)

    def get_flair_choices(self, *args, **kwargs):
        """Return available link flair choices and current flair.

        Convenience function for
        :meth:`~.AuthenticatedReddit.get_flair_choices` populating both the
        `subreddit` and `link` parameters.

        :returns: The json response from the server.

        """
        return self.subreddit.get_flair_choices(self.fullname, *args, **kwargs)

    @restrict_access(scope='modposts')
    def lock(self):
        """Lock thread.

        Requires that the currently authenticated user has the modposts oauth
        scope or has user/password authentication as a mod of the subreddit.

        :returns: The json response from the server.

        """
        url = self.reddit_session.config['lock']
        data = {'id': self.fullname}
        return self.reddit_session.request_json(url, data=data)

    def mark_as_nsfw(self, unmark_nsfw=False):
        """Mark as Not Safe For Work.

        Requires that the currently authenticated user is the author of the
        submission, has the modposts oauth scope or has user/password
        authentication as a mod of the subreddit.

        :param unmark_nsfw: When True, unmark instead (see
            :meth:`unmark_as_nsfw`).

        :returns: The json response from the server.

        """
        def mark_as_nsfw_helper(self):  # pylint: disable=W0613
            # It is necessary to have the 'self' argument as it's needed in
            # restrict_access to determine what class the decorator is
            # operating on.
            url = self.reddit_session.config['unmarknsfw' if unmark_nsfw else
                                             'marknsfw']
            data = {'id': self.fullname}
            return self.reddit_session.request_json(url, data=data)

        is_author = (self.reddit_session.is_logged_in() and self.author ==
                     self.reddit_session.user)
        if is_author:
            # Authors may flag their own submission without mod privileges.
            return mark_as_nsfw_helper(self)
        else:
            return restrict_access('modposts')(mark_as_nsfw_helper)(self)

    def replace_more_comments(self, limit=32, threshold=1):
        """Update the comment tree by replacing instances of MoreComments.

        :param limit: The maximum number of MoreComments objects to
            replace. Each replacement requires 1 API request. Set to None to
            have no limit, or to 0 to make no extra requests. Default: 32
        :param threshold: The minimum number of children comments a
            MoreComments object must have in order to be replaced. Default: 1
        :returns: A list of MoreComments objects that were not replaced.

        Note that after making this call, the `comments` attribute of the
        submission will no longer contain any MoreComments objects. Items that
        weren't replaced are still removed from the tree, and will be included
        in the returned list.

        """
        if self._replaced_more:
            return []

        remaining = limit
        more_comments = self._extract_more_comments(self.comments)
        skipped = []

        # Fetch largest more_comments until reaching the limit or the threshold
        while more_comments:
            item = heappop(more_comments)
            if remaining == 0:  # We're not going to replace any more
                heappush(more_comments, item)  # It wasn't replaced
                break
            elif len(item.children) == 0 or 0 < item.count < threshold:
                heappush(skipped, item)  # It wasn't replaced
                continue

            # Fetch new comments and decrease remaining if a request was made
            new_comments = item.comments(update=False)
            if new_comments is not None and remaining is not None:
                remaining -= 1
            elif new_comments is None:
                continue

            # Re-add new MoreComment objects to the heap of more_comments
            for more in self._extract_more_comments(new_comments):
                more._update_submission(self)  # pylint: disable=W0212
                heappush(more_comments, more)
            # Insert the new comments into the tree
            for comment in new_comments:
                self._insert_comment(comment)

        self._replaced_more = True
        return more_comments + skipped

    def set_flair(self, *args, **kwargs):
        """Set flair for this submission.

        Convenience function that utilizes :meth:`.ModFlairMixin.set_flair`
        populating both the `subreddit` and `item` parameters.

        :returns: The json response from the server.

        """
        return self.subreddit.set_flair(self, *args, **kwargs)

    @restrict_access(scope='modposts')
    def set_contest_mode(self, state=True):
        """Set 'Contest Mode' for the comments of this submission.

        Contest mode has the following effects:
          * The comment thread will default to being sorted randomly.
          * Replies to top-level comments will be hidden behind
            "[show replies]" buttons.
          * Scores will be hidden from non-moderators.
          * Scores accessed through the API (mobile apps, bots) will be
            obscured to "1" for non-moderators.

        Source for effects: https://www.reddit.com/159bww/

        :returns: The json response from the server.

        """
        # TODO: Whether a submission is in contest mode is not exposed via the
        # API. Adding a test of this method is thus currently impossible.
        # Add a test when it becomes possible.
        url = self.reddit_session.config['contest_mode']
        data = {'id': self.fullname, 'state': state}
        return self.reddit_session.request_json(url, data=data)

    @restrict_access(scope='modposts')
    def set_suggested_sort(self, sort='blank'):
        """Set 'Suggested Sort' for the comments of the submission.

        Comments can be sorted in one of (confidence, top, new, hot,
        controversial, old, random, qa, blank).

        :returns: The json response from the server.

        """
        url = self.reddit_session.config['suggested_sort']
        data = {'id': self.fullname, 'sort': sort}
        return self.reddit_session.request_json(url, data=data)

    @property
    def short_link(self):
        """Return a short link to the submission.

        The short link points to a page on the short_domain that redirects to
        the main. For example http://redd.it/eorhm is a short link for
        https://www.reddit.com/r/announcements/comments/eorhm/reddit_30_less_typing/.

        """
        return urljoin(self.reddit_session.config.short_domain, self.id)

    @restrict_access(scope='modposts')
    def sticky(self, bottom=True):
        """Sticky a post in its subreddit.

        If there is already a stickied post in the designated slot it will be
        unstickied.

        :param bottom: Set this as the top or bottom sticky. If no top sticky
            exists, this submission will become the top sticky regardless.

        :returns: The json response from the server

        """
        url = self.reddit_session.config['sticky_submission']
        data = {'id': self.fullname, 'state': True}
        if not bottom:
            data['num'] = 1
        return self.reddit_session.request_json(url, data=data)

    @restrict_access(scope='modposts')
    def unlock(self):
        """Unlock thread.

        Requires that the currently authenticated user has the modposts oauth
        scope or has user/password authentication as a mod of the subreddit.

        :returns: The json response from the server.

        """
        url = self.reddit_session.config['unlock']
        data = {'id': self.fullname}
        return self.reddit_session.request_json(url, data=data)

    def unmark_as_nsfw(self):
        """Mark as Safe For Work.

        :returns: The json response from the server.

        """
        return self.mark_as_nsfw(unmark_nsfw=True)

    @restrict_access(scope='modposts')
    def unset_contest_mode(self):
        """Unset 'Contest Mode' for the comments of this submission.

        Contest mode has the following effects:
          * The comment thread will default to being sorted randomly.
          * Replies to top-level comments will be hidden behind
            "[show replies]" buttons.
          * Scores will be hidden from non-moderators.
          * Scores accessed through the API (mobile apps, bots) will be
            obscured to "1" for non-moderators.

        Source for effects: https://www.reddit.com/159bww/

        :returns: The json response from the server.

        """
        return self.set_contest_mode(False)

    @restrict_access(scope='modposts')
    def unsticky(self):
        """Unsticky this post.

        :returns: The json response from the server

        """
        url = self.reddit_session.config['sticky_submission']
        data = {'id': self.fullname, 'state': False}
        return self.reddit_session.request_json(url, data=data)
+
+
class Subreddit(Messageable, Refreshable):
    """A class for Subreddits."""

    # Methods mixed in dynamically from the listed reddit-session mixins.
    _methods = (('accept_moderator_invite', AR),
                ('add_flair_template', MFMix),
                ('clear_flair_templates', MFMix),
                ('configure_flair', MFMix),
                ('delete_flair', MFMix),
                ('delete_image', MCMix),
                ('edit_wiki_page', AR),
                ('get_banned', MOMix),
                ('get_comments', UR),
                ('get_contributors', MOMix),
                ('get_edited', MOMix),
                ('get_flair', UR),
                ('get_flair_choices', AR),
                ('get_flair_list', MFMix),
                ('get_moderators', UR),
                ('get_mod_log', MLMix),
                ('get_mod_queue', MOMix),
                ('get_mod_mail', MOMix),
                ('get_muted', MOMix),
                ('get_random_submission', UR),
                ('get_reports', MOMix),
                ('get_rules', UR),
                ('get_settings', MCMix),
                ('get_spam', MOMix),
                ('get_sticky', UR),
                ('get_stylesheet', MOMix),
                ('get_traffic', UR),
                ('get_unmoderated', MOMix),
                ('get_wiki_banned', MOMix),
                ('get_wiki_contributors', MOMix),
                ('get_wiki_page', UR),
                ('get_wiki_pages', UR),
                ('leave_contributor', MSMix),
                ('leave_moderator', MSMix),
                ('search', UR),
                ('select_flair', AR),
                ('set_flair', MFMix),
                ('set_flair_csv', MFMix),
                ('set_settings', MCMix),
                ('set_stylesheet', MCMix),
                ('submit', SubmitMixin),
                ('subscribe', SubscribeMixin),
                ('unsubscribe', SubscribeMixin),
                ('update_settings', MCMix),
                ('upload_image', MCMix))

    # Subreddit banned
    add_ban = _modify_relationship('banned', is_sub=True)
    remove_ban = _modify_relationship('banned', unlink=True, is_sub=True)

    # Subreddit contributors
    add_contributor = _modify_relationship('contributor', is_sub=True)
    remove_contributor = _modify_relationship('contributor', unlink=True,
                                              is_sub=True)
    # Subreddit moderators
    add_moderator = _modify_relationship('moderator', is_sub=True)
    remove_moderator = _modify_relationship('moderator', unlink=True,
                                            is_sub=True)
    # Subreddit muted
    add_mute = _modify_relationship('muted', is_sub=True)
    remove_mute = _modify_relationship('muted', is_sub=True, unlink=True)

    # Subreddit wiki banned
    add_wiki_ban = _modify_relationship('wikibanned', is_sub=True)
    remove_wiki_ban = _modify_relationship('wikibanned', unlink=True,
                                           is_sub=True)
    # Subreddit wiki contributors
    add_wiki_contributor = _modify_relationship('wikicontributor', is_sub=True)
    remove_wiki_contributor = _modify_relationship('wikicontributor',
                                                   unlink=True, is_sub=True)

    # Generic listing selectors
    get_controversial = _get_sorter('controversial')
    get_hot = _get_sorter('')
    get_new = _get_sorter('new')
    get_top = _get_sorter('top')

    # Explicit listing selectors
    get_controversial_from_all = _get_sorter('controversial', t='all')
    get_controversial_from_day = _get_sorter('controversial', t='day')
    get_controversial_from_hour = _get_sorter('controversial', t='hour')
    get_controversial_from_month = _get_sorter('controversial', t='month')
    get_controversial_from_week = _get_sorter('controversial', t='week')
    get_controversial_from_year = _get_sorter('controversial', t='year')
    get_rising = _get_sorter('rising')
    get_top_from_all = _get_sorter('top', t='all')
    get_top_from_day = _get_sorter('top', t='day')
    get_top_from_hour = _get_sorter('top', t='hour')
    get_top_from_month = _get_sorter('top', t='month')
    get_top_from_week = _get_sorter('top', t='week')
    get_top_from_year = _get_sorter('top', t='year')

    def __init__(self, reddit_session, subreddit_name=None, json_dict=None,
                 fetch=False, **kwargs):
        """Construct an instance of the Subreddit object.

        :raises TypeError: If ``subreddit_name`` is not a non-empty string.
        """
        # Special case for when my_subreddits is called as no name is returned
        # so we have to extract the name from the URL. The URLs are returned
        # as: /r/reddit_name/
        if subreddit_name is None:
            subreddit_name = json_dict['url'].split('/')[2]

        if not isinstance(subreddit_name, six.string_types) \
                or not subreddit_name:
            raise TypeError('subreddit_name must be a non-empty string.')

        # Multireddits ('+'/'-' names) have no about page to fetch.
        if fetch and ('+' in subreddit_name or '-' in subreddit_name):
            fetch = False
            warn_explicit('fetch=True has no effect on multireddits',
                          UserWarning, '', 0)

        info_url = reddit_session.config['subreddit_about'].format(
            subreddit=subreddit_name)
        self._case_name = subreddit_name
        super(Subreddit, self).__init__(reddit_session, json_dict, fetch,
                                        info_url, **kwargs)
        self.display_name = self._case_name
        self._url = reddit_session.config['subreddit'].format(
            subreddit=self.display_name)
        # '' is the hot listing
        listings = ['new/', '', 'top/', 'controversial/', 'rising/']
        base = reddit_session.config['subreddit'].format(
            subreddit=self.display_name)
        self._listing_urls = [base + x + '.json' for x in listings]

    def __repr__(self):
        """Return a code representation of the Subreddit."""
        return 'Subreddit(subreddit_name=\'{0}\')'.format(self.display_name)

    def __unicode__(self):
        """Return a string representation of the Subreddit."""
        return self.display_name

    def _post_populate(self, fetch):
        """Swap in the caller-supplied casing of ``display_name``."""
        if fetch:
            # Maintain a consistent `display_name` until the user
            # explicitly calls `subreddit.refresh()`
            tmp = self._case_name
            self._case_name = self.display_name
            self.display_name = tmp

    def clear_all_flair(self):
        """Remove all user flair on this subreddit.

        :returns: The json response from the server when there is flair to
            clear, otherwise returns None.

        """
        csv = [{'user': x['user']} for x in self.get_flair_list(limit=None)]
        if csv:
            return self.set_flair_csv(csv)
        else:
            return
+
+
+class Multireddit(Refreshable):
+ """A class for users' Multireddits."""
+
+ # Generic listing selectors
+ get_controversial = _get_sorter('controversial')
+ get_hot = _get_sorter('')
+ get_new = _get_sorter('new')
+ get_top = _get_sorter('top')
+
+ # Explicit listing selectors
+ get_controversial_from_all = _get_sorter('controversial', t='all')
+ get_controversial_from_day = _get_sorter('controversial', t='day')
+ get_controversial_from_hour = _get_sorter('controversial', t='hour')
+ get_controversial_from_month = _get_sorter('controversial', t='month')
+ get_controversial_from_week = _get_sorter('controversial', t='week')
+ get_controversial_from_year = _get_sorter('controversial', t='year')
+ get_rising = _get_sorter('rising')
+ get_top_from_all = _get_sorter('top', t='all')
+ get_top_from_day = _get_sorter('top', t='day')
+ get_top_from_hour = _get_sorter('top', t='hour')
+ get_top_from_month = _get_sorter('top', t='month')
+ get_top_from_week = _get_sorter('top', t='week')
+ get_top_from_year = _get_sorter('top', t='year')
+
+ @classmethod
+ def from_api_response(cls, reddit_session, json_dict):
+ """Return an instance of the appropriate class from the json dict."""
+ # The Multireddit response contains the Subreddits attribute as a list
+ # of dicts of the form {'name': 'subredditname'}.
+ # We must convert each of these into a Subreddit object.
+ json_dict['subreddits'] = [Subreddit(reddit_session, item['name'])
+ for item in json_dict['subreddits']]
+ return cls(reddit_session, None, None, json_dict)
+
+ def __init__(self, reddit_session, author=None, name=None,
+ json_dict=None, fetch=False, **kwargs):
+ """Construct an instance of the Multireddit object."""
+ author = six.text_type(author) if author \
+ else json_dict['path'].split('/')[-3]
+ if not name:
+ name = json_dict['path'].split('/')[-1]
+
+ info_url = reddit_session.config['multireddit_about'].format(
+ user=author, multi=name)
+ self.name = name
+ self._author = author
+ super(Multireddit, self).__init__(reddit_session, json_dict, fetch,
+ info_url, **kwargs)
+ self._url = reddit_session.config['multireddit'].format(
+ user=author, multi=name)
+
+ def __repr__(self):
+ """Return a code representation of the Multireddit."""
+ return 'Multireddit(author=\'{0}\', name=\'{1}\')'.format(
+ self._author, self.name)
+
+ def __unicode__(self):
+ """Return a string representation of the Multireddit."""
+ return self.name
+
+ def _post_populate(self, fetch):
+ if fetch:
+ # Subreddits are returned as dictionaries in the form
+ # {'name': 'subredditname'}. Convert them to Subreddit objects.
+ self.subreddits = [Subreddit(self.reddit_session, item['name'])
+ for item in self.subreddits]
+
+ # paths are of the form "/user/{USERNAME}/m/{MULTINAME}"
+ author = self.path.split('/')[2]
+ self.author = Redditor(self.reddit_session, author)
+
+ @restrict_access(scope='subscribe')
+ def add_subreddit(self, subreddit, _delete=False, *args, **kwargs):
+ """Add a subreddit to the multireddit.
+
+ :param subreddit: The subreddit name or Subreddit object to add
+
+ The additional parameters are passed directly into
+ :meth:`~praw.__init__.BaseReddit.request_json`.
+
+ """
+ subreddit = six.text_type(subreddit)
+ url = self.reddit_session.config['multireddit_add'].format(
+ user=self._author, multi=self.name, subreddit=subreddit)
+ method = 'DELETE' if _delete else 'PUT'
+ self.reddit_session.http.headers['x-modhash'] = \
+ self.reddit_session.modhash
+ data = {'model': dumps({'name': subreddit})}
+ try:
+ self.reddit_session.request(url, data=data, method=method,
+ *args, **kwargs)
+ finally:
+ del self.reddit_session.http.headers['x-modhash']
+
+    @restrict_access(scope='subscribe')
+    def copy(self, to_name):
+        """Copy this multireddit.
+
+        :param to_name: The name to give the copied multireddit.
+
+        Convenience function that utilizes
+        :meth:`.MultiredditMixin.copy_multireddit` populating both
+        the `from_redditor` and `from_name` parameters.
+
+        """
+        return self.reddit_session.copy_multireddit(self._author, self.name,
+                                                    to_name)
+
+    @restrict_access(scope='subscribe')
+    def delete(self):
+        """Delete this multireddit.
+
+        Convenience function that utilizes
+        :meth:`.MultiredditMixin.delete_multireddit` populating the `name`
+        parameter.
+
+        """
+        # Delegates to the session-level helper; this instance itself is
+        # not mutated.
+        return self.reddit_session.delete_multireddit(self.name)
+
+    @restrict_access(scope='subscribe')
+    def edit(self, *args, **kwargs):
+        """Edit this multireddit.
+
+        Convenience function that utilizes
+        :meth:`.MultiredditMixin.edit_multireddit` populating the `name`
+        parameter.
+
+        """
+        # ``name`` is passed as a keyword so positional args flow through
+        # to edit_multireddit untouched.
+        return self.reddit_session.edit_multireddit(name=self.name, *args,
+                                                    **kwargs)
+
+    @restrict_access(scope='subscribe')
+    def remove_subreddit(self, subreddit, *args, **kwargs):
+        """Remove a subreddit from the user's multireddit.
+
+        :param subreddit: The subreddit name or Subreddit object to remove.
+
+        """
+        # add_subreddit with _delete=True issues a DELETE for the subreddit.
+        return self.add_subreddit(subreddit, True, *args, **kwargs)
+
+    @restrict_access(scope='subscribe')
+    def rename(self, new_name, *args, **kwargs):
+        """Rename this multireddit.
+
+        This function is a handy shortcut to
+        :meth:`rename_multireddit` of the reddit_session.
+
+        """
+        new = self.reddit_session.rename_multireddit(self.name, new_name,
+                                                     *args, **kwargs)
+        # Adopt the renamed object's attributes so this instance keeps
+        # working under the new name.
+        self.__dict__ = new.__dict__  # pylint: disable=W0201
+        return self
+
+
+class PRAWListing(RedditContentObject):
+    """An abstract class to coerce a listing into RedditContentObjects.
+
+    Subclasses must set CHILD_ATTRIBUTE to the name of the attribute that
+    holds the raw child list and provide a ``_convert(reddit_session,
+    data)`` static method mapping each raw item to a content object.
+    """
+
+    # Name of the attribute holding the child list; set by subclasses.
+    CHILD_ATTRIBUTE = None
+
+    def __init__(self, reddit_session, json_dict=None, fetch=False):
+        """Construct an instance of the PRAWListing object."""
+        super(PRAWListing, self).__init__(reddit_session, json_dict, fetch)
+
+        if not self.CHILD_ATTRIBUTE:
+            raise NotImplementedError('PRAWListing must be extended.')
+
+        # Convert each raw child in-place via the subclass's _convert().
+        child_list = getattr(self, self.CHILD_ATTRIBUTE)
+        for i in range(len(child_list)):
+            child_list[i] = self._convert(reddit_session, child_list[i])
+
+    def __contains__(self, item):
+        """Test if item exists in the listing."""
+        return item in getattr(self, self.CHILD_ATTRIBUTE)
+
+    def __delitem__(self, index):
+        """Remove the item at position index from the listing."""
+        del getattr(self, self.CHILD_ATTRIBUTE)[index]
+
+    def __getitem__(self, index):
+        """Return the item at position index in the listing."""
+        return getattr(self, self.CHILD_ATTRIBUTE)[index]
+
+    def __iter__(self):
+        """Return an iterator to the listing."""
+        return getattr(self, self.CHILD_ATTRIBUTE).__iter__()
+
+    def __len__(self):
+        """Return the number of items in the listing."""
+        return len(getattr(self, self.CHILD_ATTRIBUTE))
+
+    def __setitem__(self, index, item):
+        """Set item at position `index` in the listing."""
+        getattr(self, self.CHILD_ATTRIBUTE)[index] = item
+
+    def __unicode__(self):
+        """Return a string representation of the listing."""
+        return six.text_type(getattr(self, self.CHILD_ATTRIBUTE))
+
+
+class UserList(PRAWListing):
+    """A list of Redditors. Works just like a regular list."""
+
+    CHILD_ATTRIBUTE = 'children'
+
+    @staticmethod
+    def _convert(reddit_session, data):
+        """Return a Redditor object from the data."""
+        retval = Redditor(reddit_session, data['name'], fetch=False)
+        # data['id'] is a fullname such as 't2_abc123'; strip the type
+        # prefix and keep only the bare id.
+        retval.id = data['id'].split('_')[1]  # pylint: disable=C0103,W0201
+        return retval
+
+
+class WikiPage(Refreshable):
+    """An individual WikiPage object."""
+
+    @classmethod
+    def from_api_response(cls, reddit_session, json_dict):
+        """Return an instance of the appropriate class from the json_dict."""
+        # The WikiPage response does not contain the necessary information
+        # in the JSON response to determine the name of the page nor the
+        # subreddit it belongs to. Thus we must extract this information
+        # from the request URL.
+        # pylint: disable=W0212
+        parts = reddit_session._request_url.split('/', 6)
+        # pylint: enable=W0212
+        # Assumes URLs of the form .../r/<subreddit>/wiki/<page>[.json]:
+        # index 4 is the subreddit and index 6 the page (extension removed).
+        subreddit = parts[4]
+        page = parts[6].split('.', 1)[0]
+        return cls(reddit_session, subreddit, page, json_dict=json_dict)
+
+    def __init__(self, reddit_session, subreddit=None, page=None,
+                 json_dict=None, fetch=False, **kwargs):
+        """Construct an instance of the WikiPage object.
+
+        :param subreddit: The subreddit (name or object) the page lives in.
+        :param page: The name of the wiki page.
+
+        When neither subreddit nor page is supplied they are read from the
+        'sr' and 'page' keys of json_dict instead.
+        """
+        if not subreddit and not page:
+            subreddit = json_dict['sr']
+            page = json_dict['page']
+        info_url = reddit_session.config['wiki_page'].format(
+            subreddit=six.text_type(subreddit), page=page)
+        super(WikiPage, self).__init__(reddit_session, json_dict, fetch,
+                                       info_url, **kwargs)
+        self.page = page
+        self.subreddit = subreddit
+
+    def __unicode__(self):
+        """Return a string representation of the page ("subreddit:page")."""
+        return six.text_type('{0}:{1}').format(self.subreddit, self.page)
+
+    @restrict_access(scope='modwiki')
+    def add_editor(self, username, _delete=False, *args, **kwargs):
+        """Add an editor to this wiki page.
+
+        :param username: The name or Redditor object of the user to add.
+        :param _delete: If True, remove the user as an editor instead.
+            Please use :meth:`remove_editor` rather than setting it manually.
+
+        Additional parameters are passed into
+        :meth:`~praw.__init__.BaseReddit.request_json`.
+        """
+        # The endpoint's method segment is 'add' or 'del' depending on
+        # whether we are granting or revoking editor access.
+        url = self.reddit_session.config['wiki_page_editor']
+        url = url.format(subreddit=six.text_type(self.subreddit),
+                         method='del' if _delete else 'add')
+
+        data = {'page': self.page,
+                'username': six.text_type(username)}
+        return self.reddit_session.request_json(url, data=data, *args,
+                                                **kwargs)
+
+    @restrict_access(scope='modwiki')
+    def get_settings(self, *args, **kwargs):
+        """Return the settings for this wiki page.
+
+        Includes permission level, names of editors, and whether
+        the page is listed on /wiki/pages.
+
+        Additional parameters are passed into
+        :meth:`~praw.__init__.BaseReddit.request_json`
+        """
+        url = self.reddit_session.config['wiki_page_settings']
+        url = url.format(subreddit=six.text_type(self.subreddit),
+                         page=self.page)
+        return self.reddit_session.request_json(url, *args, **kwargs)['data']
+
+    def edit(self, *args, **kwargs):
+        """Edit the wiki page.
+
+        Convenience function that utilizes
+        :meth:`.AuthenticatedReddit.edit_wiki_page` populating both the
+        ``subreddit`` and ``page`` parameters.
+        """
+        # NOTE(review): self.subreddit may be a plain string when this page
+        # was built from json_dict or from_api_response; this call assumes
+        # a Subreddit object with edit_wiki_page -- confirm.
+        return self.subreddit.edit_wiki_page(self.page, *args, **kwargs)
+
+    @restrict_access(scope='modwiki')
+    def edit_settings(self, permlevel, listed, *args, **kwargs):
+        """Edit the settings for this individual wiki page.
+
+        :param permlevel: Who can edit this page?
+            (0) use subreddit wiki permissions, (1) only approved wiki
+            contributors for this page may edit (see
+            :meth:`~praw.objects.WikiPage.add_editor`), (2) only mods may edit
+            and view
+        :param listed: Show this page on the listing?
+            True - Appear in /wiki/pages
+            False - Do not appear in /wiki/pages
+        :returns: The updated settings data.
+
+        Additional parameters are passed into :meth:`request_json`.
+
+        """
+        url = self.reddit_session.config['wiki_page_settings']
+        url = url.format(subreddit=six.text_type(self.subreddit),
+                         page=self.page)
+        # The API expects 'on'/'off' for ``listed`` rather than a boolean.
+        data = {'permlevel': permlevel,
+                'listed': 'on' if listed else 'off'}
+
+        return self.reddit_session.request_json(url, data=data, *args,
+                                                **kwargs)['data']
+
+    def remove_editor(self, username, *args, **kwargs):
+        """Remove an editor from this wiki page.
+
+        :param username: The name or Redditor object of the user to remove.
+
+        This method points to :meth:`add_editor` with _delete=True.
+
+        Additional parameters are are passed to :meth:`add_editor` and
+        subsequently into :meth:`~praw.__init__.BaseReddit.request_json`.
+        """
+        return self.add_editor(username=username, _delete=True, *args,
+                               **kwargs)
+
+
+class WikiPageListing(PRAWListing):
+    """A list of WikiPages. Works just like a regular list."""
+
+    # Presumably populated by the base class from the json listing before
+    # PRAWListing.__init__ converts it -- TODO confirm where _tmp is set.
+    CHILD_ATTRIBUTE = '_tmp'
+
+    @staticmethod
+    def _convert(reddit_session, data):
+        """Return a WikiPage object from the data.
+
+        ``data`` is passed through as the page argument (presumably a
+        page-name string).
+        """
+        # TODO: The _request_url hack shouldn't be necessary
+        # pylint: disable=W0212
+        subreddit = reddit_session._request_url.rsplit('/', 4)[1]
+        # pylint: enable=W0212
+        return WikiPage(reddit_session, subreddit, data, fetch=False)
+
+
+def _add_aliases():
+    """Attach each class's ``_methods`` mixin entries as bound aliases.
+
+    Scans this module for classes declaring a ``_methods`` table and sets
+    each listed mixin method on the class via ``alias_function``.
+    """
+    def predicate(obj):
+        # Only classes that declare a _methods table participate.
+        return inspect.isclass(obj) and hasattr(obj, '_methods')
+
+    # Imported at function scope; presumably to keep inspect/sys out of the
+    # module namespace -- harmless either way since predicate filters them.
+    import inspect
+    import sys
+
+    for _, cls in inspect.getmembers(sys.modules[__name__], predicate):
+        for name, mixin in cls._methods:  # pylint: disable=W0212
+            setattr(cls, name, alias_function(getattr(mixin, name),
+                                              mixin.__name__))
+# Run once at import time so the aliases exist before any class is used.
+_add_aliases()
diff --git a/rtv/packages/praw/praw.ini b/rtv/packages/praw/praw.ini
new file mode 100644
index 0000000..5926c60
--- /dev/null
+++ b/rtv/packages/praw/praw.ini
@@ -0,0 +1,79 @@
+[DEFAULT]
+# The domain name PRAW will use to interact with the reddit site via its API.
+api_domain: api.reddit.com
+
+# Time, a float, in seconds, required between calls. See:
+# http://code.reddit.com/wiki/API
+api_request_delay: 2.0
+
+# A boolean to indicate whether or not to check for package updates.
+check_for_updates: True
+
+# Time, a float, in seconds, to save the results of a get/post request.
+cache_timeout: 30
+
+# Log the API calls
+# 0: no logging
+# 1: log only the request URIs
+# 2: log the request URIs as well as any POST data
+log_requests: 0
+
+# The domain name PRAW will use for oauth-related requests.
+oauth_domain: oauth.reddit.com
+
+# Whether or not to use HTTPS for oauth connections. This should only be
+# changed for development environments.
+oauth_https: True
+
+# OAuth grant type: either `authorization_code` or `password`
+oauth_grant_type: authorization_code
+
+# The maximum length of unicode representations of Comment, Message and
+# Submission objects. This is mainly used to fit them within a terminal window
+# line. A negative value means no limit.
+output_chars_limit: 80
+
+# The domain name PRAW will use when permalinks are requested.
+permalink_domain: www.reddit.com
+
+# The domain name to use for short urls.
+short_domain: redd.it
+
+# A boolean to indicate if json_dict, which contains the original API response,
+# should be stored on every object in the json_dict attribute. Default is
+# False as memory usage will double if enabled.
+store_json_result: False
+
+# Maximum time, a float, in seconds, before a single HTTP request times
+# out. urllib2.URLError is raised upon timeout.
+timeout: 45
+
+# A boolean to indicate if SSL certificates should be validated. The
+# default is True.
+validate_certs: True
+
+# Object to kind mappings
+comment_kind: t1
+message_kind: t4
+redditor_kind: t2
+submission_kind: t3
+subreddit_kind: t5
+
+
+[reddit]
+# Uses the default settings
+
+[reddit_oauth_test]
+oauth_client_id: stJlUSUbPQe5lQ
+oauth_client_secret: iU-LsOzyJH7BDVoq-qOWNEq2zuI
+oauth_redirect_uri: https://127.0.0.1:65010/authorize_callback
+
+[local_example]
+api_domain: reddit.local
+api_request_delay: 0
+log_requests: 0
+message_kind: t7
+permalink_domain: reddit.local
+short_domain:
+submission_kind: t6
+subreddit_kind: t5
diff --git a/rtv/packages/praw/settings.py b/rtv/packages/praw/settings.py
new file mode 100644
index 0000000..49821fb
--- /dev/null
+++ b/rtv/packages/praw/settings.py
@@ -0,0 +1,45 @@
+# This file is part of PRAW.
+#
+# PRAW is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# PRAW. If not, see <http://www.gnu.org/licenses/>.
+
+"""Provides the code to load PRAW's configuration file `praw.ini`."""
+
+from __future__ import print_function, unicode_literals
+
+import os
+import sys
+
+from six.moves import configparser
+
+
+def _load_configuration():
+    """Attempt to load settings from various praw.ini files.
+
+    Search order (later files override earlier ones when merged by
+    ``config.read``):
+      1. the praw.ini bundled next to this module
+      2. the OS user-config directory (APPDATA / XDG_CONFIG_HOME /
+         ~/.config), when one can be determined
+      3. praw.ini in the current working directory
+
+    :returns: a RawConfigParser with the merged settings.
+    :raises Exception: if none of the candidate files could be read.
+    """
+    config = configparser.RawConfigParser()
+    module_dir = os.path.dirname(sys.modules[__name__].__file__)
+    if 'APPDATA' in os.environ:  # Windows
+        os_config_path = os.environ['APPDATA']
+    elif 'XDG_CONFIG_HOME' in os.environ:  # Modern Linux
+        os_config_path = os.environ['XDG_CONFIG_HOME']
+    elif 'HOME' in os.environ:  # Legacy Linux
+        os_config_path = os.path.join(os.environ['HOME'], '.config')
+    else:
+        os_config_path = None
+    locations = [os.path.join(module_dir, 'praw.ini'), 'praw.ini']
+    if os_config_path is not None:
+        locations.insert(1, os.path.join(os_config_path, 'praw.ini'))
+    if not config.read(locations):
+        raise Exception('Could not find config file in any of: {0}'
+                        .format(locations))
+    return config
+# Load once at import time, then drop the helper from the namespace.
+CONFIG = _load_configuration()
+del _load_configuration
diff --git a/scripts/update_packages.py b/scripts/update_packages.py
new file mode 100644
index 0000000..78c770d
--- /dev/null
+++ b/scripts/update_packages.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+Update the project's bundled dependencies by downloading the git repository and
+copying over the most recent commit.
+"""
+
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+
+_filepath = os.path.dirname(os.path.relpath(__file__))
+ROOT = os.path.abspath(os.path.join(_filepath, '..'))
+
+PRAW_REPO = 'https://github.com/michael-lazar/praw3.git'
+
+
+def main():
+
+ tmpdir = tempfile.mkdtemp()
+ subprocess.check_call(['git', 'clone', PRAW_REPO, tmpdir])
+
+ # Update the commit hash reference
+ os.chdir(tmpdir)
+ p = subprocess.Popen(['git', 'rev-parse', 'HEAD'], stdout=subprocess.PIPE)
+ p.wait()
+ commit = p.stdout.read().strip()
+ print('Found commit %s' % commit)
+ regex = '"s/^__praw_hash__ =.*$/__praw_hash__ = \'%s\'/g"' % commit
+ packages_root = os.path.join(ROOT, 'rtv', 'packages', '__init__.py')
+ print('Updating commit hash in %s' % packages_root)
+ subprocess.check_call(['sed', '-i', '', regex, packages_root])
+
+ # Overwrite the project files
+ src = os.path.join(tmpdir, 'praw')
+ dest = os.path.join(ROOT, 'packages', 'praw')
+ print('Copying package files to %s' % dest)
+ shutil.rmtree(dest, ignore_errors=True)
+ shutil.copytree(src, dest)
+
+ # Cleanup
+ print('Removing directory %s' % tmpdir)
+ shutil.rmtree(tmpdir)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/setup.cfg b/setup.cfg
index 50728d2..d43964e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -2,10 +2,10 @@
universal = 1
[metadata]
-requires-dist =
- praw>=3.5,<4
- six
- requests>=2.4.0
- kitchen
+requires-dist =
beautifulsoup4
+ decorator
+ kitchen
mailcap-fix
+ requests>=2.4.0
+ six
diff --git a/setup.py b/setup.py
index b32de65..68f1599 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,3 @@
-import sys
import setuptools
from version import __version__ as version
@@ -18,12 +17,13 @@ setuptools.setup(
package_data={'rtv': ['templates/*']},
data_files=[("share/man/man1", ["rtv.1"])],
install_requires=[
- 'praw >=3.5, <4',
- 'six',
- 'requests >=2.4.0',
- 'kitchen',
'beautifulsoup4',
+ 'decorator',
+ 'kitchen',
'mailcap-fix',
+ # For info on why this is pinned, see https://github.com/michael-lazar/rtv/issues/325
+ 'requests >=2.4.0',
+ 'six',
],
entry_points={'console_scripts': ['rtv=rtv.__main__:main']},
classifiers=[
diff --git a/tests/conftest.py b/tests/conftest.py
index 96233a5..2eabd8b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -7,7 +7,6 @@ import logging
import threading
from functools import partial
-import praw
import pytest
from vcr import VCR
from six.moves.urllib.parse import urlparse, parse_qs
@@ -15,6 +14,7 @@ from six.moves.BaseHTTPServer import HTTPServer
from rtv.oauth import OAuthHelper, OAuthHandler
from rtv.config import Config
+from rtv.packages import praw
from rtv.terminal import Terminal
from rtv.subreddit_page import SubredditPage
from rtv.submission_page import SubmissionPage
diff --git a/tests/test_content.py b/tests/test_content.py
index 095d8d6..e5f0f99 100644
--- a/tests/test_content.py
+++ b/tests/test_content.py
@@ -6,12 +6,12 @@ from itertools import islice
from collections import OrderedDict
import six
-import praw
import pytest
+from rtv import exceptions
+from rtv.packages import praw
from rtv.content import (
Content, SubmissionContent, SubredditContent, SubscriptionContent)
-from rtv import exceptions
try:
from unittest import mock
diff --git a/tests/test_oauth.py b/tests/test_oauth.py
index f6f4f7c..efc87f4 100644
--- a/tests/test_oauth.py
+++ b/tests/test_oauth.py
@@ -2,9 +2,9 @@
from __future__ import unicode_literals
import requests
-from praw.errors import OAuthException
from rtv.oauth import OAuthHelper, OAuthHandler
+from rtv.packages.praw.errors import OAuthException
try:
diff --git a/tests/test_subreddit.py b/tests/test_subreddit.py
index c7c87b5..88bc4f1 100644
--- a/tests/test_subreddit.py
+++ b/tests/test_subreddit.py
@@ -2,10 +2,10 @@
from __future__ import unicode_literals
import six
-from praw.errors import NotFound
-from rtv.subreddit_page import SubredditPage
from rtv import __version__
+from rtv.subreddit_page import SubredditPage
+from rtv.packages.praw.errors import NotFound
try:
from unittest import mock
diff --git a/tests/test_subscription.py b/tests/test_subscription.py
index 434e802..1cd635c 100644
--- a/tests/test_subscription.py
+++ b/tests/test_subscription.py
@@ -3,7 +3,6 @@ from __future__ import unicode_literals
import curses
-import praw
import pytest
from rtv.subscription_page import SubscriptionPage
diff --git a/tests/test_terminal.py b/tests/test_terminal.py
index e4b0249..32ca996 100644
--- a/tests/test_terminal.py
+++ b/tests/test_terminal.py
@@ -10,8 +10,7 @@ import six
import pytest
from rtv.docs import HELP, COMMENT_EDIT_FILE
-from rtv.objects import Color
-from rtv.exceptions import TemporaryFileError, MailcapEntryNotFound
+from rtv.exceptions import TemporaryFileError
try:
from unittest import mock