mirror of https://github.com/gryf/ebook-converter.git
Removed share_open function.
Also, some more cleanup style changes have been made.
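In this tree, share_open had already been reduced to a plain alias for the built-in open (the shared_file module at the end of this diff keeps only the line share_open = open), so call sites can simply use open directly. A minimal sketch of the call-site change, with path standing in for whatever path a caller already has:

    # before: wrapper imported from ebook_converter.utils.shared_file
    from ebook_converter.utils.shared_file import share_open
    with share_open(path, 'rb') as f:
        raw = f.read()

    # after: the built-in open is used directly, no import needed
    with open(path, 'rb') as f:
        raw = f.read()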
@@ -14,6 +14,7 @@ def as_bytes(x, encoding='utf-8'):
 
 
 def as_unicode(x, encoding='utf-8', errors='strict'):
+    return str(x)
     if isinstance(x, bytes):
         return x.decode(encoding, errors)
     return str(x)
@@ -5,9 +5,7 @@ import optparse
 import os
 from copy import deepcopy
 
-from ebook_converter.constants_old import (
-    CONFIG_DIR_MODE, __appname__, __author__, config_dir, get_version, iswindows
-)
+from ebook_converter import constants_old
 from ebook_converter.utils.config_base import (
     Config, ConfigInterface, ConfigProxy, Option, OptionSet, OptionValues,
     StringConfig, json_dumps, json_loads, make_config_dir, plugin_dir, prefs,
@@ -23,7 +21,8 @@ if False:
 
 
 def check_config_write_access():
-    return os.access(config_dir, os.W_OK) and os.access(config_dir, os.X_OK)
+    return (os.access(constants_old.config_dir, os.W_OK) and
+            os.access(constants_old.config_dir, os.X_OK))
 
 
 class CustomHelpFormatter(optparse.IndentedHelpFormatter):
@@ -85,14 +84,16 @@ class OptionParser(optparse.OptionParser):
 
         usage = textwrap.dedent(usage)
         if epilog is None:
-            epilog = 'Created by ' + colored(__author__, fg='cyan')
+            epilog = 'Created by ' + colored(constants_old.__author__,
+                                             fg='cyan')
         usage += ('\n\nWhenever you pass arguments to %prog that have spaces '
                   'in them, enclose the arguments in quotation marks. For '
                   'example: "{}"\n\n').format("C:\\some path with spaces"
                                               if iswindows
                                               else '/some path/with spaces')
-
+        # note: the two lines above change as follows
+        # (the extraction collapsed the pair; shown here explicitly)
@@ -85,14 +84,16 @@ class OptionParser(optparse.OptionParser):
 
         usage = textwrap.dedent(usage)
         if epilog is None:
-            epilog = 'Created by ' + colored(__author__, fg='cyan')
+            epilog = 'Created by ' + colored(constants_old.__author__,
+                                             fg='cyan')
        usage += ('\n\nWhenever you pass arguments to %prog that have spaces '
@@ -1,22 +1,30 @@
-__license__ = 'GPL v3'
-__copyright__ = '2011, Kovid Goyal <kovid@kovidgoyal.net>'
-__docformat__ = 'restructuredtext en'
-
-import os, re, traceback, numbers
-from functools import partial
-from collections import defaultdict
-from copy import deepcopy
+import json
+import collections
+import copy
+import functools
+import numbers
+import os
 import pkg_resources
+import re
+import traceback
+import pickle
+import datetime
+import base64
 
+from ebook_converter.constants_old import CONFIG_DIR_MODE
+from ebook_converter.constants_old import config_dir
+from ebook_converter.constants_old import filesystem_encoding
+from ebook_converter.constants_old import iswindows
+from ebook_converter.constants_old import preferred_encoding
 from ebook_converter.utils.lock import ExclusiveFile
-from ebook_converter.constants_old import config_dir, CONFIG_DIR_MODE, preferred_encoding, filesystem_encoding, iswindows
+from ebook_converter.utils.date import isoformat
+from ebook_converter.utils import iso8601
 
 plugin_dir = os.path.join(config_dir, 'plugins')
 
 
 def parse_old_style(src):
-    import pickle as cPickle
-    options = {'cPickle':cPickle}
+    options = {'cPickle': pickle}
     try:
         if not isinstance(src, str):
             src = src.decode('utf-8')
@@ -25,20 +33,19 @@ def parse_old_style(src):
         exec(src, options)
     except Exception as err:
         try:
-            print('Failed to parse old style options string with error: {}'.format(err))
+            print('Failed to parse old style options string with error: '
+                  '{}'.format(err))
         except Exception:
             pass
     return options
 
 
 def to_json(obj):
-    import datetime
     if isinstance(obj, bytearray):
-        from base64 import standard_b64encode
         return {'__class__': 'bytearray',
-                '__value__': standard_b64encode(bytes(obj)).decode('ascii')}
+                '__value__': base64.standard_b64encode(bytes(obj))
+                .decode('ascii')}
     if isinstance(obj, datetime.datetime):
-        from ebook_converter.utils.date import isoformat
         return {'__class__': 'datetime.datetime',
                 '__value__': isoformat(obj, as_utc=True)}
     if isinstance(obj, (set, frozenset)):
@@ -62,11 +69,10 @@ def from_json(obj):
     custom = obj.get('__class__')
     if custom is not None:
         if custom == 'bytearray':
-            from base64 import standard_b64decode
-            return bytearray(standard_b64decode(obj['__value__'].encode('ascii')))
+            return bytearray(base64.standard_b64decode(obj['__value__']
+                             .encode('ascii')))
         if custom == 'datetime.datetime':
-            from ebook_converter.utils.iso8601 import parse_iso8601
-            return parse_iso8601(obj['__value__'], assume_utc=True)
+            return iso8601.parse_iso8601(obj['__value__'], assume_utc=True)
         if custom == 'set':
             return set(obj['__value__'])
     return obj
@@ -88,24 +94,27 @@ def force_unicode_recursive(obj):
     if isinstance(obj, (list, tuple)):
         return type(obj)(map(force_unicode_recursive, obj))
     if isinstance(obj, dict):
-        return {force_unicode_recursive(k): force_unicode_recursive(v) for k, v in obj.items()}
+        return {force_unicode_recursive(k): force_unicode_recursive(v)
+                for k, v in obj.items()}
     return obj
 
 
 def json_dumps(obj, ignore_unserializable=False):
-    import json
     try:
-        ans = json.dumps(obj, indent=2, default=safe_to_json if ignore_unserializable else to_json, sort_keys=True, ensure_ascii=False)
+        ans = json.dumps(obj, indent=2, default=safe_to_json
+                         if ignore_unserializable
+                         else to_json, sort_keys=True, ensure_ascii=False)
     except UnicodeDecodeError:
         obj = force_unicode_recursive(obj)
-        ans = json.dumps(obj, indent=2, default=safe_to_json if ignore_unserializable else to_json, sort_keys=True, ensure_ascii=False)
+        ans = json.dumps(obj, indent=2, default=safe_to_json
+                         if ignore_unserializable
+                         else to_json, sort_keys=True, ensure_ascii=False)
     if not isinstance(ans, bytes):
         ans = ans.encode('utf-8')
     return ans
 
 
 def json_loads(raw):
-    import json
     if isinstance(raw, bytes):
         raw = raw.decode('utf-8')
     return json.loads(raw, object_hook=from_json)
@@ -119,26 +128,28 @@ def make_config_dir():
 class Option(object):
 
     def __init__(self, name, switches=[], help='', type=None, choices=None,
-                 check=None, group=None, default=None, action=None, metavar=None):
+                 check=None, group=None, default=None, action=None,
+                 metavar=None):
         if choices:
             type = 'choice'
 
         self.name = name
         self.switches = switches
         self.help = help.replace('%default', repr(default)) if help else None
         self.type = type
         if self.type is None and action is None and choices is None:
             if isinstance(default, float):
                 self.type = 'float'
-            elif isinstance(default, numbers.Integral) and not isinstance(default, bool):
+            elif (isinstance(default, numbers.Integral) and
+                  not isinstance(default, bool)):
                 self.type = 'int'
 
         self.choices = choices
         self.check = check
         self.group = group
         self.default = default
         self.action = action
         self.metavar = metavar
 
     def __eq__(self, other):
         return self.name == getattr(other, 'name', other)
@@ -153,21 +164,22 @@ class Option(object):
 class OptionValues(object):
 
     def copy(self):
-        return deepcopy(self)
+        return copy.deepcopy(self)
 
 
 class OptionSet(object):
 
-    OVERRIDE_PAT = re.compile(r'#{3,100} Override Options #{15}(.*?)#{3,100} End Override #{3,100}',
-                              re.DOTALL|re.IGNORECASE)
+    OVERRIDE_PAT = re.compile(r'#{3,100} Override Options #{15}(.*?)#{3,100} '
+                              'End Override #{3,100}',
+                              re.DOTALL | re.IGNORECASE)
 
     def __init__(self, description=''):
         self.description = description
         self.defaults = {}
         self.preferences = []
         self.group_list = []
         self.groups = {}
         self.set_buffer = {}
         self.loads_pat = None
 
     def has_option(self, name_or_option_object):
@@ -188,10 +200,11 @@ class OptionSet(object):
 
     def add_group(self, name, description=''):
         if name in self.group_list:
-            raise ValueError('A group by the name %s already exists in this set'%name)
+            raise ValueError('A group by the name %s already exists in this '
+                             'set' % name)
         self.groups[name] = description
         self.group_list.append(name)
-        return partial(self.add_opt, group=name)
+        return functools.partial(self.add_opt, group=name)
 
     def update(self, other):
         for name in other.groups.keys():
@@ -204,9 +217,10 @@ class OptionSet(object):
             self.preferences.append(pref)
 
     def smart_update(self, opts1, opts2):
-        '''
-        Updates the preference values in opts1 using only the non-default preference values in opts2.
-        '''
+        """
+        Updates the preference values in opts1 using only the non-default
+        preference values in opts2.
+        """
         for pref in self.preferences:
             new = getattr(opts2, pref.name, pref.default)
             if new != pref.default:
@@ -217,47 +231,45 @@ class OptionSet(object):
             self.preferences.remove(name)
 
     def add_opt(self, name, switches=[], help=None, type=None, choices=None,
                 group=None, default=None, action=None, metavar=None):
-        '''
+        """
         Add an option to this section.
 
-        :param name: The name of this option. Must be a valid Python identifier.
-                     Must also be unique in this OptionSet and all its subsets.
-        :param switches: List of command line switches for this option
-                         (as supplied to :module:`optparse`). If empty, this
-                         option will not be added to the command line parser.
-        :param help: Help text.
-        :param type: Type checking of option values. Supported types are:
-                     `None, 'choice', 'complex', 'float', 'int', 'string'`.
-        :param choices: List of strings or `None`.
-        :param group: Group this option belongs to. You must previously
-                      have created this group with a call to :method:`add_group`.
-        :param default: The default value for this option.
-        :param action: The action to pass to optparse. Supported values are:
-                       `None, 'count'`. For choices and boolean options,
-                       action is automatically set correctly.
-        '''
-        pref = Option(name, switches=switches, help=help, type=type, choices=choices,
-                      group=group, default=default, action=action, metavar=None)
+        :param name: The name of this option. Must be a valid Python
+                     identifier. Must also be unique in this OptionSet and all
+                     its subsets.
+        :param switches: List of command line switches for this option (as
+                         supplied to :module:`optparse`). If empty, this option
+                         will not be added to the command line parser.
+        :param help: Help text.
+        :param type: Type checking of option values. Supported types are:
+                     `None, 'choice', 'complex', 'float', 'int', 'string'`.
+        :param choices: List of strings or `None`.
+        :param group: Group this option belongs to. You must previously
+                      have created this group with a call to
+                      :method:`add_group`.
+        :param default: The default value for this option.
+        :param action: The action to pass to optparse. Supported values are:
+                       `None, 'count'`. For choices and boolean options,
+                       action is automatically set correctly.
+        """
+        pref = Option(name, switches=switches, help=help, type=type,
+                      choices=choices, group=group, default=default,
+                      action=action, metavar=None)
         if group is not None and group not in self.groups.keys():
-            raise ValueError('Group %s has not been added to this section'%group)
+            raise ValueError('Group %s has not been added to this section' %
+                             group)
 
         if pref in self.preferences:
-            raise ValueError('An option with the name %s already exists in this set.'%name)
+            raise ValueError('An option with the name %s already exists in '
+                             'this set.' % name)
         self.preferences.append(pref)
         self.defaults[name] = default
 
-    def retranslate_help(self):
-        t = _
-        for opt in self.preferences:
-            if opt.help:
-                opt.help = t(opt.help)
-                if opt.name == 'use_primary_find_in_search':
-                    opt.help = opt.help.format(u'ñ')
-
     def option_parser(self, user_defaults=None, usage='', gui_mode=False):
         from ebook_converter.utils.config import OptionParser
         parser = OptionParser(usage, gui_mode=gui_mode)
-        groups = defaultdict(lambda : parser)
+        groups = collections.defaultdict(lambda: parser)
         for group, desc in self.groups.items():
             groups[group] = parser.add_option_group(group.upper(), desc)
 
@@ -270,15 +282,13 @@ class OptionSet(object):
             action = 'store'
             if pref.default is True or pref.default is False:
                 action = 'store_' + ('false' if pref.default else 'true')
-            args = dict(
-                dest=pref.name,
-                help=pref.help,
-                metavar=pref.metavar,
-                type=pref.type,
-                choices=pref.choices,
-                default=getattr(user_defaults, pref.name, pref.default),
-                action=action,
-            )
+            args = {'dest': pref.name,
+                    'help': pref.help,
+                    'metavar': pref.metavar,
+                    'type': pref.type,
+                    'choices': pref.choices,
+                    'default': getattr(user_defaults, pref.name, pref.default),
+                    'action': action}
             g.add_option(*pref.switches, **args)
 
         return parser
@@ -292,7 +302,9 @@ class OptionSet(object):
     def parse_string(self, src):
         options = {}
         if src:
-            is_old_style = (isinstance(src, bytes) and src.startswith(b'#')) or (isinstance(src, str) and src.startswith(u'#'))
+            is_old_style = (isinstance(src, bytes) and
+                            src.startswith(b'#')) or (isinstance(src, str) and
+                                                      src.startswith(u'#'))
             if is_old_style:
                 options = parse_old_style(src)
             else:
@@ -302,7 +314,8 @@ class OptionSet(object):
                         raise Exception('options is not a dictionary')
                 except Exception as err:
                     try:
-                        print('Failed to parse options string with error: {}'.format(err))
+                        print('Failed to parse options string with error: {}'
+                              .format(err))
                     except Exception:
                         pass
         opts = OptionValues()
@@ -316,20 +329,21 @@ class OptionSet(object):
         return opts
 
     def serialize(self, opts, ignore_unserializable=False):
-        data = {pref.name: getattr(opts, pref.name, pref.default) for pref in self.preferences}
+        data = {pref.name: getattr(opts, pref.name, pref.default)
+                for pref in self.preferences}
         return json_dumps(data, ignore_unserializable=ignore_unserializable)
 
 
 class ConfigInterface(object):
 
     def __init__(self, description):
         self.option_set = OptionSet(description=description)
         self.add_opt = self.option_set.add_opt
         self.add_group = self.option_set.add_group
         self.remove_opt = self.remove = self.option_set.remove_opt
         self.parse_string = self.option_set.parse_string
         self.get_option = self.option_set.get_option
         self.preferences = self.option_set.preferences
 
     def update(self, other):
         self.option_set.update(other.option_set)
@@ -343,9 +357,9 @@ class ConfigInterface(object):
 
 
 class Config(ConfigInterface):
-    '''
+    """
     A file based configuration.
-    '''
+    """
 
     def __init__(self, basename, description=''):
         ConfigInterface.__init__(self, description)
@@ -368,9 +382,8 @@ class Config(ConfigInterface):
                 traceback.print_exc()
         if not src:
             path = path.rpartition('.')[0]
-            from ebook_converter.utils.shared_file import share_open
             try:
-                with share_open(path, 'rb') as f:
+                with open(path, 'rb') as f:
                     src = f.read().decode('utf-8')
             except Exception:
                 pass
@@ -378,7 +391,8 @@ class Config(ConfigInterface):
         migrate = bool(src)
         ans = self.option_set.parse_string(src)
         if migrate:
-            new_src = self.option_set.serialize(ans, ignore_unserializable=True)
+            new_src = self.option_set.serialize(ans,
+                                                ignore_unserializable=True)
             with ExclusiveFile(self.config_file_path) as f:
                 f.seek(0), f.truncate()
                 f.write(new_src)
@@ -386,9 +400,11 @@ class Config(ConfigInterface):
 
     def set(self, name, val):
         if not self.option_set.has_option(name):
-            raise ValueError('The option %s is not defined.'%name)
+            raise ValueError('The option %s is not defined.' % name)
+
         if not os.path.exists(config_dir):
             make_config_dir()
+
         with ExclusiveFile(self.config_file_path) as f:
             src = f.read()
             opts = self.option_set.parse_string(src)
@@ -402,9 +418,9 @@ class Config(ConfigInterface):
 
 
 class StringConfig(ConfigInterface):
-    '''
+    """
     A string based configuration
-    '''
+    """
 
     def __init__(self, src, description=''):
         ConfigInterface.__init__(self, description)
@@ -420,20 +436,21 @@ class StringConfig(ConfigInterface):
 
     def set(self, name, val):
         if not self.option_set.has_option(name):
-            raise ValueError('The option %s is not defined.'%name)
+            raise ValueError('The option %s is not defined.' % name)
+
         opts = self.option_set.parse_string(self.src)
         setattr(opts, name, val)
         self.set_src(self.option_set.serialize(opts))
 
 
 class ConfigProxy(object):
-    '''
+    """
     A Proxy to minimize file reads for widely used config settings
-    '''
+    """
 
     def __init__(self, config):
         self.__config = config
         self.__opts = None
 
     @property
     def defaults(self):
@@ -442,9 +459,6 @@ class ConfigProxy(object):
     def refresh(self):
         self.__opts = self.__config.parse()
 
-    def retranslate_help(self):
-        self.__config.option_set.retranslate_help()
-
     def __getitem__(self, key):
         return self.get(key)
 
@@ -470,7 +484,8 @@ class ConfigProxy(object):
 
 
 def create_global_prefs(conf_obj=None):
-    c = Config('global', 'calibre wide preferences') if conf_obj is None else conf_obj
+    c = Config('global',
+               'calibre wide preferences') if conf_obj is None else conf_obj
     c.add_opt('database_path',
               default=os.path.expanduser('~/library1.db'),
              help='Path to the database in which books are stored')
@@ -481,17 +496,19 @@ def create_global_prefs(conf_obj=None):
     c.add_opt('network_timeout', default=5,
               help='Default timeout for network operations (seconds)')
     c.add_opt('library_path', default=None,
-              help='Path to directory in which your library of books is stored')
+              help='Path to directory in which your library of books is '
+                   'stored')
     c.add_opt('language', default=None,
               help='The language in which to display the user interface')
-    c.add_opt('output_format', default='EPUB',
-              help='The default output format for e-book conversions. When auto-converting'
-              ' to send to a device this can be overridden by individual device preferences.'
-              ' These can be changed by right clicking the device icon in calibre and'
-              ' choosing "Configure".')
+    c.add_opt('output_format', default='EPUB', help='The default output '
+              'format for e-book conversions. When auto-converting to send to '
+              'a device this can be overridden by individual device '
+              'preferences. These can be changed by right clicking the device '
+              'icon in calibre and choosing "Configure".')
-    c.add_opt('input_format_order', default=['EPUB', 'AZW3', 'MOBI', 'LIT', 'PRC',
-              'FB2', 'HTML', 'HTM', 'XHTM', 'SHTML', 'XHTML', 'ZIP', 'DOCX', 'ODT', 'RTF', 'PDF',
-              'TXT'],
+    c.add_opt('input_format_order',
+              default=['EPUB', 'AZW3', 'MOBI', 'LIT', 'PRC', 'FB2', 'HTML',
+                       'HTM', 'XHTM', 'SHTML', 'XHTML', 'ZIP', 'DOCX', 'ODT',
+                       'RTF', 'PDF', 'TXT'],
               help='Ordered list of formats to prefer for input.')
     c.add_opt('read_file_metadata', default=True,
               help='Read metadata from files')
@@ -501,27 +518,32 @@ def create_global_prefs(conf_obj=None):
                    'Most tasks like conversion/news download/adding books/etc. '
                    'are affected by this setting.')
     c.add_opt('swap_author_names', default=False,
               help='Swap author first and last names when reading metadata')
     c.add_opt('add_formats_to_existing', default=False,
               help='Add new formats to existing book records')
     c.add_opt('check_for_dupes_on_ctl', default=False,
               help='Check for duplicates when copying to another library')
     c.add_opt('installation_uuid', default=None, help='Installation UUID')
-    c.add_opt('new_book_tags', default=[], help='Tags to apply to books added to the library')
-    c.add_opt('mark_new_books', default=False, help='Mark newly added books. The mark is a temporary mark that is automatically removed when calibre is restarted.')
+    c.add_opt('new_book_tags', default=[],
+              help='Tags to apply to books added to the library')
+    c.add_opt('mark_new_books', default=False, help='Mark newly added books. '
+              'The mark is a temporary mark that is automatically removed '
+              'when calibre is restarted.')
 
     # these are here instead of the gui preferences because calibredb and
     # calibre server can execute searches
-    c.add_opt('saved_searches', default={}, help='List of named saved searches')
-    c.add_opt('user_categories', default={}, help='User-created Tag browser categories')
+    c.add_opt('saved_searches', default={},
+              help='List of named saved searches')
+    c.add_opt('user_categories', default={},
+              help='User-created Tag browser categories')
     c.add_opt('manage_device_metadata', default='manual',
               help='How and when calibre updates metadata on the device.')
     c.add_opt('limit_search_columns', default=False,
               help='When searching for text without using lookup '
                    'prefixes, as for example, Red instead of title:Red, '
                    'limit the columns searched to those named below.')
     c.add_opt('limit_search_columns_to',
               default=['title', 'authors', 'tags', 'series', 'publisher'],
               help='Choose columns to be searched when not using prefixes, '
                    'as for example, when searching for Red instead of '
                    'title:Red. Enter a list of search/lookup names '
@@ -536,9 +558,11 @@ def create_global_prefs(conf_obj=None):
                    'this is much slower than a simple search on very large '
                    'libraries. Also, this option will have no effect if you turn '
                    'on case-sensitive searching')
-    c.add_opt('case_sensitive', default=False, help='Make searches case-sensitive')
+    c.add_opt('case_sensitive', default=False,
+              help='Make searches case-sensitive')
 
-    c.add_opt('migrated', default=False, help='For Internal use. Don\'t modify.')
+    c.add_opt('migrated', default=False,
+              help='For Internal use. Don\'t modify.')
     return c
 
 
@@ -581,7 +605,8 @@ def write_custom_tweaks(tweaks_dict):
     changed_tweaks = {}
     default_tweaks = exec_tweaks(default_tweaks_raw())
     for key, cval in tweaks_dict.items():
-        if key in default_tweaks and normalize_tweak(cval) == normalize_tweak(default_tweaks[key]):
+        if (key in default_tweaks and
+                normalize_tweak(cval) == normalize_tweak(default_tweaks[key])):
             continue
         changed_tweaks[key] = cval
     raw = json_dumps(changed_tweaks)
@@ -598,10 +623,10 @@ def exec_tweaks(path):
         raw = f.read()
         fname = f.name
     code = compile(raw, fname, 'exec')
-    l = {}
+    x = {}
     g = {'__file__': fname}
-    exec(code, g, l)
-    return l
+    exec(code, g, x)
+    return x
 
 
 def read_custom_tweaks():
@@ -1,17 +1,10 @@
-import codecs
 import sys
-import unicodedata
 
 # Setup code {{{
-from ebook_converter.constants_old import plugins
 from ebook_converter.polyglot.builtins import cmp
-from ebook_converter.utils.config_base import tweaks
+from ebook_converter.utils import config_base
 
 
-__license__ = 'GPL v3'
-__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
-__docformat__ = 'restructuredtext en'
-
 is_narrow_build = sys.maxunicode < 0x10ffff
 _locale = _collator = _primary_collator = _sort_collator = _numeric_collator = _case_sensitive_collator = None
 cmp
@@ -24,33 +17,6 @@ _icu, err = 1, None # plugins['icu']
 if _icu is None:
     raise RuntimeError('Failed to load icu with error: %s' % err)
 del err
-#icu_unicode_version = getattr(_icu, 'unicode_version', None)
-# _nmodes = {m:getattr(_icu, m) for m in ('NFC', 'NFD', 'NFKC', 'NFKD')}
-
-# Ensure that the python internal filesystem and default encodings are not ASCII
-
-
-#def is_ascii(name):
-#    try:
-#        return codecs.lookup(name).name == b'ascii'
-#    except (TypeError, LookupError):
-#        return True
-#
-#
-#try:
-#    if is_ascii(sys.getdefaultencoding()):
-#        _icu.set_default_encoding(b'utf-8')
-#except:
-#    import traceback
-#    traceback.print_exc()
-#
-#try:
-#    if is_ascii(sys.getfilesystemencoding()):
-#        _icu.set_filesystem_encoding(b'utf-8')
-#except:
-#    import traceback
-#    traceback.print_exc()
-#del is_ascii
 
 
 def collator():
@@ -58,8 +24,8 @@ def collator():
     if _collator is None:
         if _locale is None:
             from ebook_converter.utils.localization import get_lang
-            if tweaks['locale_for_sorting']:
-                _locale = tweaks['locale_for_sorting']
+            if config_base.tweaks['locale_for_sorting']:
+                _locale = config_base.tweaks['locale_for_sorting']
             else:
                 _locale = get_lang()
         try:
@@ -91,7 +57,7 @@ def sort_collator():
     if _sort_collator is None:
         _sort_collator = collator().clone()
         _sort_collator.strength = _icu.UCOL_SECONDARY
-        _sort_collator.numeric = tweaks['numeric_collation']
+        _sort_collator.numeric = config_base.tweaks['numeric_collation']
     return _sort_collator
 
 
@@ -311,9 +277,3 @@ string_length = len #_icu.string_length if is_narrow_build else len
 
 # Return the number of UTF-16 codepoints in a string
 utf16_length = len # if is_narrow_build else _icu.utf16_length
-
-################################################################################
-
-# if __name__ == '__main__':
-#     from ebook_converter.utils.icu_test import run
-#     run(verbosity=4)
@@ -14,66 +14,7 @@ until all open file handles are closed. You also cannot delete the containing
 directory until all file handles are closed. To get around this, rename the
 file before deleting it.
 """
-import os, sys
+import os
-
-from ebook_converter.polyglot.builtins import reraise
-from ebook_converter.constants_old import iswindows, plugins
 
 
-__license__ = 'GPL v3'
-__copyright__ = '2015, Kovid Goyal <kovid at kovidgoyal.net>'
-
-# speedup, err = plugins['speedup']
-
-# if not speedup:
-#     raise RuntimeError('Failed to load the speedup plugin with error: %s' % err)
-
-valid_modes = {'a', 'a+', 'a+b', 'ab', 'r', 'rb', 'r+', 'r+b', 'w', 'wb', 'w+', 'w+b'}
-
-
-def validate_mode(mode):
-    return mode in valid_modes
-
-
-class FlagConstants(object):
-
-    def __init__(self):
-        for x in 'APPEND CREAT TRUNC EXCL RDWR RDONLY WRONLY'.split():
-            x = 'O_' + x
-            setattr(self, x, getattr(os, x))
-        for x in 'RANDOM SEQUENTIAL TEXT BINARY'.split():
-            x = 'O_' + x
-            setattr(self, x, getattr(os, x, 0))
-
-
-fc = FlagConstants()
-
-
-def flags_from_mode(mode):
-    if not validate_mode(mode):
-        raise ValueError('The mode is invalid')
-    m = mode[0]
-    random = '+' in mode
-    binary = 'b' in mode
-    if m == 'a':
-        flags = fc.O_APPEND | fc.O_CREAT
-        if random:
-            flags |= fc.O_RDWR | fc.O_RANDOM
-        else:
-            flags |= fc.O_WRONLY | fc.O_SEQUENTIAL
-    elif m == 'r':
-        if random:
-            flags = fc.O_RDWR | fc.O_RANDOM
-        else:
-            flags = fc.O_RDONLY | fc.O_SEQUENTIAL
-    elif m == 'w':
-        if random:
-            flags = fc.O_RDWR | fc.O_RANDOM
-        else:
-            flags = fc.O_WRONLY | fc.O_SEQUENTIAL
-        flags |= fc.O_TRUNC | fc.O_CREAT
-    flags |= (fc.O_BINARY if binary else fc.O_TEXT)
-    return flags
-
-
 share_open = open