mirror of
https://github.com/gryf/ebook-converter.git
synced 2025-12-18 13:10:17 +01:00
Removed unused conversion code in the catalog feature.
This commit is contained in:
@@ -700,13 +700,6 @@ plugins += [
|
|||||||
]
|
]
|
||||||
# }}}
|
# }}}
|
||||||
|
|
||||||
# Catalog plugins {{{

from ebook_converter.library.catalogs.csv_xml import CSV_XML
from ebook_converter.library.catalogs.bibtex import BIBTEX
from ebook_converter.library.catalogs.epub_mobi import EPUB_MOBI

# Register the built-in catalog generators with the plugin registry.
plugins += [CSV_XML, BIBTEX, EPUB_MOBI]
# }}}

# Profiles {{{
from ebook_converter.customize.profiles import input_profiles, output_profiles

plugins += input_profiles + output_profiles
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'


# All fields a catalog plugin may output. The leading 'all' is a
# pseudo-field meaning "every real field"; see FIELDS[1:] usages below.
FIELDS = ['all', 'title', 'title_sort', 'author_sort', 'authors', 'comments',
          'cover', 'formats', 'id', 'isbn', 'library_name', 'ondevice',
          'pubdate', 'publisher', 'rating', 'series_index', 'series', 'size',
          'tags', 'timestamp', 'uuid', 'languages', 'identifiers']

# Allowed fields for template (citation templates, e.g. '{authors}{id}')
TEMPLATE_ALLOWED_FIELDS = ['author_sort', 'authors', 'id', 'isbn', 'pubdate',
                           'title_sort', 'publisher', 'series_index',
                           'series', 'tags', 'timestamp', 'title', 'uuid']
|
|
||||||
|
|
||||||
|
|
||||||
# Error types raised by the catalog builders. The raise sites live in the
# catalog driver modules (outside this file); the class names indicate the
# failure condition each one signals.
class AuthorSortMismatchException(Exception):
    pass


class EmptyCatalogException(Exception):
    pass


class InvalidGenresSourceFieldException(Exception):
    pass
|
|
||||||
|
|
||||||
@@ -1,399 +0,0 @@
|
|||||||
import re, codecs, os, numbers
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
from ebook_converter.utils import date
|
|
||||||
from ebook_converter.customize import CatalogPlugin
|
|
||||||
from ebook_converter.library.catalogs import FIELDS, TEMPLATE_ALLOWED_FIELDS
|
|
||||||
from ebook_converter.customize.conversion import DummyReporter
|
|
||||||
from ebook_converter.ebooks.metadata import format_isbn
|
|
||||||
|
|
||||||
|
|
||||||
__license__ = 'GPL v3'
|
|
||||||
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
|
|
||||||
__docformat__ = 'restructuredtext en'
|
|
||||||
|
|
||||||
|
|
||||||
class BIBTEX(CatalogPlugin):
    """BIBTEX catalog generator.

    Writes one BibTeX entry per matching database book to the output
    file chosen on the command line (``.bib``).
    """

    Option = namedtuple('Option', 'option, default, dest, action, help')

    name = 'Catalog_BIBTEX'
    description = 'BIBTEX catalog generator'
    supported_platforms = ['osx', 'linux']
    author = 'Sengian'
    version = (1, 0, 0)
    file_types = {'bib'}

    cli_options = [
        Option('--fields',
               default='all',
               dest='fields',
               action=None,
               help='The fields to output when cataloging books in the '
               'database. Should be a comma-separated list of fields.\n'
               'Available fields: %(fields)s.\n'
               'plus user-created custom fields.\n'
               'Example: %(opt)s=title,authors,tags\n'
               "Default: '%%default'\n"
               "Applies to: BIBTEX output format" % dict(
                   fields=', '.join(FIELDS), opt='--fields')),

        Option('--sort-by',
               default='id',
               dest='sort_by',
               action=None,
               help='Output field to sort on.\n'
               'Available fields: author_sort, id, rating, size, timestamp, title.\n'
               "Default: '%default'\n"
               "Applies to: BIBTEX output format"),

        Option('--create-citation',
               default='True',
               dest='impcit',
               action=None,
               help='Create a citation for BibTeX entries.\n'
               'Boolean value: True, False\n'
               "Default: '%default'\n"
               "Applies to: BIBTEX output format"),

        Option('--add-files-path',
               default='True',
               dest='addfiles',
               action=None,
               help='Create a file entry if formats is selected for BibTeX entries.\n'
               'Boolean value: True, False\n'
               "Default: '%default'\n"
               "Applies to: BIBTEX output format"),

        Option('--citation-template',
               default='{authors}{id}',
               dest='bib_cit',
               action=None,
               help='The template for citation creation from database fields.\n'
               'Should be a template with {} enclosed fields.\n'
               'Available fields: %s.\n'
               "Default: '%%default'\n"
               "Applies to: BIBTEX output format" %
               ', '.join(TEMPLATE_ALLOWED_FIELDS)),

        Option('--choose-encoding',
               default='utf8',
               dest='bibfile_enc',
               action=None,
               help='BibTeX file encoding output.\n'
               'Available types: utf8, cp1252, ascii.\n'
               "Default: '%default'\n"
               "Applies to: BIBTEX output format"),

        Option('--choose-encoding-configuration',
               default='strict',
               dest='bibfile_enctag',
               action=None,
               help='BibTeX file encoding flag.\n'
               'Available types: strict, replace, ignore, backslashreplace.\n'
               "Default: '%default'\n"
               "Applies to: BIBTEX output format"),

        Option('--entry-type',
               default='book',
               dest='bib_entry',
               action=None,
               help='Entry type for BibTeX catalog.\n'
               'Available types: book, misc, mixed.\n'
               "Default: '%default'\n"
               "Applies to: BIBTEX output format")]

    def run(self, path_to_output, opts, db, notification=DummyReporter()):
        """Generate the BibTeX catalog at *path_to_output*.

        :param path_to_output: output file path; its extension selects fmt
        :param opts: parsed CLI/GUI options (see ``cli_options``)
        :param db: library database handle
        :param notification: progress reporter (unused beyond storage)
        """
        from ebook_converter.utils.date import isoformat
        from ebook_converter.utils.html2text import html2text
        from ebook_converter.utils.bibtex import BibTeX
        from ebook_converter.library.save_to_disk import preprocess_template
        from ebook_converter.utils.logging import default_log as log
        from ebook_converter.utils.filenames import ascii_text

        library_name = os.path.basename(db.library_path)

        def create_bibtex_entry(entry, fields, mode, template_citation,
                                bibtexdict, db, citation_bibtex=True,
                                calibre_files=True):
            # Bibtex doesn't like UTF-8 but keep unicode until writing.
            # Pick the entry type first; in strict 'book' mode an entry
            # that is not book-valid is skipped (empty string returned).
            bibtex_entry = []
            if mode != "misc" and check_entry_book_valid(entry):
                bibtex_entry.append('@book{')
            elif mode != "book":
                bibtex_entry.append('@misc{')
            else:
                # case strict book
                return ''

            if citation_bibtex:
                # Citation tag
                bibtex_entry.append(make_bibtex_citation(entry,
                                    template_citation, bibtexdict))
                bibtex_entry = [' '.join(bibtex_entry)]

            for field in fields:
                if field.startswith('#'):
                    item = db.get_field(entry['id'], field, index_is_id=True)
                    if isinstance(item, (bool, numbers.Number)):
                        item = repr(item)
                elif field == 'title_sort':
                    item = entry['sort']
                elif field == 'library_name':
                    item = library_name
                else:
                    item = entry[field]

                # check if the field should be included (none or empty)
                if item is None:
                    continue
                try:
                    if len(item) == 0:
                        continue
                except TypeError:
                    # numbers/dates have no len(); they are never "empty"
                    pass

                if field == 'authors':
                    bibtex_entry.append('author = "%s"' % bibtexdict.bibtex_author_format(item))

                elif field == 'id':
                    bibtex_entry.append('calibreid = "%s"' % int(item))

                elif field == 'rating':
                    bibtex_entry.append('rating = "%s"' % int(item))

                elif field == 'size':
                    bibtex_entry.append('%s = "%s octets"' % (field, int(item)))

                elif field == 'tags':
                    # A list to flatten
                    bibtex_entry.append('tags = "%s"' % bibtexdict.utf8ToBibtex(', '.join(item)))

                elif field == 'comments':
                    # \n removal
                    item = item.replace('\r\n', ' ')
                    item = item.replace('\n', ' ')
                    # unmatched brace removal (users should use \leftbrace
                    # or \rightbrace for single braces)
                    item = bibtexdict.stripUnmatchedSyntax(item, '{', '}')
                    # html to text
                    try:
                        item = html2text(item)
                    except Exception:
                        log.warn("Failed to convert comments to text")
                    bibtex_entry.append('note = "%s"' % bibtexdict.utf8ToBibtex(item))

                elif field == 'isbn':
                    # Could be 9, 10 or 13 digits
                    bibtex_entry.append('isbn = "%s"' % format_isbn(item))

                elif field == 'formats':
                    # Add file path if format is selected
                    formats = [format.rpartition('.')[2].lower() for format in item]
                    bibtex_entry.append('formats = "%s"' % ', '.join(formats))
                    if calibre_files:
                        files = [':%s:%s' % (format, format.rpartition('.')[2].upper())
                                 for format in item]
                        bibtex_entry.append('file = "%s"' % ', '.join(files))

                elif field == 'series_index':
                    bibtex_entry.append('volume = "%s"' % int(item))

                elif field == 'timestamp':
                    bibtex_entry.append('timestamp = "%s"' % isoformat(item).partition('T')[0])

                elif field == 'pubdate':
                    bibtex_entry.append('year = "%s"' % item.year)
                    bibtex_entry.append('month = "%s"' % bibtexdict.utf8ToBibtex(date.strftime("%b", item)))

                elif field.startswith('#') and isinstance(item, (str, bytes)):
                    bibtex_entry.append('custom_%s = "%s"' % (field[1:],
                                        bibtexdict.utf8ToBibtex(item)))

                elif isinstance(item, (str, bytes)):
                    # title, publisher, cover, uuid, ondevice, author_sort,
                    # series, title_sort and any other plain-string field
                    bibtex_entry.append('%s = "%s"' % (field, bibtexdict.utf8ToBibtex(item)))

            bibtex_entry = ',\n '.join(bibtex_entry)
            bibtex_entry += ' }\n\n'

            return bibtex_entry

        def check_entry_book_valid(entry):
            # Check that the required fields are ok for a book entry
            for field in ['title', 'authors', 'publisher']:
                if entry[field] is None or len(entry[field]) == 0:
                    return False
            if entry['pubdate'] is None:
                return False
            else:
                return True

        def make_bibtex_citation(entry, template_citation, bibtexclass):

            # define a function to replace the template entry by its value
            def tpl_replace(objtplname):
                tpl_field = re.sub(r'[\{\}]', '', objtplname.group())

                if tpl_field in TEMPLATE_ALLOWED_FIELDS:
                    if tpl_field in ['pubdate', 'timestamp']:
                        tpl_field = isoformat(entry[tpl_field]).partition('T')[0]
                    elif tpl_field in ['tags', 'authors']:
                        tpl_field = entry[tpl_field][0]
                    elif tpl_field in ['id', 'series_index']:
                        tpl_field = str(entry[tpl_field])
                    else:
                        tpl_field = entry[tpl_field]
                    return ascii_text(tpl_field)
                else:
                    return ''

            if len(template_citation) > 0:
                tpl_citation = bibtexclass.utf8ToBibtex(
                    bibtexclass.ValidateCitationKey(re.sub(r'\{[^{}]*\}',
                        tpl_replace, template_citation)))

                if len(tpl_citation) > 0:
                    return tpl_citation

            # Template produced nothing usable: fall back to the ISBN
            # digits, then to the database id.
            if len(entry["isbn"]) > 0:
                template_citation = '%s' % re.sub(r'[\D]', '', entry["isbn"])
            else:
                template_citation = '%s' % str(entry["id"])

            return bibtexclass.ValidateCitationKey(template_citation)

        self.fmt = path_to_output.rpartition('.')[2]
        self.notification = notification

        # Combobox options
        bibfile_enc = ['utf8', 'cp1252', 'ascii']
        bibfile_enctag = ['strict', 'replace', 'ignore', 'backslashreplace']
        bib_entry = ['mixed', 'misc', 'book']

        # Needed because the CLI passes the option value (str) while the
        # GUI widget passes the combobox index (int).
        try:
            bibfile_enc = bibfile_enc[opts.bibfile_enc]
            bibfile_enctag = bibfile_enctag[opts.bibfile_enctag]
            bib_entry = bib_entry[opts.bib_entry]
        except (TypeError, IndexError):
            if opts.bibfile_enc in bibfile_enc:
                bibfile_enc = opts.bibfile_enc
            else:
                log.warn("Incorrect --choose-encoding flag, revert to default")
                bibfile_enc = bibfile_enc[0]
            if opts.bibfile_enctag in bibfile_enctag:
                bibfile_enctag = opts.bibfile_enctag
            else:
                log.warn("Incorrect --choose-encoding-configuration flag, revert to default")
                bibfile_enctag = bibfile_enctag[0]
            if opts.bib_entry in bib_entry:
                bib_entry = opts.bib_entry
            else:
                log.warn("Incorrect --entry-type flag, revert to default")
                bib_entry = bib_entry[0]

        if opts.verbose:
            opts_dict = vars(opts)
            log("%s(): Generating %s" % (self.name, self.fmt))
            if opts.connected_device['is_device_connected']:
                log(" connected_device: %s" % opts.connected_device['name'])
            if opts_dict['search_text']:
                log(" --search='%s'" % opts_dict['search_text'])

            if opts_dict['ids']:
                log(" Book count: %d" % len(opts_dict['ids']))
                if opts_dict['search_text']:
                    log(" (--search ignored when a subset of the database is specified)")

            if opts_dict['fields']:
                if opts_dict['fields'] == 'all':
                    log(" Fields: %s" % ', '.join(FIELDS[1:]))
                else:
                    log(" Fields: %s" % opts_dict['fields'])

            log(" Output file will be encoded in %s with %s flag" % (bibfile_enc, bibfile_enctag))

            log(" BibTeX entry type is %s with a citation like '%s' flag" % (bib_entry, opts_dict['bib_cit']))

        # If a list of ids are provided, don't use search_text
        if opts.ids:
            opts.search_text = None

        data = self.search_sort_db(db, opts)

        if not len(data):
            log.error("\nNo matching database entries for search criteria '%s'" % opts.search_text)

        # Get the requested output fields as a list
        fields = self.get_output_fields(db, opts)

        # Initialize BibTeX class
        bibtexc = BibTeX()

        # Entries writing after Bibtex formating (or not)
        bibtexc.ascii_bibtex = (bibfile_enc == 'ascii')

        # Check citation choice and go to default in case of bad CLI
        if isinstance(opts.impcit, (str, bytes)):
            if opts.impcit == 'False':
                citation_bibtex = False
            elif opts.impcit == 'True':
                citation_bibtex = True
            else:
                log.warn("Incorrect --create-citation, revert to default")
                citation_bibtex = True
        else:
            citation_bibtex = opts.impcit

        # Check add file entry and go to default in case of bad CLI
        if isinstance(opts.addfiles, (str, bytes)):
            if opts.addfiles == 'False':
                addfiles_bibtex = False
            elif opts.addfiles == 'True':
                addfiles_bibtex = True
            else:
                log.warn("Incorrect --add-files-path, revert to default")
                addfiles_bibtex = True
        else:
            addfiles_bibtex = opts.addfiles

        # Preprocess for error and light correction
        template_citation = preprocess_template(opts.bib_cit)

        # Open output and write entries
        with codecs.open(path_to_output, 'w', bibfile_enc, bibfile_enctag)\
            as outfile:
            # File header
            nb_entries = len(data)

            # check in book strict if all is ok else throw a warning into log
            if bib_entry == 'book':
                nb_books = len(list(filter(check_entry_book_valid, data)))
                if nb_books < nb_entries:
                    log.warn("Only %d entries in %d are book compatible" % (nb_books, nb_entries))
                    nb_entries = nb_books

            # If connected device, add 'On Device' values to data
            if opts.connected_device['is_device_connected'] and 'ondevice' in fields:
                for entry in data:
                    entry['ondevice'] = db.catalog_plugin_on_device_temp_mapping[entry['id']]['ondevice']

            outfile.write('%%%Calibre catalog\n%%%{0} entries in catalog\n\n'.format(nb_entries))
            outfile.write('@preamble{"This catalog of %d entries was generated by calibre on %s"}\n\n'
                          % (nb_entries, date.strftime("%A, %d. %B %Y %H:%M")))

            for entry in data:
                outfile.write(create_bibtex_entry(entry, fields, bib_entry,
                              template_citation, bibtexc, db,
                              citation_bibtex, addfiles_bibtex))
|
|
||||||
@@ -1,237 +0,0 @@
|
|||||||
import re, codecs, os
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
from ebook_converter.customize import CatalogPlugin
|
|
||||||
from ebook_converter.library.catalogs import FIELDS
|
|
||||||
from ebook_converter.customize.conversion import DummyReporter
|
|
||||||
|
|
||||||
|
|
||||||
__license__ = 'GPL v3'
|
|
||||||
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
|
|
||||||
__docformat__ = 'restructuredtext en'
|
|
||||||
|
|
||||||
|
|
||||||
class CSV_XML(CatalogPlugin):
    """CSV/XML catalog generator.

    The output format is selected by the extension of *path_to_output*:
    ``.csv`` writes a quoted, UTF-8 (BOM-prefixed) CSV; ``.xml`` writes
    an lxml-built ``<calibredb>`` document.
    """

    Option = namedtuple('Option', 'option, default, dest, action, help')

    name = 'Catalog_CSV_XML'
    description = 'CSV/XML catalog generator'
    supported_platforms = ['osx', 'linux']
    author = 'Greg Riker'
    version = (1, 0, 0)
    file_types = {'csv', 'xml'}

    cli_options = [
        Option('--fields',
               default='all',
               dest='fields',
               action=None,
               help='The fields to output when cataloging books in the '
               'database. Should be a comma-separated list of fields.\n'
               'Available fields: %(fields)s,\n'
               'plus user-created custom fields.\n'
               'Example: %(opt)s=title,authors,tags\n'
               "Default: '%%default'\n"
               "Applies to: CSV, XML output formats" % dict(
                   fields=', '.join(FIELDS), opt='--fields')),

        Option('--sort-by',
               default='id',
               dest='sort_by',
               action=None,
               help='Output field to sort on.\n'
               'Available fields: author_sort, id, rating, size, timestamp, title_sort\n'
               "Default: '%default'\n"
               "Applies to: CSV, XML output formats")]

    def run(self, path_to_output, opts, db, notification=DummyReporter()):
        """Generate a CSV or XML catalog at *path_to_output*.

        :param path_to_output: output file path; its extension selects fmt
        :param opts: parsed CLI/GUI options (see ``cli_options``)
        :param db: library database handle
        :param notification: progress reporter (unused beyond storage)
        """
        from ebook_converter.library import current_library_name
        from ebook_converter.utils.date import isoformat
        from ebook_converter.utils.html2text import html2text
        from ebook_converter.utils.logging import default_log as log
        from lxml import etree
        from ebook_converter.ebooks.metadata import authors_to_string

        self.fmt = path_to_output.rpartition('.')[2]
        self.notification = notification
        current_library = current_library_name()
        if getattr(opts, 'library_path', None):
            current_library = os.path.basename(opts.library_path)

        if opts.verbose:
            opts_dict = vars(opts)
            log("%s('%s'): Generating %s" % (self.name, current_library, self.fmt.upper()))
            if opts.connected_device['is_device_connected']:
                log(" connected_device: %s" % opts.connected_device['name'])
            if opts_dict['search_text']:
                log(" --search='%s'" % opts_dict['search_text'])

            if opts_dict['ids']:
                log(" Book count: %d" % len(opts_dict['ids']))
                if opts_dict['search_text']:
                    log(" (--search ignored when a subset of the database is specified)")

            if opts_dict['fields']:
                if opts_dict['fields'] == 'all':
                    log(" Fields: %s" % ', '.join(FIELDS[1:]))
                else:
                    log(" Fields: %s" % opts_dict['fields'])

        # If a list of ids are provided, don't use search_text
        if opts.ids:
            opts.search_text = None

        data = self.search_sort_db(db, opts)

        if not len(data):
            log.error("\nNo matching database entries for search criteria '%s'" % opts.search_text)
            # raise SystemExit(1)

        # Get the requested output fields as a list
        fields = self.get_output_fields(db, opts)

        # If connected device, add 'On Device' values to data
        if opts.connected_device['is_device_connected'] and 'ondevice' in fields:
            for entry in data:
                entry['ondevice'] = db.catalog_plugin_on_device_temp_mapping[entry['id']]['ondevice']

        # Per-field metadata, used for datatype-driven conversions below.
        fm = {x: db.field_metadata.get(x, {}) for x in fields}

        if self.fmt == 'csv':
            # 'with' guarantees the handle is closed even if one of the
            # field conversions below raises (original used open()/close()).
            with codecs.open(path_to_output, 'w', 'utf8') as outfile:

                # Write a UTF-8 BOM
                outfile.write('\ufeff')

                # Output the field headers
                outfile.write('%s\n' % ','.join(fields))

                # Output the entry fields
                for entry in data:
                    outstr = []
                    for field in fields:
                        if field.startswith('#'):
                            item = db.get_field(entry['id'], field, index_is_id=True)
                            if isinstance(item, (list, tuple)):
                                if fm.get(field, {}).get('display', {}).get('is_names', False):
                                    item = ' & '.join(item)
                                else:
                                    item = ', '.join(item)
                        elif field == 'library_name':
                            item = current_library
                        elif field == 'title_sort':
                            item = entry['sort']
                        else:
                            item = entry[field]

                        if item is None:
                            outstr.append('""')
                            continue
                        elif field == 'formats':
                            fmt_list = []
                            for format in item:
                                fmt_list.append(format.rpartition('.')[2].lower())
                            item = ', '.join(fmt_list)
                        elif field == 'authors':
                            item = authors_to_string(item)
                        elif field == 'tags':
                            item = ', '.join(item)
                        elif field == 'isbn':
                            # Could be 9, 10 or 13 digits, with hyphens, possibly ending in 'X'
                            item = '%s' % re.sub(r'[^\dX-]', '', item)
                        elif fm.get(field, {}).get('datatype') == 'datetime':
                            item = isoformat(item, as_utc=False)
                        elif field == 'comments':
                            item = item.replace('\r\n', ' ')
                            item = item.replace('\n', ' ')
                        elif fm.get(field, {}).get('datatype', None) == 'rating' and item:
                            item = '%.2g' % (item / 2)

                        # Convert HTML to markdown text
                        if isinstance(item, str):
                            opening_tag = re.search(r'<(\w+)( |>)', item)
                            if opening_tag:
                                closing_tag = re.search(r'<\/%s>$' % opening_tag.group(1), item)
                                if closing_tag:
                                    item = html2text(item)

                        # CSV quoting: double any embedded double quotes
                        outstr.append('"%s"' % str(item).replace('"', '""'))

                    outfile.write(','.join(outstr) + '\n')

        elif self.fmt == 'xml':
            from lxml.builder import E

            root = E.calibredb()
            for r in data:
                record = E.record()
                root.append(record)

                for field in fields:
                    if field.startswith('#'):
                        val = db.get_field(r['id'], field, index_is_id=True)
                        if not isinstance(val, str):
                            val = str(val)
                        # '#' is not a valid XML tag character
                        item = getattr(E, field.replace('#', '_'))(val)
                        record.append(item)

                for field in ('id', 'uuid', 'publisher', 'rating', 'size',
                              'isbn', 'ondevice', 'identifiers'):
                    if field in fields:
                        val = r[field]
                        if not val:
                            continue
                        if not isinstance(val, (bytes, str)):
                            if (fm.get(field, {}).get('datatype', None) ==
                                    'rating' and val):
                                val = '%.2g' % (val / 2)
                            val = str(val)
                        item = getattr(E, field)(val)
                        record.append(item)

                if 'title' in fields:
                    title = E.title(r['title'], sort=r['sort'])
                    record.append(title)

                if 'authors' in fields:
                    aus = E.authors(sort=r['author_sort'])
                    for au in r['authors']:
                        aus.append(E.author(au))
                    record.append(aus)

                for field in ('timestamp', 'pubdate'):
                    if field in fields:
                        record.append(getattr(E, field)(isoformat(r[field], as_utc=False)))

                if 'tags' in fields and r['tags']:
                    tags = E.tags()
                    for tag in r['tags']:
                        tags.append(E.tag(tag))
                    record.append(tags)

                if 'comments' in fields and r['comments']:
                    record.append(E.comments(r['comments']))

                if 'series' in fields and r['series']:
                    record.append(E.series(r['series'],
                                  index=str(r['series_index'])))

                if 'cover' in fields and r['cover']:
                    record.append(E.cover(r['cover'].replace(os.sep, '/')))

                if 'formats' in fields and r['formats']:
                    fmt = E.formats()
                    for f in r['formats']:
                        fmt.append(E.format(f.replace(os.sep, '/')))
                    record.append(fmt)

                if 'library_name' in fields:
                    record.append(E.library_name(current_library))

            with open(path_to_output, 'wb') as f:
                f.write(etree.tostring(root, encoding='utf-8',
                        xml_declaration=True, pretty_print=True))
|
|
||||||
@@ -1,527 +0,0 @@
|
|||||||
import datetime, os, time
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
from ebook_converter.utils import date
|
|
||||||
from ebook_converter.customize import CatalogPlugin
|
|
||||||
from ebook_converter.customize.conversion import OptionRecommendation, DummyReporter
|
|
||||||
from ebook_converter.library import current_library_name
|
|
||||||
from ebook_converter.library.catalogs import AuthorSortMismatchException, EmptyCatalogException
|
|
||||||
from ebook_converter.ptempfile import PersistentTemporaryFile
|
|
||||||
from ebook_converter.utils.localization import langcode_to_name, canonicalize_lang, get_lang
|
|
||||||
|
|
||||||
|
|
||||||
Option = namedtuple('Option', 'option, default, dest, action, help')
|
|
||||||
|
|
||||||
|
|
||||||
class EPUB_MOBI(CatalogPlugin):
|
|
||||||
|
|
||||||
'EPUB catalog generator'
|
|
||||||
|
|
||||||
name = 'Catalog_EPUB_MOBI'
|
|
||||||
description = 'AZW3/EPUB/MOBI catalog generator'
|
|
||||||
supported_platforms = ['osx', 'linux']
|
|
||||||
minimum_calibre_version = (0, 7, 40)
|
|
||||||
author = 'Greg Riker'
|
|
||||||
version = (1, 0, 0)
|
|
||||||
file_types = {'azw3', 'epub', 'mobi'}
|
|
||||||
|
|
||||||
THUMB_SMALLEST = "1.0"
|
|
||||||
THUMB_LARGEST = "2.0"
|
|
||||||
|
|
||||||
cli_options = [Option('--catalog-title', # {{{
|
|
||||||
default='My Books',
|
|
||||||
dest='catalog_title',
|
|
||||||
action=None,
|
|
||||||
help='Title of generated catalog used as title in '
|
|
||||||
'metadata.\n'
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--cross-reference-authors',
|
|
||||||
default=False,
|
|
||||||
dest='cross_reference_authors',
|
|
||||||
action='store_true',
|
|
||||||
help="Create cross-references in Authors section "
|
|
||||||
"for books with multiple authors.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--debug-pipeline',
|
|
||||||
default=None,
|
|
||||||
dest='debug_pipeline',
|
|
||||||
action=None,
|
|
||||||
help="Save the output from different stages of the "
|
|
||||||
"conversion pipeline to the specified directory. "
|
|
||||||
"Useful if you are unsure at which stage of the "
|
|
||||||
"conversion process a bug is occurring.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--exclude-genre',
|
|
||||||
default=r'\[.+\]|^\+$',
|
|
||||||
dest='exclude_genre',
|
|
||||||
action=None,
|
|
||||||
help="Regex describing tags to exclude as genres.\n"
|
|
||||||
"Default: '%default' excludes bracketed tags, e.g. "
|
|
||||||
"'[Project Gutenberg]', and '+', the default tag "
|
|
||||||
"for read books.\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--exclusion-rules',
|
|
||||||
default="(('Catalogs','Tags','Catalog'),)",
|
|
||||||
dest='exclusion_rules',
|
|
||||||
action=None,
|
|
||||||
help="Specifies the rules used to exclude books "
|
|
||||||
"from the generated catalog.\n"
|
|
||||||
"The model for an exclusion rule is either\n"
|
|
||||||
"('<rule name>','Tags','<comma-separated list of "
|
|
||||||
"tags>') or\n"
|
|
||||||
"('<rule name>','<custom column>','<pattern>').\n"
|
|
||||||
"For example:\n"
|
|
||||||
"(('Archived books','#status','Archived'),)\n"
|
|
||||||
"will exclude a book with a value of 'Archived' in "
|
|
||||||
"the custom column 'status'.\n"
|
|
||||||
"When multiple rules are defined, all rules will be "
|
|
||||||
"applied.\n"
|
|
||||||
"Default: \n\"%default\"\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--generate-authors',
|
|
||||||
default=False,
|
|
||||||
dest='generate_authors',
|
|
||||||
action='store_true',
|
|
||||||
help="Include 'Authors' section in catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--generate-descriptions',
|
|
||||||
default=False,
|
|
||||||
dest='generate_descriptions',
|
|
||||||
action='store_true',
|
|
||||||
help="Include 'Descriptions' section in catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--generate-genres',
|
|
||||||
default=False,
|
|
||||||
dest='generate_genres',
|
|
||||||
action='store_true',
|
|
||||||
help="Include 'Genres' section in catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--generate-titles',
|
|
||||||
default=False,
|
|
||||||
dest='generate_titles',
|
|
||||||
action='store_true',
|
|
||||||
help="Include 'Titles' section in catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--generate-series',
|
|
||||||
default=False,
|
|
||||||
dest='generate_series',
|
|
||||||
action='store_true',
|
|
||||||
help="Include 'Series' section in catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--generate-recently-added',
|
|
||||||
default=False,
|
|
||||||
dest='generate_recently_added',
|
|
||||||
action='store_true',
|
|
||||||
help="Include 'Recently Added' section in catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--genre-source-field',
|
|
||||||
default='Tags',
|
|
||||||
dest='genre_source_field',
|
|
||||||
action=None,
|
|
||||||
help="Source field for 'Genres' section.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--header-note-source-field',
|
|
||||||
default='',
|
|
||||||
dest='header_note_source_field',
|
|
||||||
action=None,
|
|
||||||
help="Custom field containing note text to insert "
|
|
||||||
"in Description header.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--merge-comments-rule',
|
|
||||||
default='::',
|
|
||||||
dest='merge_comments_rule',
|
|
||||||
action=None,
|
|
||||||
help="#<custom field>:[before|after]:[True|False] "
|
|
||||||
"specifying:\n"
|
|
||||||
" <custom field> Custom field containing notes to "
|
|
||||||
"merge with Comments\n"
|
|
||||||
" [before|after] Placement of notes with respect "
|
|
||||||
"to Comments\n"
|
|
||||||
" [True|False] - A horizontal rule is inserted "
|
|
||||||
"between notes and Comments\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--output-profile',
|
|
||||||
default=None,
|
|
||||||
dest='output_profile',
|
|
||||||
action=None,
|
|
||||||
help="Specifies the output profile. In some cases, "
|
|
||||||
"an output profile is required to optimize the "
|
|
||||||
"catalog for the device. For example, 'kindle' or "
|
|
||||||
"'kindle_dx' creates a structured Table of Contents "
|
|
||||||
"with Sections and Articles.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--prefix-rules',
|
|
||||||
default="(('Read books','tags','+','\u2713'),"
|
|
||||||
"('Wishlist item','tags','Wishlist','\u00d7'))",
|
|
||||||
dest='prefix_rules',
|
|
||||||
action=None,
|
|
||||||
help="Specifies the rules used to include prefixes "
|
|
||||||
"indicating read books, wishlist items and other "
|
|
||||||
"user-specified prefixes.\n"
|
|
||||||
"The model for a prefix rule is ('<rule name>',"
|
|
||||||
"'<source field>','<pattern>','<prefix>').\n"
|
|
||||||
"When multiple rules are defined, the first "
|
|
||||||
"matching rule will be used.\n"
|
|
||||||
"Default:\n" + '"' + '%default' + '"' + "\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--preset',
|
|
||||||
default=None,
|
|
||||||
dest='preset',
|
|
||||||
action=None,
|
|
||||||
help="Use a named preset created with the GUI "
|
|
||||||
"catalog builder.\n"
|
|
||||||
"A preset specifies all settings for building a "
|
|
||||||
"catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--use-existing-cover',
|
|
||||||
default=False,
|
|
||||||
dest='use_existing_cover',
|
|
||||||
action='store_true',
|
|
||||||
help="Replace existing cover when generating the "
|
|
||||||
"catalog.\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats"),
|
|
||||||
Option('--thumb-width',
|
|
||||||
default='1.0',
|
|
||||||
dest='thumb_width',
|
|
||||||
action=None,
|
|
||||||
help="Size hint (in inches) for book covers in "
|
|
||||||
"catalog.\n"
|
|
||||||
"Range: 1.0 - 2.0\n"
|
|
||||||
"Default: '%default'\n"
|
|
||||||
"Applies to: AZW3, EPUB, MOBI output formats")]
|
|
||||||
# }}}
|
|
||||||
|
|
||||||
def run(self, path_to_output, opts, db, notification=DummyReporter()):
|
|
||||||
from ebook_converter.library.catalogs.epub_mobi_builder import CatalogBuilder
|
|
||||||
from ebook_converter.utils.logging import default_log as log
|
|
||||||
from ebook_converter.utils.config import JSONConfig
|
|
||||||
|
|
||||||
# If preset specified from the cli, insert stored options from JSON file
|
|
||||||
if hasattr(opts, 'preset') and opts.preset:
|
|
||||||
available_presets = JSONConfig("catalog_presets")
|
|
||||||
if opts.preset not in available_presets:
|
|
||||||
if available_presets:
|
|
||||||
print('Error: Preset "%s" not found.' % opts.preset)
|
|
||||||
print('Stored presets: %s' %
|
|
||||||
', '.join([p for p in
|
|
||||||
sorted(available_presets.keys())]))
|
|
||||||
else:
|
|
||||||
print('Error: No stored presets.')
|
|
||||||
return 1
|
|
||||||
|
|
||||||
# Copy the relevant preset values to the opts object
|
|
||||||
for item in available_presets[opts.preset]:
|
|
||||||
if item not in ['exclusion_rules_tw', 'format', 'prefix_rules_tw']:
|
|
||||||
setattr(opts, item, available_presets[opts.preset][item])
|
|
||||||
|
|
||||||
# Provide an unconnected device
|
|
||||||
opts.connected_device = {
|
|
||||||
'is_device_connected': False,
|
|
||||||
'kind': None,
|
|
||||||
'name': None,
|
|
||||||
'save_template': None,
|
|
||||||
'serial': None,
|
|
||||||
'storage': None,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Convert prefix_rules and exclusion_rules from JSON lists to tuples
|
|
||||||
prs = []
|
|
||||||
for rule in opts.prefix_rules:
|
|
||||||
prs.append(tuple(rule))
|
|
||||||
opts.prefix_rules = tuple(prs)
|
|
||||||
|
|
||||||
ers = []
|
|
||||||
for rule in opts.exclusion_rules:
|
|
||||||
ers.append(tuple(rule))
|
|
||||||
opts.exclusion_rules = tuple(ers)
|
|
||||||
|
|
||||||
opts.log = log
|
|
||||||
opts.fmt = self.fmt = path_to_output.rpartition('.')[2]
|
|
||||||
|
|
||||||
# Add local options
|
|
||||||
opts.creator = '%s, %s %s, %s' % (date.strftime('%A'), date.strftime('%B'), date.strftime('%d').lstrip('0'), date.strftime('%Y'))
|
|
||||||
opts.creator_sort_as = '%s %s' % ('calibre', date.strftime('%Y-%m-%d'))
|
|
||||||
opts.connected_kindle = False
|
|
||||||
|
|
||||||
# Finalize output_profile
|
|
||||||
op = opts.output_profile
|
|
||||||
if op is None:
|
|
||||||
op = 'default'
|
|
||||||
|
|
||||||
if opts.connected_device['name'] and 'kindle' in opts.connected_device['name'].lower():
|
|
||||||
opts.connected_kindle = True
|
|
||||||
if opts.connected_device['serial'] and \
|
|
||||||
opts.connected_device['serial'][:4] in ['B004', 'B005']:
|
|
||||||
op = "kindle_dx"
|
|
||||||
else:
|
|
||||||
op = "kindle"
|
|
||||||
|
|
||||||
opts.description_clip = 380 if op.endswith('dx') or 'kindle' not in op else 100
|
|
||||||
opts.author_clip = 100 if op.endswith('dx') or 'kindle' not in op else 60
|
|
||||||
opts.output_profile = op
|
|
||||||
|
|
||||||
opts.basename = "Catalog"
|
|
||||||
opts.cli_environment = not hasattr(opts, 'sync')
|
|
||||||
|
|
||||||
# Hard-wired to always sort descriptions by author, with series after non-series
|
|
||||||
opts.sort_descriptions_by_author = True
|
|
||||||
|
|
||||||
build_log = []
|
|
||||||
|
|
||||||
build_log.append("%s('%s'): Generating %s %sin %s environment, locale: '%s'" %
|
|
||||||
(self.name,
|
|
||||||
current_library_name(),
|
|
||||||
self.fmt,
|
|
||||||
'for %s ' % opts.output_profile if opts.output_profile else '',
|
|
||||||
'CLI' if opts.cli_environment else 'GUI',
|
|
||||||
langcode_to_name(canonicalize_lang(get_lang()), localize=False))
|
|
||||||
)
|
|
||||||
|
|
||||||
# If exclude_genre is blank, assume user wants all tags as genres
|
|
||||||
if opts.exclude_genre.strip() == '':
|
|
||||||
# opts.exclude_genre = '\[^.\]'
|
|
||||||
# build_log.append(" converting empty exclude_genre to '\[^.\]'")
|
|
||||||
opts.exclude_genre = 'a^'
|
|
||||||
build_log.append(" converting empty exclude_genre to 'a^'")
|
|
||||||
if opts.connected_device['is_device_connected'] and \
|
|
||||||
opts.connected_device['kind'] == 'device':
|
|
||||||
if opts.connected_device['serial']:
|
|
||||||
build_log.append(" connected_device: '%s' #%s%s " %
|
|
||||||
(opts.connected_device['name'],
|
|
||||||
opts.connected_device['serial'][0:4],
|
|
||||||
'x' * (len(opts.connected_device['serial']) - 4)))
|
|
||||||
for storage in opts.connected_device['storage']:
|
|
||||||
if storage:
|
|
||||||
build_log.append(" mount point: %s" % storage)
|
|
||||||
else:
|
|
||||||
build_log.append(" connected_device: '%s'" % opts.connected_device['name'])
|
|
||||||
try:
|
|
||||||
for storage in opts.connected_device['storage']:
|
|
||||||
if storage:
|
|
||||||
build_log.append(" mount point: %s" % storage)
|
|
||||||
except:
|
|
||||||
build_log.append(" (no mount points)")
|
|
||||||
else:
|
|
||||||
build_log.append(" connected_device: '%s'" % opts.connected_device['name'])
|
|
||||||
|
|
||||||
opts_dict = vars(opts)
|
|
||||||
if opts_dict['ids']:
|
|
||||||
build_log.append(" book count: %d" % len(opts_dict['ids']))
|
|
||||||
|
|
||||||
sections_list = []
|
|
||||||
if opts.generate_authors:
|
|
||||||
sections_list.append('Authors')
|
|
||||||
if opts.generate_titles:
|
|
||||||
sections_list.append('Titles')
|
|
||||||
if opts.generate_series:
|
|
||||||
sections_list.append('Series')
|
|
||||||
if opts.generate_genres:
|
|
||||||
sections_list.append('Genres')
|
|
||||||
if opts.generate_recently_added:
|
|
||||||
sections_list.append('Recently Added')
|
|
||||||
if opts.generate_descriptions:
|
|
||||||
sections_list.append('Descriptions')
|
|
||||||
|
|
||||||
if not sections_list:
|
|
||||||
if opts.cli_environment:
|
|
||||||
opts.log.warn('*** No Section switches specified, enabling all Sections ***')
|
|
||||||
opts.generate_authors = True
|
|
||||||
opts.generate_titles = True
|
|
||||||
opts.generate_series = True
|
|
||||||
opts.generate_genres = True
|
|
||||||
opts.generate_recently_added = True
|
|
||||||
opts.generate_descriptions = True
|
|
||||||
sections_list = ['Authors', 'Titles', 'Series', 'Genres', 'Recently Added', 'Descriptions']
|
|
||||||
else:
|
|
||||||
opts.log.warn('\n*** No enabled Sections, terminating catalog generation ***')
|
|
||||||
return ["No Included Sections", "No enabled Sections.\nCheck E-book options tab\n'Included sections'\n"]
|
|
||||||
if opts.fmt == 'mobi' and sections_list == ['Descriptions']:
|
|
||||||
warning = ("\n*** Adding 'By authors' section required for MOBI "
|
|
||||||
"output ***")
|
|
||||||
opts.log.warn(warning)
|
|
||||||
sections_list.insert(0, 'Authors')
|
|
||||||
opts.generate_authors = True
|
|
||||||
|
|
||||||
opts.log(" Sections: %s" % ', '.join(sections_list))
|
|
||||||
opts.section_list = sections_list
|
|
||||||
|
|
||||||
# Limit thumb_width to 1.0" - 2.0"
|
|
||||||
try:
|
|
||||||
if float(opts.thumb_width) < float(self.THUMB_SMALLEST):
|
|
||||||
log.warning("coercing thumb_width from '%s' to '%s'" % (opts.thumb_width, self.THUMB_SMALLEST))
|
|
||||||
opts.thumb_width = self.THUMB_SMALLEST
|
|
||||||
if float(opts.thumb_width) > float(self.THUMB_LARGEST):
|
|
||||||
log.warning("coercing thumb_width from '%s' to '%s'" % (opts.thumb_width, self.THUMB_LARGEST))
|
|
||||||
opts.thumb_width = self.THUMB_LARGEST
|
|
||||||
opts.thumb_width = "%.2f" % float(opts.thumb_width)
|
|
||||||
except:
|
|
||||||
log.error("coercing thumb_width from '%s' to '%s'" % (opts.thumb_width, self.THUMB_SMALLEST))
|
|
||||||
opts.thumb_width = "1.0"
|
|
||||||
|
|
||||||
# eval prefix_rules if passed from command line
|
|
||||||
if type(opts.prefix_rules) is not tuple:
|
|
||||||
try:
|
|
||||||
opts.prefix_rules = eval(opts.prefix_rules)
|
|
||||||
except:
|
|
||||||
log.error("malformed --prefix-rules: %s" % opts.prefix_rules)
|
|
||||||
raise
|
|
||||||
for rule in opts.prefix_rules:
|
|
||||||
if len(rule) != 4:
|
|
||||||
log.error("incorrect number of args for --prefix-rules: %s" % repr(rule))
|
|
||||||
|
|
||||||
# eval exclusion_rules if passed from command line
|
|
||||||
if type(opts.exclusion_rules) is not tuple:
|
|
||||||
try:
|
|
||||||
opts.exclusion_rules = eval(opts.exclusion_rules)
|
|
||||||
except:
|
|
||||||
log.error("malformed --exclusion-rules: %s" % opts.exclusion_rules)
|
|
||||||
raise
|
|
||||||
for rule in opts.exclusion_rules:
|
|
||||||
if len(rule) != 3:
|
|
||||||
log.error("incorrect number of args for --exclusion-rules: %s" % repr(rule))
|
|
||||||
|
|
||||||
# Display opts
|
|
||||||
keys = sorted(opts_dict.keys())
|
|
||||||
build_log.append(" opts:")
|
|
||||||
for key in keys:
|
|
||||||
if key in ['catalog_title', 'author_clip', 'connected_kindle', 'creator',
|
|
||||||
'cross_reference_authors', 'description_clip', 'exclude_book_marker',
|
|
||||||
'exclude_genre', 'exclude_tags', 'exclusion_rules', 'fmt',
|
|
||||||
'genre_source_field', 'header_note_source_field', 'merge_comments_rule',
|
|
||||||
'output_profile', 'prefix_rules', 'preset', 'read_book_marker',
|
|
||||||
'search_text', 'sort_by', 'sort_descriptions_by_author', 'sync',
|
|
||||||
'thumb_width', 'use_existing_cover', 'wishlist_tag']:
|
|
||||||
build_log.append(" %s: %s" % (key, repr(opts_dict[key])))
|
|
||||||
if opts.verbose:
|
|
||||||
log('\n'.join(line for line in build_log))
|
|
||||||
|
|
||||||
# Capture start_time
|
|
||||||
opts.start_time = time.time()
|
|
||||||
|
|
||||||
self.opts = opts
|
|
||||||
|
|
||||||
if opts.verbose:
|
|
||||||
log.info(" Begin catalog source generation (%s)" %
|
|
||||||
str(datetime.timedelta(seconds=int(time.time() - opts.start_time))))
|
|
||||||
|
|
||||||
# Launch the Catalog builder
|
|
||||||
catalog = CatalogBuilder(db, opts, self, report_progress=notification)
|
|
||||||
|
|
||||||
try:
|
|
||||||
catalog.build_sources()
|
|
||||||
if opts.verbose:
|
|
||||||
log.info(" Completed catalog source generation (%s)\n" %
|
|
||||||
str(datetime.timedelta(seconds=int(time.time() - opts.start_time))))
|
|
||||||
except (AuthorSortMismatchException, EmptyCatalogException) as e:
|
|
||||||
log.error(" *** Terminated catalog generation: %s ***" % e)
|
|
||||||
except:
|
|
||||||
log.error(" unhandled exception in catalog generator")
|
|
||||||
raise
|
|
||||||
|
|
||||||
else:
|
|
||||||
recommendations = []
|
|
||||||
recommendations.append(('remove_fake_margins', False,
|
|
||||||
OptionRecommendation.HIGH))
|
|
||||||
recommendations.append(('comments', '', OptionRecommendation.HIGH))
|
|
||||||
|
|
||||||
"""
|
|
||||||
>>> Use to debug generated catalog code before pipeline conversion <<<
|
|
||||||
"""
|
|
||||||
GENERATE_DEBUG_EPUB = False
|
|
||||||
if GENERATE_DEBUG_EPUB:
|
|
||||||
catalog_debug_path = os.path.join(os.path.expanduser('~'), 'Desktop', 'Catalog debug')
|
|
||||||
setattr(opts, 'debug_pipeline', os.path.expanduser(catalog_debug_path))
|
|
||||||
|
|
||||||
dp = getattr(opts, 'debug_pipeline', None)
|
|
||||||
if dp is not None:
|
|
||||||
recommendations.append(('debug_pipeline', dp,
|
|
||||||
OptionRecommendation.HIGH))
|
|
||||||
|
|
||||||
if opts.output_profile and opts.output_profile.startswith("kindle"):
|
|
||||||
recommendations.append(('output_profile', opts.output_profile,
|
|
||||||
OptionRecommendation.HIGH))
|
|
||||||
recommendations.append(('book_producer', opts.output_profile,
|
|
||||||
OptionRecommendation.HIGH))
|
|
||||||
if opts.fmt == 'mobi':
|
|
||||||
recommendations.append(('no_inline_toc', True,
|
|
||||||
OptionRecommendation.HIGH))
|
|
||||||
recommendations.append(('verbose', 2,
|
|
||||||
OptionRecommendation.HIGH))
|
|
||||||
|
|
||||||
# Use existing cover or generate new cover
|
|
||||||
cpath = None
|
|
||||||
existing_cover = False
|
|
||||||
try:
|
|
||||||
search_text = 'title:"%s" author:%s' % (
|
|
||||||
opts.catalog_title.replace('"', '\\"'), 'calibre')
|
|
||||||
matches = db.search(search_text, return_matches=True, sort_results=False)
|
|
||||||
if matches:
|
|
||||||
cpath = db.cover(matches[0], index_is_id=True, as_path=True)
|
|
||||||
if cpath and os.path.exists(cpath):
|
|
||||||
existing_cover = True
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if self.opts.use_existing_cover and not existing_cover:
|
|
||||||
log.warning("no existing catalog cover found")
|
|
||||||
|
|
||||||
if self.opts.use_existing_cover and existing_cover:
|
|
||||||
recommendations.append(('cover', cpath, OptionRecommendation.HIGH))
|
|
||||||
log.info("using existing catalog cover")
|
|
||||||
else:
|
|
||||||
# TODO(gryf): feature: generating cover with pillow.
|
|
||||||
pass
|
|
||||||
# from ebook_converter.ebooks.covers import calibre_cover2
|
|
||||||
# log.info("replacing catalog cover")
|
|
||||||
# new_cover_path = PersistentTemporaryFile(suffix='.jpg')
|
|
||||||
# # new_cover = calibre_cover2(opts.catalog_title, 'calibre')
|
|
||||||
# new_cover_path.write('')
|
|
||||||
# new_cover_path.close()
|
|
||||||
# recommendations.append(('cover', new_cover_path.name, OptionRecommendation.HIGH))
|
|
||||||
|
|
||||||
# Run ebook-convert
|
|
||||||
from ebook_converter.ebooks.conversion.plumber import Plumber
|
|
||||||
plumber = Plumber(os.path.join(catalog.catalog_path, opts.basename + '.opf'),
|
|
||||||
path_to_output, log, report_progress=notification,
|
|
||||||
abort_after_input_dump=False)
|
|
||||||
plumber.merge_ui_recommendations(recommendations)
|
|
||||||
plumber.run()
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.remove(cpath)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if GENERATE_DEBUG_EPUB:
|
|
||||||
from ebook_converter.ebooks.epub import initialize_container
|
|
||||||
from ebook_converter.ebooks.tweak import zip_rebuilder
|
|
||||||
from ebook_converter.utils.zipfile import ZipFile
|
|
||||||
input_path = os.path.join(catalog_debug_path, 'input')
|
|
||||||
epub_shell = os.path.join(catalog_debug_path, 'epub_shell.zip')
|
|
||||||
initialize_container(epub_shell, opf_name='content.opf')
|
|
||||||
with ZipFile(epub_shell, 'r') as zf:
|
|
||||||
zf.extractall(path=input_path)
|
|
||||||
os.remove(epub_shell)
|
|
||||||
zip_rebuilder(input_path, os.path.join(catalog_debug_path, 'input.epub'))
|
|
||||||
|
|
||||||
if opts.verbose:
|
|
||||||
log.info(" Catalog creation complete (%s)\n" %
|
|
||||||
str(datetime.timedelta(seconds=int(time.time() - opts.start_time))))
|
|
||||||
|
|
||||||
# returns to gui2.actions.catalog:catalog_generated()
|
|
||||||
return catalog.error
|
|
||||||
Reference in New Issue
Block a user