Merge branch 'Develop'

Separate library from metadata file; embed metadata on download

Commit 8549689a0f
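The headline change: on download, Calibre-Web can now re-export the book through the calibredb binary (or rewrite a kepub's content.opf) so the served file carries the library's current metadata, and the book folder can be split from the folder holding metadata.db (config_calibre_split / get_book_path()). As a hedged orientation only, here is a self-contained sketch of the calibredb-based export step; the function name, paths and book id are placeholders, not values from this commit — the real implementation is do_calibre_export() in cps/helper.py below.

import os
import subprocess
from tempfile import gettempdir
from uuid import uuid4


def export_with_metadata(calibredb_binary, library_dir, book_id, book_format):
    # calibredb re-exports the requested format with the library's current metadata embedded
    tmp_dir = os.path.join(gettempdir(), 'calibre_web')
    os.makedirs(tmp_dir, exist_ok=True)
    temp_file_name = str(uuid4())  # random output name, mirrors do_calibre_export() below
    subprocess.run([calibredb_binary, 'export', '--dont-write-opf',
                    '--with-library', library_dir,
                    '--to-dir', tmp_dir,
                    '--formats', book_format,
                    '--template', temp_file_name,
                    str(book_id)],
                   check=True)
    return os.path.join(tmp_dir, temp_file_name + '.' + book_format.lower())


# Example (placeholder values):
# export_with_metadata('/opt/calibre/calibredb', '/books/library', 42, 'epub')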
@@ -49,9 +49,9 @@ sorted_modules = OrderedDict((sorted(modules.items(), key=lambda x: x[0].casefol
 
 def collect_stats():
     if constants.NIGHTLY_VERSION[0] == "$Format:%H$":
-        calibre_web_version = constants.STABLE_VERSION['version']
+        calibre_web_version = constants.STABLE_VERSION['version'].replace("b", " Beta")
     else:
-        calibre_web_version = (constants.STABLE_VERSION['version'] + ' - '
+        calibre_web_version = (constants.STABLE_VERSION['version'].replace("b", " Beta") + ' - '
                                + constants.NIGHTLY_VERSION[0].replace('%', '%%') + ' - '
                                + constants.NIGHTLY_VERSION[1].replace('%', '%%'))
 
cps/admin.py (18 lines changed)

@@ -47,7 +47,7 @@ from . import constants, logger, helper, services, cli_param
 from . import db, calibre_db, ub, web_server, config, updater_thread, gdriveutils, \
     kobo_sync_status, schedule
 from .helper import check_valid_domain, send_test_mail, reset_password, generate_password_hash, check_email, \
-    valid_email, check_username
+    valid_email, check_username, get_calibre_binarypath
 from .gdriveutils import is_gdrive_ready, gdrive_support
 from .render_template import render_title_template, get_sidebar_config
 from .services.worker import WorkerThread
@@ -217,7 +217,7 @@ def admin():
             form_date += timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
         commit = format_datetime(form_date - tz, format='short')
     else:
-        commit = version['version']
+        commit = version['version'].replace("b", " Beta")
 
     all_user = ub.session.query(ub.User).all()
     # email_settings = mail_config.get_mail_settings()
@@ -1705,7 +1705,7 @@ def _db_configuration_update_helper():
         return _db_configuration_result('{}'.format(ex), gdrive_error)
 
     if db_change or not db_valid or not config.db_configured \
             or config.config_calibre_dir != to_save["config_calibre_dir"]:
         if not os.path.exists(metadata_db) or not to_save['config_calibre_dir']:
             return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'), gdrive_error)
         else:
@@ -1728,6 +1728,9 @@ def _db_configuration_update_helper():
         calibre_db.update_config(config)
         if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
             flash(_("DB is not Writeable"), category="warning")
+    _config_string(to_save, "config_calibre_split_dir")
+    config.config_calibre_split = to_save.get('config_calibre_split', 0) == "on"
+    calibre_db.update_config(config)
     config.save()
     return _db_configuration_result(None, gdrive_error)
 
@@ -1748,6 +1751,7 @@ def _configuration_update_helper():
 
     _config_checkbox_int(to_save, "config_uploading")
     _config_checkbox_int(to_save, "config_unicode_filename")
+    _config_checkbox_int(to_save, "config_embed_metadata")
     # Reboot on config_anonbrowse with enabled ldap, as decoraters are changed in this case
     reboot_required |= (_config_checkbox_int(to_save, "config_anonbrowse")
                         and config.config_login_type == constants.LOGIN_LDAP)
@@ -1764,8 +1768,14 @@ def _configuration_update_helper():
     constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
 
     _config_string(to_save, "config_calibre")
-    _config_string(to_save, "config_converterpath")
+    _config_string(to_save, "config_binariesdir")
     _config_string(to_save, "config_kepubifypath")
+    if "config_binariesdir" in to_save:
+        calibre_status = helper.check_calibre(config.config_binariesdir)
+        if calibre_status:
+            return _configuration_result(calibre_status)
+        to_save["config_converterpath"] = get_calibre_binarypath("ebook-convert")
+        _config_string(to_save, "config_converterpath")
 
     reboot_required |= _config_int(to_save, "config_login_type")
 
@@ -29,8 +29,8 @@ from .constants import DEFAULT_SETTINGS_FILE, DEFAULT_GDRIVE_FILE
 
 def version_info():
     if _NIGHTLY_VERSION[1].startswith('$Format'):
-        return "Calibre-Web version: %s - unknown git-clone" % _STABLE_VERSION['version']
-    return "Calibre-Web version: %s -%s" % (_STABLE_VERSION['version'], _NIGHTLY_VERSION[1])
+        return "Calibre-Web version: %s - unknown git-clone" % _STABLE_VERSION['version'].replace("b", " Beta")
+    return "Calibre-Web version: %s -%s" % (_STABLE_VERSION['version'].replace("b", " Beta"), _NIGHTLY_VERSION[1])
 
 
 class CliParameter(object):
 
@@ -34,6 +34,7 @@ except ImportError:
     from sqlalchemy.ext.declarative import declarative_base
 
 from . import constants, logger
+from .subproc_wrapper import process_wait
 
 
 log = logger.create()
@@ -69,6 +70,8 @@ class _Settings(_Base):
 
     config_calibre_dir = Column(String)
     config_calibre_uuid = Column(String)
+    config_calibre_split = Column(Boolean, default=False)
+    config_calibre_split_dir = Column(String)
     config_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_external_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_certfile = Column(String)
@@ -138,10 +141,12 @@ class _Settings(_Base):
 
     config_kepubifypath = Column(String, default=None)
     config_converterpath = Column(String, default=None)
+    config_binariesdir = Column(String, default=None)
    config_calibre = Column(String)
     config_rarfile_location = Column(String, default=None)
     config_upload_formats = Column(String, default=','.join(constants.EXTENSIONS_UPLOAD))
     config_unicode_filename = Column(Boolean, default=False)
+    config_embed_metadata = Column(Boolean, default=True)
 
     config_updatechannel = Column(Integer, default=constants.UPDATE_STABLE)
 
@@ -184,9 +189,11 @@ class ConfigSQL(object):
         self.load()
 
         change = False
-        if self.config_converterpath == None:  # pylint: disable=access-member-before-definition
+        if self.config_binariesdir == None:  # pylint: disable=access-member-before-definition
             change = True
-            self.config_converterpath = autodetect_calibre_binary()
+            self.config_binariesdir = autodetect_calibre_binaries()
+            self.config_converterpath = autodetect_converter_binary(self.config_binariesdir)
 
         if self.config_kepubifypath == None:  # pylint: disable=access-member-before-definition
             change = True
@@ -389,6 +396,9 @@ class ConfigSQL(object):
             self.db_configured = False
         self.save()
 
+    def get_book_path(self):
+        return self.config_calibre_split_dir if self.config_calibre_split_dir else self.config_calibre_dir
+
     def store_calibre_uuid(self, calibre_db, Library_table):
         try:
             calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
@@ -469,17 +479,32 @@ def _migrate_table(session, orm_class, secret_key=None):
         session.rollback()
 
 
-def autodetect_calibre_binary():
+def autodetect_calibre_binaries():
     if sys.platform == "win32":
-        calibre_path = ["C:\\program files\\calibre\\ebook-convert.exe",
-                        "C:\\program files(x86)\\calibre\\ebook-convert.exe",
-                        "C:\\program files(x86)\\calibre2\\ebook-convert.exe",
-                        "C:\\program files\\calibre2\\ebook-convert.exe"]
+        calibre_path = ["C:\\program files\\calibre\\",
+                        "C:\\program files(x86)\\calibre\\",
+                        "C:\\program files(x86)\\calibre2\\",
+                        "C:\\program files\\calibre2\\"]
     else:
-        calibre_path = ["/opt/calibre/ebook-convert"]
+        calibre_path = ["/opt/calibre/"]
     for element in calibre_path:
-        if os.path.isfile(element) and os.access(element, os.X_OK):
-            return element
+        supported_binary_paths = [os.path.join(element, binary) for binary in constants.SUPPORTED_CALIBRE_BINARIES.values()]
+        if all(os.path.isfile(binary_path) and os.access(binary_path, os.X_OK) for binary_path in supported_binary_paths):
+            values = [process_wait([binary_path, "--version"], pattern='\(calibre (.*)\)') for binary_path in supported_binary_paths]
+            if all(values):
+                version = values[0].group(1)
+                log.debug("calibre version %s", version)
+                return element
+    return ""
+
+
+def autodetect_converter_binary(calibre_path):
+    if sys.platform == "win32":
+        converter_path = os.path.join(calibre_path, "ebook-convert.exe")
+    else:
+        converter_path = os.path.join(calibre_path, "ebook-convert")
+    if calibre_path and os.path.isfile(converter_path) and os.access(converter_path, os.X_OK):
+        return converter_path
     return ""
 
 
@@ -521,6 +546,7 @@ def load_configuration(session, secret_key):
         session.commit()
 
 
+
 def get_flask_session_key(_session):
     flask_settings = _session.query(_Flask_Settings).one_or_none()
     if flask_settings == None:
 
@@ -156,6 +156,11 @@ EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'kepub', 'mobi', 'azw', 'azw3', 'cbr'
                      'prc', 'doc', 'docx', 'fb2', 'html', 'rtf', 'lit', 'odt', 'mp3', 'mp4', 'ogg',
                      'opus', 'wav', 'flac', 'm4a', 'm4b'}
 
+_extension = ""
+if sys.platform == "win32":
+    _extension = ".exe"
+SUPPORTED_CALIBRE_BINARIES = {binary:binary + _extension for binary in ["ebook-convert", "calibredb"]}
+
 
 def has_flag(value, bit_flag):
     return bit_flag == (bit_flag & (value or 0))
@@ -169,13 +174,11 @@ BookMeta = namedtuple('BookMeta', 'file_path, extension, title, author, cover, d
                       'series_id, languages, publisher, pubdate, identifiers')
 
 # python build process likes to have x.y.zbw -> b for beta and w a counting number
-STABLE_VERSION = {'version': '0.6.22 Beta'}
+STABLE_VERSION = {'version': '0.6.22b'}
 
 NIGHTLY_VERSION = dict()
 NIGHTLY_VERSION[0] = '$Format:%H$'
 NIGHTLY_VERSION[1] = '$Format:%cI$'
-# NIGHTLY_VERSION[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
-# NIGHTLY_VERSION[1] = '2018-09-09T10:13:08+02:00'
 
 # CACHE
 CACHE_TYPE_THUMBNAILS = 'thumbnails'
 
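For reference, a small illustration (not part of the commit) of what the SUPPORTED_CALIBRE_BINARIES mapping added above evaluates to on each platform:

# Illustrative only: the dict comprehension from cps/constants.py, evaluated for both cases.
for platform_name, ext in (("linux/macOS", ""), ("win32", ".exe")):
    mapping = {binary: binary + ext for binary in ["ebook-convert", "calibredb"]}
    print(platform_name, mapping)
# linux/macOS {'ebook-convert': 'ebook-convert', 'calibredb': 'calibredb'}
# win32 {'ebook-convert': 'ebook-convert.exe', 'calibredb': 'calibredb.exe'}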
@@ -137,7 +137,7 @@ def edit_book(book_id):
             edited_books_id = book.id
             modify_date = True
             title_author_error = helper.update_dir_structure(edited_books_id,
-                                                             config.config_calibre_dir,
+                                                             config.get_book_path(),
                                                              input_authors[0],
                                                              renamed_author=renamed)
             if title_author_error:
@@ -282,7 +282,7 @@ def upload():
                                                     meta.extension.lower())
             else:
                 error = helper.update_dir_structure(book_id,
-                                                    config.config_calibre_dir,
+                                                    config.get_book_path(),
                                                     input_authors[0],
                                                     meta.file_path,
                                                     title_dir + meta.extension.lower(),
@@ -332,7 +332,7 @@ def convert_bookformat(book_id):
         return redirect(url_for('edit-book.show_edit_book', book_id=book_id))
 
     log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
-    rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
+    rtn = helper.convert_book_format(book_id, config.get_book_path(), book_format_from.upper(),
                                      book_format_to.upper(), current_user.name)
 
     if rtn is None:
@@ -402,7 +402,7 @@ def edit_list_book(param):
     elif param == 'title':
         sort_param = book.sort
         if handle_title_on_edit(book, vals.get('value', "")):
-            rename_error = helper.update_dir_structure(book.id, config.config_calibre_dir)
+            rename_error = helper.update_dir_structure(book.id, config.get_book_path())
             if not rename_error:
                 ret = Response(json.dumps({'success': True, 'newValue': book.title}),
                                mimetype='application/json')
@@ -420,7 +420,7 @@ def edit_list_book(param):
                        mimetype='application/json')
     elif param == 'authors':
         input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
-        rename_error = helper.update_dir_structure(book.id, config.config_calibre_dir, input_authors[0],
+        rename_error = helper.update_dir_structure(book.id, config.get_book_path(), input_authors[0],
                                                    renamed_author=renamed)
         if not rename_error:
             ret = Response(json.dumps({
@@ -524,10 +524,10 @@ def merge_list_book():
             for element in from_book.data:
                 if element.format not in to_file:
                     # create new data entry with: book_id, book_format, uncompressed_size, name
-                    filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir,
+                    filepath_new = os.path.normpath(os.path.join(config.get_book_path(),
                                                                  to_book.path,
                                                                  to_name + "." + element.format.lower()))
-                    filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir,
+                    filepath_old = os.path.normpath(os.path.join(config.get_book_path(),
                                                                  from_book.path,
                                                                  element.name + "." + element.format.lower()))
                     copyfile(filepath_old, filepath_new)
@@ -567,7 +567,7 @@ def table_xchange_author_title():
 
         if edited_books_id:
             # toDo: Handle error
-            edit_error = helper.update_dir_structure(edited_books_id, config.config_calibre_dir, input_authors[0],
+            edit_error = helper.update_dir_structure(edited_books_id, config.get_book_path(), input_authors[0],
                                                      renamed_author=renamed)
         if modify_date:
             book.last_modified = datetime.utcnow()
@@ -764,7 +764,7 @@ def move_coverfile(meta, db_book):
         cover_file = meta.cover
     else:
         cover_file = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
-    new_cover_path = os.path.join(config.config_calibre_dir, db_book.path)
+    new_cover_path = os.path.join(config.get_book_path(), db_book.path)
     try:
         os.makedirs(new_cover_path, exist_ok=True)
         copyfile(cover_file, os.path.join(new_cover_path, "cover.jpg"))
@@ -850,7 +850,7 @@ def delete_book_from_table(book_id, book_format, json_response):
     book = calibre_db.get_book(book_id)
     if book:
         try:
-            result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
+            result, error = helper.delete_book(book, config.get_book_path(), book_format=book_format.upper())
             if not result:
                 if json_response:
                     return json.dumps([{"location": url_for("edit-book.show_edit_book", book_id=book_id),
@@ -1190,7 +1190,7 @@ def upload_single_file(file_request, book, book_id):
             return False
 
         file_name = book.path.rsplit('/', 1)[-1]
-        filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))
+        filepath = os.path.normpath(os.path.join(config.get_book_path(), book.path))
         saved_filename = os.path.join(filepath, file_name + '.' + file_ext)
 
         # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
 
cps/epub.py (30 lines changed)

@@ -23,10 +23,12 @@ from lxml import etree
 from . import isoLanguages, cover
 from . import config, logger
 from .helper import split_authors
+from .epub_helper import get_content_opf, default_ns
 from .constants import BookMeta
 
 log = logger.create()
 
+
 def _extract_cover(zip_file, cover_file, cover_path, tmp_file_name):
     if cover_file is None:
         return None
@@ -44,23 +46,14 @@ def _extract_cover(zip_file, cover_file, cover_path, tmp_file_name):
         return cover.cover_processing(tmp_file_name, cf, extension)
 
 def get_epub_layout(book, book_data):
-    ns = {
-        'n': 'urn:oasis:names:tc:opendocument:xmlns:container',
-        'pkg': 'http://www.idpf.org/2007/opf',
-    }
-    file_path = os.path.normpath(os.path.join(config.config_calibre_dir, book.path, book_data.name + "." + book_data.format.lower()))
+    file_path = os.path.normpath(os.path.join(config.get_book_path(),
+                                              book.path, book_data.name + "." + book_data.format.lower()))
 
     try:
-        epubZip = zipfile.ZipFile(file_path)
-        txt = epubZip.read('META-INF/container.xml')
-        tree = etree.fromstring(txt)
-        cfname = tree.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
-        cf = epubZip.read(cfname)
+        tree, __ = get_content_opf(file_path, default_ns)
+        p = tree.xpath('/pkg:package/pkg:metadata', namespaces=default_ns)[0]
 
-        tree = etree.fromstring(cf)
-        p = tree.xpath('/pkg:package/pkg:metadata', namespaces=ns)[0]
-
-        layout = p.xpath('pkg:meta[@property="rendition:layout"]/text()', namespaces=ns)
+        layout = p.xpath('pkg:meta[@property="rendition:layout"]/text()', namespaces=default_ns)
     except (etree.XMLSyntaxError, KeyError, IndexError) as e:
         log.error("Could not parse epub metadata of book {} during kobo sync: {}".format(book.id, e))
         layout = []
@@ -78,13 +71,7 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
         'dc': 'http://purl.org/dc/elements/1.1/'
     }
 
-    epub_zip = zipfile.ZipFile(tmp_file_path)
-
-    txt = epub_zip.read('META-INF/container.xml')
-    tree = etree.fromstring(txt)
-    cf_name = tree.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
-    cf = epub_zip.read(cf_name)
-    tree = etree.fromstring(cf)
+    tree, cf_name = get_content_opf(tmp_file_path, ns)
 
     cover_path = os.path.dirname(cf_name)
 
@@ -127,6 +114,7 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
 
     epub_metadata = parse_epub_series(ns, tree, epub_metadata)
 
+    epub_zip = zipfile.ZipFile(tmp_file_path)
     cover_file = parse_epub_cover(ns, tree, epub_zip, cover_path, tmp_file_path)
 
     identifiers = []
 
cps/epub_helper.py (new file, 162 lines)

@@ -0,0 +1,162 @@
+# -*- coding: utf-8 -*-
+
+# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
+#  Copyright (C) 2018 lemmsh, Kennyl, Kyosfonica, matthazinski
+#
+#  This program is free software: you can redistribute it and/or modify
+#  it under the terms of the GNU General Public License as published by
+#  the Free Software Foundation, either version 3 of the License, or
+#  (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import zipfile
+from lxml import etree
+
+from . import isoLanguages
+
+default_ns = {
+    'n': 'urn:oasis:names:tc:opendocument:xmlns:container',
+    'pkg': 'http://www.idpf.org/2007/opf',
+}
+
+OPF_NAMESPACE = "http://www.idpf.org/2007/opf"
+PURL_NAMESPACE = "http://purl.org/dc/elements/1.1/"
+
+OPF = "{%s}" % OPF_NAMESPACE
+PURL = "{%s}" % PURL_NAMESPACE
+
+etree.register_namespace("opf", OPF_NAMESPACE)
+etree.register_namespace("dc", PURL_NAMESPACE)
+
+OPF_NS = {None: OPF_NAMESPACE}  # the default namespace (no prefix)
+NSMAP = {'dc': PURL_NAMESPACE, 'opf': OPF_NAMESPACE}
+
+
+def updateEpub(src, dest, filename, data, ):
+    # create a temp copy of the archive without filename
+    with zipfile.ZipFile(src, 'r') as zin:
+        with zipfile.ZipFile(dest, 'w') as zout:
+            zout.comment = zin.comment  # preserve the comment
+            for item in zin.infolist():
+                if item.filename != filename:
+                    zout.writestr(item, zin.read(item.filename))
+
+    # now add filename with its new data
+    with zipfile.ZipFile(dest, mode='a', compression=zipfile.ZIP_DEFLATED) as zf:
+        zf.writestr(filename, data)
+
+
+def get_content_opf(file_path, ns=default_ns):
+    epubZip = zipfile.ZipFile(file_path)
+    txt = epubZip.read('META-INF/container.xml')
+    tree = etree.fromstring(txt)
+    cf_name = tree.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
+    cf = epubZip.read(cf_name)
+
+    return etree.fromstring(cf), cf_name
+
+
+def create_new_metadata_backup(book, custom_columns, export_language, translated_cover_name, lang_type=3):
+    # generate root package element
+    package = etree.Element(OPF + "package", nsmap=OPF_NS)
+    package.set("unique-identifier", "uuid_id")
+    package.set("version", "2.0")
+
+    # generate metadata element and all sub elements of it
+    metadata = etree.SubElement(package, "metadata", nsmap=NSMAP)
+    identifier = etree.SubElement(metadata, PURL + "identifier", id="calibre_id", nsmap=NSMAP)
+    identifier.set(OPF + "scheme", "calibre")
+    identifier.text = str(book.id)
+    identifier2 = etree.SubElement(metadata, PURL + "identifier", id="uuid_id", nsmap=NSMAP)
+    identifier2.set(OPF + "scheme", "uuid")
+    identifier2.text = book.uuid
+    title = etree.SubElement(metadata, PURL + "title", nsmap=NSMAP)
+    title.text = book.title
+    for author in book.authors:
+        creator = etree.SubElement(metadata, PURL + "creator", nsmap=NSMAP)
+        creator.text = str(author.name)
+        creator.set(OPF + "file-as", book.author_sort)  # ToDo Check
+        creator.set(OPF + "role", "aut")
+    contributor = etree.SubElement(metadata, PURL + "contributor", nsmap=NSMAP)
+    contributor.text = "calibre (5.7.2) [https://calibre-ebook.com]"
+    contributor.set(OPF + "file-as", "calibre")  # ToDo Check
+    contributor.set(OPF + "role", "bkp")
+
+    date = etree.SubElement(metadata, PURL + "date", nsmap=NSMAP)
+    date.text = '{d.year:04}-{d.month:02}-{d.day:02}T{d.hour:02}:{d.minute:02}:{d.second:02}'.format(d=book.pubdate)
+    if book.comments and book.comments[0].text:
+        for b in book.comments:
+            description = etree.SubElement(metadata, PURL + "description", nsmap=NSMAP)
+            description.text = b.text
+    for b in book.publishers:
+        publisher = etree.SubElement(metadata, PURL + "publisher", nsmap=NSMAP)
+        publisher.text = str(b.name)
+    if not book.languages:
+        language = etree.SubElement(metadata, PURL + "language", nsmap=NSMAP)
+        language.text = export_language
+    else:
+        for b in book.languages:
+            language = etree.SubElement(metadata, PURL + "language", nsmap=NSMAP)
+            language.text = str(b.lang_code) if lang_type == 3 else isoLanguages.get(part3=b.lang_code).part1
+    for b in book.tags:
+        subject = etree.SubElement(metadata, PURL + "subject", nsmap=NSMAP)
+        subject.text = str(b.name)
+    etree.SubElement(metadata, "meta", name="calibre:author_link_map",
+                     content="{" + ", ".join(['"' + str(a.name) + '": ""' for a in book.authors]) + "}",
+                     nsmap=NSMAP)
+    for b in book.series:
+        etree.SubElement(metadata, "meta", name="calibre:series",
+                         content=str(str(b.name)),
+                         nsmap=NSMAP)
+    if book.series:
+        etree.SubElement(metadata, "meta", name="calibre:series_index",
+                         content=str(book.series_index),
+                         nsmap=NSMAP)
+    if len(book.ratings) and book.ratings[0].rating > 0:
+        etree.SubElement(metadata, "meta", name="calibre:rating",
+                         content=str(book.ratings[0].rating),
+                         nsmap=NSMAP)
+    etree.SubElement(metadata, "meta", name="calibre:timestamp",
+                     content='{d.year:04}-{d.month:02}-{d.day:02}T{d.hour:02}:{d.minute:02}:{d.second:02}'.format(
+                         d=book.timestamp),
+                     nsmap=NSMAP)
+    etree.SubElement(metadata, "meta", name="calibre:title_sort",
+                     content=book.sort,
+                     nsmap=NSMAP)
+    sequence = 0
+    for cc in custom_columns:
+        value = None
+        extra = None
+        cc_entry = getattr(book, "custom_column_" + str(cc.id))
+        if cc_entry.__len__():
+            value = [c.value for c in cc_entry] if cc.is_multiple else cc_entry[0].value
+            extra = cc_entry[0].extra if hasattr(cc_entry[0], "extra") else None
+        etree.SubElement(metadata, "meta", name="calibre:user_metadata:#{}".format(cc.label),
+                         content=cc.to_json(value, extra, sequence),
+                         nsmap=NSMAP)
+        sequence += 1
+
+    # generate guide element and all sub elements of it
+    # Title is translated from default export language
+    guide = etree.SubElement(package, "guide")
+    etree.SubElement(guide, "reference", type="cover", title=translated_cover_name, href="cover.jpg")
+
+    return package
+
+
+def replace_metadata(tree, package):
+    rep_element = tree.xpath('/pkg:package/pkg:metadata', namespaces=default_ns)[0]
+    new_element = package.xpath('//metadata', namespaces=default_ns)[0]
+    tree.replace(rep_element, new_element)
+    return etree.tostring(tree,
+                          xml_declaration=True,
+                          encoding='utf-8',
+                          pretty_print=True).decode('utf-8')
 
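As a hedged aside (not part of the commit), the core of the new get_content_opf() helper can be reproduced with nothing but zipfile and lxml; the EPUB path below is a placeholder:

import zipfile
from lxml import etree

ns = {'n': 'urn:oasis:names:tc:opendocument:xmlns:container',
      'pkg': 'http://www.idpf.org/2007/opf'}


def read_content_opf(epub_path):
    # META-INF/container.xml names the package document; parse that OPF file from the zip
    with zipfile.ZipFile(epub_path) as epub:
        container = etree.fromstring(epub.read('META-INF/container.xml'))
        opf_name = container.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
        return etree.fromstring(epub.read(opf_name)), opf_name


# tree, opf_name = read_content_opf('/tmp/example.epub')  # placeholder path
# metadata = tree.xpath('/pkg:package/pkg:metadata', namespaces=ns)[0]

do_kepubify_metadata_replace() in cps/helper.py below chains get_content_opf(), create_new_metadata_backup(), replace_metadata() and updateEpub() in exactly this spirit.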
cps/file_helper.py (new file, 32 lines)

@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+
+# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
+#  Copyright (C) 2023 OzzieIsaacs
+#
+#  This program is free software: you can redistribute it and/or modify
+#  it under the terms of the GNU General Public License as published by
+#  the Free Software Foundation, either version 3 of the License, or
+#  (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from tempfile import gettempdir
+import os
+import shutil
+
+
+def get_temp_dir():
+    tmp_dir = os.path.join(gettempdir(), 'calibre_web')
+    if not os.path.isdir(tmp_dir):
+        os.mkdir(tmp_dir)
+    return tmp_dir
+
+
+def del_temp_dir():
+    tmp_dir = os.path.join(gettempdir(), 'calibre_web')
+    shutil.rmtree(tmp_dir)
 
@@ -23,7 +23,6 @@
 import os
 import hashlib
 import json
-import tempfile
 from uuid import uuid4
 from time import time
 from shutil import move, copyfile
@@ -34,6 +33,7 @@ from flask_login import login_required
 
 from . import logger, gdriveutils, config, ub, calibre_db, csrf
 from .admin import admin_required
+from .file_helper import get_temp_dir
 
 gdrive = Blueprint('gdrive', __name__, url_prefix='/gdrive')
 log = logger.create()
@@ -139,9 +139,7 @@ try:
         dbpath = os.path.join(config.config_calibre_dir, "metadata.db").encode()
         if not response['deleted'] and response['file']['title'] == 'metadata.db' \
                 and response['file']['md5Checksum'] != hashlib.md5(dbpath):  # nosec
-            tmp_dir = os.path.join(tempfile.gettempdir(), 'calibre_web')
-            if not os.path.isdir(tmp_dir):
-                os.mkdir(tmp_dir)
+            tmp_dir = get_temp_dir()
 
             log.info('Database file updated')
             copyfile(dbpath, os.path.join(tmp_dir, "metadata.db_" + str(current_milli_time())))
 
@@ -34,7 +34,6 @@ except ImportError:
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.exc import OperationalError, InvalidRequestError, IntegrityError
 from sqlalchemy.orm.exc import StaleDataError
-from sqlalchemy.sql.expression import text
 
 try:
     from httplib2 import __version__ as httplib2_version
 
cps/helper.py (190 lines changed)

@@ -25,9 +25,10 @@ import re
 import shutil
 import socket
 from datetime import datetime, timedelta
-from tempfile import gettempdir
 import requests
 import unidecode
+from uuid import uuid4
+from lxml import etree
 
 from flask import send_from_directory, make_response, redirect, abort, url_for
 from flask_babel import gettext as _
@@ -54,12 +55,14 @@ from . import calibre_db, cli_param
 from .tasks.convert import TaskConvert
 from . import logger, config, db, ub, fs
 from . import gdriveutils as gd
-from .constants import STATIC_DIR as _STATIC_DIR, CACHE_TYPE_THUMBNAILS, THUMBNAIL_TYPE_COVER, THUMBNAIL_TYPE_SERIES
+from .constants import STATIC_DIR as _STATIC_DIR, CACHE_TYPE_THUMBNAILS, THUMBNAIL_TYPE_COVER, THUMBNAIL_TYPE_SERIES, SUPPORTED_CALIBRE_BINARIES
-from .subproc_wrapper import process_wait
+from .subproc_wrapper import process_wait, process_open
 from .services.worker import WorkerThread
 from .tasks.mail import TaskEmail
 from .tasks.thumbnail import TaskClearCoverThumbnailCache, TaskGenerateCoverThumbnails
 from .tasks.metadata_backup import TaskBackupMetadata
+from .file_helper import get_temp_dir
+from .epub_helper import get_content_opf, create_new_metadata_backup, updateEpub, replace_metadata
 
 log = logger.create()
 
@@ -781,7 +784,7 @@ def get_book_cover_internal(book, resolution=None):
 
         # Send the book cover from the Calibre directory
         else:
-            cover_file_path = os.path.join(config.config_calibre_dir, book.path)
+            cover_file_path = os.path.join(config.get_book_path(), book.path)
             if os.path.isfile(os.path.join(cover_file_path, "cover.jpg")):
                 return send_from_directory(cover_file_path, "cover.jpg")
             else:
@@ -921,10 +924,7 @@ def save_cover(img, book_path):
         return False, _("Only jpg/jpeg files are supported as coverfile")
 
     if config.config_use_google_drive:
-        tmp_dir = os.path.join(gettempdir(), 'calibre_web')
-
-        if not os.path.isdir(tmp_dir):
-            os.mkdir(tmp_dir)
+        tmp_dir = get_temp_dir()
         ret, message = save_cover_from_filestorage(tmp_dir, "uploaded_cover.jpg", img)
         if ret is True:
             gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg').replace("\\", "/"),
@@ -934,33 +934,92 @@ def save_cover(img, book_path):
         else:
             return False, message
     else:
-        return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img)
+        return save_cover_from_filestorage(os.path.join(config.get_book_path(), book_path), "cover.jpg", img)
 
 
 def do_download_file(book, book_format, client, data, headers):
+    book_name = data.name
     if config.config_use_google_drive:
         # startTime = time.time()
-        df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
+        df = gd.getFileFromEbooksFolder(book.path, book_name + "." + book_format)
         # log.debug('%s', time.time() - startTime)
         if df:
-            return gd.do_gdrive_download(df, headers)
+            if config.config_embed_metadata and (
+                    (book_format == "kepub" and config.config_kepubifypath ) or
+                    (book_format != "kepub" and config.config_binariesdir)):
+                output_path = os.path.join(config.config_calibre_dir, book.path)
+                if not os.path.exists(output_path):
+                    os.makedirs(output_path)
+                output = os.path.join(config.config_calibre_dir, book.path, book_name + "." + book_format)
+                gd.downloadFile(book.path, book_name + "." + book_format, output)
+                if book_format == "kepub" and config.config_kepubifypath:
+                    filename, download_name = do_kepubify_metadata_replace(book, output)
+                elif book_format != "kepub" and config.config_binariesdir:
+                    filename, download_name = do_calibre_export(book.id, book_format)
+            else:
+                return gd.do_gdrive_download(df, headers)
         else:
             abort(404)
     else:
-        filename = os.path.join(config.config_calibre_dir, book.path)
-        if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
+        filename = os.path.join(config.get_book_path(), book.path)
+        if not os.path.isfile(os.path.join(filename, book_name + "." + book_format)):
             # ToDo: improve error handling
-            log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))
+            log.error('File not found: %s', os.path.join(filename, book_name + "." + book_format))
 
         if client == "kobo" and book_format == "kepub":
            headers["Content-Disposition"] = headers["Content-Disposition"].replace(".kepub", ".kepub.epub")
 
-        response = make_response(send_from_directory(filename, data.name + "." + book_format))
-        # ToDo Check headers parameter
-        for element in headers:
-            response.headers[element[0]] = element[1]
-        log.info('Downloading file: {}'.format(os.path.join(filename, data.name + "." + book_format)))
-        return response
+        if book_format == "kepub" and config.config_kepubifypath and config.config_embed_metadata:
+            filename, download_name = do_kepubify_metadata_replace(book, os.path.join(filename,
+                                                                                      book_name + "." + book_format))
+        elif book_format != "kepub" and config.config_binariesdir and config.config_embed_metadata:
+            filename, download_name = do_calibre_export(book.id, book_format)
+        else:
+            download_name = book_name
+
+    response = make_response(send_from_directory(filename, download_name + "." + book_format))
+    # ToDo Check headers parameter
+    for element in headers:
+        response.headers[element[0]] = element[1]
+    log.info('Downloading file: {}'.format(os.path.join(filename, book_name + "." + book_format)))
+    return response
+
+
+def do_kepubify_metadata_replace(book, file_path):
+    custom_columns = (calibre_db.session.query(db.CustomColumns)
+                      .filter(db.CustomColumns.mark_for_delete == 0)
+                      .filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
+                      .order_by(db.CustomColumns.label).all())
+
+    tree, cf_name = get_content_opf(file_path)
+    package = create_new_metadata_backup(book, custom_columns, current_user.locale, _("Cover"), lang_type=2)
+    content = replace_metadata(tree, package)
+    tmp_dir = get_temp_dir()
+    temp_file_name = str(uuid4())
+    # open zipfile and replace metadata block in content.opf
+    updateEpub(file_path, os.path.join(tmp_dir, temp_file_name + ".kepub"), cf_name, content)
+    return tmp_dir, temp_file_name
+
+
+def do_calibre_export(book_id, book_format, ):
+    try:
+        quotes = [3, 5, 7, 9]
+        tmp_dir = get_temp_dir()
+        calibredb_binarypath = get_calibre_binarypath("calibredb")
+        temp_file_name = str(uuid4())
+        opf_command = [calibredb_binarypath, 'export', '--dont-write-opf', '--with-library', config.config_calibre_dir,
+                       '--to-dir', tmp_dir, '--formats', book_format, "--template", "{}".format(temp_file_name),
+                       str(book_id)]
+        p = process_open(opf_command, quotes)
+        _, err = p.communicate()
+        if err:
+            log.error('Metadata embedder encountered an error: %s', err)
+        return tmp_dir, temp_file_name
+    except OSError as ex:
+        # ToDo real error handling
+        log.error_or_exception(ex)
+        return None, None
 
 
 ##################################
 
@@ -984,6 +1043,47 @@ def check_unrar(unrar_location):
         return _('Error executing UnRar')
 
 
+def check_calibre(calibre_location):
+    if not calibre_location:
+        return
+
+    if not os.path.exists(calibre_location):
+        return _('Could not find the specified directory')
+
+    if not os.path.isdir(calibre_location):
+        return _('Please specify a directory, not a file')
+
+    try:
+        supported_binary_paths = [os.path.join(calibre_location, binary)
+                                  for binary in SUPPORTED_CALIBRE_BINARIES.values()]
+        binaries_available = [os.path.isfile(binary_path) for binary_path in supported_binary_paths]
+        binaries_executable = [os.access(binary_path, os.X_OK) for binary_path in supported_binary_paths]
+        if all(binaries_available) and all(binaries_executable):
+            values = [process_wait([binary_path, "--version"], pattern='\(calibre (.*)\)')
+                      for binary_path in supported_binary_paths]
+            if all(values):
+                version = values[0].group(1)
+                log.debug("calibre version %s", version)
+            else:
+                return _('Calibre binaries not viable')
+        else:
+            ret_val = []
+            missing_binaries=[path for path, available in
+                              zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_available) if not available]
+
+            missing_perms=[path for path, available in
+                           zip(SUPPORTED_CALIBRE_BINARIES.values(), binaries_executable) if not available]
+            if missing_binaries:
+                ret_val.append(_('Missing calibre binaries: %(missing)s', missing=", ".join(missing_binaries)))
+            if missing_perms:
+                ret_val.append(_('Missing executable permissions: %(missing)s', missing=", ".join(missing_perms)))
+            return ", ".join(ret_val)
+
+    except (OSError, UnicodeDecodeError) as err:
+        log.error_or_exception(err)
+        return _('Error excecuting Calibre')
+
+
 def json_serial(obj):
     """JSON serializer for objects not serializable by default json code"""
 
@@ -1008,43 +1108,49 @@ def tags_filters():
 
 
 # checks if domain is in database (including wildcards)
 # example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
 # from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
 # in all calls the email address is checked for validity
 def check_valid_domain(domain_text):
     sql = "SELECT * FROM registration WHERE (:domain LIKE domain and allow = 1);"
-    result = ub.session.query(ub.Registration).from_statement(text(sql)).params(domain=domain_text).all()
-    if not len(result):
+    if not len(ub.session.query(ub.Registration).from_statement(text(sql)).params(domain=domain_text).all()):
         return False
     sql = "SELECT * FROM registration WHERE (:domain LIKE domain and allow = 0);"
-    result = ub.session.query(ub.Registration).from_statement(text(sql)).params(domain=domain_text).all()
-    return not len(result)
+    return not len(ub.session.query(ub.Registration).from_statement(text(sql)).params(domain=domain_text).all())
 
 
 def get_download_link(book_id, book_format, client):
     book_format = book_format.split(".")[0]
     book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
-    data1= ""
     if book:
         data1 = calibre_db.get_book_format(book.id, book_format.upper())
+        if data1:
+            # collect downloaded books only for registered user and not for anonymous user
+            if current_user.is_authenticated:
+                ub.update_download(book_id, int(current_user.id))
+            file_name = book.title
+            if len(book.authors) > 0:
+                file_name = file_name + ' - ' + book.authors[0].name
+            file_name = get_valid_filename(file_name, replace_whitespace=False)
+            headers = Headers()
+            headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
+            headers["Content-Disposition"] = "attachment; filename=%s.%s; filename*=UTF-8''%s.%s" % (
+                quote(file_name), book_format, quote(file_name), book_format)
+            return do_download_file(book, book_format, client, data1, headers)
     else:
         log.error("Book id {} not found for downloading".format(book_id))
         abort(404)
-    if data1:
-        # collect downloaded books only for registered user and not for anonymous user
-        if current_user.is_authenticated:
-            ub.update_download(book_id, int(current_user.id))
-        file_name = book.title
-        if len(book.authors) > 0:
-            file_name = file_name + ' - ' + book.authors[0].name
-        file_name = get_valid_filename(file_name, replace_whitespace=False)
-        headers = Headers()
-        headers["Content-Type"] = mimetypes.types_map.get('.' + book_format, "application/octet-stream")
-        headers["Content-Disposition"] = "attachment; filename=%s.%s; filename*=UTF-8''%s.%s" % (
-            quote(file_name), book_format, quote(file_name), book_format)
-        return do_download_file(book, book_format, client, data1, headers)
-    else:
-        abort(404)
+
+
+def get_calibre_binarypath(binary):
+    binariesdir = config.config_binariesdir
+    if binariesdir:
+        try:
+            return os.path.join(binariesdir, SUPPORTED_CALIBRE_BINARIES[binary])
+        except KeyError as ex:
+            log.error("Binary not supported by Calibre-Web: %s", SUPPORTED_CALIBRE_BINARIES[binary])
+            pass
+    return ""
 
 
 def clear_cover_thumbnail_cache(book_id):
 
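A rough, self-contained equivalent of the version probe that check_calibre() and autodetect_calibre_binaries() run through process_wait(); plain subprocess and re are used here instead of Calibre-Web's wrapper, and the binary path is a placeholder:

import re
import subprocess


def calibre_version(binary_path):
    # calibre tools print e.g. "calibredb (calibre 6.21)"; pull out the version string
    out = subprocess.run([binary_path, "--version"], capture_output=True, text=True).stdout
    match = re.search(r'\(calibre (.*)\)', out)
    return match.group(1) if match else None


# print(calibre_version('/opt/calibre/calibredb'))  # placeholder path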
@@ -208,7 +208,7 @@ def HandleSyncRequest():
     for book in books:
         formats = [data.format for data in book.Books.data]
         if 'KEPUB' not in formats and config.config_kepubifypath and 'EPUB' in formats:
-            helper.convert_book_format(book.Books.id, config.config_calibre_dir, 'EPUB', 'KEPUB', current_user.name)
+            helper.convert_book_format(book.Books.id, config.get_book_path(), 'EPUB', 'KEPUB', current_user.name)
 
         kobo_reading_state = get_or_create_reading_state(book.Books.id)
         entitlement = {
 
@@ -502,7 +502,7 @@ def render_element_index(database_column, linked_table, folder):
         entries = entries.join(linked_table).join(db.Books)
     entries = entries.filter(calibre_db.common_filters()).group_by(func.upper(func.substr(database_column, 1, 1))).all()
     elements = []
-    if off == 0:
+    if off == 0 and entries:
         elements.append({'id': "00", 'name': _("All")})
         shift = 1
     for entry in entries[
 
@ -21,6 +21,7 @@ import datetime
|
||||||
from . import config, constants
|
from . import config, constants
|
||||||
from .services.background_scheduler import BackgroundScheduler, CronTrigger, use_APScheduler
|
from .services.background_scheduler import BackgroundScheduler, CronTrigger, use_APScheduler
|
||||||
from .tasks.database import TaskReconnectDatabase
|
from .tasks.database import TaskReconnectDatabase
|
||||||
|
from .tasks.tempFolder import TaskDeleteTempFolder
|
||||||
from .tasks.thumbnail import TaskGenerateCoverThumbnails, TaskGenerateSeriesThumbnails, TaskClearCoverThumbnailCache
|
from .tasks.thumbnail import TaskGenerateCoverThumbnails, TaskGenerateSeriesThumbnails, TaskClearCoverThumbnailCache
|
||||||
from .services.worker import WorkerThread
|
from .services.worker import WorkerThread
|
||||||
from .tasks.metadata_backup import TaskBackupMetadata
|
from .tasks.metadata_backup import TaskBackupMetadata
|
||||||
|
@@ -31,6 +32,9 @@ def get_scheduled_tasks(reconnect=True):
     if reconnect:
         tasks.append([lambda: TaskReconnectDatabase(), 'reconnect', False])

+    # Delete temp folder
+    tasks.append([lambda: TaskDeleteTempFolder(), 'delete temp', True])
+
     # Generate metadata.opf file for each changed book
     if config.schedule_metadata_backup:
         tasks.append([lambda: TaskBackupMetadata("en"), 'backup metadata', False])
@@ -86,6 +90,8 @@ def register_startup_tasks():
         # Ignore tasks that should currently be running, as these will be added when registering scheduled tasks
         if constants.APP_MODE in ['development', 'test'] and not should_task_be_running(start, duration):
             scheduler.schedule_tasks_immediately(tasks=get_scheduled_tasks(False))
+        else:
+            scheduler.schedule_tasks_immediately(tasks=[[lambda: TaskDeleteTempFolder(), 'delete temp', True]])


 def should_task_be_running(start, duration):
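As the two hunks above show, scheduled work in cps/schedule.py is described as [task_factory, task_name, cancellable] triples, and the temp-folder cleanup is now registered both on the regular schedule and immediately at startup. A small sketch of that registration pattern with placeholder factories (plain strings stand in for the real CalibreTask instances):

def get_scheduled_tasks(reconnect=True, metadata_backup=False):
    # Mirror of the triple layout used above; the factories here are placeholders.
    tasks = []
    if reconnect:
        tasks.append([lambda: "TaskReconnectDatabase()", 'reconnect', False])
    # New in this commit: clear the temp folder on the same schedule.
    tasks.append([lambda: "TaskDeleteTempFolder()", 'delete temp', True])
    if metadata_backup:
        tasks.append([lambda: "TaskBackupMetadata('en')", 'backup metadata', False])
    return tasks


for factory, name, cancellable in get_scheduled_tasks():
    print(name, cancellable, factory())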
@@ -179,8 +179,9 @@ kthoom.ImageFile = function(file) {
 };

 function updateDirectionButtons(){
-    var left, right = 1;
-    if (currentImage == 0 ) {
+    var left = 1;
+    var right = 1;
+    if (currentImage <= 0 ) {
         if (settings.direction === 0) {
             left = 0;
         } else {
cps/tasks/convert.py  (70 changes, Executable file → Normal file)
@@ -19,8 +19,10 @@
 import os
 import re
 from glob import glob
-from shutil import copyfile
+from shutil import copyfile, copyfileobj
 from markupsafe import escape
+from time import time
+from uuid import uuid4

 from sqlalchemy.exc import SQLAlchemyError
 from flask_babel import lazy_gettext as N_
@@ -32,13 +34,15 @@ from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
 from cps.kobo_sync_status import remove_synced_book
 from cps.ub import init_db_thread
+from cps.file_helper import get_temp_dir

 from cps.tasks.mail import TaskEmail
-from cps import gdriveutils
+from cps import gdriveutils, helper
+from cps.constants import SUPPORTED_CALIBRE_BINARIES

 log = logger.create()

+current_milli_time = lambda: int(round(time() * 1000))

 class TaskConvert(CalibreTask):
     def __init__(self, file_path, book_id, task_message, settings, ereader_mail, user=None):
@@ -61,24 +65,33 @@ class TaskConvert(CalibreTask):
             data = worker_db.get_book_format(self.book_id, self.settings['old_book_format'])
             df = gdriveutils.getFileFromEbooksFolder(cur_book.path,
                                                      data.name + "." + self.settings['old_book_format'].lower())
+            df_cover = gdriveutils.getFileFromEbooksFolder(cur_book.path, "cover.jpg")
             if df:
-                datafile = os.path.join(config.config_calibre_dir,
+                datafile = os.path.join(config.get_book_path(),
                                         cur_book.path,
                                         data.name + "." + self.settings['old_book_format'].lower())
-                if not os.path.exists(os.path.join(config.config_calibre_dir, cur_book.path)):
-                    os.makedirs(os.path.join(config.config_calibre_dir, cur_book.path))
+                if df_cover:
+                    datafile_cover = os.path.join(config.get_book_path(),
+                                                  cur_book.path, "cover.jpg")
+                if not os.path.exists(os.path.join(config.get_book_path(), cur_book.path)):
+                    os.makedirs(os.path.join(config.get_book_path(), cur_book.path))
                 df.GetContentFile(datafile)
+                if df_cover:
+                    df_cover.GetContentFile(datafile_cover)
                 worker_db.session.close()
             else:
+                # ToDo Include cover in error handling
                 error_message = _("%(format)s not found on Google Drive: %(fn)s",
                                   format=self.settings['old_book_format'],
                                   fn=data.name + "." + self.settings['old_book_format'].lower())
                 worker_db.session.close()
-                return error_message
+                return self._handleError(self, error_message)

         filename = self._convert_ebook_format()
         if config.config_use_google_drive:
             os.remove(self.file_path + '.' + self.settings['old_book_format'].lower())
+            if df_cover:
+                os.remove(os.path.join(config.config_calibre_dir, cur_book.path, "cover.jpg"))

         if filename:
             if config.config_use_google_drive:
@@ -112,7 +125,7 @@ class TaskConvert(CalibreTask):

         # check to see if destination format already exists - or if book is in database
         # if it does - mark the conversion task as complete and return a success
-        # this will allow send to E-Reader workflow to continue to work
+        # this will allow to send to E-Reader workflow to continue to work
         if os.path.isfile(file_path + format_new_ext) or\
                 local_db.get_book_format(self.book_id, self.settings['new_book_format']):
             log.info("Book id %d already converted to %s", book_id, format_new_ext)
@@ -152,7 +165,8 @@ class TaskConvert(CalibreTask):
             if not os.path.exists(config.config_converterpath):
                 self._handleError(N_("Calibre ebook-convert %(tool)s not found", tool=config.config_converterpath))
                 return
-            check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext)
+            has_cover = local_db.get_book(book_id).has_cover
+            check, error_message = self._convert_calibre(file_path, format_old_ext, format_new_ext, has_cover)

         if check == 0:
             cur_book = local_db.get_book(book_id)
|
||||||
return
|
return
|
||||||
|
|
||||||
def _convert_kepubify(self, file_path, format_old_ext, format_new_ext):
|
def _convert_kepubify(self, file_path, format_old_ext, format_new_ext):
|
||||||
|
if config.config_embed_metadata and config.config_binariesdir:
|
||||||
|
tmp_dir, temp_file_name = helper.do_calibre_export(self.book_id, format_old_ext[1:])
|
||||||
|
filename = os.path.join(tmp_dir, temp_file_name + format_old_ext)
|
||||||
|
temp_file_path = tmp_dir
|
||||||
|
else:
|
||||||
|
filename = file_path + format_old_ext
|
||||||
|
temp_file_path = os.path.dirname(file_path)
|
||||||
quotes = [1, 3]
|
quotes = [1, 3]
|
||||||
command = [config.config_kepubifypath, (file_path + format_old_ext), '-o', os.path.dirname(file_path)]
|
command = [config.config_kepubifypath, filename, '-o', temp_file_path, '-i']
|
||||||
try:
|
try:
|
||||||
p = process_open(command, quotes)
|
p = process_open(command, quotes)
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
|
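With metadata embedding enabled, _convert_kepubify no longer feeds the library file straight into kepubify; it first exports a copy through Calibre (helper.do_calibre_export, assumed here to return a (tmp_dir, file_name_without_extension) tuple) so the KEPUB carries up-to-date metadata, and kepubify then writes its output into that temp folder. A sketch of just the input/output selection:

import os


def build_kepubify_command(kepubify_path, file_path, format_old_ext,
                           embed_metadata, binariesdir, exported=None):
    # `exported` stands in for helper.do_calibre_export(book_id, fmt);
    # it is assumed to be a (tmp_dir, temp_file_name) tuple when embedding is on.
    if embed_metadata and binariesdir and exported:
        tmp_dir, temp_file_name = exported
        filename = os.path.join(tmp_dir, temp_file_name + format_old_ext)
        temp_file_path = tmp_dir
    else:
        filename = file_path + format_old_ext
        temp_file_path = os.path.dirname(file_path)
    return [kepubify_path, filename, '-o', temp_file_path, '-i']


print(build_kepubify_command("/usr/bin/kepubify", "/books/Author/Title/Title", ".epub",
                             True, "/opt/calibre", exported=("/tmp/calibre-web", "export")))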
@@ -209,13 +230,12 @@ class TaskConvert(CalibreTask):
             if p.poll() is not None:
                 break

-        # ToD Handle
         # process returncode
         check = p.returncode

         # move file
         if check == 0:
-            converted_file = glob(os.path.join(os.path.dirname(file_path), "*.kepub.epub"))
+            converted_file = glob(os.path.splitext(filename)[0] + "*.kepub.epub")
             if len(converted_file) == 1:
                 copyfile(converted_file[0], (file_path + format_new_ext))
                 os.unlink(converted_file[0])
@@ -224,16 +244,28 @@ class TaskConvert(CalibreTask):
                                            folder=os.path.dirname(file_path))
         return check, None

-    def _convert_calibre(self, file_path, format_old_ext, format_new_ext):
+    def _convert_calibre(self, file_path, format_old_ext, format_new_ext, has_cover):
         try:
-            # Linux py2.7 encode as list without quotes no empty element for parameters
-            # linux py3.x no encode and as list without quotes no empty element for parameters
-            # windows py2.7 encode as string with quotes empty element for parameters is okay
-            # windows py 3.x no encode and as string with quotes empty element for parameters is okay
-            # separate handling for windows and linux
-            quotes = [1, 2]
+            # path_tmp_opf = self._embed_metadata()
+            if config.config_embed_metadata:
+                quotes = [3, 5]
+                tmp_dir = get_temp_dir()
+                calibredb_binarypath = os.path.join(config.config_binariesdir, SUPPORTED_CALIBRE_BINARIES["calibredb"])
+                opf_command = [calibredb_binarypath, 'show_metadata', '--as-opf', str(self.book_id),
+                               '--with-library', config.config_calibre_dir]
+                p = process_open(opf_command, quotes)
+                p.wait()
+                path_tmp_opf = os.path.join(tmp_dir, "metadata_" + str(uuid4()) + ".opf")
+                with open(path_tmp_opf, 'w') as fd:
+                    copyfileobj(p.stdout, fd)
+
+            quotes = [1, 2, 4, 6]
             command = [config.config_converterpath, (file_path + format_old_ext),
                        (file_path + format_new_ext)]
+            if config.config_embed_metadata:
+                command.extend(['--from-opf', path_tmp_opf])
+                if has_cover:
+                    command.extend(['--cover', os.path.join(os.path.dirname(file_path), 'cover.jpg')])
             quotes_index = 3
             if config.config_calibre:
                 parameters = config.config_calibre.split(" ")
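The embedding step itself asks calibredb for the book's metadata as an OPF document, stores it in a temporary file, and passes that file (plus the cover, when the book has one) to ebook-convert. A simplified sketch of the same pipeline using subprocess instead of Calibre-Web's process_open wrapper; the paths and book id are illustrative:

import os
import subprocess
import tempfile
from shutil import copyfileobj
from uuid import uuid4

book_id = 42                                   # illustrative
library_dir = "/library"                       # directory containing metadata.db
calibredb = "/opt/calibre/calibredb"           # e.g. resolved via get_calibre_binarypath()
ebook_convert = "/opt/calibre/ebook-convert"
source = "/books/Author/Title/Title.epub"
target = "/books/Author/Title/Title.mobi"

# 1. Dump the current metadata of the book as OPF.
opf_command = [calibredb, 'show_metadata', '--as-opf', str(book_id),
               '--with-library', library_dir]
path_tmp_opf = os.path.join(tempfile.gettempdir(), "metadata_" + str(uuid4()) + ".opf")
with subprocess.Popen(opf_command, stdout=subprocess.PIPE, text=True) as p, \
        open(path_tmp_opf, 'w') as fd:
    copyfileobj(p.stdout, fd)

# 2. Convert, embedding the freshly exported metadata and the cover.
command = [ebook_convert, source, target, '--from-opf', path_tmp_opf,
           '--cover', os.path.join(os.path.dirname(source), 'cover.jpg')]
subprocess.run(command, check=True)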
cps/tasks/mail.py  (2 changes, Executable file → Normal file)
@@ -239,7 +239,7 @@ class TaskEmail(CalibreTask):
     @classmethod
     def _get_attachment(cls, book_path, filename):
         """Get file as MIMEBase message"""
-        calibre_path = config.config_calibre_dir
+        calibre_path = config.get_book_path()
         if config.config_use_google_drive:
             df = gdriveutils.getFileFromEbooksFolder(book_path, filename)
             if df:
@@ -17,26 +17,13 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 import os
-from urllib.request import urlopen
 from lxml import etree


 from cps import config, db, gdriveutils, logger
 from cps.services.worker import CalibreTask
 from flask_babel import lazy_gettext as N_

-OPF_NAMESPACE = "http://www.idpf.org/2007/opf"
-PURL_NAMESPACE = "http://purl.org/dc/elements/1.1/"
-
-OPF = "{%s}" % OPF_NAMESPACE
-PURL = "{%s}" % PURL_NAMESPACE
-
-etree.register_namespace("opf", OPF_NAMESPACE)
-etree.register_namespace("dc", PURL_NAMESPACE)
-
-OPF_NS = {None: OPF_NAMESPACE}  # the default namespace (no prefix)
-NSMAP = {'dc': PURL_NAMESPACE, 'opf': OPF_NAMESPACE}
+from ..epub_helper import create_new_metadata_backup


 class TaskBackupMetadata(CalibreTask):
@@ -101,7 +88,8 @@ class TaskBackupMetadata(CalibreTask):
         self.calibre_db.session.close()

     def open_metadata(self, book, custom_columns):
-        package = self.create_new_metadata_backup(book, custom_columns)
+        # package = self.create_new_metadata_backup(book, custom_columns)
+        package = create_new_metadata_backup(book, custom_columns, self.export_language, self.translated_title)
         if config.config_use_google_drive:
             if not gdriveutils.is_gdrive_ready():
                 raise Exception('Google Drive is configured but not ready')
@@ -114,7 +102,7 @@ class TaskBackupMetadata(CalibreTask):
                                     True)
         else:
             # ToDo: Handle book folder not found or not readable
-            book_metadata_filepath = os.path.join(config.config_calibre_dir, book.path, 'metadata.opf')
+            book_metadata_filepath = os.path.join(config.get_book_path(), book.path, 'metadata.opf')
             # prepare finalize everything and output
             doc = etree.ElementTree(package)
             try:
@@ -123,7 +111,7 @@ class TaskBackupMetadata(CalibreTask):
             except Exception as ex:
                 raise Exception('Writing Metadata failed with error: {} '.format(ex))

-    def create_new_metadata_backup(self, book, custom_columns):
+    '''def create_new_metadata_backup(self, book, custom_columns):
         # generate root package element
         package = etree.Element(OPF + "package", nsmap=OPF_NS)
         package.set("unique-identifier", "uuid_id")

@@ -208,7 +196,7 @@ class TaskBackupMetadata(CalibreTask):
         guide = etree.SubElement(package, "guide")
         etree.SubElement(guide, "reference", type="cover", title=self.translated_title, href="cover.jpg")

-        return package
+        return package'''

     @property
     def name(self):
cps/tasks/tempFolder.py  (47 changes, Normal file)
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
+#   Copyright (C) 2023 OzzieIsaacs
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+from urllib.request import urlopen
+
+from flask_babel import lazy_gettext as N_
+
+from cps import logger, file_helper
+from cps.services.worker import CalibreTask
+
+
+class TaskDeleteTempFolder(CalibreTask):
+    def __init__(self, task_message=N_('Delete temp folder contents')):
+        super(TaskDeleteTempFolder, self).__init__(task_message)
+        self.log = logger.create()
+
+    def run(self, worker_thread):
+        try:
+            file_helper.del_temp_dir()
+        except FileNotFoundError:
+            pass
+        except (PermissionError, OSError) as e:
+            self.log.error("Error deleting temp folder: {}".format(e))
+        self._handleSuccess()
+
+    @property
+    def name(self):
+        return "Delete Temp Folder"
+
+    @property
+    def is_cancellable(self):
+        return False
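The new task only wraps file_helper.del_temp_dir(), which is not part of this excerpt. A hedged sketch of what the paired temp-folder helpers could look like, reusing the 'calibre_web' sub-folder name that the old uploader code further down still shows (the real file_helper module may differ):

import os
import shutil
from tempfile import gettempdir


def get_temp_dir():
    # Create the app-specific temp folder on demand and return its path.
    tmp_dir = os.path.join(gettempdir(), 'calibre_web')
    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)
    return tmp_dir


def del_temp_dir():
    # Remove the app-specific temp folder and everything inside it.
    shutil.rmtree(os.path.join(gettempdir(), 'calibre_web'))


if __name__ == "__main__":
    print(get_temp_dir())
    del_temp_dir()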
@@ -209,7 +209,7 @@ class TaskGenerateCoverThumbnails(CalibreTask):
             if stream is not None:
                 stream.close()
         else:
-            book_cover_filepath = os.path.join(config.config_calibre_dir, book.path, 'cover.jpg')
+            book_cover_filepath = os.path.join(config.get_book_path(), book.path, 'cover.jpg')
             if not os.path.isfile(book_cover_filepath):
                 raise Exception('Book cover file not found')
@@ -404,7 +404,7 @@ class TaskGenerateSeriesThumbnails(CalibreTask):
             if stream is not None:
                 stream.close()

-            book_cover_filepath = os.path.join(config.config_calibre_dir, book.path, 'cover.jpg')
+            book_cover_filepath = os.path.join(config.get_book_path(), book.path, 'cover.jpg')
             if not os.path.isfile(book_cover_filepath):
                 raise Exception('Book cover file not found')
@@ -16,6 +16,18 @@
                         <button type="button" data-toggle="modal" id="calibre_modal_path" data-link="config_calibre_dir" data-filefilter="metadata.db" data-target="#fileModal" id="library_path" class="btn btn-default"><span class="glyphicon glyphicon-folder-open"></span></button>
                     </span>
                 </div>
+                <div class="form-group required">
+                    <input type="checkbox" id="config_calibre_split" name="config_calibre_split" data-control="split_settings" data-t ="{{ config.config_calibre_split_dir }}" {% if config.config_calibre_split %}checked{% endif %} >
+                    <label for="config_calibre_split">{{_('Separate Book files from Library (Highly experimental, might not work at all)')}}</label>
+                </div>
+                <div data-related="split_settings">
+                    <div class="form-group required input-group">
+                        <input type="text" class="form-control" id="config_calibre_split_dir" name="config_calibre_split_dir" value="{% if config.config_calibre_split_dir != None %}{{ config.config_calibre_split_dir }}{% endif %}" autocomplete="off">
+                        <span class="input-group-btn">
+                            <button type="button" data-toggle="modal" id="calibre_modal_split_path" data-link="config_calibre_split_dir" data-filefilter="" data-target="#fileModal" id="book_path" class="btn btn-default"><span class="glyphicon glyphicon-folder-open"></span></button>
+                        </span>
+                    </div>
+                </div>
                 {% if feature_support['gdrive'] %}
                 <div class="form-group required">
                     <input type="checkbox" id="config_use_google_drive" name="config_use_google_drive" data-control="gdrive_settings" {% if config.config_use_google_drive %}checked{% endif %} >
@@ -103,6 +103,10 @@
                 <input type="checkbox" id="config_unicode_filename" name="config_unicode_filename" {% if config.config_unicode_filename %}checked{% endif %}>
                 <label for="config_unicode_filename">{{_('Convert non-English characters in title and author while saving to disk')}}</label>
             </div>
+            <div class="form-group">
+                <input type="checkbox" id="config_embed_metadata" name="config_embed_metadata" {% if config.config_embed_metadata %}checked{% endif %}>
+                <label for="config_embed_metadata">{{_('Embed Metadata to Ebook File on Download and Conversion (needs Calibre/Kepubify binaries)')}}</label>
+            </div>
             <div class="form-group">
                 <input type="checkbox" id="config_uploading" data-control="upload_settings" name="config_uploading" {% if config.config_uploading %}checked{% endif %}>
                 <label for="config_uploading">{{_('Enable Uploads')}} {{_('(Please ensure that users also have upload permissions)')}}</label>
@@ -323,12 +327,12 @@
                 </div>
                 <div id="collapsefive" class="panel-collapse collapse">
                     <div class="panel-body">
-                        <label for="config_converterpath">{{_('Path to Calibre E-Book Converter')}}</label>
+                        <label for="config_binariesdir">{{_('Path to Calibre Binaries')}}</label>
                         <div class="form-group input-group">
-                            <input type="text" class="form-control" id="config_converterpath" name="config_converterpath" value="{% if config.config_converterpath != None %}{{ config.config_converterpath }}{% endif %}" autocomplete="off">
+                            <input type="text" class="form-control" id="config_binariesdir" name="config_binariesdir" value="{% if config.config_binariesdir != None %}{{ config.config_binariesdir }}{% endif %}" autocomplete="off">
                             <span class="input-group-btn">
-                                <button type="button" data-toggle="modal" id="converter_modal_path" data-link="config_converterpath" data-target="#fileModal" class="btn btn-default"><span class="glyphicon glyphicon-folder-open"></span></button>
+                                <button type="button" data-toggle="modal" id="binaries_modal_path" data-link="config_binariesdir" data-folderonly="true" data-target="#fileModal" class="btn btn-default"><span class="glyphicon glyphicon-folder-open"></span></button>
                             </span>
                         </div>
                         <div class="form-group">
                             <label for="config_calibre">{{_('Calibre E-Book Converter Settings')}}</label>
@@ -19,13 +19,6 @@
     <link href="{{ url_for('static', filename='css/caliBlur.css') }}" rel="stylesheet" media="screen">
     <link href="{{ url_for('static', filename='css/caliBlur_override.css') }}" rel="stylesheet" media="screen">
     {% endif %}
-    <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
-    <!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
-    <!--[if lt IE 9]>
-      <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
-      <script src="https://oss.maxcdn.com/libs/respond.js/1.3.0/respond.min.js"></script>
-    <![endif]-->

     {% block header %}{% endblock %}
   </head>
   <body class="{{ page }} shelf-down">
@@ -25,13 +25,13 @@ import threading
 import time
 import zipfile
 from io import BytesIO
-from tempfile import gettempdir

 import requests

 from flask_babel import format_datetime
 from flask_babel import gettext as _

 from . import constants, logger # config, web_server
+from .file_helper import get_temp_dir


 log = logger.create()
|
||||||
z = zipfile.ZipFile(BytesIO(r.content))
|
z = zipfile.ZipFile(BytesIO(r.content))
|
||||||
self.status = 3
|
self.status = 3
|
||||||
log.debug('Extracting zipfile')
|
log.debug('Extracting zipfile')
|
||||||
tmp_dir = gettempdir()
|
tmp_dir = get_temp_dir()
|
||||||
z.extractall(tmp_dir)
|
z.extractall(tmp_dir)
|
||||||
folder_name = os.path.join(tmp_dir, z.namelist()[0])[:-1]
|
folder_name = os.path.join(tmp_dir, z.namelist()[0])[:-1]
|
||||||
if not os.path.isdir(folder_name):
|
if not os.path.isdir(folder_name):
|
||||||
|
@@ -566,7 +566,7 @@ class Updater(threading.Thread):
        try:
            current_version[2] = int(current_version[2])
        except ValueError:
-           current_version[2] = int(current_version[2].split(' ')[0])-1
+           current_version[2] = int(current_version[2].replace("b", "").split(' ')[0])-1

        # Check if major versions are identical search for newest non-equal commit and update to this one
        if major_version_update == current_version[0]:
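A trailing 'b' in the version string marks a beta build, and the updater now strips it before comparing patch levels. A tiny example of that normalisation (the version strings are made up):

def parse_patch_level(version):
    # Third dotted component, with the updater's fallback for beta markers.
    patch = version.split('.')[2]
    try:
        return int(patch)
    except ValueError:
        # e.g. "21b": drop the beta marker and rank it below the final release.
        return int(patch.replace("b", "").split(' ')[0]) - 1


print(parse_patch_level("0.6.21"))   # 21
print(parse_patch_level("0.6.21b"))  # 20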
@@ -18,12 +18,12 @@

 import os
 import hashlib
-from tempfile import gettempdir
 from flask_babel import gettext as _

 from . import logger, comic, isoLanguages
 from .constants import BookMeta
 from .helper import split_authors
+from .file_helper import get_temp_dir

 log = logger.create()
@@ -249,10 +249,7 @@ def get_magick_version():


 def upload(uploadfile, rar_excecutable):
-    tmp_dir = os.path.join(gettempdir(), 'calibre_web')
-
-    if not os.path.isdir(tmp_dir):
-        os.mkdir(tmp_dir)
+    tmp_dir = get_temp_dir()

     filename = uploadfile.filename
     filename_root, file_extension = os.path.splitext(filename)
cps/web.py  (8 changes, Executable file → Normal file)
@@ -1192,7 +1192,7 @@ def serve_book(book_id, book_format, anyname):
     if book_format.upper() == 'TXT':
         log.info('Serving book: %s', data.name)
         try:
-            rawdata = open(os.path.join(config.config_calibre_dir, book.path, data.name + "." + book_format),
+            rawdata = open(os.path.join(config.get_book_path(), book.path, data.name + "." + book_format),
                            "rb").read()
             result = chardet.detect(rawdata)
             try:
@@ -1209,7 +1209,7 @@ def serve_book(book_id, book_format, anyname):
         return "File Not Found"
     # enable byte range read of pdf
     response = make_response(
-        send_from_directory(os.path.join(config.config_calibre_dir, book.path), data.name + "." + book_format))
+        send_from_directory(os.path.join(config.get_book_path(), book.path), data.name + "." + book_format))
     if not range_header:
         log.info('Serving book: %s', data.name)
         response.headers['Accept-Ranges'] = 'bytes'
@@ -1233,7 +1233,7 @@ def send_to_ereader(book_id, book_format, convert):
         response = [{'type': "danger", 'message': _("Please configure the SMTP mail settings first...")}]
         return Response(json.dumps(response), mimetype='application/json')
     elif current_user.kindle_mail:
-        result = send_mail(book_id, book_format, convert, current_user.kindle_mail, config.config_calibre_dir,
+        result = send_mail(book_id, book_format, convert, current_user.kindle_mail, config.get_book_path(),
                            current_user.name)
         if result is None:
             ub.update_download(book_id, int(current_user.id))
@@ -1354,7 +1354,7 @@ def login():
 @limiter.limit("3/minute", key_func=lambda: request.form.get('username', "").strip().lower())
 def login_post():
     form = request.form.to_dict()
-    username = form.get('username', "").strip().lower().replace("\n","\\n").replace("\r","")
+    username = form.get('username', "").strip().lower().replace("\n","").replace("\r","")
     try:
         limiter.check()
     except RateLimitExceeded:
@@ -9,7 +9,7 @@ iso-639>=0.4.5,<0.5.0
 PyPDF>=3.0.0,<3.16.0
 pytz>=2016.10
 requests>=2.28.0,<2.32.0
-SQLAlchemy>=1.3.0,<2.0.0
+SQLAlchemy>=1.3.0,<2.1.0
 tornado>=6.3,<6.4
 Wand>=0.4.4,<0.7.0
 unidecode>=0.04.19,<1.4.0
setup.cfg  (12 changes)
@@ -41,7 +41,7 @@ install_requires =
     Werkzeug<3.0.0
    APScheduler>=3.6.3,<3.11.0
    Babel>=1.3,<3.0
-   Flask-Babel>=0.11.1,<3.2.0
+   Flask-Babel>=0.11.1,<4.1.0
    Flask-Login>=0.3.2,<0.6.3
    Flask-Principal>=0.3.2,<0.5.1
    Flask>=1.0.2,<2.4.0
@@ -49,15 +49,15 @@ install_requires =
    PyPDF>=3.0.0,<3.16.0
    pytz>=2016.10
    requests>=2.28.0,<2.32.0
-   SQLAlchemy>=1.3.0,<2.0.0
+   SQLAlchemy>=1.3.0,<2.1.0
    tornado>=6.3,<6.4
    Wand>=0.4.4,<0.7.0
    unidecode>=0.04.19,<1.4.0
    lxml>=3.8.0,<5.0.0
-   flask-wtf>=0.14.2,<1.2.0
+   flask-wtf>=0.14.2,<1.3.0
    chardet>=3.0.0,<4.1.0
    advocate>=1.0.0,<1.1.0
-   Flask-Limiter>=2.3.0,<3.5.0
+   Flask-Limiter>=2.3.0,<3.6.0


 [options.packages.find]
@@ -66,7 +66,7 @@ include = cps/services*

 [options.extras_require]
 gdrive =
-   google-api-python-client>=1.7.11,<2.98.0
+   google-api-python-client>=1.7.11,<2.108.0
    gevent>20.6.0,<24.0.0
    greenlet>=0.4.17,<2.1.0
    httplib2>=0.9.2,<0.23.0
@@ -79,7 +79,7 @@ gdrive =
    rsa>=3.4.2,<4.10.0
 gmail =
    google-auth-oauthlib>=0.4.3,<1.1.0
-   google-api-python-client>=1.7.11,<2.98.0
+   google-api-python-client>=1.7.11,<2.108.0
 goodreads =
    goodreads>=0.3.2,<0.4.0
    python-Levenshtein>=0.12.0,<0.22.0
File diff suppressed because it is too large