Merge branch 'master' into cover_thumbnail
# Conflicts:
#	cps/editbooks.py
#	cps/helper.py
#	cps/web.py
#	test/Calibre-Web TestSummary_Linux.html
commit d9a83e0638
README.md (14 changed lines)
@@ -40,16 +40,12 @@ Calibre-Web is a web app providing a clean interface for browsing, reading and d

## Installation

#### Installation via pip (recommended)

1. Install calibre web via pip with the command `pip install calibreweb` (Depending on your OS and/or distro the command could also be `pip3`).
2. Optional features can also be installed via pip, please refer to [this page](https://github.com/janeczku/calibre-web/wiki/Dependencies-in-Calibre-Web-Linux-Windows) for details
3. Calibre-Web can be started afterwards by typing `cps` or `python3 -m cps`
1. To avoid problems with already installed python dependencies, it's recommended to create a virtual environment for Calibre-Web
2. Install Calibre-Web via pip with the command `pip install calibreweb` (Depending on your OS and/or distro the command could also be `pip3`).
3. Optional features can also be installed via pip, please refer to [this page](https://github.com/janeczku/calibre-web/wiki/Dependencies-in-Calibre-Web-Linux-Windows) for details
4. Calibre-Web can be started afterwards by typing `cps` or `python3 -m cps`

#### Manual installation

1. Install dependencies by running `pip3 install --target vendor -r requirements.txt` (python3.x). Alternatively, set up a python virtual environment.
2. Execute the command: `python3 cps.py` (or `nohup python3 cps.py` - recommended if you want to exit the terminal window)

Issues with Ubuntu:
Please note that running the above install command can fail on some versions of Ubuntu, saying `"can't combine user with prefix"`. This is a [known bug](https://github.com/pypa/pip/issues/3826) and can be remedied by using the command `pip install --system --target vendor -r requirements.txt` instead.

In the Wiki there are also examples for a [manual installation](https://github.com/janeczku/calibre-web/wiki/Manual-installation) and for installation on [Linux Mint](https://github.com/janeczku/calibre-web/wiki/How-To:Install-Calibre-Web-in-Linux-Mint-19-or-20)

## Quick start
@@ -32,6 +32,8 @@ To receive fixes for security vulnerabilities it is required to always upgrade t

| V 0.6.16 | JavaScript could get executed on authors page. Thanks to @alicaz ||
| V 0.6.16 | Localhost can no longer be used to upload covers. Thanks to @scara31 ||
| V 0.6.16 | Another case where public shelfs could be created without permission is prevented. Thanks to @nhiephon ||
| V 0.6.17 | The SSRF Protection can no longer be bypassed via an HTTP redirect. Thanks to @416e6e61 ||
| V 0.6.17 | The SSRF Protection can no longer be bypassed via 0.0.0.0 and its ipv6 equivalent. Thanks to @r0hanSH ||

## Statement regarding Log4j (CVE-2021-44228 and related)
cps.py (4 changed lines)
@@ -40,7 +40,7 @@ from cps.about import about

from cps.shelf import shelf
from cps.admin import admi
from cps.gdrive import gdrive
from cps.editbooks import editbook
from cps.editbooks import EditBook
from cps.remotelogin import remotelogin
from cps.search_metadata import meta
from cps.error_handler import init_errorhandler

@@ -74,7 +74,7 @@ def main():

app.register_blueprint(remotelogin)
app.register_blueprint(meta)
app.register_blueprint(gdrive)
app.register_blueprint(editbook)
app.register_blueprint(EditBook)
if kobo_available:
app.register_blueprint(kobo)
app.register_blueprint(kobo_auth)

@@ -156,7 +156,7 @@ def create_app():

services.goodreads_support.connect(config.config_goodreads_api_key,
config.config_goodreads_api_secret,
config.config_use_goodreads)
config.store_calibre_uuid(calibre_db, db.Library_Id)
config.store_calibre_uuid(calibre_db, db.LibraryId)
return app
cps/admin.py (106 changed lines)
|
@ -27,8 +27,9 @@ import json
|
|||
import time
|
||||
import operator
|
||||
from datetime import datetime, timedelta
|
||||
from functools import wraps
|
||||
|
||||
from babel import Locale as LC
|
||||
from babel import Locale
|
||||
from babel.dates import format_datetime
|
||||
from flask import Blueprint, flash, redirect, url_for, abort, request, make_response, send_from_directory, g, Response
|
||||
from flask_login import login_required, current_user, logout_user, confirm_login
|
||||
|
@ -49,7 +50,6 @@ from .render_template import render_title_template, get_sidebar_config
|
|||
from .services.worker import WorkerThread
|
||||
from . import debug_info, _BABEL_TRANSLATIONS
|
||||
|
||||
from functools import wraps
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
@ -191,10 +191,10 @@ def admin():
|
|||
else:
|
||||
commit = version['version']
|
||||
|
||||
allUser = ub.session.query(ub.User).all()
|
||||
all_user = ub.session.query(ub.User).all()
|
||||
email_settings = config.get_mail_settings()
|
||||
kobo_support = feature_support['kobo'] and config.config_kobo_sync
|
||||
return render_title_template("admin.html", allUser=allUser, email=email_settings, config=config, commit=commit,
|
||||
return render_title_template("admin.html", allUser=all_user, email=email_settings, config=config, commit=commit,
|
||||
feature_support=feature_support, kobo_support=kobo_support,
|
||||
title=_(u"Admin page"), page="admin")
|
||||
|
||||
|
@ -244,12 +244,12 @@ def calibreweb_alive():
|
|||
@login_required
|
||||
@admin_required
|
||||
def view_configuration():
|
||||
read_column = calibre_db.session.query(db.Custom_Columns)\
|
||||
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all()
|
||||
restrict_columns = calibre_db.session.query(db.Custom_Columns)\
|
||||
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all()
|
||||
read_column = calibre_db.session.query(db.CustomColumns)\
|
||||
.filter(and_(db.CustomColumns.datatype == 'bool', db.CustomColumns.mark_for_delete == 0)).all()
|
||||
restrict_columns = calibre_db.session.query(db.CustomColumns)\
|
||||
.filter(and_(db.CustomColumns.datatype == 'text', db.CustomColumns.mark_for_delete == 0)).all()
|
||||
languages = calibre_db.speaking_language()
|
||||
translations = [LC('en')] + babel.list_translations()
|
||||
translations = [Locale('en')] + babel.list_translations()
|
||||
return render_title_template("config_view_edit.html", conf=config, readColumns=read_column,
|
||||
restrictColumns=restrict_columns,
|
||||
languages=languages,
|
||||
|
@ -263,8 +263,8 @@ def view_configuration():
|
|||
def edit_user_table():
|
||||
visibility = current_user.view_settings.get('useredit', {})
|
||||
languages = calibre_db.speaking_language()
|
||||
translations = babel.list_translations() + [LC('en')]
|
||||
allUser = ub.session.query(ub.User)
|
||||
translations = babel.list_translations() + [Locale('en')]
|
||||
all_user = ub.session.query(ub.User)
|
||||
tags = calibre_db.session.query(db.Tags)\
|
||||
.join(db.books_tags_link)\
|
||||
.join(db.Books)\
|
||||
|
@ -276,10 +276,10 @@ def edit_user_table():
|
|||
else:
|
||||
custom_values = []
|
||||
if not config.config_anonbrowse:
|
||||
allUser = allUser.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
|
||||
all_user = all_user.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS)
|
||||
kobo_support = feature_support['kobo'] and config.config_kobo_sync
|
||||
return render_title_template("user_table.html",
|
||||
users=allUser.all(),
|
||||
users=all_user.all(),
|
||||
tags=tags,
|
||||
custom_values=custom_values,
|
||||
translations=translations,
|
||||
|
@ -300,10 +300,13 @@ def list_users():
|
|||
limit = int(request.args.get("limit") or 10)
|
||||
search = request.args.get("search")
|
||||
sort = request.args.get("sort", "id")
|
||||
order = request.args.get("order", "").lower()
|
||||
state = None
|
||||
if sort == "state":
|
||||
state = json.loads(request.args.get("state", "[]"))
|
||||
else:
|
||||
if sort not in ub.User.__table__.columns.keys():
|
||||
sort = "id"
|
||||
order = request.args.get("order", "").lower()
|
||||
|
||||
if sort != "state" and order:
|
||||
order = text(sort + " " + order)
|
||||
|
@ -331,7 +334,7 @@ def list_users():
|
|||
if user.default_language == "all":
|
||||
user.default = _("All")
|
||||
else:
|
||||
user.default = LC.parse(user.default_language).get_language_name(get_locale())
|
||||
user.default = Locale.parse(user.default_language).get_language_name(get_locale())
|
||||
|
||||
table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": users}
|
||||
js_list = json.dumps(table_entries, cls=db.AlchemyEncoder)
|
||||
|
@ -379,7 +382,7 @@ def delete_user():
|
|||
@login_required
|
||||
@admin_required
|
||||
def table_get_locale():
|
||||
locale = babel.list_translations() + [LC('en')]
|
||||
locale = babel.list_translations() + [Locale('en')]
|
||||
ret = list()
|
||||
current_locale = get_locale()
|
||||
for loc in locale:
|
||||
|
@ -443,7 +446,7 @@ def edit_list_user(param):
|
|||
elif param.endswith('role'):
|
||||
value = int(vals['field_index'])
|
||||
if user.name == "Guest" and value in \
|
||||
[constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
|
||||
[constants.ROLE_ADMIN, constants.ROLE_PASSWD, constants.ROLE_EDIT_SHELFS]:
|
||||
raise Exception(_("Guest can't have this role"))
|
||||
# check for valid value, last on checks for power of 2 value
|
||||
if value > 0 and value <= constants.ROLE_VIEWER and (value & value-1 == 0 or value == 1):
|
||||
|
@ -498,7 +501,7 @@ def edit_list_user(param):
|
|||
else:
|
||||
return _("Parameter not found"), 400
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
return str(ex), 400
|
||||
ub.session_commit()
|
||||
return ""
|
||||
|
@ -523,16 +526,16 @@ def update_table_settings():
|
|||
|
||||
def check_valid_read_column(column):
|
||||
if column != "0":
|
||||
if not calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.id == column) \
|
||||
.filter(and_(db.Custom_Columns.datatype == 'bool', db.Custom_Columns.mark_for_delete == 0)).all():
|
||||
if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \
|
||||
.filter(and_(db.CustomColumns.datatype == 'bool', db.CustomColumns.mark_for_delete == 0)).all():
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def check_valid_restricted_column(column):
|
||||
if column != "0":
|
||||
if not calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.id == column) \
|
||||
.filter(and_(db.Custom_Columns.datatype == 'text', db.Custom_Columns.mark_for_delete == 0)).all():
|
||||
if not calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.id == column) \
|
||||
.filter(and_(db.CustomColumns.datatype == 'text', db.CustomColumns.mark_for_delete == 0)).all():
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@ -862,10 +865,10 @@ def delete_restriction(res_type, user_id):
|
|||
usr = current_user
|
||||
if element['id'].startswith('a'):
|
||||
usr.allowed_tags = restriction_deletion(element, usr.list_allowed_tags)
|
||||
ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.name, usr.list_allowed_tags))
|
||||
ub.session_commit("Deleted allowed tags of user {}: {}".format(usr.name, element['Element']))
|
||||
elif element['id'].startswith('d'):
|
||||
usr.denied_tags = restriction_deletion(element, usr.list_denied_tags)
|
||||
ub.session_commit("Deleted denied tags of user {}: {}".format(usr.name, usr.list_allowed_tags))
|
||||
ub.session_commit("Deleted denied tag of user {}: {}".format(usr.name, element['Element']))
|
||||
elif res_type == 3: # Columns per user
|
||||
if isinstance(user_id, int):
|
||||
usr = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
|
||||
|
@ -874,12 +877,12 @@ def delete_restriction(res_type, user_id):
|
|||
if element['id'].startswith('a'):
|
||||
usr.allowed_column_value = restriction_deletion(element, usr.list_allowed_column_values)
|
||||
ub.session_commit("Deleted allowed columns of user {}: {}".format(usr.name,
|
||||
usr.list_allowed_column_values))
|
||||
usr.list_allowed_column_values()))
|
||||
|
||||
elif element['id'].startswith('d'):
|
||||
usr.denied_column_value = restriction_deletion(element, usr.list_denied_column_values)
|
||||
ub.session_commit("Deleted denied columns of user {}: {}".format(usr.name,
|
||||
usr.list_denied_column_values))
|
||||
usr.list_denied_column_values()))
|
||||
return ""
|
||||
|
||||
|
||||
|
@ -1077,12 +1080,12 @@ def _configuration_oauth_helper(to_save):
|
|||
reboot_required = False
|
||||
for element in oauthblueprints:
|
||||
if to_save["config_" + str(element['id']) + "_oauth_client_id"] != element['oauth_client_id'] \
|
||||
or to_save["config_" + str(element['id']) + "_oauth_client_secret"] != element['oauth_client_secret']:
|
||||
or to_save["config_" + str(element['id']) + "_oauth_client_secret"] != element['oauth_client_secret']:
|
||||
reboot_required = True
|
||||
element['oauth_client_id'] = to_save["config_" + str(element['id']) + "_oauth_client_id"]
|
||||
element['oauth_client_secret'] = to_save["config_" + str(element['id']) + "_oauth_client_secret"]
|
||||
if to_save["config_" + str(element['id']) + "_oauth_client_id"] \
|
||||
and to_save["config_" + str(element['id']) + "_oauth_client_secret"]:
|
||||
and to_save["config_" + str(element['id']) + "_oauth_client_secret"]:
|
||||
active_oauths += 1
|
||||
element["active"] = 1
|
||||
else:
|
||||
|
@ -1135,7 +1138,7 @@ def _configuration_ldap_helper(to_save):
|
|||
if not config.config_ldap_provider_url \
|
||||
or not config.config_ldap_port \
|
||||
or not config.config_ldap_dn \
|
||||
or not config.config_ldap_user_object:
|
||||
or not config.config_ldap_user_object:
|
||||
return reboot_required, _configuration_result(_('Please Enter a LDAP Provider, '
|
||||
'Port, DN and User Object Identifier'))
|
||||
|
||||
|
@ -1210,15 +1213,16 @@ def _db_configuration_update_helper():
|
|||
'',
|
||||
to_save['config_calibre_dir'],
|
||||
flags=re.IGNORECASE)
|
||||
db_valid = False
|
||||
try:
|
||||
db_change, db_valid = _db_simulate_change()
|
||||
|
||||
# gdrive_error drive setup
|
||||
gdrive_error = _configuration_gdrive_helper(to_save)
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
_db_configuration_result(_("Settings DB is not Writeable"), gdrive_error)
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
_db_configuration_result(_(u"Database error: %(error)s.", error=e.orig), gdrive_error)
|
||||
try:
|
||||
metadata_db = os.path.join(to_save['config_calibre_dir'], "metadata.db")
|
||||
if config.config_use_google_drive and is_gdrive_ready() and not os.path.exists(metadata_db):
|
||||
|
@ -1228,11 +1232,11 @@ def _db_configuration_update_helper():
|
|||
return _db_configuration_result('{}'.format(ex), gdrive_error)
|
||||
|
||||
if db_change or not db_valid or not config.db_configured \
|
||||
or config.config_calibre_dir != to_save["config_calibre_dir"]:
|
||||
or config.config_calibre_dir != to_save["config_calibre_dir"]:
|
||||
if not calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path):
|
||||
return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
|
||||
gdrive_error)
|
||||
config.store_calibre_uuid(calibre_db, db.Library_Id)
|
||||
config.store_calibre_uuid(calibre_db, db.LibraryId)
|
||||
# if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
|
||||
if db_change:
|
||||
log.info("Calibre Database changed, delete all Calibre-Web info related to old Database")
|
||||
|
@ -1271,7 +1275,7 @@ def _configuration_update_helper():
|
|||
_config_checkbox_int(to_save, "config_unicode_filename")
|
||||
# Reboot on config_anonbrowse with enabled ldap, as decoraters are changed in this case
|
||||
reboot_required |= (_config_checkbox_int(to_save, "config_anonbrowse")
|
||||
and config.config_login_type == constants.LOGIN_LDAP)
|
||||
and config.config_login_type == constants.LOGIN_LDAP)
|
||||
_config_checkbox_int(to_save, "config_public_reg")
|
||||
_config_checkbox_int(to_save, "config_register_email")
|
||||
reboot_required |= _config_checkbox_int(to_save, "config_kobo_sync")
|
||||
|
@ -1331,10 +1335,10 @@ def _configuration_update_helper():
|
|||
unrar_status = helper.check_unrar(config.config_rarfile_location)
|
||||
if unrar_status:
|
||||
return _configuration_result(unrar_status)
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
_configuration_result(_("Settings DB is not Writeable"))
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
_configuration_result(_(u"Database error: %(error)s.", error=e.orig))
|
||||
|
||||
config.save()
|
||||
if reboot_required:
|
||||
|
@ -1429,10 +1433,10 @@ def _handle_new_user(to_save, content, languages, translations, kobo_support):
|
|||
ub.session.rollback()
|
||||
log.error("Found an existing account for {} or {}".format(content.name, content.email))
|
||||
flash(_("Found an existing account for this e-mail address or name."), category="error")
|
||||
except OperationalError:
|
||||
except OperationalError as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
|
||||
|
||||
def _delete_user(content):
|
||||
|
@ -1546,10 +1550,10 @@ def _handle_edit_user(to_save, content, languages, translations, kobo_support):
|
|||
ub.session.rollback()
|
||||
log.error("An unknown error occurred while changing user: {}".format(str(ex)))
|
||||
flash(_(u"An unknown error occurred. Please try again later."), category="error")
|
||||
except OperationalError:
|
||||
except OperationalError as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
return ""
|
||||
|
||||
|
||||
|
@ -1559,7 +1563,7 @@ def _handle_edit_user(to_save, content, languages, translations, kobo_support):
|
|||
def new_user():
|
||||
content = ub.User()
|
||||
languages = calibre_db.speaking_language()
|
||||
translations = [LC('en')] + babel.list_translations()
|
||||
translations = [Locale('en')] + babel.list_translations()
|
||||
kobo_support = feature_support['kobo'] and config.config_kobo_sync
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
|
@ -1615,10 +1619,10 @@ def update_mailsettings():
|
|||
_config_int(to_save, "mail_size", lambda y: int(y)*1024*1024)
|
||||
try:
|
||||
config.save()
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
return edit_mailsettings()
|
||||
|
||||
if to_save.get("test"):
|
||||
|
@ -1685,7 +1689,7 @@ def edit_user(user_id):
|
|||
flash(_(u"User not found"), category="error")
|
||||
return redirect(url_for('admin.admin'))
|
||||
languages = calibre_db.speaking_language(return_all_languages=True)
|
||||
translations = babel.list_translations() + [LC('en')]
|
||||
translations = babel.list_translations() + [Locale('en')]
|
||||
kobo_support = feature_support['kobo'] and config.config_kobo_sync
|
||||
if request.method == "POST":
|
||||
to_save = request.form.to_dict()
|
||||
|
@ -1889,7 +1893,7 @@ def import_ldap_users():
|
|||
try:
|
||||
new_users = services.ldap.get_group_members(config.config_ldap_group_name)
|
||||
except (services.ldap.LDAPException, TypeError, AttributeError, KeyError) as e:
|
||||
log.debug_or_exception(e)
|
||||
log.error_or_exception(e)
|
||||
showtext['text'] = _(u'Error: %(ldaperror)s', ldaperror=e)
|
||||
return json.dumps(showtext)
|
||||
if not new_users:
|
||||
|
@ -1917,7 +1921,7 @@ def import_ldap_users():
|
|||
try:
|
||||
user_data = services.ldap.get_object_details(user=user_identifier, query_filter=query_filter)
|
||||
except AttributeError as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
continue
|
||||
if user_data:
|
||||
user_count, message = ldap_import_create_user(user, user_data)
|
||||
|
|
cps/comic.py (80 changed lines)
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
|
||||
# Copyright (C) 2018 OzzieIsaacs
|
||||
# Copyright (C) 2018-2022 OzzieIsaacs
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
|
@ -18,19 +18,16 @@
|
|||
|
||||
import os
|
||||
|
||||
from . import logger, isoLanguages
|
||||
from . import logger, isoLanguages, cover
|
||||
from .constants import BookMeta
|
||||
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
try:
|
||||
from wand.image import Image
|
||||
use_IM = True
|
||||
except (ImportError, RuntimeError) as e:
|
||||
use_IM = False
|
||||
|
||||
log = logger.create()
|
||||
|
||||
try:
|
||||
from comicapi.comicarchive import ComicArchive, MetaDataStyle
|
||||
|
@ -51,29 +48,8 @@ except (ImportError, LookupError) as e:
|
|||
use_rarfile = False
|
||||
use_comic_meta = False
|
||||
|
||||
NO_JPEG_EXTENSIONS = ['.png', '.webp', '.bmp']
|
||||
COVER_EXTENSIONS = ['.png', '.webp', '.bmp', '.jpg', '.jpeg']
|
||||
|
||||
def _cover_processing(tmp_file_name, img, extension):
|
||||
tmp_cover_name = os.path.join(os.path.dirname(tmp_file_name), 'cover.jpg')
|
||||
if extension in NO_JPEG_EXTENSIONS:
|
||||
if use_IM:
|
||||
with Image(blob=img) as imgc:
|
||||
imgc.format = 'jpeg'
|
||||
imgc.transform_colorspace('rgb')
|
||||
imgc.save(filename=tmp_cover_name)
|
||||
return tmp_cover_name
|
||||
else:
|
||||
return None
|
||||
if img:
|
||||
with open(tmp_cover_name, 'wb') as f:
|
||||
f.write(img)
|
||||
return tmp_cover_name
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def _extract_Cover_from_archive(original_file_extension, tmp_file_name, rarExecutable):
|
||||
def _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_executable):
|
||||
cover_data = extension = None
|
||||
if original_file_extension.upper() == '.CBZ':
|
||||
cf = zipfile.ZipFile(tmp_file_name)
|
||||
|
@ -81,7 +57,7 @@ def _extract_Cover_from_archive(original_file_extension, tmp_file_name, rarExecu
|
|||
ext = os.path.splitext(name)
|
||||
if len(ext) > 1:
|
||||
extension = ext[1].lower()
|
||||
if extension in COVER_EXTENSIONS:
|
||||
if extension in cover.COVER_EXTENSIONS:
|
||||
cover_data = cf.read(name)
|
||||
break
|
||||
elif original_file_extension.upper() == '.CBT':
|
||||
|
@ -90,44 +66,44 @@ def _extract_Cover_from_archive(original_file_extension, tmp_file_name, rarExecu
|
|||
ext = os.path.splitext(name)
|
||||
if len(ext) > 1:
|
||||
extension = ext[1].lower()
|
||||
if extension in COVER_EXTENSIONS:
|
||||
if extension in cover.COVER_EXTENSIONS:
|
||||
cover_data = cf.extractfile(name).read()
|
||||
break
|
||||
elif original_file_extension.upper() == '.CBR' and use_rarfile:
|
||||
try:
|
||||
rarfile.UNRAR_TOOL = rarExecutable
|
||||
rarfile.UNRAR_TOOL = rar_executable
|
||||
cf = rarfile.RarFile(tmp_file_name)
|
||||
for name in cf.getnames():
|
||||
for name in cf.namelist():
|
||||
ext = os.path.splitext(name)
|
||||
if len(ext) > 1:
|
||||
extension = ext[1].lower()
|
||||
if extension in COVER_EXTENSIONS:
|
||||
if extension in cover.COVER_EXTENSIONS:
|
||||
cover_data = cf.read(name)
|
||||
break
|
||||
except Exception as ex:
|
||||
log.debug('Rarfile failed with error: %s', ex)
|
||||
log.debug('Rarfile failed with error: {}'.format(ex))
|
||||
return cover_data, extension
|
||||
|
||||
|
||||
def _extractCover(tmp_file_name, original_file_extension, rarExecutable):
|
||||
def _extract_cover(tmp_file_name, original_file_extension, rar_executable):
|
||||
cover_data = extension = None
|
||||
if use_comic_meta:
|
||||
archive = ComicArchive(tmp_file_name, rar_exe_path=rarExecutable)
|
||||
archive = ComicArchive(tmp_file_name, rar_exe_path=rar_executable)
|
||||
for index, name in enumerate(archive.getPageNameList()):
|
||||
ext = os.path.splitext(name)
|
||||
if len(ext) > 1:
|
||||
extension = ext[1].lower()
|
||||
if extension in COVER_EXTENSIONS:
|
||||
if extension in cover.COVER_EXTENSIONS:
|
||||
cover_data = archive.getPage(index)
|
||||
break
|
||||
else:
|
||||
cover_data, extension = _extract_Cover_from_archive(original_file_extension, tmp_file_name, rarExecutable)
|
||||
return _cover_processing(tmp_file_name, cover_data, extension)
|
||||
cover_data, extension = _extract_cover_from_archive(original_file_extension, tmp_file_name, rar_executable)
|
||||
return cover.cover_processing(tmp_file_name, cover_data, extension)
|
||||
|
||||
|
||||
def get_comic_info(tmp_file_path, original_file_name, original_file_extension, rarExecutable):
|
||||
def get_comic_info(tmp_file_path, original_file_name, original_file_extension, rar_executable):
|
||||
if use_comic_meta:
|
||||
archive = ComicArchive(tmp_file_path, rar_exe_path=rarExecutable)
|
||||
archive = ComicArchive(tmp_file_path, rar_exe_path=rar_executable)
|
||||
if archive.seemsToBeAComicArchive():
|
||||
if archive.hasMetadata(MetaDataStyle.CIX):
|
||||
style = MetaDataStyle.CIX
|
||||
|
@ -137,23 +113,23 @@ def get_comic_info(tmp_file_path, original_file_name, original_file_extension, r
|
|||
style = None
|
||||
|
||||
# if style is not None:
|
||||
loadedMetadata = archive.readMetadata(style)
|
||||
loaded_metadata = archive.readMetadata(style)
|
||||
|
||||
lang = loadedMetadata.language or ""
|
||||
loadedMetadata.language = isoLanguages.get_lang3(lang)
|
||||
lang = loaded_metadata.language or ""
|
||||
loaded_metadata.language = isoLanguages.get_lang3(lang)
|
||||
|
||||
return BookMeta(
|
||||
file_path=tmp_file_path,
|
||||
extension=original_file_extension,
|
||||
title=loadedMetadata.title or original_file_name,
|
||||
title=loaded_metadata.title or original_file_name,
|
||||
author=" & ".join([credit["person"]
|
||||
for credit in loadedMetadata.credits if credit["role"] == "Writer"]) or u'Unknown',
|
||||
cover=_extractCover(tmp_file_path, original_file_extension, rarExecutable),
|
||||
description=loadedMetadata.comments or "",
|
||||
for credit in loaded_metadata.credits if credit["role"] == "Writer"]) or 'Unknown',
|
||||
cover=_extract_cover(tmp_file_path, original_file_extension, rar_executable),
|
||||
description=loaded_metadata.comments or "",
|
||||
tags="",
|
||||
series=loadedMetadata.series or "",
|
||||
series_id=loadedMetadata.issue or "",
|
||||
languages=loadedMetadata.language,
|
||||
series=loaded_metadata.series or "",
|
||||
series_id=loaded_metadata.issue or "",
|
||||
languages=loaded_metadata.language,
|
||||
publisher="")
|
||||
|
||||
return BookMeta(
|
||||
|
@ -161,7 +137,7 @@ def get_comic_info(tmp_file_path, original_file_name, original_file_extension, r
|
|||
extension=original_file_extension,
|
||||
title=original_file_name,
|
||||
author=u'Unknown',
|
||||
cover=_extractCover(tmp_file_path, original_file_extension, rarExecutable),
|
||||
cover=_extract_cover(tmp_file_path, original_file_extension, rar_executable),
|
||||
description="",
|
||||
tags="",
|
||||
series="",
|
||||
|
@@ -161,7 +161,7 @@ def selected_roles(dictionary):

BookMeta = namedtuple('BookMeta', 'file_path, extension, title, author, cover, description, tags, series, '
'series_id, languages, publisher')

STABLE_VERSION = {'version': '0.6.17 Beta'}
STABLE_VERSION = {'version': '0.6.18 Beta'}

NIGHTLY_VERSION = dict()
NIGHTLY_VERSION[0] = '$Format:%H$'

@@ -27,7 +27,6 @@ from .subproc_wrapper import process_wait

log = logger.create()

# _() necessary to make babel aware of string for translation
_NOT_CONFIGURED = _('not configured')
_NOT_INSTALLED = _('not installed')
_EXECUTION_ERROR = _('Execution permissions missing')

@@ -48,14 +47,16 @@ def _get_command_version(path, pattern, argument=None):

def get_calibre_version():
return _get_command_version(config.config_converterpath, r'ebook-convert.*\(calibre', '--version') \
or _NOT_CONFIGURED
return _get_command_version(config.config_converterpath, r'ebook-convert.*\(calibre', '--version')


def get_unrar_version():
return _get_command_version(config.config_rarfile_location, r'UNRAR.*\d') or _NOT_CONFIGURED
unrar_version = _get_command_version(config.config_rarfile_location, r'UNRAR.*\d')
if unrar_version == "not installed":
unrar_version = _get_command_version(config.config_rarfile_location, r'unrar.*\d', '-V')
return unrar_version

def get_kepubify_version():
return _get_command_version(config.config_kepubifypath, r'kepubify\s', '--version') or _NOT_CONFIGURED
return _get_command_version(config.config_kepubifypath, r'kepubify\s', '--version')
cps/cover.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-

# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2022 OzzieIsaacs
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os

try:
    from wand.image import Image
    use_IM = True
except (ImportError, RuntimeError) as e:
    use_IM = False


NO_JPEG_EXTENSIONS = ['.png', '.webp', '.bmp']
COVER_EXTENSIONS = ['.png', '.webp', '.bmp', '.jpg', '.jpeg']


def cover_processing(tmp_file_name, img, extension):
    tmp_cover_name = os.path.join(os.path.dirname(tmp_file_name), 'cover.jpg')
    if extension in NO_JPEG_EXTENSIONS:
        if use_IM:
            with Image(blob=img) as imgc:
                imgc.format = 'jpeg'
                imgc.transform_colorspace('rgb')
                imgc.save(filename=tmp_cover_name)
                return tmp_cover_name
        else:
            return None
    if img:
        with open(tmp_cover_name, 'wb') as f:
            f.write(img)
        return tmp_cover_name
    else:
        return None
cps/db.py (88 changed lines)
|
@ -17,13 +17,13 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import ast
|
||||
import json
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote
|
||||
import unidecode
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy import Table, Column, ForeignKey, CheckConstraint
|
||||
|
@ -49,11 +49,6 @@ from .pagination import Pagination
|
|||
|
||||
from weakref import WeakSet
|
||||
|
||||
try:
|
||||
import unidecode
|
||||
use_unidecode = True
|
||||
except ImportError:
|
||||
use_unidecode = False
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
@ -93,7 +88,7 @@ books_publishers_link = Table('books_publishers_link', Base.metadata,
|
|||
)
|
||||
|
||||
|
||||
class Library_Id(Base):
|
||||
class LibraryId(Base):
|
||||
__tablename__ = 'library_id'
|
||||
id = Column(Integer, primary_key=True)
|
||||
uuid = Column(String, nullable=False)
|
||||
|
@ -112,7 +107,7 @@ class Identifiers(Base):
|
|||
self.type = id_type
|
||||
self.book = book
|
||||
|
||||
def formatType(self):
|
||||
def format_type(self):
|
||||
format_type = self.type.lower()
|
||||
if format_type == 'amazon':
|
||||
return u"Amazon"
|
||||
|
@ -184,8 +179,8 @@ class Comments(Base):
|
|||
book = Column(Integer, ForeignKey('books.id'), nullable=False, unique=True)
|
||||
text = Column(String(collation='NOCASE'), nullable=False)
|
||||
|
||||
def __init__(self, text, book):
|
||||
self.text = text
|
||||
def __init__(self, comment, book):
|
||||
self.text = comment
|
||||
self.book = book
|
||||
|
||||
def get(self):
|
||||
|
@ -367,7 +362,6 @@ class Books(Base):
|
|||
self.path = path
|
||||
self.has_cover = (has_cover != None)
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return u"<Books('{0},{1}{2}{3}{4}{5}{6}{7}{8}')>".format(self.title, self.sort, self.author_sort,
|
||||
self.timestamp, self.pubdate, self.series_index,
|
||||
|
@ -375,10 +369,10 @@ class Books(Base):
|
|||
|
||||
@property
|
||||
def atom_timestamp(self):
|
||||
return (self.timestamp.strftime('%Y-%m-%dT%H:%M:%S+00:00') or '')
|
||||
return self.timestamp.strftime('%Y-%m-%dT%H:%M:%S+00:00') or ''
|
||||
|
||||
|
||||
class Custom_Columns(Base):
|
||||
class CustomColumns(Base):
|
||||
__tablename__ = 'custom_columns'
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
@ -436,7 +430,7 @@ class AlchemyEncoder(json.JSONEncoder):
|
|||
return json.JSONEncoder.default(self, o)
|
||||
|
||||
|
||||
class CalibreDB():
|
||||
class CalibreDB:
|
||||
_init = False
|
||||
engine = None
|
||||
config = None
|
||||
|
@ -460,7 +454,7 @@ class CalibreDB():
|
|||
self.update_title_sort(self.config)
|
||||
|
||||
@classmethod
|
||||
def setup_db_cc_classes(self, cc):
|
||||
def setup_db_cc_classes(cls, cc):
|
||||
cc_ids = []
|
||||
books_custom_column_links = {}
|
||||
for row in cc:
|
||||
|
@ -539,16 +533,16 @@ class CalibreDB():
|
|||
return False, False
|
||||
try:
|
||||
check_engine = create_engine('sqlite://',
|
||||
echo=False,
|
||||
isolation_level="SERIALIZABLE",
|
||||
connect_args={'check_same_thread': False},
|
||||
poolclass=StaticPool)
|
||||
echo=False,
|
||||
isolation_level="SERIALIZABLE",
|
||||
connect_args={'check_same_thread': False},
|
||||
poolclass=StaticPool)
|
||||
with check_engine.begin() as connection:
|
||||
connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
|
||||
connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
|
||||
local_session = scoped_session(sessionmaker())
|
||||
local_session.configure(bind=connection)
|
||||
database_uuid = local_session().query(Library_Id).one_or_none()
|
||||
database_uuid = local_session().query(LibraryId).one_or_none()
|
||||
# local_session.dispose()
|
||||
|
||||
check_engine.connect()
|
||||
|
@ -597,7 +591,7 @@ class CalibreDB():
|
|||
cc = conn.execute(text("SELECT id, datatype FROM custom_columns"))
|
||||
cls.setup_db_cc_classes(cc)
|
||||
except OperationalError as e:
|
||||
log.debug_or_exception(e)
|
||||
log.error_or_exception(e)
|
||||
|
||||
cls.session_factory = scoped_session(sessionmaker(autocommit=False,
|
||||
autoflush=True,
|
||||
|
@ -644,12 +638,10 @@ class CalibreDB():
|
|||
# Language and content filters for displaying in the UI
|
||||
def common_filters(self, allow_show_archived=False, return_all_languages=False):
|
||||
if not allow_show_archived:
|
||||
archived_books = (
|
||||
ub.session.query(ub.ArchivedBook)
|
||||
.filter(ub.ArchivedBook.user_id == int(current_user.id))
|
||||
.filter(ub.ArchivedBook.is_archived == True)
|
||||
.all()
|
||||
)
|
||||
archived_books = (ub.session.query(ub.ArchivedBook)
|
||||
.filter(ub.ArchivedBook.user_id == int(current_user.id))
|
||||
.filter(ub.ArchivedBook.is_archived == True)
|
||||
.all())
|
||||
archived_book_ids = [archived_book.book_id for archived_book in archived_books]
|
||||
archived_filter = Books.id.notin_(archived_book_ids)
|
||||
else:
|
||||
|
@ -668,11 +660,11 @@ class CalibreDB():
|
|||
pos_cc_list = current_user.allowed_column_value.split(',')
|
||||
pos_content_cc_filter = true() if pos_cc_list == [''] else \
|
||||
getattr(Books, 'custom_column_' + str(self.config.config_restricted_column)). \
|
||||
any(cc_classes[self.config.config_restricted_column].value.in_(pos_cc_list))
|
||||
any(cc_classes[self.config.config_restricted_column].value.in_(pos_cc_list))
|
||||
neg_cc_list = current_user.denied_column_value.split(',')
|
||||
neg_content_cc_filter = false() if neg_cc_list == [''] else \
|
||||
getattr(Books, 'custom_column_' + str(self.config.config_restricted_column)). \
|
||||
any(cc_classes[self.config.config_restricted_column].value.in_(neg_cc_list))
|
||||
any(cc_classes[self.config.config_restricted_column].value.in_(neg_cc_list))
|
||||
except (KeyError, AttributeError):
|
||||
pos_content_cc_filter = false()
|
||||
neg_content_cc_filter = true()
|
||||
|
@ -729,7 +721,7 @@ class CalibreDB():
|
|||
query = (self.session.query(database, ub.ReadBook.read_status, ub.ArchivedBook.is_archived)
|
||||
.select_from(Books)
|
||||
.outerjoin(ub.ReadBook,
|
||||
and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == Books.id)))
|
||||
and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == Books.id)))
|
||||
else:
|
||||
try:
|
||||
read_column = cc_classes[config_read_column]
|
||||
|
@ -739,7 +731,7 @@ class CalibreDB():
|
|||
except (KeyError, AttributeError):
|
||||
log.error("Custom Column No.%d is not existing in calibre database", read_column)
|
||||
# Skip linking read column and return None instead of read status
|
||||
query =self.session.query(database, None, ub.ArchivedBook.is_archived)
|
||||
query = self.session.query(database, None, ub.ArchivedBook.is_archived)
|
||||
query = query.outerjoin(ub.ArchivedBook, and_(Books.id == ub.ArchivedBook.book_id,
|
||||
int(current_user.id) == ub.ArchivedBook.user_id))
|
||||
else:
|
||||
|
@ -770,13 +762,15 @@ class CalibreDB():
|
|||
len(query.all()))
|
||||
entries = query.order_by(*order).offset(off).limit(pagesize).all()
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
# display authors in right order
|
||||
entries = self.order_authors(entries, True, join_archive_read)
|
||||
return entries, randm, pagination
|
||||
|
||||
# Orders all Authors in the list according to authors sort
|
||||
def order_authors(self, entries, list_return=False, combined=False):
|
||||
# entries_copy = copy.deepcopy(entries)
|
||||
# entries_copy =[]
|
||||
for entry in entries:
|
||||
if combined:
|
||||
sort_authors = entry.Books.author_sort.split('&')
|
||||
|
@ -786,25 +780,30 @@ class CalibreDB():
|
|||
sort_authors = entry.author_sort.split('&')
|
||||
ids = [a.id for a in entry.authors]
|
||||
authors_ordered = list()
|
||||
error = False
|
||||
# error = False
|
||||
for auth in sort_authors:
|
||||
results = self.session.query(Authors).filter(Authors.sort == auth.lstrip().strip()).all()
|
||||
# ToDo: How to handle not found authorname
|
||||
# ToDo: How to handle not found author name
|
||||
if not len(results):
|
||||
error = True
|
||||
log.error("Author {} not found to display name in right order".format(auth.strip()))
|
||||
# error = True
|
||||
break
|
||||
for r in results:
|
||||
if r.id in ids:
|
||||
authors_ordered.append(r)
|
||||
if not error:
|
||||
ids.remove(r.id)
|
||||
for author_id in ids:
|
||||
result = self.session.query(Authors).filter(Authors.id == author_id).first()
|
||||
authors_ordered.append(result)
|
||||
|
||||
if list_return:
|
||||
if combined:
|
||||
entry.Books.authors = authors_ordered
|
||||
else:
|
||||
entry.authors = authors_ordered
|
||||
if list_return:
|
||||
return entries
|
||||
else:
|
||||
return authors_ordered
|
||||
entry.ordered_authors = authors_ordered
|
||||
else:
|
||||
return authors_ordered
|
||||
return entries
|
||||
|
||||
def get_typeahead(self, database, query, replace=('', ''), tag_filter=true()):
|
||||
query = query or ''
|
||||
|
@ -865,7 +864,7 @@ class CalibreDB():
|
|||
))
|
||||
|
||||
# read search results from calibre-database and return it (function is used for feed and simple search
|
||||
def get_search_results(self, term, offset=None, order=None, limit=None, allow_show_archived=False,
|
||||
def get_search_results(self, term, offset=None, order=None, limit=None,
|
||||
config_read_column=False, *join):
|
||||
order = order[0] if order else [Books.sort]
|
||||
pagination = None
|
||||
|
@ -908,7 +907,6 @@ class CalibreDB():
|
|||
lang.name = isoLanguages.get_language_name(get_locale(), lang.lang_code)
|
||||
return sorted(languages, key=lambda x: x.name, reverse=reverse_order)
|
||||
|
||||
|
||||
def update_title_sort(self, config, conn=None):
|
||||
# user defined sort function for calibre databases (Series, etc.)
|
||||
def _title_sort(title):
|
||||
|
@ -966,6 +964,6 @@ def lcase(s):
|
|||
try:
|
||||
return unidecode.unidecode(s.lower())
|
||||
except Exception as ex:
|
||||
log = logger.create()
|
||||
log.debug_or_exception(ex)
|
||||
_log = logger.create()
|
||||
_log.error_or_exception(ex)
|
||||
return s.lower()
|
||||
|
|
cps/editbooks.py (473 changed lines)
|
@ -31,7 +31,7 @@ from functools import wraps
|
|||
try:
|
||||
from lxml.html.clean import clean_html
|
||||
except ImportError:
|
||||
pass
|
||||
clean_html = None
|
||||
|
||||
from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response
|
||||
from flask_babel import gettext as _
|
||||
|
@ -48,7 +48,7 @@ from .usermanagement import login_required_if_no_ano
|
|||
from .kobo_sync_status import change_archived_books
|
||||
|
||||
|
||||
editbook = Blueprint('editbook', __name__)
|
||||
EditBook = Blueprint('edit-book', __name__)
|
||||
log = logger.create()
|
||||
|
||||
|
||||
|
@ -61,6 +61,7 @@ def upload_required(f):
|
|||
|
||||
return inner
|
||||
|
||||
|
||||
def edit_required(f):
|
||||
@wraps(f)
|
||||
def inner(*args, **kwargs):
|
||||
|
@ -70,6 +71,7 @@ def edit_required(f):
|
|||
|
||||
return inner
|
||||
|
||||
|
||||
def search_objects_remove(db_book_object, db_type, input_elements):
|
||||
del_elements = []
|
||||
for c_elements in db_book_object:
|
||||
|
@ -119,6 +121,7 @@ def remove_objects(db_book_object, db_session, del_elements):
|
|||
db_session.delete(del_element)
|
||||
return changed
|
||||
|
||||
|
||||
def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
|
||||
changed = False
|
||||
if db_type == 'languages':
|
||||
|
@ -128,7 +131,7 @@ def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
|
|||
else:
|
||||
db_filter = db_object.name
|
||||
for add_element in add_elements:
|
||||
# check if a element with that name exists
|
||||
# check if an element with that name exists
|
||||
db_element = db_session.query(db_object).filter(db_filter == add_element).first()
|
||||
# if no element is found add it
|
||||
if db_type == 'author':
|
||||
|
@ -147,7 +150,6 @@ def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
|
|||
db_book_object.append(new_element)
|
||||
else:
|
||||
db_element = create_objects_for_addition(db_element, add_element, db_type)
|
||||
changed = True
|
||||
# add element to book
|
||||
changed = True
|
||||
db_book_object.append(db_element)
|
||||
|
@ -178,7 +180,7 @@ def create_objects_for_addition(db_element, add_element, db_type):
|
|||
return db_element
|
||||
|
||||
|
||||
# Modifies different Database objects, first check if elements if elements have to be deleted,
|
||||
# Modifies different Database objects, first check if elements have to be deleted,
|
||||
# because they are no longer used, than check if elements have to be added to database
|
||||
def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
|
||||
# passing input_elements not as a list may lead to undesired results
|
||||
|
@ -207,7 +209,7 @@ def modify_identifiers(input_identifiers, db_identifiers, db_session):
|
|||
input_dict = dict([(identifier.type.lower(), identifier) for identifier in input_identifiers])
|
||||
if len(input_identifiers) != len(input_dict):
|
||||
error = True
|
||||
db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers ])
|
||||
db_dict = dict([(identifier.type.lower(), identifier) for identifier in db_identifiers])
|
||||
# delete db identifiers not present in input or modify them with input val
|
||||
for identifier_type, identifier in db_dict.items():
|
||||
if identifier_type not in input_dict.keys():
|
||||
|
@ -224,14 +226,15 @@ def modify_identifiers(input_identifiers, db_identifiers, db_session):
|
|||
changed = True
|
||||
return changed, error
|
||||
|
||||
@editbook.route("/ajax/delete/<int:book_id>", methods=["POST"])
|
||||
|
||||
@EditBook.route("/ajax/delete/<int:book_id>", methods=["POST"])
|
||||
@login_required
|
||||
def delete_book_from_details(book_id):
|
||||
return Response(delete_book_from_table(book_id, "", True), mimetype='application/json')
|
||||
|
||||
|
||||
@editbook.route("/delete/<int:book_id>", defaults={'book_format': ""}, methods=["POST"])
|
||||
@editbook.route("/delete/<int:book_id>/<string:book_format>", methods=["POST"])
|
||||
@EditBook.route("/delete/<int:book_id>", defaults={'book_format': ""}, methods=["POST"])
|
||||
@EditBook.route("/delete/<int:book_id>/<string:book_format>", methods=["POST"])
|
||||
@login_required
|
||||
def delete_book_ajax(book_id, book_format):
|
||||
return delete_book_from_table(book_id, book_format, False)
|
||||
|
@ -252,8 +255,8 @@ def delete_whole_book(book_id, book):
|
|||
modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
|
||||
modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
|
||||
|
||||
cc = calibre_db.session.query(db.Custom_Columns). \
|
||||
filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
cc = calibre_db.session.query(db.CustomColumns). \
|
||||
filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).all()
|
||||
for c in cc:
|
||||
cc_string = "custom_column_" + str(c.id)
|
||||
if not c.is_multiple:
|
||||
|
@ -283,18 +286,18 @@ def delete_whole_book(book_id, book):
|
|||
calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
|
||||
|
||||
|
||||
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
|
||||
def render_delete_book_result(book_format, json_response, warning, book_id):
|
||||
if book_format:
|
||||
if jsonResponse:
|
||||
return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
|
||||
if json_response:
|
||||
return json.dumps([warning, {"location": url_for("edit-book.edit_book", book_id=book_id),
|
||||
"type": "success",
|
||||
"format": book_format,
|
||||
"message": _('Book Format Successfully Deleted')}])
|
||||
else:
|
||||
flash(_('Book Format Successfully Deleted'), category="success")
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
return redirect(url_for('edit-book.edit_book', book_id=book_id))
|
||||
else:
|
||||
if jsonResponse:
|
||||
if json_response:
|
||||
return json.dumps([warning, {"location": url_for('web.index'),
|
||||
"type": "success",
|
||||
"format": book_format,
|
||||
|
@ -304,7 +307,7 @@ def render_delete_book_result(book_format, jsonResponse, warning, book_id):
|
|||
return redirect(url_for('web.index'))
|
||||
|
||||
|
||||
def delete_book_from_table(book_id, book_format, jsonResponse):
|
||||
def delete_book_from_table(book_id, book_format, json_response):
|
||||
warning = {}
|
||||
if current_user.role_delete_books():
|
||||
book = calibre_db.get_book(book_id)
|
||||
|
@ -312,20 +315,20 @@ def delete_book_from_table(book_id, book_format, jsonResponse):
|
|||
try:
|
||||
result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
|
||||
if not result:
|
||||
if jsonResponse:
|
||||
return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
|
||||
"type": "danger",
|
||||
"format": "",
|
||||
"message": error}])
|
||||
if json_response:
|
||||
return json.dumps([{"location": url_for("edit-book.edit_book", book_id=book_id),
|
||||
"type": "danger",
|
||||
"format": "",
|
||||
"message": error}])
|
||||
else:
|
||||
flash(error, category="error")
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
return redirect(url_for('edit-book.edit_book', book_id=book_id))
|
||||
if error:
|
||||
if jsonResponse:
|
||||
warning = {"location": url_for("editbook.edit_book", book_id=book_id),
|
||||
"type": "warning",
|
||||
"format": "",
|
||||
"message": error}
|
||||
if json_response:
|
||||
warning = {"location": url_for("edit-book.edit_book", book_id=book_id),
|
||||
"type": "warning",
|
||||
"format": "",
|
||||
"message": error}
|
||||
else:
|
||||
flash(error, category="warning")
|
||||
if not book_format:
|
||||
|
@ -337,37 +340,38 @@ def delete_book_from_table(book_id, book_format, jsonResponse):
|
|||
kobo_sync_status.remove_synced_book(book.id, True)
|
||||
calibre_db.session.commit()
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
calibre_db.session.rollback()
|
||||
if jsonResponse:
|
||||
return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
|
||||
if json_response:
|
||||
return json.dumps([{"location": url_for("edit-book.edit_book", book_id=book_id),
|
||||
"type": "danger",
|
||||
"format": "",
|
||||
"message": ex}])
|
||||
else:
|
||||
flash(str(ex), category="error")
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
return redirect(url_for('edit-book.edit_book', book_id=book_id))
|
||||
|
||||
else:
|
||||
# book not found
|
||||
log.error('Book with id "%s" could not be deleted: not found', book_id)
|
||||
return render_delete_book_result(book_format, jsonResponse, warning, book_id)
|
||||
return render_delete_book_result(book_format, json_response, warning, book_id)
|
||||
message = _("You are missing permissions to delete books")
|
||||
if jsonResponse:
|
||||
return json.dumps({"location": url_for("editbook.edit_book", book_id=book_id),
|
||||
if json_response:
|
||||
return json.dumps({"location": url_for("edit-book.edit_book", book_id=book_id),
|
||||
"type": "danger",
|
||||
"format": "",
|
||||
"message": message})
|
||||
else:
|
||||
flash(message, category="error")
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
return redirect(url_for('edit-book.edit_book', book_id=book_id))
|
||||
|
||||
|
||||
def render_edit_book(book_id):
|
||||
cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
cc = calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).all()
|
||||
book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
|
||||
if not book:
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
|
||||
category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
for lang in book.languages:
|
||||
|
@ -380,9 +384,9 @@ def render_edit_book(book_id):
|
|||
author_names.append(authr.name.replace('|', ','))
|
||||
|
||||
# Option for showing convertbook button
|
||||
valid_source_formats=list()
|
||||
valid_source_formats = list()
|
||||
allowed_conversion_formats = list()
|
||||
kepub_possible=None
|
||||
kepub_possible = None
|
||||
if config.config_converterpath:
|
||||
for file in book.data:
|
||||
if file.format.lower() in constants.EXTENSIONS_CONVERT_FROM:
|
||||
|
@ -430,6 +434,7 @@ def edit_book_ratings(to_save, book):
|
|||
changed = True
|
||||
return changed
|
||||
|
||||
|
||||
def edit_book_tags(tags, book):
|
||||
input_tags = tags.split(',')
|
||||
input_tags = list(map(lambda it: it.strip(), input_tags))
|
||||
|
@ -446,48 +451,48 @@ def edit_book_series(series, book):
|
|||
|
||||
def edit_book_series_index(series_index, book):
|
||||
# Add default series_index to book
|
||||
modif_date = False
|
||||
modify_date = False
|
||||
series_index = series_index or '1'
|
||||
if not series_index.replace('.', '', 1).isdigit():
|
||||
flash(_("%(seriesindex)s is not a valid number, skipping", seriesindex=series_index), category="warning")
|
||||
return False
|
||||
if str(book.series_index) != series_index:
|
||||
book.series_index = series_index
|
||||
modif_date = True
|
||||
return modif_date
|
||||
modify_date = True
|
||||
return modify_date
|
||||
|
||||
|
||||
# Handle book comments/description
|
||||
def edit_book_comments(comments, book):
|
||||
modif_date = False
|
||||
modify_date = False
|
||||
if comments:
|
||||
comments = clean_html(comments)
|
||||
if len(book.comments):
|
||||
if book.comments[0].text != comments:
|
||||
book.comments[0].text = comments
|
||||
modif_date = True
|
||||
modify_date = True
|
||||
else:
|
||||
if comments:
|
||||
book.comments.append(db.Comments(text=comments, book=book.id))
|
||||
modif_date = True
|
||||
return modif_date
|
||||
book.comments.append(db.Comments(comment=comments, book=book.id))
|
||||
modify_date = True
|
||||
return modify_date
|
||||
|
||||
|
||||
def edit_book_languages(languages, book, upload=False, invalid=None):
|
||||
def edit_book_languages(languages, book, upload_mode=False, invalid=None):
|
||||
input_languages = languages.split(',')
|
||||
unknown_languages = []
|
||||
if not upload:
|
||||
if not upload_mode:
|
||||
input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
|
||||
else:
|
||||
input_l = isoLanguages.get_valid_language_codes(get_locale(), input_languages, unknown_languages)
|
||||
for l in unknown_languages:
|
||||
log.error("'%s' is not a valid language", l)
|
||||
for lang in unknown_languages:
|
||||
log.error("'%s' is not a valid language", lang)
|
||||
if isinstance(invalid, list):
|
||||
invalid.append(l)
|
||||
invalid.append(lang)
|
||||
else:
|
||||
raise ValueError(_(u"'%(langname)s' is not a valid language", langname=l))
|
||||
raise ValueError(_(u"'%(langname)s' is not a valid language", langname=lang))
|
||||
# ToDo: Not working correct
|
||||
if upload and len(input_l) == 1:
|
||||
if upload_mode and len(input_l) == 1:
|
||||
# If the language of the file is excluded from the users view, it's not imported, to allow the user to view
|
||||
# the book it's language is set to the filter language
|
||||
if input_l[0] != current_user.filter_language() and current_user.filter_language() != "all":
|
||||
|
@ -571,17 +576,20 @@ def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
|
|||
getattr(book, cc_string).append(new_cc)
|
||||
return changed, to_save
|
||||
|
||||
|
||||
def edit_single_cc_data(book_id, book, column_id, to_save):
|
||||
cc = (calibre_db.session.query(db.Custom_Columns)
|
||||
.filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions))
|
||||
.filter(db.Custom_Columns.id == column_id)
|
||||
cc = (calibre_db.session.query(db.CustomColumns)
|
||||
.filter(db.CustomColumns.datatype.notin_(db.cc_exceptions))
|
||||
.filter(db.CustomColumns.id == column_id)
|
||||
.all())
|
||||
return edit_cc_data(book_id, book, to_save, cc)
|
||||
|
||||
|
||||
def edit_all_cc_data(book_id, book, to_save):
|
||||
cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
cc = calibre_db.session.query(db.CustomColumns).filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).all()
|
||||
return edit_cc_data(book_id, book, to_save, cc)
|
||||
|
||||
|
||||
def edit_cc_data(book_id, book, to_save, cc):
|
||||
changed = False
|
||||
for c in cc:
|
||||
|
@ -614,10 +622,11 @@ def edit_cc_data(book_id, book, to_save, cc):
|
|||
'custom')
|
||||
return changed
|
||||
|
||||
def upload_single_file(request, book, book_id):
|
||||
|
||||
def upload_single_file(file_request, book, book_id):
|
||||
# Check and handle Uploaded file
|
||||
if 'btn-upload-format' in request.files:
|
||||
requested_file = request.files['btn-upload-format']
|
||||
if 'btn-upload-format' in file_request.files:
|
||||
requested_file = file_request.files['btn-upload-format']
|
||||
# check for empty request
|
||||
if requested_file.filename != '':
|
||||
if not current_user.role_upload():
|
||||
|
@ -663,23 +672,23 @@ def upload_single_file(request, book, book_id):
|
|||
calibre_db.update_title_sort(config)
|
||||
except (OperationalError, IntegrityError) as e:
|
||||
calibre_db.session.rollback()
|
||||
log.error('Database error: %s', e)
|
||||
flash(_(u"Database error: %(error)s.", error=e), category="error")
|
||||
log.error_or_exception("Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
|
||||
# Queue uploader info
|
||||
link = '<a href="{}">{}</a>'.format(url_for('web.show_book', book_id=book.id), escape(book.title))
|
||||
uploadText=_(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=link)
|
||||
WorkerThread.add(current_user.name, TaskUpload(uploadText, escape(book.title)))
|
||||
upload_text = _(u"File format %(ext)s added to %(book)s", ext=file_ext.upper(), book=link)
|
||||
WorkerThread.add(current_user.name, TaskUpload(upload_text, escape(book.title)))
|
||||
|
||||
return uploader.process(
|
||||
saved_filename, *os.path.splitext(requested_file.filename),
|
||||
rarExecutable=config.config_rarfile_location)
|
||||
|
||||
|
||||
def upload_cover(request, book):
|
||||
if 'btn-upload-cover' in request.files:
|
||||
requested_file = request.files['btn-upload-cover']
|
||||
def upload_cover(cover_request, book):
|
||||
if 'btn-upload-cover' in cover_request.files:
|
||||
requested_file = cover_request.files['btn-upload-cover']
|
||||
# check for empty request
|
||||
if requested_file.filename != '':
|
||||
if not current_user.role_upload():
|
||||
|
@ -707,8 +716,8 @@ def handle_title_on_edit(book, book_title):
|
|||
|
||||
def handle_author_on_edit(book, author_name, update_stored=True):
|
||||
# handle author(s)
|
||||
# renamed = False
|
||||
input_authors = author_name.split('&')
|
||||
input_authors, renamed = prepare_authors(author_name)
|
||||
'''input_authors = author_name.split('&')
|
||||
input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
|
||||
# Remove duplicates in authors list
|
||||
input_authors = helper.uniq(input_authors)
|
||||
|
@ -726,7 +735,7 @@ def handle_author_on_edit(book, author_name, update_stored=True):
|
|||
sorted_renamed_author = helper.get_sorted_author(renamed_author.name)
|
||||
sorted_old_author = helper.get_sorted_author(in_aut)
|
||||
for one_book in all_books:
|
||||
one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author)
|
||||
one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author)'''
|
||||
|
||||
change = modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
|
||||
|
||||
|
@ -747,17 +756,17 @@ def handle_author_on_edit(book, author_name, update_stored=True):
|
|||
return input_authors, change, renamed
|
||||
|
||||
|
||||
@editbook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
|
||||
@EditBook.route("/admin/book/<int:book_id>", methods=['GET', 'POST'])
|
||||
@login_required_if_no_ano
|
||||
@edit_required
|
||||
def edit_book(book_id):
|
||||
modif_date = False
|
||||
modify_date = False
|
||||
|
||||
# create the function for sorting...
|
||||
try:
|
||||
calibre_db.update_title_sort(config)
|
||||
except sqliteOperationalError as e:
|
||||
log.debug_or_exception(e)
|
||||
log.error_or_exception(e)
|
||||
calibre_db.session.rollback()
|
||||
|
||||
# Show form
|
||||
|
@ -768,13 +777,14 @@ def edit_book(book_id):
|
|||
|
||||
# Book not found
|
||||
if not book:
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
|
||||
category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
meta = upload_single_file(request, book, book_id)
|
||||
if upload_cover(request, book) is True:
|
||||
book.has_cover = 1
|
||||
modif_date = True
|
||||
modify_date = True
|
||||
try:
|
||||
to_save = request.form.to_dict()
|
||||
merge_metadata(to_save, meta)
|
||||
|
@ -787,15 +797,15 @@ def edit_book(book_id):
|
|||
input_authors, authorchange, renamed = handle_author_on_edit(book, to_save["author_name"])
|
||||
if authorchange or title_change:
|
||||
edited_books_id = book.id
|
||||
modif_date = True
|
||||
modify_date = True
|
||||
|
||||
if config.config_use_google_drive:
|
||||
gdriveutils.updateGdriveCalibreFromLocal()
|
||||
|
||||
error = False
|
||||
error = ""
|
||||
if edited_books_id:
|
||||
error = helper.update_dir_structure(edited_books_id, config.config_calibre_dir, input_authors[0],
|
||||
renamed_author=renamed)
|
||||
renamed_author=renamed)
|
||||
|
||||
if not error:
|
||||
if "cover_url" in to_save:
|
||||
|
@ -809,33 +819,33 @@ def edit_book(book_id):
|
|||
result, error = helper.save_cover_from_url(to_save["cover_url"], book.path)
|
||||
if result is True:
|
||||
book.has_cover = 1
|
||||
modif_date = True
|
||||
modify_date = True
|
||||
helper.clear_cover_thumbnail_cache(book.id)
|
||||
else:
|
||||
flash(error, category="error")
|
||||
|
||||
# Add default series_index to book
|
||||
modif_date |= edit_book_series_index(to_save["series_index"], book)
|
||||
modify_date |= edit_book_series_index(to_save["series_index"], book)
|
||||
# Handle book comments/description
|
||||
modif_date |= edit_book_comments(Markup(to_save['description']).unescape(), book)
|
||||
modify_date |= edit_book_comments(Markup(to_save['description']).unescape(), book)
|
||||
# Handle identifiers
|
||||
input_identifiers = identifier_list(to_save, book)
|
||||
modification, warning = modify_identifiers(input_identifiers, book.identifiers, calibre_db.session)
|
||||
if warning:
|
||||
flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
|
||||
modif_date |= modification
|
||||
modify_date |= modification
|
||||
# Handle book tags
|
||||
modif_date |= edit_book_tags(to_save['tags'], book)
|
||||
modify_date |= edit_book_tags(to_save['tags'], book)
|
||||
# Handle book series
|
||||
modif_date |= edit_book_series(to_save["series"], book)
|
||||
modify_date |= edit_book_series(to_save["series"], book)
|
||||
# handle book publisher
|
||||
modif_date |= edit_book_publisher(to_save['publisher'], book)
|
||||
modify_date |= edit_book_publisher(to_save['publisher'], book)
|
||||
# handle book languages
|
||||
modif_date |= edit_book_languages(to_save['languages'], book)
|
||||
modify_date |= edit_book_languages(to_save['languages'], book)
|
||||
# handle book ratings
|
||||
modif_date |= edit_book_ratings(to_save, book)
|
||||
modify_date |= edit_book_ratings(to_save, book)
|
||||
# handle cc data
|
||||
modif_date |= edit_all_cc_data(book_id, book, to_save)
|
||||
modify_date |= edit_all_cc_data(book_id, book, to_save)
|
||||
|
||||
if to_save["pubdate"]:
|
||||
try:
|
||||
|
@ -845,7 +855,7 @@ def edit_book(book_id):
|
|||
else:
|
||||
book.pubdate = db.Books.DEFAULT_PUBDATE
|
||||
|
||||
if modif_date:
|
||||
if modify_date:
|
||||
book.last_modified = datetime.utcnow()
|
||||
kobo_sync_status.remove_synced_book(edited_books_id, all=True)
|
||||
|
||||
|
@ -866,8 +876,13 @@ def edit_book(book_id):
|
|||
calibre_db.session.rollback()
|
||||
flash(str(e), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
except (OperationalError, IntegrityError) as e:
|
||||
log.error_or_exception("Database error: {}".format(e))
|
||||
calibre_db.session.rollback()
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
calibre_db.session.rollback()
|
||||
flash(_("Error editing book, please check logfile for details"), category="error")
|
||||
return redirect(url_for('web.show_book', book_id=book.id))
|
||||
|
@ -902,14 +917,7 @@ def identifier_list(to_save, book):
|
|||
return result
|
||||
|
||||
|
||||
def prepare_authors_on_upload(title, authr):
|
||||
if title != _(u'Unknown') and authr != _(u'Unknown'):
|
||||
entry = calibre_db.check_exists_book(authr, title)
|
||||
if entry:
|
||||
log.info("Uploaded book probably exists in library")
|
||||
flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
|
||||
+ Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
|
||||
|
||||
def prepare_authors(authr):
|
||||
# handle authors
|
||||
input_authors = authr.split('&')
|
||||
# handle_authors(input_authors)
|
||||
|
@ -932,6 +940,18 @@ def prepare_authors_on_upload(title, authr):
|
|||
sorted_old_author = helper.get_sorted_author(in_aut)
|
||||
for one_book in all_books:
|
||||
one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author)
|
||||
return input_authors, renamed
|
||||
|
||||
|
||||
def prepare_authors_on_upload(title, authr):
|
||||
if title != _(u'Unknown') and authr != _(u'Unknown'):
|
||||
entry = calibre_db.check_exists_book(authr, title)
|
||||
if entry:
|
||||
log.info("Uploaded book probably exists in library")
|
||||
flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
|
||||
+ Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
|
||||
|
||||
input_authors, renamed = prepare_authors(authr)
|
||||
|
||||
sort_authors_list = list()
|
||||
db_author = None
|
||||
|
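prepare_authors() replaces the inline splitting shown in the removed block: split the raw string on '&', strip each name, turn commas into '|' and drop duplicates while keeping order. A rough standalone sketch of that normalization, without the author-rename handling:

    def normalize_authors(raw):
        """Split 'A & B & A' style input into a de-duplicated, cleaned list."""
        authors = [a.strip().replace(',', '|') for a in raw.split('&')]
        seen, result = set(), []
        for a in authors:
            a = " ".join(a.split())          # collapse inner whitespace, like helper.uniq()
            if a and a not in seen:
                seen.add(a)
                result.append(a)
        return result

    # e.g. normalize_authors("Doe, John & Jane Roe &  Doe, John ")
    # -> ['Doe| John', 'Jane Roe']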
@ -952,7 +972,7 @@ def prepare_authors_on_upload(title, authr):
|
|||
return sort_authors, input_authors, db_author, renamed
|
||||
|
||||
|
||||
def create_book_on_upload(modif_date, meta):
|
||||
def create_book_on_upload(modify_date, meta):
|
||||
title = meta.title
|
||||
authr = meta.author
|
||||
sort_authors, input_authors, db_author, renamed_authors = prepare_authors_on_upload(title, authr)
|
||||
|
@ -960,34 +980,34 @@ def create_book_on_upload(modif_date, meta):
|
|||
title_dir = helper.get_valid_filename(title, chars=96)
|
||||
author_dir = helper.get_valid_filename(db_author.name, chars=96)
|
||||
|
||||
# combine path and normalize path from windows systems
|
||||
# combine path and normalize path from Windows systems
|
||||
path = os.path.join(author_dir, title_dir).replace('\\', '/')
|
||||
|
||||
# Calibre adds books with utc as timezone
|
||||
db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
|
||||
'1', datetime.utcnow(), path, meta.cover, db_author, [], "")
|
||||
|
||||
modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
|
||||
'author')
|
||||
modify_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
|
||||
'author')
|
||||
|
||||
# Add series_index to book
|
||||
modif_date |= edit_book_series_index(meta.series_id, db_book)
|
||||
modify_date |= edit_book_series_index(meta.series_id, db_book)
|
||||
|
||||
# add languages
|
||||
invalid=[]
|
||||
modif_date |= edit_book_languages(meta.languages, db_book, upload=True, invalid=invalid)
|
||||
invalid = []
|
||||
modify_date |= edit_book_languages(meta.languages, db_book, upload_mode=True, invalid=invalid)
|
||||
if invalid:
|
||||
for l in invalid:
|
||||
flash(_(u"'%(langname)s' is not a valid language", langname=l), category="warning")
|
||||
for lang in invalid:
|
||||
flash(_(u"'%(langname)s' is not a valid language", langname=lang), category="warning")
|
||||
|
||||
# handle tags
|
||||
modif_date |= edit_book_tags(meta.tags, db_book)
|
||||
modify_date |= edit_book_tags(meta.tags, db_book)
|
||||
|
||||
# handle publisher
|
||||
modif_date |= edit_book_publisher(meta.publisher, db_book)
|
||||
modify_date |= edit_book_publisher(meta.publisher, db_book)
|
||||
|
||||
# handle series
|
||||
modif_date |= edit_book_series(meta.series, db_book)
|
||||
modify_date |= edit_book_series(meta.series, db_book)
|
||||
|
||||
# Add file to book
|
||||
file_size = os.path.getsize(meta.file_path)
|
||||
|
@ -999,6 +1019,7 @@ def create_book_on_upload(modif_date, meta):
|
|||
calibre_db.session.flush()
|
||||
return db_book, input_authors, title_dir, renamed_authors
|
||||
|
||||
|
||||
def file_handling_on_upload(requested_file):
|
||||
# check if file extension is correct
|
||||
if '.' in requested_file.filename:
|
||||
|
@ -1042,7 +1063,7 @@ def move_coverfile(meta, db_book):
|
|||
category="error")
|
||||
|
||||
|
||||
@editbook.route("/upload", methods=["POST"])
|
||||
@EditBook.route("/upload", methods=["POST"])
|
||||
@login_required_if_no_ano
|
||||
@upload_required
|
||||
def upload():
|
||||
|
@ -1051,7 +1072,7 @@ def upload():
|
|||
if request.method == 'POST' and 'btn-upload' in request.files:
|
||||
for requested_file in request.files.getlist("btn-upload"):
|
||||
try:
|
||||
modif_date = False
|
||||
modify_date = False
|
||||
# create the function for sorting...
|
||||
calibre_db.update_title_sort(config)
|
||||
calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
|
||||
|
@ -1060,10 +1081,10 @@ def upload():
|
|||
if error:
|
||||
return error
|
||||
|
||||
db_book, input_authors, title_dir, renamed_authors = create_book_on_upload(modif_date, meta)
|
||||
db_book, input_authors, title_dir, renamed_authors = create_book_on_upload(modify_date, meta)
|
||||
|
||||
# Comments needs book id therefore only possible after flush
|
||||
modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
|
||||
# Comments need book id therefore only possible after flush
|
||||
modify_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
|
||||
|
||||
book_id = db_book.id
|
||||
title = db_book.title
|
||||
|
@ -1093,24 +1114,24 @@ def upload():
|
|||
if error:
|
||||
flash(error, category="error")
|
||||
link = '<a href="{}">{}</a>'.format(url_for('web.show_book', book_id=book_id), escape(title))
|
||||
uploadText = _(u"File %(file)s uploaded", file=link)
|
||||
WorkerThread.add(current_user.name, TaskUpload(uploadText, escape(title)))
|
||||
upload_text = _(u"File %(file)s uploaded", file=link)
|
||||
WorkerThread.add(current_user.name, TaskUpload(upload_text, escape(title)))
|
||||
|
||||
if len(request.files.getlist("btn-upload")) < 2:
|
||||
if current_user.role_edit() or current_user.role_admin():
|
||||
resp = {"location": url_for('editbook.edit_book', book_id=book_id)}
|
||||
resp = {"location": url_for('edit-book.edit_book', book_id=book_id)}
|
||||
return Response(json.dumps(resp), mimetype='application/json')
|
||||
else:
|
||||
resp = {"location": url_for('web.show_book', book_id=book_id)}
|
||||
return Response(json.dumps(resp), mimetype='application/json')
|
||||
except (OperationalError, IntegrityError) as e:
|
||||
calibre_db.session.rollback()
|
||||
log.error("Database error: %s", e)
|
||||
flash(_(u"Database error: %(error)s.", error=e), category="error")
|
||||
log.error_or_exception("Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
|
||||
|
||||
|
||||
@editbook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
|
||||
@EditBook.route("/admin/book/convert/<int:book_id>", methods=['POST'])
|
||||
@login_required_if_no_ano
|
||||
@edit_required
|
||||
def convert_bookformat(book_id):
|
||||
|
@ -1120,7 +1141,7 @@ def convert_bookformat(book_id):
|
|||
|
||||
if (book_format_from is None) or (book_format_to is None):
|
||||
flash(_(u"Source or destination format for conversion missing"), category="error")
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
return redirect(url_for('edit-book.edit_book', book_id=book_id))
|
||||
|
||||
log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
|
||||
rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
|
||||
|
@ -1128,118 +1149,128 @@ def convert_bookformat(book_id):
|
|||
|
||||
if rtn is None:
|
||||
flash(_(u"Book successfully queued for converting to %(book_format)s",
|
||||
book_format=book_format_to),
|
||||
category="success")
|
||||
book_format=book_format_to),
|
||||
category="success")
|
||||
else:
|
||||
flash(_(u"There was an error converting this book: %(res)s", res=rtn), category="error")
|
||||
return redirect(url_for('editbook.edit_book', book_id=book_id))
|
||||
return redirect(url_for('edit-book.edit_book', book_id=book_id))
|
||||
|
||||
@editbook.route("/ajax/getcustomenum/<int:c_id>")
|
||||
|
||||
@EditBook.route("/ajax/getcustomenum/<int:c_id>")
|
||||
@login_required
|
||||
def table_get_custom_enum(c_id):
|
||||
ret = list()
|
||||
cc = (calibre_db.session.query(db.Custom_Columns)
|
||||
.filter(db.Custom_Columns.id == c_id)
|
||||
.filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).one_or_none())
|
||||
cc = (calibre_db.session.query(db.CustomColumns)
|
||||
.filter(db.CustomColumns.id == c_id)
|
||||
.filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).one_or_none())
|
||||
ret.append({'value': "", 'text': ""})
|
||||
for idx, en in enumerate(cc.get_display_dict()['enum_values']):
|
||||
ret.append({'value': en, 'text': en})
|
||||
return json.dumps(ret)
|
||||
|
||||
|
||||
@editbook.route("/ajax/editbooks/<param>", methods=['POST'])
|
||||
@EditBook.route("/ajax/editbooks/<param>", methods=['POST'])
|
||||
@login_required_if_no_ano
|
||||
@edit_required
|
||||
def edit_list_book(param):
|
||||
vals = request.form.to_dict()
|
||||
book = calibre_db.get_book(vals['pk'])
|
||||
sort_param = ""
|
||||
# ret = ""
|
||||
if param == 'series_index':
|
||||
edit_book_series_index(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json')
|
||||
elif param == 'tags':
|
||||
edit_book_tags(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'series':
|
||||
edit_book_series(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'publishers':
|
||||
edit_book_publisher(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True,
|
||||
'newValue': ', '.join([publisher.name for publisher in book.publishers])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'languages':
|
||||
invalid = list()
|
||||
edit_book_languages(vals['value'], book, invalid=invalid)
|
||||
if invalid:
|
||||
ret = Response(json.dumps({'success': False,
|
||||
'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}),
|
||||
mimetype='application/json')
|
||||
else:
|
||||
lang_names = list()
|
||||
for lang in book.languages:
|
||||
lang_names.append(isoLanguages.get_language_name(get_locale(), lang.lang_code))
|
||||
ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}),
|
||||
mimetype='application/json')
|
||||
elif param == 'author_sort':
|
||||
book.author_sort = vals['value']
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}),
|
||||
mimetype='application/json')
|
||||
elif param == 'title':
|
||||
sort = book.sort
|
||||
handle_title_on_edit(book, vals.get('value', ""))
|
||||
helper.update_dir_structure(book.id, config.config_calibre_dir)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.title}),
|
||||
mimetype='application/json')
|
||||
elif param == 'sort':
|
||||
book.sort = vals['value']
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.sort}),
|
||||
mimetype='application/json')
|
||||
elif param == 'comments':
|
||||
edit_book_comments(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.comments[0].text}),
|
||||
mimetype='application/json')
|
||||
elif param == 'authors':
|
||||
input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
|
||||
helper.update_dir_structure(book.id, config.config_calibre_dir, input_authors[0], renamed_author=renamed)
|
||||
ret = Response(json.dumps({'success': True,
|
||||
'newValue': ' & '.join([author.replace('|',',') for author in input_authors])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'is_archived':
|
||||
change_archived_books(book.id, vals['value'] == "True")
|
||||
ret = ""
|
||||
elif param == 'read_status':
|
||||
ret = helper.edit_book_read_status(book.id, vals['value'] == "True")
|
||||
if ret:
|
||||
return ret, 400
|
||||
elif param.startswith("custom_column_"):
|
||||
new_val = dict()
|
||||
new_val[param] = vals['value']
|
||||
edit_single_cc_data(book.id, book, param[14:], new_val)
|
||||
# ToDo: Very hacky find better solution
|
||||
if vals['value'] in ["True", "False"]:
|
||||
ret = ""
|
||||
else:
|
||||
ret = Response(json.dumps({'success': True, 'newValue': vals['value']}),
|
||||
mimetype='application/json')
|
||||
else:
|
||||
return _("Parameter not found"), 400
|
||||
book.last_modified = datetime.utcnow()
|
||||
try:
|
||||
if param == 'series_index':
|
||||
edit_book_series_index(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.series_index}), mimetype='application/json')
|
||||
elif param == 'tags':
|
||||
edit_book_tags(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': ', '.join([tag.name for tag in book.tags])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'series':
|
||||
edit_book_series(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': ', '.join([serie.name for serie in book.series])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'publishers':
|
||||
edit_book_publisher(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True,
|
||||
'newValue': ', '.join([publisher.name for publisher in book.publishers])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'languages':
|
||||
invalid = list()
|
||||
edit_book_languages(vals['value'], book, invalid=invalid)
|
||||
if invalid:
|
||||
ret = Response(json.dumps({'success': False,
|
||||
'msg': 'Invalid languages in request: {}'.format(','.join(invalid))}),
|
||||
mimetype='application/json')
|
||||
else:
|
||||
lang_names = list()
|
||||
for lang in book.languages:
|
||||
lang_names.append(isoLanguages.get_language_name(get_locale(), lang.lang_code))
|
||||
ret = Response(json.dumps({'success': True, 'newValue': ', '.join(lang_names)}),
|
||||
mimetype='application/json')
|
||||
elif param == 'author_sort':
|
||||
book.author_sort = vals['value']
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.author_sort}),
|
||||
mimetype='application/json')
|
||||
elif param == 'title':
|
||||
sort_param = book.sort
|
||||
handle_title_on_edit(book, vals.get('value', ""))
|
||||
helper.update_dir_structure(book.id, config.config_calibre_dir)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.title}),
|
||||
mimetype='application/json')
|
||||
elif param == 'sort':
|
||||
book.sort = vals['value']
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.sort}),
|
||||
mimetype='application/json')
|
||||
elif param == 'comments':
|
||||
edit_book_comments(vals['value'], book)
|
||||
ret = Response(json.dumps({'success': True, 'newValue': book.comments[0].text}),
|
||||
mimetype='application/json')
|
||||
elif param == 'authors':
|
||||
input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
|
||||
helper.update_dir_structure(book.id, config.config_calibre_dir, input_authors[0], renamed_author=renamed)
|
||||
ret = Response(json.dumps({
|
||||
'success': True,
|
||||
'newValue': ' & '.join([author.replace('|', ',') for author in input_authors])}),
|
||||
mimetype='application/json')
|
||||
elif param == 'is_archived':
|
||||
is_archived = change_archived_books(book.id, vals['value'] == "True",
|
||||
message="Book {} archive bit set to: {}".format(book.id, vals['value']))
|
||||
if is_archived:
|
||||
kobo_sync_status.remove_synced_book(book.id)
|
||||
return ""
|
||||
elif param == 'read_status':
|
||||
ret = helper.edit_book_read_status(book.id, vals['value'] == "True")
|
||||
if ret:
|
||||
return ret, 400
|
||||
elif param.startswith("custom_column_"):
|
||||
new_val = dict()
|
||||
new_val[param] = vals['value']
|
||||
edit_single_cc_data(book.id, book, param[14:], new_val)
|
||||
# ToDo: Very hacky find better solution
|
||||
if vals['value'] in ["True", "False"]:
|
||||
ret = ""
|
||||
else:
|
||||
ret = Response(json.dumps({'success': True, 'newValue': vals['value']}),
|
||||
mimetype='application/json')
|
||||
else:
|
||||
return _("Parameter not found"), 400
|
||||
book.last_modified = datetime.utcnow()
|
||||
|
||||
calibre_db.session.commit()
|
||||
# revert change for sort if automatic fields link is deactivated
|
||||
if param == 'title' and vals.get('checkT') == "false":
|
||||
book.sort = sort
|
||||
book.sort = sort_param
|
||||
calibre_db.session.commit()
|
||||
except (OperationalError, IntegrityError) as e:
|
||||
calibre_db.session.rollback()
|
||||
log.error("Database error: %s", e)
|
||||
log.error_or_exception("Database error: {}".format(e))
|
||||
ret = Response(json.dumps({'success': False,
|
||||
'msg': 'Database error: {}'.format(e.orig)}),
|
||||
mimetype='application/json')
|
||||
return ret
|
||||
|
||||
|
||||
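The refactor above moves every per-field branch inside one try/except so a failing commit rolls back and the client still receives a JSON answer. A compressed sketch of that shape; app, session, Book and apply_field_edit are placeholders, not names from this change:

    import json
    from datetime import datetime
    from flask import Response, request
    from sqlalchemy.exc import IntegrityError, OperationalError

    @app.route("/ajax/edit/<param>", methods=["POST"])        # hypothetical route
    def edit_field(param):
        book = session.query(Book).get(request.form["pk"])    # assumed ORM session/model
        try:
            new_value = request.form.get("value", "")
            apply_field_edit(book, param, new_value)           # may raise on bad input
            book.last_modified = datetime.utcnow()
            session.commit()
            payload = {"success": True, "newValue": new_value}
        except (OperationalError, IntegrityError) as e:
            session.rollback()
            payload = {"success": False, "msg": "Database error: {}".format(e.orig)}
        return Response(json.dumps(payload), mimetype="application/json")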
@editbook.route("/ajax/sort_value/<field>/<int:bookid>")
|
||||
@EditBook.route("/ajax/sort_value/<field>/<int:bookid>")
|
||||
@login_required
|
||||
def get_sorted_entry(field, bookid):
|
||||
if field in ['title', 'authors', 'sort', 'author_sort']:
|
||||
|
@ -1256,7 +1287,7 @@ def get_sorted_entry(field, bookid):
|
|||
return ""
|
||||
|
||||
|
||||
@editbook.route("/ajax/simulatemerge", methods=['POST'])
|
||||
@EditBook.route("/ajax/simulatemerge", methods=['POST'])
|
||||
@login_required
|
||||
@edit_required
|
||||
def simulate_merge_list_book():
|
||||
|
@ -1265,14 +1296,14 @@ def simulate_merge_list_book():
|
|||
to_book = calibre_db.get_book(vals[0]).title
|
||||
vals.pop(0)
|
||||
if to_book:
|
||||
from_book = []
|
||||
for book_id in vals:
|
||||
from_book = []
|
||||
from_book.append(calibre_db.get_book(book_id).title)
|
||||
return json.dumps({'to': to_book, 'from': from_book})
|
||||
return ""
|
||||
|
||||
|
||||
@editbook.route("/ajax/mergebooks", methods=['POST'])
|
||||
@EditBook.route("/ajax/mergebooks", methods=['POST'])
|
||||
@login_required
|
||||
@edit_required
|
||||
def merge_list_book():
|
||||
|
@ -1285,8 +1316,9 @@ def merge_list_book():
|
|||
if to_book:
|
||||
for file in to_book.data:
|
||||
to_file.append(file.format)
|
||||
to_name = helper.get_valid_filename(to_book.title, chars=96) + ' - ' + \
|
||||
helper.get_valid_filename(to_book.authors[0].name, chars=96)
|
||||
to_name = helper.get_valid_filename(to_book.title,
|
||||
chars=96) + ' - ' + helper.get_valid_filename(to_book.authors[0].name,
|
||||
chars=96)
|
||||
for book_id in vals:
|
||||
from_book = calibre_db.get_book(book_id)
|
||||
if from_book:
|
||||
|
@ -1304,19 +1336,20 @@ def merge_list_book():
|
|||
element.format,
|
||||
element.uncompressed_size,
|
||||
to_name))
|
||||
delete_book_from_table(from_book.id,"", True)
|
||||
delete_book_from_table(from_book.id, "", True)
|
||||
return json.dumps({'success': True})
|
||||
return ""
|
||||
|
||||
|
||||
@editbook.route("/ajax/xchange", methods=['POST'])
|
||||
@EditBook.route("/ajax/xchange", methods=['POST'])
|
||||
@login_required
|
||||
@edit_required
|
||||
def table_xchange_author_title():
|
||||
vals = request.get_json().get('xchange')
|
||||
edited_books_id = False
|
||||
if vals:
|
||||
for val in vals:
|
||||
modif_date = False
|
||||
modify_date = False
|
||||
book = calibre_db.get_book(val)
|
||||
authors = book.title
|
||||
book.authors = calibre_db.order_authors([book])
|
||||
|
@ -1328,21 +1361,21 @@ def table_xchange_author_title():
|
|||
input_authors, authorchange, renamed = handle_author_on_edit(book, authors)
|
||||
if authorchange or title_change:
|
||||
edited_books_id = book.id
|
||||
modif_date = True
|
||||
modify_date = True
|
||||
|
||||
if config.config_use_google_drive:
|
||||
gdriveutils.updateGdriveCalibreFromLocal()
|
||||
|
||||
if edited_books_id:
|
||||
helper.update_dir_structure(edited_books_id, config.config_calibre_dir, input_authors[0],
|
||||
renamed_author=renamed)
|
||||
if modif_date:
|
||||
renamed_author=renamed)
|
||||
if modify_date:
|
||||
book.last_modified = datetime.utcnow()
|
||||
try:
|
||||
calibre_db.session.commit()
|
||||
except (OperationalError, IntegrityError) as e:
|
||||
calibre_db.session.rollback()
|
||||
log.error("Database error: %s", e)
|
||||
log.error_or_exception("Database error: %s", e)
|
||||
return json.dumps({'success': False})
|
||||
|
||||
if config.config_use_google_drive:
|
||||
|
cps/epub.py
@ -20,23 +20,26 @@ import os
|
|||
import zipfile
|
||||
from lxml import etree
|
||||
|
||||
from . import isoLanguages
|
||||
from . import isoLanguages, cover
|
||||
from .helper import split_authors
|
||||
from .constants import BookMeta
|
||||
|
||||
|
||||
def extract_cover(zip_file, cover_file, cover_path, tmp_file_name):
|
||||
def _extract_cover(zip_file, cover_file, cover_path, tmp_file_name):
|
||||
if cover_file is None:
|
||||
return None
|
||||
else:
|
||||
cf = extension = None
|
||||
zip_cover_path = os.path.join(cover_path, cover_file).replace('\\', '/')
|
||||
cf = zip_file.read(zip_cover_path)
|
||||
|
||||
prefix = os.path.splitext(tmp_file_name)[0]
|
||||
tmp_cover_name = prefix + '.' + os.path.basename(zip_cover_path)
|
||||
image = open(tmp_cover_name, 'wb')
|
||||
image.write(cf)
|
||||
image.close()
|
||||
return tmp_cover_name
|
||||
ext = os.path.splitext(tmp_cover_name)
|
||||
if len(ext) > 1:
|
||||
extension = ext[1].lower()
|
||||
if extension in cover.COVER_EXTENSIONS:
|
||||
cf = zip_file.read(zip_cover_path)
|
||||
return cover.cover_processing(tmp_file_name, cf, extension)
|
||||
|
||||
|
||||
def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
||||
|
@ -50,11 +53,11 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
|||
|
||||
txt = epub_zip.read('META-INF/container.xml')
|
||||
tree = etree.fromstring(txt)
|
||||
cfname = tree.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
|
||||
cf = epub_zip.read(cfname)
|
||||
cf_name = tree.xpath('n:rootfiles/n:rootfile/@full-path', namespaces=ns)[0]
|
||||
cf = epub_zip.read(cf_name)
|
||||
tree = etree.fromstring(cf)
|
||||
|
||||
coverpath = os.path.dirname(cfname)
|
||||
cover_path = os.path.dirname(cf_name)
|
||||
|
||||
p = tree.xpath('/pkg:package/pkg:metadata', namespaces=ns)[0]
|
||||
|
||||
|
@ -70,9 +73,9 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
|||
else:
|
||||
epub_metadata[s] = tmp[0]
|
||||
else:
|
||||
epub_metadata[s] = u'Unknown'
|
||||
epub_metadata[s] = 'Unknown'
|
||||
|
||||
if epub_metadata['subject'] == u'Unknown':
|
||||
if epub_metadata['subject'] == 'Unknown':
|
||||
epub_metadata['subject'] = ''
|
||||
|
||||
if epub_metadata['description'] == u'Unknown':
|
||||
|
@ -87,7 +90,7 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
|||
|
||||
epub_metadata = parse_epub_series(ns, tree, epub_metadata)
|
||||
|
||||
cover_file = parse_epub_cover(ns, tree, epub_zip, coverpath, tmp_file_path)
|
||||
cover_file = parse_epub_cover(ns, tree, epub_zip, cover_path, tmp_file_path)
|
||||
|
||||
if not epub_metadata['title']:
|
||||
title = original_file_name
|
||||
|
@ -111,9 +114,12 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
|
|||
def parse_epub_cover(ns, tree, epub_zip, cover_path, tmp_file_path):
|
||||
cover_section = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='cover-image']/@href", namespaces=ns)
|
||||
cover_file = None
|
||||
if len(cover_section) > 0:
|
||||
cover_file = extract_cover(epub_zip, cover_section[0], cover_path, tmp_file_path)
|
||||
else:
|
||||
# if len(cover_section) > 0:
|
||||
for cs in cover_section:
|
||||
cover_file = _extract_cover(epub_zip, cs, cover_path, tmp_file_path)
|
||||
if cover_file:
|
||||
break
|
||||
if not cover_file:
|
||||
meta_cover = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='cover']/@content", namespaces=ns)
|
||||
if len(meta_cover) > 0:
|
||||
cover_section = tree.xpath(
|
||||
|
@ -123,10 +129,10 @@ def parse_epub_cover(ns, tree, epub_zip, cover_path, tmp_file_path):
|
|||
"/pkg:package/pkg:manifest/pkg:item[@properties='" + meta_cover[0] + "']/@href", namespaces=ns)
|
||||
else:
|
||||
cover_section = tree.xpath("/pkg:package/pkg:guide/pkg:reference/@href", namespaces=ns)
|
||||
if len(cover_section) > 0:
|
||||
filetype = cover_section[0].rsplit('.', 1)[-1]
|
||||
for cs in cover_section:
|
||||
filetype = cs.rsplit('.', 1)[-1]
|
||||
if filetype == "xhtml" or filetype == "html": # if cover is (x)html format
|
||||
markup = epub_zip.read(os.path.join(cover_path, cover_section[0]))
|
||||
markup = epub_zip.read(os.path.join(cover_path, cs))
|
||||
markup_tree = etree.fromstring(markup)
|
||||
# no matter xhtml or html with no namespace
|
||||
img_src = markup_tree.xpath("//*[local-name() = 'img']/@src")
|
||||
|
@ -137,9 +143,10 @@ def parse_epub_cover(ns, tree, epub_zip, cover_path, tmp_file_path):
|
|||
# img_src may start with "../" so fullpath join then relpath to cwd
|
||||
filename = os.path.relpath(os.path.join(os.path.dirname(os.path.join(cover_path, cover_section[0])),
|
||||
img_src[0]))
|
||||
cover_file = extract_cover(epub_zip, filename, "", tmp_file_path)
|
||||
cover_file = _extract_cover(epub_zip, filename, "", tmp_file_path)
|
||||
else:
|
||||
cover_file = extract_cover(epub_zip, cover_section[0], cover_path, tmp_file_path)
|
||||
cover_file = _extract_cover(epub_zip, cs, cover_path, tmp_file_path)
|
||||
if cover_file: break
|
||||
return cover_file
|
||||
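The renamed _extract_cover() now refuses to read the zip entry unless its extension is in cover.COVER_EXTENSIONS and hands the bytes to cover.cover_processing(). A self-contained sketch of the same guard; the extension whitelist and the plain file output used here are assumptions:

    import os
    import zipfile

    COVER_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.webp', '.bmp'}   # assumed whitelist

    def extract_epub_cover(epub_path, cover_href, out_path):
        """Copy the cover entry of an EPUB to out_path if its extension looks like an image."""
        ext = os.path.splitext(cover_href)[1].lower()
        if ext not in COVER_EXTENSIONS:
            return None
        with zipfile.ZipFile(epub_path) as zf:
            data = zf.read(cover_href.replace('\\', '/'))
        with open(out_path, 'wb') as fh:
            fh.write(data)
        return out_path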
|
||||
|
||||
|
|
|
@ -152,7 +152,7 @@ try:
|
|||
move(os.path.join(tmp_dir, "tmp_metadata.db"), dbpath)
|
||||
calibre_db.reconnect_db(config, ub.app_DB_path)
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
return ''
|
||||
except AttributeError:
|
||||
pass
|
||||
|
|
|
@ -215,7 +215,7 @@ def getDrive(drive=None, gauth=None):
|
|||
except RefreshError as e:
|
||||
log.error("Google Drive error: %s", e)
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
else:
|
||||
# Initialize the saved creds
|
||||
gauth.Authorize()
|
||||
|
cps/helper.py
@ -23,11 +23,10 @@ import mimetypes
|
|||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import unicodedata
|
||||
from datetime import datetime, timedelta
|
||||
from tempfile import gettempdir
|
||||
from urllib.parse import urlparse
|
||||
import requests
|
||||
import unidecode
|
||||
|
||||
from babel.dates import format_datetime
|
||||
from babel.units import format_unit
|
||||
|
@ -41,11 +40,15 @@ from werkzeug.security import generate_password_hash
|
|||
from markupsafe import escape
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
try:
|
||||
import unidecode
|
||||
use_unidecode = True
|
||||
import advocate
|
||||
from advocate.exceptions import UnacceptableAddressException
|
||||
use_advocate = True
|
||||
except ImportError:
|
||||
use_unidecode = False
|
||||
use_advocate = False
|
||||
advocate = requests
|
||||
UnacceptableAddressException = MissingSchema = BaseException
|
||||
|
||||
from . import calibre_db, cli
|
||||
from .tasks.convert import TaskConvert
|
||||
|
@ -145,7 +148,7 @@ def check_send_to_kindle_with_converter(formats):
|
|||
'text': _('Convert %(orig)s to %(format)s and send to Kindle',
|
||||
orig='Epub',
|
||||
format='Mobi')})
|
||||
if 'AZW3' in formats and not 'MOBI' in formats:
|
||||
if 'AZW3' in formats and 'MOBI' not in formats:
|
||||
bookformats.append({'format': 'Mobi',
|
||||
'convert': 2,
|
||||
'text': _('Convert %(orig)s to %(format)s and send to Kindle',
|
||||
|
@ -187,11 +190,11 @@ def check_send_to_kindle(entry):
|
|||
# Check if a reader exists for any of the book formats; if not, return an empty list, otherwise return
|
||||
# list with supported formats
|
||||
def check_read_formats(entry):
|
||||
EXTENSIONS_READER = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU'}
|
||||
extensions_reader = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU'}
|
||||
bookformats = list()
|
||||
if len(entry.data):
|
||||
for ele in iter(entry.data):
|
||||
if ele.format.upper() in EXTENSIONS_READER:
|
||||
if ele.format.upper() in extensions_reader:
|
||||
bookformats.append(ele.format.lower())
|
||||
return bookformats
|
||||
|
||||
|
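check_read_formats() is essentially a set-membership test on the upper-cased format names. A quick usage sketch of the same logic on plain strings:

    extensions_reader = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU'}

    def readable_formats(formats):
        """Return the lower-cased formats a built-in reader exists for."""
        return [f.lower() for f in formats if f.upper() in extensions_reader]

    # e.g. readable_formats(['EPUB', 'MOBI', 'pdf']) -> ['epub', 'pdf']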
@ -215,10 +218,10 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
|
|||
if entry.format.upper() == book_format.upper():
|
||||
converted_file_name = entry.name + '.' + book_format.lower()
|
||||
link = '<a href="{}">{}</a>'.format(url_for('web.show_book', book_id=book_id), escape(book.title))
|
||||
EmailText = _(u"%(book)s send to Kindle", book=link)
|
||||
email_text = _(u"%(book)s send to Kindle", book=link)
|
||||
WorkerThread.add(user_id, TaskEmail(_(u"Send to Kindle"), book.path, converted_file_name,
|
||||
config.get_mail_settings(), kindle_mail,
|
||||
EmailText, _(u'This e-mail has been sent via Calibre-Web.')))
|
||||
email_text, _(u'This e-mail has been sent via Calibre-Web.')))
|
||||
return
|
||||
return _(u"The requested file could not be read. Maybe wrong permissions?")
|
||||
|
||||
|
@ -231,15 +234,8 @@ def get_valid_filename(value, replace_whitespace=True, chars=128):
|
|||
if value[-1:] == u'.':
|
||||
value = value[:-1]+u'_'
|
||||
value = value.replace("/", "_").replace(":", "_").strip('\0')
|
||||
if use_unidecode:
|
||||
if config.config_unicode_filename:
|
||||
value = (unidecode.unidecode(value))
|
||||
else:
|
||||
value = value.replace(u'§', u'SS')
|
||||
value = value.replace(u'ß', u'ss')
|
||||
value = unicodedata.normalize('NFKD', value)
|
||||
re_slugify = re.compile(r'[\W\s-]', re.UNICODE)
|
||||
value = re_slugify.sub('', value)
|
||||
if config.config_unicode_filename:
|
||||
value = (unidecode.unidecode(value))
|
||||
if replace_whitespace:
|
||||
# *+:\"/<>? are replaced by _
|
||||
value = re.sub(r'[*+:\\\"/<>?]+', u'_', value, flags=re.U)
|
||||
|
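With unidecode now a hard dependency, the old NFKD/slugify fallback is gone and transliteration runs only when config_unicode_filename is enabled. A condensed sketch of the resulting flow; the config flag is passed as a plain argument here and the final truncation to chars is an assumption:

    import re
    import unidecode

    def valid_filename(value, transliterate=False, replace_whitespace=True, chars=128):
        """Condensed sketch of get_valid_filename() after this change (not the exact code)."""
        if value.endswith('.'):
            value = value[:-1] + '_'
        value = value.replace('/', '_').replace(':', '_').strip('\0')
        if transliterate:                      # config.config_unicode_filename in the real code
            value = unidecode.unidecode(value)
        if replace_whitespace:
            # *+:\"/<>? are replaced by _
            value = re.sub(r'[*+:\\\"/<>?]+', '_', value, flags=re.U)
        return value[:chars].strip()           # truncation to `chars` is assumed here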
@ -268,6 +264,7 @@ def split_authors(values):
|
|||
|
||||
|
||||
def get_sorted_author(value):
|
||||
value2 = None
|
||||
try:
|
||||
if ',' not in value:
|
||||
regexes = [r"^(JR|SR)\.?$", r"^I{1,3}\.?$", r"^IV\.?$"]
|
||||
|
@ -292,6 +289,7 @@ def get_sorted_author(value):
|
|||
value2 = value
|
||||
return value2
|
||||
|
||||
|
||||
def edit_book_read_status(book_id, read_status=None):
|
||||
if not config.config_read_column:
|
||||
book = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(current_user.id),
|
||||
|
@ -305,9 +303,9 @@ def edit_book_read_status(book_id, read_status=None):
|
|||
else:
|
||||
book.read_status = ub.ReadBook.STATUS_FINISHED if read_status else ub.ReadBook.STATUS_UNREAD
|
||||
else:
|
||||
readBook = ub.ReadBook(user_id=current_user.id, book_id = book_id)
|
||||
readBook.read_status = ub.ReadBook.STATUS_FINISHED
|
||||
book = readBook
|
||||
read_book = ub.ReadBook(user_id=current_user.id, book_id=book_id)
|
||||
read_book.read_status = ub.ReadBook.STATUS_FINISHED
|
||||
book = read_book
|
||||
if not book.kobo_reading_state:
|
||||
kobo_reading_state = ub.KoboReadingState(user_id=current_user.id, book_id=book_id)
|
||||
kobo_reading_state.current_bookmark = ub.KoboBookmark()
|
||||
|
@ -334,12 +332,13 @@ def edit_book_read_status(book_id, read_status=None):
|
|||
except (KeyError, AttributeError):
|
||||
log.error(u"Custom Column No.%d is not existing in calibre database", config.config_read_column)
|
||||
return "Custom Column No.{} is not existing in calibre database".format(config.config_read_column)
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
except (OperationalError, InvalidRequestError) as ex:
|
||||
calibre_db.session.rollback()
|
||||
log.error(u"Read status could not set: {}".format(e))
|
||||
return "Read status could not set: {}".format(e), 400
|
||||
log.error(u"Read status could not set: {}".format(ex))
|
||||
return _("Read status could not set: {}".format(ex.orig))
|
||||
return ""
|
||||
|
||||
|
||||
# Deletes a book from the local filestorage, returns True if deleting is successful, otherwise false
|
||||
def delete_book_file(book, calibrepath, book_format=None):
|
||||
# check that path is 2 elements deep, check that target path has no subfolders
|
||||
|
@ -363,15 +362,15 @@ def delete_book_file(book, calibrepath, book_format=None):
|
|||
id=book.id,
|
||||
path=book.path)
|
||||
shutil.rmtree(path)
|
||||
except (IOError, OSError) as e:
|
||||
log.error("Deleting book %s failed: %s", book.id, e)
|
||||
return False, _("Deleting book %(id)s failed: %(message)s", id=book.id, message=e)
|
||||
except (IOError, OSError) as ex:
|
||||
log.error("Deleting book %s failed: %s", book.id, ex)
|
||||
return False, _("Deleting book %(id)s failed: %(message)s", id=book.id, message=ex)
|
||||
authorpath = os.path.join(calibrepath, os.path.split(book.path)[0])
|
||||
if not os.listdir(authorpath):
|
||||
try:
|
||||
shutil.rmtree(authorpath)
|
||||
except (IOError, OSError) as e:
|
||||
log.error("Deleting authorpath for book %s failed: %s", book.id, e)
|
||||
except (IOError, OSError) as ex:
|
||||
log.error("Deleting authorpath for book %s failed: %s", book.id, ex)
|
||||
return True, None
|
||||
|
||||
log.error("Deleting book %s from database only, book path in database not valid: %s",
|
||||
|
@ -397,21 +396,21 @@ def clean_author_database(renamed_author, calibre_path="", local_book=None, gdri
|
|||
all_titledir = book.path.split('/')[1]
|
||||
all_new_path = os.path.join(calibre_path, all_new_authordir, all_titledir)
|
||||
all_new_name = get_valid_filename(book.title, chars=42) + ' - ' \
|
||||
+ get_valid_filename(new_author.name, chars=42)
|
||||
+ get_valid_filename(new_author.name, chars=42)
|
||||
# change location in database to new author/title path
|
||||
book.path = os.path.join(all_new_authordir, all_titledir).replace('\\', '/')
|
||||
for file_format in book.data:
|
||||
if not gdrive:
|
||||
shutil.move(os.path.normcase(os.path.join(all_new_path,
|
||||
file_format.name + '.' + file_format.format.lower())),
|
||||
os.path.normcase(os.path.join(all_new_path,
|
||||
all_new_name + '.' + file_format.format.lower())))
|
||||
os.path.normcase(os.path.join(all_new_path,
|
||||
all_new_name + '.' + file_format.format.lower())))
|
||||
else:
|
||||
gFile = gd.getFileFromEbooksFolder(all_new_path,
|
||||
file_format.name + '.' + file_format.format.lower())
|
||||
if gFile:
|
||||
gd.moveGdriveFileRemote(gFile, all_new_name + u'.' + file_format.format.lower())
|
||||
gd.updateDatabaseOnEdit(gFile['id'], all_new_name + u'.' + file_format.format.lower())
|
||||
g_file = gd.getFileFromEbooksFolder(all_new_path,
|
||||
file_format.name + '.' + file_format.format.lower())
|
||||
if g_file:
|
||||
gd.moveGdriveFileRemote(g_file, all_new_name + u'.' + file_format.format.lower())
|
||||
gd.updateDatabaseOnEdit(g_file['id'], all_new_name + u'.' + file_format.format.lower())
|
||||
else:
|
||||
log.error("File {} not found on gdrive"
|
||||
.format(all_new_path, file_format.name + '.' + file_format.format.lower()))
|
||||
|
@ -428,16 +427,16 @@ def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=
|
|||
old_author_dir = get_valid_filename(r, chars=96)
|
||||
new_author_rename_dir = get_valid_filename(new_author.name, chars=96)
|
||||
if gdrive:
|
||||
gFile = gd.getFileFromEbooksFolder(None, old_author_dir)
|
||||
if gFile:
|
||||
gd.moveGdriveFolderRemote(gFile, new_author_rename_dir)
|
||||
g_file = gd.getFileFromEbooksFolder(None, old_author_dir)
|
||||
if g_file:
|
||||
gd.moveGdriveFolderRemote(g_file, new_author_rename_dir)
|
||||
else:
|
||||
if os.path.isdir(os.path.join(calibre_path, old_author_dir)):
|
||||
try:
|
||||
old_author_path = os.path.join(calibre_path, old_author_dir)
|
||||
new_author_path = os.path.join(calibre_path, new_author_rename_dir)
|
||||
shutil.move(os.path.normcase(old_author_path), os.path.normcase(new_author_path))
|
||||
except (OSError) as ex:
|
||||
except OSError as ex:
|
||||
log.error("Rename author from: %s to %s: %s", old_author_path, new_author_path, ex)
|
||||
log.debug(ex, exc_info=True)
|
||||
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
|
||||
|
@ -446,6 +445,7 @@ def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=
|
|||
new_authordir = get_valid_filename(localbook.authors[0].name, chars=96)
|
||||
return new_authordir
|
||||
|
||||
|
||||
# Moves files in file storage during author/title rename, or from temp dir to file storage
|
||||
def update_dir_structure_file(book_id, calibre_path, first_author, original_filepath, db_filename, renamed_author):
|
||||
# get book database entry from id, if original path overwrite source with original_filepath
|
||||
|
@ -485,23 +485,19 @@ def update_dir_structure_file(book_id, calibre_path, first_author, original_file
|
|||
|
||||
|
||||
def upload_new_file_gdrive(book_id, first_author, renamed_author, title, title_dir, original_filepath, filename_ext):
|
||||
error = False
|
||||
book = calibre_db.get_book(book_id)
|
||||
file_name = get_valid_filename(title, chars=42) + ' - ' + \
|
||||
get_valid_filename(first_author, chars=42) + \
|
||||
filename_ext
|
||||
get_valid_filename(first_author, chars=42) + filename_ext
|
||||
rename_all_authors(first_author, renamed_author, gdrive=True)
|
||||
gdrive_path = os.path.join(get_valid_filename(first_author, chars=96),
|
||||
title_dir + " (" + str(book_id) + ")")
|
||||
book.path = gdrive_path.replace("\\", "/")
|
||||
gd.uploadFileToEbooksFolder(os.path.join(gdrive_path, file_name).replace("\\", "/"), original_filepath)
|
||||
error |= rename_files_on_change(first_author, renamed_author, localbook=book, gdrive=True)
|
||||
return error
|
||||
return rename_files_on_change(first_author, renamed_author, localbook=book, gdrive=True)
|
||||
|
||||
|
||||
|
||||
def update_dir_structure_gdrive(book_id, first_author, renamed_author):
|
||||
error = False
|
||||
book = calibre_db.get_book(book_id)
|
||||
|
||||
authordir = book.path.split('/')[0]
|
||||
|
@ -510,27 +506,26 @@ def update_dir_structure_gdrive(book_id, first_author, renamed_author):
|
|||
new_titledir = get_valid_filename(book.title, chars=96) + u" (" + str(book_id) + u")"
|
||||
|
||||
if titledir != new_titledir:
|
||||
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
|
||||
if gFile:
|
||||
gd.moveGdriveFileRemote(gFile, new_titledir)
|
||||
g_file = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
|
||||
if g_file:
|
||||
gd.moveGdriveFileRemote(g_file, new_titledir)
|
||||
book.path = book.path.split('/')[0] + u'/' + new_titledir
|
||||
gd.updateDatabaseOnEdit(gFile['id'], book.path) # only child folder affected
|
||||
gd.updateDatabaseOnEdit(g_file['id'], book.path) # only child folder affected
|
||||
else:
|
||||
error = _(u'File %(file)s not found on Google Drive', file=book.path) # file not found
|
||||
return _(u'File %(file)s not found on Google Drive', file=book.path) # file not found
|
||||
|
||||
if authordir != new_authordir and authordir not in renamed_author:
|
||||
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
|
||||
if gFile:
|
||||
gd.moveGdriveFolderRemote(gFile, new_authordir)
|
||||
g_file = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
|
||||
if g_file:
|
||||
gd.moveGdriveFolderRemote(g_file, new_authordir)
|
||||
book.path = new_authordir + u'/' + book.path.split('/')[1]
|
||||
gd.updateDatabaseOnEdit(gFile['id'], book.path)
|
||||
gd.updateDatabaseOnEdit(g_file['id'], book.path)
|
||||
else:
|
||||
error = _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
|
||||
return _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
|
||||
|
||||
# change location in database to new author/title path
|
||||
book.path = os.path.join(new_authordir, new_titledir).replace('\\', '/')
|
||||
error |= rename_files_on_change(first_author, renamed_author, book, gdrive=True)
|
||||
return error
|
||||
return rename_files_on_change(first_author, renamed_author, book, gdrive=True)
|
||||
|
||||
|
||||
def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, db_filename, original_filepath, path):
|
||||
|
@ -548,15 +543,15 @@ def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, d
|
|||
# move original path to new path
|
||||
log.debug("Moving title: %s to %s", path, new_path)
|
||||
shutil.move(os.path.normcase(path), os.path.normcase(new_path))
|
||||
else: # path is valid copy only files to new location (merge)
|
||||
else: # path is valid copy only files to new location (merge)
|
||||
log.info("Moving title: %s into existing: %s", path, new_path)
|
||||
# Take all files and subfolder from old path (strange command)
|
||||
for dir_name, __, file_list in os.walk(path):
|
||||
for file in file_list:
|
||||
shutil.move(os.path.normcase(os.path.join(dir_name, file)),
|
||||
os.path.normcase(os.path.join(new_path + dir_name[len(path):], file)))
|
||||
os.path.normcase(os.path.join(new_path + dir_name[len(path):], file)))
|
||||
# change location in database to new author/title path
|
||||
localbook.path = os.path.join(new_authordir, new_titledir).replace('\\','/')
|
||||
localbook.path = os.path.join(new_authordir, new_titledir).replace('\\', '/')
|
||||
except OSError as ex:
|
||||
log.error("Rename title from: %s to %s: %s", path, new_path, ex)
|
||||
log.debug(ex, exc_info=True)
|
||||
|
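move_files_on_change() either moves the whole title folder or, when the destination already exists, walks the old tree and moves its files into the matching subfolders. A standalone sketch of that merge step (the makedirs call is added here for safety and is not in the original):

    import os
    import shutil

    def merge_move(src, dst):
        """Move src into dst; if dst already exists, merge file by file."""
        if not os.path.isdir(dst):
            shutil.move(os.path.normcase(src), os.path.normcase(dst))
            return
        for dir_name, _dirs, file_list in os.walk(src):
            target_dir = dst + dir_name[len(src):]
            os.makedirs(target_dir, exist_ok=True)     # not in the original merge
            for name in file_list:
                shutil.move(os.path.normcase(os.path.join(dir_name, name)),
                            os.path.normcase(os.path.join(target_dir, name)))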
@ -593,12 +588,12 @@ def delete_book_gdrive(book, book_format):
|
|||
for entry in book.data:
|
||||
if entry.format.upper() == book_format:
|
||||
name = entry.name + '.' + book_format
|
||||
gFile = gd.getFileFromEbooksFolder(book.path, name)
|
||||
g_file = gd.getFileFromEbooksFolder(book.path, name)
|
||||
else:
|
||||
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
|
||||
if gFile:
|
||||
gd.deleteDatabaseEntry(gFile['id'])
|
||||
gFile.Trash()
|
||||
g_file = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
|
||||
if g_file:
|
||||
gd.deleteDatabaseEntry(g_file['id'])
|
||||
g_file.Trash()
|
||||
else:
|
||||
error = _(u'Book path %(path)s not found on Google Drive', path=book.path) # file not found
|
||||
|
||||
|
@ -630,7 +625,7 @@ def generate_random_password():
|
|||
|
||||
def uniq(inpt):
|
||||
output = []
|
||||
inpt = [ " ".join(inp.split()) for inp in inpt]
|
||||
inpt = [" ".join(inp.split()) for inp in inpt]
|
||||
for x in inpt:
|
||||
if x not in output:
|
||||
output.append(x)
|
||||
|
@ -649,7 +644,7 @@ def check_username(username):
|
|||
username = username.strip()
|
||||
if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).scalar():
|
||||
log.error(u"This username is already taken")
|
||||
raise Exception (_(u"This username is already taken"))
|
||||
raise Exception(_(u"This username is already taken"))
|
||||
return username
|
||||
|
||||
|
||||
|
@ -731,7 +726,7 @@ def get_book_cover_internal(book, use_generic_cover_on_failure, resolution=None)
|
|||
log.error('%s/cover.jpg not found on Google Drive', book.path)
|
||||
return get_cover_on_failure(use_generic_cover_on_failure)
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
return get_cover_on_failure(use_generic_cover_on_failure)
|
||||
|
||||
# Send the book cover from the Calibre directory
|
||||
|
@ -798,13 +793,13 @@ def get_series_thumbnail(series_id, resolution):
|
|||
# saves book cover from url
|
||||
def save_cover_from_url(url, book_path):
|
||||
try:
|
||||
if not cli.allow_localhost:
|
||||
# 127.0.x.x, localhost, [::1], [::ffff:7f00:1]
|
||||
ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]
|
||||
if ip.startswith("127.") or ip.startswith('::ffff:7f') or ip == "::1":
|
||||
log.error("Localhost was accessed for cover upload")
|
||||
return False, _("You are not allowed to access localhost for cover uploads")
|
||||
img = requests.get(url, timeout=(10, 200)) # ToDo: Error Handling
|
||||
if cli.allow_localhost:
|
||||
img = requests.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling
|
||||
elif use_advocate:
|
||||
img = advocate.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling
|
||||
else:
|
||||
log.error("python modul advocate is not installed but is needed")
|
||||
return False, _("Python modul 'advocate' is not installed but is needed for cover downloads")
|
||||
img.raise_for_status()
|
||||
return save_cover(img, book_path)
|
||||
except (socket.gaierror,
|
||||
|
@ -816,6 +811,9 @@ def save_cover_from_url(url, book_path):
|
|||
except MissingDelegateError as ex:
|
||||
log.info(u'File Format Error %s', ex)
|
||||
return False, _("Cover Format Error")
|
||||
except UnacceptableAddressException:
|
||||
log.error("Localhost was accessed for cover upload")
|
||||
return False, _("You are not allowed to access localhost for cover uploads")
|
||||
|
||||
|
||||
def save_cover_from_filestorage(filepath, saved_filename, img):
|
||||
|
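The SSRF hardening above routes cover downloads through advocate, a requests-compatible client that rejects loopback and other unacceptable addresses, and disables redirects so the earlier redirect bypass is closed. A sketch of that decision path, assuming advocate exposes the usual requests-style get():

    import requests

    try:
        import advocate
        from advocate.exceptions import UnacceptableAddressException
        use_advocate = True
    except ImportError:
        advocate = requests
        UnacceptableAddressException = Exception
        use_advocate = False

    def fetch_cover(url, allow_localhost=False):
        if allow_localhost:
            resp = requests.get(url, timeout=(10, 200), allow_redirects=False)
        elif use_advocate:
            # advocate validates the resolved address before connecting
            resp = advocate.get(url, timeout=(10, 200), allow_redirects=False)
        else:
            raise RuntimeError("advocate is required for cover downloads")
        resp.raise_for_status()
        return resp.content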
@ -878,7 +876,7 @@ def save_cover(img, book_path):
|
|||
os.mkdir(tmp_dir)
|
||||
ret, message = save_cover_from_filestorage(tmp_dir, "uploaded_cover.jpg", img)
|
||||
if ret is True:
|
||||
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg').replace("\\","/"),
|
||||
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg').replace("\\", "/"),
|
||||
os.path.join(tmp_dir, "uploaded_cover.jpg"))
|
||||
log.info("Cover is saved on Google Drive")
|
||||
return True, None
|
||||
|
@ -890,9 +888,9 @@ def save_cover(img, book_path):
|
|||
|
||||
def do_download_file(book, book_format, client, data, headers):
|
||||
if config.config_use_google_drive:
|
||||
#startTime = time.time()
|
||||
# startTime = time.time()
|
||||
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
|
||||
#log.debug('%s', time.time() - startTime)
|
||||
# log.debug('%s', time.time() - startTime)
|
||||
if df:
|
||||
return gd.do_gdrive_download(df, headers)
|
||||
else:
|
||||
|
@ -916,22 +914,22 @@ def do_download_file(book, book_format, client, data, headers):
|
|||
##################################
|
||||
|
||||
|
||||
def check_unrar(unrarLocation):
|
||||
if not unrarLocation:
|
||||
def check_unrar(unrar_location):
|
||||
if not unrar_location:
|
||||
return
|
||||
|
||||
if not os.path.exists(unrarLocation):
|
||||
if not os.path.exists(unrar_location):
|
||||
return _('Unrar binary file not found')
|
||||
|
||||
try:
|
||||
unrarLocation = [unrarLocation]
|
||||
value = process_wait(unrarLocation, pattern='UNRAR (.*) freeware')
|
||||
unrar_location = [unrar_location]
|
||||
value = process_wait(unrar_location, pattern='UNRAR (.*) freeware')
|
||||
if value:
|
||||
version = value.group(1)
|
||||
log.debug("unrar version %s", version)
|
||||
|
||||
except (OSError, UnicodeDecodeError) as err:
|
||||
log.debug_or_exception(err)
|
||||
log.error_or_exception(err)
|
||||
return _('Error excecuting UnRar')
|
||||
|
||||
|
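`process_wait` is a Calibre-Web helper; assuming it simply runs the binary and matches a pattern against its banner, a rough standalone equivalent of the version check could look like this (names are illustrative):

```python
import re
import subprocess

# Hypothetical stand-in for the project's process_wait helper: run the
# binary, read whatever it prints, and pull the version out of the banner.
def probe_version(binary, pattern=r'UNRAR (.*) freeware'):
    proc = subprocess.run([binary], capture_output=True, text=True)
    match = re.search(pattern, proc.stdout or '')
    return match.group(1) if match else None
```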
||||
|
@ -952,19 +950,19 @@ def json_serial(obj):
|
|||
|
||||
# helper function for displaying the runtime of tasks
|
||||
def format_runtime(runtime):
|
||||
retVal = ""
|
||||
ret_val = ""
|
||||
if runtime.days:
|
||||
retVal = format_unit(runtime.days, 'duration-day', length="long", locale=get_locale()) + ', '
|
||||
ret_val = format_unit(runtime.days, 'duration-day', length="long", locale=get_locale()) + ', '
|
||||
mins, seconds = divmod(runtime.seconds, 60)
|
||||
hours, minutes = divmod(mins, 60)
|
||||
# ToDo: locale.number_symbols._data['timeSeparator'] -> localize time separator ?
|
||||
if hours:
|
||||
retVal += '{:d}:{:02d}:{:02d}s'.format(hours, minutes, seconds)
|
||||
ret_val += '{:d}:{:02d}:{:02d}s'.format(hours, minutes, seconds)
|
||||
elif minutes:
|
||||
retVal += '{:2d}:{:02d}s'.format(minutes, seconds)
|
||||
ret_val += '{:2d}:{:02d}s'.format(minutes, seconds)
|
||||
else:
|
||||
retVal += '{:2d}s'.format(seconds)
|
||||
return retVal
|
||||
ret_val += '{:2d}s'.format(seconds)
|
||||
return ret_val
|
||||
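For reference, the divmod arithmetic above splits a `timedelta`'s seconds into hours, minutes and seconds; a quick worked example (locale formatting omitted):

```python
from datetime import timedelta

runtime = timedelta(seconds=3723)            # 1 h 2 min 3 s
mins, seconds = divmod(runtime.seconds, 60)  # 62, 3
hours, minutes = divmod(mins, 60)            # 1, 2
assert (hours, minutes, seconds) == (1, 2, 3)
```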
|
||||
|
||||
# helper function to apply localize status information in tasklist entries
|
||||
|
@ -1031,8 +1029,8 @@ def check_valid_domain(domain_text):
|
|||
|
||||
|
||||
def get_cc_columns(filter_config_custom_read=False):
|
||||
tmpcc = calibre_db.session.query(db.Custom_Columns)\
|
||||
.filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
|
||||
tmpcc = calibre_db.session.query(db.CustomColumns)\
|
||||
.filter(db.CustomColumns.datatype.notin_(db.cc_exceptions)).all()
|
||||
cc = []
|
||||
r = None
|
||||
if config.config_columns_to_ignore:
|
||||
|
@ -1051,6 +1049,7 @@ def get_cc_columns(filter_config_custom_read=False):
|
|||
def get_download_link(book_id, book_format, client):
|
||||
book_format = book_format.split(".")[0]
|
||||
book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
|
||||
data1= ""
|
||||
if book:
|
||||
data1 = calibre_db.get_book_format(book.id, book_format.upper())
|
||||
else:
|
||||
|
|
|
@ -42,7 +42,7 @@ logging.addLevelName(logging.CRITICAL, "CRIT")
|
|||
|
||||
class _Logger(logging.Logger):
|
||||
|
||||
def debug_or_exception(self, message, stacklevel=2, *args, **kwargs):
|
||||
def error_or_exception(self, message, stacklevel=2, *args, **kwargs):
|
||||
if sys.version_info > (3, 7):
|
||||
if is_debug_enabled():
|
||||
self.exception(message, stacklevel=stacklevel, *args, **kwargs)
|
||||
|
|
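The renamed `error_or_exception` keeps the earlier idea: log the full traceback only while debug logging is active, otherwise emit a plain error. A minimal sketch of that pattern outside Calibre-Web (ignoring the `stacklevel` handling) might be:

```python
import logging

class AppLogger(logging.Logger):
    def error_or_exception(self, message, *args, **kwargs):
        # full traceback when debug logging is on; terse error otherwise
        if self.isEnabledFor(logging.DEBUG):
            self.exception(message, *args, **kwargs)
        else:
            self.error(message, *args, **kwargs)
```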
|
@ -25,8 +25,11 @@ try:
|
|||
except ImportError:
|
||||
pass
|
||||
from cps.services.Metadata import MetaRecord, MetaSourceInfo, Metadata
|
||||
import cps.logger as logger
|
||||
|
||||
#from time import time
|
||||
from operator import itemgetter
|
||||
log = logger.create()
|
||||
|
||||
class Amazon(Metadata):
|
||||
__name__ = "Amazon"
|
||||
|
@ -48,15 +51,15 @@ class Amazon(Metadata):
|
|||
self, query: str, generic_cover: str = "", locale: str = "en"
|
||||
):
|
||||
#timer=time()
|
||||
def inner(link,index)->[dict,int]:
|
||||
with self.session as session:
|
||||
r = session.get(f"https://www.amazon.com/{link}")
|
||||
r.raise_for_status()
|
||||
long_soup = BS(r.text, "lxml") #~4sec :/
|
||||
soup2 = long_soup.find("div", attrs={"cel_widget_id": "dpx-books-ppd_csm_instrumentation_wrapper"})
|
||||
if soup2 is None:
|
||||
return
|
||||
try:
|
||||
def inner(link, index) -> [dict, int]:
|
||||
try:
|
||||
with self.session as session:
|
||||
r = session.get(f"https://www.amazon.com{link}")
|
||||
r.raise_for_status()
|
||||
long_soup = BS(r.text, "lxml") #~4sec :/
|
||||
soup2 = long_soup.find("div", attrs={"cel_widget_id": "dpx-books-ppd_csm_instrumentation_wrapper"})
|
||||
if soup2 is None:
|
||||
return
|
||||
match = MetaRecord(
|
||||
title = "",
|
||||
authors = "",
|
||||
|
@ -65,7 +68,7 @@ class Amazon(Metadata):
|
|||
description="Amazon Books",
|
||||
link="https://amazon.com/"
|
||||
),
|
||||
url = f"https://www.amazon.com/{link}",
|
||||
url = f"https://www.amazon.com{link}",
|
||||
#the more searches the slower, these are too hard to find in reasonable time or might not even exist
|
||||
publisher= "", # very unreliable
|
||||
publishedDate= "", # very unreliable
|
||||
|
@ -101,22 +104,27 @@ class Amazon(Metadata):
|
|||
except (AttributeError, TypeError):
|
||||
match.cover = ""
|
||||
return match, index
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return
|
||||
except Exception as e:
|
||||
log.error_or_exception(e)
|
||||
return
|
||||
|
||||
val = list()
|
||||
if self.active:
|
||||
results = self.session.get(
|
||||
f"https://www.amazon.com/s?k={query.replace(' ', '+')}&i=digital-text&sprefix={query.replace(' ', '+')}"
|
||||
f"%2Cdigital-text&ref=nb_sb_noss",
|
||||
headers=self.headers)
|
||||
results.raise_for_status()
|
||||
soup = BS(results.text, 'html.parser')
|
||||
links_list = [next(filter(lambda i: "digital-text" in i["href"], x.findAll("a")))["href"] for x in
|
||||
soup.findAll("div", attrs={"data-component-type": "s-search-result"})]
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
|
||||
fut = {executor.submit(inner, link, index) for index, link in enumerate(links_list[:5])}
|
||||
val=list(map(lambda x : x.result() ,concurrent.futures.as_completed(fut)))
|
||||
result=list(filter(lambda x: x, val))
|
||||
return [x[0] for x in sorted(result, key=itemgetter(1))] #sort by amazons listing order for best relevance
|
||||
try:
|
||||
if self.active:
|
||||
results = self.session.get(
|
||||
f"https://www.amazon.com/s?k={query.replace(' ', '+')}"
|
||||
f"&i=digital-text&sprefix={query.replace(' ', '+')}"
|
||||
f"%2Cdigital-text&ref=nb_sb_noss",
|
||||
headers=self.headers)
|
||||
results.raise_for_status()
|
||||
soup = BS(results.text, 'html.parser')
|
||||
links_list = [next(filter(lambda i: "digital-text" in i["href"], x.findAll("a")))["href"] for x in
|
||||
soup.findAll("div", attrs={"data-component-type": "s-search-result"})]
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
|
||||
fut = {executor.submit(inner, link, index) for index, link in enumerate(links_list[:5])}
|
||||
val = list(map(lambda x: x.result(), concurrent.futures.as_completed(fut)))
|
||||
result = list(filter(lambda x: x, val))
|
||||
return [x[0] for x in sorted(result, key=itemgetter(1))] #sort by amazons listing order for best relevance
|
||||
except requests.exceptions.HTTPError as e:
|
||||
log.error_or_exception(e)
|
||||
return []
|
||||
|
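The Amazon provider now wraps the whole lookup in try/except and keeps its thread pool, re-sorting results by their original index so relevance order survives the out-of-order completion of futures. A generic sketch of that fetch-in-parallel, sort-by-index pattern (the `worker` callable and names are placeholders):

```python
import concurrent.futures
from operator import itemgetter

def fetch_ordered(urls, worker, max_workers=5):
    """Run worker(url, index) concurrently and return payloads in index order."""
    results = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(worker, url, idx) for idx, url in enumerate(urls)}
        for fut in concurrent.futures.as_completed(futures):
            outcome = fut.result()          # expected: (payload, index) or None
            if outcome:
                results.append(outcome)
    return [payload for payload, _ in sorted(results, key=itemgetter(1))]
```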
|
|
@ -46,7 +46,7 @@ class Google(Metadata):
|
|||
tokens = [quote(t.encode("utf-8")) for t in title_tokens]
|
||||
query = "+".join(tokens)
|
||||
results = requests.get(Google.SEARCH_URL + query)
|
||||
for result in results.json()["items"]:
|
||||
for result in results.json().get("items", []):
|
||||
val.append(
|
||||
self._parse_search_result(
|
||||
result=result, generic_cover=generic_cover, locale=locale
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
import itertools
|
||||
from typing import Dict, List, Optional
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import quote, unquote
|
||||
|
||||
try:
|
||||
from fake_useragent.errors import FakeUserAgentError
|
||||
|
@ -47,7 +47,7 @@ class scholar(Metadata):
|
|||
scholar_gen = itertools.islice(scholarly.search_pubs(query), 10)
|
||||
for result in scholar_gen:
|
||||
match = self._parse_search_result(
|
||||
result=result, generic_cover=generic_cover, locale=locale
|
||||
result=result, generic_cover="", locale=locale
|
||||
)
|
||||
val.append(match)
|
||||
return val
|
||||
|
@ -66,7 +66,7 @@ class scholar(Metadata):
|
|||
)
|
||||
|
||||
match.cover = result.get("image", {}).get("original_url", generic_cover)
|
||||
match.description = result["bib"].get("abstract", "")
|
||||
match.description = unquote(result["bib"].get("abstract", ""))
|
||||
match.publisher = result["bib"].get("venue", "")
|
||||
match.publishedDate = result["bib"].get("pub_year") + "-01-01"
|
||||
match.identifiers = {"scholar": match.id}
|
||||
|
|
|
@ -149,7 +149,7 @@ def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider
|
|||
log.info("Link to {} Succeeded".format(provider_name))
|
||||
return redirect(url_for('web.profile'))
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
ub.session.rollback()
|
||||
else:
|
||||
flash(_(u"Login failed, No User Linked With OAuth Account"), category="error")
|
||||
|
@ -197,7 +197,7 @@ def unlink_oauth(provider):
|
|||
flash(_(u"Unlink to %(oauth)s Succeeded", oauth=oauth_check[provider]), category="success")
|
||||
log.info("Unlink to {} Succeeded".format(oauth_check[provider]))
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
ub.session.rollback()
|
||||
flash(_(u"Unlink to %(oauth)s Failed", oauth=oauth_check[provider]), category="error")
|
||||
except NoResultFound:
|
||||
|
|
245
cps/opds.py
|
@ -26,16 +26,15 @@ from functools import wraps
|
|||
|
||||
from flask import Blueprint, request, render_template, Response, g, make_response, abort
|
||||
from flask_login import current_user
|
||||
from sqlalchemy.sql.expression import func, text, or_, and_, true
|
||||
from sqlalchemy.sql.expression import func, text, or_, and_, any_, true
|
||||
from werkzeug.security import check_password_hash
|
||||
from tornado.httputil import HTTPServerRequest
|
||||
from . import constants, logger, config, db, calibre_db, ub, services, get_locale, isoLanguages
|
||||
from .helper import get_download_link, get_book_cover
|
||||
from .pagination import Pagination
|
||||
from .web import render_read_books
|
||||
from .usermanagement import load_user_from_request
|
||||
from flask_babel import gettext as _
|
||||
|
||||
from sqlalchemy.orm import InstrumentedAttribute
|
||||
opds = Blueprint('opds', __name__)
|
||||
|
||||
log = logger.create()
|
||||
|
@ -99,26 +98,7 @@ def feed_normal_search():
|
|||
@opds.route("/opds/books")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_booksindex():
|
||||
shift = 0
|
||||
off = int(request.args.get("offset") or 0)
|
||||
entries = calibre_db.session.query(func.upper(func.substr(db.Books.sort, 1, 1)).label('id'))\
|
||||
.filter(calibre_db.common_filters()).group_by(func.upper(func.substr(db.Books.sort, 1, 1))).all()
|
||||
|
||||
elements = []
|
||||
if off == 0:
|
||||
elements.append({'id': "00", 'name':_("All")})
|
||||
shift = 1
|
||||
for entry in entries[
|
||||
off + shift - 1:
|
||||
int(off + int(config.config_books_per_page) - shift)]:
|
||||
elements.append({'id': entry.id, 'name': entry.id})
|
||||
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(entries) + 1)
|
||||
return render_xml_template('feed.xml',
|
||||
letterelements=elements,
|
||||
folder='opds.feed_letter_books',
|
||||
pagination=pagination)
|
||||
return render_element_index(db.Books.sort, None, 'opds.feed_letter_books')
|
||||
|
||||
|
||||
@opds.route("/opds/books/letter/<book_id>")
|
||||
|
@ -171,43 +151,23 @@ def feed_hot():
|
|||
hot_books = all_books.offset(off).limit(config.config_books_per_page)
|
||||
entries = list()
|
||||
for book in hot_books:
|
||||
downloadBook = calibre_db.get_book(book.Downloads.book_id)
|
||||
if downloadBook:
|
||||
download_book = calibre_db.get_book(book.Downloads.book_id)
|
||||
if download_book:
|
||||
entries.append(
|
||||
calibre_db.get_filtered_book(book.Downloads.book_id)
|
||||
)
|
||||
else:
|
||||
ub.delete_download(book.Downloads.book_id)
|
||||
numBooks = entries.__len__()
|
||||
num_books = entries.__len__()
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1),
|
||||
config.config_books_per_page, numBooks)
|
||||
config.config_books_per_page, num_books)
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
@opds.route("/opds/author")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_authorindex():
|
||||
shift = 0
|
||||
off = int(request.args.get("offset") or 0)
|
||||
entries = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('id'))\
|
||||
.join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters())\
|
||||
.group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
|
||||
|
||||
elements = []
|
||||
if off == 0:
|
||||
elements.append({'id': "00", 'name':_("All")})
|
||||
shift = 1
|
||||
for entry in entries[
|
||||
off + shift - 1:
|
||||
int(off + int(config.config_books_per_page) - shift)]:
|
||||
elements.append({'id': entry.id, 'name': entry.id})
|
||||
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(entries) + 1)
|
||||
return render_xml_template('feed.xml',
|
||||
letterelements=elements,
|
||||
folder='opds.feed_letter_author',
|
||||
pagination=pagination)
|
||||
return render_element_index(db.Authors.sort, db.books_authors_link, 'opds.feed_letter_author')
|
||||
|
||||
|
||||
@opds.route("/opds/author/letter/<book_id>")
|
||||
|
@ -228,12 +188,7 @@ def feed_letter_author(book_id):
|
|||
@opds.route("/opds/author/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_author(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), 0,
|
||||
db.Books,
|
||||
db.Books.authors.any(db.Authors.id == book_id),
|
||||
[db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
return render_xml_dataset(db.Authors, book_id)
|
||||
|
||||
|
||||
@opds.route("/opds/publisher")
|
||||
|
@ -254,37 +209,14 @@ def feed_publisherindex():
|
|||
@opds.route("/opds/publisher/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_publisher(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), 0,
|
||||
db.Books,
|
||||
db.Books.publishers.any(db.Publishers.id == book_id),
|
||||
[db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
return render_xml_dataset(db.Publishers, book_id)
|
||||
|
||||
|
||||
@opds.route("/opds/category")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_categoryindex():
|
||||
shift = 0
|
||||
off = int(request.args.get("offset") or 0)
|
||||
entries = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('id'))\
|
||||
.join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters())\
|
||||
.group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
|
||||
elements = []
|
||||
if off == 0:
|
||||
elements.append({'id': "00", 'name':_("All")})
|
||||
shift = 1
|
||||
for entry in entries[
|
||||
off + shift - 1:
|
||||
int(off + int(config.config_books_per_page) - shift)]:
|
||||
elements.append({'id': entry.id, 'name': entry.id})
|
||||
return render_element_index(db.Tags.name, db.books_tags_link, 'opds.feed_letter_category')
|
||||
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(entries) + 1)
|
||||
return render_xml_template('feed.xml',
|
||||
letterelements=elements,
|
||||
folder='opds.feed_letter_category',
|
||||
pagination=pagination)
|
||||
|
||||
@opds.route("/opds/category/letter/<book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
|
@ -306,36 +238,14 @@ def feed_letter_category(book_id):
|
|||
@opds.route("/opds/category/<int:book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_category(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), 0,
|
||||
db.Books,
|
||||
db.Books.tags.any(db.Tags.id == book_id),
|
||||
[db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
return render_xml_dataset(db.Tags, book_id)
|
||||
|
||||
|
||||
@opds.route("/opds/series")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_seriesindex():
|
||||
shift = 0
|
||||
off = int(request.args.get("offset") or 0)
|
||||
entries = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('id'))\
|
||||
.join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters())\
|
||||
.group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
|
||||
elements = []
|
||||
if off == 0:
|
||||
elements.append({'id': "00", 'name':_("All")})
|
||||
shift = 1
|
||||
for entry in entries[
|
||||
off + shift - 1:
|
||||
int(off + int(config.config_books_per_page) - shift)]:
|
||||
elements.append({'id': entry.id, 'name': entry.id})
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(entries) + 1)
|
||||
return render_xml_template('feed.xml',
|
||||
letterelements=elements,
|
||||
folder='opds.feed_letter_series',
|
||||
pagination=pagination)
|
||||
return render_element_index(db.Series.sort, db.books_series_link, 'opds.feed_letter_series')
|
||||
|
||||
|
||||
@opds.route("/opds/series/letter/<book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
|
@ -370,7 +280,7 @@ def feed_series(book_id):
|
|||
def feed_ratingindex():
|
||||
off = request.args.get("offset") or 0
|
||||
entries = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
|
||||
(db.Ratings.rating / 2).label('name')) \
|
||||
(db.Ratings.rating / 2).label('name')) \
|
||||
.join(db.books_ratings_link)\
|
||||
.join(db.Books)\
|
||||
.filter(calibre_db.common_filters()) \
|
||||
|
@ -388,12 +298,7 @@ def feed_ratingindex():
|
|||
@opds.route("/opds/ratings/<book_id>")
|
||||
@requires_basic_auth_if_no_ano
|
||||
def feed_ratings(book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), 0,
|
||||
db.Books,
|
||||
db.Books.ratings.any(db.Ratings.id == book_id),
|
||||
[db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
return render_xml_dataset(db.Ratings, book_id)
|
||||
|
||||
|
||||
@opds.route("/opds/formats")
|
||||
|
@ -491,7 +396,7 @@ def feed_shelf(book_id):
|
|||
@requires_basic_auth_if_no_ano
|
||||
def opds_download_link(book_id, book_format):
|
||||
# I gave up with this: With enabled ldap login, the user doesn't get logged in, therefore it's always guest
|
||||
# workaround, loading the user from the request and checking it's download rights here
|
||||
# workaround, loading the user from the request and checking its download rights here
|
||||
# in case of anonymous browsing user is None
|
||||
user = load_user_from_request(request) or current_user
|
||||
if not user.role_download():
|
||||
|
@ -517,48 +422,6 @@ def get_metadata_calibre_companion(uuid, library):
|
|||
return ""
|
||||
|
||||
|
||||
def feed_search(term):
|
||||
if term:
|
||||
entries, __, ___ = calibre_db.get_search_results(term, config_read_column=config.config_read_column)
|
||||
entries_count = len(entries) if len(entries) > 0 else 1
|
||||
pagination = Pagination(1, entries_count, entries_count)
|
||||
items = [entry[0] for entry in entries]
|
||||
return render_xml_template('feed.xml', searchterm=term, entries=items, pagination=pagination)
|
||||
else:
|
||||
return render_xml_template('feed.xml', searchterm="")
|
||||
|
||||
|
||||
def check_auth(username, password):
|
||||
try:
|
||||
username = username.encode('windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
username = username.encode('utf-8')
|
||||
user = ub.session.query(ub.User).filter(func.lower(ub.User.name) ==
|
||||
username.decode('utf-8').lower()).first()
|
||||
if bool(user and check_password_hash(str(user.password), password)):
|
||||
return True
|
||||
else:
|
||||
ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||
log.warning('OPDS Login failed for user "%s" IP-address: %s', username.decode('utf-8'), ip_Address)
|
||||
return False
|
||||
|
||||
|
||||
def authenticate():
|
||||
return Response(
|
||||
'Could not verify your access level for that URL.\n'
|
||||
'You have to login with proper credentials', 401,
|
||||
{'WWW-Authenticate': 'Basic realm="Login Required"'})
|
||||
|
||||
|
||||
def render_xml_template(*args, **kwargs):
|
||||
# ToDo: return time in current timezone similar to %z
|
||||
currtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00")
|
||||
xml = render_template(current_time=currtime, instance=config.config_calibre_web_title, *args, **kwargs)
|
||||
response = make_response(xml)
|
||||
response.headers["Content-Type"] = "application/atom+xml; charset=utf-8"
|
||||
return response
|
||||
|
||||
|
||||
@opds.route("/opds/thumb_240_240/<book_id>")
|
||||
@opds.route("/opds/cover_240_240/<book_id>")
|
||||
@opds.route("/opds/cover_90_90/<book_id>")
|
||||
|
@ -582,3 +445,77 @@ def feed_unread_books():
|
|||
off = request.args.get("offset") or 0
|
||||
result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, False, True)
|
||||
return render_xml_template('feed.xml', entries=result, pagination=pagination)
|
||||
|
||||
|
||||
def feed_search(term):
|
||||
if term:
|
||||
entries, __, ___ = calibre_db.get_search_results(term, config_read_column=config.config_read_column)
|
||||
entries_count = len(entries) if len(entries) > 0 else 1
|
||||
pagination = Pagination(1, entries_count, entries_count)
|
||||
items = [entry[0] for entry in entries]
|
||||
return render_xml_template('feed.xml', searchterm=term, entries=items, pagination=pagination)
|
||||
else:
|
||||
return render_xml_template('feed.xml', searchterm="")
|
||||
|
||||
|
||||
def check_auth(username, password):
|
||||
try:
|
||||
username = username.encode('windows-1252')
|
||||
except UnicodeEncodeError:
|
||||
username = username.encode('utf-8')
|
||||
user = ub.session.query(ub.User).filter(func.lower(ub.User.name) ==
|
||||
username.decode('utf-8').lower()).first()
|
||||
if bool(user and check_password_hash(str(user.password), password)):
|
||||
return True
|
||||
else:
|
||||
ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||
log.warning('OPDS Login failed for user "%s" IP-address: %s', username.decode('utf-8'), ip_address)
|
||||
return False
|
||||
|
||||
|
||||
def authenticate():
|
||||
return Response(
|
||||
'Could not verify your access level for that URL.\n'
|
||||
'You have to login with proper credentials', 401,
|
||||
{'WWW-Authenticate': 'Basic realm="Login Required"'})
|
||||
|
||||
|
||||
def render_xml_template(*args, **kwargs):
|
||||
# ToDo: return time in current timezone similar to %z
|
||||
currtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00")
|
||||
xml = render_template(current_time=currtime, instance=config.config_calibre_web_title, *args, **kwargs)
|
||||
response = make_response(xml)
|
||||
response.headers["Content-Type"] = "application/atom+xml; charset=utf-8"
|
||||
return response
|
||||
|
||||
|
||||
def render_xml_dataset(data_table, book_id):
|
||||
off = request.args.get("offset") or 0
|
||||
entries, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), 0,
|
||||
db.Books,
|
||||
getattr(db.Books, data_table.__tablename__).any(data_table.id == book_id),
|
||||
[db.Books.timestamp.desc()])
|
||||
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
|
||||
|
||||
|
||||
def render_element_index(database_column, linked_table, folder):
|
||||
shift = 0
|
||||
off = int(request.args.get("offset") or 0)
|
||||
entries = calibre_db.session.query(func.upper(func.substr(database_column, 1, 1)).label('id'))
|
||||
if linked_table is not None:
|
||||
entries = entries.join(linked_table).join(db.Books)
|
||||
entries = entries.filter(calibre_db.common_filters()).group_by(func.upper(func.substr(database_column, 1, 1))).all()
|
||||
elements = []
|
||||
if off == 0:
|
||||
elements.append({'id': "00", 'name': _("All")})
|
||||
shift = 1
|
||||
for entry in entries[
|
||||
off + shift - 1:
|
||||
int(off + int(config.config_books_per_page) - shift)]:
|
||||
elements.append({'id': entry.id, 'name': entry.id})
|
||||
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
|
||||
len(entries) + 1)
|
||||
return render_xml_template('feed.xml',
|
||||
letterelements=elements,
|
||||
folder=folder,
|
||||
pagination=pagination)
|
||||
|
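The former copy-pasted `feed_*index` and per-entity feed views collapse into the two helpers above. The core of `render_element_index` is a grouped first-letter query; a simplified SQLAlchemy sketch of that idea (model and session names are placeholders):

```python
from sqlalchemy import func

def first_letter_index(session, column, linked_table=None, books=None):
    """Return the distinct upper-cased first letters of `column`,
    optionally restricted to rows reachable from the books table."""
    letter = func.upper(func.substr(column, 1, 1)).label('id')
    query = session.query(letter)
    if linked_table is not None and books is not None:
        query = query.join(linked_table).join(books)
    return query.group_by(letter).all()
```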
|
|
@ -57,10 +57,10 @@ class Pagination(object):
|
|||
def has_next(self):
|
||||
return self.page < self.pages
|
||||
|
||||
# right_edge: last right_edges count of all pages are shown as number, means, if 10 pages are paginated -> 9,10 shwn
|
||||
# left_edge: first left_edges count of all pages are shown as number -> 1,2 shwn
|
||||
# left_current: left_current count below current page are shown as number, means if current page 5 -> 3,4 shwn
|
||||
# left_current: right_current count above current page are shown as number, means if current page 5 -> 6,7 shwn
|
||||
# right_edge: last right_edges count of all pages are shown as number, means, if 10 pages are paginated -> 9,10 shown
|
||||
# left_edge: first left_edges count of all pages are shown as number -> 1,2 shown
|
||||
# left_current: left_current count below current page are shown as number, means if current page 5 -> 3,4 shown
|
||||
# right_current: right_current count above current page are shown as number, means if current page 5 -> 6,7 shown
|
||||
def iter_pages(self, left_edge=2, left_current=2,
|
||||
right_current=4, right_edge=2):
|
||||
last = 0
|
||||
|
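The corrected comments describe a sliding window of page numbers around the current page. A hypothetical, self-contained version of that windowing (not the project's exact implementation) for clarity:

```python
def iter_pages(page, pages, left_edge=2, left_current=2, right_current=4, right_edge=2):
    """Yield page numbers to render; None marks a gap between groups."""
    last = 0
    for num in range(1, pages + 1):
        if (num <= left_edge
                or page - left_current <= num <= page + right_current
                or num > pages - right_edge):
            if last + 1 != num:
                yield None
            yield num
            last = num
```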
|
|
@ -22,6 +22,7 @@
|
|||
|
||||
import json
|
||||
from datetime import datetime
|
||||
from functools import wraps
|
||||
|
||||
from flask import Blueprint, request, make_response, abort, url_for, flash, redirect
|
||||
from flask_login import login_required, current_user, login_user
|
||||
|
@ -31,10 +32,6 @@ from sqlalchemy.sql.expression import true
|
|||
from . import config, logger, ub
|
||||
from .render_template import render_title_template
|
||||
|
||||
try:
|
||||
from functools import wraps
|
||||
except ImportError:
|
||||
pass # We're not using Python 3
|
||||
|
||||
remotelogin = Blueprint('remotelogin', __name__)
|
||||
log = logger.create()
|
||||
|
|
|
@ -198,7 +198,7 @@ class CalibreTask:
|
|||
self.run(*args)
|
||||
except Exception as ex:
|
||||
self._handleError(str(ex))
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
|
||||
self.end_time = datetime.now()
|
||||
|
||||
|
|
44
cps/shelf.py
|
@ -94,10 +94,10 @@ def add_to_shelf(shelf_id, book_id):
|
|||
try:
|
||||
ub.session.merge(shelf)
|
||||
ub.session.commit()
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_(u"Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
if "HTTP_REFERER" in request.environ:
|
||||
return redirect(request.environ["HTTP_REFERER"])
|
||||
else:
|
||||
|
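This rollback, log, flash sequence repeats for every shelf operation below; a small sketch of the shared pattern (helper name is illustrative, `log.error_or_exception` is the project's logger method):

```python
from flask import flash
from flask_babel import gettext as _
from sqlalchemy.exc import OperationalError, InvalidRequestError

def commit_or_flash(session, log):
    """Commit the session; on database errors roll back and surface the cause."""
    try:
        session.commit()
        return True
    except (OperationalError, InvalidRequestError) as e:
        session.rollback()
        log.error_or_exception("Settings Database error: {}".format(e))
        flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
        return False
```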
@ -154,10 +154,10 @@ def search_to_shelf(shelf_id):
|
|||
ub.session.merge(shelf)
|
||||
ub.session.commit()
|
||||
flash(_(u"Books have been added to shelf: %(sname)s", sname=shelf.name), category="success")
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
else:
|
||||
log.error("Could not add books to shelf: {}".format(shelf.name))
|
||||
flash(_(u"Could not add books to shelf: %(sname)s", sname=shelf.name), category="error")
|
||||
|
@ -197,10 +197,10 @@ def remove_from_shelf(shelf_id, book_id):
|
|||
ub.session.delete(book_shelf)
|
||||
shelf.last_modified = datetime.utcnow()
|
||||
ub.session.commit()
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
if "HTTP_REFERER" in request.environ:
|
||||
return redirect(request.environ["HTTP_REFERER"])
|
||||
else:
|
||||
|
@ -273,12 +273,12 @@ def create_edit_shelf(shelf, page_title, page, shelf_id=False):
|
|||
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
|
||||
except (OperationalError, InvalidRequestError) as ex:
|
||||
ub.session.rollback()
|
||||
log.debug_or_exception(ex)
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception(ex)
|
||||
log.error_or_exception("Settings Database error: {}".format(ex))
|
||||
flash(_(u"Database error: %(error)s.", error=ex.orig), category="error")
|
||||
except Exception as ex:
|
||||
ub.session.rollback()
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
flash(_(u"There was an error"), category="error")
|
||||
return render_title_template('shelf_edit.html',
|
||||
shelf=shelf,
|
||||
|
@ -337,10 +337,10 @@ def delete_shelf(shelf_id):
|
|||
flash(_("Error deleting Shelf"), category="error")
|
||||
else:
|
||||
flash(_("Shelf successfully deleted"), category="success")
|
||||
except InvalidRequestError:
|
||||
except InvalidRequestError as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
return redirect(url_for('web.index'))
|
||||
|
||||
|
||||
|
@ -374,10 +374,10 @@ def order_shelf(shelf_id):
|
|||
# if order diffrent from before -> shelf.last_modified = datetime.utcnow()
|
||||
try:
|
||||
ub.session.commit()
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
|
||||
result = list()
|
||||
if shelf:
|
||||
|
@ -450,10 +450,10 @@ def render_show_shelf(shelf_type, shelf_id, page_no, sort_param):
|
|||
try:
|
||||
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == entry.book_id).delete()
|
||||
ub.session.commit()
|
||||
except (OperationalError, InvalidRequestError):
|
||||
except (OperationalError, InvalidRequestError) as e:
|
||||
ub.session.rollback()
|
||||
log.error("Settings DB is not Writeable")
|
||||
flash(_("Settings DB is not Writeable"), category="error")
|
||||
log.error_or_exception("Settings Database error: {}".format(e))
|
||||
flash(_(u"Database error: %(error)s.", error=e.orig), category="error")
|
||||
|
||||
return render_title_template(page,
|
||||
entries=result,
|
||||
|
|
|
@ -28,14 +28,24 @@ $("#have_read_cb").on("change", function() {
|
|||
data: $(this).closest("form").serialize(),
|
||||
error: function(response) {
|
||||
var data = [{type:"danger", message:response.responseText}]
|
||||
$("#flash_success").remove();
|
||||
// $("#flash_success").parent().remove();
|
||||
$("#flash_danger").remove();
|
||||
$(".row-fluid.text-center").remove();
|
||||
if (!jQuery.isEmptyObject(data)) {
|
||||
data.forEach(function (item) {
|
||||
$(".navbar").after('<div class="row-fluid text-center" >' +
|
||||
'<div id="flash_' + item.type + '" class="alert alert-' + item.type + '">' + item.message + '</div>' +
|
||||
'</div>');
|
||||
});
|
||||
$("#have_read_cb").prop("checked", !$("#have_read_cb").prop("checked"));
|
||||
if($("#bookDetailsModal").is(":visible")) {
|
||||
data.forEach(function (item) {
|
||||
$(".modal-header").after('<div id="flash_' + item.type +
|
||||
'" class="text-center alert alert-' + item.type + '">' + item.message + '</div>');
|
||||
});
|
||||
} else
|
||||
{
|
||||
data.forEach(function (item) {
|
||||
$(".navbar").after('<div class="row-fluid text-center" >' +
|
||||
'<div id="flash_' + item.type + '" class="alert alert-' + item.type + '">' + item.message + '</div>' +
|
||||
'</div>');
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -515,6 +515,7 @@ $(function() {
|
|||
|
||||
$("#bookDetailsModal")
|
||||
.on("show.bs.modal", function(e) {
|
||||
$("#flash_danger").remove();
|
||||
var $modalBody = $(this).find(".modal-body");
|
||||
|
||||
// Prevent static assets from loading multiple times
|
||||
|
|
|
@ -125,8 +125,9 @@ $(function() {
|
|||
url: window.location.pathname + "/../ajax/simulatemerge",
|
||||
data: JSON.stringify({"Merge_books":selections}),
|
||||
success: function success(booTitles) {
|
||||
$('#merge_from').empty();
|
||||
$.each(booTitles.from, function(i, item) {
|
||||
$("<span>- " + item + "</span>").appendTo("#merge_from");
|
||||
$("<span>- " + item + "</span><p></p>").appendTo("#merge_from");
|
||||
});
|
||||
$("#merge_to").text("- " + booTitles.to);
|
||||
|
||||
|
@ -843,11 +844,13 @@ function checkboxChange(checkbox, userId, field, field_index) {
|
|||
|
||||
function BookCheckboxChange(checkbox, userId, field) {
|
||||
var value = checkbox.checked ? "True" : "False";
|
||||
var element = checkbox;
|
||||
$.ajax({
|
||||
method: "post",
|
||||
url: getPath() + "/ajax/editbooks/" + field,
|
||||
data: {"pk": userId, "value": value},
|
||||
error: function(data) {
|
||||
element.checked = !element.checked;
|
||||
handleListServerResponse([{type:"danger", message:data.responseText}])
|
||||
},
|
||||
success: handleListServerResponse
|
||||
|
|
|
@ -35,6 +35,8 @@ from cps.ub import init_db_thread
|
|||
|
||||
from cps.tasks.mail import TaskEmail
|
||||
from cps import gdriveutils
|
||||
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
||||
|
|
88
cps/tasks/mail.py
Normal file → Executable file
|
@ -16,24 +16,15 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import smtplib
|
||||
import threading
|
||||
import socket
|
||||
import mimetypes
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
from email.MIMEBase import MIMEBase
|
||||
from email.MIMEMultipart import MIMEMultipart
|
||||
from email.MIMEText import MIMEText
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
from email.mime.base import MIMEBase
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
from io import StringIO
|
||||
from email.message import EmailMessage
|
||||
from email.utils import parseaddr
|
||||
|
||||
|
||||
from email import encoders
|
||||
|
@ -45,6 +36,7 @@ from cps.services import gmail
|
|||
from cps import logger, config
|
||||
|
||||
from cps import gdriveutils
|
||||
import uuid
|
||||
|
||||
log = logger.create()
|
||||
|
||||
|
@ -130,20 +122,37 @@ class TaskEmail(CalibreTask):
|
|||
self.asyncSMTP = None
|
||||
self.results = dict()
|
||||
|
||||
# from calibre code:
|
||||
# https://github.com/kovidgoyal/calibre/blob/731ccd92a99868de3e2738f65949f19768d9104c/src/calibre/utils/smtp.py#L60
|
||||
def get_msgid_domain(self):
|
||||
try:
|
||||
# Parse out the address from the From line, and then the domain from that
|
||||
from_email = parseaddr(self.settings["mail_from"])[1]
|
||||
msgid_domain = from_email.partition('@')[2].strip()
|
||||
# This can sometimes sneak through parseaddr if the input is malformed
|
||||
msgid_domain = msgid_domain.rstrip('>').strip()
|
||||
except Exception:
|
||||
msgid_domain = ''
|
||||
return msgid_domain or 'calibre-web.com'
|
||||
|
||||
def prepare_message(self):
|
||||
message = MIMEMultipart()
|
||||
message['to'] = self.recipent
|
||||
message['from'] = self.settings["mail_from"]
|
||||
message['subject'] = self.subject
|
||||
message['Message-Id'] = make_msgid('calibre-web')
|
||||
message = EmailMessage()
|
||||
# message = MIMEMultipart()
|
||||
message['From'] = self.settings["mail_from"]
|
||||
message['To'] = self.recipent
|
||||
message['Subject'] = self.subject
|
||||
message['Date'] = formatdate(localtime=True)
|
||||
text = self.text
|
||||
msg = MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8')
|
||||
message.attach(msg)
|
||||
message['Message-Id'] = "{}@{}".format(uuid.uuid4(), self.get_msgid_domain()) # f"<{uuid.uuid4()}@{get_msgid_domain(from_)}>" # make_msgid('calibre-web')
|
||||
message.set_content(self.text.encode('UTF-8'), "text", "plain")
|
||||
if self.attachment:
|
||||
result = self._get_attachment(self.filepath, self.attachment)
|
||||
if result:
|
||||
message.attach(result)
|
||||
data = self._get_attachment(self.filepath, self.attachment)
|
||||
if data:
|
||||
# Set mimetype
|
||||
content_type, encoding = mimetypes.guess_type(self.attachment)
|
||||
if content_type is None or encoding is not None:
|
||||
content_type = 'application/octet-stream'
|
||||
main_type, sub_type = content_type.split('/', 1)
|
||||
message.add_attachment(data, maintype=main_type, subtype=sub_type, filename=self.attachment)
|
||||
else:
|
||||
self._handleError(u"Attachment not found")
|
||||
return
|
||||
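The mail task drops the old MIMEMultipart plumbing in favour of the modern `email.message.EmailMessage` API: the attachment's MIME type is looked up explicitly and the Message-Id domain is parsed out of the sender address. A condensed standalone sketch of that construction (names and the fallback domain are illustrative):

```python
import mimetypes
import uuid
from email.message import EmailMessage
from email.utils import formatdate, parseaddr

def build_mail(sender, recipient, subject, body, attachment_path=None):
    """Assemble a message roughly the way the task above does."""
    msg = EmailMessage()
    msg['From'] = sender
    msg['To'] = recipient
    msg['Subject'] = subject
    msg['Date'] = formatdate(localtime=True)
    # derive the Message-Id domain from the sender, falling back to a constant
    domain = parseaddr(sender)[1].partition('@')[2].strip() or 'example.com'
    msg['Message-Id'] = "<{}@{}>".format(uuid.uuid4(), domain)
    msg.set_content(body)
    if attachment_path:
        ctype, encoding = mimetypes.guess_type(attachment_path)
        if ctype is None or encoding is not None:
            ctype = 'application/octet-stream'
        maintype, subtype = ctype.split('/', 1)
        with open(attachment_path, 'rb') as fh:
            msg.add_attachment(fh.read(), maintype=maintype,
                               subtype=subtype, filename=attachment_path)
    return msg
```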
|
@ -158,10 +167,10 @@ class TaskEmail(CalibreTask):
|
|||
else:
|
||||
self.send_gmail_email(msg)
|
||||
except MemoryError as e:
|
||||
log.debug_or_exception(e, stacklevel=3)
|
||||
log.error_or_exception(e, stacklevel=3)
|
||||
self._handleError(u'MemoryError sending e-mail: {}'.format(str(e)))
|
||||
except (smtplib.SMTPException, smtplib.SMTPAuthenticationError) as e:
|
||||
log.debug_or_exception(e, stacklevel=3)
|
||||
log.error_or_exception(e, stacklevel=3)
|
||||
if hasattr(e, "smtp_error"):
|
||||
text = e.smtp_error.decode('utf-8').replace("\n", '. ')
|
||||
elif hasattr(e, "message"):
|
||||
|
@ -172,10 +181,10 @@ class TaskEmail(CalibreTask):
|
|||
text = ''
|
||||
self._handleError(u'Smtplib Error sending e-mail: {}'.format(text))
|
||||
except (socket.error) as e:
|
||||
log.debug_or_exception(e, stacklevel=3)
|
||||
log.error_or_exception(e, stacklevel=3)
|
||||
self._handleError(u'Socket Error sending e-mail: {}'.format(e.strerror))
|
||||
except Exception as ex:
|
||||
log.debug_or_exception(ex, stacklevel=3)
|
||||
log.error_or_exception(ex, stacklevel=3)
|
||||
self._handleError(u'Error sending e-mail: {}'.format(ex))
|
||||
|
||||
def send_standard_email(self, msg):
|
||||
|
@ -226,15 +235,15 @@ class TaskEmail(CalibreTask):
|
|||
self._progress = x
|
||||
|
||||
@classmethod
|
||||
def _get_attachment(cls, bookpath, filename):
|
||||
def _get_attachment(cls, book_path, filename):
|
||||
"""Get file as MIMEBase message"""
|
||||
calibre_path = config.config_calibre_dir
|
||||
if config.config_use_google_drive:
|
||||
df = gdriveutils.getFileFromEbooksFolder(bookpath, filename)
|
||||
df = gdriveutils.getFileFromEbooksFolder(book_path, filename)
|
||||
if df:
|
||||
datafile = os.path.join(calibre_path, bookpath, filename)
|
||||
if not os.path.exists(os.path.join(calibre_path, bookpath)):
|
||||
os.makedirs(os.path.join(calibre_path, bookpath))
|
||||
datafile = os.path.join(calibre_path, book_path, filename)
|
||||
if not os.path.exists(os.path.join(calibre_path, book_path)):
|
||||
os.makedirs(os.path.join(calibre_path, book_path))
|
||||
df.GetContentFile(datafile)
|
||||
else:
|
||||
return None
|
||||
|
@ -244,23 +253,14 @@ class TaskEmail(CalibreTask):
|
|||
os.remove(datafile)
|
||||
else:
|
||||
try:
|
||||
file_ = open(os.path.join(calibre_path, bookpath, filename), 'rb')
|
||||
file_ = open(os.path.join(calibre_path, book_path, filename), 'rb')
|
||||
data = file_.read()
|
||||
file_.close()
|
||||
except IOError as e:
|
||||
log.debug_or_exception(e, stacklevel=3)
|
||||
log.error_or_exception(e, stacklevel=3)
|
||||
log.error(u'The requested file could not be read. Maybe wrong permissions?')
|
||||
return None
|
||||
# Set mimetype
|
||||
content_type, encoding = mimetypes.guess_type(filename)
|
||||
if content_type is None or encoding is not None:
|
||||
content_type = 'application/octet-stream'
|
||||
main_type, sub_type = content_type.split('/', 1)
|
||||
attachment = MIMEBase(main_type, sub_type)
|
||||
attachment.set_payload(data)
|
||||
encoders.encode_base64(attachment)
|
||||
attachment.add_header('Content-Disposition', 'attachment', filename=filename)
|
||||
return attachment
|
||||
return data
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
|
|
@ -47,7 +47,7 @@
|
|||
<p title="{{ entry.title }}" class="title">{{entry.title|shortentitle}}</p>
|
||||
</a>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
{% if loop.index > g.config_authors_max and g.config_authors_max != 0 %}
|
||||
{% if not loop.first %}
|
||||
<span class="author-hidden-divider">&</span>
|
||||
|
@ -110,7 +110,7 @@
|
|||
<div class="meta">
|
||||
<p title="{{ entry.title }}" class="title">{{entry.title|shortentitle}}</p>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
{% if loop.index > g.config_authors_max and g.config_authors_max != 0 %}
|
||||
<a class="author-name author-hidden" href="https://www.goodreads.com/author/show/{{ author.gid }}" target="_blank" rel="noopener">{{author.name.replace('|',',')}}</a>
|
||||
{% if loop.last %}
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
|
||||
{% if source_formats|length > 0 and conversion_formats|length > 0 %}
|
||||
<div class="text-center more-stuff"><h4>{{_('Convert book format:')}}</h4>
|
||||
<form class="padded-bottom" action="{{ url_for('editbook.convert_bookformat', book_id=book.id) }}" method="post" id="book_convert_frm">
|
||||
<form class="padded-bottom" action="{{ url_for('edit-book.convert_bookformat', book_id=book.id) }}" method="post" id="book_convert_frm">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<div class="form-group">
|
||||
<div class="text-left">
|
||||
|
@ -49,7 +49,7 @@
|
|||
{% endif %}
|
||||
|
||||
</div>
|
||||
<form role="form" action="{{ url_for('editbook.edit_book', book_id=book.id) }}" method="post" enctype="multipart/form-data" id="book_edit_frm">
|
||||
<form role="form" action="{{ url_for('edit-book.edit_book', book_id=book.id) }}" method="post" enctype="multipart/form-data" id="book_edit_frm">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<div class="col-sm-9 col-xs-12">
|
||||
<div class="form-group">
|
||||
|
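The template changes in this and the following files are mechanical: `url_for()` endpoints are namespaced by the Blueprint's registered name, so once the edit-book blueprint is (presumably) registered as `'edit-book'`, every former `editbook.*` endpoint string has to follow. A toy illustration:

```python
from flask import Blueprint, Flask, url_for

app = Flask(__name__)
editbook = Blueprint('edit-book', __name__)   # the registered name drives url_for()

@editbook.route('/admin/book/<int:book_id>')
def edit_book(book_id):
    return "editing book {}".format(book_id)

app.register_blueprint(editbook)

with app.test_request_context():
    # endpoints are '<blueprint name>.<view name>', hence 'edit-book.edit_book'
    assert url_for('edit-book.edit_book', book_id=5) == '/admin/book/5'
```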
|
|
@ -6,7 +6,7 @@
|
|||
data-escape="true"
|
||||
{% if g.user.role_edit() %}
|
||||
data-editable-type="text"
|
||||
data-editable-url="{{ url_for('editbook.edit_list_book', param=parameter)}}"
|
||||
data-editable-url="{{ url_for('edit-book.edit_list_book', param=parameter)}}"
|
||||
data-editable-title="{{ edit_text }}"
|
||||
data-edit="true"
|
||||
{% if validate %}data-edit-validate="{{ _('This Field is Required') }}" {% endif %}
|
||||
|
@ -66,30 +66,30 @@
|
|||
{{ text_table_row('authors', _('Enter Authors'),_('Authors'), true, true) }}
|
||||
{{ text_table_row('tags', _('Enter Categories'),_('Categories'), false, true) }}
|
||||
{{ text_table_row('series', _('Enter Series'),_('Series'), false, true) }}
|
||||
<th data-field="series_index" id="series_index" data-visible="{{visiblility.get('series_index')}}" data-edit-validate="{{ _('This Field is Required') }}" data-sortable="true" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="0.01" data-editable-min="0" data-editable-url="{{ url_for('editbook.edit_list_book', param='series_index')}}" data-edit="true" data-editable-title="{{_('Enter Title')}}"{% endif %}>{{_('Series Index')}}</th>
|
||||
<th data-field="series_index" id="series_index" data-visible="{{visiblility.get('series_index')}}" data-edit-validate="{{ _('This Field is Required') }}" data-sortable="true" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="0.01" data-editable-min="0" data-editable-url="{{ url_for('edit-book.edit_list_book', param='series_index')}}" data-edit="true" data-editable-title="{{_('Enter Title')}}"{% endif %}>{{_('Series Index')}}</th>
|
||||
{{ text_table_row('languages', _('Enter Languages'),_('Languages'), false, true) }}
|
||||
<!--th data-field="pubdate" data-type="date" data-visible="{{visiblility.get('pubdate')}}" data-viewformat="dd.mm.yyyy" id="pubdate" data-sortable="true">{{_('Publishing Date')}}</th-->
|
||||
{{ text_table_row('publishers', _('Enter Publishers'),_('Publishers'), false, true) }}
|
||||
<th data-field="comments" id="comments" data-escape="true" data-editable-mode="popup" data-visible="{{visiblility.get('comments')}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="wysihtml5" data-editable-url="{{ url_for('editbook.edit_list_book', param='comments')}}" data-edit="true" data-editable-title="{{_('Enter comments')}}"{% endif %}>{{_('Comments')}}</th>
|
||||
<th data-field="comments" id="comments" data-escape="true" data-editable-mode="popup" data-visible="{{visiblility.get('comments')}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="wysihtml5" data-editable-url="{{ url_for('edit-book.edit_list_book', param='comments')}}" data-edit="true" data-editable-title="{{_('Enter comments')}}"{% endif %}>{{_('Comments')}}</th>
|
||||
{% if g.user.check_visibility(32768) %}
|
||||
{{ book_checkbox_row('is_archived', _('Archiv Status'), false)}}
|
||||
{% endif %}
|
||||
{{ book_checkbox_row('read_status', _('Read Status'), false)}}
|
||||
{% for c in cc %}
|
||||
{% if c.datatype == "int" %}
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="1" data-editable-url="{{ url_for('editbook.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="1" data-editable-url="{{ url_for('edit-book.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
{% elif c.datatype == "rating" %}
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-formatter="ratingFormatter" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="0.5" data-editable-step="1" data-editable-min="1" data-editable-max="5" data-editable-url="{{ url_for('editbook.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-formatter="ratingFormatter" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="0.5" data-editable-step="1" data-editable-min="1" data-editable-max="5" data-editable-url="{{ url_for('edit-book.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
{% elif c.datatype == "float" %}
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="0.01" data-editable-url="{{ url_for('editbook.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="number" data-editable-placeholder="1" data-editable-step="0.01" data-editable-url="{{ url_for('edit-book.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
{% elif c.datatype == "enumeration" %}
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="select" data-editable-source={{ url_for('editbook.table_get_custom_enum', c_id=c.id) }} data-editable-url="{{ url_for('editbook.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="select" data-editable-source={{ url_for('edit-book.table_get_custom_enum', c_id=c.id) }} data-editable-url="{{ url_for('edit-book.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
{% elif c.datatype in ["datetime"] %}
|
||||
<!-- missing -->
|
||||
{% elif c.datatype == "text" %}
|
||||
{{ text_table_row('custom_column_' + c.id|string, _('Enter ') + c.name, c.name, false, false) }}
|
||||
{% elif c.datatype == "comments" %}
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-escape="true" data-editable-mode="popup" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="wysihtml5" data-editable-url="{{ url_for('editbook.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
<th data-field="custom_column_{{ c.id|string }}" id="custom_column_{{ c.id|string }}" data-escape="true" data-editable-mode="popup" data-visible="{{visiblility.get('custom_column_'+ c.id|string)}}" data-sortable="false" {% if g.user.role_edit() %} data-editable-type="wysihtml5" data-editable-url="{{ url_for('edit-book.edit_list_book', param='custom_column_'+ c.id|string)}}" data-edit="true" data-editable-title="{{_('Enter ') + c.name}}"{% endif %}>{{c.name}}</th>
|
||||
{% elif c.datatype == "bool" %}
|
||||
{{ book_checkbox_row('custom_column_' + c.id|string, c.name, false)}}
|
||||
{% else %}
|
||||
|
|
|
@ -100,7 +100,7 @@
|
|||
</div>
|
||||
<h2 id="title">{{entry.title}}</h2>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
<a href="{{url_for('web.books_list', data='author', sort_param='stored', book_id=author.id ) }}">{{author.name.replace('|',',')}}</a>
|
||||
{% if not loop.last %}
|
||||
&
|
||||
|
@ -139,7 +139,7 @@
|
|||
<p>
|
||||
<span class="glyphicon glyphicon-link"></span>
|
||||
{% for identifier in entry.identifiers %}
|
||||
<a href="{{identifier}}" target="_blank" class="btn btn-xs btn-success" role="button">{{identifier.formatType()}}</a>
|
||||
<a href="{{identifier}}" target="_blank" class="btn btn-xs btn-success" role="button">{{identifier.format_type()}}</a>
|
||||
{%endfor%}
|
||||
</p>
|
||||
</div>
|
||||
|
@ -296,7 +296,7 @@
|
|||
{% if g.user.role_edit() %}
|
||||
<div class="btn-toolbar" role="toolbar">
|
||||
<div class="btn-group" role="group" aria-label="Edit/Delete book">
|
||||
<a href="{{ url_for('editbook.edit_book', book_id=entry.id) }}" class="btn btn-sm btn-primary" id="edit_book" role="button"><span class="glyphicon glyphicon-edit"></span> {{_('Edit Metadata')}}</a>
|
||||
<a href="{{ url_for('edit-book.edit_book', book_id=entry.id) }}" class="btn btn-sm btn-primary" id="edit_book" role="button"><span class="glyphicon glyphicon-edit"></span> {{_('Edit Metadata')}}</a>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
<p title="{{ entry.title }}" class="title">{{entry.title|shortentitle}}</p>
|
||||
</a>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
{% if loop.index > g.config_authors_max and g.config_authors_max != 0 %}
|
||||
{% if not loop.first %}
|
||||
<span class="author-hidden-divider">&</span>
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
<p title="{{entry.title}}" class="title">{{entry.title|shortentitle}}</p>
|
||||
</a>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
{% if loop.index > g.config_authors_max and g.config_authors_max != 0 %}
|
||||
{% if not loop.first %}
|
||||
<span class="author-hidden-divider">&</span>
|
||||
|
@ -102,7 +102,7 @@
|
|||
<p title="{{ entry.title }}" class="title">{{entry.title|shortentitle}}</p>
|
||||
</a>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
{% if loop.index > g.config_authors_max and g.config_authors_max != 0 %}
|
||||
{% if not loop.first %}
|
||||
<span class="author-hidden-divider">&</span>
|
||||
|
|
|
@ -61,7 +61,7 @@
|
|||
{% if g.user.is_authenticated or g.allow_anonymous %}
|
||||
{% if g.user.role_upload() and g.allow_upload %}
|
||||
<li>
|
||||
<form id="form-upload" class="navbar-form" action="{{ url_for('editbook.upload') }}" data-title="{{_('Uploading...')}}" data-footer="{{_('Close')}}" data-failed="{{_('Error')}}" data-message="{{_('Upload done, processing, please wait...')}}" method="post" enctype="multipart/form-data">
|
||||
<form id="form-upload" class="navbar-form" action="{{ url_for('edit-book.upload') }}" data-title="{{_('Uploading...')}}" data-footer="{{_('Close')}}" data-failed="{{_('Error')}}" data-message="{{_('Upload done, processing, please wait...')}}" method="post" enctype="multipart/form-data">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}">
|
||||
<div class="form-group">
|
||||
<span class="btn btn-default btn-file">{{_('Upload')}}<input id="btn-upload" name="btn-upload"
|
||||
|
|
|
@ -105,7 +105,7 @@
|
|||
|
||||
<div class="sm2-playlist-wrapper">
|
||||
<ul class="sm2-playlist-bd">
|
||||
<li><a href="{{ url_for('web.serve_book', book_id=mp3file,book_format=audioformat)}}"><b>{% for author in entry.authors %}{{author.name.replace('|',',')}}
|
||||
<li><a href="{{ url_for('web.serve_book', book_id=mp3file,book_format=audioformat)}}"><b>{% for author in entry.ordered_authors %}{{author.name.replace('|',',')}}
|
||||
{% if not loop.last %} & {% endif %} {% endfor %}</b> - {{entry.title}}</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
@ -134,7 +134,7 @@ window.calibre = {
|
|||
filePath: "{{ url_for('static', filename='js/libs/') }}",
|
||||
cssPath: "{{ url_for('static', filename='css/') }}",
|
||||
bookUrl: "{{ url_for('static', filename=mp3file) }}/",
|
||||
bookmarkUrl: "{{ url_for('web.bookmark', book_id=mp3file, book_format=audioformat.upper()) }}",
|
||||
bookmarkUrl: "{{ url_for('web.set_bookmark', book_id=mp3file, book_format=audioformat.upper()) }}",
|
||||
bookmark: "{{ bookmark.bookmark_key if bookmark != None }}",
|
||||
useBookmarks: "{{ g.user.is_authenticated | tojson }}"
|
||||
};
|
||||
|
|
|
@ -86,7 +86,7 @@
|
|||
window.calibre = {
|
||||
filePath: "{{ url_for('static', filename='js/libs/') }}",
|
||||
cssPath: "{{ url_for('static', filename='css/') }}",
|
||||
bookmarkUrl: "{{ url_for('web.bookmark', book_id=bookid, book_format='EPUB') }}",
|
||||
bookmarkUrl: "{{ url_for('web.set_bookmark', book_id=bookid, book_format='EPUB') }}",
|
||||
bookUrl: "{{ url_for('web.serve_book', book_id=bookid, book_format='epub', anyname='file.epub') }}",
|
||||
bookmark: "{{ bookmark.bookmark_key if bookmark != None }}",
|
||||
useBookmarks: "{{ g.user.is_authenticated | tojson }}"
|
||||
|
|
|
@ -45,7 +45,7 @@
|
|||
<p title="{{entry.title}}" class="title">{{entry.title|shortentitle}}</p>
|
||||
</a>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
{% if loop.index > g.config_authors_max and g.config_authors_max != 0 %}
|
||||
{% if not loop.first %}
|
||||
<span class="author-hidden-divider">&</span>
|
||||
|
|
|
@ -37,7 +37,7 @@
|
|||
<div class="meta">
|
||||
<p title="{{entry.title}}" class="title">{{entry.title|shortentitle}}</p>
|
||||
<p class="author">
|
||||
{% for author in entry.authors %}
|
||||
{% for author in entry.ordered_authors %}
|
||||
<a href="{{url_for('web.books_list', data='author', sort_param='new', book_id=author.id) }}">{{author.name.replace('|',',')}}</a>
|
||||
{% if not loop.last %}
|
||||
&
|
||||
|
|
|
@@ -888,5 +888,5 @@ def session_commit(success=None, _session=None):
             log.info(success)
     except (exc.OperationalError, exc.InvalidRequestError) as e:
         s.rollback()
-        log.debug_or_exception(e)
+        log.error_or_exception(e)
     return ""
@ -117,7 +117,7 @@ class Updater(threading.Thread):
|
|||
except (IOError, OSError) as ex:
|
||||
self.status = 12
|
||||
log.error(u'Possible Reason for error: update file could not be saved in temp dir')
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
self.pause()
|
||||
return False
|
||||
|
||||
|
@ -214,7 +214,7 @@ class Updater(threading.Thread):
|
|||
if not os.path.exists(dst_dir):
|
||||
try:
|
||||
os.makedirs(dst_dir)
|
||||
log.debug('Create directory: {}', dst_dir)
|
||||
log.debug('Create directory: {}'.format(dst_dir))
|
||||
except OSError as e:
|
||||
log.error('Failed creating folder: {} with error {}'.format(dst_dir, e))
|
||||
if change_permissions:
|
||||
|
@ -233,7 +233,7 @@ class Updater(threading.Thread):
|
|||
permission = os.stat(dst_file)
|
||||
try:
|
||||
os.remove(dst_file)
|
||||
log.debug('Remove file before copy: %s', dst_file)
|
||||
log.debug('Remove file before copy: {}'.format(dst_file))
|
||||
except OSError as e:
|
||||
log.error('Failed removing file: {} with error {}'.format(dst_file, e))
|
||||
else:
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
|
||||
import base64
|
||||
import binascii
|
||||
from functools import wraps
|
||||
|
||||
from sqlalchemy.sql.expression import func
|
||||
from werkzeug.security import check_password_hash
|
||||
|
@ -25,10 +26,6 @@ from flask_login import login_required, login_user
|
|||
|
||||
from . import lm, ub, config, constants, services
|
||||
|
||||
try:
|
||||
from functools import wraps
|
||||
except ImportError:
|
||||
pass # We're not using Python 3
|
||||
|
||||
def login_required_if_no_ano(func):
|
||||
@wraps(func)
|
||||
|
|
cps/web.py
@ -26,9 +26,10 @@ import json
|
|||
import mimetypes
|
||||
import chardet # dependency of requests
|
||||
import copy
|
||||
from functools import wraps
|
||||
|
||||
from babel.dates import format_date
|
||||
from babel import Locale as LC
|
||||
from babel import Locale
|
||||
from flask import Blueprint, jsonify
|
||||
from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for
|
||||
from flask import session as flask_session
|
||||
|
@ -67,15 +68,12 @@ feature_support = {
|
|||
|
||||
try:
|
||||
from .oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
|
||||
|
||||
feature_support['oauth'] = True
|
||||
except ImportError:
|
||||
feature_support['oauth'] = False
|
||||
oauth_check = {}
|
||||
|
||||
try:
|
||||
from functools import wraps
|
||||
except ImportError:
|
||||
pass # We're not using Python 3
|
||||
register_user_with_oauth = logout_oauth_user = get_oauth_status = None
|
||||
|
||||
try:
|
||||
from natsort import natsorted as sort
|
||||
|
@@ -85,8 +83,11 @@ except ImportError:

 @app.after_request
 def add_security_headers(resp):
-    resp.headers['Content-Security-Policy'] = "default-src 'self'" + ''.join([' '+host for host in config.config_trustedhosts.strip().split(',')]) + " 'unsafe-inline' 'unsafe-eval'; font-src 'self' data:; img-src 'self' data:"
-    if request.endpoint == "editbook.edit_book" or config.config_use_google_drive:
+    csp = "default-src 'self'"
+    csp += ''.join([' ' + host for host in config.config_trustedhosts.strip().split(',')])
+    csp += " 'unsafe-inline' 'unsafe-eval'; font-src 'self' data:; img-src 'self' data:"
+    resp.headers['Content-Security-Policy'] = csp
+    if request.endpoint == "edit-book.edit_book" or config.config_use_google_drive:
         resp.headers['Content-Security-Policy'] += " *"
     elif request.endpoint == "web.read_book":
         resp.headers['Content-Security-Policy'] += " blob:;style-src-elem 'self' blob: 'unsafe-inline';"
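As a quick illustration of the refactored header assembly above, with a hypothetical trusted-hosts value (only the string building is taken from the hunk; the hosts are made up):

# Illustrative sketch only.
hosts = "example.org,cdn.example.org"   # hypothetical config.config_trustedhosts value
csp = "default-src 'self'"
csp += ''.join([' ' + host for host in hosts.strip().split(',')])
csp += " 'unsafe-inline' 'unsafe-eval'; font-src 'self' data:; img-src 'self' data:"
print(csp)
# default-src 'self' example.org cdn.example.org 'unsafe-inline' 'unsafe-eval'; font-src 'self' data:; img-src 'self' data: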
@ -96,6 +97,7 @@ def add_security_headers(resp):
|
|||
resp.headers['Strict-Transport-Security'] = 'max-age=31536000;'
|
||||
return resp
|
||||
|
||||
|
||||
web = Blueprint('web', __name__)
|
||||
log = logger.create()
|
||||
|
||||
|
@ -122,6 +124,7 @@ def viewer_required(f):
|
|||
|
||||
return inner
|
||||
|
||||
|
||||
# ################################### data provider functions #########################################################
|
||||
|
||||
|
||||
|
@@ -134,7 +137,7 @@ def get_email_status_json():

 @web.route("/ajax/bookmark/<int:book_id>/<book_format>", methods=['POST'])
 @login_required
-def bookmark(book_id, book_format):
+def set_bookmark(book_id, book_format):
     bookmark_key = request.form["bookmark"]
     ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
                                               ub.Bookmark.book_id == book_id,
@ -143,11 +146,11 @@ def bookmark(book_id, book_format):
|
|||
ub.session_commit()
|
||||
return "", 204
|
||||
|
||||
lbookmark = ub.Bookmark(user_id=current_user.id,
|
||||
book_id=book_id,
|
||||
format=book_format,
|
||||
bookmark_key=bookmark_key)
|
||||
ub.session.merge(lbookmark)
|
||||
l_bookmark = ub.Bookmark(user_id=current_user.id,
|
||||
book_id=book_id,
|
||||
format=book_format,
|
||||
bookmark_key=bookmark_key)
|
||||
ub.session.merge(l_bookmark)
|
||||
ub.session_commit("Bookmark for user {} in book {} created".format(current_user.id, book_id))
|
||||
return "", 201
|
||||
|
||||
|
@@ -165,7 +168,7 @@ def toggle_read(book_id):
 @web.route("/ajax/togglearchived/<int:book_id>", methods=['POST'])
 @login_required
 def toggle_archived(book_id):
-    is_archived = change_archived_books(book_id, message="Book {} archivebit toggled".format(book_id))
+    is_archived = change_archived_books(book_id, message="Book {} archive bit toggled".format(book_id))
     if is_archived:
         remove_synced_book(book_id)
     return ""
@ -233,6 +236,7 @@ def get_comic_book(book_id, book_format, page):
|
|||
return "", 204
|
||||
'''
|
||||
|
||||
|
||||
# ################################### Typeahead ##################################################################
|
||||
|
||||
|
||||
|
@ -300,43 +304,49 @@ def get_matching_tags():
|
|||
return json_dumps
|
||||
|
||||
|
||||
def get_sort_function(sort, data):
|
||||
def generate_char_list(data_colum, db_link):
|
||||
return (calibre_db.session.query(func.upper(func.substr(data_colum, 1, 1)).label('char'))
|
||||
.join(db_link).join(db.Books).filter(calibre_db.common_filters())
|
||||
.group_by(func.upper(func.substr(data_colum, 1, 1))).all())
|
||||
|
||||
|
||||
def get_sort_function(sort_param, data):
|
||||
order = [db.Books.timestamp.desc()]
|
||||
if sort == 'stored':
|
||||
sort = current_user.get_view_property(data, 'stored')
|
||||
if sort_param == 'stored':
|
||||
sort_param = current_user.get_view_property(data, 'stored')
|
||||
else:
|
||||
current_user.set_view_property(data, 'stored', sort)
|
||||
if sort == 'pubnew':
|
||||
current_user.set_view_property(data, 'stored', sort_param)
|
||||
if sort_param == 'pubnew':
|
||||
order = [db.Books.pubdate.desc()]
|
||||
if sort == 'pubold':
|
||||
if sort_param == 'pubold':
|
||||
order = [db.Books.pubdate]
|
||||
if sort == 'abc':
|
||||
if sort_param == 'abc':
|
||||
order = [db.Books.sort]
|
||||
if sort == 'zyx':
|
||||
if sort_param == 'zyx':
|
||||
order = [db.Books.sort.desc()]
|
||||
if sort == 'new':
|
||||
if sort_param == 'new':
|
||||
order = [db.Books.timestamp.desc()]
|
||||
if sort == 'old':
|
||||
if sort_param == 'old':
|
||||
order = [db.Books.timestamp]
|
||||
if sort == 'authaz':
|
||||
if sort_param == 'authaz':
|
||||
order = [db.Books.author_sort.asc(), db.Series.name, db.Books.series_index]
|
||||
if sort == 'authza':
|
||||
if sort_param == 'authza':
|
||||
order = [db.Books.author_sort.desc(), db.Series.name.desc(), db.Books.series_index.desc()]
|
||||
if sort == 'seriesasc':
|
||||
if sort_param == 'seriesasc':
|
||||
order = [db.Books.series_index.asc()]
|
||||
if sort == 'seriesdesc':
|
||||
if sort_param == 'seriesdesc':
|
||||
order = [db.Books.series_index.desc()]
|
||||
if sort == 'hotdesc':
|
||||
if sort_param == 'hotdesc':
|
||||
order = [func.count(ub.Downloads.book_id).desc()]
|
||||
if sort == 'hotasc':
|
||||
if sort_param == 'hotasc':
|
||||
order = [func.count(ub.Downloads.book_id).asc()]
|
||||
if sort is None:
|
||||
sort = "new"
|
||||
return order, sort
|
||||
if sort_param is None:
|
||||
sort_param = "new"
|
||||
return order, sort_param
|
||||
|
||||
|
||||
def render_books_list(data, sort, book_id, page):
|
||||
order = get_sort_function(sort, data)
|
||||
def render_books_list(data, sort_param, book_id, page):
|
||||
order = get_sort_function(sort_param, data)
|
||||
if data == "rated":
|
||||
return render_rated_books(page, book_id, order=order)
|
||||
elif data == "discover":
|
||||
|
@ -376,7 +386,7 @@ def render_books_list(data, sort, book_id, page):
|
|||
else:
|
||||
website = data or "newest"
|
||||
entries, random, pagination = calibre_db.fill_indexpage(page, 0, db.Books, True, order[0],
|
||||
False, 0,
|
||||
False, 0,
|
||||
db.books_series_link,
|
||||
db.Books.id == db.books_series_link.c.book,
|
||||
db.Series)
|
||||
|
@ -410,12 +420,13 @@ def render_discover_books(page, book_id):
|
|||
else:
|
||||
abort(404)
|
||||
|
||||
|
||||
def render_hot_books(page, order):
|
||||
if current_user.check_visibility(constants.SIDEBAR_HOT):
|
||||
if order[1] not in ['hotasc', 'hotdesc']:
|
||||
# Unary expression comparsion only working (for this expression) in sqlalchemy 1.4+
|
||||
#if not (order[0][0].compare(func.count(ub.Downloads.book_id).desc()) or
|
||||
# order[0][0].compare(func.count(ub.Downloads.book_id).asc())):
|
||||
# Unary expression comparsion only working (for this expression) in sqlalchemy 1.4+
|
||||
# if not (order[0][0].compare(func.count(ub.Downloads.book_id).desc()) or
|
||||
# order[0][0].compare(func.count(ub.Downloads.book_id).asc())):
|
||||
order = [func.count(ub.Downloads.book_id).desc()], 'hotdesc'
|
||||
if current_user.show_detail_random():
|
||||
random = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
|
||||
|
@ -423,19 +434,19 @@ def render_hot_books(page, order):
|
|||
else:
|
||||
random = false()
|
||||
off = int(int(config.config_books_per_page) * (page - 1))
|
||||
all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id))\
|
||||
all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)) \
|
||||
.order_by(*order[0]).group_by(ub.Downloads.book_id)
|
||||
hot_books = all_books.offset(off).limit(config.config_books_per_page)
|
||||
entries = list()
|
||||
for book in hot_books:
|
||||
downloadBook = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).filter(
|
||||
download_book = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()).filter(
|
||||
db.Books.id == book.Downloads.book_id).first()
|
||||
if downloadBook:
|
||||
entries.append(downloadBook)
|
||||
if download_book:
|
||||
entries.append(download_book)
|
||||
else:
|
||||
ub.delete_download(book.Downloads.book_id)
|
||||
numBooks = entries.__len__()
|
||||
pagination = Pagination(page, config.config_books_per_page, numBooks)
|
||||
num_books = entries.__len__()
|
||||
pagination = Pagination(page, config.config_books_per_page, num_books)
|
||||
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
|
||||
title=_(u"Hot Books (Most Downloaded)"), page="hot", order=order[1])
|
||||
else:
|
||||
|
@ -465,8 +476,8 @@ def render_downloaded_books(page, order, user_id):
|
|||
db.Series,
|
||||
ub.Downloads, db.Books.id == ub.Downloads.book_id)
|
||||
for book in entries:
|
||||
if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()) \
|
||||
.filter(db.Books.id == book.id).first():
|
||||
if not calibre_db.session.query(db.Books).\
|
||||
filter(calibre_db.common_filters()).filter(db.Books.id == book.id).first():
|
||||
ub.delete_download(book.id)
|
||||
user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
|
||||
return render_title_template('index.html',
|
||||
|
@ -474,7 +485,7 @@ def render_downloaded_books(page, order, user_id):
|
|||
entries=entries,
|
||||
pagination=pagination,
|
||||
id=user_id,
|
||||
title=_(u"Downloaded books by %(user)s",user=user.name),
|
||||
title=_(u"Downloaded books by %(user)s", user=user.name),
|
||||
page="download",
|
||||
order=order[1])
|
||||
else:
|
||||
|
@ -604,7 +615,7 @@ def render_language_books(page, name, order):
|
|||
|
||||
|
||||
def render_read_books(page, are_read, as_xml=False, order=None):
|
||||
sort = order[0] if order else []
|
||||
sort_param = order[0] if order else []
|
||||
if not config.config_read_column:
|
||||
if are_read:
|
||||
db_filter = and_(ub.ReadBook.user_id == int(current_user.id),
|
||||
|
@ -614,7 +625,7 @@ def render_read_books(page, are_read, as_xml=False, order=None):
|
|||
entries, random, pagination = calibre_db.fill_indexpage(page, 0,
|
||||
db.Books,
|
||||
db_filter,
|
||||
sort,
|
||||
sort_param,
|
||||
False, 0,
|
||||
db.books_series_link,
|
||||
db.Books.id == db.books_series_link.c.book,
|
||||
|
@ -629,7 +640,7 @@ def render_read_books(page, are_read, as_xml=False, order=None):
|
|||
entries, random, pagination = calibre_db.fill_indexpage(page, 0,
|
||||
db.Books,
|
||||
db_filter,
|
||||
sort,
|
||||
sort_param,
|
||||
False, 0,
|
||||
db.books_series_link,
|
||||
db.Books.id == db.books_series_link.c.book,
|
||||
|
@ -642,28 +653,27 @@ def render_read_books(page, are_read, as_xml=False, order=None):
|
|||
column=config.config_read_column),
|
||||
category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
# ToDo: Handle error Case for opds
|
||||
return [] # ToDo: Handle error Case for opds
|
||||
|
||||
if as_xml:
|
||||
return entries, pagination
|
||||
else:
|
||||
if are_read:
|
||||
name = _(u'Read Books') + ' (' + str(pagination.total_count) + ')'
|
||||
pagename = "read"
|
||||
page_name = "read"
|
||||
else:
|
||||
name = _(u'Unread Books') + ' (' + str(pagination.total_count) + ')'
|
||||
pagename = "unread"
|
||||
page_name = "unread"
|
||||
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
|
||||
title=name, page=pagename, order=order[1])
|
||||
title=name, page=page_name, order=order[1])
|
||||
|
||||
|
||||
def render_archived_books(page, sort):
|
||||
order = sort[0] or []
|
||||
archived_books = (
|
||||
ub.session.query(ub.ArchivedBook)
|
||||
.filter(ub.ArchivedBook.user_id == int(current_user.id))
|
||||
.filter(ub.ArchivedBook.is_archived == True)
|
||||
.all()
|
||||
)
|
||||
def render_archived_books(page, sort_param):
|
||||
order = sort_param[0] or []
|
||||
archived_books = (ub.session.query(ub.ArchivedBook)
|
||||
.filter(ub.ArchivedBook.user_id == int(current_user.id))
|
||||
.filter(ub.ArchivedBook.is_archived == True)
|
||||
.all())
|
||||
archived_book_ids = [archived_book.book_id for archived_book in archived_books]
|
||||
|
||||
archived_filter = db.Books.id.in_(archived_book_ids)
|
||||
|
@ -676,40 +686,40 @@ def render_archived_books(page, sort):
|
|||
False, 0)
|
||||
|
||||
name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')'
|
||||
pagename = "archived"
|
||||
page_name = "archived"
|
||||
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
|
||||
title=name, page=pagename, order=sort[1])
|
||||
title=name, page=page_name, order=sort_param[1])
|
||||
|
||||
|
||||
def render_prepare_search_form(cc):
|
||||
# prepare data for search-form
|
||||
tags = calibre_db.session.query(db.Tags)\
|
||||
.join(db.books_tags_link)\
|
||||
.join(db.Books)\
|
||||
tags = calibre_db.session.query(db.Tags) \
|
||||
.join(db.books_tags_link) \
|
||||
.join(db.Books) \
|
||||
.filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_tags_link.tag'))\
|
||||
.group_by(text('books_tags_link.tag')) \
|
||||
.order_by(db.Tags.name).all()
|
||||
series = calibre_db.session.query(db.Series)\
|
||||
.join(db.books_series_link)\
|
||||
.join(db.Books)\
|
||||
series = calibre_db.session.query(db.Series) \
|
||||
.join(db.books_series_link) \
|
||||
.join(db.Books) \
|
||||
.filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_series_link.series'))\
|
||||
.order_by(db.Series.name)\
|
||||
.group_by(text('books_series_link.series')) \
|
||||
.order_by(db.Series.name) \
|
||||
.filter(calibre_db.common_filters()).all()
|
||||
shelves = ub.session.query(ub.Shelf)\
|
||||
.filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id)))\
|
||||
shelves = ub.session.query(ub.Shelf) \
|
||||
.filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id))) \
|
||||
.order_by(ub.Shelf.name).all()
|
||||
extensions = calibre_db.session.query(db.Data)\
|
||||
.join(db.Books)\
|
||||
extensions = calibre_db.session.query(db.Data) \
|
||||
.join(db.Books) \
|
||||
.filter(calibre_db.common_filters()) \
|
||||
.group_by(db.Data.format)\
|
||||
.group_by(db.Data.format) \
|
||||
.order_by(db.Data.format).all()
|
||||
if current_user.filter_language() == u"all":
|
||||
languages = calibre_db.speaking_language()
|
||||
else:
|
||||
languages = None
|
||||
return render_title_template('search_form.html', tags=tags, languages=languages, extensions=extensions,
|
||||
series=series,shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch")
|
||||
series=series, shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch")
|
||||
|
||||
|
||||
def render_search_results(term, offset=None, order=None, limit=None):
|
||||
|
@ -718,7 +728,6 @@ def render_search_results(term, offset=None, order=None, limit=None):
|
|||
offset,
|
||||
order,
|
||||
limit,
|
||||
False,
|
||||
config.config_read_column,
|
||||
*join)
|
||||
return render_title_template('search.html',
|
||||
|
@ -767,40 +776,41 @@ def books_table():
|
|||
def list_books():
|
||||
off = int(request.args.get("offset") or 0)
|
||||
limit = int(request.args.get("limit") or config.config_books_per_page)
|
||||
search = request.args.get("search")
|
||||
sort = request.args.get("sort", "id")
|
||||
search_param = request.args.get("search")
|
||||
sort_param = request.args.get("sort", "id")
|
||||
order = request.args.get("order", "").lower()
|
||||
state = None
|
||||
join = tuple()
|
||||
|
||||
if sort == "state":
|
||||
if sort_param == "state":
|
||||
state = json.loads(request.args.get("state", "[]"))
|
||||
elif sort == "tags":
|
||||
elif sort_param == "tags":
|
||||
order = [db.Tags.name.asc()] if order == "asc" else [db.Tags.name.desc()]
|
||||
join = db.books_tags_link, db.Books.id == db.books_tags_link.c.book, db.Tags
|
||||
elif sort == "series":
|
||||
elif sort_param == "series":
|
||||
order = [db.Series.name.asc()] if order == "asc" else [db.Series.name.desc()]
|
||||
join = db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
|
||||
elif sort == "publishers":
|
||||
elif sort_param == "publishers":
|
||||
order = [db.Publishers.name.asc()] if order == "asc" else [db.Publishers.name.desc()]
|
||||
join = db.books_publishers_link, db.Books.id == db.books_publishers_link.c.book, db.Publishers
|
||||
elif sort == "authors":
|
||||
elif sort_param == "authors":
|
||||
order = [db.Authors.name.asc(), db.Series.name, db.Books.series_index] if order == "asc" \
|
||||
else [db.Authors.name.desc(), db.Series.name.desc(), db.Books.series_index.desc()]
|
||||
join = db.books_authors_link, db.Books.id == db.books_authors_link.c.book, db.Authors, \
|
||||
db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
|
||||
elif sort == "languages":
|
||||
join = db.books_authors_link, db.Books.id == db.books_authors_link.c.book, db.Authors, db.books_series_link, \
|
||||
db.Books.id == db.books_series_link.c.book, db.Series
|
||||
elif sort_param == "languages":
|
||||
order = [db.Languages.lang_code.asc()] if order == "asc" else [db.Languages.lang_code.desc()]
|
||||
join = db.books_languages_link, db.Books.id == db.books_languages_link.c.book, db.Languages
|
||||
elif order and sort in ["sort", "title", "authors_sort", "series_index"]:
|
||||
order = [text(sort + " " + order)]
|
||||
elif order and sort_param in ["sort", "title", "authors_sort", "series_index"]:
|
||||
order = [text(sort_param + " " + order)]
|
||||
elif not state:
|
||||
order = [db.Books.timestamp.desc()]
|
||||
|
||||
total_count = filtered_count = calibre_db.session.query(db.Books).filter(calibre_db.common_filters(allow_show_archived=True)).count()
|
||||
total_count = filtered_count = calibre_db.session.query(db.Books).filter(
|
||||
calibre_db.common_filters(allow_show_archived=True)).count()
|
||||
if state is not None:
|
||||
if search:
|
||||
books = calibre_db.search_query(search, config.config_read_column).all()
|
||||
if search_param:
|
||||
books = calibre_db.search_query(search_param, config.config_read_column).all()
|
||||
filtered_count = len(books)
|
||||
else:
|
||||
if not config.config_read_column:
|
||||
|
@ -810,6 +820,7 @@ def list_books():
|
|||
and_(ub.ReadBook.user_id == int(current_user.id),
|
||||
ub.ReadBook.book_id == db.Books.id)))
|
||||
else:
|
||||
read_column = ""
|
||||
try:
|
||||
read_column = db.cc_classes[config.config_read_column]
|
||||
books = (calibre_db.session.query(db.Books, read_column.value, ub.ArchivedBook.is_archived)
|
||||
|
@ -818,17 +829,16 @@ def list_books():
|
|||
except (KeyError, AttributeError):
|
||||
log.error("Custom Column No.%d is not existing in calibre database", read_column)
|
||||
# Skip linking read column and return None instead of read status
|
||||
books =calibre_db.session.query(db.Books, None, ub.ArchivedBook.is_archived)
|
||||
books = calibre_db.session.query(db.Books, None, ub.ArchivedBook.is_archived)
|
||||
books = (books.outerjoin(ub.ArchivedBook, and_(db.Books.id == ub.ArchivedBook.book_id,
|
||||
int(current_user.id) == ub.ArchivedBook.user_id))
|
||||
int(current_user.id) == ub.ArchivedBook.user_id))
|
||||
.filter(calibre_db.common_filters(allow_show_archived=True)).all())
|
||||
entries = calibre_db.get_checkbox_sorted(books, state, off, limit, order, True)
|
||||
elif search:
|
||||
entries, filtered_count, __ = calibre_db.get_search_results(search,
|
||||
elif search_param:
|
||||
entries, filtered_count, __ = calibre_db.get_search_results(search_param,
|
||||
off,
|
||||
[order,''],
|
||||
[order, ''],
|
||||
limit,
|
||||
True,
|
||||
config.config_read_column,
|
||||
*join)
|
||||
else:
|
||||
|
@ -847,9 +857,9 @@ def list_books():
|
|||
val = entry[0]
|
||||
val.read_status = entry[1] == ub.ReadBook.STATUS_FINISHED
|
||||
val.is_archived = entry[2] is True
|
||||
for index in range(0, len(val.languages)):
|
||||
val.languages[index].language_name = isoLanguages.get_language_name(get_locale(), val.languages[
|
||||
index].lang_code)
|
||||
for lang_index in range(0, len(val.languages)):
|
||||
val.languages[lang_index].language_name = isoLanguages.get_language_name(get_locale(), val.languages[
|
||||
lang_index].lang_code)
|
||||
result.append(val)
|
||||
|
||||
table_entries = {'totalNotFiltered': total_count, 'total': filtered_count, "rows": result}
|
||||
|
@@ -889,19 +899,18 @@ def author_list():
         entries = calibre_db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
             .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
             .group_by(text('books_authors_link.author')).order_by(order).all()
-        charlist = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('char')) \
-            .join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
-            .group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
+        char_list = generate_char_list(db.Authors.sort, db.books_authors_link)
         # If not creating a copy, readonly databases can not display authornames with "|" in it as changing the name
         # starts a change session
-        autor_copy = copy.deepcopy(entries)
-        for entry in autor_copy:
+        author_copy = copy.deepcopy(entries)
+        for entry in author_copy:
             entry.Authors.name = entry.Authors.name.replace('|', ',')
-        return render_title_template('list.html', entries=autor_copy, folder='web.books_list', charlist=charlist,
+        return render_title_template('list.html', entries=author_copy, folder='web.books_list', charlist=char_list,
                                      title=u"Authors", page="authorlist", data='author', order=order_no)
     else:
         abort(404)

 @web.route("/downloadlist")
 @login_required_if_no_ano
 def download_list():
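The three-line charlist query removed above is exactly what the new generate_char_list() helper (introduced in the get_sort_function hunk earlier in this diff) now builds generically for authors, publishers, series and tags. A self-contained sketch of the same first-letter grouping against a toy model, assuming SQLAlchemy 1.4; the Author model here is made up, not Calibre-Web's db module:

# Illustrative sketch only.
from sqlalchemy import Column, Integer, String, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Author(Base):
    __tablename__ = 'authors'
    id = Column(Integer, primary_key=True)
    sort = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Author(sort=name) for name in ("Adams", "Asimov", "Banks")])
    first_char = func.upper(func.substr(Author.sort, 1, 1)).label('char')
    chars = session.query(first_char).group_by(first_char).all()
    print([row.char for row in chars])   # e.g. ['A', 'B']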
@ -912,12 +921,12 @@ def download_list():
|
|||
order = ub.User.name.asc()
|
||||
order_no = 1
|
||||
if current_user.check_visibility(constants.SIDEBAR_DOWNLOAD) and current_user.role_admin():
|
||||
entries = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label('count'))\
|
||||
entries = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label('count')) \
|
||||
.join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(order).all()
|
||||
charlist = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).label('char')) \
|
||||
char_list = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).label('char')) \
|
||||
.filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) != constants.ROLE_ANONYMOUS) \
|
||||
.group_by(func.upper(func.substr(ub.User.name, 1, 1))).all()
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list,
|
||||
title=_(u"Downloads"), page="downloadlist", data="download", order=order_no)
|
||||
else:
|
||||
abort(404)
|
||||
|
@ -936,11 +945,8 @@ def publisher_list():
|
|||
entries = calibre_db.session.query(db.Publishers, func.count('books_publishers_link.book').label('count')) \
|
||||
.join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_publishers_link.publisher')).order_by(order).all()
|
||||
charlist = calibre_db.session.query(func.upper(func.substr(db.Publishers.name, 1, 1)).label('char')) \
|
||||
.join(db.books_publishers_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(func.upper(func.substr(db.Publishers.name, 1, 1))).all()
|
||||
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
|
||||
char_list = generate_char_list(db.Publishers.name, db.books_publishers_link)
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list,
|
||||
title=_(u"Publishers"), page="publisherlist", data="publisher", order=order_no)
|
||||
else:
|
||||
abort(404)
|
||||
|
@ -956,25 +962,19 @@ def series_list():
|
|||
else:
|
||||
order = db.Series.sort.asc()
|
||||
order_no = 1
|
||||
char_list = generate_char_list(db.Series.sort, db.books_series_link)
|
||||
if current_user.get_view_property('series', 'series_view') == 'list':
|
||||
entries = calibre_db.session.query(db.Series, func.count('books_series_link.book').label('count')) \
|
||||
.join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_series_link.series')).order_by(order).all()
|
||||
charlist = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
|
||||
.join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list,
|
||||
title=_(u"Series"), page="serieslist", data="series", order=order_no)
|
||||
else:
|
||||
entries = calibre_db.session.query(db.Books, func.count('books_series_link').label('count'),
|
||||
func.max(db.Books.series_index), db.Books.id) \
|
||||
.join(db.books_series_link).join(db.Series).filter(calibre_db.common_filters())\
|
||||
.join(db.books_series_link).join(db.Series).filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_series_link.series')).order_by(order).all()
|
||||
charlist = calibre_db.session.query(func.upper(func.substr(db.Series.sort, 1, 1)).label('char')) \
|
||||
.join(db.books_series_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(func.upper(func.substr(db.Series.sort, 1, 1))).all()
|
||||
|
||||
return render_title_template('grid.html', entries=entries, folder='web.books_list', charlist=charlist,
|
||||
return render_title_template('grid.html', entries=entries, folder='web.books_list', charlist=char_list,
|
||||
title=_(u"Series"), page="serieslist", data="series", bodyClass="grid-view",
|
||||
order=order_no)
|
||||
else:
|
||||
|
@ -992,7 +992,7 @@ def ratings_list():
|
|||
order = db.Ratings.rating.asc()
|
||||
order_no = 1
|
||||
entries = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
|
||||
(db.Ratings.rating / 2).label('name')) \
|
||||
(db.Ratings.rating / 2).label('name')) \
|
||||
.join(db.books_ratings_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_ratings_link.rating')).order_by(order).all()
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=list(),
|
||||
|
@ -1027,14 +1027,14 @@ def formats_list():
|
|||
def language_overview():
|
||||
if current_user.check_visibility(constants.SIDEBAR_LANGUAGE) and current_user.filter_language() == u"all":
|
||||
order_no = 0 if current_user.get_view_property('language', 'dir') == 'desc' else 1
|
||||
charlist = list()
|
||||
char_list = list()
|
||||
languages = calibre_db.speaking_language(reverse_order=not order_no, with_count=True)
|
||||
for lang in languages:
|
||||
upper_lang = lang[0].name[0].upper()
|
||||
if upper_lang not in charlist:
|
||||
charlist.append(upper_lang)
|
||||
if upper_lang not in char_list:
|
||||
char_list.append(upper_lang)
|
||||
return render_title_template('languages.html', languages=languages,
|
||||
charlist=charlist, title=_(u"Languages"), page="langlist",
|
||||
charlist=char_list, title=_(u"Languages"), page="langlist",
|
||||
data="language", order=order_no)
|
||||
else:
|
||||
abort(404)
|
||||
|
@ -1053,10 +1053,8 @@ def category_list():
|
|||
entries = calibre_db.session.query(db.Tags, func.count('books_tags_link.book').label('count')) \
|
||||
.join(db.books_tags_link).join(db.Books).order_by(order).filter(calibre_db.common_filters()) \
|
||||
.group_by(text('books_tags_link.tag')).all()
|
||||
charlist = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('char')) \
|
||||
.join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters()) \
|
||||
.group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=charlist,
|
||||
char_list = generate_char_list(db.Tags.name, db.books_tags_link)
|
||||
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=char_list,
|
||||
title=_(u"Categories"), page="catlist", data="category", order=order_no)
|
||||
else:
|
||||
abort(404)
|
||||
|
@ -1131,27 +1129,6 @@ def adv_search_custom_columns(cc, term, q):
|
|||
return q
|
||||
|
||||
|
||||
def adv_search_language(q, include_languages_inputs, exclude_languages_inputs):
|
||||
if current_user.filter_language() != "all":
|
||||
q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
|
||||
else:
|
||||
for language in include_languages_inputs:
|
||||
q = q.filter(db.Books.languages.any(db.Languages.id == language))
|
||||
for language in exclude_languages_inputs:
|
||||
q = q.filter(not_(db.Books.series.any(db.Languages.id == language)))
|
||||
return q
|
||||
|
||||
|
||||
def adv_search_ratings(q, rating_high, rating_low):
|
||||
if rating_high:
|
||||
rating_high = int(rating_high) * 2
|
||||
q = q.filter(db.Books.ratings.any(db.Ratings.rating <= rating_high))
|
||||
if rating_low:
|
||||
rating_low = int(rating_low) * 2
|
||||
q = q.filter(db.Books.ratings.any(db.Ratings.rating >= rating_low))
|
||||
return q
|
||||
|
||||
|
||||
def adv_search_read_status(q, read_status):
|
||||
if read_status:
|
||||
if config.config_read_column:
|
||||
|
@@ -1180,36 +1157,40 @@ def adv_search_read_status(q, read_status):
     return q


-def adv_search_extension(q, include_extension_inputs, exclude_extension_inputs):
-    for extension in include_extension_inputs:
-        q = q.filter(db.Books.data.any(db.Data.format == extension))
-    for extension in exclude_extension_inputs:
-        q = q.filter(not_(db.Books.data.any(db.Data.format == extension)))
+def adv_search_language(q, include_languages_inputs, exclude_languages_inputs):
+    if current_user.filter_language() != "all":
+        q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()))
+    else:
+        return adv_search_text(q, include_languages_inputs, exclude_languages_inputs, db.Languages.id)
     return q


-def adv_search_tag(q, include_tag_inputs, exclude_tag_inputs):
-    for tag in include_tag_inputs:
-        q = q.filter(db.Books.tags.any(db.Tags.id == tag))
-    for tag in exclude_tag_inputs:
-        q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))
+def adv_search_ratings(q, rating_high, rating_low):
+    if rating_high:
+        rating_high = int(rating_high) * 2
+        q = q.filter(db.Books.ratings.any(db.Ratings.rating <= rating_high))
+    if rating_low:
+        rating_low = int(rating_low) * 2
+        q = q.filter(db.Books.ratings.any(db.Ratings.rating >= rating_low))
     return q


-def adv_search_serie(q, include_series_inputs, exclude_series_inputs):
-    for serie in include_series_inputs:
-        q = q.filter(db.Books.series.any(db.Series.id == serie))
-    for serie in exclude_series_inputs:
-        q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
+def adv_search_text(q, include_inputs, exclude_inputs, data_table):
+    for inp in include_inputs:
+        q = q.filter(getattr(db.Books, data_table.class_.__tablename__).any(data_table == inp))
+    for excl in exclude_inputs:
+        q = q.filter(not_(getattr(db.Books, data_table.class_.__tablename__).any(data_table == excl)))
     return q


 def adv_search_shelf(q, include_shelf_inputs, exclude_shelf_inputs):
-    q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)\
+    q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id) \
         .filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(exclude_shelf_inputs)))
     if len(include_shelf_inputs) > 0:
         q = q.filter(ub.BookShelf.shelf.in_(include_shelf_inputs))
     return q


 def extend_search_term(searchterm,
                        author_name,
                        book_title,
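The hunk above collapses the copy-pasted include/exclude helpers (adv_search_tag, adv_search_serie, adv_search_extension) into one generic adv_search_text(), which adv_search_language() also falls back to. The trick is deriving the Books relationship from the column the caller passes in; a short fragment (not runnable on its own) showing the lookup for tags, matching what the removed adv_search_tag did:

# Illustrative fragment only; q and tag_id come from the surrounding search code.
data_table = db.Tags.id                          # column handed in by the caller
name = data_table.class_.__tablename__           # -> 'tags'
relationship = getattr(db.Books, name)           # -> db.Books.tags, as used before
q = q.filter(relationship.any(data_table == tag_id))          # include a tag
q = q.filter(not_(relationship.any(data_table == tag_id)))    # exclude a tag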
@ -1236,7 +1217,7 @@ def extend_search_term(searchterm,
|
|||
format='medium', locale=get_locale())])
|
||||
except ValueError:
|
||||
pub_end = u""
|
||||
elements = {'tag': db.Tags, 'serie':db.Series, 'shelf':ub.Shelf}
|
||||
elements = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}
|
||||
for key, db_element in elements.items():
|
||||
tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(tags['include_' + key])).all()
|
||||
searchterm.extend(tag.name for tag in tag_names)
|
||||
|
@ -1266,7 +1247,7 @@ def extend_search_term(searchterm,
|
|||
|
||||
|
||||
def render_adv_search_results(term, offset=None, order=None, limit=None):
|
||||
sort = order[0] if order else [db.Books.sort]
|
||||
sort_param = order[0] if order else [db.Books.sort]
|
||||
pagination = None
|
||||
|
||||
cc = get_cc_columns(filter_config_custom_read=True)
|
||||
|
@ -1288,8 +1269,8 @@ def render_adv_search_results(term, offset=None, order=None, limit=None):
|
|||
query = query.outerjoin(ub.ArchivedBook, and_(db.Books.id == ub.ArchivedBook.book_id,
|
||||
int(current_user.id) == ub.ArchivedBook.user_id))
|
||||
|
||||
q = query.outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book)\
|
||||
.outerjoin(db.Series)\
|
||||
q = query.outerjoin(db.books_series_link, db.Books.id == db.books_series_link.c.book) \
|
||||
.outerjoin(db.Series) \
|
||||
.filter(calibre_db.common_filters(True))
|
||||
|
||||
# parse multiselects to a complete dict
|
||||
|
@ -1315,43 +1296,43 @@ def render_adv_search_results(term, offset=None, order=None, limit=None):
|
|||
if publisher:
|
||||
publisher = publisher.strip().lower()
|
||||
|
||||
searchterm = []
|
||||
search_term = []
|
||||
cc_present = False
|
||||
for c in cc:
|
||||
if c.datatype == "datetime":
|
||||
column_start = term.get('custom_column_' + str(c.id) + '_start')
|
||||
column_end = term.get('custom_column_' + str(c.id) + '_end')
|
||||
if column_start:
|
||||
searchterm.extend([u"{} >= {}".format(c.name,
|
||||
format_date(datetime.strptime(column_start, "%Y-%m-%d").date(),
|
||||
format='medium',
|
||||
locale=get_locale())
|
||||
)])
|
||||
search_term.extend([u"{} >= {}".format(c.name,
|
||||
format_date(datetime.strptime(column_start, "%Y-%m-%d").date(),
|
||||
format='medium',
|
||||
locale=get_locale())
|
||||
)])
|
||||
cc_present = True
|
||||
if column_end:
|
||||
searchterm.extend([u"{} <= {}".format(c.name,
|
||||
format_date(datetime.strptime(column_end, "%Y-%m-%d").date(),
|
||||
format='medium',
|
||||
locale=get_locale())
|
||||
)])
|
||||
search_term.extend([u"{} <= {}".format(c.name,
|
||||
format_date(datetime.strptime(column_end, "%Y-%m-%d").date(),
|
||||
format='medium',
|
||||
locale=get_locale())
|
||||
)])
|
||||
cc_present = True
|
||||
elif term.get('custom_column_' + str(c.id)):
|
||||
searchterm.extend([(u"{}: {}".format(c.name, term.get('custom_column_' + str(c.id))))])
|
||||
search_term.extend([(u"{}: {}".format(c.name, term.get('custom_column_' + str(c.id))))])
|
||||
cc_present = True
|
||||
|
||||
|
||||
if any(tags.values()) or author_name or book_title or publisher or pub_start or pub_end or rating_low \
|
||||
or rating_high or description or cc_present or read_status:
|
||||
searchterm, pub_start, pub_end = extend_search_term(searchterm,
|
||||
author_name,
|
||||
book_title,
|
||||
publisher,
|
||||
pub_start,
|
||||
pub_end,
|
||||
tags,
|
||||
rating_high,
|
||||
rating_low,
|
||||
read_status)
|
||||
if any(tags.values()) or author_name or book_title or \
|
||||
publisher or pub_start or pub_end or rating_low or rating_high \
|
||||
or description or cc_present or read_status:
|
||||
search_term, pub_start, pub_end = extend_search_term(search_term,
|
||||
author_name,
|
||||
book_title,
|
||||
publisher,
|
||||
pub_start,
|
||||
pub_end,
|
||||
tags,
|
||||
rating_high,
|
||||
rating_low,
|
||||
read_status)
|
||||
# q = q.filter()
|
||||
if author_name:
|
||||
q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike("%" + author_name + "%")))
|
||||
|
@ -1364,12 +1345,12 @@ def render_adv_search_results(term, offset=None, order=None, limit=None):
|
|||
q = adv_search_read_status(q, read_status)
|
||||
if publisher:
|
||||
q = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike("%" + publisher + "%")))
|
||||
q = adv_search_tag(q, tags['include_tag'], tags['exclude_tag'])
|
||||
q = adv_search_serie(q, tags['include_serie'], tags['exclude_serie'])
|
||||
q = adv_search_text(q, tags['include_tag'], tags['exclude_tag'], db.Tags.id)
|
||||
q = adv_search_text(q, tags['include_serie'], tags['exclude_serie'], db.Series.id)
|
||||
q = adv_search_text(q, tags['include_extension'], tags['exclude_extension'], db.Data.format)
|
||||
q = adv_search_shelf(q, tags['include_shelf'], tags['exclude_shelf'])
|
||||
q = adv_search_extension(q, tags['include_extension'], tags['exclude_extension'])
|
||||
q = adv_search_language(q, tags['include_language'], tags['exclude_language'])
|
||||
q = adv_search_ratings(q, rating_high, rating_low)
|
||||
q = adv_search_ratings(q, rating_high, rating_low, )
|
||||
|
||||
if description:
|
||||
q = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike("%" + description + "%")))
|
||||
|
@ -1378,10 +1359,10 @@ def render_adv_search_results(term, offset=None, order=None, limit=None):
|
|||
try:
|
||||
q = adv_search_custom_columns(cc, term, q)
|
||||
except AttributeError as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
flash(_("Error on search for custom columns, please restart Calibre-Web"), category="error")
|
||||
|
||||
q = q.order_by(*sort).all()
|
||||
q = q.order_by(*sort_param).all()
|
||||
flask_session['query'] = json.dumps(term)
|
||||
ub.store_combo_ids(q)
|
||||
result_count = len(q)
|
||||
|
@ -1394,7 +1375,7 @@ def render_adv_search_results(term, offset=None, order=None, limit=None):
|
|||
limit_all = result_count
|
||||
entries = calibre_db.order_authors(q[offset:limit_all], list_return=True, combined=True)
|
||||
return render_title_template('search.html',
|
||||
adv_searchterm=searchterm,
|
||||
adv_searchterm=search_term,
|
||||
pagination=pagination,
|
||||
entries=entries,
|
||||
result_count=result_count,
|
||||
|
@ -1442,6 +1423,7 @@ def get_series_cover(series_id, resolution=None):
|
|||
return get_series_cover_thumbnail(series_id, cover_resolution)
|
||||
|
||||
|
||||
|
||||
@web.route("/robots.txt")
|
||||
def get_robots():
|
||||
return send_from_directory(constants.STATIC_DIR, "robots.txt")
|
||||
|
@ -1465,7 +1447,7 @@ def serve_book(book_id, book_format, anyname):
|
|||
df = getFileFromEbooksFolder(book.path, data.name + "." + book_format)
|
||||
return do_gdrive_download(df, headers, (book_format.upper() == 'TXT'))
|
||||
except AttributeError as ex:
|
||||
log.debug_or_exception(ex)
|
||||
log.error_or_exception(ex)
|
||||
return "File Not Found"
|
||||
else:
|
||||
if book_format.upper() == 'TXT':
|
||||
|
@ -1590,7 +1572,7 @@ def login():
|
|||
category="success")
|
||||
return redirect_back(url_for("web.index"))
|
||||
elif login_result is None and user and check_password_hash(str(user.password), form['password']) \
|
||||
and user.name != "Guest":
|
||||
and user.name != "Guest":
|
||||
login_user(user, remember=bool(form.get('remember_me')))
|
||||
ub.store_user_session()
|
||||
log.info("Local Fallback Login as: '%s'", user.name)
|
||||
|
@ -1602,23 +1584,23 @@ def login():
|
|||
log.info(error)
|
||||
flash(_(u"Could not login: %(message)s", message=error), category="error")
|
||||
else:
|
||||
ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||
log.warning('LDAP Login failed for user "%s" IP-address: %s', form['username'], ip_Address)
|
||||
ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||
log.warning('LDAP Login failed for user "%s" IP-address: %s', form['username'], ip_address)
|
||||
flash(_(u"Wrong Username or Password"), category="error")
|
||||
else:
|
||||
ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||
ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
|
||||
if 'forgot' in form and form['forgot'] == 'forgot':
|
||||
if user is not None and user.name != "Guest":
|
||||
ret, __ = reset_password(user.id)
|
||||
if ret == 1:
|
||||
flash(_(u"New Password was send to your email address"), category="info")
|
||||
log.info('Password reset for user "%s" IP-address: %s', form['username'], ip_Address)
|
||||
log.info('Password reset for user "%s" IP-address: %s', form['username'], ip_address)
|
||||
else:
|
||||
log.error(u"An unknown error occurred. Please try again later")
|
||||
flash(_(u"An unknown error occurred. Please try again later."), category="error")
|
||||
else:
|
||||
flash(_(u"Please enter valid username to reset password"), category="error")
|
||||
log.warning('Username missing for password reset IP-address: %s', ip_Address)
|
||||
log.warning('Username missing for password reset IP-address: %s', ip_address)
|
||||
else:
|
||||
if user and check_password_hash(str(user.password), form['password']) and user.name != "Guest":
|
||||
login_user(user, remember=bool(form.get('remember_me')))
|
||||
|
@ -1628,7 +1610,7 @@ def login():
|
|||
config.config_is_initial = False
|
||||
return redirect_back(url_for("web.index"))
|
||||
else:
|
||||
log.warning('Login failed for user "%s" IP-address: %s', form['username'], ip_Address)
|
||||
log.warning('Login failed for user "%s" IP-address: %s', form['username'], ip_address)
|
||||
flash(_(u"Wrong Username or Password"), category="error")
|
||||
|
||||
next_url = request.args.get('next', default=url_for("web.index"), type=str)
|
||||
|
@ -1646,7 +1628,7 @@ def login():
|
|||
@login_required
|
||||
def logout():
|
||||
if current_user is not None and current_user.is_authenticated:
|
||||
ub.delete_user_session(current_user.id, flask_session.get('_id',""))
|
||||
ub.delete_user_session(current_user.id, flask_session.get('_id', ""))
|
||||
logout_user()
|
||||
if feature_support['oauth'] and (config.config_login_type == 2 or config.config_login_type == 3):
|
||||
logout_oauth_user()
|
||||
|
@ -1668,7 +1650,7 @@ def change_profile(kobo_support, local_oauth_check, oauth_status, translations,
|
|||
current_user.email = check_email(to_save["email"])
|
||||
if current_user.role_admin():
|
||||
if to_save.get("name", current_user.name) != current_user.name:
|
||||
# Query User name, if not existing, change
|
||||
# Query username, if not existing, change
|
||||
current_user.name = check_username(to_save["name"])
|
||||
current_user.random_books = 1 if to_save.get("show_random") == "on" else 0
|
||||
if to_save.get("default_language"):
|
||||
|
@ -1722,7 +1704,7 @@ def change_profile(kobo_support, local_oauth_check, oauth_status, translations,
|
|||
@login_required
|
||||
def profile():
|
||||
languages = calibre_db.speaking_language()
|
||||
translations = babel.list_translations() + [LC('en')]
|
||||
translations = babel.list_translations() + [Locale('en')]
|
||||
kobo_support = feature_support['kobo'] and config.config_kobo_sync
|
||||
if feature_support['oauth'] and config.config_login_type == 2:
|
||||
oauth_status = get_oauth_status()
|
||||
|
@ -1753,12 +1735,15 @@ def profile():
|
|||
@viewer_required
|
||||
def read_book(book_id, book_format):
|
||||
book = calibre_db.get_filtered_book(book_id)
|
||||
book.ordered_authors = calibre_db.order_authors([book], False)
|
||||
|
||||
if not book:
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
|
||||
category="error")
|
||||
log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
# check if book has bookmark
|
||||
# check if book has a bookmark
|
||||
bookmark = None
|
||||
if current_user.is_authenticated:
|
||||
bookmark = ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(current_user.id),
|
||||
|
@ -1795,7 +1780,8 @@ def read_book(book_id, book_format):
|
|||
return render_title_template('readcbr.html', comicfile=all_name, title=title,
|
||||
extension=fileExt)
|
||||
log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category="error")
|
||||
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"),
|
||||
category="error")
|
||||
return redirect(url_for("web.index"))
|
||||
|
||||
|
||||
|
@ -1809,18 +1795,18 @@ def show_book(book_id):
|
|||
entry = entries[0]
|
||||
entry.read_status = read_book == ub.ReadBook.STATUS_FINISHED
|
||||
entry.is_archived = archived_book
|
||||
for index in range(0, len(entry.languages)):
|
||||
entry.languages[index].language_name = isoLanguages.get_language_name(get_locale(), entry.languages[
|
||||
index].lang_code)
|
||||
for lang_index in range(0, len(entry.languages)):
|
||||
entry.languages[lang_index].language_name = isoLanguages.get_language_name(get_locale(), entry.languages[
|
||||
lang_index].lang_code)
|
||||
cc = get_cc_columns(filter_config_custom_read=True)
|
||||
book_in_shelfs = []
|
||||
book_in_shelves = []
|
||||
shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).all()
|
||||
for sh in shelfs:
|
||||
book_in_shelfs.append(sh.shelf)
|
||||
book_in_shelves.append(sh.shelf)
|
||||
|
||||
entry.tags = sort(entry.tags, key=lambda tag: tag.name)
|
||||
|
||||
entry.authors = calibre_db.order_authors([entry])
|
||||
entry.ordered_authors = calibre_db.order_authors([entry])
|
||||
|
||||
entry.kindle_list = check_send_to_kindle(entry)
|
||||
entry.reader_list = check_read_formats(entry)
|
||||
|
@ -1833,9 +1819,9 @@ def show_book(book_id):
|
|||
return render_title_template('detail.html',
|
||||
entry=entry,
|
||||
cc=cc,
|
||||
is_xhr=request.headers.get('X-Requested-With')=='XMLHttpRequest',
|
||||
is_xhr=request.headers.get('X-Requested-With') == 'XMLHttpRequest',
|
||||
title=entry.title,
|
||||
books_shelfs=book_in_shelfs,
|
||||
books_shelfs=book_in_shelves,
|
||||
page="book")
|
||||
else:
|
||||
log.debug(u"Oops! Selected book title is unavailable. File does not exist or is not accessible")
|
||||
|
|
|
@@ -1,5 +1,5 @@
 # GDrive Integration
-google-api-python-client>=1.7.11,<2.37.0
+google-api-python-client>=1.7.11,<2.41.0
 gevent>20.6.0,<22.0.0
 greenlet>=0.4.17,<1.2.0
 httplib2>=0.9.2,<0.21.0

@@ -12,8 +12,8 @@ PyYAML>=3.12
 rsa>=3.4.2,<4.9.0

 # Gmail
-google-auth-oauthlib>=0.4.3,<0.5.0
-google-api-python-client>=1.7.11,<2.37.0
+google-auth-oauthlib>=0.4.3,<0.6.0
+google-api-python-client>=1.7.11,<2.41.0

 # goodreads
 goodreads>=0.3.2,<0.4.0

@@ -28,8 +28,8 @@ Flask-Dance>=2.0.0,<5.2.0
 SQLAlchemy-Utils>=0.33.5,<0.39.0

 # metadata extraction
-rarfile>=2.7
-scholarly>=1.2.0,<1.6
+rarfile>=3.2
+scholarly>=1.2.0,<1.7
 markdown2>=2.0.0,<2.5.0
 html2text>=2020.1.16,<2022.1.1
 python-dateutil>=2.1,<2.9.0

@@ -13,6 +13,7 @@ SQLAlchemy>=1.3.0,<1.5.0
 tornado>=4.1,<6.2
 Wand>=0.4.4,<0.7.0
 unidecode>=0.04.19,<1.4.0
-lxml>=3.8.0,<4.8.0
+lxml>=3.8.0,<4.9.0
 flask-wtf>=0.14.2,<1.1.0
 chardet>=3.0.0,<4.1.0
+advocate>=1.0.0,<1.1.0