Merge branch 'Develop'

Update translations
Ozzie Isaacs 2024-06-30 12:20:42 +02:00
commit 789912d0d4
69 changed files with 6355 additions and 5550 deletions

View File

@ -56,23 +56,26 @@ except ImportError:
mimetypes.init()
mimetypes.add_type('application/xhtml+xml', '.xhtml')
mimetypes.add_type('application/epub+zip', '.epub')
mimetypes.add_type('application/fb2+zip', '.fb2')
mimetypes.add_type('application/x-mobipocket-ebook', '.mobi')
mimetypes.add_type('application/x-mobipocket-ebook', '.prc')
mimetypes.add_type('application/epub+zip', '.kepub')
mimetypes.add_type('text/xml', '.fb2')
mimetypes.add_type('application/octet-stream', '.mobi')
mimetypes.add_type('application/octet-stream', '.prc')
mimetypes.add_type('application/vnd.amazon.ebook', '.azw')
mimetypes.add_type('application/x-mobi8-ebook', '.azw3')
mimetypes.add_type('application/x-cbr', '.cbr')
mimetypes.add_type('application/x-cbz', '.cbz')
mimetypes.add_type('application/x-cbt', '.cbt')
mimetypes.add_type('application/x-cb7', '.cb7')
mimetypes.add_type('application/x-rar', '.cbr')
mimetypes.add_type('application/zip', '.cbz')
mimetypes.add_type('application/x-tar', '.cbt')
mimetypes.add_type('application/x-7z-compressed', '.cb7')
mimetypes.add_type('image/vnd.djv', '.djv')
mimetypes.add_type('image/vnd.djv', '.djvu')
mimetypes.add_type('application/mpeg', '.mpeg')
mimetypes.add_type('application/mpeg', '.mp3')
mimetypes.add_type('audio/mpeg', '.mp3')
mimetypes.add_type('application/mp4', '.m4a')
mimetypes.add_type('application/mp4', '.m4b')
mimetypes.add_type('application/ogg', '.ogg')
mimetypes.add_type('audio/ogg', '.ogg')
mimetypes.add_type('application/ogg', '.oga')
mimetypes.add_type('text/css', '.css')
mimetypes.add_type('application/x-ms-reader', '.lit')
mimetypes.add_type('text/javascript; charset=UTF-8', '.js')
log = logger.create()
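
For reference, registering a type with mimetypes.add_type changes what Python's own lookup returns for that extension, which is what the switch above from the old custom types (application/x-cbz, application/mpeg) to standard ones (application/zip, audio/mpeg) relies on. A minimal sketch, not part of this commit:

import mimetypes

mimetypes.init()
mimetypes.add_type('application/zip', '.cbz')
mimetypes.add_type('audio/mpeg', '.mp3')

# guess_type now reports the registered media types for these extensions
print(mimetypes.guess_type('comic.cbz'))  # ('application/zip', None)
print(mimetypes.guess_type('track.mp3'))  # ('audio/mpeg', None)
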

View File

@ -1777,7 +1777,7 @@ def _configuration_update_helper():
to_save["config_upload_formats"] = ','.join(
helper.uniq([x.lstrip().rstrip().lower() for x in to_save["config_upload_formats"].split(',')]))
_config_string(to_save, "config_upload_formats")
constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
# constants.EXTENSIONS_UPLOAD = config.config_upload_formats.split(',')
_config_string(to_save, "config_calibre")
_config_string(to_save, "config_binariesdir")
@ -1827,6 +1827,7 @@ def _configuration_update_helper():
reboot_required |= reboot
# security configuration
_config_checkbox(to_save, "config_check_extensions")
_config_checkbox(to_save, "config_password_policy")
_config_checkbox(to_save, "config_password_number")
_config_checkbox(to_save, "config_password_lower")

View File

@ -172,6 +172,7 @@ class _Settings(_Base):
config_ratelimiter = Column(Boolean, default=True)
config_limiter_uri = Column(String, default="")
config_limiter_options = Column(String, default="")
config_check_extensions = Column(Boolean, default=True)
def __repr__(self):
return self.__class__.__name__
@ -371,7 +372,7 @@ class ConfigSQL(object):
db_file = os.path.join(self.config_calibre_dir, 'metadata.db')
have_metadata_db = os.path.isfile(db_file)
self.db_configured = have_metadata_db
constants.EXTENSIONS_UPLOAD = [x.lstrip().rstrip().lower() for x in self.config_upload_formats.split(',')]
# constants.EXTENSIONS_UPLOAD = [x.lstrip().rstrip().lower() for x in self.config_upload_formats.split(',')]
from . import cli_param
if os.environ.get('FLASK_DEBUG'):
logfile = logger.setup(logger.LOG_TO_STDOUT, logger.logging.DEBUG)
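
With the EXTENSIONS_UPLOAD constant no longer filled in here, the allowed upload extensions are read from the stored setting whenever they are needed (see the allowed_extensions = config.config_upload_formats.split(',') calls later in this commit). A small sketch of that pattern with a stand-in config object (illustrative only):

class _Cfg:  # stand-in for the ConfigSQL settings object above
    config_upload_formats = "epub, mobi,PDF"

def allowed_upload_extensions(config):
    # derive the allow-list from the setting instead of a module constant;
    # in the real code the values are already normalized when saved
    return [x.strip().lower() for x in config.config_upload_formats.split(',')]

print(allowed_upload_extensions(_Cfg()))  # ['epub', 'mobi', 'pdf']
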

View File

@ -23,7 +23,7 @@
import os
from datetime import datetime
import json
from shutil import copyfile
from shutil import copyfile, move
from uuid import uuid4
from markupsafe import escape, Markup # dependency of flask
from functools import wraps
@ -46,7 +46,7 @@ from .render_template import render_title_template
from .usermanagement import login_required_if_no_ano
from .kobo_sync_status import change_archived_books
from .redirect import get_redirect_location
from .file_helper import validate_mime_type
editbook = Blueprint('edit-book', __name__)
log = logger.create()
@ -118,14 +118,13 @@ def edit_book(book_id):
# handle book title change
title_change = handle_title_on_edit(book, to_save["book_title"])
# handle book author change
input_authors, author_change, renamed = handle_author_on_edit(book, to_save["author_name"])
input_authors, author_change = handle_author_on_edit(book, to_save["author_name"])
if author_change or title_change:
edited_books_id = book.id
modify_date = True
title_author_error = helper.update_dir_structure(edited_books_id,
config.get_book_path(),
input_authors[0],
renamed_author=renamed)
input_authors[0])
if title_author_error:
flash(title_author_error, category="error")
calibre_db.session.rollback()
@ -251,7 +250,7 @@ def upload():
if error:
return error
db_book, input_authors, title_dir, renamed_authors = create_book_on_upload(modify_date, meta)
db_book, input_authors, title_dir = create_book_on_upload(modify_date, meta)
# Comments need book id therefore only possible after flush
modify_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
@ -261,18 +260,19 @@ def upload():
if config.config_use_google_drive:
helper.upload_new_file_gdrive(book_id,
input_authors[0],
renamed_authors,
title,
title_dir,
meta.file_path,
meta.extension.lower())
for file_format in db_book.data:
file_format.name = (helper.get_valid_filename(title, chars=42) + ' - '
+ helper.get_valid_filename(input_authors[0], chars=42))
else:
error = helper.update_dir_structure(book_id,
config.get_book_path(),
input_authors[0],
meta.file_path,
title_dir + meta.extension.lower(),
renamed_author=renamed_authors)
title_dir + meta.extension.lower())
move_coverfile(meta, db_book)
@ -405,9 +405,8 @@ def edit_list_book(param):
ret = Response(json.dumps({'success': True, 'newValue': book.comments[0].text}),
mimetype='application/json')
elif param == 'authors':
input_authors, __, renamed = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
rename_error = helper.update_dir_structure(book.id, config.get_book_path(), input_authors[0],
renamed_author=renamed)
input_authors, __ = handle_author_on_edit(book, vals['value'], vals.get('checkA', None) == "true")
rename_error = helper.update_dir_structure(book.id, config.get_book_path(), input_authors[0])
if not rename_error:
ret = Response(json.dumps({
'success': True,
@ -543,7 +542,7 @@ def table_xchange_author_title():
author_names.append(authr.name.replace('|', ','))
title_change = handle_title_on_edit(book, " ".join(author_names))
input_authors, author_change, renamed = handle_author_on_edit(book, authors)
input_authors, author_change = handle_author_on_edit(book, authors)
if author_change or title_change:
edited_books_id = book.id
modify_date = True
@ -553,8 +552,7 @@ def table_xchange_author_title():
if edited_books_id:
# toDo: Handle error
edit_error = helper.update_dir_structure(edited_books_id, config.get_book_path(), input_authors[0],
renamed_author=renamed)
edit_error = helper.update_dir_structure(edited_books_id, config.get_book_path(), input_authors[0])
if modify_date:
book.last_modified = datetime.utcnow()
calibre_db.set_metadata_dirty(book.id)
@ -602,7 +600,9 @@ def identifier_list(to_save, book):
return result
def prepare_authors(authr):
def prepare_authors(authr, calibre_path, gdrive=False):
if gdrive:
calibre_path = ""
# handle authors
input_authors = authr.split('&')
# handle_authors(input_authors)
@ -614,18 +614,44 @@ def prepare_authors(authr):
if input_authors == ['']:
input_authors = [_('Unknown')] # prevent empty Author
renamed = list()
for in_aut in input_authors:
renamed_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == in_aut).first()
renamed_author = calibre_db.session.query(db.Authors).filter(func.lower(db.Authors.name).ilike(in_aut)).first()
if renamed_author and in_aut != renamed_author.name:
renamed.append(renamed_author.name)
old_author_name = renamed_author.name
# rename author in Database
create_objects_for_addition(renamed_author, in_aut,"author")
# rename all Books with this author as first author:
# rename all book author_sort strings with the new author name
all_books = calibre_db.session.query(db.Books) \
.filter(db.Books.authors.any(db.Authors.name == renamed_author.name)).all()
sorted_renamed_author = helper.get_sorted_author(renamed_author.name)
sorted_old_author = helper.get_sorted_author(in_aut)
for one_book in all_books:
one_book.author_sort = one_book.author_sort.replace(sorted_renamed_author, sorted_old_author)
return input_authors, renamed
# ToDo: check
sorted_old_author = helper.get_sorted_author(old_author_name)
sorted_renamed_author = helper.get_sorted_author(in_aut)
# change author sort path
try:
author_index = one_book.author_sort.index(sorted_old_author)
one_book.author_sort = one_book.author_sort.replace(sorted_old_author, sorted_renamed_author)
except ValueError:
log.error("Sorted author {} not found in database".format(sorted_old_author))
author_index = -1
# change book path if changed author is first author -> match on first position
if author_index == 0:
one_titledir = one_book.path.split('/')[1]
one_old_authordir = one_book.path.split('/')[0]
# rename author path only once per renamed author -> search all books with author name in book.path
# this has to happen only once per renamed author, but must be checked for every book; otherwise two folders differing only in upper/lower case could exist after the move
new_author_dir = helper.rename_author_path(in_aut, one_old_authordir, renamed_author.name, calibre_path, gdrive)
one_book.path = os.path.join(new_author_dir, one_titledir).replace('\\', '/')
# rename all books in book data with the new author name and move corresponding files to new locations
# old_path = os.path.join(calibre_path, new_author_dir, one_titledir)
new_path = os.path.join(calibre_path, new_author_dir, one_titledir)
all_new_name = helper.get_valid_filename(one_book.title, chars=42) + ' - ' \
+ helper.get_valid_filename(renamed_author.name, chars=42)
# change location in database to new author/title path
helper.rename_all_files_on_change(one_book, new_path, new_path, all_new_name, gdrive)
return input_authors
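
In the rewritten prepare_authors above, a case-only rename of an existing author is handled per book: the sorted form of the old name is located in author_sort and replaced, and only when it sits at position 0 (the renamed author is the first author) is the author directory in book.path renamed as well. A simplified, self-contained sketch of the author_sort step, with helper.get_sorted_author stubbed out (assumption: it produces 'Last, First'):

def get_sorted_author(name):
    # stub for helper.get_sorted_author
    parts = name.split()
    return parts[-1] + ", " + " ".join(parts[:-1]) if len(parts) > 1 else name

author_sort = "doe, john & Smith, Jane"
sorted_old = get_sorted_author("john doe")    # "doe, john"
sorted_new = get_sorted_author("John Doe")    # "Doe, John"
author_index = author_sort.index(sorted_old)  # 0 -> first author, so the book path moves too
author_sort = author_sort.replace(sorted_old, sorted_new)
print(author_index, author_sort)              # 0 Doe, John & Smith, Jane
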
def prepare_authors_on_upload(title, authr):
@ -636,12 +662,13 @@ def prepare_authors_on_upload(title, authr):
flash(_("Uploaded book probably exists in the library, consider to change before upload new: ")
+ Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
input_authors, renamed = prepare_authors(authr)
input_authors = prepare_authors(authr, config.get_book_path(), config.config_use_google_drive)
sort_authors_list = list()
db_author = None
for inp in input_authors:
stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
# stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
stored_author = calibre_db.session.query(db.Authors).filter(func.lower(db.Authors.name).ilike(inp)).first()
if not stored_author:
if not db_author:
db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
@ -654,13 +681,13 @@ def prepare_authors_on_upload(title, authr):
sort_author = stored_author.sort
sort_authors_list.append(sort_author)
sort_authors = ' & '.join(sort_authors_list)
return sort_authors, input_authors, db_author, renamed
return sort_authors, input_authors, db_author
def create_book_on_upload(modify_date, meta):
title = meta.title
authr = meta.author
sort_authors, input_authors, db_author, renamed_authors = prepare_authors_on_upload(title, authr)
sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
title_dir = helper.get_valid_filename(title, chars=96)
author_dir = helper.get_valid_filename(db_author.name, chars=96)
@ -717,14 +744,20 @@ def create_book_on_upload(modify_date, meta):
flash(_("Identifiers are not Case Sensitive, Overwriting Old Identifier"), category="warning")
modify_date |= modification
return db_book, input_authors, title_dir, renamed_authors
return db_book, input_authors, title_dir
def file_handling_on_upload(requested_file):
# check if file extension is correct
allowed_extensions = config.config_upload_formats.split(',')
if requested_file:
if config.config_check_extensions and allowed_extensions != ['']:
if not validate_mime_type(requested_file, allowed_extensions):
flash(_("File type isn't allowed to be uploaded to this server"), category="error")
return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
if '.' in requested_file.filename:
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
if file_ext not in allowed_extensions and '' not in allowed_extensions:
flash(
_("File extension '%(ext)s' is not allowed to be uploaded to this server",
ext=file_ext), category="error")
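
When config_check_extensions is enabled, an upload now has to pass two gates: the sniffed content type (validate_mime_type) and the file-name extension, both checked against config_upload_formats. A condensed sketch of that decision, with the flash/Response handling left out (illustrative, not the exact code above):

def upload_allowed(requested_file, allowed_extensions, check_content):
    # content check only when the option is on and the allow-list is not empty
    if check_content and allowed_extensions != ['']:
        if not validate_mime_type(requested_file, allowed_extensions):
            return False
    if '.' in requested_file.filename:
        file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
        if file_ext not in allowed_extensions and '' not in allowed_extensions:
            return False
    return True
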
@ -1152,7 +1185,12 @@ def edit_cc_data(book_id, book, to_save, cc):
def upload_single_file(file_request, book, book_id):
# Check and handle Uploaded file
requested_file = file_request.files.get('btn-upload-format', None)
allowed_extensions = config.config_upload_formats.split(',')
if requested_file:
if config.config_check_extensions and allowed_extensions != ['']:
if not validate_mime_type(requested_file, allowed_extensions):
flash(_("File type isn't allowed to be uploaded to this server"), category="error")
return False
# check for empty request
if requested_file.filename != '':
if not current_user.role_upload():
@ -1160,7 +1198,7 @@ def upload_single_file(file_request, book, book_id):
return False
if '.' in requested_file.filename:
file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
if file_ext not in allowed_extensions and '' not in allowed_extensions:
flash(_("File extension '%(ext)s' is not allowed to be uploaded to this server", ext=file_ext),
category="error")
return False
@ -1177,7 +1215,8 @@ def upload_single_file(file_request, book, book_id):
try:
os.makedirs(filepath)
except OSError:
flash(_("Failed to create path %(path)s (Permission denied).", path=filepath), category="error")
flash(_("Failed to create path %(path)s (Permission denied).", path=filepath),
category="error")
return False
try:
requested_file.save(saved_filename)
@ -1247,7 +1286,7 @@ def handle_title_on_edit(book, book_title):
def handle_author_on_edit(book, author_name, update_stored=True):
change = False
# handle author(s)
input_authors, renamed = prepare_authors(author_name)
input_authors = prepare_authors(author_name, config.get_book_path(), config.config_use_google_drive)
# change |= modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
# Search for each author if author is in database, if not, author name and sorted author name is generated new
@ -1267,7 +1306,7 @@ def handle_author_on_edit(book, author_name, update_stored=True):
change |= modify_database_object(input_authors, book.authors, db.Authors, calibre_db.session, 'author')
return input_authors, change, renamed
return input_authors, change
def search_objects_remove(db_book_object, db_type, input_elements):
@ -1351,8 +1390,8 @@ def add_objects(db_book_object, db_object, db_session, db_type, add_elements):
if db_no_case:
# check for new case of element
db_element = create_objects_for_addition(db_element, add_element, db_type)
else:
db_element = create_objects_for_addition(db_element, add_element, db_type)
#else:
# db_element = create_objects_for_addition(db_element, add_element, db_type)
# add element to book
db_book_object.append(db_element)
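
The author lookups in this file switched from an exact name comparison to func.lower(db.Authors.name).ilike(...), so a name that differs only in capitalization reuses the stored author row instead of creating a near-duplicate. A self-contained sketch of the same query pattern (the Authors model here is a stand-in for db.Authors):

from sqlalchemy import create_engine, Column, Integer, String, func
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Authors(Base):  # stand-in for db.Authors
    __tablename__ = 'authors'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    sort = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Authors(name='John Doe', sort='Doe, John'))
    session.commit()
    # case-insensitive lookup, as in the ilike() filters above
    hit = session.query(Authors).filter(func.lower(Authors.name).ilike('john doe')).first()
    print(hit.name)  # John Doe
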

View File

@ -19,6 +19,18 @@
from tempfile import gettempdir
import os
import shutil
import zipfile
import mimetypes
import copy
from io import BytesIO
try:
import magic
except ImportError:
pass
from . import logger
log = logger.create()
def get_temp_dir():
@ -31,3 +43,29 @@ def get_temp_dir():
def del_temp_dir():
tmp_dir = os.path.join(gettempdir(), 'calibre_web')
shutil.rmtree(tmp_dir)
def validate_mime_type(file_buffer, allowed_extensions):
mime = magic.Magic(mime=True)
allowed_mimetypes = list()
for x in allowed_extensions:
try:
allowed_mimetypes.append(mimetypes.types_map["." + x])
except KeyError as e:
log.error("Unkown mimetype for Extension: {}".format(x))
tmp_mime_type = mime.from_buffer(file_buffer.read())
file_buffer.seek(0)
if any(mime_type in tmp_mime_type for mime_type in allowed_mimetypes):
return True
# Some epubs show up as zip mimetypes
elif "zip" in tmp_mime_type:
try:
with zipfile.ZipFile(BytesIO(file_buffer.read()), 'r') as epub:
file_buffer.seek(0)
if "mimetype" in epub.namelist():
return True
except:
file_buffer.seek(0)
pass
return False
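
A short usage sketch for the new validate_mime_type helper: EPUB files are often detected by libmagic as plain application/zip, which is why the function falls back to opening the buffer as a ZIP archive and looking for the 'mimetype' entry. Illustrative only; it assumes python-magic/libmagic are installed and that '.epub' is registered in mimetypes.types_map (as done at import time in this project):

import io, zipfile

# build a minimal EPUB-like container: a zip with a "mimetype" entry
buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as archive:
    archive.writestr('mimetype', 'application/epub+zip')
buf.seek(0)

# libmagic typically reports application/zip here, so the zip fallback applies
print(validate_mime_type(buf, ['epub']))  # True
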

View File

@ -21,6 +21,7 @@ import json
import shutil
import chardet
import ssl
import sqlite3
from flask import Response, stream_with_context
from sqlalchemy import create_engine
@ -258,17 +259,22 @@ def getEbooksFolderId(drive=None):
return gDriveId.gdrive_id
def getFile(pathId, fileName, drive):
metaDataFile = "'%s' in parents and trashed = false and title = '%s'" % (pathId, fileName.replace("'", r"\'"))
def getFile(pathId, fileName, drive, nocase):
metaDataFile = "'%s' in parents and trashed = false and title contains '%s'" % (pathId, fileName.replace("'", r"\'"))
fileList = drive.ListFile({'q': metaDataFile}).GetList()
if fileList.__len__() == 0:
return None
else:
if nocase:
return fileList[0]
for f in fileList:
if f['title'] == fileName:
return f
return None
def getFolderId(path, drive):
# drive = getDrive(drive)
log.info(f"GetFolder: {path}")
currentFolderId = None
try:
currentFolderId = getEbooksFolderId(drive)
@ -302,7 +308,7 @@ def getFolderId(path, drive):
session.commit()
else:
currentFolderId = storedPathName.gdrive_id
except (OperationalError, IntegrityError, StaleDataError) as ex:
except (OperationalError, IntegrityError, StaleDataError, sqlite3.IntegrityError) as ex:
log.error_or_exception('Database error: {}'.format(ex))
session.rollback()
except ApiRequestError as ex:
@ -313,7 +319,7 @@ def getFolderId(path, drive):
return currentFolderId
def getFileFromEbooksFolder(path, fileName):
def getFileFromEbooksFolder(path, fileName, nocase=False):
drive = getDrive(Gdrive.Instance().drive)
if path:
# sqlCheckPath=path if path[-1] =='/' else path + '/'
@ -321,7 +327,7 @@ def getFileFromEbooksFolder(path, fileName):
else:
folderId = getEbooksFolderId(drive)
if folderId:
return getFile(folderId, fileName, drive)
return getFile(folderId, fileName, drive, nocase)
else:
return None
@ -337,12 +343,12 @@ def downloadFile(path, filename, output):
f.GetContentFile(output)
def moveGdriveFolderRemote(origin_file, target_folder):
def moveGdriveFolderRemote(origin_file, target_folder, single_book=False):
drive = getDrive(Gdrive.Instance().drive)
previous_parents = ",".join([parent["id"] for parent in origin_file.get('parents')])
children = drive.auth.service.children().list(folderId=previous_parents).execute()
gFileTargetDir = getFileFromEbooksFolder(None, target_folder)
if not gFileTargetDir:
if single_book:
# gFileTargetDir = getFileFromEbooksFolder(None, target_folder, nocase=True)
gFileTargetDir = drive.CreateFile(
{'title': target_folder, 'parents': [{"kind": "drive#fileLink", 'id': getEbooksFolderId()}],
"mimeType": "application/vnd.google-apps.folder"})
@ -352,18 +358,20 @@ def moveGdriveFolderRemote(origin_file, target_folder):
addParents=gFileTargetDir['id'],
removeParents=previous_parents,
fields='id, parents').execute()
elif gFileTargetDir['title'] != target_folder:
elif origin_file['title'] != target_folder:
#gFileTargetDir = getFileFromEbooksFolder(None, target_folder, nocase=True)
#if gFileTargetDir:
deleteDatabasePath(origin_file['title'])
# Folder is not existing, create, and move folder
drive.auth.service.files().patch(fileId=origin_file['id'],
body={'title': target_folder},
fields='title').execute()
else:
'''else:
# Move the file to the new folder
drive.auth.service.files().update(fileId=origin_file['id'],
addParents=gFileTargetDir['id'],
removeParents=previous_parents,
fields='id, parents').execute()
fields='id, parents').execute()'''
# if previous_parents has no children anymore, delete original fileparent
if len(children['items']) == 1:
deleteDatabaseEntry(previous_parents)
@ -563,6 +571,14 @@ def deleteDatabaseEntry(ID):
log.error_or_exception('Database error: {}'.format(ex))
session.rollback()
def deleteDatabasePath(Pathname):
session.query(GdriveId).filter(GdriveId.path.contains(Pathname)).delete()
try:
session.commit()
except OperationalError as ex:
log.error_or_exception('Database error: {}'.format(ex))
session.rollback()
# Gets cover file from gdrive
# ToDo: Check is this right everyone get read permissions on cover files?
@ -581,7 +597,7 @@ def get_cover_via_gdrive(cover_path):
session.add(permissionAdded)
try:
session.commit()
except OperationalError as ex:
except (OperationalError, IntegrityError) as ex:
log.error_or_exception('Database error: {}'.format(ex))
session.rollback()
return df.metadata.get('webContentLink')
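
For context on the getFile change earlier in this file: the Drive query now uses 'title contains', and the exact-title filter is applied in Python unless nocase is set, in which case the first hit (possibly differing only in capitalization) is returned. A rough sketch of that selection logic on a plain list standing in for the PyDrive result (illustrative only):

def pick_file(file_list, file_name, nocase):
    # mirrors getFile(): the 'contains' query already narrowed the list
    if not file_list:
        return None
    if nocase:
        return file_list[0]
    for f in file_list:
        if f['title'] == file_name:
            return f
    return None

hits = [{'title': 'john doe'}, {'title': 'John Doe'}]
print(pick_file(hits, 'John Doe', nocase=False))  # {'title': 'John Doe'}
print(pick_file(hits, 'John Doe', nocase=True))   # {'title': 'john doe'}
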

View File

@ -388,45 +388,30 @@ def delete_book_file(book, calibrepath, book_format=None):
id=book.id,
path=book.path)
def clean_author_database(renamed_author, calibre_path="", local_book=None, gdrive=None):
valid_filename_authors = [get_valid_filename(r, chars=96) for r in renamed_author]
for r in renamed_author:
if local_book:
all_books = [local_book]
else:
all_books = calibre_db.session.query(db.Books) \
.filter(db.Books.authors.any(db.Authors.name == r)).all()
for book in all_books:
book_author_path = book.path.split('/')[0]
if book_author_path in valid_filename_authors or local_book:
new_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == r).first()
all_new_authordir = get_valid_filename(new_author.name, chars=96)
all_titledir = book.path.split('/')[1]
all_new_path = os.path.join(calibre_path, all_new_authordir, all_titledir)
all_new_name = get_valid_filename(book.title, chars=42) + ' - ' \
+ get_valid_filename(new_author.name, chars=42)
# change location in database to new author/title path
book.path = os.path.join(all_new_authordir, all_titledir).replace('\\', '/')
for file_format in book.data:
def rename_all_files_on_change(one_book, new_path, old_path, all_new_name, gdrive=False):
for file_format in one_book.data:
if not gdrive:
shutil.move(os.path.normcase(os.path.join(all_new_path,
file_format.name + '.' + file_format.format.lower())),
os.path.normcase(os.path.join(all_new_path,
all_new_name + '.' + file_format.format.lower())))
if not os.path.exists(new_path):
os.makedirs(new_path)
shutil.move(os.path.normcase(
os.path.join(old_path, file_format.name + '.' + file_format.format.lower())),
os.path.normcase(
os.path.join(new_path, all_new_name + '.' + file_format.format.lower())))
else:
g_file = gd.getFileFromEbooksFolder(all_new_path,
g_file = gd.getFileFromEbooksFolder(old_path,
file_format.name + '.' + file_format.format.lower())
if g_file:
gd.moveGdriveFileRemote(g_file, all_new_name + '.' + file_format.format.lower())
gd.updateDatabaseOnEdit(g_file['id'], all_new_name + '.' + file_format.format.lower())
else:
log.error("File {} not found on gdrive"
.format(all_new_path, file_format.name + '.' + file_format.format.lower()))
.format(old_path, file_format.name + '.' + file_format.format.lower()))
# change name in Database
file_format.name = all_new_name
def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=None, gdrive=False):
'''def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=None, gdrive=False):
# Create new_author_dir from parameter or from database
# Create new title_dir from database and add id
if first_author:
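
The new rename_all_files_on_change above moves every stored format of a book into the new author/title directory and renames each file to '<title> - <author>', both parts shortened with get_valid_filename. A sketch of the name construction only, using a simplified stand-in for the helper and a hypothetical book id of 42 (the real helper also strips characters that are unsafe for the filesystem):

import os

def get_valid_filename(value, chars=128):
    # simplified stand-in for helper.get_valid_filename
    return value.strip()[:chars]

title, author, fmt = "A Very Long Book Title", "John Doe", "epub"
new_path = os.path.join("/calibre", get_valid_filename(author, chars=96),
                        get_valid_filename(title, chars=96) + " (42)")
all_new_name = get_valid_filename(title, chars=42) + ' - ' + get_valid_filename(author, chars=42)
print(os.path.join(new_path, all_new_name + '.' + fmt))
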
@ -435,6 +420,34 @@ def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=
new_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == r).first()
old_author_dir = get_valid_filename(r, chars=96)
new_author_rename_dir = get_valid_filename(new_author.name, chars=96)
if gdrive:
g_file = gd.getFileFromEbooksFolder(None, old_author_dir)
if g_file:
gd.moveGdriveFolderRemote(g_file, new_author_rename_dir)
gd.updateDatabaseOnEdit(g_file['id'], new_author_rename_dir)
else:
if os.path.isdir(os.path.join(calibre_path, old_author_dir)):
old_author_path = os.path.join(calibre_path, old_author_dir)
new_author_path = os.path.join(calibre_path, new_author_rename_dir)
try:
shutil.move(os.path.normcase(old_author_path), os.path.normcase(new_author_path))
except OSError as ex:
log.error("Rename author from: %s to %s: %s", old_author_path, new_author_path, ex)
log.debug(ex, exc_info=True)
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=old_author_path, dest=new_author_path, error=str(ex))
else:
new_authordir = get_valid_filename(localbook.authors[0].name, chars=96)
return new_authordir'''
def rename_author_path(first_author, old_author_dir, renamed_author, calibre_path="", gdrive=False):
# Create new_author_dir from parameter or from database
# Create new title_dir from database and add id
new_authordir = get_valid_filename(first_author, chars=96)
# new_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == renamed_author).first()
# old_author_dir = get_valid_filename(old_author_name, chars=96)
new_author_rename_dir = get_valid_filename(renamed_author, chars=96)
if gdrive:
g_file = gd.getFileFromEbooksFolder(None, old_author_dir)
if g_file:
@ -450,13 +463,10 @@ def rename_all_authors(first_author, renamed_author, calibre_path="", localbook=
log.debug(ex, exc_info=True)
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=old_author_path, dest=new_author_path, error=str(ex))
else:
new_authordir = get_valid_filename(localbook.authors[0].name, chars=96)
return new_authordir
# Moves files in file storage during author/title rename, or from temp dir to file storage
def update_dir_structure_file(book_id, calibre_path, first_author, original_filepath, db_filename, renamed_author):
def update_dir_structure_file(book_id, calibre_path, original_filepath, new_author, db_filename):
# get book database entry from id, if original path overwrite source with original_filepath
local_book = calibre_db.get_book(book_id)
if original_filepath:
@ -468,15 +478,11 @@ def update_dir_structure_file(book_id, calibre_path, first_author, original_file
author_dir = local_book.path.split('/')[0]
title_dir = local_book.path.split('/')[1]
# Create new_author_dir from parameter or from database
# Create new title_dir from database and add id
new_author_dir = rename_all_authors(first_author, renamed_author, calibre_path, local_book)
if first_author:
if first_author.lower() in [r.lower() for r in renamed_author]:
if os.path.isdir(os.path.join(calibre_path, new_author_dir)):
path = os.path.join(calibre_path, new_author_dir, title_dir)
new_title_dir = get_valid_filename(local_book.title, chars=96) + " (" + str(book_id) + ")"
if new_author:
new_author_dir = get_valid_filename(new_author, chars=96)
else:
new_author = new_author_dir = author_dir
if title_dir != new_title_dir or author_dir != new_author_dir or original_filepath:
error = move_files_on_change(calibre_path,
@ -486,31 +492,38 @@ def update_dir_structure_file(book_id, calibre_path, first_author, original_file
db_filename,
original_filepath,
path)
# old_path = os.path.join(calibre_path, author_dir, new_title_dir).replace('\\', '/')
new_path = os.path.join(calibre_path, new_author_dir, new_title_dir).replace('\\', '/')
all_new_name = get_valid_filename(local_book.title, chars=42) + ' - ' \
+ get_valid_filename(new_author, chars=42)
# Book folder already moved, only files need to be renamed
rename_all_files_on_change(local_book, new_path, new_path, all_new_name)
if error:
return error
# Rename all files from old names to new names
return rename_files_on_change(first_author, renamed_author, local_book, original_filepath, path, calibre_path)
return False
def upload_new_file_gdrive(book_id, first_author, renamed_author, title, title_dir, original_filepath, filename_ext):
def upload_new_file_gdrive(book_id, first_author, title, title_dir, original_filepath, filename_ext):
book = calibre_db.get_book(book_id)
file_name = get_valid_filename(title, chars=42) + ' - ' + \
get_valid_filename(first_author, chars=42) + filename_ext
rename_all_authors(first_author, renamed_author, gdrive=True)
gdrive_path = os.path.join(get_valid_filename(first_author, chars=96),
title_dir + " (" + str(book_id) + ")")
book.path = gdrive_path.replace("\\", "/")
gd.uploadFileToEbooksFolder(os.path.join(gdrive_path, file_name).replace("\\", "/"), original_filepath)
return rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True)
return False # rename_files_on_change(first_author, renamed_author, local_book=book, gdrive=True)
def update_dir_structure_gdrive(book_id, first_author, renamed_author):
def update_dir_structure_gdrive(book_id, first_author):
book = calibre_db.get_book(book_id)
authordir = book.path.split('/')[0]
titledir = book.path.split('/')[1]
new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True)
# new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True)
new_authordir = get_valid_filename(first_author, chars=96)
new_titledir = get_valid_filename(book.title, chars=96) + " (" + str(book_id) + ")"
if titledir != new_titledir:
@ -522,29 +535,32 @@ def update_dir_structure_gdrive(book_id, first_author, renamed_author):
else:
return _('File %(file)s not found on Google Drive', file=book.path) # file not found
if authordir != new_authordir and authordir not in renamed_author:
g_file = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
if authordir != new_authordir:
g_file = gd.getFileFromEbooksFolder(authordir, new_titledir)
if g_file:
gd.moveGdriveFolderRemote(g_file, new_authordir)
gd.moveGdriveFolderRemote(g_file, new_authordir, single_book=True)
book.path = new_authordir + '/' + book.path.split('/')[1]
gd.updateDatabaseOnEdit(g_file['id'], book.path)
else:
return _('File %(file)s not found on Google Drive', file=authordir) # file not found
return _('File %(file)s not found on Google Drive', file=authordir)  # file not found
if titledir != new_titledir or authordir != new_authordir :
all_new_name = get_valid_filename(book.title, chars=42) + ' - ' \
+ get_valid_filename(new_authordir, chars=42)
rename_all_files_on_change(book, book.path, book.path, all_new_name, gdrive=True) # todo: Move filenames on gdrive
# change location in database to new author/title path
book.path = os.path.join(new_authordir, new_titledir).replace('\\', '/')
return rename_files_on_change(first_author, renamed_author, book, gdrive=True)
# book.path = os.path.join(authordir, new_titledir).replace('\\', '/')
return False
def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, db_filename, original_filepath, path):
new_path = os.path.join(calibre_path, new_authordir, new_titledir)
new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_authordir
def move_files_on_change(calibre_path, new_author_dir, new_titledir, localbook, db_filename, original_filepath, path):
new_path = os.path.join(calibre_path, new_author_dir, new_titledir)
# new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_author_dir
try:
if original_filepath:
if not os.path.isdir(new_path):
os.makedirs(new_path)
shutil.move(os.path.normcase(original_filepath), os.path.normcase(os.path.join(new_path, db_filename)))
log.debug("Moving title: %s to %s/%s", original_filepath, new_path, new_name)
log.debug("Moving title: %s to %s/%s", original_filepath, new_path)
else:
# Check new path is not valid path
if not os.path.exists(new_path):
@ -558,8 +574,13 @@ def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, d
for file in file_list:
shutil.move(os.path.normcase(os.path.join(dir_name, file)),
os.path.normcase(os.path.join(new_path + dir_name[len(path):], file)))
if not os.listdir(os.path.split(path)[0]):
try:
shutil.rmtree(os.path.split(path)[0])
except (IOError, OSError) as ex:
log.error("Deleting authorpath for book %s failed: %s", localbook.id, ex)
# change location in database to new author/title path
localbook.path = os.path.join(new_authordir, new_titledir).replace('\\', '/')
localbook.path = os.path.join(new_author_dir, new_titledir).replace('\\', '/')
except OSError as ex:
log.error_or_exception("Rename title from {} to {} failed with error: {}".format(path, new_path, ex))
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
@ -575,15 +596,15 @@ def rename_files_on_change(first_author,
calibre_path="",
gdrive=False):
# Rename all files from old names to new names
try:
clean_author_database(renamed_author, calibre_path, gdrive=gdrive)
if first_author and first_author not in renamed_author:
clean_author_database([first_author], calibre_path, local_book, gdrive)
if not gdrive and not renamed_author and not original_filepath and len(os.listdir(os.path.dirname(path))) == 0:
shutil.rmtree(os.path.dirname(path))
except (OSError, FileNotFoundError) as ex:
log.error_or_exception("Error in rename file in path {}".format(ex))
return _("Error in rename file in path: {}".format(str(ex)))
#try:
#clean_author_database(renamed_author, calibre_path, gdrive=gdrive)
#if first_author and first_author not in renamed_author:
# clean_author_database([first_author], calibre_path, local_book, gdrive)
#if not gdrive and not renamed_author and not original_filepath and len(os.listdir(os.path.dirname(path))) == 0:
# shutil.rmtree(os.path.dirname(path))
#except (OSError, FileNotFoundError) as ex:
# log.error_or_exception("Error in rename file in path {}".format(ex))
# return _("Error in rename file in path: {}".format(str(ex)))
return False
@ -648,12 +669,6 @@ def generate_random_password(min_length):
return ''.join(password)
'''def generate_random_password(min_length):
s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
passlen = min_length
return "".join(s[c % len(s)] for c in os.urandom(passlen))'''
def uniq(inpt):
output = []
inpt = [" ".join(inp.split()) for inp in inpt]
@ -717,17 +732,15 @@ def update_dir_structure(book_id,
calibre_path,
first_author=None, # change author of book to this author
original_filepath=None,
db_filename=None,
renamed_author=None):
renamed_author = renamed_author or []
db_filename=None):
if config.config_use_google_drive:
return update_dir_structure_gdrive(book_id, first_author, renamed_author)
return update_dir_structure_gdrive(book_id, first_author)
else:
return update_dir_structure_file(book_id,
calibre_path,
first_author,
original_filepath,
db_filename, renamed_author)
first_author,
db_filename)
def delete_book(book, calibrepath, book_format):
@ -947,7 +960,7 @@ def do_download_file(book, book_format, client, data, headers):
download_name = filename = None
if config.config_use_google_drive:
# startTime = time.time()
df = gd.getFileFromEbooksFolder(book.path, book_name + "." + book_format)
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
# log.debug('%s', time.time() - startTime)
if df:
if config.config_embed_metadata and (
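
One more behavioral detail from move_files_on_change above: after all title folders have been moved, the old author directory is removed if it is empty, so renames no longer leave stale folders behind. A small stand-alone sketch of that cleanup step (paths are hypothetical):

import os, shutil

old_title_path = "/calibre/John Doe/Some Book (42)"
author_dir = os.path.split(old_title_path)[0]
if os.path.isdir(author_dir) and not os.listdir(author_dir):
    try:
        shutil.rmtree(author_dir)  # drop the now-empty author folder
    except (IOError, OSError) as ex:
        print("Deleting author path failed:", ex)
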

View File

@ -112,7 +112,7 @@ def render_title_template(*args, **kwargs):
sidebar, simple = get_sidebar_config(kwargs)
try:
return render_template(instance=config.config_calibre_web_title, sidebar=sidebar, simple=simple,
accept=constants.EXTENSIONS_UPLOAD,
accept=config.config_upload_formats.split(','),
*args, **kwargs)
except PermissionError:
log.error("No permission to access {} file.".format(args[0]))

View File

@ -373,10 +373,14 @@
<input type="text" class="form-control" id="config_limiter_uri" name="config_limiter_uri" value="{% if config.config_limiter_uri != None %}{{ config.config_limiter_uri }}{% endif %}" autocomplete="off">
</div>
<div class="form-group" style="margin-left:10px;">
<label for="config_calibre">{{_('Options for Limiter')}}</label>
<label for="config_calibre">{{_('Options for Limiter Backend')}}</label>
<input type="text" class="form-control" id="config_limiter_options" name="config_limiter_options" value="{% if config.config_limiter_options != None %}{{ config.config_limiter_options }}{% endif %}" autocomplete="off">
</div>
</div>
<div class="form-group">
<input type="checkbox" id="config_check_extensions" name="config_check_extensions" {% if config.config_check_extensions %}checked{% endif %}>
<label for="config_check_extensions">{{_('Check if file extensions matches file content on upload')}}</label>
</div>
<div class="form-group">
<label for="config_session">{{_('Session protection')}}</label>
<select name="config_session" id="config_session" class="form-control">

View File

@ -251,7 +251,7 @@
<input id="have_read_cb" data-checked="{{ _('Mark As Unread') }}"
data-unchecked="{{ _('Mark As Read') }}" type="checkbox"
{% if entry.read_status %}checked{% endif %}>
<span>{{ _('Read') }}</span>
<span data-toggle="tooltip" title="{{_('Mark Book as Read or Unread')}}">{{ _('Read') }}</span>
</label>
</form>
</p>
@ -264,7 +264,7 @@
<input id="archived_cb" data-checked="{{ _('Restore from archive') }}"
data-unchecked="{{ _('Add to archive') }}" type="checkbox"
{% if entry.is_archived %}checked{% endif %}>
<span>{{ _('Archived') }}</span>
<span data-toggle="tooltip" title="{{_('Mark Book as archived or not, to hide it in Calibre-Web and delete it from Kobo Reader')}}">{{ _('Archive') }}</span>
</label>
</form>
</p>

26 file diffs suppressed because they are too large
View File

@ -23,7 +23,7 @@ from flask_babel import gettext as _
from . import logger, comic, isoLanguages
from .constants import BookMeta
from .helper import split_authors
from .file_helper import get_temp_dir
from .file_helper import get_temp_dir, validate_mime_type
log = logger.create()

View File

@ -107,7 +107,7 @@ def add_security_headers(resp):
resp.headers['X-Content-Type-Options'] = 'nosniff'
resp.headers['X-Frame-Options'] = 'SAMEORIGIN'
resp.headers['X-XSS-Protection'] = '1; mode=block'
resp.headers['Strict-Transport-Security'] = 'max-age=31536000;'
resp.headers['Strict-Transport-Security'] = 'max-age=31536000'
return resp
@ -1582,7 +1582,8 @@ def read_book(book_id, book_format):
return render_title_template('readtxt.html', txtfile=book_id, title=book.title)
elif book_format.lower() in ["djvu", "djv"]:
log.debug("Start djvu reader for %d", book_id)
return render_title_template('readdjvu.html', djvufile=book_id, title=book.title, extension=book_format.lower())
return render_title_template('readdjvu.html', djvufile=book_id, title=book.title,
extension=book_format.lower())
else:
for fileExt in constants.EXTENSIONS_AUDIO:
if book_format.lower() == fileExt:

File diff suppressed because it is too large

View File

@ -20,3 +20,4 @@ advocate>=1.0.0,<1.1.0
Flask-Limiter>=2.3.0,<3.6.0
regex>=2022.3.2,<2024.6.25
bleach>=6.0.0,<6.2.0
python-magic>=0.4.27,<0.5.0

View File

@ -60,6 +60,7 @@ install_requires =
Flask-Limiter>=2.3.0,<3.6.0
regex>=2022.3.2,<2024.2.25
bleach>=6.0.0,<6.2.0
python-magic>=0.4.27,<0.5.0
[options.packages.find]

File diff suppressed because it is too large