cbartondock 2021-03-15 21:09:51 -04:00
commit d0671ec58c
73 changed files with 6496 additions and 4373 deletions

View File

@@ -22,7 +22,7 @@ Calibre-Web is a web app providing a clean interface for browsing, reading and d
- Support for public user registration
- Send eBooks to Kindle devices with the click of a button
- Sync your Kobo devices through Calibre-Web with your Calibre library
-- Support for reading eBooks directly in the browser (.txt, .epub, .pdf, .cbr, .cbt, .cbz)
+- Support for reading eBooks directly in the browser (.txt, .epub, .pdf, .cbr, .cbt, .cbz, .djvu)
- Upload new books in many formats, including audio formats (.mp3, .m4a, .m4b)
- Support for Calibre Custom Columns
- Ability to hide content based on categories and Custom Column content per user

View File

@@ -37,6 +37,7 @@ try:
except ImportError:
from flask_login.__about__ import __version__ as flask_loginVersion
try:
# pylint: disable=unused-import
import unidecode
# _() necessary to make babel aware of string for translation
unidecode_version = _(u'installed')

View File

@@ -49,7 +49,7 @@ def init_cache_busting(app):
# compute version component
rooted_filename = os.path.join(dirpath, filename)
with open(rooted_filename, 'rb') as f:
-file_hash = hashlib.md5(f.read()).hexdigest()[:7]
+file_hash = hashlib.md5(f.read()).hexdigest()[:7] # nosec
# save version to tables
file_path = rooted_filename.replace(static_folder, "")
@@ -64,6 +64,7 @@ def init_cache_busting(app):
return filename.split("?", 1)[0]
@app.url_defaults
# pylint: disable=unused-variable
def reverse_to_cache_busted_url(endpoint, values):
"""
Make `url_for` produce busted filenames when using the 'static' endpoint.

View File

@@ -104,7 +104,7 @@ LDAP_AUTH_SIMPLE = 0
DEFAULT_MAIL_SERVER = "mail.example.org"
-DEFAULT_PASSWORD = "admin123" # nosec # noqa
+DEFAULT_PASSWORD = "admin123" # nosec
DEFAULT_PORT = 8083
env_CALIBRE_PORT = os.environ.get("CALIBRE_PORT", DEFAULT_PORT)
try:

View File

@@ -211,6 +211,74 @@ def delete_book_from_details(book_id):
def delete_book_ajax(book_id, book_format):
return delete_book(book_id,book_format, False)
def delete_whole_book(book_id, book):
# delete book from Shelfs, Downloads, Read list
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
ub.delete_download(book_id)
ub.session_commit()
# check if only this book links to:
# author, language, series, tags, custom columns
modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
cc = calibre_db.session.query(db.Custom_Columns). \
filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for c in cc:
cc_string = "custom_column_" + str(c.id)
if not c.is_multiple:
if len(getattr(book, cc_string)) > 0:
if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
elif c.datatype == 'rating':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
if len(del_cc.books) == 0:
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
else:
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
else:
modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
calibre_db.session, 'custom')
calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
if book_format:
if jsonResponse:
return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
"type": "success",
"format": book_format,
"message": _('Book Format Successfully Deleted')}])
else:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
if jsonResponse:
return json.dumps([warning, {"location": url_for('web.index'),
"type": "success",
"format": book_format,
"message": _('Book Successfully Deleted')}])
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index'))
def delete_book(book_id, book_format, jsonResponse):
warning = {}
if current_user.role_delete_books():
@@ -236,49 +304,7 @@ def delete_book(book_id, book_format, jsonResponse):
else:
flash(error, category="warning")
if not book_format:
-# delete book from Shelfs, Downloads, Read list
+delete_whole_book(book_id, book)
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
ub.delete_download(book_id)
ub.session_commit()
# check if only this book links to:
# author, language, series, tags, custom columns
modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
cc = calibre_db.session.query(db.Custom_Columns).\
filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for c in cc:
cc_string = "custom_column_" + str(c.id)
if not c.is_multiple:
if len(getattr(book, cc_string)) > 0:
if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
elif c.datatype == 'rating':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
if len(del_cc.books) == 0:
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
else:
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
calibre_db.session.delete(del_cc)
calibre_db.session.commit()
else:
modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
calibre_db.session, 'custom')
calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
else:
calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
filter(db.Data.format == book_format).delete()
@@ -289,24 +315,7 @@ def delete_book(book_id, book_format, jsonResponse):
else:
# book not found
log.error('Book with id "%s" could not be deleted: not found', book_id)
-if book_format:
+return render_delete_book_result(book_format, jsonResponse, warning, book_id)
if jsonResponse:
return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
"type": "success",
"format": book_format,
"message": _('Book Format Successfully Deleted')}])
else:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
if jsonResponse:
return json.dumps([warning, {"location": url_for('web.index'),
"type": "success",
"format": book_format,
"message": _('Book Successfully Deleted')}])
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index'))
def render_edit_book(book_id):
@@ -447,6 +456,59 @@ def edit_book_publisher(to_save, book):
return changed
def edit_cc_data_number(book_id, book, c, to_save, cc_db_value, cc_string):
changed = False
if to_save[cc_string] == 'None':
to_save[cc_string] = None
elif c.datatype == 'bool':
to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
if to_save[cc_string] != cc_db_value:
if cc_db_value is not None:
if to_save[cc_string] is not None:
setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
changed = True
else:
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
calibre_db.session.delete(del_cc)
changed = True
else:
cc_class = db.cc_classes[c.id]
new_cc = cc_class(value=to_save[cc_string], book=book_id)
calibre_db.session.add(new_cc)
changed = True
return changed, to_save
def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
changed = False
if c.datatype == 'rating':
to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
if to_save[cc_string].strip() != cc_db_value:
if cc_db_value is not None:
# remove old cc_val
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
if len(del_cc.books) == 0:
calibre_db.session.delete(del_cc)
changed = True
cc_class = db.cc_classes[c.id]
new_cc = calibre_db.session.query(cc_class).filter(
cc_class.value == to_save[cc_string].strip()).first()
# if no cc val is found add it
if new_cc is None:
new_cc = cc_class(value=to_save[cc_string].strip())
calibre_db.session.add(new_cc)
changed = True
calibre_db.session.flush()
new_cc = calibre_db.session.query(cc_class).filter(
cc_class.value == to_save[cc_string].strip()).first()
# add cc value to book
getattr(book, cc_string).append(new_cc)
return changed, to_save
def edit_cc_data(book_id, book, to_save):
changed = False
cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
@@ -459,51 +521,9 @@ def edit_cc_data(book_id, book, to_save):
cc_db_value = None
if to_save[cc_string].strip():
if c.datatype == 'int' or c.datatype == 'bool' or c.datatype == 'float':
-if to_save[cc_string] == 'None':
+changed, to_save = edit_cc_data_number(book_id, book, c, to_save, cc_db_value, cc_string)
to_save[cc_string] = None
elif c.datatype == 'bool':
to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
if to_save[cc_string] != cc_db_value:
if cc_db_value is not None:
if to_save[cc_string] is not None:
setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
changed = True
else:
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
calibre_db.session.delete(del_cc)
changed = True
else:
cc_class = db.cc_classes[c.id]
new_cc = cc_class(value=to_save[cc_string], book=book_id)
calibre_db.session.add(new_cc)
changed = True
else:
-if c.datatype == 'rating':
+changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
if to_save[cc_string].strip() != cc_db_value:
if cc_db_value is not None:
# remove old cc_val
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
if len(del_cc.books) == 0:
calibre_db.session.delete(del_cc)
changed = True
cc_class = db.cc_classes[c.id]
new_cc = calibre_db.session.query(cc_class).filter(
cc_class.value == to_save[cc_string].strip()).first()
# if no cc val is found add it
if new_cc is None:
new_cc = cc_class(value=to_save[cc_string].strip())
calibre_db.session.add(new_cc)
changed = True
calibre_db.session.flush()
new_cc = calibre_db.session.query(cc_class).filter(
cc_class.value == to_save[cc_string].strip()).first()
# add cc value to book
getattr(book, cc_string).append(new_cc)
else:
if cc_db_value is not None:
# remove old cc_val
@@ -766,6 +786,7 @@ def merge_metadata(to_save, meta):
to_save["description"] = to_save["description"] or Markup(
getattr(meta, 'description', '')).unescape()
def identifier_list(to_save, book):
"""Generate a list of Identifiers from form information"""
id_type_prefix = 'identifier-type-'
@@ -780,6 +801,85 @@ def identifier_list(to_save, book):
result.append(db.Identifiers(to_save[val_key], type_value, book.id))
return result
def prepare_authors_on_upload(title, authr):
if title != _(u'Unknown') and authr != _(u'Unknown'):
entry = calibre_db.check_exists_book(authr, title)
if entry:
log.info("Uploaded book probably exists in library")
flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
+ Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
# handle authors
input_authors = authr.split('&')
# handle_authors(input_authors)
input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
# Remove duplicates in authors list
input_authors = helper.uniq(input_authors)
# we have all author names now
if input_authors == ['']:
input_authors = [_(u'Unknown')] # prevent empty Author
sort_authors_list = list()
db_author = None
for inp in input_authors:
stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
if not stored_author:
if not db_author:
db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
calibre_db.session.add(db_author)
calibre_db.session.commit()
sort_author = helper.get_sorted_author(inp)
else:
if not db_author:
db_author = stored_author
sort_author = stored_author.sort
sort_authors_list.append(sort_author)
sort_authors = ' & '.join(sort_authors_list)
return sort_authors, input_authors, db_author
def create_book_on_upload(modif_date, meta):
title = meta.title
authr = meta.author
sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
title_dir = helper.get_valid_filename(title)
author_dir = helper.get_valid_filename(db_author.name)
# combine path and normalize path from windows systems
path = os.path.join(author_dir, title_dir).replace('\\', '/')
# Calibre adds books with utc as timezone
db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
'1', datetime.utcnow(), path, meta.cover, db_author, [], "")
modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
'author')
# Add series_index to book
modif_date |= edit_book_series_index(meta.series_id, db_book)
# add languages
modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
# handle tags
modif_date |= edit_book_tags(meta.tags, db_book)
# handle series
modif_date |= edit_book_series(meta.series, db_book)
# Add file to book
file_size = os.path.getsize(meta.file_path)
db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
db_book.data.append(db_data)
calibre_db.session.add(db_book)
# flush content, get db_book.id available
calibre_db.session.flush()
return db_book, input_authors, title_dir
@editbook.route("/upload", methods=["GET", "POST"]) @editbook.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano @login_required_if_no_ano
@upload_required @upload_required
@ -814,76 +914,8 @@ def upload():
flash(_(u"File %(filename)s could not saved to temp dir", flash(_(u"File %(filename)s could not saved to temp dir",
filename= requested_file.filename), category="error") filename= requested_file.filename), category="error")
return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json') return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
title = meta.title
authr = meta.author
if title != _(u'Unknown') and authr != _(u'Unknown'): db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
entry = calibre_db.check_exists_book(authr, title)
if entry:
log.info("Uploaded book probably exists in library")
flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
+ Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
# handle authors
input_authors = authr.split('&')
# handle_authors(input_authors)
input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
# Remove duplicates in authors list
input_authors = helper.uniq(input_authors)
# we have all author names now
if input_authors == ['']:
input_authors = [_(u'Unknown')] # prevent empty Author
sort_authors_list=list()
db_author = None
for inp in input_authors:
stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
if not stored_author:
if not db_author:
db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
calibre_db.session.add(db_author)
calibre_db.session.commit()
sort_author = helper.get_sorted_author(inp)
else:
if not db_author:
db_author = stored_author
sort_author = stored_author.sort
sort_authors_list.append(sort_author)
sort_authors = ' & '.join(sort_authors_list)
title_dir = helper.get_valid_filename(title)
author_dir = helper.get_valid_filename(db_author.name)
# combine path and normalize path from windows systems
path = os.path.join(author_dir, title_dir).replace('\\', '/')
# Calibre adds books with utc as timezone
db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
'1', datetime.utcnow(), path, meta.cover, db_author, [], "")
modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
'author')
# Add series_index to book
modif_date |= edit_book_series_index(meta.series_id, db_book)
# add languages
modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
# handle tags
modif_date |= edit_book_tags(meta.tags, db_book)
# handle series
modif_date |= edit_book_series(meta.series, db_book)
# Add file to book
file_size = os.path.getsize(meta.file_path)
db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
db_book.data.append(db_data)
calibre_db.session.add(db_book)
# flush content, get db_book.id available
calibre_db.session.flush()
# Comments needs book id therfore only possible after flush
modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)

View File

@@ -60,14 +60,8 @@ def init_errorhandler():
if services.ldap:
# Only way of catching the LDAPException upon logging in with LDAP server down
@app.errorhandler(services.ldap.LDAPException)
# pylint: disable=unused-variable
def handle_exception(e):
log.debug('LDAP server not accessible while trying to login to opds feed')
return error_http(FailedDependency())
# @app.errorhandler(InvalidRequestError)
#@app.errorhandler(OperationalError)
#def handle_db_exception(e):
# db.session.rollback()
# log.error('Database request error: %s',e)
# return internal_error(InternalServerError(e))

View File

@@ -142,7 +142,7 @@ def on_received_watch_confirmation():
else:
dbpath = os.path.join(config.config_calibre_dir, "metadata.db").encode()
if not response['deleted'] and response['file']['title'] == 'metadata.db' \
-and response['file']['md5Checksum'] != hashlib.md5(dbpath):
+and response['file']['md5Checksum'] != hashlib.md5(dbpath): # nosec
tmp_dir = os.path.join(tempfile.gettempdir(), 'calibre_web')
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)

View File

@@ -137,43 +137,8 @@ def send_registration_mail(e_mail, user_name, default_password, resend=False):
return
-def check_send_to_kindle_without_converter(entry):
+def check_send_to_kindle_with_converter(formats):
bookformats = list()
# no converter - only for mobi and pdf formats
for ele in iter(entry.data):
if ele.uncompressed_size < config.mail_size:
if 'MOBI' in ele.format:
bookformats.append({'format': 'Mobi',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Mobi')})
if 'PDF' in ele.format:
bookformats.append({'format': 'Pdf',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Pdf')})
if 'AZW' in ele.format:
bookformats.append({'format': 'Azw',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Azw')})
return bookformats
def check_send_to_kindle_with_converter(entry):
bookformats = list()
formats = list()
for ele in iter(entry.data):
if ele.uncompressed_size < config.mail_size:
formats.append(ele.format)
if 'MOBI' in formats:
bookformats.append({'format': 'Mobi',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Mobi')})
if 'AZW' in formats:
bookformats.append({'format': 'Azw',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Azw')})
if 'PDF' in formats:
bookformats.append({'format': 'Pdf',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Pdf')})
if 'EPUB' in formats and 'MOBI' not in formats:
bookformats.append({'format': 'Mobi',
'convert': 1,
@@ -193,12 +158,27 @@ def check_send_to_kindle(entry):
"""
returns all available book formats for sending to Kindle
"""
formats = list()
bookformats = list()
if len(entry.data):
-if not config.config_converterpath:
-book_formats = check_send_to_kindle_with_converter(entry)
-else:
-book_formats = check_send_to_kindle_with_converter(entry)
-return book_formats
+for ele in iter(entry.data):
+if ele.uncompressed_size < config.mail_size:
+formats.append(ele.format)
+if 'MOBI' in formats:
+bookformats.append({'format': 'Mobi',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Mobi')})
if 'PDF' in formats:
bookformats.append({'format': 'Pdf',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Pdf')})
if 'AZW' in formats:
bookformats.append({'format': 'Azw',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Azw')})
if config.config_converterpath:
bookformats.extend(check_send_to_kindle_with_converter(formats))
return bookformats
else:
log.error(u'Cannot find book entry %d', entry.id)
return None
@@ -207,7 +187,7 @@ def check_send_to_kindle(entry):
# Check if a reader is existing for any of the book formats, if not, return empty list, otherwise return
# list with supported formats
def check_read_formats(entry):
-EXTENSIONS_READER = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR'}
+EXTENSIONS_READER = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU'}
bookformats = list()
if len(entry.data):
for ele in iter(entry.data):
@@ -750,7 +730,7 @@ def format_runtime(runtime):
# helper function to apply localize status information in tasklist entries
def render_task_status(tasklist):
renderedtasklist = list()
-for __, user, added, task in tasklist:
+for __, user, __, task in tasklist:
if user == current_user.nickname or current_user.role_admin():
ret = {}
if task.start_time:

View File

@@ -918,7 +918,7 @@ def HandleAuthRequest():
if config.config_kobo_proxy:
try:
return redirect_or_proxy_request()
-except:
+except Exception:
log.error("Failed to receive or parse response from Kobo's auth endpoint. Falling back to un-proxied mode.")
return make_calibre_web_auth_response()

View File

@@ -81,6 +81,7 @@ log = logger.create()
def register_url_value_preprocessor(kobo):
@kobo.url_value_preprocessor
# pylint: disable=unused-variable
def pop_auth_token(__, values):
g.auth_token = values.pop("auth_token")

View File

@@ -96,7 +96,113 @@ def logout_oauth_user():
session.pop(str(oauth_key) + '_oauth_user_id')
-if ub.oauth_support:
+def oauth_update_token(provider_id, token, provider_user_id):
session[provider_id + "_oauth_user_id"] = provider_user_id
session[provider_id + "_oauth_token"] = token
# Find this OAuth token in the database, or create it
query = ub.session.query(ub.OAuth).filter_by(
provider=provider_id,
provider_user_id=provider_user_id,
)
try:
oauth_entry = query.one()
# update token
oauth_entry.token = token
except NoResultFound:
oauth_entry = ub.OAuth(
provider=provider_id,
provider_user_id=provider_user_id,
token=token,
)
ub.session.add(oauth_entry)
ub.session_commit()
# Disable Flask-Dance's default behavior for saving the OAuth token
# Value differrs depending on flask-dance version
return backend_resultcode
def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider_name):
query = ub.session.query(ub.OAuth).filter_by(
provider=provider_id,
provider_user_id=provider_user_id,
)
try:
oauth_entry = query.first()
# already bind with user, just login
if oauth_entry.user:
login_user(oauth_entry.user)
log.debug(u"You are now logged in as: '%s'", oauth_entry.user.nickname)
flash(_(u"you are now logged in as: '%(nickname)s'", nickname= oauth_entry.user.nickname),
category="success")
return redirect(url_for('web.index'))
else:
# bind to current user
if current_user and current_user.is_authenticated:
oauth_entry.user = current_user
try:
ub.session.add(oauth_entry)
ub.session.commit()
flash(_(u"Link to %(oauth)s Succeeded", oauth=provider_name), category="success")
return redirect(url_for('web.profile'))
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
else:
flash(_(u"Login failed, No User Linked With OAuth Account"), category="error")
log.info('Login failed, No User Linked With OAuth Account')
return redirect(url_for('web.login'))
# return redirect(url_for('web.login'))
# if config.config_public_reg:
# return redirect(url_for('web.register'))
# else:
# flash(_(u"Public registration is not enabled"), category="error")
# return redirect(url_for(redirect_url))
except (NoResultFound, AttributeError):
return redirect(url_for(redirect_url))
def get_oauth_status():
status = []
query = ub.session.query(ub.OAuth).filter_by(
user_id=current_user.id,
)
try:
oauths = query.all()
for oauth_entry in oauths:
status.append(int(oauth_entry.provider))
return status
except NoResultFound:
return None
def unlink_oauth(provider):
if request.host_url + 'me' != request.referrer:
pass
query = ub.session.query(ub.OAuth).filter_by(
provider=provider,
user_id=current_user.id,
)
try:
oauth_entry = query.one()
if current_user and current_user.is_authenticated:
oauth_entry.user = current_user
try:
ub.session.delete(oauth_entry)
ub.session.commit()
logout_oauth_user()
flash(_(u"Unlink to %(oauth)s Succeeded", oauth=oauth_check[provider]), category="success")
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
flash(_(u"Unlink to %(oauth)s Failed", oauth=oauth_check[provider]), category="error")
except NoResultFound:
log.warning("oauth %s for user %d not found", provider, current_user.id)
flash(_(u"Not Linked to %(oauth)s", oauth=provider), category="error")
return redirect(url_for('web.profile'))
def generate_oauth_blueprints():
oauthblueprints = []
if not ub.session.query(ub.OAuthProvider).count():
for provider in ("github", "google"):
@@ -141,8 +247,12 @@ if ub.oauth_support:
app.register_blueprint(blueprint, url_prefix="/login")
if element['active']:
register_oauth_blueprint(element['id'], element['provider_name'])
return oauthblueprints
if ub.oauth_support:
oauthblueprints = generate_oauth_blueprints()
@oauth_authorized.connect_via(oauthblueprints[0]['blueprint'])
def github_logged_in(blueprint, token):
if not token:
@@ -175,113 +285,6 @@ if ub.oauth_support:
return oauth_update_token(str(oauthblueprints[1]['id']), token, google_user_id)
def oauth_update_token(provider_id, token, provider_user_id):
session[provider_id + "_oauth_user_id"] = provider_user_id
session[provider_id + "_oauth_token"] = token
# Find this OAuth token in the database, or create it
query = ub.session.query(ub.OAuth).filter_by(
provider=provider_id,
provider_user_id=provider_user_id,
)
try:
oauth_entry = query.one()
# update token
oauth_entry.token = token
except NoResultFound:
oauth_entry = ub.OAuth(
provider=provider_id,
provider_user_id=provider_user_id,
token=token,
)
ub.session.add(oauth_entry)
ub.session_commit()
# Disable Flask-Dance's default behavior for saving the OAuth token
# Value differrs depending on flask-dance version
return backend_resultcode
def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider_name):
query = ub.session.query(ub.OAuth).filter_by(
provider=provider_id,
provider_user_id=provider_user_id,
)
try:
oauth_entry = query.first()
# already bind with user, just login
if oauth_entry.user:
login_user(oauth_entry.user)
log.debug(u"You are now logged in as: '%s'", oauth_entry.user.nickname)
flash(_(u"you are now logged in as: '%(nickname)s'", nickname= oauth_entry.user.nickname),
category="success")
return redirect(url_for('web.index'))
else:
# bind to current user
if current_user and current_user.is_authenticated:
oauth_entry.user = current_user
try:
ub.session.add(oauth_entry)
ub.session.commit()
flash(_(u"Link to %(oauth)s Succeeded", oauth=provider_name), category="success")
return redirect(url_for('web.profile'))
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
else:
flash(_(u"Login failed, No User Linked With OAuth Account"), category="error")
log.info('Login failed, No User Linked With OAuth Account')
return redirect(url_for('web.login'))
# return redirect(url_for('web.login'))
# if config.config_public_reg:
# return redirect(url_for('web.register'))
# else:
# flash(_(u"Public registration is not enabled"), category="error")
# return redirect(url_for(redirect_url))
except (NoResultFound, AttributeError):
return redirect(url_for(redirect_url))
def get_oauth_status():
status = []
query = ub.session.query(ub.OAuth).filter_by(
user_id=current_user.id,
)
try:
oauths = query.all()
for oauth_entry in oauths:
status.append(int(oauth_entry.provider))
return status
except NoResultFound:
return None
def unlink_oauth(provider):
if request.host_url + 'me' != request.referrer:
pass
query = ub.session.query(ub.OAuth).filter_by(
provider=provider,
user_id=current_user.id,
)
try:
oauth_entry = query.one()
if current_user and current_user.is_authenticated:
oauth_entry.user = current_user
try:
ub.session.delete(oauth_entry)
ub.session.commit()
logout_oauth_user()
flash(_(u"Unlink to %(oauth)s Succeeded", oauth=oauth_check[provider]), category="success")
except Exception as e:
log.debug_or_exception(e)
ub.session.rollback()
flash(_(u"Unlink to %(oauth)s Failed", oauth=oauth_check[provider]), category="error")
except NoResultFound:
log.warning("oauth %s for user %d not found", provider, current_user.id)
flash(_(u"Not Linked to %(oauth)s", oauth=provider), category="error")
return redirect(url_for('web.profile'))
# notify on OAuth provider error
@oauth_error.connect_via(oauthblueprints[0]['blueprint'])
def github_error(blueprint, error, error_description=None, error_uri=None):

View File

@@ -22,7 +22,7 @@ import os
import errno
import signal
import socket
-import subprocess
+import subprocess # nosec
try:
from gevent.pywsgi import WSGIServer
@@ -253,13 +253,13 @@ class WebServer(object):
if not self.restart:
log.info("Performing shutdown of Calibre-Web")
-# prevent irritiating log of pending tasks message from asyncio
+# prevent irritating log of pending tasks message from asyncio
logger.get('asyncio').setLevel(logger.logging.CRITICAL)
return True
log.info("Performing restart of Calibre-Web")
args = self._get_args_for_reloading()
-subprocess.call(args, close_fds=True)
+subprocess.call(args, close_fds=True) # nosec
return True
def _killServer(self, __, ___):

View File

@@ -22,6 +22,7 @@ from base64 import b64decode, b64encode
from jsonschema import validate, exceptions, __version__
from datetime import datetime
try:
# pylint: disable=unused-import
from urllib import unquote
except ImportError:
from urllib.parse import unquote
@@ -91,14 +92,14 @@ class SyncToken:
def __init__(
self,
-raw_kobo_store_token="", # nosec
+raw_kobo_store_token="",
books_last_created=datetime.min,
books_last_modified=datetime.min,
archive_last_modified=datetime.min,
reading_state_last_modified=datetime.min,
tags_last_modified=datetime.min,
books_last_id=-1
-):
+): # nosec
self.raw_kobo_store_token = raw_kobo_store_token
self.books_last_created = books_last_created
self.books_last_modified = books_last_modified

View File

@@ -1783,6 +1783,12 @@ body > div.container-fluid > div > div.col-sm-10 > div.discover {
margin-top: 0
}
.container-fluid .book .meta .series {
/* font-weight: 400; */
/* font-size: 12px; */
color: hsla(0, 0%, 100%, .45);
}
.container-fluid .book .meta > p {
-o-text-overflow: ellipsis;
text-overflow: ellipsis;

View File

@@ -0,0 +1,194 @@
body {
margin: 0px;
}
#djvuContainer {
position: absolute;
width: 100%;
height: 100%;
max-width: 100%;
text-align: center;
overflow: hidden;
}
.toolbar {
position: relative;
display: inline-block;
padding-top: 10px;
transform: translate(0, 0);
-webkit-transform: translate(0, 0);
-ms-transform: translate(0, 0);
transition: transform 0.3s;
-webkit-transition: -webkit-transform 0.3s;
}
.toolbarHidden {
transform: translate(0, -100%);
-webkit-transform: translate(0, -100%);
-ms-transform: translate(0, -100%);
transition: transform 1s;
-webkit-transition: transform 1s;
}
.toolbarSquareButton {
float: left;
width: 40px;
height: 40px;
background-image: url("img/toolbar-buttons.png");
background-repeat: no-repeat;
background-size: 500% 300%;
}
.scrollbar {
position: absolute;
border-radius: 6px;
opacity: 0.6;
box-shadow: inset 0 0 0 1px black, inset 0 0 0 2px white, inset 0 0 0 10px #BBB;
transition: opacity 0.3s;
}
.scrollbar:hover {
box-shadow: inset 0 0 0 1px black, inset 0 0 0 2px white, inset 0 0 0 10px #999;
}
.scrollbarClicked, .scrollbarClicked:hover {
box-shadow: inset 0 0 0 1px black, inset 0 0 0 2px white, inset 0 0 0 10px #777;
}
.scrollbarHidden {
opacity: 0;
transition: opacity 0.6s;
}
.scrollbarVertical {
right: 0px;
border-right: 1px solid transparent;
width: 13px;
}
.scrollbarHorizontal {
bottom: 0px;
border-bottom: 1px solid transparent;
height: 13px;
}
.content {
overflow: hidden;
position: absolute;
height: 100%;
width: 100%;
}
.textLayer {
position: absolute;
height: 120%;
width: 120%;
overflow: scroll;
text-align: left;
}
_:-ms-lang(x), .textLayer {
height: 100%;
width: 100%;
-ms-overflow-style: none;
}
.textPage {
margin-top: 100vh;
margin-bottom: 100vh;
padding-right: 100vw;
}
.textPage span {
font-family: sans-serif;
color: #000;
color: rgba(0, 0, 0, 0);
white-space: nowrap;
}
.visibleTextPage span {
display: inline-block;
position: relative;
top: 50%;
transform: translateY(-50%);
-webkit-transform: translateY(-50%);
-ms-transform: translateY(-50%);
/* border: 1px solid red; /* for easy debug */
}
.buttonZoomIn {
background-position: 25% 0;
}
.buttonZoomIn:hover {
background-position: 25% 50%;
}
.buttonZoomIn:disabled {
background-position: 25% 100%;
}
.buttonZoomOut {
background-position: 0 0;
}
.buttonZoomOut:hover {
background-position: 0 50%;
}
.buttonZoomOut:disabled {
background-position: 0 100%;
}
.buttonPagePrev {
background-position: 50% 0;
}
.buttonPagePrev:hover {
background-position: 50% 50%;
}
.buttonPagePrev:disabled {
background-position: 50% 100%;
}
.buttonPageNext {
background-position: 75% 0;
}
.buttonPageNext:hover {
background-position: 75% 50%;
}
.buttonPageNext:disabled {
background-position: 75% 100%;
}
.toolbarItem {
display: inline-block;
margin: 0 10px;
}
.comboBox {
float: left;
position: relative;
}
.comboBoxSelection {
width: 8.25ex;
margin: 10px 12px 0px 12px;
}
.comboBoxText {
width: 5ex;
border: none;
padding: 0px;
outline: none;
position: absolute;
margin: 10px 0px 0px 12px;
top: 2px;
left: 3px;
}
.statusImage {
position: absolute;
left: 50%;
top: 50%;
width: 128px;
height: 128px;
margin: -72px 0 0 -64px;
background-image: url("img/status.png");
background-repeat: no-repeat;
}
.blankImage {
background-image: url("img/blank.jpg");
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown. (43 B)

View File

@@ -0,0 +1,16 @@
091ECB3AE852C68866FBC86AA8FCDB1F.cache.js
user.agent ie8
1A420474460884E73E3288F783AD7024.cache.js
user.agent ie10
3CFDA9D9AFA2798299BBAE243DB2E9B5.cache.js
user.agent safari
564CB406D925C79CCD67EB98CA5AD4EF.cache.js
user.agent gecko1_8
A8305F17E026239876FCBC730B035A55.cache.js
user.agent ie9
Devmode:devmode.js

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,26 @@
function djvu_html5(){var O='bootstrap',P='begin',Q='gwt.codesvr.djvu_html5=',R='gwt.codesvr=',S='djvu_html5',T='startup',U='DUMMY',V=0,W=1,X='iframe',Y='javascript:""',Z='position:absolute; width:0; height:0; border:none; left: -1000px;',$=' top: -1000px;',_='CSS1Compat',ab='<!doctype html>',bb='',cb='<html><head><\/head><body><\/body><\/html>',db='undefined',eb='readystatechange',fb=10,gb='Chrome',hb='eval("',ib='");',jb='script',kb='javascript',lb='moduleStartup',mb='moduleRequested',nb='Failed to load ',ob='head',pb='meta',qb='name',rb='djvu_html5::',sb='::',tb='gwt:property',ub='content',vb='=',wb='gwt:onPropertyErrorFn',xb='Bad handler "',yb='" for "gwt:onPropertyErrorFn"',zb='gwt:onLoadErrorFn',Ab='" for "gwt:onLoadErrorFn"',Bb='#',Cb='?',Db='/',Eb='img',Fb='clear.cache.gif',Gb='baseUrl',Hb='djvu_html5.nocache.js',Ib='base',Jb='//',Kb='user.agent',Lb='webkit',Mb='safari',Nb='msie',Ob=11,Pb='ie10',Qb=9,Rb='ie9',Sb=8,Tb='ie8',Ub='gecko',Vb='gecko1_8',Wb=2,Xb=3,Yb=4,Zb='selectingPermutation',$b='djvu_html5.devmode.js',_b='091ECB3AE852C68866FBC86AA8FCDB1F',ac='1A420474460884E73E3288F783AD7024',bc='3CFDA9D9AFA2798299BBAE243DB2E9B5',cc='564CB406D925C79CCD67EB98CA5AD4EF',dc='A8305F17E026239876FCBC730B035A55',ec=':',fc='.cache.js',gc='loadExternalRefs',hc='end',ic='http:',jc='file:',kc='_gwt_dummy_',lc='__gwtDevModeHook:djvu_html5',mc='Ignoring non-whitelisted Dev Mode URL: ',nc=':moduleBase';var o=window;var p=document;r(O,P);function q(){var a=o.location.search;return a.indexOf(Q)!=-1||a.indexOf(R)!=-1}
function r(a,b){if(o.__gwtStatsEvent){o.__gwtStatsEvent({moduleName:S,sessionId:o.__gwtStatsSessionId,subSystem:T,evtGroup:a,millis:(new Date).getTime(),type:b})}}
djvu_html5.__sendStats=r;djvu_html5.__moduleName=S;djvu_html5.__errFn=null;djvu_html5.__moduleBase=U;djvu_html5.__softPermutationId=V;djvu_html5.__computePropValue=null;djvu_html5.__getPropMap=null;djvu_html5.__installRunAsyncCode=function(){};djvu_html5.__gwtStartLoadingFragment=function(){return null};djvu_html5.__gwt_isKnownPropertyValue=function(){return false};djvu_html5.__gwt_getMetaProperty=function(){return null};var s=null;var t=o.__gwt_activeModules=o.__gwt_activeModules||{};t[S]={moduleName:S};djvu_html5.__moduleStartupDone=function(e){var f=t[S].bindings;t[S].bindings=function(){var a=f?f():{};var b=e[djvu_html5.__softPermutationId];for(var c=V;c<b.length;c++){var d=b[c];a[d[V]]=d[W]}return a}};var u;function v(){w();return u}
function w(){if(u){return}var a=p.createElement(X);a.src=Y;a.id=S;a.style.cssText=Z+$;a.tabIndex=-1;p.body.appendChild(a);u=a.contentDocument;if(!u){u=a.contentWindow.document}u.open();var b=document.compatMode==_?ab:bb;u.write(b+cb);u.close()}
function A(k){function l(a){function b(){if(typeof p.readyState==db){return typeof p.body!=db&&p.body!=null}return /loaded|complete/.test(p.readyState)}
var c=b();if(c){a();return}function d(){if(!c){if(!b()){return}c=true;a();if(p.removeEventListener){p.removeEventListener(eb,d,false)}if(e){clearInterval(e)}}}
if(p.addEventListener){p.addEventListener(eb,d,false)}var e=setInterval(function(){d()},fb)}
function m(c){function d(a,b){a.removeChild(b)}
var e=v();var f=e.body;var g;if(navigator.userAgent.indexOf(gb)>-1&&window.JSON){var h=e.createDocumentFragment();h.appendChild(e.createTextNode(hb));for(var i=V;i<c.length;i++){var j=window.JSON.stringify(c[i]);h.appendChild(e.createTextNode(j.substring(W,j.length-W)))}h.appendChild(e.createTextNode(ib));g=e.createElement(jb);g.language=kb;g.appendChild(h);f.appendChild(g);d(f,g)}else{for(var i=V;i<c.length;i++){g=e.createElement(jb);g.language=kb;g.text=c[i];f.appendChild(g);d(f,g)}}}
djvu_html5.onScriptDownloaded=function(a){l(function(){m(a)})};r(lb,mb);var n=p.createElement(jb);n.src=k;if(djvu_html5.__errFn){n.onerror=function(){djvu_html5.__errFn(S,new Error(nb+code))}}p.getElementsByTagName(ob)[V].appendChild(n)}
djvu_html5.__startLoadingFragment=function(a){return D(a)};djvu_html5.__installRunAsyncCode=function(a){var b=v();var c=b.body;var d=b.createElement(jb);d.language=kb;d.text=a;c.appendChild(d);c.removeChild(d)};function B(){var c={};var d;var e;var f=p.getElementsByTagName(pb);for(var g=V,h=f.length;g<h;++g){var i=f[g],j=i.getAttribute(qb),k;if(j){j=j.replace(rb,bb);if(j.indexOf(sb)>=V){continue}if(j==tb){k=i.getAttribute(ub);if(k){var l,m=k.indexOf(vb);if(m>=V){j=k.substring(V,m);l=k.substring(m+W)}else{j=k;l=bb}c[j]=l}}else if(j==wb){k=i.getAttribute(ub);if(k){try{d=eval(k)}catch(a){alert(xb+k+yb)}}}else if(j==zb){k=i.getAttribute(ub);if(k){try{e=eval(k)}catch(a){alert(xb+k+Ab)}}}}}__gwt_getMetaProperty=function(a){var b=c[a];return b==null?null:b};s=d;djvu_html5.__errFn=e}
function C(){function e(a){var b=a.lastIndexOf(Bb);if(b==-1){b=a.length}var c=a.indexOf(Cb);if(c==-1){c=a.length}var d=a.lastIndexOf(Db,Math.min(c,b));return d>=V?a.substring(V,d+W):bb}
function f(a){if(a.match(/^\w+:\/\//)){}else{var b=p.createElement(Eb);b.src=a+Fb;a=e(b.src)}return a}
function g(){var a=__gwt_getMetaProperty(Gb);if(a!=null){return a}return bb}
function h(){var a=p.getElementsByTagName(jb);for(var b=V;b<a.length;++b){if(a[b].src.indexOf(Hb)!=-1){return e(a[b].src)}}return bb}
function i(){var a=p.getElementsByTagName(Ib);if(a.length>V){return a[a.length-W].href}return bb}
function j(){var a=p.location;return a.href==a.protocol+Jb+a.host+a.pathname+a.search+a.hash}
var k=g();if(k==bb){k=h()}if(k==bb){k=i()}if(k==bb&&j()){k=e(p.location.href)}k=f(k);return k}
function D(a){if(a.match(/^\//)){return a}if(a.match(/^[a-zA-Z]+:\/\//)){return a}return djvu_html5.__moduleBase+a}
function F(){var f=[];var g=V;function h(a,b){var c=f;for(var d=V,e=a.length-W;d<e;++d){c=c[a[d]]||(c[a[d]]=[])}c[a[e]]=b}
var i=[];var j=[];function k(a){var b=j[a](),c=i[a];if(b in c){return b}var d=[];for(var e in c){d[c[e]]=e}if(s){s(a,d,b)}throw null}
j[Kb]=function(){var a=navigator.userAgent.toLowerCase();var b=p.documentMode;if(function(){return a.indexOf(Lb)!=-1}())return Mb;if(function(){return a.indexOf(Nb)!=-1&&(b>=fb&&b<Ob)}())return Pb;if(function(){return a.indexOf(Nb)!=-1&&(b>=Qb&&b<Ob)}())return Rb;if(function(){return a.indexOf(Nb)!=-1&&(b>=Sb&&b<Ob)}())return Tb;if(function(){return a.indexOf(Ub)!=-1||b>=Ob}())return Vb;return bb};i[Kb]={'gecko1_8':V,'ie10':W,'ie8':Wb,'ie9':Xb,'safari':Yb};__gwt_isKnownPropertyValue=function(a,b){return b in i[a]};djvu_html5.__getPropMap=function(){var a={};for(var b in i){if(i.hasOwnProperty(b)){a[b]=k(b)}}return a};djvu_html5.__computePropValue=k;o.__gwt_activeModules[S].bindings=djvu_html5.__getPropMap;r(O,Zb);if(q()){return D($b)}var l;try{h([Tb],_b);h([Pb],ac);h([Mb],bc);h([Vb],cc);h([Rb],dc);l=f[k(Kb)];var m=l.indexOf(ec);if(m!=-1){g=parseInt(l.substring(m+W),fb);l=l.substring(V,m)}}catch(a){}djvu_html5.__softPermutationId=g;return D(l+fc)}
function G(){if(!o.__gwt_stylesLoaded){o.__gwt_stylesLoaded={}}r(gc,P);r(gc,hc)}
B();djvu_html5.__moduleBase=C();t[S].moduleBase=djvu_html5.__moduleBase;var H=F();if(o){var I=!!(o.location.protocol==ic||o.location.protocol==jc);o.__gwt_activeModules[S].canRedirect=I;function J(){var b=kc;try{o.sessionStorage.setItem(b,b);o.sessionStorage.removeItem(b);return true}catch(a){return false}}
if(I&&J()){var K=lc;var L=o.sessionStorage[K];if(!/^http:\/\/(localhost|127\.0\.0\.1)(:\d+)?\/.*$/.test(L)){if(L&&(window.console&&console.log)){console.log(mc+L)}L=bb}if(L&&!o[K]){o[K]=true;o[K+nc]=C();var M=p.createElement(jb);M.src=L;var N=p.getElementsByTagName(ob)[V];N.insertBefore(M,N.firstElementChild||N.children[V]);return false}}}G();r(O,hc);A(H);return true}
djvu_html5.succeeded=djvu_html5();

File diff suppressed because one or more lines are too long

Binary file not shown. (43 B)

View File

@@ -0,0 +1,2 @@
self.$wnd=self;self.$doc=self;function djvu_worker(){var c='81EB501BD7AB47786C30D3175CE1EA2B',d='.cache.js',e='djvu_worker',f='';var b;try{b=c}catch(a){return}importScripts(b+d);gwtOnLoad(undefined,e,f)}
djvu_worker();

Binary file not shown. (13 KiB)

Binary file not shown. (16 KiB)

Binary file not shown. (14 KiB)

View File

@ -87,7 +87,7 @@ $(".container-fluid").bind('drop', function (e) {
var files = e.originalEvent.dataTransfer.files; var files = e.originalEvent.dataTransfer.files;
var test = $("#btn-upload")[0].accept; var test = $("#btn-upload")[0].accept;
$(this).css('background', ''); $(this).css('background', '');
const dt = new DataTransfer() const dt = new DataTransfer();
jQuery.each(files, function (index, item) { jQuery.each(files, function (index, item) {
if (test.indexOf(item.name.substr(item.name.lastIndexOf('.'))) !== -1) { if (test.indexOf(item.name.substr(item.name.lastIndexOf('.'))) !== -1) {
dt.items.add(item); dt.items.add(item);

View File

@@ -41,7 +41,7 @@ def process_open(command, quotes=(), env=None, sout=subprocess.PIPE, serr=subpro
else:
exc_command = [x for x in command]
-return subprocess.Popen(exc_command, shell=False, stdout=sout, stderr=serr, universal_newlines=newlines, env=env)
+return subprocess.Popen(exc_command, shell=False, stdout=sout, stderr=serr, universal_newlines=newlines, env=env) # nosec
def process_wait(command, serr=subprocess.PIPE):

View File

@@ -55,6 +55,7 @@
{% endif %}
{% if reader_list and g.user.role_viewer() %}
<div class="btn-group" role="group">
{% if reader_list|length > 1 %}
<button id="read-in-browser" type="button" class="btn btn-primary dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
<span class="glyphicon glyphicon-eye-open"></span> {{_('Read in Browser')}}
<span class="caret"></span>
@@ -64,15 +65,19 @@
<li><a target="_blank" href="{{ url_for('web.read_book', book_id=entry.id, book_format=format) }}">{{format}}</a></li>
{%endfor%}
</ul>
{% else %}
<a target="_blank" href="{{url_for('web.read_book', book_id=entry.id, book_format=reader_list[0])}}" id="readbtn" class="btn btn-primary" role="button"><span class="glyphicon glyphicon-eye-open"></span> {{_('Read in Browser')}} - {{reader_list[0]}}</a>
{% endif %}
</div>
{% endif %}
{% if audioentries|length > 0 and g.user.role_viewer() %}
<div class="btn-group" role="group">
{% if audioentries|length > 1 %}
<button id="listen-in-browser" type="button" class="btn btn-primary dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
<span class="glyphicon glyphicon-music"></span> {{_('Listen in Browser')}}
<span class="caret"></span>
</button>
-<ul class="dropdown-menu" aria-labelledby="read-in-browser">
+<ul class="dropdown-menu" aria-labelledby="listen-in-browser">
{% for format in reader_list %}
<li><a target="_blank" href="{{ url_for('web.read_book', book_id=entry.id, book_format=format) }}">{{format}}</a></li>
{%endfor%}
@@ -85,6 +90,9 @@
{% endif %}
{% endfor %}
</ul>
{% else %}
<a target="_blank" href="{{url_for('web.read_book', book_id=entry.id, book_format=audioentries[0])}}" id="listenbtn" class="btn btn-primary" role="button"><span class="glyphicon glyphicon-music"></span> {{_('Listen in Browser')}} - {{audioentries[0]}}</a>
{% endif %}
</div>
{% endif %}
</div>

View File

@@ -0,0 +1,35 @@
<!doctype html>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<link type="text/css" rel="stylesheet" href="{{ url_for('static', filename='js/libs/djvu_html5/Djvu_html5.css') }}">
<title>Djvu HTML5 browser demo</title>
<script type="text/javascript" language="javascript"
src="{{ url_for('static', filename='js/libs/djvu_html5/djvu_html5/djvu_html5.nocache.js') }}"></script>
</head>
<body>
<!-- RECOMMENDED if your web app will not function without JavaScript enabled -->
<noscript>
<div
style="width: 22em; position: absolute; left: 50%; margin-left: -11em; color: red; background-color: white; border: 1px solid red; padding: 4px; font-family: sans-serif">
Your web browser must have JavaScript enabled in order for this
application to display correctly.</div>
</noscript>
<div id="djvuContainer" file="{{ url_for('web.serve_book', book_id=djvufile,book_format='djvu') }}"></div>
<script type="text/javascript">
var DJVU_CONTEXT = {
background: "#666",
uiHideDelay: 1500,
};
</script>
</body>
</html>

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -284,6 +284,64 @@ class Updater(threading.Thread):
def _stable_version_info(cls):
return constants.STABLE_VERSION # Current version
@staticmethod
def _populate_parent_commits(update_data, status, locale, tz, parents):
try:
parent_commit = update_data['parents'][0]
# limit the maximum search depth
remaining_parents_cnt = 10
except (IndexError, KeyError):
remaining_parents_cnt = None
if remaining_parents_cnt is not None:
while True:
if remaining_parents_cnt == 0:
break
# check if we are more than one update behind if so, go up the tree
if parent_commit['sha'] != status['current_commit_hash']:
try:
headers = {'Accept': 'application/vnd.github.v3+json'}
r = requests.get(parent_commit['url'], headers=headers, timeout=10)
r.raise_for_status()
parent_data = r.json()
parent_commit_date = datetime.datetime.strptime(
parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
parent_commit_date = format_datetime(
parent_commit_date, format='short', locale=locale)
parents.append([parent_commit_date,
parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
parent_commit = parent_data['parents'][0]
remaining_parents_cnt -= 1
except Exception:
# it isn't crucial if we can't get information about the parent
break
else:
# parent is our current version
break
return parents
@staticmethod
def _load_nightly_data(repository_url, commit, status):
try:
headers = {'Accept': 'application/vnd.github.v3+json'}
r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
headers=headers,
timeout=10)
r.raise_for_status()
update_data = r.json()
except requests.exceptions.HTTPError as e:
status['message'] = _(u'HTTP Error') + ' ' + str(e)
except requests.exceptions.ConnectionError:
status['message'] = _(u'Connection error')
except requests.exceptions.Timeout:
status['message'] = _(u'Timeout while establishing connection')
except (requests.exceptions.RequestException, ValueError):
status['message'] = _(u'General error')
return status, update_data
def _nightly_available_updates(self, request_method, locale):
tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
if request_method == "GET":
@@ -309,22 +367,7 @@ class Updater(threading.Thread):
# a new update is available
status['update'] = True
status, update_data = self._load_nightly_data(repository_url, commit, status)
try:
headers = {'Accept': 'application/vnd.github.v3+json'}
r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
headers=headers,
timeout=10)
r.raise_for_status()
update_data = r.json()
except requests.exceptions.HTTPError as e:
status['message'] = _(u'HTTP Error') + ' ' + str(e)
except requests.exceptions.ConnectionError:
status['message'] = _(u'Connection error')
except requests.exceptions.Timeout:
status['message'] = _(u'Timeout while establishing connection')
except (requests.exceptions.RequestException, ValueError):
status['message'] = _(u'General error')
if status['message'] != '':
return json.dumps(status)
@@ -346,41 +389,7 @@
)
# it only makes sense to analyze the parents if we know the current commit hash
if status['current_commit_hash'] != '':
-try:
+parents = self._populate_parent_commits(update_data, status, locale, tz, parents)
parent_commit = update_data['parents'][0]
# limit the maximum search depth
remaining_parents_cnt = 10
except (IndexError, KeyError):
remaining_parents_cnt = None
if remaining_parents_cnt is not None:
while True:
if remaining_parents_cnt == 0:
break
# check if we are more than one update behind if so, go up the tree
if parent_commit['sha'] != status['current_commit_hash']:
try:
headers = {'Accept': 'application/vnd.github.v3+json'}
r = requests.get(parent_commit['url'], headers=headers, timeout=10)
r.raise_for_status()
parent_data = r.json()
parent_commit_date = datetime.datetime.strptime(
parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
parent_commit_date = format_datetime(
parent_commit_date, format='short', locale=locale)
parents.append([parent_commit_date,
parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
parent_commit = parent_data['parents'][0]
remaining_parents_cnt -= 1
except Exception:
# it isn't crucial if we can't get information about the parent
break
else:
# parent is our current version
break
status['history'] = parents[::-1]
except (IndexError, KeyError):
status['success'] = False

View File

@@ -191,7 +191,7 @@ def upload(uploadfile, rarExcecutable):
filename = uploadfile.filename
filename_root, file_extension = os.path.splitext(filename)
-md5 = hashlib.md5(filename.encode('utf-8')).hexdigest()
+md5 = hashlib.md5(filename.encode('utf-8')).hexdigest() # nosec
tmp_file_path = os.path.join(tmp_dir, md5)
log.debug("Temporary file: %s", tmp_file_path)
uploadfile.save(tmp_file_path)

View File

@@ -1632,6 +1632,9 @@ def read_book(book_id, book_format):
elif book_format.lower() == "txt":
log.debug(u"Start txt reader for %d", book_id)
return render_title_template('readtxt.html', txtfile=book_id, title=_(u"Read a Book"))
elif book_format.lower() == "djvu":
log.debug(u"Start djvu reader for %d", book_id)
return render_title_template('readdjvu.html', djvufile=book_id, title=_(u"Read a Book"))
else:
for fileExt in constants.EXTENSIONS_AUDIO:
if book_format.lower() == fileExt:

File diff suppressed because it is too large

File diff suppressed because it is too large