Refactored code
Testrun
This commit is contained in:
parent
994bc8b0e4
commit
9a963bbe79
380 cps/editbooks.py
@@ -211,6 +211,74 @@ def delete_book_from_details(book_id):
 def delete_book_ajax(book_id, book_format):
     return delete_book(book_id,book_format, False)


+def delete_whole_book(book_id, book):
+    # delete book from Shelfs, Downloads, Read list
+    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
+    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
+    ub.delete_download(book_id)
+    ub.session_commit()
+
+    # check if only this book links to:
+    # author, language, series, tags, custom columns
+    modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
+    modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
+    modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
+    modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
+    modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
+
+    cc = calibre_db.session.query(db.Custom_Columns). \
+        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
+    for c in cc:
+        cc_string = "custom_column_" + str(c.id)
+        if not c.is_multiple:
+            if len(getattr(book, cc_string)) > 0:
+                if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
+                    del_cc = getattr(book, cc_string)[0]
+                    getattr(book, cc_string).remove(del_cc)
+                    log.debug('remove ' + str(c.id))
+                    calibre_db.session.delete(del_cc)
+                    calibre_db.session.commit()
+                elif c.datatype == 'rating':
+                    del_cc = getattr(book, cc_string)[0]
+                    getattr(book, cc_string).remove(del_cc)
+                    if len(del_cc.books) == 0:
+                        log.debug('remove ' + str(c.id))
+                        calibre_db.session.delete(del_cc)
+                        calibre_db.session.commit()
+                else:
+                    del_cc = getattr(book, cc_string)[0]
+                    getattr(book, cc_string).remove(del_cc)
+                    log.debug('remove ' + str(c.id))
+                    calibre_db.session.delete(del_cc)
+                    calibre_db.session.commit()
+        else:
+            modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
+                                   calibre_db.session, 'custom')
+    calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
+
+
+def render_delete_book_result(book_format, jsonResponse, warning, book_id):
+    if book_format:
+        if jsonResponse:
+            return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
+                                         "type": "success",
+                                         "format": book_format,
+                                         "message": _('Book Format Successfully Deleted')}])
+        else:
+            flash(_('Book Format Successfully Deleted'), category="success")
+            return redirect(url_for('editbook.edit_book', book_id=book_id))
+    else:
+        if jsonResponse:
+            return json.dumps([warning, {"location": url_for('web.index'),
+                                         "type": "success",
+                                         "format": book_format,
+                                         "message": _('Book Successfully Deleted')}])
+        else:
+            flash(_('Book Successfully Deleted'), category="success")
+            return redirect(url_for('web.index'))
+
+
 def delete_book(book_id, book_format, jsonResponse):
     warning = {}
     if current_user.role_delete_books():
@@ -236,49 +304,7 @@ def delete_book(book_id, book_format, jsonResponse):
                     else:
                         flash(error, category="warning")
                 if not book_format:
-                    # delete book from Shelfs, Downloads, Read list
+                    delete_whole_book(book_id, book)
-                    ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
-                    ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
-                    ub.delete_download(book_id)
-                    ub.session_commit()
-
-                    # check if only this book links to:
-                    # author, language, series, tags, custom columns
-                    modify_database_object([u''], book.authors, db.Authors, calibre_db.session, 'author')
-                    modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')
-                    modify_database_object([u''], book.series, db.Series, calibre_db.session, 'series')
-                    modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
-                    modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')
-
-                    cc = calibre_db.session.query(db.Custom_Columns).\
-                        filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
-                    for c in cc:
-                        cc_string = "custom_column_" + str(c.id)
-                        if not c.is_multiple:
-                            if len(getattr(book, cc_string)) > 0:
-                                if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
-                                    del_cc = getattr(book, cc_string)[0]
-                                    getattr(book, cc_string).remove(del_cc)
-                                    log.debug('remove ' + str(c.id))
-                                    calibre_db.session.delete(del_cc)
-                                    calibre_db.session.commit()
-                                elif c.datatype == 'rating':
-                                    del_cc = getattr(book, cc_string)[0]
-                                    getattr(book, cc_string).remove(del_cc)
-                                    if len(del_cc.books) == 0:
-                                        log.debug('remove ' + str(c.id))
-                                        calibre_db.session.delete(del_cc)
-                                        calibre_db.session.commit()
-                                else:
-                                    del_cc = getattr(book, cc_string)[0]
-                                    getattr(book, cc_string).remove(del_cc)
-                                    log.debug('remove ' + str(c.id))
-                                    calibre_db.session.delete(del_cc)
-                                    calibre_db.session.commit()
-                        else:
-                            modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
-                                                   calibre_db.session, 'custom')
-                    calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
                 else:
                     calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
                         filter(db.Data.format == book_format).delete()
@@ -289,24 +315,7 @@ def delete_book(book_id, book_format, jsonResponse):
         else:
             # book not found
             log.error('Book with id "%s" could not be deleted: not found', book_id)
-    if book_format:
+    return render_delete_book_result(book_format, jsonResponse, warning, book_id)
-        if jsonResponse:
-            return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
-                                         "type": "success",
-                                         "format": book_format,
-                                         "message": _('Book Format Successfully Deleted')}])
-        else:
-            flash(_('Book Format Successfully Deleted'), category="success")
-            return redirect(url_for('editbook.edit_book', book_id=book_id))
-    else:
-        if jsonResponse:
-            return json.dumps([warning, {"location": url_for('web.index'),
-                                         "type": "success",
-                                         "format": book_format,
-                                         "message": _('Book Successfully Deleted')}])
-        else:
-            flash(_('Book Successfully Deleted'), category="success")
-            return redirect(url_for('web.index'))


 def render_edit_book(book_id):
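Note: render_delete_book_result centralizes both response shapes that delete_book used to build inline. For the AJAX path it returns a two-element JSON array of [warning, result]. A stand-alone sketch of that payload with illustrative values (the location and format strings here are made up; in the app they come from url_for and the request):

import json

warning = {}  # delete_book passes an empty dict when there is nothing to report
payload = json.dumps([warning, {"location": "/admin/book/42",   # illustrative only
                                "type": "success",
                                "format": "EPUB",
                                "message": "Book Format Successfully Deleted"}])
print(payload)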
@@ -447,6 +456,59 @@ def edit_book_publisher(to_save, book):
     return changed


+def edit_cc_data_number(book_id, book, c, to_save, cc_db_value, cc_string):
+    changed = False
+    if to_save[cc_string] == 'None':
+        to_save[cc_string] = None
+    elif c.datatype == 'bool':
+        to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
+
+    if to_save[cc_string] != cc_db_value:
+        if cc_db_value is not None:
+            if to_save[cc_string] is not None:
+                setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
+                changed = True
+            else:
+                del_cc = getattr(book, cc_string)[0]
+                getattr(book, cc_string).remove(del_cc)
+                calibre_db.session.delete(del_cc)
+                changed = True
+        else:
+            cc_class = db.cc_classes[c.id]
+            new_cc = cc_class(value=to_save[cc_string], book=book_id)
+            calibre_db.session.add(new_cc)
+            changed = True
+    return changed, to_save
+
+
+def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
+    changed = False
+    if c.datatype == 'rating':
+        to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
+    if to_save[cc_string].strip() != cc_db_value:
+        if cc_db_value is not None:
+            # remove old cc_val
+            del_cc = getattr(book, cc_string)[0]
+            getattr(book, cc_string).remove(del_cc)
+            if len(del_cc.books) == 0:
+                calibre_db.session.delete(del_cc)
+                changed = True
+        cc_class = db.cc_classes[c.id]
+        new_cc = calibre_db.session.query(cc_class).filter(
+            cc_class.value == to_save[cc_string].strip()).first()
+        # if no cc val is found add it
+        if new_cc is None:
+            new_cc = cc_class(value=to_save[cc_string].strip())
+            calibre_db.session.add(new_cc)
+            changed = True
+            calibre_db.session.flush()
+            new_cc = calibre_db.session.query(cc_class).filter(
+                cc_class.value == to_save[cc_string].strip()).first()
+        # add cc value to book
+        getattr(book, cc_string).append(new_cc)
+    return changed, to_save
+
+
 def edit_cc_data(book_id, book, to_save):
     changed = False
     cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
@@ -459,51 +521,9 @@ def edit_cc_data(book_id, book, to_save):
                 cc_db_value = None
             if to_save[cc_string].strip():
                 if c.datatype == 'int' or c.datatype == 'bool' or c.datatype == 'float':
-                    if to_save[cc_string] == 'None':
+                    changed, to_save = edit_cc_data_number(book_id, book, c, to_save, cc_db_value, cc_string)
-                        to_save[cc_string] = None
-                    elif c.datatype == 'bool':
-                        to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0
-
-                    if to_save[cc_string] != cc_db_value:
-                        if cc_db_value is not None:
-                            if to_save[cc_string] is not None:
-                                setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
-                                changed = True
-                            else:
-                                del_cc = getattr(book, cc_string)[0]
-                                getattr(book, cc_string).remove(del_cc)
-                                calibre_db.session.delete(del_cc)
-                                changed = True
-                        else:
-                            cc_class = db.cc_classes[c.id]
-                            new_cc = cc_class(value=to_save[cc_string], book=book_id)
-                            calibre_db.session.add(new_cc)
-                            changed = True
-
                 else:
-                    if c.datatype == 'rating':
+                    changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
-                        to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
-                    if to_save[cc_string].strip() != cc_db_value:
-                        if cc_db_value is not None:
-                            # remove old cc_val
-                            del_cc = getattr(book, cc_string)[0]
-                            getattr(book, cc_string).remove(del_cc)
-                            if len(del_cc.books) == 0:
-                                calibre_db.session.delete(del_cc)
-                                changed = True
-                        cc_class = db.cc_classes[c.id]
-                        new_cc = calibre_db.session.query(cc_class).filter(
-                            cc_class.value == to_save[cc_string].strip()).first()
-                        # if no cc val is found add it
-                        if new_cc is None:
-                            new_cc = cc_class(value=to_save[cc_string].strip())
-                            calibre_db.session.add(new_cc)
-                            changed = True
-                            calibre_db.session.flush()
-                            new_cc = calibre_db.session.query(cc_class).filter(
-                                cc_class.value == to_save[cc_string].strip()).first()
-                        # add cc value to book
-                        getattr(book, cc_string).append(new_cc)
             else:
                 if cc_db_value is not None:
                     # remove old cc_val
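Note: the two extracted helpers mainly differ in how they normalize the incoming form value before comparing it with the stored one. A minimal, self-contained sketch of just that normalization; the function name is a placeholder and is not part of the commit:

def normalize_cc_value(datatype, raw):
    """Mirror the form-value handling done in edit_cc_data_number/_string."""
    if raw == 'None':
        return None                          # "None" from the form means: clear the value
    if datatype == 'bool':
        return 1 if raw == 'True' else 0     # booleans are stored as 0/1
    if datatype == 'rating':
        return str(int(float(raw) * 2))      # ratings are doubled, so half stars become integers
    return raw

assert normalize_cc_value('int', 'None') is None
assert normalize_cc_value('bool', 'True') == 1
assert normalize_cc_value('rating', '4.5') == '9'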
@@ -766,6 +786,7 @@ def merge_metadata(to_save, meta):
     to_save["description"] = to_save["description"] or Markup(
         getattr(meta, 'description', '')).unescape()

+

 def identifier_list(to_save, book):
     """Generate a list of Identifiers from form information"""
     id_type_prefix = 'identifier-type-'
@@ -780,6 +801,85 @@ def identifier_list(to_save, book):
             result.append(db.Identifiers(to_save[val_key], type_value, book.id))
     return result


+def prepare_authors_on_upload(title, authr):
+    if title != _(u'Unknown') and authr != _(u'Unknown'):
+        entry = calibre_db.check_exists_book(authr, title)
+        if entry:
+            log.info("Uploaded book probably exists in library")
+            flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
+                  + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
+
+    # handle authors
+    input_authors = authr.split('&')
+    # handle_authors(input_authors)
+    input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
+    # Remove duplicates in authors list
+    input_authors = helper.uniq(input_authors)
+
+    # we have all author names now
+    if input_authors == ['']:
+        input_authors = [_(u'Unknown')]  # prevent empty Author
+
+    sort_authors_list = list()
+    db_author = None
+    for inp in input_authors:
+        stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
+        if not stored_author:
+            if not db_author:
+                db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
+                calibre_db.session.add(db_author)
+                calibre_db.session.commit()
+            sort_author = helper.get_sorted_author(inp)
+        else:
+            if not db_author:
+                db_author = stored_author
+            sort_author = stored_author.sort
+        sort_authors_list.append(sort_author)
+    sort_authors = ' & '.join(sort_authors_list)
+    return sort_authors, input_authors, db_author
+
+
+def create_book_on_upload(modif_date, meta):
+    title = meta.title
+    authr = meta.author
+    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)
+
+    title_dir = helper.get_valid_filename(title)
+    author_dir = helper.get_valid_filename(db_author.name)
+
+    # combine path and normalize path from windows systems
+    path = os.path.join(author_dir, title_dir).replace('\\', '/')
+
+    # Calibre adds books with utc as timezone
+    db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
+                       '1', datetime.utcnow(), path, meta.cover, db_author, [], "")
+
+    modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
+                                         'author')
+
+    # Add series_index to book
+    modif_date |= edit_book_series_index(meta.series_id, db_book)
+
+    # add languages
+    modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
+
+    # handle tags
+    modif_date |= edit_book_tags(meta.tags, db_book)
+
+    # handle series
+    modif_date |= edit_book_series(meta.series, db_book)
+
+    # Add file to book
+    file_size = os.path.getsize(meta.file_path)
+    db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
+    db_book.data.append(db_data)
+    calibre_db.session.add(db_book)
+
+    # flush content, get db_book.id available
+    calibre_db.session.flush()
+    return db_book, input_authors, title_dir
+
+
 @editbook.route("/upload", methods=["GET", "POST"])
 @login_required_if_no_ano
 @upload_required
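Note: prepare_authors_on_upload normalizes the raw author string before it touches the database: split on '&', strip whitespace, turn 'Last, First' into 'Last| First', drop duplicates while keeping order, and fall back to Unknown if nothing is left. A small stand-alone sketch of just that string handling; split_authors is a placeholder name and the inline de-duplication stands in for helper.uniq:

def split_authors(authr):
    authors = [a.strip().replace(',', '|') for a in authr.split('&')]
    seen = set()
    authors = [a for a in authors if not (a in seen or seen.add(a))]  # stands in for helper.uniq
    if authors == ['']:
        authors = ['Unknown']          # prevent an empty author, as the helper does
    return authors

print(split_authors('Doe, John & Jane Roe & Doe, John'))   # ['Doe| John', 'Jane Roe']
print(split_authors(''))                                   # ['Unknown']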
@@ -814,76 +914,8 @@ def upload():
                     flash(_(u"File %(filename)s could not saved to temp dir",
                             filename= requested_file.filename), category="error")
                     return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
-                title = meta.title
-                authr = meta.author
-
-                if title != _(u'Unknown') and authr != _(u'Unknown'):
+                db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
-                    entry = calibre_db.check_exists_book(authr, title)
-                    if entry:
-                        log.info("Uploaded book probably exists in library")
-                        flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ")
-                              + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning")
-
-                # handle authors
-                input_authors = authr.split('&')
-                # handle_authors(input_authors)
-                input_authors = list(map(lambda it: it.strip().replace(',', '|'), input_authors))
-                # Remove duplicates in authors list
-                input_authors = helper.uniq(input_authors)
-
-                # we have all author names now
-                if input_authors == ['']:
-                    input_authors = [_(u'Unknown')] # prevent empty Author
-
-                sort_authors_list=list()
-                db_author = None
-                for inp in input_authors:
-                    stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
-                    if not stored_author:
-                        if not db_author:
-                            db_author = db.Authors(inp, helper.get_sorted_author(inp), "")
-                            calibre_db.session.add(db_author)
-                            calibre_db.session.commit()
-                        sort_author = helper.get_sorted_author(inp)
-                    else:
-                        if not db_author:
-                            db_author = stored_author
-                        sort_author = stored_author.sort
-                    sort_authors_list.append(sort_author)
-                sort_authors = ' & '.join(sort_authors_list)
-
-                title_dir = helper.get_valid_filename(title)
-                author_dir = helper.get_valid_filename(db_author.name)
-
-                # combine path and normalize path from windows systems
-                path = os.path.join(author_dir, title_dir).replace('\\', '/')
-                # Calibre adds books with utc as timezone
-                db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
-                                   '1', datetime.utcnow(), path, meta.cover, db_author, [], "")
-
-                modif_date |= modify_database_object(input_authors, db_book.authors, db.Authors, calibre_db.session,
-                                                     'author')
-
-                # Add series_index to book
-                modif_date |= edit_book_series_index(meta.series_id, db_book)
-
-                # add languages
-                modif_date |= edit_book_languages(meta.languages, db_book, upload=True)
-
-                # handle tags
-                modif_date |= edit_book_tags(meta.tags, db_book)
-
-                # handle series
-                modif_date |= edit_book_series(meta.series, db_book)
-
-                # Add file to book
-                file_size = os.path.getsize(meta.file_path)
-                db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)
-                db_book.data.append(db_data)
-                calibre_db.session.add(db_book)
-
-                # flush content, get db_book.id available
-                calibre_db.session.flush()
-
                 # Comments needs book id therfore only possible after flush
                 modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
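Note: create_book_on_upload keeps the modif_date accumulation visible above: each edit_book_* helper reports whether it changed anything, and the flags are OR-ed together. A tiny stand-alone illustration of that pattern; the step functions are placeholders, not project code:

def edit_tags():
    return False      # nothing changed

def edit_series():
    return True       # something changed

modif_date = False
for step in (edit_tags, edit_series):
    modif_date |= step()
print(modif_date)     # True as soon as any step reports a change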

@@ -730,7 +730,7 @@ def format_runtime(runtime):
 # helper function to apply localize status information in tasklist entries
 def render_task_status(tasklist):
     renderedtasklist = list()
-    for __, user, added, task in tasklist:
+    for __, user, __, task in tasklist:
         if user == current_user.nickname or current_user.role_admin():
             ret = {}
             if task.start_time:

@@ -918,7 +918,7 @@ def HandleAuthRequest():
     if config.config_kobo_proxy:
         try:
             return redirect_or_proxy_request()
-        except:
+        except Exception:
            log.error("Failed to receive or parse response from Kobo's auth endpoint. Falling back to un-proxied mode.")
    return make_calibre_web_auth_response()
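Note: the Kobo auth change above only narrows a bare except to except Exception. The practical difference, shown in a small stand-alone snippet: a bare clause would also swallow BaseException subclasses such as KeyboardInterrupt and SystemExit, which should normally propagate.

def swallow():
    try:
        raise KeyboardInterrupt
    except Exception:
        return "caught"            # not reached: KeyboardInterrupt is not an Exception subclass

try:
    swallow()
except KeyboardInterrupt:
    print("KeyboardInterrupt propagated past 'except Exception'")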
219 cps/oauth_bb.py
@@ -96,7 +96,113 @@ def logout_oauth_user():
         session.pop(str(oauth_key) + '_oauth_user_id')


-if ub.oauth_support:
+def oauth_update_token(provider_id, token, provider_user_id):
+    session[provider_id + "_oauth_user_id"] = provider_user_id
+    session[provider_id + "_oauth_token"] = token
+
+    # Find this OAuth token in the database, or create it
+    query = ub.session.query(ub.OAuth).filter_by(
+        provider=provider_id,
+        provider_user_id=provider_user_id,
+    )
+    try:
+        oauth_entry = query.one()
+        # update token
+        oauth_entry.token = token
+    except NoResultFound:
+        oauth_entry = ub.OAuth(
+            provider=provider_id,
+            provider_user_id=provider_user_id,
+            token=token,
+        )
+        ub.session.add(oauth_entry)
+    ub.session_commit()
+
+    # Disable Flask-Dance's default behavior for saving the OAuth token
+    # Value differrs depending on flask-dance version
+    return backend_resultcode
+
+
+def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider_name):
+    query = ub.session.query(ub.OAuth).filter_by(
+        provider=provider_id,
+        provider_user_id=provider_user_id,
+    )
+    try:
+        oauth_entry = query.first()
+        # already bind with user, just login
+        if oauth_entry.user:
+            login_user(oauth_entry.user)
+            log.debug(u"You are now logged in as: '%s'", oauth_entry.user.nickname)
+            flash(_(u"you are now logged in as: '%(nickname)s'", nickname= oauth_entry.user.nickname),
+                  category="success")
+            return redirect(url_for('web.index'))
+        else:
+            # bind to current user
+            if current_user and current_user.is_authenticated:
+                oauth_entry.user = current_user
+                try:
+                    ub.session.add(oauth_entry)
+                    ub.session.commit()
+                    flash(_(u"Link to %(oauth)s Succeeded", oauth=provider_name), category="success")
+                    return redirect(url_for('web.profile'))
+                except Exception as e:
+                    log.debug_or_exception(e)
+                    ub.session.rollback()
+            else:
+                flash(_(u"Login failed, No User Linked With OAuth Account"), category="error")
+            log.info('Login failed, No User Linked With OAuth Account')
+            return redirect(url_for('web.login'))
+            # return redirect(url_for('web.login'))
+            # if config.config_public_reg:
+            #    return redirect(url_for('web.register'))
+            # else:
+            #    flash(_(u"Public registration is not enabled"), category="error")
+            #    return redirect(url_for(redirect_url))
+    except (NoResultFound, AttributeError):
+        return redirect(url_for(redirect_url))
+
+
+def get_oauth_status():
+    status = []
+    query = ub.session.query(ub.OAuth).filter_by(
+        user_id=current_user.id,
+    )
+    try:
+        oauths = query.all()
+        for oauth_entry in oauths:
+            status.append(int(oauth_entry.provider))
+        return status
+    except NoResultFound:
+        return None
+
+
+def unlink_oauth(provider):
+    if request.host_url + 'me' != request.referrer:
+        pass
+    query = ub.session.query(ub.OAuth).filter_by(
+        provider=provider,
+        user_id=current_user.id,
+    )
+    try:
+        oauth_entry = query.one()
+        if current_user and current_user.is_authenticated:
+            oauth_entry.user = current_user
+            try:
+                ub.session.delete(oauth_entry)
+                ub.session.commit()
+                logout_oauth_user()
+                flash(_(u"Unlink to %(oauth)s Succeeded", oauth=oauth_check[provider]), category="success")
+            except Exception as e:
+                log.debug_or_exception(e)
+                ub.session.rollback()
+                flash(_(u"Unlink to %(oauth)s Failed", oauth=oauth_check[provider]), category="error")
+    except NoResultFound:
+        log.warning("oauth %s for user %d not found", provider, current_user.id)
+        flash(_(u"Not Linked to %(oauth)s", oauth=provider), category="error")
+    return redirect(url_for('web.profile'))
+
+
+def generate_oauth_blueprints():
     oauthblueprints = []
     if not ub.session.query(ub.OAuthProvider).count():
         for provider in ("github", "google"):
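Note: oauth_update_token is essentially a get-or-create on the ub.OAuth table keyed by (provider, provider_user_id). A minimal, self-contained SQLAlchemy sketch of that pattern, using an in-memory SQLite database and a stand-in model instead of ub.OAuth; it assumes SQLAlchemy 1.4+ for declarative_base living under sqlalchemy.orm:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.orm.exc import NoResultFound

Base = declarative_base()

class OAuthToken(Base):                      # stand-in for ub.OAuth
    __tablename__ = 'oauth'
    id = Column(Integer, primary_key=True)
    provider = Column(String)
    provider_user_id = Column(String)
    token = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

def update_token(provider, provider_user_id, token):
    query = session.query(OAuthToken).filter_by(provider=provider,
                                                provider_user_id=provider_user_id)
    try:
        entry = query.one()
        entry.token = token                  # token already stored: update it
    except NoResultFound:
        entry = OAuthToken(provider=provider, provider_user_id=provider_user_id, token=token)
        session.add(entry)                   # first login with this provider: create it
    session.commit()
    return entry

update_token('github', '12345', 'tok-1')
print(update_token('github', '12345', 'tok-2').token)   # tok-2, still a single row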
@@ -141,8 +247,12 @@ if ub.oauth_support:
         app.register_blueprint(blueprint, url_prefix="/login")
         if element['active']:
             register_oauth_blueprint(element['id'], element['provider_name'])
+    return oauthblueprints
+
+
+if ub.oauth_support:
+    oauthblueprints = generate_oauth_blueprints()
+
     @oauth_authorized.connect_via(oauthblueprints[0]['blueprint'])
     def github_logged_in(blueprint, token):
         if not token:
@@ -175,113 +285,6 @@ if ub.oauth_support:
         return oauth_update_token(str(oauthblueprints[1]['id']), token, google_user_id)


-    def oauth_update_token(provider_id, token, provider_user_id):
-        session[provider_id + "_oauth_user_id"] = provider_user_id
-        session[provider_id + "_oauth_token"] = token
-
-        # Find this OAuth token in the database, or create it
-        query = ub.session.query(ub.OAuth).filter_by(
-            provider=provider_id,
-            provider_user_id=provider_user_id,
-        )
-        try:
-            oauth_entry = query.one()
-            # update token
-            oauth_entry.token = token
-        except NoResultFound:
-            oauth_entry = ub.OAuth(
-                provider=provider_id,
-                provider_user_id=provider_user_id,
-                token=token,
-            )
-            ub.session.add(oauth_entry)
-        ub.session_commit()
-
-        # Disable Flask-Dance's default behavior for saving the OAuth token
-        # Value differrs depending on flask-dance version
-        return backend_resultcode
-
-
-    def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider_name):
-        query = ub.session.query(ub.OAuth).filter_by(
-            provider=provider_id,
-            provider_user_id=provider_user_id,
-        )
-        try:
-            oauth_entry = query.first()
-            # already bind with user, just login
-            if oauth_entry.user:
-                login_user(oauth_entry.user)
-                log.debug(u"You are now logged in as: '%s'", oauth_entry.user.nickname)
-                flash(_(u"you are now logged in as: '%(nickname)s'", nickname= oauth_entry.user.nickname),
-                      category="success")
-                return redirect(url_for('web.index'))
-            else:
-                # bind to current user
-                if current_user and current_user.is_authenticated:
-                    oauth_entry.user = current_user
-                    try:
-                        ub.session.add(oauth_entry)
-                        ub.session.commit()
-                        flash(_(u"Link to %(oauth)s Succeeded", oauth=provider_name), category="success")
-                        return redirect(url_for('web.profile'))
-                    except Exception as e:
-                        log.debug_or_exception(e)
-                        ub.session.rollback()
-                else:
-                    flash(_(u"Login failed, No User Linked With OAuth Account"), category="error")
-                log.info('Login failed, No User Linked With OAuth Account')
-                return redirect(url_for('web.login'))
-                # return redirect(url_for('web.login'))
-                # if config.config_public_reg:
-                #    return redirect(url_for('web.register'))
-                # else:
-                #    flash(_(u"Public registration is not enabled"), category="error")
-                #    return redirect(url_for(redirect_url))
-        except (NoResultFound, AttributeError):
-            return redirect(url_for(redirect_url))
-
-
-    def get_oauth_status():
-        status = []
-        query = ub.session.query(ub.OAuth).filter_by(
-            user_id=current_user.id,
-        )
-        try:
-            oauths = query.all()
-            for oauth_entry in oauths:
-                status.append(int(oauth_entry.provider))
-            return status
-        except NoResultFound:
-            return None
-
-
-    def unlink_oauth(provider):
-        if request.host_url + 'me' != request.referrer:
-            pass
-        query = ub.session.query(ub.OAuth).filter_by(
-            provider=provider,
-            user_id=current_user.id,
-        )
-        try:
-            oauth_entry = query.one()
-            if current_user and current_user.is_authenticated:
-                oauth_entry.user = current_user
-                try:
-                    ub.session.delete(oauth_entry)
-                    ub.session.commit()
-                    logout_oauth_user()
-                    flash(_(u"Unlink to %(oauth)s Succeeded", oauth=oauth_check[provider]), category="success")
-                except Exception as e:
-                    log.debug_or_exception(e)
-                    ub.session.rollback()
-                    flash(_(u"Unlink to %(oauth)s Failed", oauth=oauth_check[provider]), category="error")
-        except NoResultFound:
-            log.warning("oauth %s for user %d not found", provider, current_user.id)
-            flash(_(u"Not Linked to %(oauth)s", oauth=provider), category="error")
-        return redirect(url_for('web.profile'))
-

     # notify on OAuth provider error
     @oauth_error.connect_via(oauthblueprints[0]['blueprint'])
     def github_error(blueprint, error, error_description=None, error_uri=None):

@@ -253,7 +253,7 @@ class WebServer(object):

         if not self.restart:
             log.info("Performing shutdown of Calibre-Web")
-            # prevent irritiating log of pending tasks message from asyncio
+            # prevent irritating log of pending tasks message from asyncio
             logger.get('asyncio').setLevel(logger.logging.CRITICAL)
         return True
109 cps/updater.py
@@ -284,6 +284,62 @@ class Updater(threading.Thread):
     def _stable_version_info(cls):
         return constants.STABLE_VERSION # Current version

+    def _populate_parent_commits(self, update_data, status, locale, tz, parents):
+        try:
+            parent_commit = update_data['parents'][0]
+            # limit the maximum search depth
+            remaining_parents_cnt = 10
+        except (IndexError, KeyError):
+            remaining_parents_cnt = None
+
+        if remaining_parents_cnt is not None:
+            while True:
+                if remaining_parents_cnt == 0:
+                    break
+
+                # check if we are more than one update behind if so, go up the tree
+                if parent_commit['sha'] != status['current_commit_hash']:
+                    try:
+                        headers = {'Accept': 'application/vnd.github.v3+json'}
+                        r = requests.get(parent_commit['url'], headers=headers, timeout=10)
+                        r.raise_for_status()
+                        parent_data = r.json()
+
+                        parent_commit_date = datetime.datetime.strptime(
+                            parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
+                        parent_commit_date = format_datetime(
+                            parent_commit_date, format='short', locale=locale)
+
+                        parents.append([parent_commit_date,
+                                        parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
+                        parent_commit = parent_data['parents'][0]
+                        remaining_parents_cnt -= 1
+                    except Exception:
+                        # it isn't crucial if we can't get information about the parent
+                        break
+                else:
+                    # parent is our current version
+                    break
+        return parents
+
+    def _load_nightly_data(self, repository_url, commit, status):
+        try:
+            headers = {'Accept': 'application/vnd.github.v3+json'}
+            r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
+                             headers=headers,
+                             timeout=10)
+            r.raise_for_status()
+            update_data = r.json()
+        except requests.exceptions.HTTPError as e:
+            status['message'] = _(u'HTTP Error') + ' ' + str(e)
+        except requests.exceptions.ConnectionError:
+            status['message'] = _(u'Connection error')
+        except requests.exceptions.Timeout:
+            status['message'] = _(u'Timeout while establishing connection')
+        except (requests.exceptions.RequestException, ValueError):
+            status['message'] = _(u'General error')
+        return status, update_data
+
     def _nightly_available_updates(self, request_method, locale):
         tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
         if request_method == "GET":
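Note: a condensed, stand-alone sketch of the commit walk that _populate_parent_commits performs: follow the first parent reported by the GitHub API until the currently running commit hash is reached or a depth limit is hit; any failure simply ends the walk, since the history is informational. Function and variable names here are placeholders, not the project's API.

import requests

def walk_parents(start_commit, current_hash, max_depth=10):
    headers = {'Accept': 'application/vnd.github.v3+json'}
    messages = []
    commit = start_commit                  # dict with at least 'sha' and 'url', as in the GitHub commits API
    for _ in range(max_depth):
        if commit['sha'] == current_hash:
            break                          # reached the version currently running
        try:
            r = requests.get(commit['url'], headers=headers, timeout=10)
            r.raise_for_status()
            data = r.json()
            messages.append(data['message'])
            commit = data['parents'][0]    # keep walking up the first-parent chain
        except Exception:
            break                          # parent info is nice to have, not critical
    return messages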
@@ -309,22 +365,7 @@

             # a new update is available
             status['update'] = True
+            status, update_data = self._load_nightly_data(repository_url, commit, status)
-            try:
-                headers = {'Accept': 'application/vnd.github.v3+json'}
-                r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
-                                 headers=headers,
-                                 timeout=10)
-                r.raise_for_status()
-                update_data = r.json()
-            except requests.exceptions.HTTPError as e:
-                status['message'] = _(u'HTTP Error') + ' ' + str(e)
-            except requests.exceptions.ConnectionError:
-                status['message'] = _(u'Connection error')
-            except requests.exceptions.Timeout:
-                status['message'] = _(u'Timeout while establishing connection')
-            except (requests.exceptions.RequestException, ValueError):
-                status['message'] = _(u'General error')
-
             if status['message'] != '':
                 return json.dumps(status)
@@ -346,41 +387,7 @@
                 )
             # it only makes sense to analyze the parents if we know the current commit hash
             if status['current_commit_hash'] != '':
-                try:
+                parents = self._populate_parent_commits(update_data, status, locale, tz, parents)
-                    parent_commit = update_data['parents'][0]
-                    # limit the maximum search depth
-                    remaining_parents_cnt = 10
-                except (IndexError, KeyError):
-                    remaining_parents_cnt = None
-
-                if remaining_parents_cnt is not None:
-                    while True:
-                        if remaining_parents_cnt == 0:
-                            break
-
-                        # check if we are more than one update behind if so, go up the tree
-                        if parent_commit['sha'] != status['current_commit_hash']:
-                            try:
-                                headers = {'Accept': 'application/vnd.github.v3+json'}
-                                r = requests.get(parent_commit['url'], headers=headers, timeout=10)
-                                r.raise_for_status()
-                                parent_data = r.json()
-
-                                parent_commit_date = datetime.datetime.strptime(
-                                    parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
-                                parent_commit_date = format_datetime(
-                                    parent_commit_date, format='short', locale=locale)
-
-                                parents.append([parent_commit_date,
-                                                parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
-                                parent_commit = parent_data['parents'][0]
-                                remaining_parents_cnt -= 1
-                            except Exception:
-                                # it isn't crucial if we can't get information about the parent
-                                break
-                        else:
-                            # parent is our current version
-                            break
             status['history'] = parents[::-1]
         except (IndexError, KeyError):
             status['success'] = False
File diff suppressed because it is too large