Code cosmetics, bugfix error message on failed emails, refactored language name generation
parent 34abf95fb2
commit d414bf3263
@@ -390,7 +390,7 @@ function rarMakeDecodeTables(BitLength, offset, dec, size) {
   for (I = 1; I < 16; ++I) {
     N = 2 * (N + LenCount[I]);
     M = (N << (15 - I));
-    if (M > 0xFFFF){
+    if (M > 0xFFFF) {
       M = 0xFFFF;
     }
     DecodeLen[I] = M;
@@ -419,6 +419,8 @@ var lastLength = 0;
 function Unpack20(bstream) { //, Solid) {
   var destUnpSize = rBuffer.data.length;
   var oldDistPtr = 0;
+  var Length;
+  var Distance;
   rarReadTables20(bstream);
   while (destUnpSize > rBuffer.ptr) {
     var num = rarDecodeNumber(bstream, LD);
@@ -428,12 +430,12 @@ function Unpack20(bstream) { //, Solid) {
       continue;
     }
     if (num > 269) {
-      var Length = rLDecode[num -= 270] + 3;
+      Length = rLDecode[num -= 270] + 3;
       if ((Bits = rLBits[num]) > 0) {
        Length += bstream.readBits(Bits);
       }
       var DistNumber = rarDecodeNumber(bstream, DD);
-      var Distance = rDDecode[DistNumber] + 1;
+      Distance = rDDecode[DistNumber] + 1;
       if ((Bits = rDBits[DistNumber]) > 0) {
        Distance += bstream.readBits(Bits);
       }
@@ -459,16 +461,16 @@ function Unpack20(bstream) { //, Solid) {
       continue;
     }
     if (num < 261) {
-      var Distance = rOldDist[(oldDistPtr - (num - 256)) & 3];
+      Distance = rOldDist[(oldDistPtr - (num - 256)) & 3];
       var LengthNumber = rarDecodeNumber(bstream, RD);
-      var Length = rLDecode[LengthNumber] +2;
+      Length = rLDecode[LengthNumber] + 2;
       if ((Bits = rLBits[LengthNumber]) > 0) {
        Length += bstream.readBits(Bits);
       }
       if (Distance >= 0x101) {
        Length++;
        if (Distance >= 0x2000) {
-         Length++
+         Length++;
          if (Distance >= 0x40000) Length++;
        }
       }
@@ -488,7 +490,7 @@ function Unpack20(bstream) { //, Solid) {
       continue;
     }
   }
-  rarUpdateProgress()
+  rarUpdateProgress();
 }

 function rarUpdateProgress() {
@@ -511,20 +513,23 @@ function rarReadTables20(bstream) {
   var BitLength = new Array(rBC20);
   var Table = new Array(rMC20 * 4);
   var TableSize, N, I;
-  var AudioBlock = bstream.readBits(1);
-  if (!bstream.readBits(1))
-    for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = 0;
+  bstream.readBits(1);
+  if (!bstream.readBits(1)) {
+    var i;
+    for (i = UnpOldTable20.length; i--;) UnpOldTable20[i] = 0;
+  }
   TableSize = rNC20 + rDC20 + rRC20;
-  for (var I = 0; I < rBC20; I++)
-    BitLength[I] = bstream.readBits(4);
+  for (I = 0; I < rBC20; I++) {
+    BitLength[I] = bstream.readBits(4);
+  }
   rarMakeDecodeTables(BitLength, 0, BD, rBC20);
   I = 0;
   while (I < TableSize) {
     var num = rarDecodeNumber(bstream, BD);
     if (num < 16) {
-      Table[I] = num + UnpOldTable20[I] & 0xf;
-      I++;
-    } else if(num === 16) {
+      Table[I] = num + UnpOldTable20[I] & 0xf;
+      I++;
+    } else if (num === 16) {
       N = bstream.readBits(2) + 3;
       while (N-- > 0 && I < TableSize) {
        Table[I] = Table[I - 1];
@@ -544,7 +549,7 @@ function rarReadTables20(bstream) {
   rarMakeDecodeTables(Table, 0, LD, rNC20);
   rarMakeDecodeTables(Table, rNC20, DD, rDC20);
   rarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20);
-  for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = Table[i];
+  for (i = UnpOldTable20.length; i--;) UnpOldTable20[i] = Table[i];
 }

@@ -554,9 +559,9 @@ function Unpack29(bstream, Solid) {
   var DDecode = new Array(rDC);
   var DBits = new Array(rDC);

-  var Dist=0,BitLength=0,Slot=0;
-
-  for (var I = 0; I < rDBitLengthCounts.length; I++,BitLength++) {
+  var Dist = 0, BitLength = 0, Slot = 0;
+  var I;
+  for (I = 0; I < rDBitLengthCounts.length; I++,BitLength++) {
     for (var J = 0; J < rDBitLengthCounts[I]; J++,Slot++,Dist+=(1<<BitLength)) {
       DDecode[Slot]=Dist;
       DBits[Slot]=BitLength;
@@ -570,8 +575,8 @@ function Unpack29(bstream, Solid) {

   lastDist = 0;
   lastLength = 0;
-
-  for (var i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
+  var i;
+  for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;

   // read in Huffman tables
   RarReadTables(bstream);
@@ -693,7 +698,7 @@ function RarReadVMCode(bstream) {
   var Length = (FirstByte & 7) + 1;
   if (Length === 7) {
     Length = bstream.readBits(8) + 7;
-  } else if(Length === 8) {
+  } else if (Length === 8) {
     Length = bstream.readBits(16);
   }
   var vmCode = [];
@@ -725,9 +730,9 @@ function RarInsertOldDist(distance) {
 //this is the real function, the other one is for debugging
 function rarCopyString(length, distance) {
   var destPtr = rBuffer.ptr - distance;
-  if(destPtr < 0){
+  if (destPtr < 0) {
     var l = rOldBuffers.length;
-    while(destPtr < 0){
+    while (destPtr < 0) {
       destPtr = rOldBuffers[--l].data.length + destPtr;
     }
     //TODO: lets hope that it never needs to read beyond file boundaries
@@ -864,7 +869,7 @@ var unrar = function(arrayBuffer) {
       return aname > bname ? 1 : -1;
     });

-    info(localFiles.map(function(a){return a.filename}).join(', '));
+    info(localFiles.map(function(a) {return a.filename}).join(', '));
     for (var i = 0; i < localFiles.length; ++i) {
       var localfile = localFiles[i];
@@ -883,8 +888,7 @@ var unrar = function(arrayBuffer) {

         postProgress();
       }
-    }
-    else {
+    } else {
       err("Invalid RAR file");
     }
     postMessage(new bitjs.archive.UnarchiveFinishEvent());
@@ -8,11 +8,34 @@
 {% else %}
   <h2>{{entries|length}} {{_('Results for:')}} {{searchterm}}</h2>
 {%endif%}
-{% if g.user.is_authenticated %}
-  <!--a href="{{ url_for('read_books') }}" class="btn btn-primary">{{ _('Search result to shelf') }} </a-->
+{% if g.user.is_authenticated %}
+  {% if g.user.shelf.all() or g.public_shelfes %}
+    <div id="shelf-actions" class="btn-toolbar" role="toolbar">
+      <div class="btn-group" role="group" aria-label="Add to shelves">
+        <button id="add-to-shelf" type="button" class="btn btn-primary btn-sm dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
+          <span class="glyphicon glyphicon-list"></span> {{_('Add to shelf')}}
+          <span class="caret"></span>
+        </button>
+        <ul id="add-to-shelves" class="dropdown-menu" aria-labelledby="add-to-shelf">
+          {% for shelf in g.user.shelf %}
+            {% if shelf.is_public != 1 %}
+              <li>
+                <a href="{{ url_for('search_to_shelf', shelf_id=shelf.id) }}"> {{shelf.name}}</a>
+              </li>
+            {% endif %}
+          {% endfor %}
+          {% for shelf in g.public_shelfes %}
+            <li>
+              <a href="{{ url_for('search_to_shelf', shelf_id=shelf.id) }}">{{shelf.name}}</a>
+            </li>
+          {% endfor %}
+        </ul>
+      </div>
+    </div>
+  {% endif %}
 {% endif %}

 <div class="row">

 {% for entry in entries %}
@@ -5,7 +5,7 @@
 {% if g.user.is_authenticated %}
   {% if (g.user.role_edit_shelfs() and shelf.is_public ) or not shelf.is_public %}
     <div data-toggle="modal" data-target="#DeleteShelfDialog" class="btn btn-danger">{{ _('Delete this Shelf') }} </div>
-    <a href="{{ url_for('edit_shelf', shelf_id=shelf.id) }}" class="btn btn-primary">{{ _('Edit Shelf name') }} </a>
+    <a href="{{ url_for('edit_shelf', shelf_id=shelf.id) }}" class="btn btn-primary">{{ _('Edit Shelf') }} </a>
     <a href="{{ url_for('order_shelf', shelf_id=shelf.id) }}" class="btn btn-primary">{{ _('Change order') }} </a>
   {% endif %}
 {% endif %}
@@ -1515,8 +1515,8 @@ msgid "Delete this Shelf"
 msgstr "Lösche dieses Bücherregal"

 #: cps/templates/shelf.html:8
-msgid "Edit Shelf name"
-msgstr "Bücherregal umbenennen"
+msgid "Edit Shelf"
+msgstr "Bücherregal editieren"

 #: cps/templates/shelf.html:9 cps/templates/shelf_order.html:11
 msgid "Change order"
cps/ub.py (10 changed lines)
@@ -46,9 +46,8 @@ DEFAULT_PASS = "admin123"
 DEFAULT_PORT = int(os.environ.get("CALIBRE_PORT", 8083))

-

 class UserBase:

     @property
     def is_authenticated(self):
         return True
@@ -170,7 +169,7 @@ class User(UserBase, Base):
     theme = Column(Integer, default=0)


-# Class for anonymous user is derived from User base and complets overrides methods and properties for the
+# Class for anonymous user is derived from User base and completly overrides methods and properties for the
 # anonymous user
 class Anonymous(AnonymousUserMixin, UserBase):
     def __init__(self):
@@ -204,7 +203,7 @@ class Anonymous(AnonymousUserMixin, UserBase):
         return False


-# Baseclass representing Shelfs in calibre-web inapp.db
+# Baseclass representing Shelfs in calibre-web in app.db
 class Shelf(Base):
     __tablename__ = 'shelf'

@@ -774,5 +773,6 @@ else:
     migrate_Database()
     clean_database()

-# Generate global Settings Object accecable from every file
+# Generate global Settings Object accessible from every file
 config = Config()
+searched_ids = {}
cps/web.py (157 changed lines)
@@ -47,7 +47,7 @@ from flask_principal import Principal
 from flask_principal import __version__ as flask_principalVersion
 from flask_babel import Babel
 from flask_babel import gettext as _
-import pytz
+
 import requests
 from werkzeug.security import generate_password_hash, check_password_hash
 from werkzeug.datastructures import Headers
@@ -184,14 +184,6 @@ lm.anonymous_user = ub.Anonymous
 app.secret_key = os.getenv('SECRET_KEY', 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT')
 db.setup_db()

-'''if config.config_log_level == logging.DEBUG:
-    logging.getLogger("sqlalchemy.engine").addHandler(file_handler)
-    logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
-    logging.getLogger("sqlalchemy.pool").addHandler(file_handler)
-    logging.getLogger("sqlalchemy.pool").setLevel(config.config_log_level)
-    logging.getLogger("sqlalchemy.orm").addHandler(file_handler)
-    logging.getLogger("sqlalchemy.orm").setLevel(config.config_log_level)'''
-

 def is_gdrive_ready():
     return os.path.exists(os.path.join(config.get_main_dir,'settings.yaml')) and \
@@ -462,7 +454,7 @@ def edit_required(f):
     return inner


-# Language and content filters
+# Language and content filters for displaying in the UI
 def common_filters():
     if current_user.filter_language() != "all":
         lang_filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
@@ -473,6 +465,19 @@ def common_filters():
     return and_(lang_filter, ~content_rating_filter)


+# Creates for all stored languages a translated speaking name in the array for the UI
+def speaking_language(languages = None):
+    if not languages:
+        languages = db.session.query(db.Languages).all()
+    for lang in languages:
+        try:
+            cur_l = LC.parse(lang.lang_code)
+            lang.name = cur_l.get_language_name(get_locale())
+        except Exception:
+            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+    return languages
+
+
 # Fill indexpage with all requested data from database
 def fill_indexpage(page, database, db_filter, order, *join):
     if current_user.show_detail_random():
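Aside: the speaking_language() helper added above centralizes the LC.parse loop that the following hunks delete from get_languages_json, language_overview, advanced_search, profile, new_user and edit_user. Below is a minimal, self-contained sketch of the same pattern, a locale-aware display name with an ISO-639 fallback; FakeLanguage, ISO_NAMES, display_name and the sample codes are illustrative stand-ins, not calibre-web or iso639 APIs.

# Standalone sketch of the fallback pattern behind speaking_language():
# try the locale-aware name first, fall back to a generic ISO-639 name.
class FakeLanguage:
    def __init__(self, lang_code):
        self.lang_code = lang_code
        self.name = lang_code          # replaced by a display name below

ISO_NAMES = {"deu": "German", "eng": "English"}   # stand-in for isoLanguages

def display_name(lang_code, locale_names):
    try:
        # web.py uses LC.parse(lang_code).get_language_name(get_locale()) here
        return locale_names[lang_code]
    except KeyError:
        # mirrors the except branch: _(isoLanguages.get(part3=...).name)
        return ISO_NAMES.get(lang_code, lang_code)

def speaking_language_sketch(languages=None):
    languages = languages or [FakeLanguage(c) for c in ("deu", "eng", "xxx")]
    for lang in languages:
        lang.name = display_name(lang.lang_code, {"deu": "Deutsch"})
    return languages

print([l.name for l in speaking_language_sketch()])   # ['Deutsch', 'English', 'xxx']

The real helper differs only in that it queries db.Languages when no list is passed and resolves names through LC.parse() and get_locale().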
@@ -489,6 +494,8 @@ def fill_indexpage(page, database, db_filter, order, *join):
     return entries, random, pagination


+# Modifies different Database objects, first check if elements have to be added to database, than check
+# if elements have to be deleted, because they are no longer used
 def modify_database_object(input_elements, db_book_object, db_object, db_session, db_type):
     input_elements = [x for x in input_elements if x != '']
     # we have all input element (authors, series, tags) names now
@@ -557,7 +564,7 @@ def modify_database_object(input_elements, db_book_object, db_object, db_session
             db_book_object.append(new_element)


-# read search results from calibre-database and return it
+# read search results from calibre-database and return it (function is used for feed and simple search
 def get_search_results(term):
     q = list()
     authorterms = re.split("[, ]+", term)
@@ -581,7 +588,6 @@ def feed_search(term):
         entriescount = len(entries) if len(entries) > 0 else 1
         pagination = Pagination(1, entriescount, entriescount)
         return render_xml_template('feed.xml', searchterm=term, entries=entries, pagination=pagination)
-
     else:
         return render_xml_template('feed.xml', searchterm="")

@@ -907,13 +913,7 @@ def get_tags_json():
 def get_languages_json():
     if request.method == "GET":
         query = request.args.get('q').lower()
-        languages = db.session.query(db.Languages).all()
-        for lang in languages:
-            try:
-                cur_l = LC.parse(lang.lang_code)
-                lang.name = cur_l.get_language_name(get_locale())
-            except Exception:
-                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+        languages = speaking_language()
         entries = [s for s in languages if query in s.name.lower()]
         json_dumps = json.dumps([dict(name=r.name) for r in entries])
         return json_dumps
@@ -1212,13 +1212,7 @@ def series(book_id, page):
 def language_overview():
     if current_user.show_language():
         if current_user.filter_language() == u"all":
-            languages = db.session.query(db.Languages).all()
-            for lang in languages:
-                try:
-                    cur_l = LC.parse(lang.lang_code)
-                    lang.name = cur_l.get_language_name(get_locale())
-                except Exception:
-                    lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+            languages = speaking_language()
         else:
             try:
                 cur_l = LC.parse(current_user.filter_language())
@@ -1521,8 +1515,7 @@ def google_drive_callback():
             f.write(credentials.to_json())
     except ValueError as error:
         app.logger.error(error)
-    finally:
-        return redirect(url_for('configuration'))
+    return redirect(url_for('configuration'))


 @app.route("/gdrive/watch/subscribe")
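Aside: the google_drive_callback change above is more than cosmetic. A return statement inside a finally block discards any exception that is still propagating, so failures other than the handled ValueError were silently swallowed; with the redirect moved after the try/except they reach Flask's error handling again. A small generic illustration of that pitfall (plain Python, not calibre-web code):

def swallowed():
    try:
        raise RuntimeError("boom")
    finally:
        return "redirect"        # discards the in-flight RuntimeError

def propagated():
    try:
        raise RuntimeError("boom")
    except ValueError as error:  # does not match, so the error escapes
        print(error)
    return "redirect"

print(swallowed())               # prints "redirect", the RuntimeError is lost
try:
    propagated()
except RuntimeError as e:
    print("caller sees:", e)     # caller sees: boom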
@@ -1730,12 +1723,8 @@ def advanced_search():
         serie_names = db.session.query(db.Series).filter(db.Series.id.in_(include_series_inputs)).all()
         searchterm.extend(serie.name for serie in serie_names)
         language_names = db.session.query(db.Languages).filter(db.Languages.id.in_(include_languages_inputs)).all()
-        for lang in language_names:
-            try:
-                cur_l = LC.parse(lang.lang_code)
-                lang.name = cur_l.get_language_name(get_locale())
-            except Exception:
-                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+        if language_names:
+            language_names = speaking_language(language_names)
         searchterm.extend(language.name for language in language_names)
         if rating_high:
             searchterm.extend([_(u"Rating <= %s" % rating_high)])
@@ -1794,22 +1783,19 @@ def advanced_search():
                 q = q.filter(getattr(db.Books, 'custom_column_'+str(c.id)).any(
                     db.cc_classes[c.id].value.ilike("%" + custom_query + "%")))
         q = q.all()
+        ids = list()
+        for element in q:
+            ids.append(element.id)
+        ub.searched_ids[current_user.id] = ids
         return render_title_template('search.html', searchterm=searchterm,
                                      entries=q, title=_(u"search"), page="search")
     # prepare data for search-form
     tags = db.session.query(db.Tags).order_by(db.Tags.name).all()
     series = db.session.query(db.Series).order_by(db.Series.name).all()
     if current_user.filter_language() == u"all":
-        languages = db.session.query(db.Languages).all()
-        for lang in languages:
-            try:
-                cur_l = LC.parse(lang.lang_code)
-                lang.name = cur_l.get_language_name(get_locale())
-            except Exception:
-                lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+        languages = speaking_language()
     else:
         languages = None

     return render_title_template('search_form.html', tags=tags, languages=languages,
                                  series=series, title=_(u"search"), cc=cc, page="advsearch")
@@ -2213,7 +2199,57 @@ def add_to_shelf(shelf_id, book_id):
         return redirect(request.environ["HTTP_REFERER"])
     return "", 204

+@app.route("/shelf/massadd/<int:shelf_id>")
+@login_required
+def search_to_shelf(shelf_id):
+    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
+    if shelf is None:
+        app.logger.info("Invalid shelf specified")
+        return "Invalid shelf specified", 400
+
+    if not shelf.is_public and not shelf.user_id == int(current_user.id):
+        app.logger.info("Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name)
+        return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
+
+    if shelf.is_public and not current_user.role_edit_shelfs():
+        app.logger.info("User is not allowed to edit public shelves")
+        return "User is not allowed to edit public shelves", 403
+
+    if current_user.id in ub.searched_ids and ub.searched_ids[current_user.id]:
+        books_for_shelf = list()
+        books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).all()
+        if books_in_shelf:
+            book_ids = list()
+            for book_id in books_in_shelf:
+                book_ids.append(book_id.book_id)
+            for id in ub.searched_ids[current_user.id]:
+                if id not in book_ids:
+                    books_for_shelf.append(id)
+        else:
+            books_for_shelf = ub.searched_ids[current_user.id]
+
+        if not books_for_shelf:
+            app.logger.info("Books are already part of the shelf: %s" % shelf.name)
+            flash(_(u"Books are already part of the shelf: %s" % shelf.name), category="error")
+            return redirect(url_for('index'))
+
+        maxOrder = ub.session.query(func.max(ub.BookShelf.order)).filter(ub.BookShelf.shelf == shelf_id).first()
+        if maxOrder[0] is None:
+            maxOrder = 0
+        else:
+            maxOrder = maxOrder[0]
+
+        for book in books_for_shelf:
+            maxOrder = maxOrder + 1
+            ins = ub.BookShelf(shelf=shelf.id, book_id=book, order=maxOrder)
+            ub.session.add(ins)
+        ub.session.commit()
+        flash(_(u"Books have been added to shelf: %(sname)s", sname=shelf.name), category="success")
+    else:
+        flash(_(u"Could not add books to shelf: %(sname)s", sname=shelf.name), category="error")
+    return redirect(url_for('index'))
+
+
 @app.route("/shelf/remove/<int:shelf_id>/<int:book_id>")
 @login_required
 def remove_from_shelf(shelf_id, book_id):
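Aside: the new /shelf/massadd/<int:shelf_id> view works on the book ids that advanced_search now stores in ub.searched_ids (see the earlier web.py hunk), skipping ids already on the shelf and appending the rest with increasing order values. The sketch below isolates that filter-and-append step with plain dicts and lists; searched_ids, shelf_rows and add_search_results are illustrative names, not the ub.BookShelf ORM model.

# Plain-data sketch: drop ids already on the shelf, append the rest in order.
searched_ids = {1: [10, 11, 12, 13]}          # user id -> ids of the last search
shelf_rows = [{"book_id": 11, "order": 1}]    # rows already on the shelf

def add_search_results(user_id, rows):
    existing = {row["book_id"] for row in rows}
    to_add = [b for b in searched_ids.get(user_id, []) if b not in existing]
    max_order = max((row["order"] for row in rows), default=0)
    for book_id in to_add:
        max_order += 1
        rows.append({"book_id": book_id, "order": max_order})
    return to_add

print(add_search_results(1, shelf_rows))      # [10, 12, 13]
print(shelf_rows[-1])                         # {'book_id': 13, 'order': 4}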
@@ -2397,13 +2433,7 @@ def order_shelf(shelf_id):
 def profile():
     content = ub.session.query(ub.User).filter(ub.User.id == int(current_user.id)).first()
     downloads = list()
-    languages = db.session.query(db.Languages).all()
-    for lang in languages:
-        try:
-            cur_l = LC.parse(lang.lang_code)
-            lang.name = cur_l.get_language_name(get_locale())
-        except Exception:
-            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+    languages = speaking_language()
     translations = babel.list_translations() + [LC('en')]
     for book in content.downloads:
         downloadBook = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
@@ -2769,13 +2799,7 @@ def configuration_helper(origin):
 @admin_required
 def new_user():
     content = ub.User()
-    languages = db.session.query(db.Languages).all()
-    for lang in languages:
-        try:
-            cur_l = LC.parse(lang.lang_code)
-            lang.name = cur_l.get_language_name(get_locale())
-        except Exception:
-            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+    languages = speaking_language()
     translations = [LC('en')] + babel.list_translations()
     if request.method == "POST":
         to_save = request.form.to_dict()
@@ -2880,13 +2904,7 @@ def edit_mailsettings():
 def edit_user(user_id):
     content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()  # type: ub.User
     downloads = list()
-    languages = db.session.query(db.Languages).all()
-    for lang in languages:
-        try:
-            cur_l = LC.parse(lang.lang_code)
-            lang.name = cur_l.get_language_name(get_locale())
-        except Exception:
-            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
+    languages = speaking_language()
     translations = babel.list_translations() + [LC('en')]
     for book in content.downloads:
         downloadBook = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
@@ -3099,7 +3117,6 @@ def edit_book(book_id):
                 uploadText=_(u"File format %s added to %s" % (file_ext.upper(),book.title))
                 helper.global_WorkerThread.add_upload(current_user.nickname,
                     "<a href=\""+ url_for('show_book', book_id=book.id) +"\">"+ uploadText + "</a>")
-

     to_save = request.form.to_dict()

@@ -3284,7 +3301,7 @@ def edit_book(book_id):
                                          title=_(u"edit metadata"), page="editbook")
         else:
             db.session.rollback()
-            flash( error, category="error")
+            flash(error, category="error")
             return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
                                          title=_(u"edit metadata"), page="editbook")
     except Exception as e:
@@ -3394,7 +3411,7 @@ def upload():
             db_book.series.append(db_series)
         if db_language is not None:
             db_book.languages.append(db_language)
-        file_size = os.path.getsize(saved_filename)
+        file_size = os.path.getsize(saved_filename)
         db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)

         # handle tags
@@ -3406,7 +3423,7 @@ def upload():
         # flush content, get db_book.id available
         db_book.data.append(db_data)
         db.session.add(db_book)
-        db.session.flush()
+        db.session.flush()

         # add comment
         book_id = db_book.id
@@ -3429,12 +3446,12 @@ def upload():
             gdriveutils.updateGdriveCalibreFromLocal()
         if error:
             flash(error, category="error")
-        uploadText=_(u"File %s uploaded" % book.title)
+        uploadText=_(u"File %s uploaded" % book.title)
         helper.global_WorkerThread.add_upload(current_user.nickname,
             "<a href=\"" + url_for('show_book', book_id=book.id) + "\">" + uploadText + "</a>")

-        # create data for displaying display Full language name instead of iso639.part3language
-        if db_language is not None:
+        # create data for displaying display Full language name instead of iso639.part3language
+        if db_language is not None:
             book.languages[0].language_name = _(meta.languages)
         author_names = []
         for author in db_book.authors:
@@ -205,7 +205,7 @@ class WorkerThread(threading.Thread):
             self.UIqueue[self.current]['runtime'] = self._formatRuntime(
                 datetime.now() - self.queue[self.current]['starttime'])
         return self.UIqueue


     def convert_mobi(self):
         # convert book, and upload in case of google drive
         self.queue[self.current]['status'] = STAT_STARTED
@@ -275,7 +275,7 @@ class WorkerThread(threading.Thread):
         # kindlegen returncodes
         # 0 = Info(prcgen):I1036: Mobi file built successfully
         # 1 = Info(prcgen):I1037: Mobi file built with WARNINGS!
-        # 2 = Info(prcgen):I1038: MOBI file could not be generated because of errors!
+        # 2 = Info(prcgen):I1038: MOBI file could not be generated because of errors!
         if ( check < 2 and web.ub.config.config_ebookconverter == 1) or \
                 (check == 0 and web.ub.config.config_ebookconverter == 2):
             cur_book = web.db.session.query(web.db.Books).filter(web.db.Books.id == bookid).first()
@@ -292,7 +292,7 @@ class WorkerThread(threading.Thread):
             self.UIqueue[self.current]['progress'] = "100 %"
             self.UIqueue[self.current]['runtime'] = self._formatRuntime(
                 datetime.now() - self.queue[self.current]['starttime'])
-            return file_path + ".mobi"
+            return file_path + ".mobi"
         else:
             web.app.logger.info("ebook converter failed with error while converting book")
             if not error_message:
@@ -352,7 +352,7 @@ class WorkerThread(threading.Thread):

     def send_raw_email(self):
         self.queue[self.current]['starttime'] = datetime.now()
-        self.UIqueue[self.current]['formStarttime'] = self.queue[self.current]['starttime']
+        self.UIqueue[self.current]['formStarttime'] = self.queue[self.current]['starttime']
         self.queue[self.current]['status'] = STAT_STARTED
         self.UIqueue[self.current]['status'] = _('Started')
         obj=self.queue[self.current]
@@ -414,7 +414,7 @@ class WorkerThread(threading.Thread):
             sys.stderr = org_stderr

         except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as e:
-            self._handleError(error_message)
+            self._handleError(e)
             return None

     def _formatRuntime(self, runtime):
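Aside: the last hunk is the "bugfix error message on failed emails" from the commit title. When an SMTP send fails, _handleError now receives the caught exception e instead of the previously passed error_message variable, so the reported text reflects the actual failure. A self-contained sketch of that pattern follows; send_raw_email_sketch, handle_error and failing_send are stand-ins for the worker code, not its real API.

import smtplib
import socket

def handle_error(err):
    # stand-in for WorkerThread._handleError: record the failure text for the UI
    print("email task failed:", err)

def send_raw_email_sketch(send):
    # fixed pattern: report the caught exception object itself
    try:
        send()
    except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as e:
        handle_error(e)
        return None
    return "success"

def failing_send():
    raise smtplib.SMTPException("relay refused")

print(send_raw_email_sketch(failing_send))    # email task failed: relay refused, then None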