Code refactoring
This commit is contained in:
parent 82e15d2e98
commit 59ebc1af8a
cps/admin.py: 89 changed lines
@@ -1495,6 +1495,51 @@ def get_updater_status():
     return ''
 
 
+def ldap_import_create_user(user, user_data):
+    user_login_field = extract_dynamic_field_from_filter(user, config.config_ldap_user_object)
+
+    username = user_data[user_login_field][0].decode('utf-8')
+    # check for duplicate username
+    if ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == username.lower()).first():
+        # if ub.session.query(ub.User).filter(ub.User.nickname == username).first():
+        log.warning("LDAP User %s Already in Database", user_data)
+        return 0, None
+
+    kindlemail = ''
+    if 'mail' in user_data:
+        useremail = user_data['mail'][0].decode('utf-8')
+        if len(user_data['mail']) > 1:
+            kindlemail = user_data['mail'][1].decode('utf-8')
+
+    else:
+        log.debug('No Mail Field Found in LDAP Response')
+        useremail = username + '@email.com'
+    # check for duplicate email
+    if ub.session.query(ub.User).filter(func.lower(ub.User.email) == useremail.lower()).first():
+        log.warning("LDAP Email %s Already in Database", user_data)
+        return 0, None
+    content = ub.User()
+    content.nickname = username
+    content.password = ''  # dummy password which will be replaced by ldap one
+    content.email = useremail
+    content.kindle_mail = kindlemail
+    content.role = config.config_default_role
+    content.sidebar_view = config.config_default_show
+    content.allowed_tags = config.config_allowed_tags
+    content.denied_tags = config.config_denied_tags
+    content.allowed_column_value = config.config_allowed_column_value
+    content.denied_column_value = config.config_denied_column_value
+    ub.session.add(content)
+    try:
+        ub.session.commit()
+        return 1, None  # increase no of users
+    except Exception as e:
+        log.warning("Failed to create LDAP user: %s - %s", user, e)
+        ub.session.rollback()
+        message = _(u'Failed to Create at Least One LDAP User')
+        return 0, message
+
+
 @admi.route('/import_ldap_users')
 @login_required
 @admin_required
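Note: the nickname and e-mail duplicate checks above rely on SQLAlchemy's func.lower() for case-insensitive matching. A minimal, self-contained sketch of that pattern (illustrative model and session, not calibre-web's ub module; assumes SQLAlchemy 1.4+):

```python
from sqlalchemy import Column, Integer, String, create_engine, func
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    nickname = Column(String)
    email = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(User(nickname='Alice', email='Alice@example.org'))
session.commit()

def is_duplicate_nickname(name):
    # Case-insensitive lookup, same shape as the check in ldap_import_create_user.
    return session.query(User).filter(func.lower(User.nickname) == name.lower()).first() is not None

print(is_duplicate_nickname('alice'))  # True, despite the different case
```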
@@ -1534,47 +1579,11 @@ def import_ldap_users():
                 log.debug_or_exception(e)
                 continue
             if user_data:
-                user_login_field = extract_dynamic_field_from_filter(user, config.config_ldap_user_object)
-
-                username = user_data[user_login_field][0].decode('utf-8')
-                # check for duplicate username
-                if ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == username.lower()).first():
-                    # if ub.session.query(ub.User).filter(ub.User.nickname == username).first():
-                    log.warning("LDAP User %s Already in Database", user_data)
-                    continue
-
-                kindlemail = ''
-                if 'mail' in user_data:
-                    useremail = user_data['mail'][0].decode('utf-8')
-                    if len(user_data['mail']) > 1:
-                        kindlemail = user_data['mail'][1].decode('utf-8')
-
+                user_count, message = ldap_import_create_user(user, user_data, showtext)
+                if message:
+                    showtext['text'] = message
                 else:
-                    log.debug('No Mail Field Found in LDAP Response')
-                    useremail = username + '@email.com'
-                # check for duplicate email
-                if ub.session.query(ub.User).filter(func.lower(ub.User.email) == useremail.lower()).first():
-                    log.warning("LDAP Email %s Already in Database", user_data)
-                    continue
-                content = ub.User()
-                content.nickname = username
-                content.password = ''  # dummy password which will be replaced by ldap one
-                content.email = useremail
-                content.kindle_mail = kindlemail
-                content.role = config.config_default_role
-                content.sidebar_view = config.config_default_show
-                content.allowed_tags = config.config_allowed_tags
-                content.denied_tags = config.config_denied_tags
-                content.allowed_column_value = config.config_allowed_column_value
-                content.denied_column_value = config.config_denied_column_value
-                ub.session.add(content)
-                try:
-                    ub.session.commit()
-                    imported += 1
-                except Exception as e:
-                    log.warning("Failed to create LDAP user: %s - %s", user, e)
-                    ub.session.rollback()
-                    showtext['text'] = _(u'Failed to Create at Least One LDAP User')
+                    imported += user_count
             else:
                 log.warning("LDAP User: %s Not Found", user)
                 showtext['text'] = _(u'At Least One LDAP User Not Found in Database')
cps/db.py: 138 changed lines
@@ -442,12 +442,80 @@ class CalibreDB():
         self.instances.add(self)
 
     def initSession(self, expire_on_commit=True):
         self.session = self.session_factory()
         self.session.expire_on_commit = expire_on_commit
         self.update_title_sort(self.config)
 
+    @classmethod
+    def setup_db_cc_classes(self, cc):
+        cc_ids = []
+        books_custom_column_links = {}
+        for row in cc:
+            if row.datatype not in cc_exceptions:
+                if row.datatype == 'series':
+                    dicttable = {'__tablename__': 'books_custom_column_' + str(row.id) + '_link',
+                                 'id': Column(Integer, primary_key=True),
+                                 'book': Column(Integer, ForeignKey('books.id'),
+                                                primary_key=True),
+                                 'map_value': Column('value', Integer,
+                                                     ForeignKey('custom_column_' +
+                                                                str(row.id) + '.id'),
+                                                     primary_key=True),
+                                 'extra': Column(Float),
+                                 'asoc': relationship('custom_column_' + str(row.id), uselist=False),
+                                 'value': association_proxy('asoc', 'value')
+                                 }
+                    books_custom_column_links[row.id] = type(str('books_custom_column_' + str(row.id) + '_link'),
+                                                             (Base,), dicttable)
+                else:
+                    books_custom_column_links[row.id] = Table('books_custom_column_' + str(row.id) + '_link',
+                                                              Base.metadata,
+                                                              Column('book', Integer, ForeignKey('books.id'),
+                                                                     primary_key=True),
+                                                              Column('value', Integer,
+                                                                     ForeignKey('custom_column_' +
+                                                                                str(row.id) + '.id'),
+                                                                     primary_key=True)
+                                                              )
+                cc_ids.append([row.id, row.datatype])
+
+                ccdict = {'__tablename__': 'custom_column_' + str(row.id),
+                          'id': Column(Integer, primary_key=True)}
+                if row.datatype == 'float':
+                    ccdict['value'] = Column(Float)
+                elif row.datatype == 'int':
+                    ccdict['value'] = Column(Integer)
+                elif row.datatype == 'bool':
+                    ccdict['value'] = Column(Boolean)
+                else:
+                    ccdict['value'] = Column(String)
+                if row.datatype in ['float', 'int', 'bool']:
+                    ccdict['book'] = Column(Integer, ForeignKey('books.id'))
+                cc_classes[row.id] = type(str('custom_column_' + str(row.id)), (Base,), ccdict)
+
+        for cc_id in cc_ids:
+            if (cc_id[1] == 'bool') or (cc_id[1] == 'int') or (cc_id[1] == 'float'):
+                setattr(Books,
+                        'custom_column_' + str(cc_id[0]),
+                        relationship(cc_classes[cc_id[0]],
+                                     primaryjoin=(
+                                         Books.id == cc_classes[cc_id[0]].book),
+                                     backref='books'))
+            elif (cc_id[1] == 'series'):
+                setattr(Books,
+                        'custom_column_' + str(cc_id[0]),
+                        relationship(books_custom_column_links[cc_id[0]],
+                                     backref='books'))
+            else:
+                setattr(Books,
+                        'custom_column_' + str(cc_id[0]),
+                        relationship(cc_classes[cc_id[0]],
+                                     secondary=books_custom_column_links[cc_id[0]],
+                                     backref='books'))
+
+        return cc_classes
+
     @classmethod
     def setup_db(cls, config, app_db_path):
         cls.config = config
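Note: setup_db_cc_classes() creates one mapped class per Calibre custom column at runtime by handing a dict of Column objects to type(). A stripped-down sketch of that technique with illustrative names (assumes SQLAlchemy 1.4+, not calibre-web's Base or models):

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

def build_custom_column_class(column_id, value_type=String):
    # Same idea as setup_db_cc_classes: assemble the class dict, then call type().
    attrs = {
        '__tablename__': 'custom_column_' + str(column_id),
        'id': Column(Integer, primary_key=True),
        'value': Column(value_type),
    }
    return type('custom_column_' + str(column_id), (Base,), attrs)

CustomColumn7 = build_custom_column_class(7)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(CustomColumn7(value='example'))
session.commit()
print(session.query(CustomColumn7).count())  # 1
```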
@@ -483,72 +551,8 @@ class CalibreDB():
             config.db_configured = True
 
         if not cc_classes:
-            cc = conn.execute(text("SELECT id, datatype FROM custom_columns"))
-
-            cc_ids = []
-            books_custom_column_links = {}
-            for row in cc:
-                if row.datatype not in cc_exceptions:
-                    if row.datatype == 'series':
-                        dicttable = {'__tablename__': 'books_custom_column_' + str(row.id) + '_link',
-                                     'id': Column(Integer, primary_key=True),
-                                     'book': Column(Integer, ForeignKey('books.id'),
-                                                    primary_key=True),
-                                     'map_value': Column('value', Integer,
-                                                         ForeignKey('custom_column_' +
-                                                                    str(row.id) + '.id'),
-                                                         primary_key=True),
-                                     'extra': Column(Float),
-                                     'asoc': relationship('custom_column_' + str(row.id), uselist=False),
-                                     'value': association_proxy('asoc', 'value')
-                                     }
-                        books_custom_column_links[row.id] = type(str('books_custom_column_' + str(row.id) + '_link'),
-                                                                 (Base,), dicttable)
-                    else:
-                        books_custom_column_links[row.id] = Table('books_custom_column_' + str(row.id) + '_link',
-                                                                  Base.metadata,
-                                                                  Column('book', Integer, ForeignKey('books.id'),
-                                                                         primary_key=True),
-                                                                  Column('value', Integer,
-                                                                         ForeignKey('custom_column_' +
-                                                                                    str(row.id) + '.id'),
-                                                                         primary_key=True)
-                                                                  )
-                    cc_ids.append([row.id, row.datatype])
-
-                    ccdict = {'__tablename__': 'custom_column_' + str(row.id),
-                              'id': Column(Integer, primary_key=True)}
-                    if row.datatype == 'float':
-                        ccdict['value'] = Column(Float)
-                    elif row.datatype == 'int':
-                        ccdict['value'] = Column(Integer)
-                    elif row.datatype == 'bool':
-                        ccdict['value'] = Column(Boolean)
-                    else:
-                        ccdict['value'] = Column(String)
-                    if row.datatype in ['float', 'int', 'bool']:
-                        ccdict['book'] = Column(Integer, ForeignKey('books.id'))
-                    cc_classes[row.id] = type(str('custom_column_' + str(row.id)), (Base,), ccdict)
-
-            for cc_id in cc_ids:
-                if (cc_id[1] == 'bool') or (cc_id[1] == 'int') or (cc_id[1] == 'float'):
-                    setattr(Books,
-                            'custom_column_' + str(cc_id[0]),
-                            relationship(cc_classes[cc_id[0]],
-                                         primaryjoin=(
-                                             Books.id == cc_classes[cc_id[0]].book),
-                                         backref='books'))
-                elif (cc_id[1] == 'series'):
-                    setattr(Books,
-                            'custom_column_' + str(cc_id[0]),
-                            relationship(books_custom_column_links[cc_id[0]],
-                                         backref='books'))
-                else:
-                    setattr(Books,
-                            'custom_column_' + str(cc_id[0]),
-                            relationship(cc_classes[cc_id[0]],
-                                         secondary=books_custom_column_links[cc_id[0]],
-                                         backref='books'))
-
+            cc = conn.execute("SELECT id, datatype FROM custom_columns")
+            cls.setup_db_cc_classes(cc)
 
         cls.session_factory = scoped_session(sessionmaker(autocommit=False,
                                                           autoflush=True,
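Note: after the refactor, setup_db() only fetches the custom-column rows and delegates to setup_db_cc_classes(); the rows just need to expose id and datatype by name. A standalone sketch of that row contract using plain SQLAlchemy and an in-memory SQLite database (text() is used here so the snippet also runs on SQLAlchemy 1.4/2.x):

```python
from sqlalchemy import create_engine, text

engine = create_engine('sqlite://')
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE custom_columns (id INTEGER PRIMARY KEY, datatype TEXT)"))
    conn.execute(text("INSERT INTO custom_columns (datatype) VALUES ('series'), ('int')"))
    # Iterating the result yields rows whose columns are accessible by name,
    # which is the shape setup_db_cc_classes() expects from its `cc` argument.
    for row in conn.execute(text("SELECT id, datatype FROM custom_columns")):
        print(row.id, row.datatype)
```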
@@ -883,6 +883,48 @@ def create_book_on_upload(modif_date, meta):
         calibre_db.session.flush()
     return db_book, input_authors, title_dir
 
 
+def file_handling_on_upload(requested_file):
+    # check if file extension is correct
+    if '.' in requested_file.filename:
+        file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
+        if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
+            flash(
+                _("File extension '%(ext)s' is not allowed to be uploaded to this server",
+                  ext=file_ext), category="error")
+            return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+    else:
+        flash(_('File to be uploaded must have an extension'), category="error")
+        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+
+    # extract metadata from file
+    try:
+        meta = uploader.upload(requested_file, config.config_rarfile_location)
+    except (IOError, OSError):
+        log.error("File %s could not saved to temp dir", requested_file.filename)
+        flash(_(u"File %(filename)s could not saved to temp dir",
+                filename=requested_file.filename), category="error")
+        return None, Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+    return meta, None
+
+
+def move_coverfile(meta, db_book):
+    # move cover to final directory, including book id
+    if meta.cover:
+        coverfile = meta.cover
+    else:
+        coverfile = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
+    new_coverpath = os.path.join(config.config_calibre_dir, db_book.path, "cover.jpg")
+    try:
+        copyfile(coverfile, new_coverpath)
+        if meta.cover:
+            os.unlink(meta.cover)
+    except OSError as e:
+        log.error("Failed to move cover file %s: %s", new_coverpath, e)
+        flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=new_coverpath,
+                error=e),
+              category="error")
+
+
 @editbook.route("/upload", methods=["GET", "POST"])
 @login_required_if_no_ano
 @upload_required
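Note: file_handling_on_upload() signals failure by returning a (None, response) pair instead of raising, so the caller can return the prepared JSON redirect directly. A tiny illustration of that result/error tuple pattern outside Flask (all names here are made up):

```python
import json

def parse_size(raw):
    """Return (value, error); exactly one of the two is None."""
    try:
        return int(raw), None
    except ValueError:
        error = json.dumps({"location": "/", "message": "invalid size"})
        return None, error

value, error = parse_size("12a")
if error:
    print("abort with:", error)    # the caller returns the ready-made error response
else:
    print("continue with:", value)
```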
@@ -897,30 +939,15 @@ def upload():
             calibre_db.update_title_sort(config)
             calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
 
-            # check if file extension is correct
-            if '.' in requested_file.filename:
-                file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
-                if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
-                    flash(
-                        _("File extension '%(ext)s' is not allowed to be uploaded to this server",
-                          ext=file_ext), category="error")
-                    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+            response, error = file_handling_on_upload(requested_file)
+            if error:
+                return response
             else:
-                flash(_('File to be uploaded must have an extension'), category="error")
-                return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
-
-            # extract metadata from file
-            try:
-                meta = uploader.upload(requested_file, config.config_rarfile_location)
-            except (IOError, OSError):
-                log.error("File %s could not saved to temp dir", requested_file.filename)
-                flash(_(u"File %(filename)s could not saved to temp dir",
-                        filename= requested_file.filename), category="error")
-                return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+                meta = response
 
             db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
 
-            # Comments needs book id therfore only possible after flush
+            # Comments needs book id therefore only possible after flush
             modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
 
             book_id = db_book.id
@@ -932,21 +959,7 @@ def upload():
                                           meta.file_path,
                                           title_dir + meta.extension)
 
-            # move cover to final directory, including book id
-            if meta.cover:
-                coverfile = meta.cover
-            else:
-                coverfile = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')
-            new_coverpath = os.path.join(config.config_calibre_dir, db_book.path, "cover.jpg")
-            try:
-                copyfile(coverfile, new_coverpath)
-                if meta.cover:
-                    os.unlink(meta.cover)
-            except OSError as e:
-                log.error("Failed to move cover file %s: %s", new_coverpath, e)
-                flash(_(u"Failed to Move Cover File %(file)s: %(error)s", file=new_coverpath,
-                        error=e),
-                      category="error")
+            move_coverfile(meta, db_book)
 
             # save data to database, reread data
             calibre_db.session.commit()
cps/epub.py: 58 changed lines
@@ -87,18 +87,29 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
         lang = epub_metadata['language'].split('-', 1)[0].lower()
         epub_metadata['language'] = isoLanguages.get_lang3(lang)
 
-    series = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='calibre:series']/@content", namespaces=ns)
-    if len(series) > 0:
-        epub_metadata['series'] = series[0]
-    else:
-        epub_metadata['series'] = ''
-
-    series_id = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='calibre:series_index']/@content", namespaces=ns)
-    if len(series_id) > 0:
-        epub_metadata['series_id'] = series_id[0]
-    else:
-        epub_metadata['series_id'] = '1'
+    epub_metadata = parse_epbub_series(tree, epub_metadata)
+
+    coverfile = parse_ebpub_cover(ns, tree, epubZip, coverpath, tmp_file_path)
+
+    if not epub_metadata['title']:
+        title = original_file_name
+    else:
+        title = epub_metadata['title']
+
+    return BookMeta(
+        file_path=tmp_file_path,
+        extension=original_file_extension,
+        title=title.encode('utf-8').decode('utf-8'),
+        author=epub_metadata['creator'].encode('utf-8').decode('utf-8'),
+        cover=coverfile,
+        description=epub_metadata['description'],
+        tags=epub_metadata['subject'].encode('utf-8').decode('utf-8'),
+        series=epub_metadata['series'].encode('utf-8').decode('utf-8'),
+        series_id=epub_metadata['series_id'].encode('utf-8').decode('utf-8'),
+        languages=epub_metadata['language'],
+        publisher="")
+
+
+def parse_ebpub_cover(ns, tree, epubZip, coverpath, tmp_file_path):
     coversection = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='cover-image']/@href", namespaces=ns)
     coverfile = None
     if len(coversection) > 0:
@@ -126,21 +137,18 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
             coverfile = extractCover(epubZip, filename, "", tmp_file_path)
         else:
             coverfile = extractCover(epubZip, coversection[0], coverpath, tmp_file_path)
-
-    if not epub_metadata['title']:
-        title = original_file_name
-    else:
-        title = epub_metadata['title']
-
-    return BookMeta(
-        file_path=tmp_file_path,
-        extension=original_file_extension,
-        title=title.encode('utf-8').decode('utf-8'),
-        author=epub_metadata['creator'].encode('utf-8').decode('utf-8'),
-        cover=coverfile,
-        description=epub_metadata['description'],
-        tags=epub_metadata['subject'].encode('utf-8').decode('utf-8'),
-        series=epub_metadata['series'].encode('utf-8').decode('utf-8'),
-        series_id=epub_metadata['series_id'].encode('utf-8').decode('utf-8'),
-        languages=epub_metadata['language'],
-        publisher="")
+    return coverfile
+
+
+def parse_epbub_series(tree, epub_metadata):
+    series = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='calibre:series']/@content", namespaces=ns)
+    if len(series) > 0:
+        epub_metadata['series'] = series[0]
+    else:
+        epub_metadata['series'] = ''
+
+    series_id = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='calibre:series_index']/@content", namespaces=ns)
+    if len(series_id) > 0:
+        epub_metadata['series_id'] = series_id[0]
+    else:
+        epub_metadata['series_id'] = '1'
+    return epub_metadata
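Note: parse_epbub_series() reads the calibre:series and calibre:series_index &lt;meta&gt; entries from the OPF metadata block via namespaced XPath. A self-contained lxml sketch of the same queries against an inline OPF fragment (the pkg prefix mirrors the ns map used in cps/epub.py; the fragment itself is invented):

```python
from lxml import etree

opf = b"""<package xmlns="http://www.idpf.org/2007/opf" version="2.0">
  <metadata>
    <meta name="calibre:series" content="Example Series"/>
    <meta name="calibre:series_index" content="3"/>
  </metadata>
</package>"""

ns = {'pkg': 'http://www.idpf.org/2007/opf'}
tree = etree.fromstring(opf)

series = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='calibre:series']/@content", namespaces=ns)
series_id = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='calibre:series_index']/@content", namespaces=ns)

print(series[0] if series else '')         # 'Example Series'
print(series_id[0] if series_id else '1')  # '3'
```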
@@ -299,39 +299,6 @@ if ub.oauth_support:
             ) # ToDo: Translate
             flash(msg, category="error")
 
-
-    @oauth.route('/link/github')
-    @oauth_required
-    def github_login():
-        if not github.authorized:
-            return redirect(url_for('github.login'))
-        account_info = github.get('/user')
-        if account_info.ok:
-            account_info_json = account_info.json()
-            return bind_oauth_or_register(oauthblueprints[0]['id'], account_info_json['id'], 'github.login', 'github')
-        flash(_(u"GitHub Oauth error, please retry later."), category="error")
-        return redirect(url_for('web.login'))
-
-
-    @oauth.route('/unlink/github', methods=["GET"])
-    @login_required
-    def github_login_unlink():
-        return unlink_oauth(oauthblueprints[0]['id'])
-
-
-    @oauth.route('/link/google')
-    @oauth_required
-    def google_login():
-        if not google.authorized:
-            return redirect(url_for("google.login"))
-        resp = google.get("/oauth2/v2/userinfo")
-        if resp.ok:
-            account_info_json = resp.json()
-            return bind_oauth_or_register(oauthblueprints[1]['id'], account_info_json['id'], 'google.login', 'google')
-        flash(_(u"Google Oauth error, please retry later."), category="error")
-        return redirect(url_for('web.login'))
-
-
     @oauth_error.connect_via(oauthblueprints[1]['blueprint'])
     def google_error(blueprint, error, error_description=None, error_uri=None):
         msg = (
@@ -346,7 +313,39 @@ if ub.oauth_support:
             flash(msg, category="error")
 
 
-    @oauth.route('/unlink/google', methods=["GET"])
-    @login_required
-    def google_login_unlink():
-        return unlink_oauth(oauthblueprints[1]['id'])
+    @oauth.route('/link/github')
+    @oauth_required
+    def github_login():
+        if not github.authorized:
+            return redirect(url_for('github.login'))
+        account_info = github.get('/user')
+        if account_info.ok:
+            account_info_json = account_info.json()
+            return bind_oauth_or_register(oauthblueprints[0]['id'], account_info_json['id'], 'github.login', 'github')
+        flash(_(u"GitHub Oauth error, please retry later."), category="error")
+        return redirect(url_for('web.login'))
+
+
+    @oauth.route('/unlink/github', methods=["GET"])
+    @login_required
+    def github_login_unlink():
+        return unlink_oauth(oauthblueprints[0]['id'])
+
+
+    @oauth.route('/link/google')
+    @oauth_required
+    def google_login():
+        if not google.authorized:
+            return redirect(url_for("google.login"))
+        resp = google.get("/oauth2/v2/userinfo")
+        if resp.ok:
+            account_info_json = resp.json()
+            return bind_oauth_or_register(oauthblueprints[1]['id'], account_info_json['id'], 'google.login', 'google')
+        flash(_(u"Google Oauth error, please retry later."), category="error")
+        return redirect(url_for('web.login'))
+
+
+    @oauth.route('/unlink/google', methods=["GET"])
+    @login_required
+    def google_login_unlink():
+        return unlink_oauth(oauthblueprints[1]['id'])
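Note: the relocated routes follow the standard Flask-Dance flow: check the provider proxy's authorized flag, redirect into the OAuth dance if needed, then call the provider API. A minimal standalone sketch of that flow for GitHub (generic Flask-Dance usage with placeholder credentials and route names, not calibre-web's bind_oauth_or_register logic):

```python
from flask import Flask, redirect, url_for
from flask_dance.contrib.github import make_github_blueprint, github

app = Flask(__name__)
app.secret_key = "dev-only"                      # placeholder secret
blueprint = make_github_blueprint(client_id="x", client_secret="y")
app.register_blueprint(blueprint, url_prefix="/login")

@app.route("/link/github")
def github_link():
    if not github.authorized:                    # no token yet: start the OAuth dance
        return redirect(url_for("github.login"))
    account_info = github.get("/user")           # authenticated call to the GitHub API
    if account_info.ok:
        return "Linked GitHub account id %s" % account_info.json()["id"]
    return "GitHub OAuth error, please retry later."
```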
@@ -110,8 +110,7 @@ class TaskEmail(CalibreTask):
         self.results = dict()
 
-    def run(self, worker_thread):
-        # create MIME message
+    def prepare_message(self):
         msg = MIMEMultipart()
         msg['Subject'] = self.subject
         msg['Message-Id'] = make_msgid('calibre-web')
@@ -128,19 +127,22 @@ class TaskEmail(CalibreTask):
         msg['From'] = self.settings["mail_from"]
         msg['To'] = self.recipent
+        # convert MIME message to string
+        fp = StringIO()
+        gen = Generator(fp, mangle_from_=False)
+        gen.flatten(msg)
+        return fp.getvalue()
+
+    def run(self, worker_thread):
+        # create MIME message
+        msg = self.prepare_message()
 
         use_ssl = int(self.settings.get('mail_use_ssl', 0))
         try:
-            # convert MIME message to string
-            fp = StringIO()
-            gen = Generator(fp, mangle_from_=False)
-            gen.flatten(msg)
-            msg = fp.getvalue()
-
             # send email
             timeout = 600  # set timeout to 5mins
 
-            # redirect output to logfile on python2 pn python3 debugoutput is caught with overwritten
+            # redirect output to logfile on python2 on python3 debugoutput is caught with overwritten
            # _print_debug function
             if sys.version_info < (3, 0):
                 org_smtpstderr = smtplib.stderr
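Note: prepare_message() now ends by flattening the MIME object to a string with email.generator.Generator, using mangle_from_=False so body lines beginning with "From " are not escaped. The standard-library pattern in isolation (addresses and subject are placeholders):

```python
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.generator import Generator
from email.utils import make_msgid
from io import StringIO

msg = MIMEMultipart()
msg['Subject'] = 'Send to Kindle'
msg['Message-Id'] = make_msgid('calibre-web')
msg['From'] = 'library@example.org'
msg['To'] = 'reader@example.org'
msg.attach(MIMEText('From here on, the body is left untouched.'))

# Serialize the message to a plain string, as in TaskEmail.prepare_message().
fp = StringIO()
gen = Generator(fp, mangle_from_=False)
gen.flatten(msg)
raw_message = fp.getvalue()
print(raw_message.splitlines()[0])   # first header line of the flattened message
```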
@@ -169,7 +171,6 @@ class TaskEmail(CalibreTask):
         except (MemoryError) as e:
             log.debug_or_exception(e)
             self._handleError(u'MemoryError sending email: ' + str(e))
-            # return None
         except (smtplib.SMTPException, smtplib.SMTPAuthenticationError) as e:
             if hasattr(e, "smtp_error"):
                 text = e.smtp_error.decode('utf-8').replace("\n", '. ')
@@ -181,10 +182,8 @@ class TaskEmail(CalibreTask):
                 log.debug_or_exception(e)
                 text = ''
             self._handleError(u'Smtplib Error sending email: ' + text)
-            # return None
         except (socket.error) as e:
             self._handleError(u'Socket Error sending email: ' + e.strerror)
-            # return None
 
     @property
@@ -403,6 +403,52 @@ class Updater(threading.Thread):
             return json.dumps(status)
         return ''
 
+    def _stable_updater_set_status(self, i, newer, status, parents, commit):
+        if i == -1 and newer == False:
+            status.update({
+                'update': True,
+                'success': True,
+                'message': _(
+                    u'Click on the button below to update to the latest stable version.'),
+                'history': parents
+            })
+            self.updateFile = commit[0]['zipball_url']
+        elif i == -1 and newer == True:
+            status.update({
+                'update': True,
+                'success': True,
+                'message': _(u'A new update is available. Click on the button below to '
+                             u'update to version: %(version)s', version=commit[0]['tag_name']),
+                'history': parents
+            })
+            self.updateFile = commit[0]['zipball_url']
+        return status
+
+    def _stable_updater_parse_major_version(self, commit, i, parents, current_version, status):
+        if int(commit[i + 1]['tag_name'].split('.')[1]) == current_version[1]:
+            parents.append([commit[i]['tag_name'],
+                            commit[i]['body'].replace('\r\n', '<p>').replace('\n', '<p>')])
+            status.update({
+                'update': True,
+                'success': True,
+                'message': _(u'A new update is available. Click on the button below to '
+                             u'update to version: %(version)s', version=commit[i]['tag_name']),
+                'history': parents
+            })
+            self.updateFile = commit[i]['zipball_url']
+        else:
+            parents.append([commit[i + 1]['tag_name'],
+                            commit[i + 1]['body'].replace('\r\n', '<p>').replace('\n', '<p>')])
+            status.update({
+                'update': True,
+                'success': True,
+                'message': _(u'A new update is available. Click on the button below to '
+                             u'update to version: %(version)s', version=commit[i + 1]['tag_name']),
+                'history': parents
+            })
+            self.updateFile = commit[i + 1]['zipball_url']
+        return status, parents
+
     def _stable_available_updates(self, request_method):
         if request_method == "GET":
             parents = []
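Note: _stable_updater_parse_major_version() decides which release to offer by comparing the second component of a release tag with the running version. A reduced sketch of that check (the tag and version formats shown here are assumed for illustration, not taken from the updater):

```python
def same_major_series(tag_name, current_version):
    # Mirrors the check above: compare the second component of the release tag
    # against the second component of the running version.
    return int(tag_name.split('.')[1]) == current_version[1]

current_version = (0, 6, 9)  # assumed form: a tuple of ints parsed elsewhere
print(same_major_series('0.6.12', current_version))  # True  -> offer 0.6.12 directly
print(same_major_series('0.7.0', current_version))   # False -> stop at the last 0.6.x first
```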
@@ -464,48 +510,15 @@ class Updater(threading.Thread):
                     # before major update
                     if i == (len(commit) - 1):
                         i -= 1
-                    if int(commit[i+1]['tag_name'].split('.')[1]) == current_version[1]:
-                        parents.append([commit[i]['tag_name'],
-                                        commit[i]['body'].replace('\r\n', '<p>').replace('\n', '<p>')])
-                        status.update({
-                            'update': True,
-                            'success': True,
-                            'message': _(u'A new update is available. Click on the button below to '
-                                         u'update to version: %(version)s', version=commit[i]['tag_name']),
-                            'history': parents
-                        })
-                        self.updateFile = commit[i]['zipball_url']
-                    else:
-                        parents.append([commit[i+1]['tag_name'],
-                                        commit[i+1]['body'].replace('\r\n', '<p>').replace('\n', '<p>')])
-                        status.update({
-                            'update': True,
-                            'success': True,
-                            'message': _(u'A new update is available. Click on the button below to '
-                                         u'update to version: %(version)s', version=commit[i+1]['tag_name']),
-                            'history': parents
-                        })
-                        self.updateFile = commit[i+1]['zipball_url']
+                    status, parents = self._stable_updater_parse_major_version(self,
+                                                                               commit,
+                                                                               i,
+                                                                               parents,
+                                                                               current_version,
+                                                                               status)
                     break
-            if i == -1 and newer == False:
-                status.update({
-                    'update': True,
-                    'success': True,
-                    'message': _(
-                        u'Click on the button below to update to the latest stable version.'),
-                    'history': parents
-                })
-                self.updateFile = commit[0]['zipball_url']
-            elif i == -1 and newer == True:
-                status.update({
-                    'update': True,
-                    'success': True,
-                    'message': _(u'A new update is available. Click on the button below to '
-                                 u'update to version: %(version)s', version=commit[0]['tag_name']),
-                    'history': parents
-                })
-                self.updateFile = commit[0]['zipball_url']
+
+            status = self._stable_updater_set_status(self, i, newer, status, parents, commit)
             return json.dumps(status)
 
     def _get_request_path(self):