Additional fix for #1407: an in-memory SQLite database now serves as the connection, with the Calibre metadata.db and the application's app.db attached to it, so joins across both databases become possible; book_read_link is joined directly to produce the read/unread result.

Ozzieisaacs 2020-05-21 09:28:35 +02:00
parent 81fc1eccd3
commit b75497231e
7 changed files with 37 additions and 23 deletions
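
For context, a minimal standalone sketch of the idea behind this change, using Python's sqlite3 module rather than calibre-web code (paths and the books / book_read_link / read_status names are illustrative): an in-memory SQLite connection acts only as a host, both database files are ATTACHed to it under schema names, and a single query can then join tables that live in different files.

    import sqlite3

    # Host connection: the in-memory database itself stays empty; it only
    # provides the connection that both files are attached to.
    conn = sqlite3.connect(":memory:", check_same_thread=False)
    conn.execute("ATTACH DATABASE 'metadata.db' AS calibre;")
    conn.execute("ATTACH DATABASE 'app.db' AS app_settings;")

    # With both files on one connection, the Calibre library can be joined
    # directly against calibre-web's own bookkeeping table.
    rows = conn.execute(
        "SELECT b.id, b.title, r.read_status "
        "FROM calibre.books AS b "
        "LEFT JOIN app_settings.book_read_link AS r ON r.book_id = b.id"
    ).fetchall()
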


@@ -98,7 +98,7 @@ def create_app():
     app.secret_key = os.getenv('SECRET_KEY', config_sql.get_flask_session_key(ub.session))
     web_server.init_app(app, config)
-    db.setup_db(config)
+    db.setup_db(config, cli.settingspath)
     babel.init_app(app)
     _BABEL_TRANSLATIONS.update(str(item) for item in babel.list_translations())


@@ -99,7 +99,7 @@ def shutdown():
     if task == 2:
         log.warning("reconnecting to calibre database")
-        db.setup_db(config)
+        db.setup_db(config, ub.app_DB_path)
         showtext['text'] = _(u'Reconnect successful')
         return json.dumps(showtext)

@@ -688,7 +688,7 @@ def _configuration_update_helper():
             return _configuration_result('%s' % e, gdriveError)
     if db_change:
-        if not db.setup_db(config):
+        if not db.setup_db(config, ub.app_DB_path):
             return _configuration_result(_('DB Location is not Valid, Please Enter Correct Path'), gdriveError)
         if not os.access(os.path.join(config.config_calibre_dir, "metadata.db"), os.W_OK):
             flash(_(u"DB is not Writeable"), category="warning")


@@ -24,7 +24,7 @@ import re
 import ast
 from datetime import datetime
-from sqlalchemy import create_engine
+from sqlalchemy import create_engine, event
 from sqlalchemy import Table, Column, ForeignKey, CheckConstraint
 from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float
 from sqlalchemy.orm import relationship, sessionmaker, scoped_session

@@ -329,7 +329,7 @@ def update_title_sort(config, conn=None):
     conn.create_function("title_sort", 1, _title_sort)

-def setup_db(config):
+def setup_db(config, app_db_path):
     dispose()
     global engine

@@ -343,10 +343,14 @@ def setup_db(config):
         return False
     try:
-        engine = create_engine('sqlite:///{0}'.format(dbpath),
+        #engine = create_engine('sqlite:///{0}'.format(dbpath),
+        engine = create_engine('sqlite://',
                                echo=False,
                                isolation_level="SERIALIZABLE",
                                connect_args={'check_same_thread': False})
+        engine.execute("attach database '{}' as calibre;".format(dbpath))
+        engine.execute("attach database '{}' as app_settings;".format(app_db_path))
         conn = engine.connect()
         # conn.text_factory = lambda b: b.decode(errors = 'ignore') possible fix for #1302
     except Exception as e:

@@ -438,7 +442,8 @@ def dispose():
     if table is not None:
         Base.metadata.remove(table)

-def reconnect_db(config):
+def reconnect_db(config, app_db_path):
     session.close()
     engine.dispose()
-    setup_db(config)
+    setup_db(config, app_db_path)
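
A side note on the attach calls above: ATTACH applies only to the connection that executes it, and a SQLAlchemy engine hands out pooled connections, so connections created later by the pool do not automatically see the extra schemas. The hunk also adds an `event` import; below is a hedged sketch of the listener pattern commonly used to re-run the ATTACH on every new DBAPI connection. It is an illustration of that pattern only, not code from this commit, and build_engine is a hypothetical helper name.

    from sqlalchemy import create_engine, event

    def build_engine(dbpath, app_db_path):
        # Mirrors the create_engine() call in the diff: an in-memory host database.
        engine = create_engine('sqlite://',
                               echo=False,
                               isolation_level="SERIALIZABLE",
                               connect_args={'check_same_thread': False})

        @event.listens_for(engine, "connect")
        def attach_databases(dbapi_connection, connection_record):
            # Runs for every raw DBAPI connection the pool creates, so each
            # connection sees the same two attached schemas.
            dbapi_connection.execute("attach database '{}' as calibre;".format(dbpath))
            dbapi_connection.execute("attach database '{}' as app_settings;".format(app_db_path))

        return engine
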


@@ -39,7 +39,7 @@ try:
 except ImportError:
     pass

-from . import logger, gdriveutils, config, db
+from . import logger, gdriveutils, config, db, ub
 from .web import admin_required

@@ -145,7 +145,8 @@ def on_received_watch_confirmation():
         dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
     else:
         dbpath = os.path.join(config.config_calibre_dir, "metadata.db").encode()
-    if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != hashlib.md5(dbpath):
+    if not response['deleted'] and response['file']['title'] == 'metadata.db' \
+            and response['file']['md5Checksum'] != hashlib.md5(dbpath):
         tmpDir = tempfile.gettempdir()
         log.info('Database file updated')
         copyfile(dbpath, os.path.join(tmpDir, "metadata.db_" + str(current_milli_time())))

@@ -154,7 +155,7 @@ def on_received_watch_confirmation():
             log.info('Setting up new DB')
             # prevent error on windows, as os.rename does on exisiting files
             move(os.path.join(tmpDir, "tmp_metadata.db"), dbpath)
-            db.setup_db(config)
+            db.setup_db(config, ub.app_DB_path)
         except Exception as e:
             log.exception(e)
     updateMetaData()


@@ -143,7 +143,7 @@ def HandleSyncRequest():
     # We reload the book database so that the user get's a fresh view of the library
     # in case of external changes (e.g: adding a book through Calibre).
-    db.reconnect_db(config)
+    db.reconnect_db(config, ub.app_DB_path)
     archived_books = (
         ub.session.query(ub.ArchivedBook)


@@ -49,6 +49,7 @@ from . import constants
 session = None
+app_DB_path = None
 Base = declarative_base()

@@ -664,8 +665,12 @@ def create_admin_user(session):
 def init_db(app_db_path):
     # Open session for database connection
     global session
+    global app_DB_path
+    app_DB_path = app_db_path
     engine = create_engine(u'sqlite:///{0}'.format(app_db_path), echo=False)
+    # engine.execute("attach database '{0}' as app_settings;".format(app_db_path))
     Session = sessionmaker()
     Session.configure(bind=engine)


@@ -970,7 +970,7 @@ def get_tasks_status():
 @app.route("/reconnect")
 def reconnect():
-    db.reconnect_db(config)
+    db.reconnect_db(config, ub.app_DB_path)
     return json.dumps({})

@@ -1154,30 +1154,33 @@ def advanced_search():
 def render_read_books(page, are_read, as_xml=False, order=None, *args, **kwargs):
     order = order or []
     if not config.config_read_column:
-        readBooks = ub.session.query(ub.ReadBook).filter(ub.ReadBook.user_id == int(current_user.id))\
-            .filter(ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED).all()
-        readBookIds = [x.book_id for x in readBooks]
         if are_read:
-            db_filter = db.Books.id.in_(readBookIds)
+            db_filter = and_(ub.ReadBook.user_id == int(current_user.id),
+                             ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED)
         else:
-            db_filter = ~db.Books.id.in_(readBookIds)
-        entries, random, pagination = fill_indexpage(page, db.Books, db_filter, order)
+            db_filter = coalesce(ub.ReadBook.read_status, 0) != ub.ReadBook.STATUS_FINISHED
+        entries, random, pagination = fill_indexpage(page, db.Books,
+                                                     db_filter,
+                                                     order,
+                                                     ub.ReadBook, db.Books.id==ub.ReadBook.book_id)
     else:
         try:
             if are_read:
                 db_filter = db.cc_classes[config.config_read_column].value == True
             else:
                 db_filter = coalesce(db.cc_classes[config.config_read_column].value, False) != True
-            # book_count = db.session.query(func.count(db.Books.id)).filter(common_filters()).filter(db_filter).scalar()
             entries, random, pagination = fill_indexpage(page, db.Books,
                                                          db_filter,
                                                          order,
                                                          db.cc_classes[config.config_read_column])
         except KeyError:
             log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
-            book_count = 0
+            if not as_xml:
+                flash(_("Custom Column No.%(column)d is not existing in calibre database",
+                        column=config.config_read_column),
+                      category="error")
+                return redirect(url_for("web.index"))
+            # ToDo: Handle error Case for opds
     if as_xml:
         return entries, pagination
     else:
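
What the new read/unread branch boils down to, written as a plain SQLAlchemy query. This is a hedged sketch: session, Books, and ReadBook stand in for the project's own mapped classes, fill_indexpage's internals are not shown, and the function name is hypothetical. The shape of the query is an outer join from the Calibre books table to the book_read_link table in the attached app database, with coalesce() treating books that have no link row as "not finished".

    from sqlalchemy import and_
    from sqlalchemy.sql.expression import func

    def read_books_filter_query(session, Books, ReadBook, user_id, are_read):
        # Outer join so books without any book_read_link row still appear
        # on the "unread" side of the filter.
        query = session.query(Books).outerjoin(ReadBook, Books.id == ReadBook.book_id)
        if are_read:
            return query.filter(and_(ReadBook.user_id == user_id,
                                     ReadBook.read_status == ReadBook.STATUS_FINISHED))
        # Missing link rows coalesce to 0, i.e. "not finished".
        return query.filter(func.coalesce(ReadBook.read_status, 0) != ReadBook.STATUS_FINISHED)
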