refactored about
Update dependencies in setup.cfg
Improved detection of library change

commit 86b621e768
@@ -27,8 +27,9 @@ from flask import session
 class MyLoginManager(LoginManager):
     def _session_protection_failed(self):
-        sess = session._get_current_object()
+        _session = session._get_current_object()
         ident = self._session_identifier_generator()
-        if(sess and not (len(sess) == 1 and sess.get('csrf_token', None))) and ident != sess.get('_id', None):
+        if(_session and not (len(_session) == 1
+                             and _session.get('csrf_token', None))) and ident != _session.get('_id', None):
             return super(). _session_protection_failed()
         return False

@@ -156,7 +156,7 @@ def create_app():
     services.goodreads_support.connect(config.config_goodreads_api_key,
                                        config.config_goodreads_api_secret,
                                        config.config_use_goodreads)
+    config.store_calibre_uuid(calibre_db, db.Library_Id)
     return app

 @babel.localeselector
cps/about.py (87 changed lines)

@@ -25,47 +25,15 @@ import platform
 import sqlite3
 from collections import OrderedDict

-import babel
-import pytz
-import requests
-import sqlalchemy
 import werkzeug
 import flask
-import flask_login
-import flask_principal
 import jinja2
 from flask_babel import gettext as _
-try:
-    from flask_wtf import __version__ as flaskwtf_version
-except ImportError:
-    flaskwtf_version = _(u'not installed')

-from . import db, calibre_db, converter, uploader, server, isoLanguages, constants, gdriveutils, dep_check
+from . import db, calibre_db, converter, uploader, constants, dep_check
 from .render_template import render_title_template

-try:
-    from flask_login import __version__ as flask_loginVersion
-except ImportError:
-    from flask_login.__about__ import __version__ as flask_loginVersion
-try:
-    # pylint: disable=unused-import
-    import unidecode
-    # _() necessary to make babel aware of string for translation
-    unidecode_version = _(u'installed')
-except ImportError:
-    unidecode_version = _(u'not installed')
-
-try:
-    from flask_dance import __version__ as flask_danceVersion
-except ImportError:
-    flask_danceVersion = None
-
-try:
-    from greenlet import __version__ as greenlet_Version
-except ImportError:
-    greenlet_Version = None
-
 from . import services

 about = flask.Blueprint('about', __name__)

@@ -81,52 +49,23 @@ else:
     calibre_web_version = (constants.STABLE_VERSION['version'] + ' - '
                            + constants.NIGHTLY_VERSION[0].replace('%', '%%') + ' - '
                            + constants.NIGHTLY_VERSION[1].replace('%', '%%'))

 if getattr(sys, 'frozen', False):
     calibre_web_version += " - Exe-Version"
 elif constants.HOME_CONFIG:
     calibre_web_version += " - pyPi"

-if not ret:
-    _VERSIONS = OrderedDict(
-        Platform='{0[0]} {0[2]} {0[3]} {0[4]} {0[5]}'.format(platform.uname()),
-        Python=sys.version,
-        Calibre_Web=calibre_web_version,
-        WebServer=server.VERSION,
-        Flask=flask.__version__,
-        Flask_Login=flask_loginVersion,
-        Flask_Principal=flask_principal.__version__,
-        Flask_WTF=flaskwtf_version,
-        Werkzeug=werkzeug.__version__,
-        Babel=babel.__version__,
-        Jinja2=jinja2.__version__,
-        Requests=requests.__version__,
-        SqlAlchemy=sqlalchemy.__version__,
-        pySqlite=sqlite3.version,
-        SQLite=sqlite3.sqlite_version,
-        iso639=isoLanguages.__version__,
-        pytz=pytz.__version__,
-        Unidecode=unidecode_version,
-        Flask_SimpleLDAP=u'installed' if bool(services.ldap) else None,
-        python_LDAP=services.ldapVersion if bool(services.ldapVersion) else None,
-        Goodreads=u'installed' if bool(services.goodreads_support) else None,
-        jsonschema=services.SyncToken.__version__ if bool(services.SyncToken) else None,
-        flask_dance=flask_danceVersion,
-        greenlet=greenlet_Version
-    )
-    _VERSIONS.update(gdriveutils.get_versions())
-    _VERSIONS.update(uploader.get_versions(True))
-else:
-    _VERSIONS = OrderedDict(
-        Platform='{0[0]} {0[2]} {0[3]} {0[4]} {0[5]}'.format(platform.uname()),
-        Python=sys.version,
-        Calibre_Web=calibre_web_version,
-        Werkzeug=werkzeug.__version__,
-        Jinja2=jinja2.__version__,
-        pySqlite=sqlite3.version,
-        SQLite=sqlite3.sqlite_version,
-    )
-    _VERSIONS.update(ret)
-    _VERSIONS.update(uploader.get_versions(False))
+_VERSIONS = OrderedDict(
+    Platform='{0[0]} {0[2]} {0[3]} {0[4]} {0[5]}'.format(platform.uname()),
+    Python=sys.version,
+    Calibre_Web=calibre_web_version,
+    Werkzeug=werkzeug.__version__,
+    Jinja2=jinja2.__version__,
+    pySqlite=sqlite3.version,
+    SQLite=sqlite3.sqlite_version,
+)
+_VERSIONS.update(ret)
+_VERSIONS.update(uploader.get_versions(False))


 def collect_stats():
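The refactor above collapses the two _VERSIONS branches into one table that is then extended from dep_check (the ret dictionary) and from uploader.get_versions(False); the merge is plain dict updating. A tiny self-contained illustration of that merge order (all values below are placeholders, not the real data):

    from collections import OrderedDict

    versions = OrderedDict(Platform="Linux x86_64", Python="3.10.2")
    versions.update({"Flask": "1.1.4"})   # stand-in for what dep_check returns as `ret`
    versions.update({"lxml": "4.7.1"})    # stand-in for uploader.get_versions(False)
    print(list(versions.items()))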
cps/admin.py (11 changed lines)

@@ -1192,8 +1192,10 @@ def _db_simulate_change():
                                           '',
                                           param['config_calibre_dir'],
                                           flags=re.IGNORECASE).strip()
-    db_change = config.config_calibre_dir != to_save["config_calibre_dir"] and config.config_calibre_dir
-    db_valid = calibre_db.check_valid_db(to_save["config_calibre_dir"], ub.app_DB_path)
+    db_valid, db_change = calibre_db.check_valid_db(to_save["config_calibre_dir"],
+                                                    ub.app_DB_path,
+                                                    config.config_calibre_uuid)
+    db_change = bool(db_change and config.config_calibre_dir)
     return db_change, db_valid

@@ -1223,12 +1225,15 @@ def _db_configuration_update_helper():
         except Exception as ex:
             return _db_configuration_result('{}'.format(ex), gdrive_error)

-    if db_change or not db_valid or not config.db_configured:
+    if db_change or not db_valid or not config.db_configured \
+            or config.config_calibre_dir != to_save["config_calibre_dir"]:
         if not calibre_db.setup_db(to_save['config_calibre_dir'], ub.app_DB_path):
             return _db_configuration_result(_('DB Location is not Valid, Please Enter Correct Path'),
                                             gdrive_error)
+        config.store_calibre_uuid(calibre_db, db.Library_Id)
     # if db changed -> delete shelfs, delete download books, delete read books, kobo sync...
     if db_change:
         log.info("Calibre Database changed, delete all Calibre-Web info related to old Database")
         ub.session.query(ub.Downloads).delete()
         ub.session.query(ub.ArchivedBook).delete()
         ub.session.query(ub.ReadBook).delete()
@@ -62,6 +62,7 @@ class _Settings(_Base):
     mail_gmail_token = Column(JSON, default={})

     config_calibre_dir = Column(String)
+    config_calibre_uuid = Column(String)
     config_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_external_port = Column(Integer, default=constants.DEFAULT_PORT)
     config_certfile = Column(String)

@@ -350,6 +351,14 @@ class _ConfigSQL(object):
         # self.config_calibre_dir = None
         self.save()

+    def store_calibre_uuid(self, calibre_db, Library_table):
+        try:
+            calibre_uuid = calibre_db.session.query(Library_table).one_or_none()
+            if self.config_calibre_uuid != calibre_uuid.uuid:
+                self.config_calibre_uuid = calibre_uuid.uuid
+                self.save()
+        except AttributeError:
+            pass

 def _migrate_table(session, orm_class):
     changed = False

@@ -438,19 +447,12 @@ def load_configuration(session):
         session.add(_Settings())
         session.commit()
     conf = _ConfigSQL(session)
-    # Migrate from global restrictions to user based restrictions
-    #if bool(conf.config_default_show & constants.MATURE_CONTENT) and conf.config_denied_tags == "":
-    #    conf.config_denied_tags = conf.config_mature_content_tags
-    #    conf.save()
-    #    session.query(ub.User).filter(ub.User.mature_content != True). \
-    #        update({"denied_tags": conf.config_mature_content_tags}, synchronize_session=False)
-    #    session.commit()
     return conf

-def get_flask_session_key(session):
-    flask_settings = session.query(_Flask_Settings).one_or_none()
+def get_flask_session_key(_session):
+    flask_settings = _session.query(_Flask_Settings).one_or_none()
     if flask_settings == None:
         flask_settings = _Flask_Settings(os.urandom(32))
-        session.add(flask_settings)
-        session.commit()
+        _session.add(flask_settings)
+        _session.commit()
     return flask_settings.flask_session_key
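A note on the new store_calibre_uuid() helper above: when the attached library has no populated library_id table yet, one_or_none() returns None and the .uuid access raises AttributeError, so the previously stored uuid is simply kept. A self-contained toy sketch of just that guard (every name below is invented for illustration and is not part of the code base):

    # Toy sketch of the AttributeError guard in _ConfigSQL.store_calibre_uuid().
    class EmptyLibrarySession:
        """Stands in for calibre_db.session when library_id has no rows."""
        def query(self, table):
            return self

        def one_or_none(self):
            return None


    def store_uuid(config, session, library_table):
        try:
            row = session.query(library_table).one_or_none()
            if config.get("config_calibre_uuid") != row.uuid:  # AttributeError when row is None
                config["config_calibre_uuid"] = row.uuid
        except AttributeError:
            pass  # no uuid available; keep whatever was stored before


    cfg = {"config_calibre_uuid": "old-uuid"}
    store_uuid(cfg, EmptyLibrarySession(), library_table=None)
    print(cfg["config_calibre_uuid"])  # still "old-uuid"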
cps/db.py (22 changed lines)

@@ -93,6 +93,12 @@ books_publishers_link = Table('books_publishers_link', Base.metadata,
                               )


+class Library_Id(Base):
+    __tablename__ = 'library_id'
+    id = Column(Integer, primary_key=True)
+    uuid = Column(String, nullable=False)
+
+
 class Identifiers(Base):
     __tablename__ = 'identifiers'

@@ -525,12 +531,12 @@ class CalibreDB():
         return cc_classes

     @classmethod
-    def check_valid_db(cls, config_calibre_dir, app_db_path):
+    def check_valid_db(cls, config_calibre_dir, app_db_path, config_calibre_uuid):
         if not config_calibre_dir:
-            return False
+            return False, False
         dbpath = os.path.join(config_calibre_dir, "metadata.db")
         if not os.path.exists(dbpath):
-            return False
+            return False, False
         try:
             check_engine = create_engine('sqlite://',
                                          echo=False,

@@ -540,10 +546,16 @@ class CalibreDB():
             with check_engine.begin() as connection:
                 connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
                 connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
+                local_session = scoped_session(sessionmaker())
+                local_session.configure(bind=connection)
+                database_uuid = local_session().query(Library_Id).one_or_none()
+                # local_session.dispose()

             check_engine.connect()
+            db_change = config_calibre_uuid != database_uuid.uuid
         except Exception:
-            return False
-        return True
+            return False, False
+        return True, db_change

     @classmethod
     def update_config(cls, config):
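Read together, Library_Id, the uuid lookup inside check_valid_db(), and the stored config_calibre_uuid implement the "improved detection of library change" from the commit message: the uuid inside the library's metadata.db is compared against the uuid remembered from the last configuration. A rough standalone sketch of that idea, assuming plain SQLAlchemy and a metadata.db that already contains Calibre's library_id table; the real code attaches both databases to one in-memory engine and goes through the ORM, as the hunk above shows:

    # Sketch only: the uuid comparison behind the new (valid, changed) return value.
    import os
    from sqlalchemy import create_engine, text


    def library_uuid(calibre_dir):
        """Return the uuid stored in <calibre_dir>/metadata.db, or None if unreadable."""
        dbpath = os.path.join(calibre_dir, "metadata.db")
        if not os.path.exists(dbpath):
            return None
        try:
            engine = create_engine("sqlite:///{}".format(dbpath), echo=False)
            with engine.connect() as connection:
                row = connection.execute(text("SELECT uuid FROM library_id")).fetchone()
            return row[0] if row else None
        except Exception:
            return None


    def check_valid_db(calibre_dir, stored_uuid):
        """Mimic the shape of CalibreDB.check_valid_db(): (db_valid, db_change)."""
        current = library_uuid(calibre_dir)
        if current is None:
            return False, False
        return True, current != stored_uuid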
@@ -1,5 +1,7 @@
 import os
 import re
+import sys
+import json

 from .constants import BASE_DIR
 try:

@@ -8,7 +10,7 @@ try:
     ImportNotFound = BaseException
 except ImportError:
     importlib = False
+    version = None

 if not importlib:
     try:

@@ -20,6 +22,13 @@ if not importlib:

 def load_dependencys(optional=False):
     deps = list()
+    if getattr(sys, 'frozen', False):
+        pip_installed = os.path.join(BASE_DIR, ".pip_installed")
+        if os.path.exists(pip_installed):
+            with open(pip_installed) as f:
+                exe_deps = json.loads("".join(f.readlines()))
+        else:
+            return deps
     if importlib or pkgresources:
         if optional:
             req_path = os.path.join(BASE_DIR, "optional-requirements.txt")

@@ -31,11 +40,14 @@ def load_dependencys(optional=False):
             if not line.startswith('#') and not line == '\n' and not line.startswith('git'):
                 res = re.match(r'(.*?)([<=>\s]+)([\d\.]+),?\s?([<=>\s]+)?([\d\.]+)?', line.strip())
                 try:
-                    if importlib:
-                        dep_version = version(res.group(1))
+                    if getattr(sys, 'frozen', False):
+                        dep_version = exe_deps[res.group(1).lower().replace('_','-')]
                     else:
-                        dep_version = pkg_resources.get_distribution(res.group(1)).version
-                except ImportNotFound:
+                        if importlib:
+                            dep_version = version(res.group(1))
+                        else:
+                            dep_version = pkg_resources.get_distribution(res.group(1)).version
+                except (ImportNotFound, KeyError):
                     if optional:
                         continue
                     dep_version = "not installed"
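The frozen-build branch added above reads a .pip_installed file next to the sources and indexes it with a lower-cased, dash-normalised package name. The file itself is not part of this diff, so the following only illustrates the shape the lookup implies: a flat JSON object mapping package name to version (the concrete names and versions are made up):

    # Assumed shape of BASE_DIR/.pip_installed: {"package-name": "version", ...}
    import json

    example = '{"flask": "1.1.4", "flask-login": "0.5.0", "sqlalchemy": "1.4.27"}'
    exe_deps = json.loads(example)

    # Same normalisation as in the hunk above before indexing exe_deps.
    requirement_name = "Flask_Login"
    dep_version = exe_deps[requirement_name.lower().replace('_', '-')]
    print(dep_version)  # 0.5.0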
@@ -47,7 +47,7 @@ def remove_synced_book(book_id, all=False, session=None):
         ub.session_commit()
     else:
         session.query(ub.KoboSyncedBooks).filter(ub.KoboSyncedBooks.book_id == book_id).filter(user).delete()
-        ub.session_commit(sess=session)
+        ub.session_commit(_session=session)

@@ -31,7 +31,7 @@ from cps import logger, config
 from cps.subproc_wrapper import process_open
 from flask_babel import gettext as _
 from cps.kobo_sync_status import remove_synced_book
-from cps.ub import ini
+from cps.ub import init_db_thread

 from cps.tasks.mail import TaskEmail
 from cps import gdriveutils

@@ -166,7 +166,7 @@ class TaskConvert(CalibreTask):
                     local_db.session.merge(new_format)
                     local_db.session.commit()
                     if self.settings['new_book_format'].upper() in ['KEPUB', 'EPUB', 'EPUB3']:
-                        ub_session = ini()
+                        ub_session = init_db_thread()
                         remove_synced_book(book_id, True, ub_session)
                         ub_session.close()
                 except SQLAlchemyError as e:
cps/ub.py (123 changed lines)

@@ -37,6 +37,7 @@ except ImportError as e:
     from flask_dance.consumer.storage.sqla import OAuthConsumerMixin
     oauth_support = True
 except ImportError as e:
+    OAuthConsumerMixin = BaseException
     oauth_support = False
 from sqlalchemy import create_engine, exc, exists, event, text
 from sqlalchemy import Column, ForeignKey
@@ -510,7 +511,7 @@ class RemoteAuthToken(Base):


 # Add missing tables during migration of database
-def add_missing_tables(engine, session):
+def add_missing_tables(engine, _session):
     if not engine.dialect.has_table(engine.connect(), "book_read_link"):
         ReadBook.__table__.create(bind=engine)
     if not engine.dialect.has_table(engine.connect(), "bookmark"):

@@ -527,26 +528,26 @@ def add_missing_tables(engine, session):
         Registration.__table__.create(bind=engine)
         with engine.connect() as conn:
             conn.execute("insert into registration (domain, allow) values('%.%',1)")
-            session.commit()
+            _session.commit()


 # migrate all settings missing in registration table
-def migrate_registration_table(engine, session):
+def migrate_registration_table(engine, _session):
     try:
-        session.query(exists().where(Registration.allow)).scalar()
-        session.commit()
+        _session.query(exists().where(Registration.allow)).scalar()
+        _session.commit()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE registration ADD column 'allow' INTEGER")
             conn.execute("update registration set 'allow' = 1")
-            session.commit()
+            _session.commit()
     try:
         # Handle table exists, but no content
-        cnt = session.query(Registration).count()
+        cnt = _session.query(Registration).count()
         if not cnt:
             with engine.connect() as conn:
                 conn.execute("insert into registration (domain, allow) values('%.%',1)")
-            session.commit()
+            _session.commit()
     except exc.OperationalError:  # Database is not writeable
         print('Settings database is not writeable. Exiting...')
         sys.exit(2)

@@ -564,9 +565,9 @@ def migrate_guest_password(engine):
         sys.exit(2)


-def migrate_shelfs(engine, session):
+def migrate_shelfs(engine, _session):
     try:
-        session.query(exists().where(Shelf.uuid)).scalar()
+        _session.query(exists().where(Shelf.uuid)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE shelf ADD column 'uuid' STRING")

@@ -574,33 +575,33 @@ def migrate_shelfs(engine, session):
             conn.execute("ALTER TABLE shelf ADD column 'last_modified' DATETIME")
             conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
             conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
-        for shelf in session.query(Shelf).all():
+        for shelf in _session.query(Shelf).all():
             shelf.uuid = str(uuid.uuid4())
             shelf.created = datetime.datetime.now()
             shelf.last_modified = datetime.datetime.now()
-        for book_shelf in session.query(BookShelf).all():
+        for book_shelf in _session.query(BookShelf).all():
             book_shelf.date_added = datetime.datetime.now()
-        session.commit()
+        _session.commit()

     try:
-        session.query(exists().where(Shelf.kobo_sync)).scalar()
+        _session.query(exists().where(Shelf.kobo_sync)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:

             conn.execute("ALTER TABLE shelf ADD column 'kobo_sync' BOOLEAN DEFAULT false")
-        session.commit()
+        _session.commit()

     try:
-        session.query(exists().where(BookShelf.order)).scalar()
+        _session.query(exists().where(BookShelf.order)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE book_shelf_link ADD column 'order' INTEGER DEFAULT 1")
-        session.commit()
+        _session.commit()


-def migrate_readBook(engine, session):
+def migrate_readBook(engine, _session):
     try:
-        session.query(exists().where(ReadBook.read_status)).scalar()
+        _session.query(exists().where(ReadBook.read_status)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")

@@ -608,46 +609,46 @@ def migrate_readBook(engine, session):
             conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
             conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
             conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
-        session.commit()
-        test = session.query(ReadBook).filter(ReadBook.last_modified == None).all()
+        _session.commit()
+        test = _session.query(ReadBook).filter(ReadBook.last_modified == None).all()
         for book in test:
             book.last_modified = datetime.datetime.utcnow()
-        session.commit()
+        _session.commit()


-def migrate_remoteAuthToken(engine, session):
+def migrate_remoteAuthToken(engine, _session):
     try:
-        session.query(exists().where(RemoteAuthToken.token_type)).scalar()
-        session.commit()
+        _session.query(exists().where(RemoteAuthToken.token_type)).scalar()
+        _session.commit()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0")
             conn.execute("update remote_auth_token set 'token_type' = 0")
-        session.commit()
+        _session.commit()

 # Migrate database to current version, has to be updated after every database change. Currently migration from
 # everywhere to current should work. Migration is done by checking if relevant columns are existing, and than adding
 # rows with SQL commands
-def migrate_Database(session):
-    engine = session.bind
-    add_missing_tables(engine, session)
-    migrate_registration_table(engine, session)
-    migrate_readBook(engine, session)
-    migrate_remoteAuthToken(engine, session)
-    migrate_shelfs(engine, session)
+def migrate_Database(_session):
+    engine = _session.bind
+    add_missing_tables(engine, _session)
+    migrate_registration_table(engine, _session)
+    migrate_readBook(engine, _session)
+    migrate_remoteAuthToken(engine, _session)
+    migrate_shelfs(engine, _session)
     try:
         create = False
-        session.query(exists().where(User.sidebar_view)).scalar()
+        _session.query(exists().where(User.sidebar_view)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `sidebar_view` Integer DEFAULT 1")
-        session.commit()
+        _session.commit()
         create = True
     try:
         if create:
             with engine.connect() as conn:
                 conn.execute("SELECT language_books FROM user")
-            session.commit()
+            _session.commit()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "

@@ -657,32 +658,32 @@ def migrate_Database(session):
                          'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
                          'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
                          'detail_random': constants.DETAIL_RANDOM})
-        session.commit()
+        _session.commit()
     try:
-        session.query(exists().where(User.denied_tags)).scalar()
+        _session.query(exists().where(User.denied_tags)).scalar()
     except exc.OperationalError:  # Database is not compatible, some columns are missing
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `denied_tags` String DEFAULT ''")
             conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
             conn.execute("ALTER TABLE user ADD column `denied_column_value` String DEFAULT ''")
             conn.execute("ALTER TABLE user ADD column `allowed_column_value` String DEFAULT ''")
-        session.commit()
+        _session.commit()
     try:
-        session.query(exists().where(User.view_settings)).scalar()
+        _session.query(exists().where(User.view_settings)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `view_settings` VARCHAR(10) DEFAULT '{}'")
-        session.commit()
+        _session.commit()
     try:
-        session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
+        _session.query(exists().where(User.kobo_only_shelves_sync)).scalar()
     except exc.OperationalError:
         with engine.connect() as conn:
             conn.execute("ALTER TABLE user ADD column `kobo_only_shelves_sync` SMALLINT DEFAULT 0")
-        session.commit()
+        _session.commit()

     try:
         # check if name is in User table instead of nickname
-        session.query(exists().where(User.name)).scalar()
+        _session.query(exists().where(User.name)).scalar()
     except exc.OperationalError:
         # Create new table user_id and copy contents of table user into it
         with engine.connect() as conn:

@@ -712,20 +713,20 @@ def migrate_Database(session):
             # delete old user table and rename new user_id table to user:
             conn.execute(text("DROP TABLE user"))
             conn.execute(text("ALTER TABLE user_id RENAME TO user"))
-            session.commit()
-    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
+            _session.commit()
+    if _session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
             is None:
-        create_anonymous_user(session)
+        create_anonymous_user(_session)

     migrate_guest_password(engine)


-def clean_database(session):
+def clean_database(_session):
     # Remove expired remote login tokens
     now = datetime.datetime.now()
-    session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
+    _session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
         filter(RemoteAuthToken.token_type != 1).delete()
-    session.commit()
+    _session.commit()


 # Save downloaded books per user in calibre-web's own database

@@ -750,22 +751,22 @@ def delete_download(book_id):
         session.rollback()

 # Generate user Guest (translated text), as anonymous user, no rights
-def create_anonymous_user(session):
+def create_anonymous_user(_session):
     user = User()
     user.name = "Guest"
     user.email = 'no@email'
     user.role = constants.ROLE_ANONYMOUS
     user.password = ''

-    session.add(user)
+    _session.add(user)
     try:
-        session.commit()
+        _session.commit()
     except Exception:
-        session.rollback()
+        _session.rollback()


 # Generate User admin with admin123 password, and access to everything
-def create_admin_user(session):
+def create_admin_user(_session):
     user = User()
     user.name = "admin"
     user.role = constants.ADMIN_USER_ROLES

@@ -773,13 +774,13 @@ def create_admin_user(session):

     user.password = generate_password_hash(constants.DEFAULT_PASSWORD)

-    session.add(user)
+    _session.add(user)
     try:
-        session.commit()
+        _session.commit()
     except Exception:
-        session.rollback()
+        _session.rollback()

-def ini():
+def init_db_thread():
     global app_DB_path
     engine = create_engine(u'sqlite:///{0}'.format(app_DB_path), echo=False)

@@ -844,8 +845,8 @@ def dispose():
     except Exception:
         pass

-def session_commit(success=None, sess=None):
-    s = sess if sess else session
+def session_commit(success=None, _session=None):
+    s = _session if _session else session
     try:
         s.commit()
         if success:
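Two of the ub.py renames above change public call sites rather than just locals: session_commit() now takes _session= instead of sess=, and the per-thread session factory ini() is now init_db_thread(). The convert-task hunk earlier already shows the intended calling pattern; condensed into a sketch (error handling trimmed, the wrapper function name is invented):

    # Sketch of a worker-thread caller after the renames (cf. the cps/tasks/convert.py hunk).
    from cps import ub
    from cps.kobo_sync_status import remove_synced_book


    def forget_synced_copy(book_id):
        thread_session = ub.init_db_thread()   # formerly ub.ini()
        try:
            # remove_synced_book() commits on the session it is handed,
            # via ub.session_commit(_session=...) -- the keyword was sess= before.
            remove_synced_book(book_id, True, thread_session)
        finally:
            thread_session.close()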
@@ -10,7 +10,6 @@ pyasn1>=0.1.9,<0.5.0
 PyDrive2>=1.3.1,<1.11.0
 PyYAML>=3.12
 rsa>=3.4.2,<4.9.0
-# six>=1.10.0,<1.17.0

 # Gmail
 google-auth-oauthlib>=0.4.3,<0.5.0

@@ -34,7 +33,7 @@ scholarly>=1.2.0,<1.6
 markdown2>=2.0.0,<2.5.0
 html2text>=2020.1.16,<2022.1.1
 python-dateutil>=2.1,<2.9.0
-beautifulsoup4>=4.0.1,<4.2.0
+beautifulsoup4>=4.0.1,<4.11.0
 cchardet>=2.0.0,<2.2.0

 # Comics
@@ -5,7 +5,7 @@ Flask-Principal>=0.3.2,<0.5.1
 backports_abc>=0.4
 Flask>=1.0.2,<2.1.0
 iso-639>=0.4.5,<0.5.0
-PyPDF3>=1.0.0,<1.0.6
+PyPDF3>=1.0.0,<1.0.7
 pytz>=2016.10
 requests>=2.11.1,<2.28.0
 SQLAlchemy>=1.3.0,<1.5.0
@@ -45,7 +45,7 @@ install_requires =
     backports_abc>=0.4
     Flask>=1.0.2,<2.1.0
     iso-639>=0.4.5,<0.5.0
-    PyPDF3>=1.0.0,<1.0.6
+    PyPDF3>=1.0.0,<1.0.7
     pytz>=2016.10
     requests>=2.11.1,<2.28.0
     SQLAlchemy>=1.3.0,<1.5.0

@@ -70,7 +70,6 @@ gdrive =
     PyDrive2>=1.3.1,<1.11.0
     PyYAML>=3.12
     rsa>=3.4.2,<4.9.0
-    six>=1.10.0,<1.17.0
 gmail =
     google-auth-oauthlib>=0.4.3,<0.5.0
     google-api-python-client>=1.7.11,<2.37.0

@@ -89,7 +88,7 @@ metadata =
     markdown2>=2.0.0,<2.5.0
     html2text>=2020.1.16,<2022.1.1
     python-dateutil>=2.1,<2.9.0
-    beautifulsoup4>=4.0.1,<4.2.0
+    beautifulsoup4>=4.0.1,<4.11.0
     cchardet>=2.0.0,<2.2.0
 comics =
     natsort>=2.2.0,<8.2.0
One file diff suppressed because it is too large.