2015-08-02 18:59:11 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
import db
|
|
|
|
import ub
|
2016-03-28 19:07:13 +00:00
|
|
|
from flask import current_app as app
|
2016-12-23 08:53:39 +00:00
|
|
|
import logging
|
2015-08-02 18:59:11 +00:00
|
|
|
import smtplib
|
2017-02-20 18:52:00 +00:00
|
|
|
from tempfile import gettempdir
|
2016-06-17 12:42:39 +00:00
|
|
|
import socket
|
2015-08-02 18:59:11 +00:00
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import traceback
|
2016-03-26 15:12:29 +00:00
|
|
|
import re
|
|
|
|
import unicodedata
|
2018-04-10 16:50:04 +00:00
|
|
|
from io import BytesIO
|
2017-04-02 08:05:07 +00:00
|
|
|
|
2017-03-05 09:40:39 +00:00
|
|
|
try:
|
2017-03-06 03:44:54 +00:00
|
|
|
from StringIO import StringIO
|
|
|
|
from email.MIMEBase import MIMEBase
|
|
|
|
from email.MIMEMultipart import MIMEMultipart
|
|
|
|
from email.MIMEText import MIMEText
|
2017-03-07 20:08:28 +00:00
|
|
|
except ImportError as e:
|
|
|
|
from io import StringIO
|
|
|
|
from email.mime.base import MIMEBase
|
|
|
|
from email.mime.multipart import MIMEMultipart
|
|
|
|
from email.mime.text import MIMEText
|
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
from email import encoders
|
|
|
|
from email.generator import Generator
|
2017-02-23 03:26:39 +00:00
|
|
|
from email.utils import formatdate
|
|
|
|
from email.utils import make_msgid
|
2016-11-09 18:24:33 +00:00
|
|
|
from flask_babel import gettext as _
|
2015-08-02 18:59:11 +00:00
|
|
|
import subprocess
|
2017-02-20 18:52:00 +00:00
|
|
|
import threading
|
2017-01-30 17:58:36 +00:00
|
|
|
import shutil
|
2017-02-20 18:52:00 +00:00
|
|
|
import requests
|
|
|
|
import zipfile
|
|
|
|
from tornado.ioloop import IOLoop
|
2017-03-16 23:36:37 +00:00
|
|
|
try:
|
|
|
|
import gdriveutils as gd
|
|
|
|
except ImportError:
|
|
|
|
pass
|
2017-03-01 22:38:03 +00:00
|
|
|
import web
|
2017-02-20 18:52:00 +00:00
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
try:
|
|
|
|
import unidecode
|
2017-03-31 14:52:25 +00:00
|
|
|
use_unidecode = True
|
2017-11-30 15:49:46 +00:00
|
|
|
except ImportError:
|
2017-03-31 14:52:25 +00:00
|
|
|
use_unidecode = False
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-02-20 18:52:00 +00:00
|
|
|
# Global variables
|
|
|
|
global_task = None
|
|
|
|
updater_thread = None
|
|
|
|
|
2017-05-19 19:30:39 +00:00
|
|
|
RET_SUCCESS = 1
|
|
|
|
RET_FAIL = 0
|
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2015-08-02 18:59:11 +00:00
|
|
|
def update_download(book_id, user_id):
    """Record that *user_id* downloaded *book_id* (at most one row per pair)."""
    existing = (ub.session.query(ub.Downloads)
                .filter(ub.Downloads.user_id == user_id)
                .filter(ub.Downloads.book_id == book_id)
                .first())
    if existing:
        return
    ub.session.add(ub.Downloads(user_id=user_id, book_id=book_id))
    ub.session.commit()
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
def make_mobi(book_id, calibrepath):
    """Convert the EPUB of *book_id* to MOBI using the bundled kindlegen binary.

    Returns a tuple ``(payload, code)``:
    on success ``(path_to_mobi, RET_SUCCESS)``, on failure
    ``(error_message, RET_FAIL)``.  On success a new MOBI row is also
    appended to the book's data and committed.
    """
    error_message = None
    # kindlegen ships in the project's vendor directory next to this file.
    vendorpath = os.path.join(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) +
                                               os.sep + "../vendor" + os.sep))
    if sys.platform == "win32":
        kindlegen = (os.path.join(vendorpath, u"kindlegen.exe")).encode(sys.getfilesystemencoding())
    else:
        kindlegen = (os.path.join(vendorpath, u"kindlegen")).encode(sys.getfilesystemencoding())
    if not os.path.exists(kindlegen):
        error_message = _(u"kindlegen binary %(kindlepath)s not found", kindlepath=kindlegen)
        app.logger.error("make_mobi: " + error_message)
        return error_message, RET_FAIL
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == 'EPUB').first()
    if not data:
        error_message = _(u"epub format not found for book id: %(book)d", book=book_id)
        app.logger.error("make_mobi: " + error_message)
        return error_message, RET_FAIL

    file_path = os.path.join(calibrepath, book.path, data.name)
    if os.path.exists(file_path + u".epub"):
        try:
            # NOTE(review): kindlegen was encoded to bytes above; concatenating it
            # with unicode strings assumes Python 2 str semantics — confirm on py3.
            p = subprocess.Popen((kindlegen + " \"" + file_path + u".epub\"").encode(sys.getfilesystemencoding()),
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        except Exception:
            error_message = _(u"kindlegen failed, no execution permissions")
            app.logger.error("make_mobi: " + error_message)
            return error_message, RET_FAIL
        # Poll process for new output until finished
        while True:
            nextline = p.stdout.readline()
            if nextline == '' and p.poll() is not None:
                break
            if nextline != "\r\n":
                # Format of error message (kindlegen translates its output texts):
                # Error(prcgen):E23006: Language not recognized in metadata.The dc:Language field is mandatory.Aborting.
                conv_error = re.search(".*\(.*\):(E\d+):\s(.*)", nextline)
                # If error occoures, log in every case
                if conv_error:
                    error_message = _(u"Kindlegen failed with Error %(error)s. Message: %(message)s",
                                      error=conv_error.group(1), message=conv_error.group(2).decode('utf-8'))
                    app.logger.info("make_mobi: " + error_message)
                    app.logger.info(nextline.strip('\r\n'))
                app.logger.debug(nextline.strip('\r\n'))

        check = p.returncode
        # kindlegen exit code 0 = ok, 1 = warnings (output still produced),
        # >= 2 = hard failure with no MOBI written.
        if not check or check < 2:
            book.data.append(db.Data(
                name=book.data[0].name,
                book_format="MOBI",
                book=book.id,
                uncompressed_size=os.path.getsize(file_path + ".mobi")
            ))
            db.session.commit()
            return file_path + ".mobi", RET_SUCCESS
        else:
            app.logger.info("make_mobi: kindlegen failed with error while converting book")
            if not error_message:
                # NOTE(review): untranslated fallback with 'excecution' typo kept
                # byte-identical; the message is misleading here (conversion, not
                # permissions) — candidate for a later fix.
                error_message = 'kindlegen failed, no excecution permissions'
            return error_message, RET_FAIL
    else:
        error_message = "make_mobi: epub not found: %s.epub" % file_path
        return error_message, RET_FAIL
|
2015-08-02 18:59:11 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
|
|
|
class StderrLogger(object):
    """File-like object that forwards writes to the 'cps.web' logger.

    Installed as ``sys.stderr`` while smtplib debug output is active, so the
    SMTP dialogue lands in the application log instead of the console.
    Text is accumulated until a bare newline write arrives, then emitted as
    one debug record.
    """

    def __init__(self):
        self.logger = logging.getLogger('cps.web')
        # Bug fix: buffer used to be a class attribute; per-instance state
        # avoids any accidental sharing between logger instances.
        self.buffer = ''

    def write(self, message):
        if message == '\n':
            # End of line: flush the collected text as a single record.
            self.logger.debug(self.buffer)
            self.buffer = ''
        else:
            self.buffer += message

    def flush(self):
        """No-op; present so code expecting a real stream does not fail."""
        pass
|
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
def send_raw_email(kindle_mail, msg):
    """Deliver the MIME message *msg* to *kindle_mail* via the configured SMTP server.

    Reads server/credentials from ub.get_mail_settings().  Returns None on
    success, or a translated error string on failure.
    """
    settings = ub.get_mail_settings()

    msg['From'] = settings["mail_from"]
    msg['To'] = kindle_mail

    # 0 = plain, 1 = STARTTLS, 2 = implicit SSL
    use_ssl = int(settings.get('mail_use_ssl', 0))

    # convert MIME message to string
    fp = StringIO()
    gen = Generator(fp, mangle_from_=False)
    gen.flatten(msg)
    msg = fp.getvalue()

    # send email
    try:
        timeout = 600  # set timeout to 10 minutes

        # Route smtplib's debug chatter through the application logger.
        org_stderr = sys.stderr
        sys.stderr = StderrLogger()
        try:
            # Bug fix: timeout was previously passed as the third positional
            # argument, which is local_hostname for both SMTP and SMTP_SSL;
            # it must be given as a keyword to actually act as a timeout.
            if use_ssl == 2:
                mailserver = smtplib.SMTP_SSL(settings["mail_server"], settings["mail_port"], timeout=timeout)
            else:
                mailserver = smtplib.SMTP(settings["mail_server"], settings["mail_port"], timeout=timeout)
            mailserver.set_debuglevel(1)

            if use_ssl == 1:
                mailserver.starttls()

            if settings["mail_password"]:
                mailserver.login(str(settings["mail_login"]), str(settings["mail_password"]))
            mailserver.sendmail(settings["mail_from"], kindle_mail, msg)
            mailserver.quit()
        finally:
            # Bug fix: the original assigned the saved stream to smtplib.stderr,
            # leaving sys.stderr permanently replaced (and never restored on
            # error).  Restore sys.stderr in all cases.
            sys.stderr = org_stderr

    except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as ex:
        app.logger.error(traceback.print_exc())
        return _("Failed to send mail: %s" % str(ex))

    return None
|
|
|
|
|
|
|
|
|
2017-01-22 15:44:37 +00:00
|
|
|
def send_test_mail(kindle_mail):
    """Compose a minimal test message and deliver it through send_raw_email."""
    message = MIMEMultipart()
    message['Subject'] = _(u'Calibre-web test email')
    body = _(u'This email has been sent via calibre web.')
    message.attach(MIMEText(body.encode('UTF-8'), 'plain', 'UTF-8'))
    return send_raw_email(kindle_mail, message)
|
2017-01-22 15:44:37 +00:00
|
|
|
|
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
def send_mail(book_id, kindle_mail, calibrepath):
    """Send the book *book_id* to *kindle_mail* as an email attachment.

    Prefers MOBI; falls back to converting an EPUB via make_mobi, then PDF.
    Returns None on success or a translated error string on failure.
    """
    # create MIME message
    msg = MIMEMultipart()
    msg['Subject'] = _(u'Send to Kindle')
    msg['Message-Id'] = make_msgid('calibre-web')
    msg['Date'] = formatdate(localtime=True)
    text = _(u'This email has been sent via calibre web.')
    msg.attach(MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8'))

    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id)

    # Map available formats to their file paths on disk.
    formats = {}

    for entry in data:
        if entry.format == "MOBI":
            formats["mobi"] = os.path.join(calibrepath, book.path, entry.name + ".mobi")
        if entry.format == "EPUB":
            formats["epub"] = os.path.join(calibrepath, book.path, entry.name + ".epub")
        if entry.format == "PDF":
            formats["pdf"] = os.path.join(calibrepath, book.path, entry.name + ".pdf")

    if len(formats) == 0:
        return _("Could not find any formats suitable for sending by email")

    if 'mobi' in formats:
        msg.attach(get_attachment(formats['mobi']))
    elif 'epub' in formats:
        # No MOBI on disk: convert the EPUB first.
        data, resultCode = make_mobi(book.id, calibrepath)
        if resultCode == RET_SUCCESS:
            msg.attach(get_attachment(data))
        else:
            # Bug fix: the original assigned the message to app.logger.error
            # (clobbering the method) instead of calling it.
            app.logger.error(data)
            return data  # _("Could not convert epub to mobi")
    elif 'pdf' in formats:
        msg.attach(get_attachment(formats['pdf']))
    else:
        return _("Could not find any formats suitable for sending by email")

    return send_raw_email(kindle_mail, msg)
|
2015-08-02 18:59:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_attachment(file_path):
    """Return the file at *file_path* wrapped as a base64 MIMEBase attachment.

    Returns None if the file cannot be read.
    """
    try:
        # Context manager guarantees the handle is closed even if read fails.
        with open(file_path, 'rb') as file_:
            content = file_.read()
        attachment = MIMEBase('application', 'octet-stream')
        attachment.set_payload(content)
        encoders.encode_base64(attachment)

        attachment.add_header('Content-Disposition', 'attachment',
                              filename=os.path.basename(file_path))
        return attachment
    except IOError:
        traceback.print_exc()
        # Bug fix: the original assigned the message to app.logger.error
        # (replacing the method) instead of calling it.
        app.logger.error(u'The requested file could not be read. Maybe wrong permissions?')
        return None
|
2016-03-26 15:12:29 +00:00
|
|
|
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2016-04-03 21:52:32 +00:00
|
|
|
def get_valid_filename(value, replace_whitespace=True):
    """Convert *value* into a string usable as a clean filename.

    Transliterates or strips non-ASCII characters, replaces path-hostile
    characters, and truncates to 128 characters.  Raises ValueError if the
    result would be empty.
    """
    # A trailing dot is problematic on some filesystems; turn it into '_'.
    if value[-1:] == u'.':
        value = value[:-1] + u'_'
    value = value.replace("/", "_").replace(":", "_").strip('\0')
    if use_unidecode:
        value = unidecode.unidecode(value).strip()
    else:
        for src, dst in ((u'§', u'SS'), (u'ß', u'ss')):
            value = value.replace(src, dst)
        value = unicodedata.normalize('NFKD', value)
        slug_pattern = re.compile('[\W\s-]', re.UNICODE)
        stripped = slug_pattern.sub('', value).strip()
        # Python3 str, Python2 unicode
        value = stripped if isinstance(value, str) else unicode(stripped)
    if replace_whitespace:
        # *+:\"/<>? are replaced by _
        value = re.sub(r'[\*\+:\\\"/<>\?]+', u'_', value, flags=re.U)
        # pipe has to be replaced with comma
        value = re.sub(r'[\|]+', u',', value, flags=re.U)
    value = value[:128]
    if not value:
        raise ValueError("Filename cannot be empty")

    return value
|
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2017-02-15 17:09:17 +00:00
|
|
|
def get_sorted_author(value):
    """Return the author name *value* ("First Last") as "Last, First".

    Generational suffixes (Jr./Sr., I-III, IV) are kept attached after the
    surname.  On any failure the input is returned unchanged (and logged).
    """
    try:
        # Suffixes that must not be mistaken for a last name.
        regexes = [r"^(JR|SR)\.?$", r"^I{1,3}\.?$", r"^IV\.?$"]
        combined = "(" + ")|(".join(regexes) + ")"
        value = value.split(" ")
        if re.match(combined, value[-1].upper()):
            value2 = value[-2] + ", " + " ".join(value[:-2]) + " " + value[-1]
        else:
            value2 = value[-1] + ", " + " ".join(value[:-1])
    except Exception:
        # Bug fix: log message previously read "...<name>failed" (missing space).
        logging.getLogger('cps.web').error("Sorting author " + str(value) + " failed")
        value2 = value
    return value2
|
2016-12-23 08:53:39 +00:00
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2017-04-15 16:18:33 +00:00
|
|
|
def delete_book(book, calibrepath):
    """Remove the book's directory tree below *calibrepath* from disk."""
    if "/" not in book.path:
        # A path without the author/title separator looks corrupt; refuse to
        # delete anything and log it instead.
        logging.getLogger('cps.web').error("Deleting book " + str(book.id) + " failed, book path value: "+ book.path)
        return
    shutil.rmtree(os.path.join(calibrepath, book.path), ignore_errors=True)
|
2017-11-30 15:49:46 +00:00
|
|
|
|
|
|
|
# ToDo: Implement delete book on gdrive
|
2017-04-15 16:24:48 +00:00
|
|
|
def delete_book_gdrive(book):
    """Delete *book* from Google Drive storage — not implemented yet (stub)."""
    pass
|
|
|
|
|
2017-11-30 15:49:46 +00:00
|
|
|
|
2017-01-28 19:16:40 +00:00
|
|
|
def update_dir_stucture(book_id, calibrepath):
    """Rename the book's on-disk author/title directories to match its current
    metadata, and update ``localbook.path`` accordingly.

    Returns False on success, or a translated error string on failure.
    """
    localbook = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    path = os.path.join(calibrepath, localbook.path)

    # book.path has the form "<authordir>/<titledir>".
    authordir = localbook.path.split('/')[0]
    new_authordir = get_valid_filename(localbook.authors[0].name)

    titledir = localbook.path.split('/')[1]
    new_titledir = get_valid_filename(localbook.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        try:
            new_title_path = os.path.join(os.path.dirname(path), new_titledir)
            if not os.path.exists(new_title_path):
                os.renames(path, new_title_path)
            else:
                # Target directory already exists: merge file by file.
                for dir_name, subdir_list, file_list in os.walk(path):
                    for file in file_list:
                        os.renames(os.path.join(dir_name, file),
                                   os.path.join(new_title_path + dir_name[len(path):], file))
            path = new_title_path
            localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
        except OSError as ex:
            logging.getLogger('cps.web').error("Rename title from: " + path + " to " + new_title_path)
            logging.getLogger('cps.web').error(ex, exc_info=True)
            return _('Rename title from: "%s" to "%s" failed with error: %s' % (path, new_title_path, str(ex)))
    if authordir != new_authordir:
        try:
            new_author_path = os.path.join(os.path.join(calibrepath, new_authordir), os.path.basename(path))
            os.renames(path, new_author_path)
            localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
        except OSError as ex:
            logging.getLogger('cps.web').error("Rename author from: " + path + " to " + new_author_path)
            logging.getLogger('cps.web').error(ex, exc_info=True)
            # Bug fix: the error message previously interpolated new_title_path,
            # which belongs to the title branch and may be unbound here; it must
            # report new_author_path.
            return _('Rename author from: "%s" to "%s" failed with error: %s' % (path, new_author_path, str(ex)))
    return False
|
2017-01-30 17:58:36 +00:00
|
|
|
|
2017-03-31 14:52:25 +00:00
|
|
|
|
2017-03-01 22:38:03 +00:00
|
|
|
def update_dir_structure_gdrive(book_id):
    """Rename the Google Drive author/title folders of *book_id* to match its
    current metadata and update ``book.path``.

    Returns the error flag (always False in the current implementation —
    Drive upload failures are not detected here).
    """
    error = False
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()

    authordir = book.path.split('/')[0]
    new_authordir = get_valid_filename(book.authors[0].name)
    titledir = book.path.split('/')[1]
    new_titledir = get_valid_filename(book.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        # Bug fix: replaced a leftover debug print() of the title dir with a
        # proper logger call.
        logging.getLogger('cps.web').debug("Renaming gdrive title dir: " + titledir)
        gFile = gd.getFileFromEbooksFolder(web.Gdrive.Instance().drive, os.path.dirname(book.path), titledir)
        gFile['title'] = new_titledir
        gFile.Upload()
        book.path = book.path.split('/')[0] + '/' + new_titledir

    if authordir != new_authordir:
        gFile = gd.getFileFromEbooksFolder(web.Gdrive.Instance().drive, None, authordir)
        gFile['title'] = new_authordir
        gFile.Upload()
        book.path = new_authordir + '/' + book.path.split('/')[1]
    return error
|
2017-03-01 22:38:03 +00:00
|
|
|
|
|
|
|
|
2017-02-20 18:52:00 +00:00
|
|
|
class Updater(threading.Thread):
    """Background thread that self-updates calibre-web.

    Downloads the current master zipball from GitHub, unpacks it into the
    temp directory, overlays it onto the installation directory, deletes
    files that no longer exist upstream, and finally stops the running web
    server so it restarts with the new code.

    ``status`` tracks progress (0 idle .. 7 server shutdown requested) and
    is polled by the UI via get_update_status().
    """

    def __init__(self):
        threading.Thread.__init__(self)
        # Progress step, read from the web UI while the update runs.
        self.status = 0

    def run(self):
        """Perform the whole update sequence; updates self.status as it goes."""
        global global_task
        self.status = 1
        # Fetch the master branch as a zipball.
        r = requests.get('https://api.github.com/repos/janeczku/calibre-web/zipball/master', stream=True)
        fname = re.findall("filename=(.+)", r.headers['content-disposition'])[0]
        self.status = 2
        z = zipfile.ZipFile(BytesIO(r.content))
        self.status = 3
        tmp_dir = gettempdir()
        z.extractall(tmp_dir)
        self.status = 4
        # The extracted root folder is the attachment filename minus extension.
        self.update_source(os.path.join(tmp_dir, os.path.splitext(fname)[0]), ub.config.get_main_dir)
        self.status = 5
        global_task = 0
        # Release database resources before the server goes down.
        db.session.close()
        db.engine.dispose()
        ub.session.close()
        ub.engine.dispose()
        self.status = 6

        if web.gevent_server:
            web.gevent_server.stop()
        else:
            # stop tornado server
            server = IOLoop.instance()
            server.add_callback(server.stop)
        self.status = 7

    def get_update_status(self):
        # Current update step (0-7); see class docstring.
        return self.status

    @classmethod
    def file_to_list(self, filelist):
        """Read *filelist* and return its stripped lines, skipping '#EXT' headers."""
        return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(self, one, two):
        """Return the items of *one* that do not occur in *two*."""
        return [x for x in one if x not in set(two)]

    @classmethod
    def reduce_dirs(self, delete_files, new_list):
        """Collapse *delete_files* to the top-most path prefixes that contain
        no entry of *new_list*, so whole obsolete subtrees are deleted once."""
        new_delete = []
        for filename in delete_files:
            parts = filename.split(os.sep)
            sub = ''
            for part in parts:
                sub = os.path.join(sub, part)
                if sub == '':
                    # Leading separator of an absolute path.
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    # No surviving entry below this prefix — stop descending.
                    break
        return list(set(new_delete))

    @classmethod
    def reduce_files(self, remove_items, exclude_items):
        """Drop entries from *remove_items* that start with any prefix in
        *exclude_items* (files that must survive the update)."""
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

    @classmethod
    def moveallfiles(self, root_src_dir, root_dst_dir):
        """Move every file from *root_src_dir* into *root_dst_dir*, trying to
        preserve ownership of overwritten files on POSIX systems."""
        change_permissions = True
        if sys.platform == "win32" or sys.platform == "darwin":
            # os.chown is unavailable/meaningless on these platforms.
            change_permissions = False
        else:
            logging.getLogger('cps.web').debug('Update on OS-System : ' + sys.platform)
        new_permissions = os.stat(root_dst_dir)
        # print new_permissions
        for src_dir, __, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                logging.getLogger('cps.web').debug('Create-Dir: '+dst_dir)
                if change_permissions:
                    # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid))
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    if change_permissions:
                        # Remember the replaced file's ownership to re-apply it.
                        permission = os.stat(dst_file)
                    logging.getLogger('cps.web').debug('Remove file before copy: '+dst_file)
                    os.remove(dst_file)
                else:
                    if change_permissions:
                        permission = new_permissions
                shutil.move(src_file, dst_dir)
                logging.getLogger('cps.web').debug('Move File '+src_file+' to '+dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                    except (Exception) as e:
                        # ex = sys.exc_info()
                        old_permissions = os.stat(dst_file)
                        logging.getLogger('cps.web').debug('Fail change permissions of ' + str(dst_file) + '. Before: '
                                                           + str(old_permissions.st_uid) + ':' + str(old_permissions.st_gid) + ' After: '
                                                           + str(permission.st_uid) + ':' + str(permission.st_gid) + ' error: '+str(e))
        return

    def update_source(self, source, destination):
        """Overlay *source* onto *destination*, then delete files/dirs that no
        longer exist upstream, except for the protected *exclude* entries."""
        # destination files
        old_list = list()
        exclude = (
            'vendor' + os.sep + 'kindlegen.exe', 'vendor' + os.sep + 'kindlegen', os.sep + 'app.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))
        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)

        rf = self.reduce_files(delete_files, exclude)

        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            # Entries start with a path separator; strip it before joining.
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                logging.getLogger('cps.web').debug("Delete dir " + item_path)
                shutil.rmtree(item_path)
            else:
                try:
                    logging.getLogger('cps.web').debug("Delete file " + item_path)
                    # log_from_thread("Delete file " + item_path)
                    os.remove(item_path)
                except Exception:
                    logging.getLogger('cps.web').debug("Could not remove:" + item_path)
        shutil.rmtree(source, ignore_errors=True)
|