Upload support detection of language

parent 4afb486986
commit 879510da82
@@ -64,7 +64,7 @@ def default_meta(tmp_file_path, original_file_name, original_file_extension):
         tags="",
         series="",
         series_id="",
-        languages=None)
+        languages="")


 def pdf_meta(tmp_file_path, original_file_name, original_file_extension):
@@ -93,7 +93,7 @@ def pdf_meta(tmp_file_path, original_file_name, original_file_extension):
         tags="",
         series="",
         series_id="",
-        languages=None)
+        languages="")


 def pdf_preview(tmp_file_path, tmp_dir):

cps/db.py

@@ -247,7 +247,7 @@ class Books(Base):
     identifiers = relationship('Identifiers', backref='books')

     def __init__(self, title, sort, author_sort, timestamp, pubdate, series_index, last_modified, path, has_cover,
-                 authors, tags):
+                 authors, tags, languages = None):
         self.title = title
         self.sort = sort
         self.author_sort = author_sort

cps/epub.py
@@ -5,7 +5,7 @@ import zipfile
 from lxml import etree
 import os
 import uploader
-
+from iso639 import languages as isoLanguages

 def extractCover(zip, coverFile, coverpath, tmp_file_name):
     if coverFile is None:
@@ -47,6 +47,15 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
             epub_metadata[s] = p.xpath('dc:%s/text()' % s, namespaces=ns)[0]
         else:
             epub_metadata[s] = "Unknown"
+    #detect lang need futher modification in web.py /upload
+    lang = p.xpath('dc:language/text()', namespaces=ns)[0]
+    lang = lang.split('-', 1)[0]
+    if len(lang) == 2:
+        epub_metadata['languages'] = isoLanguages.get(part1=lang).name
+    elif len(lang) == 3:
+        epub_metadata['languages'] = isoLanguages.get(part3=lang).name
+    else:
+        epub_metadata['languages'] = ""

     coversection = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='cover-image']/@href", namespaces=ns)
     if len(coversection) > 0:
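For reference, a minimal sketch (not part of the commit) of how the ISO 639 lookup added above resolves typical dc:language values. The sample codes are illustrative; a code unknown to the iso639 tables would presumably make the lookup raise, which the hunk does not guard against.

# Illustrative only -- not part of the commit. Shows how the lookup above
# resolves typical dc:language values to a display name.
from iso639 import languages as isoLanguages

for raw in ("en", "en-US", "eng"):
    lang = raw.split('-', 1)[0]                   # drop a region suffix such as "-US"
    if len(lang) == 2:
        name = isoLanguages.get(part1=lang).name  # ISO 639-1 two-letter code
    elif len(lang) == 3:
        name = isoLanguages.get(part3=lang).name  # ISO 639-3 three-letter code
    else:
        name = ""
    print("%s -> %s" % (raw, name))               # e.g. "en -> English"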
@@ -59,16 +68,16 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
             markup = zip.read(os.path.join(coverpath,coversection[0]))
             markupTree = etree.fromstring(markup)
             #no matter xhtml or html with no namespace
-            imgsrc = markupTree.xpath( "//*[local-name() = 'img']/@src")
+            imgsrc = markupTree.xpath("//*[local-name() = 'img']/@src")
             #imgsrc maybe startwith "../"" so fullpath join then relpath to cwd
-            filename = os.path.relpath(os.path.join(os.path.dirname(os.path.join(coverpath,coversection[0])),imgsrc[0]))
-            coverfile = extractCover(zip, filename, "", tmp_file_path)
+            filename = os.path.relpath(os.path.join(os.path.dirname(os.path.join(coverpath, coversection[0])), imgsrc[0]))
+            coverfile = extractCover(zip, filename, "", tmp_file_path)
         else:
             coverfile = extractCover(zip, coversection[0], coverpath, tmp_file_path)
     else:
         meta_cover = tree.xpath("/pkg:package/pkg:metadata/pkg:meta[@name='cover']/@content", namespaces=ns)
         if len(meta_cover) > 0:
-            meta_cover_content = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='"+meta_cover[0]+"']/@href",namespaces=ns)
+            meta_cover_content = tree.xpath("/pkg:package/pkg:manifest/pkg:item[@id='"+meta_cover[0]+"']/@href", namespaces=ns)
         if len(meta_cover_content) > 0:
             coverfile = extractCover(zip, meta_cover_content[0], coverpath, tmp_file_path)
         else:
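A quick sketch of the join-then-relpath step described by the comment in this hunk; the paths below are made up for illustration and are not from the commit.

# Illustrative only: resolving a cover image referenced via "../" from the cover page.
import os

coverpath = "OEBPS"                  # hypothetical directory of the OPF inside the zip
coversection = ["titlepage.xhtml"]   # hypothetical cover page entry
imgsrc = ["../images/cover.jpg"]     # image reference found inside that page

full = os.path.join(os.path.dirname(os.path.join(coverpath, coversection[0])), imgsrc[0])
print(os.path.relpath(full))         # images/cover.jpg on a POSIX system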
@@ -89,4 +98,4 @@ def get_epub_info(tmp_file_path, original_file_name, original_file_extension):
         tags="",
         series="",
         series_id="",
-        languages=None)
+        languages=epub_metadata['languages'])

cps/fb2.py

@@ -60,4 +60,4 @@ def get_fb2_info(tmp_file_path, original_file_extension):
         tags="",
         series="",
         series_id="",
-        languages=None)
+        languages="")

cps/web.py
@@ -2209,16 +2209,32 @@ def upload():
         else:
             db_author = db.Authors(author, helper.get_sorted_author(author), "")
             db.session.add(db_author)
+
+        #add language actually one value in list
+        input_language = meta.languages
+        db_language = None
+        if input_language != "":
+            input_language = isoLanguages.get(name=input_language).part3
+            hasLanguage = db.session.query(db.Languages).filter(db.Languages.lang_code == input_language).first()
+            if hasLanguage:
+                db_language = hasLanguage
+            else:
+                db_language = db.Languages(input_language)
+                db.session.add(db_language)
         # combine path and normalize path from windows systems
         path = os.path.join(author_dir, title_dir).replace('\\','/')
         db_book = db.Books(title, "", db_author.sort, datetime.datetime.now(), datetime.datetime(101, 01, 01), 1,
-                           datetime.datetime.now(), path, has_cover, db_author, [])
+                           datetime.datetime.now(), path, has_cover, db_author, [], db_language)
         db_book.authors.append(db_author)
+        if db_language is not None:
+            db_book.languages.append(db_language)
         db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, data_name)
         db_book.data.append(db_data)

         db.session.add(db_book)
         db.session.commit()
+        if db_language is not None: #display Full name instead of iso639.part3
+            db_book.languages[0].language_name = meta.languages
         author_names = []
         for author in db_book.authors:
             author_names.append(author.name)
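The language handling added to upload() is a lookup-or-create on the Languages table plus a name-to-code conversion; below is a hedged, standalone sketch of that pattern. The helper name and the session/Languages parameters are placeholders, not part of the commit.

# Illustrative only: the lookup-or-create pattern used above, written as a helper.
# "session" and "Languages" stand in for db.session and db.Languages.
from iso639 import languages as isoLanguages

def get_or_create_language(session, Languages, language_name):
    if not language_name:
        return None                                         # no language was detected
    lang_code = isoLanguages.get(name=language_name).part3  # e.g. "English" -> "eng"
    row = session.query(Languages).filter(Languages.lang_code == lang_code).first()
    if row is None:
        row = Languages(lang_code)                          # create the row on first use
        session.add(row)
    return row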