Fix uncompressed cbz files
merge from master -> file extension limitation
parent 1561a4abdf
commit feb6a71f95
@@ -22,11 +22,10 @@
 #  along with this program. If not, see <http://www.gnu.org/licenses/>.
 
 # opds routing functions
-from cps import config, language_table, get_locale, app, ub, global_WorkerThread
+from cps import config, language_table, get_locale, app, ub, global_WorkerThread, db
 from flask import request, flash, redirect, url_for, abort, Markup, Response
 from flask import Blueprint
 import datetime
-from cps import db
 import os
 import json
 from flask_babel import gettext as _
@@ -34,7 +33,7 @@ from uuid import uuid4
 import helper
 from flask_login import current_user
 from web import login_required_if_no_ano, common_filters, order_authors, render_title_template, edit_required, \
-    upload_required, login_required
+    upload_required, login_required, EXTENSIONS_UPLOAD
 import gdriveutils
 from shutil import move, copyfile
 import uploader
@@ -44,8 +43,6 @@ editbook = Blueprint('editbook', __name__)
 
 EXTENSIONS_CONVERT = {'pdf', 'epub', 'mobi', 'azw3', 'docx', 'rtf', 'fb2', 'lit', 'lrf', 'txt', 'htmlz', 'rtf', 'odt'}
 
-EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx',
-                      'fb2', 'html', 'rtf', 'odt', 'mp3',  'm4a', 'm4b'}
 
 
 
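The editbook module now pulls EXTENSIONS_UPLOAD in from web instead of defining it itself; the set moves to cps/web.py further down in this commit. For illustration only, here is a minimal sketch of how such a whitelist is typically consulted when a file is uploaded. The helper name and the example file names are hypothetical, not code from this commit:

    import os

    # The whitelist as relocated to cps/web.py in this commit.
    EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc',
                         'docx', 'fb2', 'html', 'rtf', 'odt', 'mp3', 'm4a', 'm4b'}

    def allowed_upload(filename):
        # Hypothetical check: keep a file only if its extension is on the whitelist.
        ext = os.path.splitext(filename)[1].lstrip('.').lower()
        return ext in EXTENSIONS_UPLOAD

    print(allowed_upload('My Comic.cbz'))   # True
    print(allowed_upload('installer.exe'))  # False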
@@ -99,7 +99,8 @@ kthoom.setSettings = function() {
 };
 
 var createURLFromArray = function(array, mimeType) {
-    var offset = array.byteOffset, len = array.byteLength;
+    var offset = array.byteOffset;
+    var len = array.byteLength;
     var url;
     var blob;
 
@@ -68,23 +68,23 @@ var ZipLocalFile = function(bstream) {
         this.filename = bstream.readString(this.fileNameLength);
     }
 
-    console.log("Zip Local File Header:");
-    console.log(" version=" + this.version);
-    console.log(" general purpose=" + this.generalPurpose);
-    console.log(" compression method=" + this.compressionMethod);
-    console.log(" last mod file time=" + this.lastModFileTime);
-    console.log(" last mod file date=" + this.lastModFileDate);
-    console.log(" crc32=" + this.crc32);
-    console.log(" compressed size=" + this.compressedSize);
-    console.log(" uncompressed size=" + this.uncompressedSize);
-    console.log(" file name length=" + this.fileNameLength);
-    console.log(" extra field length=" + this.extraFieldLength);
-    console.log(" filename = '" + this.filename + "'");
+    info("Zip Local File Header:");
+    info(" version=" + this.version);
+    info(" general purpose=" + this.generalPurpose);
+    info(" compression method=" + this.compressionMethod);
+    info(" last mod file time=" + this.lastModFileTime);
+    info(" last mod file date=" + this.lastModFileDate);
+    info(" crc32=" + this.crc32);
+    info(" compressed size=" + this.compressedSize);
+    info(" uncompressed size=" + this.uncompressedSize);
+    info(" file name length=" + this.fileNameLength);
+    info(" extra field length=" + this.extraFieldLength);
+    info(" filename = '" + this.filename + "'");
 
     this.extraField = null;
     if (this.extraFieldLength > 0) {
         this.extraField = bstream.readString(this.extraFieldLength);
-        console.log(" extra field=" + this.extraField);
+        info(" extra field=" + this.extraField);
     }
 
     // read in the compressed data
@@ -110,13 +110,14 @@ ZipLocalFile.prototype.unzip = function() {
 
     // Zip Version 1.0, no compression (store only)
     if (this.compressionMethod == 0 ) {
-        console.log("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
+        info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
         currentBytesUnarchivedInFile = this.compressedSize;
         currentBytesUnarchived += this.compressedSize;
+        this.fileData = zeroCompression(this.fileData, this.uncompressedSize);
     }
     // version == 20, compression method == 8 (DEFLATE)
     else if (this.compressionMethod == 8) {
-        console.log("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
+        info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
         this.fileData = inflate(this.fileData, this.uncompressedSize);
     }
     else {
@@ -164,7 +165,7 @@ var unzip = function(arrayBuffer) {
 
         // archive extra data record
         if (bstream.peekNumber(4) == zArchiveExtraDataSignature) {
-            console.log(" Found an Archive Extra Data Signature");
+            info(" Found an Archive Extra Data Signature");
 
             // skipping this record for now
             bstream.readNumber(4);
@@ -175,7 +176,7 @@ var unzip = function(arrayBuffer) {
         // central directory structure
         // TODO: handle the rest of the structures (Zip64 stuff)
         if (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
-            console.log(" Found a Central File Header");
+            info(" Found a Central File Header");
 
             // read all file headers
             while (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
@@ -205,7 +206,7 @@ var unzip = function(arrayBuffer) {
 
         // digital signature
         if (bstream.peekNumber(4) == zDigitalSignatureSignature) {
-            console.log(" Found a Digital Signature");
+            info(" Found a Digital Signature");
 
             bstream.readNumber(4);
             var sizeOfSignature = bstream.readNumber(2);
@@ -493,6 +494,16 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
     return blockSize;
 }
 
+function zeroCompression(compressedData, numDecompressedBytes) {
+    var bstream = new bitjs.io.BitStream(compressedData.buffer,
+        false /* rtl */,
+        compressedData.byteOffset,
+        compressedData.byteLength);
+    var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
+    buffer.insertBytes(bstream.readBytes(numDecompressedBytes));
+    return buffer.data;
+}
+
 // {Uint8Array} compressedData A Uint8Array of the compressed file data.
 // compression method 8
 // deflate: http://tools.ietf.org/html/rfc1951
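The store-only branch (compression method 0), which is how many uncompressed .cbz files are packed, previously left this.fileData as the raw slice read in the header parser; it now runs through the new zeroCompression helper, which copies the stored bytes into a bitjs ByteBuffer, mirroring what the DEFLATE branch gets back from inflate(). A minimal Python sketch of the store-only idea, using the standard zipfile module with made-up file names:

    import io
    import zipfile

    # Build an in-memory, store-only archive, the way uncompressed .cbz files are written.
    payload = b'fake page data'
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w', compression=zipfile.ZIP_STORED) as archive:
        archive.writestr('page001.jpg', payload)

    # A ZIP_STORED member holds the file bytes verbatim, so "decompressing" it is a
    # straight copy of compressedSize (== uncompressedSize) bytes.
    with zipfile.ZipFile(buf) as archive:
        entry = archive.getinfo('page001.jpg')
        assert entry.compress_type == zipfile.ZIP_STORED
        assert entry.compress_size == entry.file_size
        assert archive.read('page001.jpg') == payload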
@@ -62,7 +62,8 @@
                   <li>
                     <form id="form-upload" class="navbar-form" action="{{ url_for('editbook.upload') }}" method="post" enctype="multipart/form-data">
                       <div class="form-group">
-                        <span class="btn btn-default btn-file">{{_('Upload')}}<input id="btn-upload" name="btn-upload" type="file" multiple></span>
+                        <span class="btn btn-default btn-file">{{_('Upload')}}<input id="btn-upload" name="btn-upload"
+                        type="file" accept="{% for format in accept %}.{{format}}{{ ',' if not loop.last }}{% endfor %}" multiple></span>
                       </div>
                     </form>
                   </li>
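The upload button's file input now carries an accept attribute, so the browser's file picker is pre-filtered to the formats the server accepts; accept is the EXTENSIONS_UPLOAD set handed to every template by render_title_template (see the cps/web.py hunks below). A stand-alone rendering of that Jinja loop with a shortened stand-in list shows the string the browser ends up with; this is an illustration only, not part of the commit:

    from jinja2 import Template

    formats = ['cbr', 'cbz', 'epub', 'pdf']  # shortened stand-in for EXTENSIONS_UPLOAD

    # The same loop the template uses to build the accept attribute value.
    tmpl = Template("{% for format in accept %}.{{ format }}{{ ',' if not loop.last }}{% endfor %}")
    print(tmpl.render(accept=formats))  # .cbr,.cbz,.epub,.pdf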
							
								
								
									
cps/web.py (11 changed lines)
@@ -47,7 +47,6 @@ from cps import lm, babel, ub, config, get_locale, language_table, app, db
 from pagination import Pagination
 from sqlalchemy.sql.expression import text
 
-
 feature_support = dict()
 try:
     from oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status
@@ -106,6 +105,10 @@ from flask import Blueprint
 
 EXTENSIONS_AUDIO = {'mp3', 'm4a', 'm4b'}
 
+EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx',
+                      'fb2', 'html', 'rtf', 'odt', 'mp3',  'm4a', 'm4b'}
+
+
 '''EXTENSIONS_READER = set(['txt', 'pdf', 'epub', 'zip', 'cbz', 'tar', 'cbt'] +
                         (['rar','cbr'] if feature_support['rar'] else []))'''
 
@@ -133,10 +136,7 @@ web = Blueprint('web', __name__)
 
 @lm.user_loader
 def load_user(user_id):
-    try:
     return ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first()
-    except Exception as e:
-        print(e)
 
 
 @lm.header_loader
@@ -325,7 +325,8 @@ def get_search_results(term):
 # Returns the template for rendering and includes the instance name
 def render_title_template(*args, **kwargs):
     sidebar=ub.get_sidebar_config(kwargs)
-    return render_template(instance=config.config_calibre_web_title, sidebar=sidebar, *args, **kwargs)
+    return render_template(instance=config.config_calibre_web_title, sidebar=sidebar, accept=EXTENSIONS_UPLOAD,
+                           *args, **kwargs)
 
 
 @web.before_app_request
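With accept=EXTENSIONS_UPLOAD passed from render_title_template, every template rendered through it can read the whitelist without each view passing it along. A rough, hypothetical miniature of that pattern (names shortened, not the project's code):

    from flask import Flask, render_template_string

    app = Flask(__name__)
    EXTENSIONS_UPLOAD = {'cbr', 'cbz', 'epub', 'pdf'}  # shortened stand-in

    def render_with_accept(source, **kwargs):
        # Every render gets the upload whitelist handed in as "accept".
        return render_template_string(source, accept=sorted(EXTENSIONS_UPLOAD), **kwargs)

    with app.app_context():
        print(render_with_accept("{{ accept | join(', ') }}"))  # cbr, cbz, epub, pdf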