diff --git a/cps/cache_buster.py b/cps/cache_buster.py
index 2e2b9869..31c98c79 100644
--- a/cps/cache_buster.py
+++ b/cps/cache_buster.py
@@ -20,11 +20,11 @@ def init_cache_busting(app):
app.logger.debug('Computing cache-busting values...')
# compute file hashes
- for dirpath, dirnames, filenames in os.walk(static_folder):
+ for dirpath, __, filenames in os.walk(static_folder):
for filename in filenames:
# compute version component
rooted_filename = os.path.join(dirpath, filename)
- with open(rooted_filename, 'r') as f:
+ with open(rooted_filename, 'rb') as f:
file_hash = hashlib.md5(f.read()).hexdigest()[:7]
# save version to tables
diff --git a/cps/gdriveutils.py b/cps/gdriveutils.py
index 550426ca..8909db15 100644
--- a/cps/gdriveutils.py
+++ b/cps/gdriveutils.py
@@ -73,26 +73,27 @@ if not os.path.exists(dbpath):
migrate()
-def getDrive(gauth=None):
- if not gauth:
- gauth = GoogleAuth(settings_file='settings.yaml')
- # Try to load saved client credentials
- gauth.LoadCredentialsFile("gdrive_credentials")
- if gauth.access_token_expired:
- # Refresh them if expired
- gauth.Refresh()
- else:
- # Initialize the saved creds
- gauth.Authorize()
- # Save the current credentials to a file
- return GoogleDrive(gauth)
+def getDrive(drive=None, gauth=None):
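+ # Reuse the passed-in Drive client when available (refreshing its token if expired);
+ # otherwise build a new authenticated GoogleDrive instance from the saved credentials.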
+ if not drive:
+ if not gauth:
+ gauth = GoogleAuth(settings_file='settings.yaml')
+ # Try to load saved client credentials
+ gauth.LoadCredentialsFile("gdrive_credentials")
+ if gauth.access_token_expired:
+ # Refresh them if expired
+ gauth.Refresh()
+ else:
+ # Initialize the saved creds
+ gauth.Authorize()
+ # Save the current credentials to a file
+ return GoogleDrive(gauth)
+ if drive.auth.access_token_expired:
+ drive.auth.Refresh()
+ return drive
def getEbooksFolder(drive=None):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
ebooksFolder = "title = '%s' and 'root' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false" % config.config_google_drive_folder
fileList = drive.ListFile({'q': ebooksFolder}).GetList()
@@ -113,20 +114,14 @@ def getEbooksFolderId(drive=None):
def getFolderInFolder(parentId, folderName, drive=None):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
folder = "title = '%s' and '%s' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false" % (folderName.replace("'", "\\'"), parentId)
fileList = drive.ListFile({'q': folder}).GetList()
return fileList[0]
def getFile(pathId, fileName, drive=None):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
metaDataFile = "'%s' in parents and trashed = false and title = '%s'" % (pathId, fileName.replace("'", "\\'"))
fileList = drive.ListFile({'q': metaDataFile}).GetList()
@@ -134,10 +129,7 @@ def getFile(pathId, fileName, drive=None):
def getFolderId(path, drive=None):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
currentFolderId = getEbooksFolderId(drive)
sqlCheckPath = path if path[-1] == '/' else path + '/'
storedPathName = session.query(GdriveId).filter(GdriveId.path == sqlCheckPath).first()
@@ -168,10 +160,7 @@ def getFolderId(path, drive=None):
def getFileFromEbooksFolder(drive, path, fileName):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
if path:
# sqlCheckPath=path if path[-1] =='/' else path + '/'
folderId = getFolderId(path, drive)
@@ -182,10 +171,7 @@ def getFileFromEbooksFolder(drive, path, fileName):
def copyDriveFileRemote(drive, origin_file_id, copy_title):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
copied_file = {'title': copy_title}
try:
file_data = drive.auth.service.files().copy(
@@ -197,19 +183,13 @@ def copyDriveFileRemote(drive, origin_file_id, copy_title):
def downloadFile(drive, path, filename, output):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
f = getFileFromEbooksFolder(drive, path, filename)
f.GetContentFile(output)
def backupCalibreDbAndOptionalDownload(drive, f=None):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
metaDataFile = "'%s' in parents and title = 'metadata.db' and trashed = false" % getEbooksFolderId()
fileList = drive.ListFile({'q': metaDataFile}).GetList()
@@ -221,12 +201,10 @@ def backupCalibreDbAndOptionalDownload(drive, f=None):
def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
- ignoreFiles=[],
+ ignoreFiles=None,
parent=None, prevDir=''):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ ignoreFiles = ignoreFiles or []
+ drive = getDrive(drive)
isInitial = not bool(parent)
if not parent:
parent = getEbooksFolder(drive)
@@ -254,10 +232,7 @@ def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
def uploadFileToEbooksFolder(drive, destFile, f):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
parent = getEbooksFolder(drive)
splitDir = destFile.split('/')
for i, x in enumerate(splitDir):
@@ -281,10 +256,7 @@ def uploadFileToEbooksFolder(drive, destFile, f):
def watchChange(drive, channel_id, channel_type, channel_address,
channel_token=None, expiration=None):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
# Watch for all changes to a user's Drive.
# Args:
# service: Drive API service instance.
@@ -327,10 +299,7 @@ def watchFile(drive, file_id, channel_id, channel_type, channel_address,
Raises:
apiclient.errors.HttpError: if http request to create channel fails.
"""
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
body = {
'id': channel_id,
@@ -353,10 +322,7 @@ def stopChannel(drive, channel_id, resource_id):
Raises:
apiclient.errors.HttpError: if http request to create channel fails.
"""
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
# service=drive.auth.service
body = {
'id': channel_id,
@@ -366,10 +332,7 @@ def stopChannel(drive, channel_id, resource_id):
def getChangeById (drive, change_id):
- if not drive:
- drive = getDrive()
- if drive.auth.access_token_expired:
- drive.auth.Refresh()
+ drive = getDrive(drive)
# Print a single Change resource information.
#
# Args:
diff --git a/cps/helper.py b/cps/helper.py
index 0c38b684..0b2e50ef 100755
--- a/cps/helper.py
+++ b/cps/helper.py
@@ -266,6 +266,7 @@ def get_valid_filename(value, replace_whitespace=True):
"""
if value[-1:] == u'.':
value = value[:-1]+u'_'
+ value = value.replace("/", "_").replace(":", "_").strip('\0')  # replace path separators and colons with underscores and strip NUL bytes
if use_unidecode:
value=(unidecode.unidecode(value)).strip()
else:
diff --git a/cps/static/css/kthoom.css b/cps/static/css/kthoom.css
new file mode 100644
index 00000000..a6b41a32
--- /dev/null
+++ b/cps/static/css/kthoom.css
@@ -0,0 +1,105 @@
+body {
+ background: #444;
+ overflow: hidden;
+ color: white;
+ font-family: sans-serif;
+ margin: 0px;
+}
+
+.main {
+ position: relative;
+ left: 5px;
+ overflow: hidden;
+ right: 5px;
+ text-align: center;
+ top: 5px;
+}
+
+#progress {
+ position: absolute;
+ display: inline;
+ left: 90px;
+ right: 160px;
+ height: 20px;
+ margin-top: 1px;
+ text-align: right;
+}
+
+.hide {
+ display: none !important;
+}
+
+#mainText {
+ text-align: left;
+ width: 90%;
+ position: relative;
+ top: 10px;
+ background: #ccc;
+ color: black;
+ margin-right: auto;
+ margin-left: auto;
+ padding: 10px;
+ word-wrap: break-word;
+}
+
+#mainImage{
+ margin-top: 32px;
+}
+
+#titlebar.main {
+ opacity: 0;
+ position: absolute;
+ top: 0;
+ height: 30px;
+ left: 0;
+ right: 0;
+ background-color: black;
+ padding-bottom: 70px;
+ -webkit-transition: opacity 0.2s ease;
+ -moz-transition: opacity 0.2s ease;
+ transition: opacity 0.2s ease;
+ background: -moz-linear-gradient(top, rgba(0,2,34,1) 0%, rgba(0,1,24,1) 30%, rgba(0,0,0,0) 100%); /* FF3.6+ */
+ background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,rgba(0,2,34,1)), color-stop(30%,rgba(0,1,24,1)), color-stop(100%,rgba(0,0,0,0))); /* Chrome,Safari4+ */
+ background: -webkit-linear-gradient(top, rgba(0,2,34,1) 0%,rgba(0,1,24,1) 30%,rgba(0,0,0,0) 100%); /* Chrome10+,Safari5.1+ */
+ background: -o-linear-gradient(top, rgba(0,2,34,1) 0%,rgba(0,1,24,1) 30%,rgba(0,0,0,0) 100%); /* Opera11.10+ */
+ background: -ms-linear-gradient(top, rgba(0,2,34,1) 0%,rgba(0,1,24,1) 30%,rgba(0,0,0,0) 100%); /* IE10+ */
+ filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#000222', endColorstr='#00000000',GradientType=0 ); /* IE6-9 */
+ background: linear-gradient(top, rgba(0,2,34,1) 0%,rgba(0,1,24,1) 30%,rgba(0,0,0,0) 100%); /* W3C */
+}
+
+#prev {
+ left: 40px;
+}
+
+#next {
+ right: 40px;
+}
+
+.arrow {
+ position: absolute;
+ top: 50%;
+ margin-top: -32px;
+ font-size: 64px;
+ color: #E2E2E2;
+ font-family: arial, sans-serif;
+ font-weight: bold;
+ cursor: pointer;
+ -webkit-user-select: none;
+ -khtml-user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ user-select: none;
+}
+
+.arrow:hover {
+ color: #777;
+}
+
+.arrow:active,
+.arrow.active {
+ color: #000;
+}
diff --git a/cps/static/css/style.css b/cps/static/css/style.css
index 673623c5..a89291e5 100644
--- a/cps/static/css/style.css
+++ b/cps/static/css/style.css
@@ -90,3 +90,9 @@ input.pill:not(:checked) + label .glyphicon {
#meta-info img { max-height: 150px; max-width: 100px; cursor: pointer; }
.padded-bottom { margin-bottom: 15px; }
+
+.upload-format-input-text {display: initial;}
+#btn-upload-format {display: none;}
diff --git a/cps/static/img/goodreads.svg b/cps/static/img/goodreads.svg
index f89130e9..32695526 100644
--- a/cps/static/img/goodreads.svg
+++ b/cps/static/img/goodreads.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/cps/static/js/archive.js b/cps/static/js/archive.js
new file mode 100644
index 00000000..28aae182
--- /dev/null
+++ b/cps/static/js/archive.js
@@ -0,0 +1,364 @@
+/**
+ * archive.js
+ *
+ * Provides base functionality for unarchiving.
+ *
+ * Licensed under the MIT License
+ *
+ * Copyright(c) 2011 Google Inc.
+ */
+
+/* global bitjs */
+
+var bitjs = bitjs || {};
+bitjs.archive = bitjs.archive || {};
+
+(function() {
+
+ // ===========================================================================
+ // Stolen from Closure because it's the best way to do Java-like inheritance.
+ bitjs.base = function(me, optMethodName, varArgs) {
+ var caller = arguments.callee.caller;
+ if (caller.superClass_) {
+ // This is a constructor. Call the superclass constructor.
+ return caller.superClass_.constructor.apply(
+ me, Array.prototype.slice.call(arguments, 1));
+ }
+
+ var args = Array.prototype.slice.call(arguments, 2);
+ var foundCaller = false;
+ for (var ctor = me.constructor;
+ ctor; ctor = ctor.superClass_ && ctor.superClass_.constructor) {
+ if (ctor.prototype[optMethodName] === caller) {
+ foundCaller = true;
+ } else if (foundCaller) {
+ return ctor.prototype[optMethodName].apply(me, args);
+ }
+ }
+
+ // If we did not find the caller in the prototype chain,
+ // then one of two things happened:
+ // 1) The caller is an instance method.
+ // 2) This method was not called by the right caller.
+ if (me[optMethodName] === caller) {
+ return me.constructor.prototype[optMethodName].apply(me, args);
+ } else {
+ throw Error(
+ "goog.base called from a method of one name " +
+ "to a method of a different name");
+ }
+ };
+ bitjs.inherits = function(childCtor, parentCtor) {
+ /** @constructor */
+ function TempCtor() {}
+ TempCtor.prototype = parentCtor.prototype;
+ childCtor.superClass_ = parentCtor.prototype;
+ childCtor.prototype = new TempCtor();
+ childCtor.prototype.constructor = childCtor;
+ };
+ // ===========================================================================
+
+ /**
+ * An unarchive event.
+ *
+ * @param {string} type The event type.
+ * @constructor
+ */
+ bitjs.archive.UnarchiveEvent = function(type) {
+ /**
+ * The event type.
+ *
+ * @type {string}
+ */
+ this.type = type;
+ };
+
+ /**
+ * The UnarchiveEvent types.
+ */
+ bitjs.archive.UnarchiveEvent.Type = {
+ START: "start",
+ PROGRESS: "progress",
+ EXTRACT: "extract",
+ FINISH: "finish",
+ INFO: "info",
+ ERROR: "error"
+ };
+
+ /**
+ * Useful for passing info up to the client (for debugging).
+ *
+ * @param {string} msg The info message.
+ */
+ bitjs.archive.UnarchiveInfoEvent = function(msg) {
+ bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.INFO);
+
+ /**
+ * The information message.
+ *
+ * @type {string}
+ */
+ this.msg = msg;
+ };
+ bitjs.inherits(bitjs.archive.UnarchiveInfoEvent, bitjs.archive.UnarchiveEvent);
+
+ /**
+ * An unrecoverable error has occurred.
+ *
+ * @param {string} msg The error message.
+ */
+ bitjs.archive.UnarchiveErrorEvent = function(msg) {
+ bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.ERROR);
+
+ /**
+ * The information message.
+ *
+ * @type {string}
+ */
+ this.msg = msg;
+ };
+ bitjs.inherits(bitjs.archive.UnarchiveErrorEvent, bitjs.archive.UnarchiveEvent);
+
+ /**
+ * Start event.
+ *
+ * @constructor
+ */
+ bitjs.archive.UnarchiveStartEvent = function() {
+ bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.START);
+ };
+ bitjs.inherits(bitjs.archive.UnarchiveStartEvent, bitjs.archive.UnarchiveEvent);
+
+ /**
+ * Finish event.
+ *
+ * @constructor
+ */
+ bitjs.archive.UnarchiveFinishEvent = function() {
+ bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.FINISH);
+ };
+ bitjs.inherits(bitjs.archive.UnarchiveFinishEvent, bitjs.archive.UnarchiveEvent);
+
+ /**
+ * Progress event.
+ */
+ bitjs.archive.UnarchiveProgressEvent = function(
+ currentFilename,
+ currentFileNumber,
+ currentBytesUnarchivedInFile,
+ currentBytesUnarchived,
+ totalUncompressedBytesInArchive,
+ totalFilesInArchive)
+ {
+ bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.PROGRESS);
+
+ this.currentFilename = currentFilename;
+ this.currentFileNumber = currentFileNumber;
+ this.currentBytesUnarchivedInFile = currentBytesUnarchivedInFile;
+ this.totalFilesInArchive = totalFilesInArchive;
+ this.currentBytesUnarchived = currentBytesUnarchived;
+ this.totalUncompressedBytesInArchive = totalUncompressedBytesInArchive;
+ };
+ bitjs.inherits(bitjs.archive.UnarchiveProgressEvent, bitjs.archive.UnarchiveEvent);
+
+ /**
+ * All extracted files returned by an Unarchiver will implement
+ * the following interface:
+ *
+ * interface UnarchivedFile {
+ * string filename
+ * TypedArray fileData
+ * }
+ *
+ */
+
+ /**
+ * Extract event.
+ */
+ bitjs.archive.UnarchiveExtractEvent = function(unarchivedFile) {
+ bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.EXTRACT);
+
+ /**
+ * @type {UnarchivedFile}
+ */
+ this.unarchivedFile = unarchivedFile;
+ };
+ bitjs.inherits(bitjs.archive.UnarchiveExtractEvent, bitjs.archive.UnarchiveEvent);
+
+
+ /**
+ * Base class for all Unarchivers.
+ *
+ * @param {ArrayBuffer} arrayBuffer The Array Buffer.
+ * @param {string} optPathToBitJS Optional string for where the BitJS files are located.
+ * @constructor
+ */
+ bitjs.archive.Unarchiver = function(arrayBuffer, optPathToBitJS) {
+ /**
+ * The ArrayBuffer object.
+ * @type {ArrayBuffer}
+ * @protected
+ */
+ this.ab = arrayBuffer;
+
+ /**
+ * The path to the BitJS files.
+ * @type {string}
+ * @private
+ */
+ this.pathToBitJS_ = optPathToBitJS || "";
+
+ /**
+ * A map from event type to an array of listeners.
+ * @type {Map.<string, Array>}
+ */
+ this.listeners_ = {};
+ for (var type in bitjs.archive.UnarchiveEvent.Type) {
+ this.listeners_[bitjs.archive.UnarchiveEvent.Type[type]] = [];
+ }
+ };
+
+ /**
+ * Private web worker initialized during start().
+ * @type {Worker}
+ * @private
+ */
+ bitjs.archive.Unarchiver.prototype.worker_ = null;
+
+ /**
+ * This method must be overridden by the subclass to return the script filename.
+ * @return {string} The script filename.
+ * @protected.
+ */
+ bitjs.archive.Unarchiver.prototype.getScriptFileName = function() {
+ throw "Subclasses of AbstractUnarchiver must overload getScriptFileName()";
+ };
+
+ /**
+ * Adds an event listener for UnarchiveEvents.
+ *
+ * @param {string} type The event type.
+ * @param {function} listener The event handler function.
+ */
+ bitjs.archive.Unarchiver.prototype.addEventListener = function(type, listener) {
+ if (type in this.listeners_) {
+ if (this.listeners_[type].indexOf(listener) === -1) {
+ this.listeners_[type].push(listener);
+ }
+ }
+ };
+
+ /**
+ * Removes an event listener.
+ *
+ * @param {string} type The event type.
+ * @param {EventListener|function} listener The event listener or handler function.
+ */
+ bitjs.archive.Unarchiver.prototype.removeEventListener = function(type, listener) {
+ if (type in this.listeners_) {
+ var index = this.listeners_[type].indexOf(listener);
+ if (index !== -1) {
+ this.listeners_[type].splice(index, 1);
+ }
+ }
+ };
+
+ /**
+ * Receive an event and pass it to the listener functions.
+ *
+ * @param {bitjs.archive.UnarchiveEvent} e
+ * @private
+ */
+ bitjs.archive.Unarchiver.prototype.handleWorkerEvent_ = function(e) {
+ if ((e instanceof bitjs.archive.UnarchiveEvent || e.type) &&
+ this.listeners_[e.type] instanceof Array) {
+ this.listeners_[e.type].forEach(function (listener) {
+ listener(e);
+ });
+ if (e.type === bitjs.archive.UnarchiveEvent.Type.FINISH) {
+ this.worker_.terminate();
+ }
+ } else {
+ console.log(e);
+ }
+ };
+
+ /**
+ * Starts the unarchive in a separate Web Worker thread and returns immediately.
+ */
+ bitjs.archive.Unarchiver.prototype.start = function() {
+ var me = this;
+ var scriptFileName = this.pathToBitJS_ + this.getScriptFileName();
+ if (scriptFileName) {
+ this.worker_ = new Worker(scriptFileName);
+
+ this.worker_.onerror = function(e) {
+ console.log("Worker error: message = " + e.message);
+ throw e;
+ };
+
+ this.worker_.onmessage = function(e) {
+ if (typeof e.data === "string") {
+ // Just log any strings the workers pump our way.
+ console.log(e.data);
+ } else {
+ // Assume that it is an UnarchiveEvent. Some browsers preserve the 'type'
+ // so that instanceof UnarchiveEvent returns true, but others do not.
+ me.handleWorkerEvent_(e.data);
+ }
+ };
+
+ this.worker_.postMessage({file: this.ab});
+ }
+ };
+
+ /**
+ * Terminates the Web Worker for this Unarchiver and returns immediately.
+ */
+ bitjs.archive.Unarchiver.prototype.stop = function() {
+ if (this.worker_) {
+ this.worker_.terminate();
+ }
+ };
+
+
+ /**
+ * Unzipper
+ * @extends {bitjs.archive.Unarchiver}
+ * @constructor
+ */
+ bitjs.archive.Unzipper = function(arrayBuffer, optPathToBitJS) {
+ bitjs.base(this, arrayBuffer, optPathToBitJS);
+ };
+ bitjs.inherits(bitjs.archive.Unzipper, bitjs.archive.Unarchiver);
+ bitjs.archive.Unzipper.prototype.getScriptFileName = function() {
+ return "unzip.js";
+ };
+
+ /**
+ * Unrarrer
+ * @extends {bitjs.archive.Unarchiver}
+ * @constructor
+ */
+ bitjs.archive.Unrarrer = function(arrayBuffer, optPathToBitJS) {
+ bitjs.base(this, arrayBuffer, optPathToBitJS);
+ };
+ bitjs.inherits(bitjs.archive.Unrarrer, bitjs.archive.Unarchiver);
+ bitjs.archive.Unrarrer.prototype.getScriptFileName = function() {
+ return "unrar.js";
+ };
+
+ /**
+ * Untarrer
+ * @extends {bitjs.archive.Unarchiver}
+ * @constructor
+ */
+ bitjs.archive.Untarrer = function(arrayBuffer, optPathToBitJS) {
+ bitjs.base(this, arrayBuffer, optPathToBitJS);
+ };
+ bitjs.inherits(bitjs.archive.Untarrer, bitjs.archive.Unarchiver);
+ bitjs.archive.Untarrer.prototype.getScriptFileName = function() {
+ return "untar.js";
+ };
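+
+ // A minimal usage sketch (assuming `ab` holds the archive as an ArrayBuffer and the second
+ // argument points at the directory containing unzip.js/unrar.js/untar.js), mirroring how kthoom.js drives these classes:
+ //
+ // var unzipper = new bitjs.archive.Unzipper(ab, "../../static/js/");
+ // unzipper.addEventListener(bitjs.archive.UnarchiveEvent.Type.EXTRACT, function(e) {
+ // console.log("extracted", e.unarchivedFile.filename, e.unarchivedFile.fileData.length);
+ // });
+ // unzipper.start(); // spawns the Web Worker named by getScriptFileName() and begins emitting UnarchiveEvents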
+
+})();
diff --git a/cps/static/js/edit_books.js b/cps/static/js/edit_books.js
index 18acccc1..3932e3b6 100644
--- a/cps/static/js/edit_books.js
+++ b/cps/static/js/edit_books.js
@@ -4,31 +4,31 @@
/* global Bloodhound, language, Modernizr, tinymce */
if ($("#description").length) {
-tinymce.init({
- selector: "#description",
- branding: false,
- menubar: "edit view format",
- language
-});
+ tinymce.init({
+ selector: "#description",
+ branding: false,
+ menubar: "edit view format",
+ language: language
+ });
+ if (!Modernizr.inputtypes.date) {
+ $("#pubdate").datepicker({
+ format: "yyyy-mm-dd",
+ language: language
+ }).on("change", function () {
+ // Show localized date over top of the standard YYYY-MM-DD date
+ var pubDate;
+ var results = /(\d{4})[-\/\\](\d{1,2})[-\/\\](\d{1,2})/.exec(this.value); // YYYY-MM-DD
+ if (results) {
+ pubDate = new Date(results[1], parseInt(results[2], 10) - 1, results[3]) || new Date(this.value);
+ $("#fake_pubdate")
+ .val(pubDate.toLocaleDateString(language))
+ .removeClass("hidden");
+ }
+ }).trigger("change");
+ }
+}
-if (!Modernizr.inputtypes.date) {
- $("#pubdate").datepicker({
- format: "yyyy-mm-dd",
- language
- }).on("change", function () {
- // Show localized date over top of the standard YYYY-MM-DD date
- let pubDate;
- const results = /(\d{4})[-\/\\](\d{1,2})[-\/\\](\d{1,2})/.exec(this.value); // YYYY-MM-DD
- if (results) {
- pubDate = new Date(results[1], parseInt(results[2], 10)-1, results[3]) || new Date(this.value);
- }
- $("#fake_pubdate")
- .val(pubDate.toLocaleDateString(language))
- .removeClass("hidden");
- }).trigger("change");
-}
-}
/*
Takes a prefix, query typeahead callback, Bloodhound typeahead adapter
and returns the completions it gets from the bloodhound engine prefixed.
@@ -43,6 +43,7 @@ function prefixedSource(prefix, query, cb, bhAdapter) {
cb(matches);
});
}
+
function getPath() {
var jsFileLocation = $("script[src*=edit_books]").attr("src"); // the js file path
jsFileLocation = jsFileLocation.replace("/static/js/edit_books.js", ""); // the js folder path
@@ -56,7 +57,7 @@ var authors = new Bloodhound({
},
queryTokenizer: Bloodhound.tokenizers.whitespace,
remote: {
- url: getPath()+"/get_authors_json?q=%QUERY"
+ url: getPath() + "/get_authors_json?q=%QUERY"
}
});
@@ -69,9 +70,9 @@ var series = new Bloodhound({
return [query];
},
remote: {
- url: getPath()+"/get_series_json?q=",
+ url: getPath() + "/get_series_json?q=",
replace: function replace(url, query) {
- return url+encodeURIComponent(query);
+ return url + encodeURIComponent(query);
}
}
});
@@ -84,11 +85,11 @@ var tags = new Bloodhound({
},
queryTokenizer: function queryTokenizer(query) {
var tokens = query.split(",");
- tokens = [tokens[tokens.length-1].trim()];
+ tokens = [tokens[tokens.length - 1].trim()];
return tokens;
},
remote: {
- url: getPath()+"/get_tags_json?q=%QUERY"
+ url: getPath() + "/get_tags_json?q=%QUERY"
}
});
@@ -101,9 +102,9 @@ var languages = new Bloodhound({
return [query];
},
remote: {
- url: getPath()+"/get_languages_json?q=",
+ url: getPath() + "/get_languages_json?q=",
replace: function replace(url, query) {
- return url+encodeURIComponent(query);
+ return url + encodeURIComponent(query);
}
}
});
@@ -112,9 +113,9 @@ function sourceSplit(query, cb, split, source) {
var bhAdapter = source.ttAdapter();
var tokens = query.split(split);
- var currentSource = tokens[tokens.length-1].trim();
+ var currentSource = tokens[tokens.length - 1].trim();
- tokens.splice(tokens.length-1, 1); // remove last element
+ tokens.splice(tokens.length - 1, 1); // remove last element
var prefix = "";
var newSplit;
if (split === "&") {
@@ -192,7 +193,7 @@ promiseLanguages.done(function() {
$("#search").on("change input.typeahead:selected", function() {
var form = $("form").serialize();
- $.getJSON( getPath()+"/get_matching_tags", form, function( data ) {
+ $.getJSON( getPath() + "/get_matching_tags", form, function( data ) {
$(".tags_click").each(function() {
if ($.inArray(parseInt($(this).children("input").first().val(), 10), data.tags) === -1 ) {
if (!($(this).hasClass("active"))) {
@@ -204,3 +205,11 @@ $("#search").on("change input.typeahead:selected", function() {
});
});
});
+
+$("#btn-upload-format").on("change", function () {
+ var filename = $(this).val();
+ if (filename.substring(3, 11) === "fakepath") {
+ filename = filename.substring(12); // strip the "C:\fakepath\" prefix browsers prepend to file-input values
+ }
+ $("#upload-format").html(filename);
+});
diff --git a/cps/static/js/get_meta.js b/cps/static/js/get_meta.js
index 8dd34cdf..99ae0a7b 100644
--- a/cps/static/js/get_meta.js
+++ b/cps/static/js/get_meta.js
@@ -3,8 +3,8 @@
* Created by idalin
* Google Books api document: https://developers.google.com/books/docs/v1/using
* Douban Books api document: https://developers.douban.com/wiki/?title=book_v2 (Chinese Only)
- */
- /* global _, i18nMsg, tinymce */
+*/
+/* global _, i18nMsg, tinymce */
var dbResults = [];
var ggResults = [];
@@ -103,6 +103,10 @@ $(function () {
}
};
+ if (book.rating > 0) {
+ book.rating /= 2;
+ }
+
var $book = $(templates.bookResult(book));
$book.find("img").on("click", function () {
populateForm(book);
diff --git a/cps/static/js/io.js b/cps/static/js/io.js
new file mode 100644
index 00000000..6cc4d81c
--- /dev/null
+++ b/cps/static/js/io.js
@@ -0,0 +1,484 @@
+/*
+ * io.js
+ *
+ * Provides readers for bit/byte streams (reading) and a byte buffer (writing).
+ *
+ * Licensed under the MIT License
+ *
+ * Copyright(c) 2011 Google Inc.
+ * Copyright(c) 2011 antimatter15
+ */
+
+/* global bitjs, Uint8Array */
+
+var bitjs = bitjs || {};
+bitjs.io = bitjs.io || {};
+
+(function() {
+
+ // mask for getting the Nth bit (zero-based)
+ bitjs.BIT = [ 0x01, 0x02, 0x04, 0x08,
+ 0x10, 0x20, 0x40, 0x80,
+ 0x100, 0x200, 0x400, 0x800,
+ 0x1000, 0x2000, 0x4000, 0x8000];
+
+ // mask for getting N number of bits (0-8)
+ var BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF ];
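+ // e.g. BITMASK[3] === 0x07; the peek functions below shift this mask within the current byte to extract n bits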
+
+
+ /**
+ * This bit stream peeks and consumes bits out of a binary stream.
+ *
+ * @param {ArrayBuffer} ab An ArrayBuffer object or a Uint8Array.
+ * @param {boolean} rtl Whether the stream reads bits from the byte starting
+ * from bit 7 to 0 (true) or bit 0 to 7 (false).
+ * @param {Number} optOffset The offset into the ArrayBuffer
+ * @param {Number} optLength The length of this BitStream
+ */
+ bitjs.io.BitStream = function(ab, rtl, optOffset, optLength) {
+ if (!ab || !ab.toString || ab.toString() !== "[object ArrayBuffer]") {
+ throw "Error! BitArray constructed with an invalid ArrayBuffer object";
+ }
+
+ var offset = optOffset || 0;
+ var length = optLength || ab.byteLength;
+ this.bytes = new Uint8Array(ab, offset, length);
+ this.bytePtr = 0; // tracks which byte we are on
+ this.bitPtr = 0; // tracks which bit we are on (can have values 0 through 7)
+ this.peekBits = rtl ? this.peekBitsRtl : this.peekBitsLtr;
+ };
+
+
+ /**
+ * byte0 byte1 byte2 byte3
+ * 7......0 | 7......0 | 7......0 | 7......0
+ *
+ * The bit pointer starts at bit0 of byte0 and moves left until it reaches
+ * bit7 of byte0, then jumps to bit0 of byte1, etc.
+ * @param {number} n The number of bits to peek.
+ * @param {boolean=} movePointers Whether to move the pointer, defaults false.
+ * @return {number} The peeked bits, as an unsigned number.
+ */
+ bitjs.io.BitStream.prototype.peekBitsLtr = function(n, movePointers) {
+ if (n <= 0 || typeof n !== typeof 1) {
+ return 0;
+ }
+
+ var movePointers = movePointers || false;
+ var bytePtr = this.bytePtr;
+ var bitPtr = this.bitPtr;
+ var result = 0;
+ var bitsIn = 0;
+ var bytes = this.bytes;
+
+ // keep going until we have no more bits left to peek at
+ // TODO: Consider putting all bits from bytes we will need into a variable and then
+ // shifting/masking it to just extract the bits we want.
+ // This could be considerably faster when reading more than 3 or 4 bits at a time.
+ while (n > 0) {
+ if (bytePtr >= bytes.length) {
+ throw "Error! Overflowed the bit stream! n=" + n + ", bytePtr=" + bytePtr + ", bytes.length=" +
+ bytes.length + ", bitPtr=" + bitPtr;
+ }
+
+ var numBitsLeftInThisByte = (8 - bitPtr);
+ var mask;
+ if (n >= numBitsLeftInThisByte) {
+ mask = (BITMASK[numBitsLeftInThisByte] << bitPtr);
+ result |= (((bytes[bytePtr] & mask) >> bitPtr) << bitsIn);
+
+ bytePtr++;
+ bitPtr = 0;
+ bitsIn += numBitsLeftInThisByte;
+ n -= numBitsLeftInThisByte;
+ } else {
+ mask = (BITMASK[n] << bitPtr);
+ result |= (((bytes[bytePtr] & mask) >> bitPtr) << bitsIn);
+
+ bitPtr += n;
+ bitsIn += n;
+ n = 0;
+ }
+ }
+
+ if (movePointers) {
+ this.bitPtr = bitPtr;
+ this.bytePtr = bytePtr;
+ }
+
+ return result;
+ };
+
+
+ /**
+ * byte0 byte1 byte2 byte3
+ * 7......0 | 7......0 | 7......0 | 7......0
+ *
+ * The bit pointer starts at bit7 of byte0 and moves right until it reaches
+ * bit0 of byte0, then goes to bit7 of byte1, etc.
+ * @param {number} n The number of bits to peek.
+ * @param {boolean=} movePointers Whether to move the pointer, defaults false.
+ * @return {number} The peeked bits, as an unsigned number.
+ */
+ bitjs.io.BitStream.prototype.peekBitsRtl = function(n, movePointers) {
+ if (n <= 0 || typeof n != typeof 1) {
+ return 0;
+ }
+
+ var movePointers = movePointers || false;
+ var bytePtr = this.bytePtr;
+ var bitPtr = this.bitPtr;
+ var result = 0;
+ var bytes = this.bytes;
+
+ // keep going until we have no more bits left to peek at
+ // TODO: Consider putting all bits from bytes we will need into a variable and then
+ // shifting/masking it to just extract the bits we want.
+ // This could be considerably faster when reading more than 3 or 4 bits at a time.
+ while (n > 0) {
+
+ if (bytePtr >= bytes.length) {
+ throw "Error! Overflowed the bit stream! n=" + n + ", bytePtr=" + bytePtr + ", bytes.length=" +
+ bytes.length + ", bitPtr=" + bitPtr;
+ // return -1;
+ }
+
+ var numBitsLeftInThisByte = (8 - bitPtr);
+ if (n >= numBitsLeftInThisByte) {
+ result <<= numBitsLeftInThisByte;
+ result |= (BITMASK[numBitsLeftInThisByte] & bytes[bytePtr]);
+ bytePtr++;
+ bitPtr = 0;
+ n -= numBitsLeftInThisByte;
+ }
+ else {
+ result <<= n;
+ result |= ((bytes[bytePtr] & (BITMASK[n] << (8 - n - bitPtr))) >> (8 - n - bitPtr));
+
+ bitPtr += n;
+ n = 0;
+ }
+ }
+
+ if (movePointers) {
+ this.bitPtr = bitPtr;
+ this.bytePtr = bytePtr;
+ }
+
+ return result;
+ };
+
+
+ /**
+ * Some voodoo magic: peeks at the next 16 bits starting at the current bit offset without advancing the pointers.
+ */
+ bitjs.io.BitStream.prototype.getBits = function() {
+ return (((((this.bytes[this.bytePtr] & 0xff) << 16) +
+ ((this.bytes[this.bytePtr + 1] & 0xff) << 8) +
+ ((this.bytes[this.bytePtr + 2] & 0xff))) >>> (8 - this.bitPtr)) & 0xffff);
+ };
+
+
+ /**
+ * Reads n bits out of the stream, consuming them (moving the bit pointer).
+ * @param {number} n The number of bits to read.
+ * @return {number} The read bits, as an unsigned number.
+ */
+ bitjs.io.BitStream.prototype.readBits = function(n) {
+ return this.peekBits(n, true);
+ };
+
+
+ /**
+ * This returns n bytes as a sub-array, advancing the pointer if movePointers
+ * is true. Only use this for uncompressed blocks as this throws away remaining
+ * bits in the current byte.
+ * @param {number} n The number of bytes to peek.
+ * @param {boolean=} movePointers Whether to move the pointer, defaults false.
+ * @return {Uint8Array} The subarray.
+ */
+ bitjs.io.BitStream.prototype.peekBytes = function(n, movePointers) {
+ if (n <= 0 || typeof n != typeof 1) {
+ return 0;
+ }
+
+ // from http://tools.ietf.org/html/rfc1951#page-11
+ // "Any bits of input up to the next byte boundary are ignored."
+ while (this.bitPtr !== 0) {
+ this.readBits(1);
+ }
+
+ movePointers = movePointers || false;
+ var bytePtr = this.bytePtr;
+ // var bitPtr = this.bitPtr;
+
+ var result = this.bytes.subarray(bytePtr, bytePtr + n);
+
+ if (movePointers) {
+ this.bytePtr += n;
+ }
+
+ return result;
+ };
+
+
+ /**
+ * @param {number} n The number of bytes to read.
+ * @return {Uint8Array} The subarray.
+ */
+ bitjs.io.BitStream.prototype.readBytes = function(n) {
+ return this.peekBytes(n, true);
+ };
+
+
+ /**
+ * This object allows you to peek and consume bytes as numbers and strings
+ * out of an ArrayBuffer. In this buffer, everything must be byte-aligned.
+ *
+ * @param {ArrayBuffer} ab The ArrayBuffer object.
+ * @param {number=} optOffset The offset into the ArrayBuffer
+ * @param {number=} optLength The length of this BitStream
+ * @constructor
+ */
+ bitjs.io.ByteStream = function(ab, optOffset, optLength) {
+ var offset = optOffset || 0;
+ var length = optLength || ab.byteLength;
+ this.bytes = new Uint8Array(ab, offset, length);
+ this.ptr = 0;
+ };
+
+
+ /**
+ * Peeks at the next n bytes as an unsigned number but does not advance the
+ * pointer
+ * TODO: This apparently cannot read more than 4 bytes as a number?
+ * @param {number} n The number of bytes to peek at.
+ * @return {number} The n bytes interpreted as an unsigned number.
+ */
+ bitjs.io.ByteStream.prototype.peekNumber = function(n) {
+ // TODO: return error if n would go past the end of the stream?
+ if (n <= 0 || typeof n !== typeof 1) {
+ return -1;
+ }
+
+ var result = 0;
+ // read from last byte to first byte and roll them in
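+ // e.g. peekNumber(2) over bytes [0x04, 0x03] yields 0x0304, i.e. the stream is read little-endian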
+ var curByte = this.ptr + n - 1;
+ while (curByte >= this.ptr) {
+ result <<= 8;
+ result |= this.bytes[curByte];
+ --curByte;
+ }
+ return result;
+ };
+
+
+ /**
+ * Returns the next n bytes as an unsigned number (or -1 on error)
+ * and advances the stream pointer n bytes.
+ * @param {number} n The number of bytes to read.
+ * @return {number} The n bytes interpreted as an unsigned number.
+ */
+ bitjs.io.ByteStream.prototype.readNumber = function(n) {
+ var num = this.peekNumber( n );
+ this.ptr += n;
+ return num;
+ };
+
+
+ /**
+ * Returns the next n bytes as a signed number but does not advance the
+ * pointer.
+ * @param {number} n The number of bytes to read.
+ * @return {number} The bytes interpreted as a signed number.
+ */
+ bitjs.io.ByteStream.prototype.peekSignedNumber = function(n) {
+ var num = this.peekNumber(n);
+ var HALF = Math.pow(2, (n * 8) - 1);
+ var FULL = HALF * 2;
+
+ if (num >= HALF) num -= FULL;
+
+ return num;
+ };
+
+
+ /**
+ * Returns the next n bytes as a signed number and advances the stream pointer.
+ * @param {number} n The number of bytes to read.
+ * @return {number} The bytes interpreted as a signed number.
+ */
+ bitjs.io.ByteStream.prototype.readSignedNumber = function(n) {
+ var num = this.peekSignedNumber(n);
+ this.ptr += n;
+ return num;
+ };
+
+
+ /**
+ * This returns n bytes as a sub-array, advancing the pointer if movePointers
+ * is true.
+ * @param {number} n The number of bytes to read.
+ * @param {boolean} movePointers Whether to move the pointers.
+ * @return {Uint8Array} The subarray.
+ */
+ bitjs.io.ByteStream.prototype.peekBytes = function(n, movePointers) {
+ if (n <= 0 || typeof n != typeof 1) {
+ return null;
+ }
+
+ var result = this.bytes.subarray(this.ptr, this.ptr + n);
+
+ if (movePointers) {
+ this.ptr += n;
+ }
+
+ return result;
+ };
+
+
+ /**
+ * Reads the next n bytes as a sub-array.
+ * @param {number} n The number of bytes to read.
+ * @return {Uint8Array} The subarray.
+ */
+ bitjs.io.ByteStream.prototype.readBytes = function(n) {
+ return this.peekBytes(n, true);
+ };
+
+
+ /**
+ * Peeks at the next n bytes as a string but does not advance the pointer.
+ * @param {number} n The number of bytes to peek at.
+ * @return {string} The next n bytes as a string.
+ */
+ bitjs.io.ByteStream.prototype.peekString = function(n) {
+ if (n <= 0 || typeof n != typeof 1) {
+ return "";
+ }
+
+ var result = "";
+ for (var p = this.ptr, end = this.ptr + n; p < end; ++p) {
+ result += String.fromCharCode(this.bytes[p]);
+ }
+ return result;
+ };
+
+
+ /**
+ * Returns the next n bytes as an ASCII string and advances the stream pointer
+ * n bytes.
+ * @param {number} n The number of bytes to read.
+ * @return {string} The next n bytes as a string.
+ */
+ bitjs.io.ByteStream.prototype.readString = function(n) {
+ var strToReturn = this.peekString(n);
+ this.ptr += n;
+ return strToReturn;
+ };
+
+
+ /**
+ * A write-only Byte buffer which uses a Uint8 Typed Array as a backing store.
+ * @param {number} numBytes The number of bytes to allocate.
+ * @constructor
+ */
+ bitjs.io.ByteBuffer = function(numBytes) {
+ if (typeof numBytes !== typeof 1 || numBytes <= 0) {
+ throw "Error! ByteBuffer initialized with '" + numBytes + "'";
+ }
+ this.data = new Uint8Array(numBytes);
+ this.ptr = 0;
+ };
+
+
+ /**
+ * @param {number} b The byte to insert.
+ */
+ bitjs.io.ByteBuffer.prototype.insertByte = function(b) {
+ // TODO: throw if byte is invalid?
+ this.data[this.ptr++] = b;
+ };
+
+
+ /**
+ * @param {Array.<number>|Uint8Array|Int8Array} bytes The bytes to insert.
+ */
+ bitjs.io.ByteBuffer.prototype.insertBytes = function(bytes) {
+ // TODO: throw if bytes is invalid?
+ this.data.set(bytes, this.ptr);
+ this.ptr += bytes.length;
+ };
+
+
+ /**
+ * Writes an unsigned number into the next n bytes. If the number is too large
+ * to fit into n bytes or is negative, an error is thrown.
+ * @param {number} num The unsigned number to write.
+ * @param {number} numBytes The number of bytes to write the number into.
+ */
+ bitjs.io.ByteBuffer.prototype.writeNumber = function(num, numBytes) {
+ if (numBytes < 1) {
+ throw "Trying to write into too few bytes: " + numBytes;
+ }
+ if (num < 0) {
+ throw "Trying to write a negative number (" + num +
+ ") as an unsigned number to an ArrayBuffer";
+ }
+ if (num > (Math.pow(2, numBytes * 8) - 1)) {
+ throw "Trying to write " + num + " into only " + numBytes + " bytes";
+ }
+
+ // Roll 8-bits at a time into an array of bytes.
+ var bytes = [];
+ while (numBytes-- > 0) {
+ var eightBits = num & 255;
+ bytes.push(eightBits);
+ num >>= 8;
+ }
+
+ this.insertBytes(bytes);
+ };
+
+
+ /**
+ * Writes a signed number into the next n bytes. If the number is too large
+ * to fit into n bytes, an error is thrown.
+ * @param {number} num The signed number to write.
+ * @param {number} numBytes The number of bytes to write the number into.
+ */
+ bitjs.io.ByteBuffer.prototype.writeSignedNumber = function(num, numBytes) {
+ if (numBytes < 1) {
+ throw "Trying to write into too few bytes: " + numBytes;
+ }
+
+ var HALF = Math.pow(2, (numBytes * 8) - 1);
+ if (num >= HALF || num < -HALF) {
+ throw "Trying to write " + num + " into only " + numBytes + " bytes";
+ }
+
+ // Roll 8-bits at a time into an array of bytes.
+ var bytes = [];
+ while (numBytes-- > 0) {
+ var eightBits = num & 255;
+ bytes.push(eightBits);
+ num >>= 8;
+ }
+
+ this.insertBytes(bytes);
+ };
+
+
+ /**
+ * @param {string} str The ASCII string to write.
+ */
+ bitjs.io.ByteBuffer.prototype.writeASCIIString = function(str) {
+ for (var i = 0; i < str.length; ++i) {
+ var curByte = str.charCodeAt(i);
+ if (curByte < 0 || curByte > 255) {
+ throw "Trying to write a non-ASCII string!";
+ }
+ this.insertByte(curByte);
+ }
+ };
+})();
diff --git a/cps/static/js/kthoom.js b/cps/static/js/kthoom.js
new file mode 100644
index 00000000..17ee8097
--- /dev/null
+++ b/cps/static/js/kthoom.js
@@ -0,0 +1,592 @@
+/*
+ * kthoom.js
+ *
+ * Licensed under the MIT License
+ *
+ * Copyright(c) 2011 Google Inc.
+ * Copyright(c) 2011 antimatter15
+*/
+
+/* Reference Documentation:
+
+ * Web Workers: http://www.whatwg.org/specs/web-workers/current-work/
+ * Web Workers in Mozilla: https://developer.mozilla.org/En/Using_web_workers
+ * File API (FileReader): http://www.w3.org/TR/FileAPI/
+ * Typed Arrays: http://www.khronos.org/registry/typedarray/specs/latest/#6
+
+*/
+/* global bitjs */
+
+if (window.opera) {
+ window.console.log = function(str) {
+ opera.postError(str);
+ };
+}
+
+var kthoom;
+
+// gets the element with the given id
+function getElem(id) {
+ if (document.documentElement.querySelector) {
+ // querySelector lookup
+ return document.body.querySelector("#" + id);
+ }
+ // getElementById lookup
+ return document.getElementById(id);
+}
+
+if (window.kthoom === undefined) {
+ kthoom = {};
+}
+
+// key codes
+kthoom.Key = {
+ ESCAPE: 27,
+ LEFT: 37,
+ UP: 38,
+ RIGHT: 39,
+ DOWN: 40,
+ A: 65, B: 66, C: 67, D: 68, E: 69, F: 70, G: 71, H: 72, I: 73, J: 74, K: 75, L: 76, M: 77,
+ N: 78, O: 79, P: 80, Q: 81, R: 82, S: 83, T: 84, U: 85, V: 86, W: 87, X: 88, Y: 89, Z: 90,
+ QUESTION_MARK: 191,
+ LEFT_SQUARE_BRACKET: 219,
+ RIGHT_SQUARE_BRACKET: 221
+};
+
+// The rotation orientation of the comic.
+kthoom.rotateTimes = 0;
+
+// global variables
+var unarchiver = null;
+var currentImage = 0;
+var imageFiles = [];
+var imageFilenames = [];
+var totalImages = 0;
+var lastCompletion = 0;
+
+var hflip = false, vflip = false, fitMode = kthoom.Key.B;
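+// fitMode (see updateScale): B constrains both width and height, W fits to width, H fits to height, N applies no scaling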
+var canKeyNext = true, canKeyPrev = true;
+
+kthoom.saveSettings = function() {
+ localStorage.kthoomSettings = JSON.stringify({
+ rotateTimes: kthoom.rotateTimes,
+ hflip: hflip,
+ vflip: vflip,
+ fitMode: fitMode
+ });
+};
+
+kthoom.loadSettings = function() {
+ try {
+ if (localStorage.kthoomSettings.length < 10){
+ return;
+ }
+ var s = JSON.parse(localStorage.kthoomSettings);
+ kthoom.rotateTimes = s.rotateTimes;
+ hflip = s.hflip;
+ vflip = s.vflip;
+ fitMode = s.fitMode;
+ } catch (err) {
+ alert("Error load settings");
+ }
+};
+
+var createURLFromArray = function(array, mimeType) {
+ var offset = array.byteOffset, len = array.byteLength;
+ var url;
+ var blob;
+
+ // TODO: Move all this browser support testing to a common place
+ // and do it just once.
+
+ // Blob constructor, see http://dev.w3.org/2006/webapi/FileAPI/#dfn-Blob.
+ if (typeof Blob === "function") {
+ blob = new Blob([array], {type: mimeType});
+ } else {
+ throw "Browser support for Blobs is missing.";
+ }
+
+ if (blob.slice) {
+ blob = blob.slice(offset, offset + len, mimeType);
+ } else {
+ throw "Browser support for Blobs is missing.";
+ }
+
+ if ((typeof URL !== "function" && typeof URL !== "object") ||
+ typeof URL.createObjectURL !== "function") {
+ throw "Browser support for Object URLs is missing";
+ }
+
+ return URL.createObjectURL(blob);
+};
+
+
+// Stores an image filename and its data: URI.
+// TODO: investigate if we really need to store as base64 (leave off ;base64 and just
+// non-safe URL characters are encoded as %xx ?)
+// This would save 25% on memory since base64-encoded strings are 4/3 the size of the binary
+kthoom.ImageFile = function(file) {
+ this.filename = file.filename;
+ var fileExtension = file.filename.split(".").pop().toLowerCase();
+ var mimeType = fileExtension === "png" ? "image/png" :
+ (fileExtension === "jpg" || fileExtension === "jpeg") ? "image/jpeg" :
+ fileExtension === "gif" ? "image/gif" : null;
+ this.dataURI = createURLFromArray(file.fileData, mimeType);
+ this.data = file;
+};
+
+
+kthoom.initProgressMeter = function() {
+ var svgns = "http://www.w3.org/2000/svg";
+ var pdiv = $("#progress")[0];
+ var svg = document.createElementNS(svgns, "svg");
+ svg.style.width = "100%";
+ svg.style.height = "100%";
+
+ var defs = document.createElementNS(svgns, "defs");
+
+ var patt = document.createElementNS(svgns, "pattern");
+ patt.id = "progress_pattern";
+ patt.setAttribute("width", "30");
+ patt.setAttribute("height", "20");
+ patt.setAttribute("patternUnits", "userSpaceOnUse");
+
+ var rect = document.createElementNS(svgns, "rect");
+ rect.setAttribute("width", "100%");
+ rect.setAttribute("height", "100%");
+ rect.setAttribute("fill", "#cc2929");
+
+ var poly = document.createElementNS(svgns, "polygon");
+ poly.setAttribute("fill", "yellow");
+ poly.setAttribute("points", "15,0 30,0 15,20 0,20");
+
+ patt.appendChild(rect);
+ patt.appendChild(poly);
+ defs.appendChild(patt);
+
+ svg.appendChild(defs);
+
+ var g = document.createElementNS(svgns, "g");
+
+ var outline = document.createElementNS(svgns, "rect");
+ outline.setAttribute("y", "1");
+ outline.setAttribute("width", "100%");
+ outline.setAttribute("height", "15");
+ outline.setAttribute("fill", "#777");
+ outline.setAttribute("stroke", "white");
+ outline.setAttribute("rx", "5");
+ outline.setAttribute("ry", "5");
+ g.appendChild(outline);
+
+ var title = document.createElementNS(svgns, "text");
+ title.id = "progress_title";
+ title.appendChild(document.createTextNode("0%"));
+ title.setAttribute("y", "13");
+ title.setAttribute("x", "99.5%");
+ title.setAttribute("fill", "white");
+ title.setAttribute("font-size", "12px");
+ title.setAttribute("text-anchor", "end");
+ g.appendChild(title);
+
+ var meter = document.createElementNS(svgns, "rect");
+ meter.id = "meter";
+ meter.setAttribute("width", "0%");
+ meter.setAttribute("height", "17");
+ meter.setAttribute("fill", "url(#progress_pattern)");
+ meter.setAttribute("rx", "5");
+ meter.setAttribute("ry", "5");
+
+ var meter2 = document.createElementNS(svgns, "rect");
+ meter2.id = "meter2";
+ meter2.setAttribute("width", "0%");
+ meter2.setAttribute("height", "17");
+ meter2.setAttribute("opacity", "0.8");
+ meter2.setAttribute("fill", "#007fff");
+ meter2.setAttribute("rx", "5");
+ meter2.setAttribute("ry", "5");
+
+ g.appendChild(meter);
+ g.appendChild(meter2);
+
+ var page = document.createElementNS(svgns, "text");
+ page.id = "page";
+ page.appendChild(document.createTextNode("0/0"));
+ page.setAttribute("y", "13");
+ page.setAttribute("x", "0.5%");
+ page.setAttribute("fill", "white");
+ page.setAttribute("font-size", "12px");
+ g.appendChild(page);
+
+
+ svg.appendChild(g);
+ pdiv.appendChild(svg);
+ var l;
+ svg.onclick = function(e) {
+ for (var x = pdiv, l = 0; x !== document.documentElement; x = x.parentNode) l += x.offsetLeft;
+ var page = Math.max(1, Math.ceil(((e.clientX - l) / pdiv.offsetWidth) * totalImages)) - 1;
+ currentImage = page;
+ updatePage();
+ };
+};
+
+kthoom.setProgressMeter = function(pct, optLabel) {
+ pct = (pct * 100);
+ var part = 1 / totalImages;
+ var remain = ((pct - lastCompletion) / 100) / part;
+ var fract = Math.min(1, remain);
+ var smartpct = ((imageFiles.length / totalImages) + (fract * part)) * 100;
+ if (totalImages === 0) smartpct = pct;
+
+ // + Math.min((pct - lastCompletion), 100/totalImages * 0.9 + (pct - lastCompletion - 100/totalImages)/2, 100/totalImages);
+ var oldval = parseFloat(getElem("meter").getAttribute("width"));
+ if (isNaN(oldval)) oldval = 0;
+ var weight = 0.5;
+ smartpct = ((weight * smartpct) + ((1 - weight) * oldval));
+ if (pct === 100) smartpct = 100;
+
+ if (!isNaN(smartpct)) {
+ getElem("meter").setAttribute("width", smartpct + "%");
+ }
+ var title = getElem("progress_title");
+ while (title.firstChild) title.removeChild(title.firstChild);
+
+ var labelText = pct.toFixed(2) + "% " + imageFiles.length + "/" + totalImages + "";
+ if (optLabel) {
+ labelText = optLabel + " " + labelText;
+ }
+ title.appendChild(document.createTextNode(labelText));
+
+ getElem("meter2").setAttribute("width",
+ 100 * (totalImages === 0 ? 0 : ((currentImage + 1) / totalImages)) + "%");
+
+ var titlePage = getElem("page");
+ while (titlePage.firstChild) titlePage.removeChild(titlePage.firstChild);
+ titlePage.appendChild(document.createTextNode( (currentImage + 1) + "/" + totalImages ));
+
+ if (pct > 0) {
+ //getElem('nav').className = '';
+ getElem("progress").className = "";
+ }
+};
+
+function loadFromArrayBuffer(ab) {
+ var start = (new Date).getTime();
+ var h = new Uint8Array(ab, 0, 10);
+ var pathToBitJS = "../../static/js/";
+ if (h[0] === 0x52 && h[1] === 0x61 && h[2] === 0x72 && h[3] === 0x21) { //Rar!
+ unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS);
+ } else if (h[0] === 80 && h[1] === 75) { //PK (Zip)
+ unarchiver = new bitjs.archive.Unzipper(ab, pathToBitJS);
+ } else { // Try with tar
+ unarchiver = new bitjs.archive.Untarrer(ab, pathToBitJS);
+ }
+ // Listen for UnarchiveEvents.
+ if (unarchiver) {
+ unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.PROGRESS,
+ function(e) {
+ var percentage = e.currentBytesUnarchived / e.totalUncompressedBytesInArchive;
+ totalImages = e.totalFilesInArchive;
+ kthoom.setProgressMeter(percentage, "Unzipping");
+ // display nav
+ lastCompletion = percentage * 100;
+ });
+ unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.EXTRACT,
+ function(e) {
+ // convert DecompressedFile into a bunch of ImageFiles
+ if (e.unarchivedFile) {
+ var f = e.unarchivedFile;
+ // add any new pages based on the filename
+ if (imageFilenames.indexOf(f.filename) === -1) {
+ imageFilenames.push(f.filename);
+ imageFiles.push(new kthoom.ImageFile(f));
+ }
+ }
+ // display first page if we haven't yet
+ if (imageFiles.length === currentImage + 1) {
+ updatePage();
+ }
+ });
+ unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.FINISH,
+ function() {
+ var diff = ((new Date).getTime() - start) / 1000;
+ console.log("Unarchiving done in " + diff + "s");
+ });
+ unarchiver.start();
+ } else {
+ alert("Some error");
+ }
+}
+
+
+function updatePage() {
+ var title = getElem("page");
+ while (title.firstChild) title.removeChild(title.firstChild);
+ title.appendChild(document.createTextNode( (currentImage + 1 ) + "/" + totalImages ));
+
+ getElem("meter2").setAttribute("width",
+ 100 * (totalImages === 0 ? 0 : ((currentImage + 1 ) / totalImages)) + "%");
+ if (imageFiles[currentImage]) {
+ setImage(imageFiles[currentImage].dataURI);
+ } else {
+ setImage("loading");
+ }
+}
+
+function setImage(url) {
+ var canvas = $("#mainImage")[0];
+ var x = $("#mainImage")[0].getContext("2d");
+ $("#mainText").hide();
+ if (url === "loading") {
+ updateScale(true);
+ canvas.width = innerWidth - 100;
+ canvas.height = 200;
+ x.fillStyle = "red";
+ x.font = "50px sans-serif";
+ x.strokeStyle = "black";
+ x.fillText("Loading Page #" + (currentImage + 1), 100, 100);
+ } else {
+ if ($("body").css("scrollHeight") / innerHeight > 1) {
+ $("body").css("overflowY", "scroll");
+ }
+
+ var img = new Image();
+ img.onerror = function() {
+ canvas.width = innerWidth - 100;
+ canvas.height = 300;
+ updateScale(true);
+ x.fillStyle = "orange";
+ x.font = "50px sans-serif";
+ x.strokeStyle = "black";
+ x.fillText("Page #" + (currentImage + 1) + " (" +
+ imageFiles[currentImage].filename + ")", 100, 100);
+ x.fillStyle = "red";
+ x.fillText("Is corrupt or not an image", 100, 200);
+
+ var xhr = new XMLHttpRequest();
+ if (/(html|htm)$/.test(imageFiles[currentImage].filename)) {
+ xhr.open("GET", url, true);
+ xhr.onload = function() {
+ //document.getElementById('mainText').style.display = '';
+ $("#mainText").css("display", "");
+ $("#mainText").innerHTML("");
+ }
+ xhr.send(null);
+ } else if (!/(jpg|jpeg|png|gif)$/.test(imageFiles[currentImage].filename) && imageFiles[currentImage].data.uncompressedSize < 10 * 1024) {
+ xhr.open("GET", url, true);
+ xhr.onload = function() {
+ $("#mainText").css("display", "");
+ $("#mainText").innerText(xhr.responseText);
+ };
+ xhr.send(null);
+ }
+ };
+ img.onload = function() {
+ var h = img.height,
+ w = img.width,
+ sw = w,
+ sh = h;
+ kthoom.rotateTimes = (4 + kthoom.rotateTimes) % 4;
+ x.save();
+ if (kthoom.rotateTimes % 2 === 1) {
+ sh = w;
+ sw = h;
+ }
+ canvas.height = sh;
+ canvas.width = sw;
+ x.translate(sw / 2, sh / 2);
+ x.rotate(Math.PI / 2 * kthoom.rotateTimes);
+ x.translate(-w / 2, -h / 2);
+ if (vflip) {
+ x.scale(1, -1);
+ x.translate(0, -h);
+ }
+ if (hflip) {
+ x.scale(-1, 1);
+ x.translate(-w, 0);
+ }
+ canvas.style.display = "none";
+ scrollTo(0, 0);
+ x.drawImage(img, 0, 0);
+
+ updateScale();
+
+ canvas.style.display = "";
+ $("body").css("overflowY", "");
+ x.restore();
+ };
+ img.src = url;
+ }
+}
+
+function showPrevPage() {
+ currentImage--;
+ if (currentImage < 0) {
+ // Freeze on the current page.
+ currentImage++;
+ } else {
+ updatePage();
+ }
+}
+
+function showNextPage() {
+ currentImage++;
+ if (currentImage >= Math.max(totalImages, imageFiles.length)) {
+ // Freeze on the current page.
+ currentImage--;
+ } else {
+ updatePage();
+ }
+}
+
+function updateScale(clear) {
+ var mainImageStyle = getElem("mainImage").style;
+ mainImageStyle.width = "";
+ mainImageStyle.height = "";
+ mainImageStyle.maxWidth = "";
+ mainImageStyle.maxHeight = "";
+ var maxheight = innerHeight - 15;
+ if (!/main/.test(getElem("titlebar").className)) {
+ maxheight -= 25;
+ }
+ if (clear || fitMode === kthoom.Key.N) {
+ } else if (fitMode === kthoom.Key.B) {
+ mainImageStyle.maxWidth = "100%";
+ mainImageStyle.maxHeight = maxheight + "px";
+ } else if (fitMode === kthoom.Key.H) {
+ mainImageStyle.height = maxheight + "px";
+ } else if (fitMode === kthoom.Key.W) {
+ mainImageStyle.width = "100%";
+ }
+ kthoom.saveSettings();
+}
+
+function keyHandler(evt) {
+ var code = evt.keyCode;
+
+ if ($("#progress").css("display") === "none"){
+ return;
+ }
+ canKeyNext = (($("body").prop("offsetWidth") + $("body").prop("scrollLeft")) / $("body").prop("scrollWidth")) >= 1;
+ canKeyPrev = (scrollX <= 0);
+
+ if (evt.ctrlKey || evt.shiftKey || evt.metaKey) return;
+ switch (code) {
+ case kthoom.Key.LEFT:
+ if (canKeyPrev) showPrevPage();
+ break;
+ case kthoom.Key.RIGHT:
+ if (canKeyNext) showNextPage();
+ break;
+ case kthoom.Key.L:
+ kthoom.rotateTimes--;
+ if (kthoom.rotateTimes < 0) {
+ kthoom.rotateTimes = 3;
+ }
+ updatePage();
+ break;
+ case kthoom.Key.R:
+ kthoom.rotateTimes++;
+ if (kthoom.rotateTimes > 3) {
+ kthoom.rotateTimes = 0;
+ }
+ updatePage();
+ break;
+ case kthoom.Key.F:
+ if (!hflip && !vflip) {
+ hflip = true;
+ } else if (hflip === true) {
+ vflip = true;
+ hflip = false;
+ } else if (vflip === true) {
+ vflip = false;
+ }
+ updatePage();
+ break;
+ case kthoom.Key.W:
+ fitMode = kthoom.Key.W;
+ updateScale();
+ break;
+ case kthoom.Key.H:
+ fitMode = kthoom.Key.H;
+ updateScale();
+ break;
+ case kthoom.Key.B:
+ fitMode = kthoom.Key.B;
+ updateScale();
+ break;
+ case kthoom.Key.N:
+ fitMode = kthoom.Key.N;
+ updateScale();
+ break;
+ default:
+ //console.log('KeyCode = ' + code);
+ break;
+ }
+}
+
+function init(filename) {
+ if (!window.FileReader) {
+ alert("Sorry, kthoom will not work with your browser because it does not support the File API. Please try kthoom with Chrome 12+ or Firefox 7+");
+ } else {
+ var request = new XMLHttpRequest();
+ request.open("GET", filename);
+ request.responseType = "arraybuffer";
+ request.setRequestHeader("X-Test", "test1");
+ request.setRequestHeader("X-Test", "test2");
+ request.addEventListener("load", function(event) {
+ if (request.status >= 200 && request.status < 300) {
+ loadFromArrayBuffer(request.response);
+ } else {
+ console.warn(request.statusText, request.responseText);
+ }
+ });
+ request.send();
+ kthoom.initProgressMeter();
+ document.body.className += /AppleWebKit/.test(navigator.userAgent) ? " webkit" : "";
+ updateScale(true);
+ kthoom.loadSettings();
+ $(document).keydown(keyHandler);
+
+ $(window).resize(function() {
+ var f = (screen.width - innerWidth < 4 && screen.height - innerHeight < 4);
+ getElem("titlebar").className = f ? "main" : "";
+ updateScale();
+ });
+
+ $("#mainImage").click(function(evt) {
+ // Firefox does not support offsetX/Y so we have to manually calculate
+ // where the user clicked in the image.
+ var mainContentWidth = $("#mainContent").width();
+ var mainContentHeight = $("#mainContent").height();
+ var comicWidth = evt.target.clientWidth;
+ var comicHeight = evt.target.clientHeight;
+ var offsetX = (mainContentWidth - comicWidth) / 2;
+ var offsetY = (mainContentHeight - comicHeight) / 2;
+ var clickX = !!evt.offsetX ? evt.offsetX : (evt.clientX - offsetX);
+ var clickY = !!evt.offsetY ? evt.offsetY : (evt.clientY - offsetY);
+
+ // Determine if the user clicked/tapped the left side or the
+ // right side of the page.
+ var clickedPrev = false;
+ switch (kthoom.rotateTimes) {
+ case 0:
+ clickedPrev = clickX < (comicWidth / 2);
+ break;
+ case 1:
+ clickedPrev = clickY < (comicHeight / 2);
+ break;
+ case 2:
+ clickedPrev = clickX > (comicWidth / 2);
+ break;
+ case 3:
+ clickedPrev = clickY > (comicHeight / 2);
+ break;
+ }
+ if (clickedPrev) {
+ showPrevPage();
+ } else {
+ showNextPage();
+ }
+ });
+ }
+}
diff --git a/cps/static/js/libs/bootstrap-datepicker/locales/bootstrap-datepicker.it.min.js b/cps/static/js/libs/bootstrap-datepicker/locales/bootstrap-datepicker.it.min.js
new file mode 100644
index 00000000..cc30766f
--- /dev/null
+++ b/cps/static/js/libs/bootstrap-datepicker/locales/bootstrap-datepicker.it.min.js
@@ -0,0 +1 @@
+!function(a){a.fn.datepicker.dates.it={days:["Domenica","Lunedì","Martedì","Mercoledì","Giovedì","Venerdì","Sabato"],daysShort:["Dom","Lun","Mar","Mer","Gio","Ven","Sab"],daysMin:["Do","Lu","Ma","Me","Gi","Ve","Sa"],months:["Gennaio","Febbraio","Marzo","Aprile","Maggio","Giugno","Luglio","Agosto","Settembre","Ottobre","Novembre","Dicembre"],monthsShort:["Gen","Feb","Mar","Apr","Mag","Giu","Lug","Ago","Set","Ott","Nov","Dic"],today:"Oggi",monthsTitle:"Mesi",clear:"Cancella",weekStart:1,format:"dd/mm/yyyy"}}(jQuery);
\ No newline at end of file
diff --git a/cps/static/js/main.js b/cps/static/js/main.js
index 267c6d97..524abdb0 100644
--- a/cps/static/js/main.js
+++ b/cps/static/js/main.js
@@ -63,13 +63,13 @@ $(function() {
$(".load-more .row").infinitescroll({
debug: false,
navSelector : ".pagination",
- // selector for the paged navigation (it will be hidden)
+ // selector for the paged navigation (it will be hidden)
nextSelector : ".pagination a:last",
- // selector for the NEXT link (to page 2)
+ // selector for the NEXT link (to page 2)
itemSelector : ".load-more .book",
animate : true,
extraScrollPx: 300
- // selector for all items you'll retrieve
+ // selector for all items you'll retrieve
}, function(data) {
$(".load-more .row").isotope( "appended", $(data), null );
});
diff --git a/cps/static/js/reading/epub.js b/cps/static/js/reading/epub.js
new file mode 100644
index 00000000..169c207f
--- /dev/null
+++ b/cps/static/js/reading/epub.js
@@ -0,0 +1,43 @@
+/* global $, calibre, EPUBJS, ePubReader */
+
+(function() {
+ "use strict";
+
+ EPUBJS.filePath = calibre.filePath;
+ EPUBJS.cssPath = calibre.cssPath;
+
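+ // The initial bookmark list is seeded from the server-side value (calibre.bookmark);
+ // restore: true additionally lets epub.js resume from its locally saved position.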
+ var reader = ePubReader(calibre.bookUrl, {
+ restore: true,
+ bookmarks: calibre.bookmark ? [calibre.bookmark] : []
+ });
+
+ if (calibre.useBookmarks) {
+ reader.on("reader:bookmarked", updateBookmark.bind(reader, "add"));
+ reader.on("reader:unbookmarked", updateBookmark.bind(reader, "remove"));
+ } else {
+ $("#bookmark, #show-Bookmarks").remove();
+ }
+
+ /**
+ * @param {string} action - Add or remove bookmark
+ * @param {string|int} location - Location or zero
+ */
+ function updateBookmark(action, location) {
+ // Remove other bookmarks (there can only be one)
+ if (action === "add") {
+ this.settings.bookmarks.filter(function (bookmark) {
+ return bookmark && bookmark !== location;
+ }).map(function (bookmark) {
+ this.removeBookmark(bookmark);
+ }.bind(this));
+ }
+
+ // Save to database
+ $.ajax(calibre.bookmarkUrl, {
+ method: "post",
+ data: { bookmark: location || "" }
+ }).fail(function (xhr, status, error) {
+ alert(error);
+ });
+ }
+})();
diff --git a/cps/static/js/unrar.js b/cps/static/js/unrar.js
new file mode 100644
index 00000000..f32fd6fa
--- /dev/null
+++ b/cps/static/js/unrar.js
@@ -0,0 +1,891 @@
+/**
+ * unrar.js
+ *
+ * Copyright(c) 2011 Google Inc.
+ * Copyright(c) 2011 antimatter15
+ *
+ * Reference Documentation:
+ *
+ * http://kthoom.googlecode.com/hg/docs/unrar.html
+ */
+/* global bitjs, importScripts */
+
+// This file expects to be invoked as a Worker (see onmessage below).
+importScripts("io.js");
+importScripts("archive.js");
+
+// Progress variables.
+var currentFilename = "";
+var currentFileNumber = 0;
+var currentBytesUnarchivedInFile = 0;
+var currentBytesUnarchived = 0;
+var totalUncompressedBytesInArchive = 0;
+var totalFilesInArchive = 0;
+
+// Helper functions.
+var info = function(str) {
+ postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
+};
+var err = function(str) {
+ postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
+};
+var postProgress = function() {
+ postMessage(new bitjs.archive.UnarchiveProgressEvent(
+ currentFilename,
+ currentFileNumber,
+ currentBytesUnarchivedInFile,
+ currentBytesUnarchived,
+ totalUncompressedBytesInArchive,
+ totalFilesInArchive));
+};
+
+// shows a byte value as its hex representation
+var nibble = "0123456789ABCDEF";
+var byteValueToHexString = function(num) {
+ return nibble[num>>4] + nibble[num & 0xF];
+};
+var twoByteValueToHexString = function(num) {
+ return nibble[(num>>12) & 0xF] + nibble[(num>>8) & 0xF] + nibble[(num>>4) & 0xF] + nibble[num & 0xF];
+};
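+// e.g. byteValueToHexString(0x74) === "74" and twoByteValueToHexString(0x1A21) === "1A21"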
+
+
+// Volume Types
+// MARK_HEAD = 0x72;
+var MAIN_HEAD = 0x73,
+ FILE_HEAD = 0x74,
+ // COMM_HEAD = 0x75,
+ // AV_HEAD = 0x76,
+ // SUB_HEAD = 0x77,
+ // PROTECT_HEAD = 0x78,
+ // SIGN_HEAD = 0x79,
+ // NEWSUB_HEAD = 0x7a,
+ ENDARC_HEAD = 0x7b;
+
+// bstream is a bit stream
+var RarVolumeHeader = function(bstream) {
+
+ var headPos = bstream.bytePtr;
+ // byte 1,2
+ info("Rar Volume Header @" + bstream.bytePtr);
+
+ this.crc = bstream.readBits(16);
+ info(" crc=" + this.crc);
+
+ // byte 3
+ this.headType = bstream.readBits(8);
+ info(" headType=" + this.headType);
+
+ // Get flags
+ // bytes 4,5
+ this.flags = {};
+ this.flags.value = bstream.peekBits(16);
+
+ info(" flags=" + twoByteValueToHexString(this.flags.value));
+ switch (this.headType) {
+ case MAIN_HEAD:
+ this.flags.MHD_VOLUME = !!bstream.readBits(1);
+ this.flags.MHD_COMMENT = !!bstream.readBits(1);
+ this.flags.MHD_LOCK = !!bstream.readBits(1);
+ this.flags.MHD_SOLID = !!bstream.readBits(1);
+ this.flags.MHD_PACK_COMMENT = !!bstream.readBits(1);
+ this.flags.MHD_NEWNUMBERING = this.flags.MHD_PACK_COMMENT;
+ this.flags.MHD_AV = !!bstream.readBits(1);
+ this.flags.MHD_PROTECT = !!bstream.readBits(1);
+ this.flags.MHD_PASSWORD = !!bstream.readBits(1);
+ this.flags.MHD_FIRSTVOLUME = !!bstream.readBits(1);
+ this.flags.MHD_ENCRYPTVER = !!bstream.readBits(1);
+ bstream.readBits(6); // unused
+ break;
+ case FILE_HEAD:
+ this.flags.LHD_SPLIT_BEFORE = !!bstream.readBits(1); // 0x0001
+ this.flags.LHD_SPLIT_AFTER = !!bstream.readBits(1); // 0x0002
+ this.flags.LHD_PASSWORD = !!bstream.readBits(1); // 0x0004
+ this.flags.LHD_COMMENT = !!bstream.readBits(1); // 0x0008
+ this.flags.LHD_SOLID = !!bstream.readBits(1); // 0x0010
+ bstream.readBits(3); // unused
+ this.flags.LHD_LARGE = !!bstream.readBits(1); // 0x0100
+ this.flags.LHD_UNICODE = !!bstream.readBits(1); // 0x0200
+ this.flags.LHD_SALT = !!bstream.readBits(1); // 0x0400
+ this.flags.LHD_VERSION = !!bstream.readBits(1); // 0x0800
+ this.flags.LHD_EXTTIME = !!bstream.readBits(1); // 0x1000
+ this.flags.LHD_EXTFLAGS = !!bstream.readBits(1); // 0x2000
+ bstream.readBits(2); // unused
+ info(" LHD_SPLIT_BEFORE = " + this.flags.LHD_SPLIT_BEFORE);
+ break;
+ default:
+ bstream.readBits(16);
+ }
+
+ // byte 6,7
+ this.headSize = bstream.readBits(16);
+ info(" headSize=" + this.headSize);
+ switch (this.headType) {
+ case MAIN_HEAD:
+ this.highPosAv = bstream.readBits(16);
+ this.posAv = bstream.readBits(32);
+ if (this.flags.MHD_ENCRYPTVER) {
+ this.encryptVer = bstream.readBits(8);
+ }
+ info("Found MAIN_HEAD with highPosAv=" + this.highPosAv + ", posAv=" + this.posAv);
+ break;
+ case FILE_HEAD:
+ this.packSize = bstream.readBits(32);
+ this.unpackedSize = bstream.readBits(32);
+ this.hostOS = bstream.readBits(8);
+ this.fileCRC = bstream.readBits(32);
+ this.fileTime = bstream.readBits(32);
+ this.unpVer = bstream.readBits(8);
+ this.method = bstream.readBits(8);
+ this.nameSize = bstream.readBits(16);
+ this.fileAttr = bstream.readBits(32);
+
+ if (this.flags.LHD_LARGE) {
+ info("Warning: Reading in LHD_LARGE 64-bit size values");
+ this.HighPackSize = bstream.readBits(32);
+ this.HighUnpSize = bstream.readBits(32);
+ } else {
+ this.HighPackSize = 0;
+ this.HighUnpSize = 0;
+ if (this.unpackedSize == 0xffffffff) {
+ this.HighUnpSize = 0x7fffffff;
+ this.unpackedSize = 0xffffffff;
+ }
+ }
+ this.fullPackSize = 0;
+ this.fullUnpackSize = 0;
+ this.fullPackSize |= this.HighPackSize;
+ this.fullPackSize <<= 32;
+ this.fullPackSize |= this.packSize;
+
+ // read in filename
+
+ this.filename = bstream.readBytes(this.nameSize);
+ for (var _i = 0, _s = ""; _i < this.filename.length ; _i++) {
+ _s += String.fromCharCode(this.filename[_i]);
+ }
+
+ this.filename = _s;
+
+ if (this.flags.LHD_SALT) {
+ info("Warning: Reading in 64-bit salt value");
+ this.salt = bstream.readBits(64); // 8 bytes
+ }
+
+ if (this.flags.LHD_EXTTIME) {
+ // 16-bit flags
+ var extTimeFlags = bstream.readBits(16);
+
+ // this is adapted straight out of arcread.cpp, Archive::ReadHeader()
+ for (var I = 0; I < 4; ++I) {
+ var rmode = extTimeFlags >> ((3 - I) * 4);
+ if ((rmode & 8)==0)
+ continue;
+ if (I!=0) {
+ bstream.readBits(16);
+ }
+ var count = (rmode & 3);
+ for (var J = 0; J < count; ++J) {
+ bstream.readBits(8);
+ }
+ }
+ }
+
+ if (this.flags.LHD_COMMENT) {
+ info("Found a LHD_COMMENT");
+ }
+
+
+ while (headPos + this.headSize > bstream.bytePtr) bstream.readBits(1);
+
+ info("Found FILE_HEAD with packSize=" + this.packSize + ", unpackedSize= " + this.unpackedSize + ", hostOS=" + this.hostOS + ", unpVer=" + this.unpVer + ", method=" + this.method + ", filename=" + this.filename);
+
+ break;
+ default:
+ info("Found a header of type 0x" + byteValueToHexString(this.headType));
+ // skip the rest of the header bytes (for now)
+ bstream.readBytes( this.headSize - 7 );
+ break;
+ }
+};
+
+var BLOCK_LZ = 0;
+ // BLOCK_PPM = 1;
+
+var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
+ rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5],
+ rDBitLengthCounts = [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 0, 12],
+ rSDDecode = [0, 4, 8, 16, 32, 64, 128, 192],
+ rSDBits = [2,2,3, 4, 5, 6, 6, 6];
+
+var rDDecode = [0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32,
+ 48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 1536, 2048, 3072,
+ 4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152, 65536, 98304,
+ 131072, 196608, 262144, 327680, 393216, 458752, 524288, 589824,
+ 655360, 720896, 786432, 851968, 917504, 983040];
+
+var rDBits = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
+ 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14,
+ 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16];
+
+var rLOW_DIST_REP_COUNT = 16;
+
+var rNC = 299,
+ rDC = 60,
+ rLDC = 17,
+ rRC = 28,
+ rBC = 20,
+ rHUFF_TABLE_SIZE = (rNC + rDC + rRC + rLDC);
+
+var UnpBlockType = BLOCK_LZ;
+var UnpOldTable = new Array(rHUFF_TABLE_SIZE);
+
+var BD = { //bitdecode
+ DecodeLen: new Array(16),
+ DecodePos: new Array(16),
+ DecodeNum: new Array(rBC)
+};
+var LD = { //litdecode
+ DecodeLen: new Array(16),
+ DecodePos: new Array(16),
+ DecodeNum: new Array(rNC)
+};
+var DD = { //distdecode
+ DecodeLen: new Array(16),
+ DecodePos: new Array(16),
+ DecodeNum: new Array(rDC)
+};
+var LDD = { //low dist decode
+ DecodeLen: new Array(16),
+ DecodePos: new Array(16),
+ DecodeNum: new Array(rLDC)
+};
+var RD = { //rep decode
+ DecodeLen: new Array(16),
+ DecodePos: new Array(16),
+ DecodeNum: new Array(rRC)
+};
+
+var rBuffer;
+
+// read in Huffman tables for RAR
+function RarReadTables(bstream) {
+ var BitLength = new Array(rBC),
+ Table = new Array(rHUFF_TABLE_SIZE);
+
+ // before we start anything we need to get byte-aligned
+ bstream.readBits( (8 - bstream.bitPtr) & 0x7 );
+
+ if (bstream.readBits(1)) {
+ info("Error! PPM not implemented yet");
+ return;
+ }
+
+ if (!bstream.readBits(1)) { //discard old table
+ for (var i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
+ }
+
+ // read in bit lengths
+ for (var I = 0; I < rBC; ++I) {
+
+ var Length = bstream.readBits(4);
+ if (Length == 15) {
+ var ZeroCount = bstream.readBits(4);
+ if (ZeroCount == 0) {
+ BitLength[I] = 15;
+ }
+ else {
+ ZeroCount += 2;
+ while (ZeroCount-- > 0 && I < rBC)
+ BitLength[I++] = 0;
+ --I;
+ }
+ }
+ else {
+ BitLength[I] = Length;
+ }
+ }
+
+ // now all 20 bit lengths are obtained, we construct the Huffman Table:
+
+ RarMakeDecodeTables(BitLength, 0, BD, rBC);
+
+ var TableSize = rHUFF_TABLE_SIZE;
+ //console.log(DecodeLen, DecodePos, DecodeNum);
+ for (var i = 0; i < TableSize;) {
+ var num = RarDecodeNumber(bstream, BD);
+ if (num < 16) {
+ Table[i] = (num + UnpOldTable[i]) & 0xf;
+ i++;
+ } else if(num < 18) {
+ var N = (num == 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
+
+ while (N-- > 0 && i < TableSize) {
+ Table[i] = Table[i - 1];
+ i++;
+ }
+ } else {
+ var N = (num == 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
+
+ while (N-- > 0 && i < TableSize) {
+ Table[i++] = 0;
+ }
+ }
+ }
+
+ RarMakeDecodeTables(Table, 0, LD, rNC);
+ RarMakeDecodeTables(Table, rNC, DD, rDC);
+ RarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
+ RarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);
+
+ for (var i = UnpOldTable.length; i--;) {
+ UnpOldTable[i] = Table[i];
+ }
+ return true;
+}
+
+
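+// Decodes the next Huffman symbol from the bit stream using tables built by
+// RarMakeDecodeTables: DecodeLen[i] is the left-aligned 16-bit upper bound for
+// codes of length i, DecodePos[i] the index of the first symbol of that length,
+// and DecodeNum lists the symbols in canonical code order.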
+function RarDecodeNumber(bstream, dec) {
+ var DecodeLen = dec.DecodeLen, DecodePos = dec.DecodePos, DecodeNum = dec.DecodeNum;
+ var bitField = bstream.getBits() & 0xfffe;
+ //some sort of rolled out binary search
+ var bits = ((bitField < DecodeLen[8])?
+ ((bitField < DecodeLen[4])?
+ ((bitField < DecodeLen[2])?
+ ((bitField < DecodeLen[1])?1:2)
+ :((bitField < DecodeLen[3])?3:4))
+ :(bitField < DecodeLen[6])?
+ ((bitField < DecodeLen[5])?5:6)
+ :((bitField < DecodeLen[7])?7:8))
+ :((bitField < DecodeLen[12])?
+ ((bitField < DecodeLen[10])?
+ ((bitField < DecodeLen[9])?9:10)
+ :((bitField < DecodeLen[11])?11:12))
+ :(bitField < DecodeLen[14])?
+ ((bitField < DecodeLen[13])?13:14)
+ :15));
+ bstream.readBits(bits);
+ var N = DecodePos[bits] + ((bitField - DecodeLen[bits -1]) >>> (16 - bits));
+
+ return DecodeNum[N];
+}
+
+
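+// Builds canonical Huffman decode tables (DecodeLen/DecodePos/DecodeNum) from the
+// code lengths in BitLength[offset .. offset+size-1]; consumed by RarDecodeNumber.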
+function RarMakeDecodeTables(BitLength, offset, dec, size) {
+ var DecodeLen = dec.DecodeLen, DecodePos = dec.DecodePos, DecodeNum = dec.DecodeNum;
+ var LenCount = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
+ TmpPos = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
+ N = 0, M = 0;
+ for (var i = DecodeNum.length; i--;) DecodeNum[i] = 0;
+ for (var i = 0; i < size; i++) {
+ LenCount[BitLength[i + offset] & 0xF]++;
+ }
+ LenCount[0] = 0;
+ TmpPos[0] = 0;
+ DecodePos[0] = 0;
+ DecodeLen[0] = 0;
+
+ for (var I = 1; I < 16; ++I) {
+ N = 2 * (N+LenCount[I]);
+ M = (N << (15-I));
+ if (M > 0xFFFF)
+ M = 0xFFFF;
+ DecodeLen[I] = M;
+ DecodePos[I] = DecodePos[I-1] + LenCount[I-1];
+ TmpPos[I] = DecodePos[I];
+ }
+ for (I = 0; I < size; ++I)
+ if (BitLength[I + offset] != 0)
+ DecodeNum[ TmpPos[ BitLength[offset + I] & 0xF ]++] = I;
+}
+
+// TODO: implement
+function Unpack15(bstream, Solid) {
+ info("ERROR! RAR 1.5 compression not supported");
+}
+
+function Unpack20(bstream, Solid) {
+ var destUnpSize = rBuffer.data.length;
+ var oldDistPtr = 0;
+
+ RarReadTables20(bstream);
+ while (destUnpSize > rBuffer.ptr) {
+ var num = RarDecodeNumber(bstream, LD);
+ if (num < 256) {
+ rBuffer.insertByte(num);
+ continue;
+ }
+ if (num > 269) {
+ var Length = rLDecode[num -= 270] + 3;
+ if ((Bits = rLBits[num]) > 0) {
+ Length += bstream.readBits(Bits);
+ }
+ var DistNumber = RarDecodeNumber(bstream, DD);
+ var Distance = rDDecode[DistNumber] + 1;
+ if ((Bits = rDBits[DistNumber]) > 0) {
+ Distance += bstream.readBits(Bits);
+ }
+ if (Distance >= 0x2000) {
+ Length++;
+ if(Distance >= 0x40000) Length++;
+ }
+ lastLength = Length;
+ lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
+ RarCopyString(Length, Distance);
+ continue;
+ }
+ if (num == 269) {
+ RarReadTables20(bstream);
+
+ RarUpdateProgress()
+
+ continue;
+ }
+ if (num == 256) {
+ lastDist = rOldDist[oldDistPtr++ & 3] = lastDist;
+ RarCopyString(lastLength, lastDist);
+ continue;
+ }
+ if (num < 261) {
+ var Distance = rOldDist[(oldDistPtr - (num - 256)) & 3];
+ var LengthNumber = RarDecodeNumber(bstream, RD);
+ var Length = rLDecode[LengthNumber] +2;
+ if ((Bits = rLBits[LengthNumber]) > 0) {
+ Length += bstream.readBits(Bits);
+ }
+ if (Distance >= 0x101) {
+ Length++;
+ if (Distance >= 0x2000) {
+ Length++
+ if (Distance >= 0x40000) Length++;
+ }
+ }
+ lastLength = Length;
+ lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
+ RarCopyString(Length, Distance);
+ continue;
+ }
+ if (num < 270) {
+ var Distance = rSDDecode[num -= 261] + 1;
+ if ((Bits = rSDBits[num]) > 0) {
+ Distance += bstream.readBits(Bits);
+ }
+ lastLength = 2;
+ lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
+ RarCopyString(2, Distance);
+ continue;
+ }
+ }
+ RarUpdateProgress()
+}
+
+function RarUpdateProgress() {
+ var change = rBuffer.ptr - currentBytesUnarchivedInFile;
+ currentBytesUnarchivedInFile = rBuffer.ptr;
+ currentBytesUnarchived += change;
+ postProgress();
+}
+
+
+var rNC20 = 298,
+ rDC20 = 48,
+ rRC20 = 28,
+ rBC20 = 19,
+ rMC20 = 257;
+
+var UnpOldTable20 = new Array(rMC20 * 4);
+
+function RarReadTables20(bstream) {
+ var BitLength = new Array(rBC20);
+ var Table = new Array(rMC20 * 4);
+ var TableSize, N, I;
+ var AudioBlock = bstream.readBits(1);
+ if (!bstream.readBits(1))
+ for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = 0;
+ TableSize = rNC20 + rDC20 + rRC20;
+ for (var I = 0; I < rBC20; I++)
+ BitLength[I] = bstream.readBits(4);
+ RarMakeDecodeTables(BitLength, 0, BD, rBC20);
+ I = 0;
+ while (I < TableSize) {
+ var num = RarDecodeNumber(bstream, BD);
+ if (num < 16) {
+ Table[I] = num + UnpOldTable20[I] & 0xf;
+ I++;
+ } else if(num == 16) {
+ N = bstream.readBits(2) + 3;
+ while (N-- > 0 && I < TableSize) {
+ Table[I] = Table[I - 1];
+ I++;
+ }
+ } else {
+ if (num == 17) {
+ N = bstream.readBits(3) + 3;
+ } else {
+ N = bstream.readBits(7) + 11;
+ }
+ while (N-- > 0 && I < TableSize) {
+ Table[I++] = 0;
+ }
+ }
+ }
+ RarMakeDecodeTables(Table, 0, LD, rNC20);
+ RarMakeDecodeTables(Table, rNC20, DD, rDC20);
+ RarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20);
+ for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = Table[i];
+}
+
+var lowDistRepCount = 0, prevLowDist = 0;
+
+var rOldDist = [0,0,0,0];
+var lastDist;
+var lastLength;
+
+
+function Unpack29(bstream, Solid) {
+ // lazy initialize rDDecode and rDBits
+
+ var DDecode = new Array(rDC);
+ var DBits = new Array(rDC);
+
+ var Dist=0,BitLength=0,Slot=0;
+
+ for (var I = 0; I < rDBitLengthCounts.length; I++,BitLength++) {
+ for (var J = 0; J < rDBitLengthCounts[I]; J++,Slot++,Dist+=(1<<BitLength)) {
+ DDecode[Slot] = Dist;
+ DBits[Slot] = BitLength;
+ }
+ }
+
+ var Bits;
+
+ rOldDist = [0,0,0,0];
+
+ lastDist = 0;
+ lastLength = 0;
+
+ for (var i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
+
+ // read in Huffman tables
+ RarReadTables(bstream);
+
+ while (true) {
+ var num = RarDecodeNumber(bstream, LD);
+
+ if (num < 256) {
+ rBuffer.insertByte(num);
+ continue;
+ }
+ if (num >= 271) {
+ var Length = rLDecode[num -= 271] + 3;
+ if ((Bits = rLBits[num]) > 0) {
+ Length += bstream.readBits(Bits);
+ }
+ var DistNumber = RarDecodeNumber(bstream, DD);
+ var Distance = DDecode[DistNumber]+1;
+ if ((Bits = DBits[DistNumber]) > 0) {
+ if (DistNumber > 9) {
+ if (Bits > 4) {
+ Distance += ((bstream.getBits() >>> (20 - Bits)) << 4);
+ bstream.readBits(Bits - 4);
+ //todo: check this
+ }
+ if (lowDistRepCount > 0) {
+ lowDistRepCount--;
+ Distance += prevLowDist;
+ } else {
+ var LowDist = RarDecodeNumber(bstream, LDD);
+ if (LowDist == 16) {
+ lowDistRepCount = rLOW_DIST_REP_COUNT - 1;
+ Distance += prevLowDist;
+ } else {
+ Distance += LowDist;
+ prevLowDist = LowDist;
+ }
+ }
+ } else {
+ Distance += bstream.readBits(Bits);
+ }
+ }
+ if (Distance >= 0x2000) {
+ Length++;
+ if (Distance >= 0x40000) {
+ Length++;
+ }
+ }
+ RarInsertOldDist(Distance);
+ RarInsertLastMatch(Length, Distance);
+ RarCopyString(Length, Distance);
+ continue;
+ }
+ if (num == 256) {
+ if (!RarReadEndOfBlock(bstream)) break;
+ continue;
+ }
+ if (num == 257) {
+ //console.log("READVMCODE");
+ if (!RarReadVMCode(bstream)) break;
+ continue;
+ }
+ if (num == 258) {
+ if (lastLength != 0) {
+ RarCopyString(lastLength, lastDist);
+ }
+ continue;
+ }
+ if (num < 263) {
+ var DistNum = num - 259;
+ var Distance = rOldDist[DistNum];
+
+ for (var I = DistNum; I > 0; I--) {
+ rOldDist[I] = rOldDist[I-1];
+ }
+ rOldDist[0] = Distance;
+
+ var LengthNumber = RarDecodeNumber(bstream, RD);
+ var Length = rLDecode[LengthNumber] + 2;
+ if ((Bits = rLBits[LengthNumber]) > 0) {
+ Length += bstream.readBits(Bits);
+ }
+ RarInsertLastMatch(Length, Distance);
+ RarCopyString(Length, Distance);
+ continue;
+ }
+ if (num < 272) {
+ var Distance = rSDDecode[num -= 263] + 1;
+ if ((Bits = rSDBits[num]) > 0) {
+ Distance += bstream.readBits(Bits);
+ }
+ RarInsertOldDist(Distance);
+ RarInsertLastMatch(2, Distance);
+ RarCopyString(2, Distance);
+ continue;
+ }
+ }
+ RarUpdateProgress()
+}
+
+function RarReadEndOfBlock(bstream) {
+
+ RarUpdateProgress()
+
+ var NewTable = false, NewFile = false;
+ if (bstream.readBits(1)) {
+ NewTable = true;
+ } else {
+ NewFile = true;
+ NewTable = !!bstream.readBits(1);
+ }
+ //tablesRead = !NewTable;
+ return !(NewFile || NewTable && !RarReadTables(bstream));
+}
+
+
+function RarReadVMCode(bstream) {
+ var FirstByte = bstream.readBits(8);
+ var Length = (FirstByte & 7) + 1;
+ if (Length == 7) {
+ Length = bstream.readBits(8) + 7;
+ } else if(Length == 8) {
+ Length = bstream.readBits(16);
+ }
+ var vmCode = [];
+ for(var I = 0; I < Length; I++) {
+ // do something here with checking readbuf
+ vmCode.push(bstream.readBits(8));
+ }
+ return RarAddVMCode(FirstByte, vmCode, Length);
+}
+
+function RarAddVMCode(firstByte, vmCode, length) {
+ //console.log(vmCode);
+ if (vmCode.length > 0) {
+ info("Error! RarVM not supported yet!");
+ }
+ return true;
+}
+
+function RarInsertLastMatch(length, distance) {
+ lastDist = distance;
+ lastLength = length;
+}
+
+function RarInsertOldDist(distance) {
+ rOldDist.splice(3,1);
+ rOldDist.splice(0,0,distance);
+}
+
+//this is the real function, the other one is for debugging
+function RarCopyString(length, distance) {
+ var destPtr = rBuffer.ptr - distance;
+ if(destPtr < 0){
+ var l = rOldBuffers.length;
+ while(destPtr < 0){
+ destPtr = rOldBuffers[--l].data.length + destPtr;
+ }
+ //TODO: this assumes we never need to read beyond file boundaries
+ while(length--) rBuffer.insertByte(rOldBuffers[l].data[destPtr++]);
+ }
+ if (length > distance) {
+ while(length--) rBuffer.insertByte(rBuffer.data[destPtr++]);
+ } else {
+ rBuffer.insertBytes(rBuffer.data.subarray(destPtr, destPtr + length));
+ }
+}
+
+var rOldBuffers = []
+// v must be a valid RarVolume
+function unpack(v) {
+
+ // TODO: implement what happens when unpVer is < 15
+ var Ver = v.header.unpVer <= 15 ? 15 : v.header.unpVer,
+ Solid = v.header.LHD_SOLID,
+ bstream = new bitjs.io.BitStream(v.fileData.buffer, true /* rtl */, v.fileData.byteOffset, v.fileData.byteLength );
+
+ rBuffer = new bitjs.io.ByteBuffer(v.header.unpackedSize);
+
+ info("Unpacking " + v.filename+" RAR v" + Ver);
+
+ switch(Ver) {
+ case 15: // rar 1.5 compression
+ Unpack15(bstream, Solid);
+ break;
+ case 20: // rar 2.x compression
+ case 26: // files larger than 2GB
+ Unpack20(bstream, Solid);
+ break;
+ case 29: // rar 3.x compression
+ case 36: // alternative hash
+ Unpack29(bstream, Solid);
+ break;
+ } // switch(method)
+
+ rOldBuffers.push(rBuffer);
+ //TODO: clear these old buffers when there's over 4MB of history
+ return rBuffer.data;
+}
+
+// bstream is a bit stream
+var RarLocalFile = function(bstream) {
+
+ this.header = new RarVolumeHeader(bstream);
+ this.filename = this.header.filename;
+
+ if (this.header.headType != FILE_HEAD && this.header.headType != ENDARC_HEAD) {
+ this.isValid = false;
+ info("Error! RAR Volume did not include a FILE_HEAD header ");
+ }
+ else {
+ // read in the compressed data
+ this.fileData = null;
+ if (this.header.packSize > 0) {
+ this.fileData = bstream.readBytes(this.header.packSize);
+ this.isValid = true;
+ }
+ }
+};
+
+RarLocalFile.prototype.unrar = function() {
+
+ if (!this.header.flags.LHD_SPLIT_BEFORE) {
+ // unstore file
+ if (this.header.method == 0x30) {
+ info("Unstore "+this.filename);
+ this.isValid = true;
+
+ currentBytesUnarchivedInFile += this.fileData.length;
+ currentBytesUnarchived += this.fileData.length;
+
+ // Create a new buffer and copy it over.
+ var len = this.header.packSize;
+ var newBuffer = new bitjs.io.ByteBuffer(len);
+ newBuffer.insertBytes(this.fileData);
+ this.fileData = newBuffer.data;
+ } else {
+ this.isValid = true;
+ this.fileData = unpack(this);
+ }
+ }
+}
+
+var unrar = function(arrayBuffer) {
+ currentFilename = "";
+ currentFileNumber = 0;
+ currentBytesUnarchivedInFile = 0;
+ currentBytesUnarchived = 0;
+ totalUncompressedBytesInArchive = 0;
+ totalFilesInArchive = 0;
+
+ postMessage(new bitjs.archive.UnarchiveStartEvent());
+ var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */);
+
+ var header = new RarVolumeHeader(bstream);
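+ // The 7-byte RAR marker block "Rar!\x1A\x07\x00" parses to exactly these header
+ // field values, so this check amounts to verifying the RAR signature.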
+ if (header.crc == 0x6152 &&
+ header.headType == 0x72 &&
+ header.flags.value == 0x1A21 &&
+ header.headSize == 7)
+ {
+ info("Found RAR signature");
+
+ var mhead = new RarVolumeHeader(bstream);
+ if (mhead.headType != MAIN_HEAD) {
+ info("Error! RAR did not include a MAIN_HEAD header");
+ } else {
+ var localFiles = [],
+ localFile = null;
+ do {
+ try {
+ localFile = new RarLocalFile(bstream);
+ info("RAR localFile isValid=" + localFile.isValid + ", volume packSize=" + localFile.header.packSize);
+ if (localFile && localFile.isValid && localFile.header.packSize > 0) {
+ totalUncompressedBytesInArchive += localFile.header.unpackedSize;
+ localFiles.push(localFile);
+ } else if (localFile.header.packSize == 0 && localFile.header.unpackedSize == 0) {
+ localFile.isValid = true;
+ }
+ } catch(err) {
+ break;
+ }
+ //info("bstream" + bstream.bytePtr+"/"+bstream.bytes.length);
+ } while( localFile.isValid );
+ totalFilesInArchive = localFiles.length;
+
+ // now we have all the header information, but nothing is unpacked yet
+ // TODO: unpack
+ localFiles = localFiles.sort(function(a,b) {
+ var aname = a.filename.toLowerCase();
+ var bname = b.filename.toLowerCase();
+ return aname > bname ? 1 : -1;
+ });
+
+ info(localFiles.map(function(a){return a.filename}).join(', '));
+ for (var i = 0; i < localFiles.length; ++i) {
+ var localfile = localFiles[i];
+
+ // update progress
+ currentFilename = localfile.header.filename;
+ currentBytesUnarchivedInFile = 0;
+
+ // actually unrar the file data
+ localfile.unrar();
+
+ if (localfile.isValid) {
+ postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
+ postProgress();
+ }
+ }
+
+ postProgress();
+ }
+ }
+ else {
+ err("Invalid RAR file");
+ }
+ postMessage(new bitjs.archive.UnarchiveFinishEvent());
+};
+
+// event.data.file has the ArrayBuffer.
+onmessage = function(event) {
+ var ab = event.data.file;
+ unrar(ab, true);
+};
diff --git a/cps/static/js/untar.js b/cps/static/js/untar.js
new file mode 100644
index 00000000..defed7d7
--- /dev/null
+++ b/cps/static/js/untar.js
@@ -0,0 +1,168 @@
+/**
+ * untar.js
+ *
+ * Copyright(c) 2011 Google Inc.
+ *
+ * Reference Documentation:
+ *
+ * TAR format: http://www.gnu.org/software/automake/manual/tar/Standard.html
+ */
+
+// This file expects to be invoked as a Worker (see onmessage below).
+importScripts('io.js');
+importScripts('archive.js');
+
+// Progress variables.
+var currentFilename = "";
+var currentFileNumber = 0;
+var currentBytesUnarchivedInFile = 0;
+var currentBytesUnarchived = 0;
+var totalUncompressedBytesInArchive = 0;
+var totalFilesInArchive = 0;
+
+// Helper functions.
+var info = function(str) {
+ postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
+};
+var err = function(str) {
+ postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
+};
+var postProgress = function() {
+ postMessage(new bitjs.archive.UnarchiveProgressEvent(
+ currentFilename,
+ currentFileNumber,
+ currentBytesUnarchivedInFile,
+ currentBytesUnarchived,
+ totalUncompressedBytesInArchive,
+ totalFilesInArchive));
+};
+
+// Removes all characters from the first zero-byte in the string onwards.
+var readCleanString = function(bstr, numBytes) {
+ var str = bstr.readString(numBytes);
+ var zIndex = str.indexOf(String.fromCharCode(0));
+ return zIndex != -1 ? str.substr(0, zIndex) : str;
+};
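+// e.g. the NUL-terminated magic field "ustar\0" comes back as "ustar".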
+
+// takes a ByteStream and parses out the local file information
+var TarLocalFile = function(bstream) {
+ this.isValid = false;
+
+ // Read in the header block
+ this.name = readCleanString(bstream, 100);
+ this.mode = readCleanString(bstream, 8);
+ this.uid = readCleanString(bstream, 8);
+ this.gid = readCleanString(bstream, 8);
+ this.size = parseInt(readCleanString(bstream, 12), 8);
+ this.mtime = readCleanString(bstream, 12);
+ this.chksum = readCleanString(bstream, 8);
+ this.typeflag = readCleanString(bstream, 1);
+ this.linkname = readCleanString(bstream, 100);
+ this.maybeMagic = readCleanString(bstream, 6);
+
+ if (this.maybeMagic == "ustar") {
+ this.version = readCleanString(bstream, 2);
+ this.uname = readCleanString(bstream, 32);
+ this.gname = readCleanString(bstream, 32);
+ this.devmajor = readCleanString(bstream, 8);
+ this.devminor = readCleanString(bstream, 8);
+ this.prefix = readCleanString(bstream, 155);
+
+ if (this.prefix.length) {
+ this.name = this.prefix + this.name;
+ }
+ bstream.readBytes(12); // 512 - 500
+ } else {
+ bstream.readBytes(255); // 512 - 257
+ }
+
+ // Done header, now rest of blocks are the file contents.
+ this.filename = this.name;
+ this.fileData = null;
+
+ info("Untarring file '" + this.filename + "'");
+ info(" size = " + this.size);
+ info(" typeflag = " + this.typeflag);
+
+ // A regular file.
+ if (this.typeflag == 0) {
+ info(" This is a regular file.");
+ var sizeInBytes = parseInt(this.size);
+ this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.size);
+ if (this.name.length > 0 && this.size > 0 && this.fileData && this.fileData.buffer) {
+ this.isValid = true;
+ }
+
+ bstream.readBytes(this.size);
+
+ // Round up to 512-byte blocks.
+ var remaining = 512 - this.size % 512;
+ if (remaining > 0 && remaining < 512) {
+ bstream.readBytes(remaining);
+ }
+ } else if (this.typeflag == 5) {
+ info(" This is a directory.")
+ }
+};
+
+// Takes an ArrayBuffer of a tar file in
+// returns null on error
+// returns an array of DecompressedFile objects on success
+var untar = function(arrayBuffer) {
+ currentFilename = "";
+ currentFileNumber = 0;
+ currentBytesUnarchivedInFile = 0;
+ currentBytesUnarchived = 0;
+ totalUncompressedBytesInArchive = 0;
+ totalFilesInArchive = 0;
+
+ postMessage(new bitjs.archive.UnarchiveStartEvent());
+ var bstream = new bitjs.io.ByteStream(arrayBuffer);
+ var localFiles = [];
+
+ // While we don't encounter an empty block, keep making TarLocalFiles.
+ while (bstream.peekNumber(4) != 0) {
+ var oneLocalFile = new TarLocalFile(bstream);
+ if (oneLocalFile && oneLocalFile.isValid) {
+ localFiles.push(oneLocalFile);
+ totalUncompressedBytesInArchive += oneLocalFile.size;
+ }
+ }
+ totalFilesInArchive = localFiles.length;
+
+ // got all local files, now sort them
+ localFiles.sort(function(a,b) {
+ var aname = a.filename.toLowerCase();
+ var bname = b.filename.toLowerCase();
+ return aname > bname ? 1 : -1;
+ });
+
+ // report # files and total length
+ if (localFiles.length > 0) {
+ postProgress();
+ }
+
+ // now do the shipping of each file
+ for (var i = 0; i < localFiles.length; ++i) {
+ var localfile = localFiles[i];
+ info("Sending file '" + localfile.filename + "' up");
+
+ // update progress
+ currentFilename = localfile.filename;
+ currentFileNumber = i;
+ currentBytesUnarchivedInFile = localfile.size;
+ currentBytesUnarchived += localfile.size;
+ postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
+ postProgress();
+ }
+
+ postProgress();
+
+ postMessage(new bitjs.archive.UnarchiveFinishEvent());
+};
+
+// event.data.file has the ArrayBuffer.
+onmessage = function(event) {
+ var ab = event.data.file;
+ untar(ab);
+};
diff --git a/cps/static/js/unzip.js b/cps/static/js/unzip.js
new file mode 100644
index 00000000..18b76443
--- /dev/null
+++ b/cps/static/js/unzip.js
@@ -0,0 +1,621 @@
+/**
+ * unzip.js
+ *
+ * Copyright(c) 2011 Google Inc.
+ * Copyright(c) 2011 antimatter15
+ *
+ * Reference Documentation:
+ *
+ * ZIP format: http://www.pkware.com/documents/casestudies/APPNOTE.TXT
+ * DEFLATE format: http://tools.ietf.org/html/rfc1951
+ */
+/* global bitjs, importScripts, Uint8Array */
+
+// This file expects to be invoked as a Worker (see onmessage below).
+importScripts("io.js");
+importScripts("archive.js");
+
+// Progress variables.
+var currentFilename = "";
+var currentFileNumber = 0;
+var currentBytesUnarchivedInFile = 0;
+var currentBytesUnarchived = 0;
+var totalUncompressedBytesInArchive = 0;
+var totalFilesInArchive = 0;
+
+// Helper functions.
+var info = function(str) {
+ postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
+};
+var err = function(str) {
+ postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
+};
+var postProgress = function() {
+ postMessage(new bitjs.archive.UnarchiveProgressEvent(
+ currentFilename,
+ currentFileNumber,
+ currentBytesUnarchivedInFile,
+ currentBytesUnarchived,
+ totalUncompressedBytesInArchive,
+ totalFilesInArchive));
+};
+
+var zLocalFileHeaderSignature = 0x04034b50;
+var zArchiveExtraDataSignature = 0x08064b50;
+var zCentralFileHeaderSignature = 0x02014b50;
+var zDigitalSignatureSignature = 0x05054b50;
+//var zEndOfCentralDirSignature = 0x06064b50;
+//var zEndOfCentralDirLocatorSignature = 0x07064b50;
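+// Each signature is the little-endian reading of the 4 magic bytes,
+// e.g. 0x04034b50 is "PK\x03\x04".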
+
+// takes a ByteStream and parses out the local file information
+var ZipLocalFile = function(bstream) {
+ if (typeof bstream !== typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function() {} ) {
+ return null;
+ }
+
+ bstream.readNumber(4); // swallow signature
+ this.version = bstream.readNumber(2);
+ this.generalPurpose = bstream.readNumber(2);
+ this.compressionMethod = bstream.readNumber(2);
+ this.lastModFileTime = bstream.readNumber(2);
+ this.lastModFileDate = bstream.readNumber(2);
+ this.crc32 = bstream.readNumber(4);
+ this.compressedSize = bstream.readNumber(4);
+ this.uncompressedSize = bstream.readNumber(4);
+ this.fileNameLength = bstream.readNumber(2);
+ this.extraFieldLength = bstream.readNumber(2);
+
+ this.filename = null;
+ if (this.fileNameLength > 0) {
+ this.filename = bstream.readString(this.fileNameLength);
+ }
+
+ info("Zip Local File Header:");
+ info(" version=" + this.version);
+ info(" general purpose=" + this.generalPurpose);
+ info(" compression method=" + this.compressionMethod);
+ info(" last mod file time=" + this.lastModFileTime);
+ info(" last mod file date=" + this.lastModFileDate);
+ info(" crc32=" + this.crc32);
+ info(" compressed size=" + this.compressedSize);
+ info(" uncompressed size=" + this.uncompressedSize);
+ info(" file name length=" + this.fileNameLength);
+ info(" extra field length=" + this.extraFieldLength);
+ info(" filename = '" + this.filename + "'");
+
+ this.extraField = null;
+ if (this.extraFieldLength > 0) {
+ this.extraField = bstream.readString(this.extraFieldLength);
+ info(" extra field=" + this.extraField);
+ }
+
+ // read in the compressed data
+ this.fileData = null;
+ if (this.compressedSize > 0) {
+ this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.compressedSize);
+ bstream.ptr += this.compressedSize;
+ }
+
+ // TODO: deal with data descriptor if present (we currently assume no data descriptor!)
+ // "This descriptor exists only if bit 3 of the general purpose bit flag is set"
+ // But how do you figure out how big the file data is if you don't know the compressedSize
+ // from the header?!?
+ if ((this.generalPurpose & bitjs.BIT[3]) !== 0) {
+ this.crc32 = bstream.readNumber(4);
+ this.compressedSize = bstream.readNumber(4);
+ this.uncompressedSize = bstream.readNumber(4);
+ }
+};
+
+// determine what kind of compressed data we have and decompress
+ZipLocalFile.prototype.unzip = function() {
+
+ // Zip Version 1.0, no compression (store only)
+ if (this.compressionMethod === 0 ) {
+ info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
+ currentBytesUnarchivedInFile = this.compressedSize;
+ currentBytesUnarchived += this.compressedSize;
+ }
+ // version == 20, compression method == 8 (DEFLATE)
+ else if (this.compressionMethod === 8) {
+ info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
+ this.fileData = inflate(this.fileData, this.uncompressedSize);
+ }
+ else {
+ err("UNSUPPORTED VERSION/FORMAT: ZIP v" + this.version + ", compression method=" + this.compressionMethod + ": " + this.filename + " (" + this.compressedSize + " bytes)");
+ this.fileData = null;
+ }
+};
+
+
+// Takes an ArrayBuffer of a zip file in
+// returns null on error
+// returns an array of DecompressedFile objects on success
+var unzip = function(arrayBuffer) {
+ postMessage(new bitjs.archive.UnarchiveStartEvent());
+
+ currentFilename = "";
+ currentFileNumber = 0;
+ currentBytesUnarchivedInFile = 0;
+ currentBytesUnarchived = 0;
+ totalUncompressedBytesInArchive = 0;
+ totalFilesInArchive = 0;
+ currentBytesUnarchived = 0;
+
+ var bstream = new bitjs.io.ByteStream(arrayBuffer);
+ // detect local file header signature or return null
+ if (bstream.peekNumber(4) === zLocalFileHeaderSignature) {
+ var localFiles = [];
+ // loop until we don't see any more local files
+ while (bstream.peekNumber(4) === zLocalFileHeaderSignature) {
+ var oneLocalFile = new ZipLocalFile(bstream);
+ // this should strip out directories/folders
+ if (oneLocalFile && oneLocalFile.uncompressedSize > 0 && oneLocalFile.fileData) {
+ localFiles.push(oneLocalFile);
+ totalUncompressedBytesInArchive += oneLocalFile.uncompressedSize;
+ }
+ }
+ totalFilesInArchive = localFiles.length;
+
+ // got all local files, now sort them
+ localFiles.sort(function(a, b) {
+ var aname = a.filename.toLowerCase();
+ var bname = b.filename.toLowerCase();
+ return aname > bname ? 1 : -1;
+ });
+
+ // archive extra data record
+ if (bstream.peekNumber(4) === zArchiveExtraDataSignature) {
+ info(" Found an Archive Extra Data Signature");
+
+ // skipping this record for now
+ bstream.readNumber(4);
+ var archiveExtraFieldLength = bstream.readNumber(4);
+ bstream.readString(archiveExtraFieldLength);
+ }
+
+ // central directory structure
+ // TODO: handle the rest of the structures (Zip64 stuff)
+ if (bstream.peekNumber(4) === zCentralFileHeaderSignature) {
+ info(" Found a Central File Header");
+
+ // read all file headers
+ while (bstream.peekNumber(4) === zCentralFileHeaderSignature) {
+ bstream.readNumber(4); // signature
+ bstream.readNumber(2); // version made by
+ bstream.readNumber(2); // version needed to extract
+ bstream.readNumber(2); // general purpose bit flag
+ bstream.readNumber(2); // compression method
+ bstream.readNumber(2); // last mod file time
+ bstream.readNumber(2); // last mod file date
+ bstream.readNumber(4); // crc32
+ bstream.readNumber(4); // compressed size
+ bstream.readNumber(4); // uncompressed size
+ var fileNameLength = bstream.readNumber(2); // file name length
+ var extraFieldLength = bstream.readNumber(2); // extra field length
+ var fileCommentLength = bstream.readNumber(2); // file comment length
+ bstream.readNumber(2); // disk number start
+ bstream.readNumber(2); // internal file attributes
+ bstream.readNumber(4); // external file attributes
+ bstream.readNumber(4); // relative offset of local header
+
+ bstream.readString(fileNameLength); // file name
+ bstream.readString(extraFieldLength); // extra field
+ bstream.readString(fileCommentLength); // file comment
+ }
+ }
+
+ // digital signature
+ if (bstream.peekNumber(4) === zDigitalSignatureSignature) {
+ info(" Found a Digital Signature");
+
+ bstream.readNumber(4);
+ var sizeOfSignature = bstream.readNumber(2);
+ bstream.readString(sizeOfSignature); // digital signature data
+ }
+
+ // report # files and total length
+ if (localFiles.length > 0) {
+ postProgress();
+ }
+
+ // now do the unzipping of each file
+ for (var i = 0; i < localFiles.length; ++i) {
+ var localfile = localFiles[i];
+
+ // update progress
+ currentFilename = localfile.filename;
+ currentFileNumber = i;
+ currentBytesUnarchivedInFile = 0;
+
+ // actually do the unzipping
+ localfile.unzip();
+
+ if (localfile.fileData !== null) {
+ postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
+ postProgress();
+ }
+ }
+ postProgress();
+ postMessage(new bitjs.archive.UnarchiveFinishEvent());
+ }
+};
+
+// returns a table of Huffman codes
+// each entry's index is its code and its value is a JavaScript object
+// containing {length: 6, symbol: X}
+function getHuffmanCodes(bitLengths) {
+ // ensure bitLengths is an array containing at least one element
+ if (typeof bitLengths !== typeof [] || bitLengths.length < 1) {
+ err("Error! getHuffmanCodes() called with an invalid array");
+ return null;
+ }
+
+ // Reference: http://tools.ietf.org/html/rfc1951#page-8
+ var numLengths = bitLengths.length,
+ blCount = [],
+ MAX_BITS = 1;
+
+ // Step 1: count up how many codes of each length we have
+ for (var i = 0; i < numLengths; ++i) {
+ var len = bitLengths[i];
+ // test to ensure each bit length is a positive, non-zero number
+ if (typeof len !== typeof 1 || len < 0) {
+ err("bitLengths contained an invalid number in getHuffmanCodes(): " + len + " of type " + (typeof len));
+ return null;
+ }
+ // increment the appropriate bitlength count
+ if (blCount[len] === undefined) blCount[len] = 0;
+ // a length of zero means this symbol is not participating in the huffman coding
+ if (len > 0) blCount[len]++;
+
+ if (len > MAX_BITS) MAX_BITS = len;
+ }
+
+ // Step 2: Find the numerical value of the smallest code for each code length
+ var nextCode = [],
+ code = 0;
+ for (var bits = 1; bits <= MAX_BITS; ++bits) {
+ var len = bits-1;
+ // ensure undefined lengths are zero
+ if (blCount[len] == undefined) blCount[len] = 0;
+ code = (code + blCount[bits-1]) << 1;
+ nextCode[bits] = code;
+ }
+
+ // Step 3: Assign numerical values to all codes
+ var table = {}, tableLength = 0;
+ for (var n = 0; n < numLengths; ++n) {
+ var len = bitLengths[n];
+ if (len !== 0) {
+ table[nextCode[len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(nextCode[len],len) };
+ tableLength++;
+ nextCode[len]++;
+ }
+ }
+ table.maxLength = tableLength;
+
+ return table;
+}
+
+/*
+ The Huffman codes for the two alphabets are fixed, and are not
+ represented explicitly in the data. The Huffman code lengths
+ for the literal/length alphabet are:
+
+ Lit Value Bits Codes
+ --------- ---- -----
+ 0 - 143 8 00110000 through
+ 10111111
+ 144 - 255 9 110010000 through
+ 111111111
+ 256 - 279 7 0000000 through
+ 0010111
+ 280 - 287 8 11000000 through
+ 11000111
+*/
+// fixed Huffman codes go from 7-9 bits, so we need an array whose index can hold up to 9 bits
+var fixedHCtoLiteral = null;
+var fixedHCtoDistance = null;
+function getFixedLiteralTable() {
+ // create once
+ if (!fixedHCtoLiteral) {
+ var bitlengths = new Array(288);
+ for (var i = 0; i <= 143; ++i) bitlengths[i] = 8;
+ for (var i = 144; i <= 255; ++i) bitlengths[i] = 9;
+ for (var i = 256; i <= 279; ++i) bitlengths[i] = 7;
+ for (var i = 280; i <= 287; ++i) bitlengths[i] = 8;
+
+ // get huffman code table
+ fixedHCtoLiteral = getHuffmanCodes(bitlengths);
+ }
+ return fixedHCtoLiteral;
+}
+function getFixedDistanceTable() {
+ // create once
+ if (!fixedHCtoDistance) {
+ var bitlengths = new Array(32);
+ for (var i = 0; i < 32; ++i) {
+ bitlengths[i] = 5;
+ }
+
+ // get huffman code table
+ fixedHCtoDistance = getHuffmanCodes(bitlengths);
+ }
+ return fixedHCtoDistance;
+}
+
+// extract one bit at a time until we find a matching Huffman Code
+// then return that symbol
+function decodeSymbol(bstream, hcTable) {
+ var code = 0, len = 0;
+ // var match = false;
+
+ // loop until we match
+ for (;;) {
+ // read in next bit
+ var bit = bstream.readBits(1);
+ code = (code<<1) | bit;
+ ++len;
+
+ // check against Huffman Code table and break if found
+ if (hcTable.hasOwnProperty(code) && hcTable[code].length == len) {
+
+ break;
+ }
+ if (len > hcTable.maxLength) {
+ err("Bit stream out of sync, didn't find a Huffman Code, length was " + len +
+ " and table only max code length of " + hcTable.maxLength);
+ break;
+ }
+ }
+ return hcTable[code].symbol;
+}
+
+
+var CodeLengthCodeOrder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
+ /*
+ Extra Extra Extra
+ Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
+ ---- ---- ------ ---- ---- ------- ---- ---- -------
+ 257 0 3 267 1 15,16 277 4 67-82
+ 258 0 4 268 1 17,18 278 4 83-98
+ 259 0 5 269 2 19-22 279 4 99-114
+ 260 0 6 270 2 23-26 280 4 115-130
+ 261 0 7 271 2 27-30 281 5 131-162
+ 262 0 8 272 2 31-34 282 5 163-194
+ 263 0 9 273 3 35-42 283 5 195-226
+ 264 0 10 274 3 43-50 284 5 227-257
+ 265 1 11,12 275 3 51-58 285 0 258
+ 266 1 13,14 276 3 59-66
+
+ */
+var LengthLookupTable = [
+ [0, 3], [0, 4], [0, 5], [0, 6],
+ [0, 7], [0, 8], [0, 9], [0, 10],
+ [1, 11], [1, 13], [1, 15], [1, 17],
+ [2, 19], [2, 23], [2, 27], [2, 31],
+ [3, 35], [3, 43], [3, 51], [3, 59],
+ [4, 67], [4, 83], [4, 99], [4, 115],
+ [5, 131], [5, 163], [5, 195], [5, 227],
+ [0, 258]
+];
+ /*
+ Extra Extra Extra
+ Code Bits Dist Code Bits Dist Code Bits Distance
+ ---- ---- ---- ---- ---- ------ ---- ---- --------
+ 0 0 1 10 4 33-48 20 9 1025-1536
+ 1 0 2 11 4 49-64 21 9 1537-2048
+ 2 0 3 12 5 65-96 22 10 2049-3072
+ 3 0 4 13 5 97-128 23 10 3073-4096
+ 4 1 5,6 14 6 129-192 24 11 4097-6144
+ 5 1 7,8 15 6 193-256 25 11 6145-8192
+ 6 2 9-12 16 7 257-384 26 12 8193-12288
+ 7 2 13-16 17 7 385-512 27 12 12289-16384
+ 8 3 17-24 18 8 513-768 28 13 16385-24576
+ 9 3 25-32 19 8 769-1024 29 13 24577-32768
+ */
+var DistLookupTable = [
+ [0, 1], [0, 2], [0, 3], [0, 4],
+ [1, 5], [1, 7],
+ [2, 9], [2, 13],
+ [3, 17], [3, 25],
+ [4, 33], [4, 49],
+ [5, 65], [5, 97],
+ [6, 129], [6, 193],
+ [7, 257], [7, 385],
+ [8, 513], [8, 769],
+ [9, 1025], [9, 1537],
+ [10, 2049], [10, 3073],
+ [11, 4097], [11, 6145],
+ [12, 8193], [12, 12289],
+ [13, 16385], [13, 24577]
+];
+
+function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
+ /*
+ loop (until end of block code recognized)
+ decode literal/length value from input stream
+ if value < 256
+ copy value (literal byte) to output stream
+ otherwise
+ if value = end of block (256)
+ break from loop
+ otherwise (value = 257..285)
+ decode distance from input stream
+
+ move backwards distance bytes in the output
+ stream, and copy length bytes from this
+ position to the output stream.
+ */
+ var numSymbols = 0, blockSize = 0;
+ for (;;) {
+ var symbol = decodeSymbol(bstream, hcLiteralTable);
+ ++numSymbols;
+ if (symbol < 256) {
+ // copy literal byte to output
+ buffer.insertByte(symbol);
+ blockSize++;
+ }
+ else {
+ // end of block reached
+ if (symbol === 256) {
+ break;
+ }
+ else {
+ var lengthLookup = LengthLookupTable[symbol-257],
+ length = lengthLookup[1] + bstream.readBits(lengthLookup[0]),
+ distLookup = DistLookupTable[decodeSymbol(bstream, hcDistanceTable)],
+ distance = distLookup[1] + bstream.readBits(distLookup[0]);
+
+ // now apply length and distance appropriately and copy to output
+
+ // TODO: check that backward distance < data.length?
+
+ // http://tools.ietf.org/html/rfc1951#page-11
+ // "Note also that the referenced string may overlap the current
+ // position; for example, if the last 2 bytes decoded have values
+ // X and Y, a string reference with
+ // adds X,Y,X,Y,X to the output stream."
+ //
+ // loop for each character
+ var ch = buffer.ptr - distance;
+ blockSize += length;
+ if(length > distance) {
+ var data = buffer.data;
+ while (length--) {
+ buffer.insertByte(data[ch++]);
+ }
+ } else {
+ buffer.insertBytes(buffer.data.subarray(ch, ch + length))
+ }
+
+ } // length-distance pair
+ } // length-distance pair or end-of-block
+ } // loop until we reach end of block
+ return blockSize;
+}
+
+// {Uint8Array} compressedData A Uint8Array of the compressed file data.
+// compression method 8
+// deflate: http://tools.ietf.org/html/rfc1951
+function inflate(compressedData, numDecompressedBytes) {
+ // Bit stream representing the compressed data.
+ var bstream = new bitjs.io.BitStream(compressedData.buffer,
+ false /* rtl */,
+ compressedData.byteOffset,
+ compressedData.byteLength);
+ var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
+ var numBlocks = 0;
+ var blockSize = 0;
+
+ // block format: http://tools.ietf.org/html/rfc1951#page-9
+ do {
+ var bFinal = bstream.readBits(1);
+ var bType = bstream.readBits(2);
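+ // bType: 0 = stored (no compression), 1 = fixed Huffman codes,
+ // 2 = dynamic Huffman codes, 3 = reserved (treated as an error below).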
+ blockSize = 0;
+ ++numBlocks;
+ // no compression
+ if (bType == 0) {
+ // skip remaining bits in this byte
+ while (bstream.bitPtr != 0) bstream.readBits(1);
+ var len = bstream.readBits(16),
+ nlen = bstream.readBits(16);
+ // TODO: check if nlen is the ones-complement of len?
+
+ if(len > 0) buffer.insertBytes(bstream.readBytes(len));
+ blockSize = len;
+ }
+ // fixed Huffman codes
+ else if(bType === 1) {
+ blockSize = inflateBlockData(bstream, getFixedLiteralTable(), getFixedDistanceTable(), buffer);
+ }
+ // dynamic Huffman codes
+ else if(bType === 2) {
+ var numLiteralLengthCodes = bstream.readBits(5) + 257;
+ var numDistanceCodes = bstream.readBits(5) + 1,
+ numCodeLengthCodes = bstream.readBits(4) + 4;
+
+ // populate the array of code length codes (first de-compaction)
+ var codeLengthsCodeLengths = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+ for (var i = 0; i < numCodeLengthCodes; ++i) {
+ codeLengthsCodeLengths[ CodeLengthCodeOrder[i] ] = bstream.readBits(3);
+ }
+
+ // get the Huffman Codes for the code lengths
+ var codeLengthsCodes = getHuffmanCodes(codeLengthsCodeLengths);
+
+ // now follow this mapping
+ /*
+ 0 - 15: Represent code lengths of 0 - 15
+ 16: Copy the previous code length 3 - 6 times.
+ The next 2 bits indicate repeat length
+ (0 = 3, ... , 3 = 6)
+ Example: Codes 8, 16 (+2 bits 11),
+ 16 (+2 bits 10) will expand to
+ 12 code lengths of 8 (1 + 6 + 5)
+ 17: Repeat a code length of 0 for 3 - 10 times.
+ (3 bits of length)
+ 18: Repeat a code length of 0 for 11 - 138 times
+ (7 bits of length)
+ */
+ // to generate the true code lengths of the Huffman Codes for the literal
+ // and distance tables together
+ var literalCodeLengths = [];
+ var prevCodeLength = 0;
+ while (literalCodeLengths.length < numLiteralLengthCodes + numDistanceCodes) {
+ var symbol = decodeSymbol(bstream, codeLengthsCodes);
+ if (symbol <= 15) {
+ literalCodeLengths.push(symbol);
+ prevCodeLength = symbol;
+ }
+ else if (symbol === 16) {
+ var repeat = bstream.readBits(2) + 3;
+ while (repeat--) {
+ literalCodeLengths.push(prevCodeLength);
+ }
+ }
+ else if (symbol === 17) {
+ var repeat = bstream.readBits(3) + 3;
+ while (repeat--) {
+ literalCodeLengths.push(0);
+ }
+ }
+ else if (symbol === 18) {
+ var repeat = bstream.readBits(7) + 11;
+ while (repeat--) {
+ literalCodeLengths.push(0);
+ }
+ }
+ }
+
+ // now split the distance code lengths out of the literal code array
+ var distanceCodeLengths = literalCodeLengths.splice(numLiteralLengthCodes, numDistanceCodes);
+
+ // now generate the true Huffman Code tables using these code lengths
+ var hcLiteralTable = getHuffmanCodes(literalCodeLengths),
+ hcDistanceTable = getHuffmanCodes(distanceCodeLengths);
+ blockSize = inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer);
+ }
+ // error
+ else {
+ err("Error! Encountered deflate block of type 3");
+ return null;
+ }
+
+ // update progress
+ currentBytesUnarchivedInFile += blockSize;
+ currentBytesUnarchived += blockSize;
+ postProgress();
+
+ } while (bFinal !== 1);
+ // we are done reading blocks if the bFinal bit was set for this block
+
+ // return the buffer data bytes
+ return buffer.data;
+}
+
+// event.data.file has the ArrayBuffer.
+onmessage = function(event) {
+ unzip(event.data.file, true);
+};
diff --git a/cps/templates/author.html b/cps/templates/author.html
index 224244bc..78ddc8bb 100644
--- a/cps/templates/author.html
+++ b/cps/templates/author.html
@@ -39,7 +39,7 @@
{{entry.title|shortentitle}}
{% for author in entry.authors %}
- {{author.name}}
+ {{author.name.replace('|',',')}}
{% if not loop.last %}
&
{% endif %}
@@ -64,9 +64,9 @@
-{% if other_books is not none %}
+{% if other_books %}