Converting ebooks in the background

additional sorting of the task list by date and runtime
code cosmetics
pull/675/head
Ozzie Isaacs 6 years ago
parent 11b798a01c
commit 7be328c535

@ -1,241 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import smtplib
import threading
from datetime import datetime
import logging
import time
import socket
import sys
from email.generator import Generator
import web
from flask_babel import gettext as _
import re
try:
from StringIO import StringIO
except ImportError as e:
from io import StringIO
chunksize = 8192
STAT_WAITING = 0
STAT_FAIL = 1
STAT_STARTED = 2
STAT_FINISH_SUCCESS = 3
class email(smtplib.SMTP):
transferSize = 0
progress = 0
def __init__(self, *args, **kwargs):
smtplib.SMTP.__init__(self, *args, **kwargs)
def data(self, msg):
self.transferSize = len(msg)
(code, resp) = smtplib.SMTP.data(self, msg)
self.progress = 0
return (code, resp)
def send(self, str):
"""Send `str' to the server."""
if self.debuglevel > 0:
print('send:', repr(str), file=sys.stderr)
if hasattr(self, 'sock') and self.sock:
try:
if self.transferSize:
lock=threading.Lock()
lock.acquire()
self.transferSize = len(str)
lock.release()
for i in range(0, self.transferSize, chunksize):
self.sock.send(str[i:i+chunksize])
lock.acquire()
self.progress = i
lock.release()
else:
self.sock.sendall(str)
except socket.error:
self.close()
raise smtplib.SMTPServerDisconnected('Server not connected')
else:
raise smtplib.SMTPServerDisconnected('please run connect() first')
def getTransferStatus(self):
if self.transferSize:
lock2 = threading.Lock()
lock2.acquire()
value = round(float(self.progress) / float(self.transferSize),2)*100
lock2.release()
return str(value) + ' %'
else:
return "100 %"
class email_SSL(email):
def __init__(self, *args, **kwargs):
smtplib.SMTP_SSL.__init__(self, *args, **kwargs)
class EMailThread(threading.Thread):
def __init__(self):
self._stopevent = threading.Event()
threading.Thread.__init__(self)
self.status = 0
self.current = 0
self.last = 0
self.queue=list()
self.UIqueue = list()
self.asyncSMTP=None
def run(self):
while not self._stopevent.isSet():
doLock = threading.Lock()
doLock.acquire()
if self.current != self.last:
doLock.release()
self.send_raw_email()
self.current += 1
else:
doLock.release()
time.sleep(1)
def stop(self):
self._stopevent.set()
def get_send_status(self):
if self.asyncSMTP:
return self.asyncSMTP.getTransferStatus()
else:
return "0 %"
def delete_completed_tasks(self):
# needs to be locked
for index, task in reversed(list(enumerate(self.UIqueue))):
if task['progress'] == "100 %":
# delete tasks
self.queue.pop(index)
self.UIqueue.pop(index)
# if we are deleting entries before the current index, adjust the index
# if self.current >= index:
self.current -= 1
self.last = len(self.queue)
def get_taskstatus(self):
if self.current < len(self.queue):
if self.queue[self.current]['status'] == STAT_STARTED:
self.UIqueue[self.current]['progress'] = self.get_send_status()
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
return self.UIqueue
def add_email(self, data, settings, recipient, user_name, type):
# if more than 20 entries are in the list, clean up the completed tasks
addLock = threading.Lock()
addLock.acquire()
if self.last >= 20:
self.delete_completed_tasks()
# progress, runtime, and status = 0
self.queue.append({'data':data, 'settings':settings, 'recipent':recipient, 'starttime': 0,
'status': STAT_WAITING})
self.UIqueue.append({'user': user_name, 'formStarttime': '', 'progress': " 0 %", 'type': type,
'runtime': '0 s', 'status': _('Waiting') })
# access issue
self.last=len(self.queue)
addLock.release()
def send_raw_email(self):
obj=self.queue[self.current]
# settings = ub.get_mail_settings()
obj['data']['From'] = obj['settings']["mail_from"]
obj['data']['To'] = obj['recipent']
use_ssl = int(obj['settings'].get('mail_use_ssl', 0))
# convert MIME message to string
fp = StringIO()
gen = Generator(fp, mangle_from_=False)
gen.flatten(obj['data'])
obj['data'] = fp.getvalue()
# send email
try:
timeout = 600 # set timeout to 10 minutes
org_stderr = sys.stderr
#org_stderr2 = smtplib.stderr
sys.stderr = StderrLogger()
#smtplib.stderr = StderrLogger()
self.queue[self.current]['status'] = STAT_STARTED
self.UIqueue[self.current]['status'] = _('Started')
self.queue[self.current]['starttime'] = datetime.now()
self.UIqueue[self.current]['formStarttime'] = self.queue[self.current]['starttime']
if use_ssl == 2:
self.asyncSMTP = email_SSL(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
else:
self.asyncSMTP = email(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
# link to logging level
if web.ub.config.config_log_level != logging.DEBUG:
self.asyncSMTP.set_debuglevel(0)
else:
self.asyncSMTP.set_debuglevel(1)
if use_ssl == 1:
self.asyncSMTP.starttls()
if obj['settings']["mail_password"]:
self.asyncSMTP.login(str(obj['settings']["mail_login"]), str(obj['settings']["mail_password"]))
self.asyncSMTP.sendmail(obj['settings']["mail_from"], obj['recipent'], obj['data'])
self.asyncSMTP.quit()
self.queue[self.current]['status'] = STAT_FINISH_SUCCESS
self.UIqueue[self.current]['status'] = _('Finished')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
sys.stderr = org_stderr
#smtplib.stderr = org_stderr2
except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as e:
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
web.app.logger.error(e)
return None
def _formatRuntime(self, runtime):
val = re.split('\:|\.', str(runtime))[0:3]
erg = list()
for v in val:
if int(v) > 0:
erg.append(v)
retVal = (':'.join(erg)).lstrip('0') + ' s'
if retVal == ' s':
retVal = '0 s'
return retVal
class StderrLogger(object):
buffer = ''
def __init__(self):
self.logger = web.app.logger
def write(self, message):
if message == '\n':
self.logger.debug(self.buffer)
print(self.buffer)
self.buffer = ''
else:
self.buffer += message
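Aside (editor's sketch, not part of the commit): the core of the progress reporting in the removed email class above — which reappears in the new worker module further down — is a chunked socket send that tracks bytes written against the total payload size under a lock; getTransferStatus() turns that ratio into a percentage string. A minimal, self-contained version of that idea; every name other than chunksize is illustrative.

import threading

chunksize = 8192

class ChunkedSender(object):
    # Mirrors email.send()/getTransferStatus() above: send the payload in
    # chunksize slices and expose the sent/total ratio as a percentage.
    def __init__(self, sock):
        self.sock = sock                # any object with a send(bytes) method
        self.progress = 0               # bytes handed to the socket so far
        self.transferSize = 0           # total payload size in bytes
        self._lock = threading.Lock()   # guards the two counters above

    def send(self, payload):
        with self._lock:
            self.transferSize = len(payload)
        for i in range(0, len(payload), chunksize):
            sent = self.sock.send(payload[i:i + chunksize])
            with self._lock:
                self.progress = i + sent

    def status(self):
        with self._lock:
            if not self.transferSize:
                return "100 %"
            return str(round(100.0 * self.progress / self.transferSize, 2)) + " %"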

@ -1,19 +1,12 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import subprocess
import ub
import db
import re
import web
from flask_babel import gettext as _
RET_FAIL = 0
RET_SUCCESS = 1
def versionKindle():
versions = _(u'not installed')
if os.path.exists(ub.config.config_converterpath):
@ -46,97 +39,6 @@ def versionCalibre():
return {'Calibre converter' : versions}
def convert_kindlegen(file_path, book):
error_message = None
if not os.path.exists(ub.config.config_converterpath):
error_message = _(u"kindlegen binary %(kindlepath)s not found", kindlepath=ub.config.config_converterpath)
web.app.logger.error("convert_kindlegen: " + error_message)
return error_message, RET_FAIL
try:
p = subprocess.Popen((ub.config.config_converterpath + " \"" + file_path + u".epub\"").encode(sys.getfilesystemencoding()),
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception as e:
error_message = _(u"kindlegen failed, no execution permissions")
web.app.logger.error("convert_kindlegen: " + error_message)
return error_message, RET_FAIL
# Poll process for new output until finished
while True:
nextline = p.stdout.readline()
if nextline == '' and p.poll() is not None:
break
if nextline != "\r\n":
# Format of error message (kindlegen translates its output texts):
# Error(prcgen):E23006: Language not recognized in metadata.The dc:Language field is mandatory.Aborting.
conv_error = re.search(".*\(.*\):(E\d+):\s(.*)", nextline)
# If an error occurs, log it in every case
if conv_error:
error_message = _(u"Kindlegen failed with Error %(error)s. Message: %(message)s",
error=conv_error.group(1), message=conv_error.group(2).decode('utf-8'))
web.app.logger.info("convert_kindlegen: " + error_message)
web.app.logger.info(nextline.strip('\r\n'))
else:
web.app.logger.debug(nextline.strip('\r\n'))
check = p.returncode
if not check or check < 2:
book.data.append(db.Data(
name=book.data[0].name,
book_format="MOBI",
book=book.id,
uncompressed_size=os.path.getsize(file_path + ".mobi")
))
db.session.commit()
if ub.config.config_use_google_drive:
os.remove(file_path + u".epub")
return file_path + ".mobi", RET_SUCCESS
else:
web.app.logger.info("convert_kindlegen: kindlegen failed with error while converting book")
if not error_message:
error_message = 'kindlegen failed, no execution permissions'
return error_message, RET_FAIL
def convert_calibre(file_path, book):
error_message = None
if not os.path.exists(ub.config.config_converterpath):
error_message = _(u"Ebook-convert binary %(converterpath)s not found", converterpath=ub.config.config_converterpath)
web.app.logger.error("convert_calibre: " + error_message)
return error_message, RET_FAIL
try:
command = ("\""+ub.config.config_converterpath + "\" \"" + file_path + u".epub\" \""
+ file_path + u".mobi\" " + ub.config.config_calibre).encode(sys.getfilesystemencoding())
p = subprocess.Popen(command,stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception as e:
error_message = _(u"Ebook-convert failed, no execution permissions")
web.app.logger.error("convert_calibre: " + error_message)
return error_message, RET_FAIL
# Poll process for new output until finished
while True:
nextline = p.stdout.readline()
if nextline == '' and p.poll() is not None:
break
web.app.logger.debug(nextline.strip('\r\n').decode(sys.getfilesystemencoding()))
check = p.returncode
if check == 0 :
book.data.append(db.Data(
name=book.data[0].name,
book_format="MOBI",
book=book.id,
uncompressed_size=os.path.getsize(file_path + ".mobi")
))
db.session.commit()
if ub.config.config_use_google_drive:
os.remove(file_path + u".epub")
return file_path + ".mobi", RET_SUCCESS
else:
web.app.logger.info("convert_calibre: Ebook-convert failed with error while converting book")
if not error_message:
error_message = 'Ebook-convert failed, no execution permissions'
return error_message, RET_FAIL
def versioncheck():
if ub.config.config_ebookconverter == 1:
return versionKindle()
@ -145,9 +47,3 @@ def versioncheck():
else:
return {'ebook_converter':''}
def convert_mobi(file_path, book):
if ub.config.config_ebookconverter == 2:
return convert_calibre(file_path, book)
else:
return convert_kindlegen(file_path, book)
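Aside (editor's sketch, not part of the commit): both converters above follow the same pattern — spawn the external tool with subprocess.Popen, read stdout line by line until the process exits, and, for kindlegen, grep each line for its Error(...):Exxxx: messages. A stripped-down version of that polling loop; the regex is the one used above, everything else is illustrative.

import re
import subprocess

def run_converter(cmd):
    # Launch the converter and poll its output until it terminates.
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    error = None
    while True:
        nextline = p.stdout.readline().decode('utf-8', errors='replace')
        if nextline == '' and p.poll() is not None:
            break
        # kindlegen error lines look like:
        # Error(prcgen):E23006: Language not recognized in metadata. ...
        conv_error = re.search(r".*\(.*\):(E\d+):\s(.*)", nextline)
        if conv_error:
            error = (conv_error.group(1), conv_error.group(2).strip())
    return p.returncode, error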

@ -321,7 +321,7 @@ def setup_db():
try:
if not os.path.exists(dbpath):
raise
engine = create_engine('sqlite:///' + dbpath, echo=False, isolation_level="SERIALIZABLE")
engine = create_engine('sqlite:///' + dbpath, echo=False, isolation_level="SERIALIZABLE", connect_args={'check_same_thread': False})
conn = engine.connect()
except Exception:
content = ub.session.query(ub.Settings).first()
@ -381,8 +381,9 @@ def setup_db():
secondary=books_custom_column_links[cc_id[0]],
backref='books'))
# Base.metadata.create_all(engine)
Session = sessionmaker()
Session.configure(bind=engine)
Session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
session = Session()
return True
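Aside (editor's sketch, not part of the commit): the two changes above presumably exist so the new background worker can use the Calibre database from its own thread — check_same_thread=False lets the SQLite connection cross thread boundaries, and scoped_session hands each calling thread its own session object. The pattern in isolation; the database path here is a placeholder.

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

# Allow the SQLite connection to be shared across threads ...
engine = create_engine('sqlite:///metadata.db', echo=False,
                       isolation_level="SERIALIZABLE",
                       connect_args={'check_same_thread': False})
# ... and give every thread that calls Session() its own session.
Session = scoped_session(sessionmaker(autocommit=False,
                                      autoflush=False,
                                      bind=engine))
session = Session()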

@ -5,6 +5,7 @@ try:
from apiclient import errors
except ImportError:
pass
import os
from ub import config
import cli
@ -179,9 +180,8 @@ def getEbooksFolderId(drive=None):
gDriveId = GdriveId()
try:
gDriveId.gdrive_id = getEbooksFolder(drive)['id']
except:
pass
# ToDo: Path not existing
except Exception:
web.app.logger.error('Error gDrive, root ID not found')
gDriveId.path = '/'
session.merge(gDriveId)
session.commit()
@ -282,19 +282,6 @@ def moveGdriveFolderRemote(origin_file, target_folder):
# drive.auth.service.files().delete(fileId=previous_parents).execute()
#def downloadFile(path, filename, output):
# f = getFileFromEbooksFolder(path, filename)
# return f.GetContentFile(output)
# ToDo: Check the purpose of parameter f and of this function
def backupCalibreDbAndOptionalDownload(drive):
drive = getDrive(drive)
metaDataFile = "'%s' in parents and title = 'metadata.db' and trashed = false" % getEbooksFolderId()
fileList = drive.ListFile({'q': metaDataFile}).GetList()
#databaseFile = fileList[0]
#if f:
# databaseFile.GetContentFile(f)
def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
ignoreFiles=None,
@ -447,7 +434,6 @@ def deleteDatabaseOnChange():
session.commit()
def updateGdriveCalibreFromLocal():
# backupCalibreDbAndOptionalDownload(Gdrive.Instance().drive)
copyToDrive(Gdrive.Instance().drive, config.config_calibre_dir, False, True)
for x in os.listdir(config.config_calibre_dir):
if os.path.isdir(os.path.join(config.config_calibre_dir, x)):

@ -8,28 +8,12 @@ import logging
from tempfile import gettempdir
import sys
import os
import traceback
import re
import unicodedata
from io import BytesIO
import converter
import asyncmail
import worker
import time
try:
from StringIO import StringIO
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
except ImportError as e:
from io import StringIO
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email import encoders
from email.utils import formatdate
from email.utils import make_msgid
from flask import send_from_directory, make_response, redirect, abort
from flask_babel import gettext as _
import threading
@ -51,29 +35,25 @@ except ImportError:
# Global variables
updater_thread = None
global_eMailThread = asyncmail.EMailThread()
global_eMailThread.start()
RET_SUCCESS = 1
RET_FAIL = 0
global_WorkerThread = worker.WorkerThread()
global_WorkerThread.start()
def update_download(book_id, user_id):
check = ub.session.query(ub.Downloads).filter(ub.Downloads.user_id == user_id).filter(ub.Downloads.book_id ==
book_id).first()
if not check:
new_download = ub.Downloads(user_id=user_id, book_id=book_id)
ub.session.add(new_download)
ub.session.commit()
def make_mobi(book_id, calibrepath):
def make_mobi(book_id, calibrepath, user_id, kindle_mail):
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == 'EPUB').first()
if not data:
error_message = _(u"epub format not found for book id: %(book)d", book=book_id)
app.logger.error("make_mobi: " + error_message)
return error_message, RET_FAIL
return error_message
if ub.config.config_use_google_drive:
df = gd.getFileFromEbooksFolder(book.path, data.name + u".epub")
if df:
@ -82,120 +62,59 @@ def make_mobi(book_id, calibrepath):
os.makedirs(os.path.join(calibrepath, book.path))
df.GetContentFile(datafile)
else:
error_message = "make_mobi: epub not found on gdrive: %s.epub" % data.name
return error_message, RET_FAIL
# else:
error_message = (u"make_mobi: epub not found on gdrive: %s.epub" % data.name)
return error_message
file_path = os.path.join(calibrepath, book.path, data.name)
if os.path.exists(file_path + u".epub"):
# convert book, and upload in case of google drive
res = converter.convert_mobi(file_path, book)
if ub.config.config_use_google_drive:
gd.updateGdriveCalibreFromLocal()
# time.sleep(10)
return res
# append conversion task to the queue
global_WorkerThread.add_convert(file_path, book.id, user_id, _(u"Convert: %s" % book.title), ub.get_mail_settings(),
kindle_mail)
return None
else:
error_message = "make_mobi: epub not found: %s.epub" % file_path
return error_message, RET_FAIL
error_message = (u"make_mobi: epub not found: %s.epub" % file_path)
return error_message
def send_test_mail(kindle_mail, user_name):
msg = MIMEMultipart()
msg['Subject'] = _(u'Calibre-web test email')
text = _(u'This email has been sent via calibre web.')
msg.attach(MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8'))
global_eMailThread.add_email(msg,ub.get_mail_settings(),kindle_mail, user_name, _('Test E-Mail'))
return # send_raw_email(kindle_mail, msg)
global_WorkerThread.add_email(_(u'Calibre-web test email'),None, None, ub.get_mail_settings(),
kindle_mail, user_name, _(u"Test E-Mail"))
return
# Files are processed in the following order/priority:
# 1: If a Mobi file exists, it is sent directly to the kindle email
# 2: If an Epub file exists, it is converted first and then sent to the kindle email
# 3: If a Pdf file exists, it is sent directly to the kindle email
def send_mail(book_id, kindle_mail, calibrepath, user_id):
"""Send email with attachments"""
# create MIME message
result= None
msg = MIMEMultipart()
msg['Subject'] = _(u'Send to Kindle')
msg['Message-Id'] = make_msgid('calibre-web')
msg['Date'] = formatdate(localtime=True)
text = _(u'This email has been sent via calibre web.')
msg.attach(MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8'))
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
data = db.session.query(db.Data).filter(db.Data.book == book.id).all()
formats = {}
index = 0
for indx,entry in enumerate(data):
for entry in data:
if entry.format == "MOBI":
formats["mobi"] = entry.name + ".mobi"
if entry.format == "EPUB":
formats["epub"] = entry.name + ".epub"
index = indx
if entry.format == "PDF":
formats["pdf"] = entry.name + ".pdf"
if len(formats) == 0:
return _("Could not find any formats suitable for sending by email")
return _(u"Could not find any formats suitable for sending by email")
if 'mobi' in formats:
result = get_attachment(calibrepath, book.path, formats['mobi'])
if result:
msg.attach(result)
result = formats['mobi']
elif 'epub' in formats:
# returns the filename on success, otherwise an error message
data, resultCode = make_mobi(book.id, calibrepath)
if resultCode == RET_SUCCESS:
result = get_attachment(calibrepath, book.path, os.path.basename(data))
if result:
msg.attach(result)
else:
app.logger.error(data)
return data
# returns None on success, otherwise an error message
return make_mobi(book.id, calibrepath, user_id, kindle_mail)
elif 'pdf' in formats:
result = get_attachment(calibrepath, book.path, formats['pdf'])
if result:
msg.attach(result)
result = formats['pdf'] # worker.get_attachment()
else:
return _("Could not find any formats suitable for sending by email")
return _(u"Could not find any formats suitable for sending by email")
if result:
global_eMailThread.add_email(msg,ub.get_mail_settings(),kindle_mail, user_id, _(u"E-Mail: %s" % book.title))
return None # send_raw_email(kindle_mail, msg)
global_WorkerThread.add_email(_(u"Send to Kindle"), book.path, result, ub.get_mail_settings(),
kindle_mail, user_id, _(u"E-Mail: %s" % book.title))
else:
return _('The requested file could not be read. Maybe wrong permissions?')
# For Google Drive, download the book from gdrive to calibrepath (temp dir for books); in both cases read the
# file contents and attach them Base64-encoded to the MIME message
def get_attachment(calibrepath, bookpath, filename):
"""Get file as MIMEBase message"""
if ub.config.config_use_google_drive:
df = gd.getFileFromEbooksFolder(bookpath, filename)
if df:
datafile = os.path.join(calibrepath, bookpath, filename)
if not os.path.exists(os.path.join(calibrepath, bookpath)):
os.makedirs(os.path.join(calibrepath, bookpath))
df.GetContentFile(datafile)
else:
return None
file_ = open(datafile, 'rb')
data = file_.read()
file_.close()
os.remove(datafile)
else:
try:
file_ = open(os.path.join(calibrepath, bookpath, filename), 'rb')
data = file_.read()
file_.close()
except IOError:
traceback.print_exc()
app.logger.error = u'The requested file could not be read. Maybe wrong permissions?'
return None
attachment = MIMEBase('application', 'octet-stream')
attachment.set_payload(data)
encoders.encode_base64(attachment)
attachment.add_header('Content-Disposition', 'attachment',
filename=filename)
return attachment
return _(u"The requested file could not be read. Maybe wrong permissions?")
def get_valid_filename(value, replace_whitespace=True):
@ -225,7 +144,6 @@ def get_valid_filename(value, replace_whitespace=True):
value = value[:128]
if not value:
raise ValueError("Filename cannot be empty")
return value
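Aside (editor's sketch, not part of the commit): with this change make_mobi and send_mail no longer convert or send anything themselves — they enqueue a task via global_WorkerThread.add_convert()/add_email() and return immediately, while the worker loop in the new worker module at the end of this diff drains queue and mirrors progress into UIqueue for the tasks page. A stripped-down, self-contained illustration of that producer/consumer shape; none of these names are the project's API.

import threading
import time

class TinyWorker(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self._stopevent = threading.Event()
        self.queue = []      # full task data, consumed by the worker thread
        self.UIqueue = []    # lightweight status mirror, polled by the UI
        self.current = 0
        self.last = 0

    def add_task(self, func, label):
        self.queue.append({'func': func})
        self.UIqueue.append({'type': label, 'status': 'Waiting', 'progress': '0 %'})
        self.last = len(self.queue)

    def run(self):
        while not self._stopevent.isSet():
            if self.current != self.last:
                self.UIqueue[self.current]['status'] = 'Started'
                self.queue[self.current]['func']()
                self.UIqueue[self.current]['status'] = 'Finished'
                self.UIqueue[self.current]['progress'] = '100 %'
                self.current += 1
            else:
                time.sleep(1)

    def stop(self):
        self._stopevent.set()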

@ -44,8 +44,8 @@ class server:
web.app.logger.info('Unable to listen on \'\', trying on IPv4 only...')
self.wsgiserver = WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
self.wsgiserver.serve_forever()
except:
pass
except Exception:
web.app.logger.info("Unknown error while starting gevent")
def startServer(self):
if gevent_present:
@ -70,7 +70,7 @@ class server:
if self.restart == True:
web.app.logger.info("Performing restart of Calibre-web")
web.helper.global_eMailThread.stop()
web.helper.global_WorkerThread.stop()
if os.name == 'nt':
arguments = ["\"" + sys.executable + "\""]
for e in sys.argv:
@ -80,7 +80,7 @@ class server:
os.execl(sys.executable, sys.executable, *sys.argv)
else:
web.app.logger.info("Performing shutdown of Calibre-web")
web.helper.global_eMailThread.stop()
web.helper.global_WorkerThread.stop()
sys.exit(0)
def setRestartTyp(self,starttyp):
@ -92,7 +92,8 @@ class server:
else:
self.wsgiserver.add_callback(self.wsgiserver.stop)
def getNameVersion(self):
@staticmethod
def getNameVersion():
if gevent_present:
return {'Gevent':'v'+geventVersion}
else:

@ -143,13 +143,12 @@ bitjs.archive = bitjs.archive || {};
* Progress event.
*/
bitjs.archive.UnarchiveProgressEvent = function(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive)
{
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive) {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.PROGRESS);
this.currentFilename = currentFilename;

@ -80,7 +80,7 @@ function prefixedSource(prefix, query, cb, bhAdapter) {
function getPath() {
var jsFileLocation = $("script[src*=edit_books]").attr("src"); // the js file path
return jsFileLocation.substr(0,jsFileLocation.search("/static/js/edit_books.js")); // the js folder path
return jsFileLocation.substr(0, jsFileLocation.search("/static/js/edit_books.js")); // the js folder path
}
var authors = new Bloodhound({

@ -121,7 +121,7 @@ bitjs.io = bitjs.io || {};
* @return {number} The peeked bits, as an unsigned number.
*/
bitjs.io.BitStream.prototype.peekBitsRtl = function(n, movePointers) {
if (n <= 0 || typeof n != typeof 1) {
if (n <= 0 || typeof n !== typeof 1) {
return 0;
}
@ -150,8 +150,7 @@ bitjs.io = bitjs.io || {};
bytePtr++;
bitPtr = 0;
n -= numBitsLeftInThisByte;
}
else {
} else {
result <<= n;
result |= ((bytes[bytePtr] & (BITMASK[n] << (8 - n - bitPtr))) >> (8 - n - bitPtr));

@ -42,10 +42,10 @@ var postProgress = function() {
// shows a byte value as its hex representation
var nibble = "0123456789ABCDEF";
var byteValueToHexString = function(num) {
return nibble[num>>4] + nibble[num & 0xF];
return nibble[num >> 4] + nibble[num & 0xF];
};
var twoByteValueToHexString = function(num) {
return nibble[(num>>12) & 0xF] + nibble[(num>>8) & 0xF] + nibble[(num>>4) & 0xF] + nibble[num & 0xF];
return nibble[(num >> 12) & 0xF] + nibble[(num >> 8) & 0xF] + nibble[(num >> 4) & 0xF] + nibble[num & 0xF];
};
@ -146,7 +146,7 @@ var RarVolumeHeader = function(bstream) {
} else {
this.HighPackSize = 0;
this.HighUnpSize = 0;
if (this.unpackedSize == 0xffffffff) {
if (this.unpackedSize === 0xffffffff) {
this.HighUnpSize = 0x7fffffff;
this.unpackedSize = 0xffffffff;
}
@ -178,9 +178,10 @@ var RarVolumeHeader = function(bstream) {
// this is adapted straight out of arcread.cpp, Archive::ReadHeader()
for (var I = 0; I < 4; ++I) {
var rmode = extTimeFlags >> ((3 - I) * 4);
if ((rmode & 8)==0)
if ((rmode & 8) === 0) {
continue;
if (I!=0) {
}
if (I !== 0) {
bstream.readBits(16);
}
var count = (rmode & 3);
@ -209,13 +210,12 @@ var RarVolumeHeader = function(bstream) {
};
var BLOCK_LZ = 0;
// BLOCK_PPM = 1;
var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5],
rDBitLengthCounts = [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 0, 12],
rSDDecode = [0, 4, 8, 16, 32, 64, 128, 192],
rSDBits = [2,2,3, 4, 5, 6, 6, 6];
rSDBits = [2, 2, 3, 4, 5, 6, 6, 6];
var rDDecode = [0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32,
48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 1536, 2048, 3072,
@ -236,7 +236,7 @@ var rNC = 299,
rBC = 20,
rHUFF_TABLE_SIZE = (rNC + rDC + rRC + rLDC);
var UnpBlockType = BLOCK_LZ;
//var UnpBlockType = BLOCK_LZ;
var UnpOldTable = new Array(rHUFF_TABLE_SIZE);
var BD = { //bitdecode
@ -281,26 +281,26 @@ function RarReadTables(bstream) {
}
if (!bstream.readBits(1)) { //discard old table
for (var i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
var i;
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
}
// read in bit lengths
for (var I = 0; I < rBC; ++I) {
var Length = bstream.readBits(4);
if (Length == 15) {
var ZeroCount = bstream.readBits(4);
if (ZeroCount == 0) {
if (Length === 15) {
var ZeroCount = bstream.readBits(4);
if (ZeroCount === 0) {
BitLength[I] = 15;
}
else {
} else {
ZeroCount += 2;
while (ZeroCount-- > 0 && I < rBC)
while (ZeroCount-- > 0 && I < rBC) {
BitLength[I++] = 0;
}
--I;
}
}
else {
} else {
BitLength[I] = Length;
}
}
@ -311,20 +311,20 @@ function RarReadTables(bstream) {
var TableSize = rHUFF_TABLE_SIZE;
//console.log(DecodeLen, DecodePos, DecodeNum);
for (var i = 0; i < TableSize;) {
for (i = 0; i < TableSize;) {
var num = RarDecodeNumber(bstream, BD);
if (num < 16) {
Table[i] = (num + UnpOldTable[i]) & 0xf;
i++;
} else if(num < 18) {
var N = (num == 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
} else if (num < 18) {
var N = (num === 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
while (N-- > 0 && i < TableSize) {
Table[i] = Table[i - 1];
i++;
}
} else {
var N = (num == 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
var N = (num === 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
while (N-- > 0 && i < TableSize) {
Table[i++] = 0;
@ -337,7 +337,7 @@ function RarReadTables(bstream) {
RarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
RarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);
for (var i = UnpOldTable.length; i--;) {
for (i = UnpOldTable.length; i--;) {
UnpOldTable[i] = Table[i];
}
return true;
@ -349,20 +349,20 @@ function RarDecodeNumber(bstream, dec) {
var bitField = bstream.getBits() & 0xfffe;
//some sort of rolled out binary search
var bits = ((bitField < DecodeLen[8])?
((bitField < DecodeLen[4])?
((bitField < DecodeLen[2])?
((bitField < DecodeLen[1])?1:2)
:((bitField < DecodeLen[3])?3:4))
:(bitField < DecodeLen[6])?
((bitField < DecodeLen[5])?5:6)
:((bitField < DecodeLen[7])?7:8))
:((bitField < DecodeLen[12])?
((bitField < DecodeLen[10])?
((bitField < DecodeLen[9])?9:10)
:((bitField < DecodeLen[11])?11:12))
:(bitField < DecodeLen[14])?
((bitField < DecodeLen[13])?13:14)
:15));
((bitField < DecodeLen[4]) ?
((bitField < DecodeLen[2]) ?
((bitField < DecodeLen[1]) ? 1 : 2)
: ((bitField < DecodeLen[3]) ? 3 : 4))
: (bitField < DecodeLen[6])?
((bitField < DecodeLen[5]) ? 5 : 6)
:((bitField < DecodeLen[7]) ? 7 : 8))
: ((bitField < DecodeLen[12]) ?
((bitField < DecodeLen[10]) ?
((bitField < DecodeLen[9]) ? 9 : 10)
:((bitField < DecodeLen[11]) ? 11 : 12))
: (bitField < DecodeLen[14]) ?
((bitField < DecodeLen[13]) ? 13 : 14)
: 15));
bstream.readBits(bits);
var N = DecodePos[bits] + ((bitField - DecodeLen[bits -1]) >>> (16 - bits));
@ -433,14 +433,14 @@ function Unpack20(bstream, Solid) {
RarCopyString(Length, Distance);
continue;
}
if (num == 269) {
if (num === 269) {
RarReadTables20(bstream);
RarUpdateProgress()
continue;
}
if (num == 256) {
if (num === 256) {
lastDist = rOldDist[oldDistPtr++ & 3] = lastDist;
RarCopyString(lastLength, lastDist);
continue;
@ -511,14 +511,14 @@ function RarReadTables20(bstream) {
if (num < 16) {
Table[I] = num + UnpOldTable20[I] & 0xf;
I++;
} else if(num == 16) {
} else if(num === 16) {
N = bstream.readBits(2) + 3;
while (N-- > 0 && I < TableSize) {
Table[I] = Table[I - 1];
I++;
}
} else {
if (num == 17) {
if (num === 17) {
N = bstream.readBits(3) + 3;
} else {
N = bstream.readBits(7) + 11;
@ -595,7 +595,7 @@ function Unpack29(bstream, Solid) {
Distance += prevLowDist;
} else {
var LowDist = RarDecodeNumber(bstream, LDD);
if (LowDist == 16) {
if (LowDist === 16) {
lowDistRepCount = rLOW_DIST_REP_COUNT - 1;
Distance += prevLowDist;
} else {
@ -618,16 +618,16 @@ function Unpack29(bstream, Solid) {
RarCopyString(Length, Distance);
continue;
}
if (num == 256) {
if (num === 256) {
if (!RarReadEndOfBlock(bstream)) break;
continue;
}
if (num == 257) {
if (num === 257) {
//console.log("READVMCODE");
if (!RarReadVMCode(bstream)) break;
continue;
}
if (num == 258) {
if (num === 258) {
if (lastLength != 0) {
RarCopyString(lastLength, lastDist);
}
@ -684,9 +684,9 @@ function RarReadEndOfBlock(bstream) {
function RarReadVMCode(bstream) {
var FirstByte = bstream.readBits(8);
var Length = (FirstByte & 7) + 1;
if (Length == 7) {
if (Length === 7) {
Length = bstream.readBits(8) + 7;
} else if(Length == 8) {
} else if(Length === 8) {
Length = bstream.readBits(16);
}
var vmCode = [];
@ -789,8 +789,8 @@ RarLocalFile.prototype.unrar = function() {
if (!this.header.flags.LHD_SPLIT_BEFORE) {
// unstore file
if (this.header.method == 0x30) {
info("Unstore "+this.filename);
if (this.header.method === 0x30) {
info("Unstore " + this.filename);
this.isValid = true;
currentBytesUnarchivedInFile += this.fileData.length;
@ -820,10 +820,10 @@ var unrar = function(arrayBuffer) {
var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */);
var header = new RarVolumeHeader(bstream);
if (header.crc == 0x6152 &&
header.headType == 0x72 &&
header.flags.value == 0x1A21 &&
header.headSize == 7)
if (header.crc === 0x6152 &&
header.headType === 0x72 &&
header.flags.value === 0x1A21 &&
header.headSize === 7)
{
info("Found RAR signature");
@ -840,7 +840,7 @@ var unrar = function(arrayBuffer) {
if (localFile && localFile.isValid && localFile.header.packSize > 0) {
totalUncompressedBytesInArchive += localFile.header.unpackedSize;
localFiles.push(localFile);
} else if (localFile.header.packSize == 0 && localFile.header.unpackedSize == 0) {
} else if (localFile.header.packSize === 0 && localFile.header.unpackedSize === 0) {
localFile.isValid = true;
}
} catch(err) {

@ -7,10 +7,11 @@
*
* TAR format: http://www.gnu.org/software/automake/manual/tar/Standard.html
*/
/* global bitjs, importScripts, Uint8Array */
// This file expects to be invoked as a Worker (see onmessage below).
importScripts('io.js');
importScripts('archive.js');
importScripts("io.js");
importScripts("archive.js");
// Progress variables.
var currentFilename = "";
@ -22,147 +23,147 @@ var totalFilesInArchive = 0;
// Helper functions.
var info = function(str) {
postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
var err = function(str) {
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
var postProgress = function() {
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive));
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive));
};
// Removes all characters from the first zero-byte in the string onwards.
var readCleanString = function(bstr, numBytes) {
var str = bstr.readString(numBytes);
var zIndex = str.indexOf(String.fromCharCode(0));
return zIndex != -1 ? str.substr(0, zIndex) : str;
var str = bstr.readString(numBytes);
var zIndex = str.indexOf(String.fromCharCode(0));
return zIndex !== -1 ? str.substr(0, zIndex) : str;
};
// takes a ByteStream and parses out the local file information
var TarLocalFile = function(bstream) {
this.isValid = false;
// Read in the header block
this.name = readCleanString(bstream, 100);
this.mode = readCleanString(bstream, 8);
this.uid = readCleanString(bstream, 8);
this.gid = readCleanString(bstream, 8);
this.size = parseInt(readCleanString(bstream, 12), 8);
this.mtime = readCleanString(bstream, 12);
this.chksum = readCleanString(bstream, 8);
this.typeflag = readCleanString(bstream, 1);
this.linkname = readCleanString(bstream, 100);
this.maybeMagic = readCleanString(bstream, 6);
if (this.maybeMagic == "ustar") {
this.version = readCleanString(bstream, 2);
this.uname = readCleanString(bstream, 32);
this.gname = readCleanString(bstream, 32);
this.devmajor = readCleanString(bstream, 8);
this.devminor = readCleanString(bstream, 8);
this.prefix = readCleanString(bstream, 155);
if (this.prefix.length) {
this.name = this.prefix + this.name;
}
bstream.readBytes(12); // 512 - 500
} else {
bstream.readBytes(255); // 512 - 257
}
// Done header, now rest of blocks are the file contents.
this.filename = this.name;
this.fileData = null;
info("Untarring file '" + this.filename + "'");
info(" size = " + this.size);
info(" typeflag = " + this.typeflag);
// A regular file.
if (this.typeflag == 0) {
info(" This is a regular file.");
var sizeInBytes = parseInt(this.size);
this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.size);
if (this.name.length > 0 && this.size > 0 && this.fileData && this.fileData.buffer) {
this.isValid = true;
}
bstream.readBytes(this.size);
// Round up to 512-byte blocks.
var remaining = 512 - this.size % 512;
if (remaining > 0 && remaining < 512) {
bstream.readBytes(remaining);
}
} else if (this.typeflag == 5) {
info(" This is a directory.")
}
this.isValid = false;
// Read in the header block
this.name = readCleanString(bstream, 100);
this.mode = readCleanString(bstream, 8);
this.uid = readCleanString(bstream, 8);
this.gid = readCleanString(bstream, 8);
this.size = parseInt(readCleanString(bstream, 12), 8);
this.mtime = readCleanString(bstream, 12);
this.chksum = readCleanString(bstream, 8);
this.typeflag = readCleanString(bstream, 1);
this.linkname = readCleanString(bstream, 100);
this.maybeMagic = readCleanString(bstream, 6);
if (this.maybeMagic === "ustar") {
this.version = readCleanString(bstream, 2);
this.uname = readCleanString(bstream, 32);
this.gname = readCleanString(bstream, 32);
this.devmajor = readCleanString(bstream, 8);
this.devminor = readCleanString(bstream, 8);
this.prefix = readCleanString(bstream, 155);
if (this.prefix.length) {
this.name = this.prefix + this.name;
}
bstream.readBytes(12); // 512 - 500
} else {
bstream.readBytes(255); // 512 - 257
}
// Done header, now rest of blocks are the file contents.
this.filename = this.name;
this.fileData = null;
info("Untarring file '" + this.filename + "'");
info(" size = " + this.size);
info(" typeflag = " + this.typeflag);
// A regular file.
if (this.typeflag === 0) {
info(" This is a regular file.");
var sizeInBytes = parseInt(this.size);
this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.size);
if (this.name.length > 0 && this.size > 0 && this.fileData && this.fileData.buffer) {
this.isValid = true;
}
bstream.readBytes(this.size);
// Round up to 512-byte blocks.
var remaining = 512 - this.size % 512;
if (remaining > 0 && remaining < 512) {
bstream.readBytes(remaining);
}
} else if (this.typeflag === 5) {
info(" This is a directory.")
}
};
// Takes an ArrayBuffer of a tar file in
// returns null on error
// returns an array of DecompressedFile objects on success
var untar = function(arrayBuffer) {
currentFilename = "";
currentFileNumber = 0;
currentBytesUnarchivedInFile = 0;
currentBytesUnarchived = 0;
totalUncompressedBytesInArchive = 0;
totalFilesInArchive = 0;
postMessage(new bitjs.archive.UnarchiveStartEvent());
var bstream = new bitjs.io.ByteStream(arrayBuffer);
var localFiles = [];
// While we don't encounter an empty block, keep making TarLocalFiles.
while (bstream.peekNumber(4) != 0) {
var oneLocalFile = new TarLocalFile(bstream);
if (oneLocalFile && oneLocalFile.isValid) {
localFiles.push(oneLocalFile);
totalUncompressedBytesInArchive += oneLocalFile.size;
}
}
totalFilesInArchive = localFiles.length;
// got all local files, now sort them
localFiles.sort(function(a,b) {
var aname = a.filename.toLowerCase();
var bname = b.filename.toLowerCase();
return aname > bname ? 1 : -1;
});
// report # files and total length
if (localFiles.length > 0) {
postProgress();
}
// now do the shipping of each file
for (var i = 0; i < localFiles.length; ++i) {
var localfile = localFiles[i];
info("Sending file '" + localfile.filename + "' up");
// update progress
currentFilename = localfile.filename;
currentFileNumber = i;
currentBytesUnarchivedInFile = localfile.size;
currentBytesUnarchived += localfile.size;
postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
postProgress();
}
currentFilename = "";
currentFileNumber = 0;
currentBytesUnarchivedInFile = 0;
currentBytesUnarchived = 0;
totalUncompressedBytesInArchive = 0;
totalFilesInArchive = 0;
postMessage(new bitjs.archive.UnarchiveStartEvent());
var bstream = new bitjs.io.ByteStream(arrayBuffer);
var localFiles = [];
// While we don't encounter an empty block, keep making TarLocalFiles.
while (bstream.peekNumber(4) !== 0) {
var oneLocalFile = new TarLocalFile(bstream);
if (oneLocalFile && oneLocalFile.isValid) {
localFiles.push(oneLocalFile);
totalUncompressedBytesInArchive += oneLocalFile.size;
}
}
totalFilesInArchive = localFiles.length;
// got all local files, now sort them
localFiles.sort(function(a, b) {
var aname = a.filename.toLowerCase();
var bname = b.filename.toLowerCase();
return aname > bname ? 1 : -1;
});
// report # files and total length
if (localFiles.length > 0) {
postProgress();
}
// now do the shipping of each file
for (var i = 0; i < localFiles.length; ++i) {
var localfile = localFiles[i];
info("Sending file '" + localfile.filename + "' up");
// update progress
currentFilename = localfile.filename;
currentFileNumber = i;
currentBytesUnarchivedInFile = localfile.size;
currentBytesUnarchived += localfile.size;
postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
postProgress();
}
postProgress();
postProgress();
postMessage(new bitjs.archive.UnarchiveFinishEvent());
postMessage(new bitjs.archive.UnarchiveFinishEvent());
};
// event.data.file has the ArrayBuffer.
onmessage = function(event) {
var ab = event.data.file;
untar(ab);
var ab = event.data.file;
untar(ab);
};

@ -49,7 +49,7 @@ var zDigitalSignatureSignature = 0x05054b50;
// takes a ByteStream and parses out the local file information
var ZipLocalFile = function(bstream) {
if (typeof bstream !== typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function() {} ) {
if (typeof bstream !== typeof {} || !bstream.readNumber || typeof bstream.readNumber !== typeof function() {} ) {
return null;
}
@ -115,9 +115,8 @@ ZipLocalFile.prototype.unzip = function() {
info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
currentBytesUnarchivedInFile = this.compressedSize;
currentBytesUnarchived += this.compressedSize;
}
// version == 20, compression method == 8 (DEFLATE)
else if (this.compressionMethod === 8) {
} else if (this.compressionMethod === 8) {
// version == 20, compression method == 8 (DEFLATE)
info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
this.fileData = inflate(this.fileData, this.uncompressedSize);
}
@ -245,6 +244,7 @@ var unzip = function(arrayBuffer) {
// each entry's index is its code and its value is a JavaScript object
// containing {length: 6, symbol: X}
function getHuffmanCodes(bitLengths) {
var len;
// ensure bitLengths is an array containing at least one element
if (typeof bitLengths !== typeof [] || bitLengths.length < 1) {
err("Error! getHuffmanCodes() called with an invalid array");
@ -256,9 +256,10 @@ function getHuffmanCodes(bitLengths) {
blCount = [],
MAX_BITS = 1;
// Step 1: count up how many codes of each length we have
for (var i = 0; i < numLengths; ++i) {
var len = bitLengths[i];
len = bitLengths[i];
// test to ensure each bit length is a positive, non-zero number
if (typeof len !== typeof 1 || len < 0) {
err("bitLengths contained an invalid number in getHuffmanCodes(): " + len + " of type " + (typeof len));
@ -276,17 +277,17 @@ function getHuffmanCodes(bitLengths) {
var nextCode = [],
code = 0;
for (var bits = 1; bits <= MAX_BITS; ++bits) {
var len = bits-1;
len = bits - 1;
// ensure undefined lengths are zero
if (blCount[len] == undefined) blCount[len] = 0;
code = (code + blCount[bits-1]) << 1;
if (blCount[len] === undefined) blCount[len] = 0;
code = (code + blCount[bits - 1]) << 1;
nextCode[bits] = code;
}
// Step 3: Assign numerical values to all codes
var table = {}, tableLength = 0;
for (var n = 0; n < numLengths; ++n) {
var len = bitLengths[n];
len = bitLengths[n];
if (len !== 0) {
table[nextCode[len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(nextCode[len],len) };
tableLength++;
@ -318,13 +319,14 @@ function getHuffmanCodes(bitLengths) {
var fixedHCtoLiteral = null;
var fixedHCtoDistance = null;
function getFixedLiteralTable() {
var i;
// create once
if (!fixedHCtoLiteral) {
var bitlengths = new Array(288);
for (var i = 0; i <= 143; ++i) bitlengths[i] = 8;
for (var i = 144; i <= 255; ++i) bitlengths[i] = 9;
for (var i = 256; i <= 279; ++i) bitlengths[i] = 7;
for (var i = 280; i <= 287; ++i) bitlengths[i] = 8;
for (i = 0; i <= 143; ++i) bitlengths[i] = 8;
for (i = 144; i <= 255; ++i) bitlengths[i] = 9;
for (i = 256; i <= 279; ++i) bitlengths[i] = 7;
for (i = 280; i <= 287; ++i) bitlengths[i] = 8;
// get huffman code table
fixedHCtoLiteral = getHuffmanCodes(bitlengths);
@ -355,11 +357,11 @@ function decodeSymbol(bstream, hcTable) {
for (;;) {
// read in next bit
var bit = bstream.readBits(1);
code = (code<<1) | bit;
code = (code << 1) | bit;
++len;
// check against Huffman Code table and break if found
if (hcTable.hasOwnProperty(code) && hcTable[code].length == len) {
if (hcTable.hasOwnProperty(code) && hcTable[code].length === len) {
break;
}
@ -374,31 +376,31 @@ function decodeSymbol(bstream, hcTable) {
var CodeLengthCodeOrder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
/*
Extra Extra Extra
Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
---- ---- ------ ---- ---- ------- ---- ---- -------
257 0 3 267 1 15,16 277 4 67-82
258 0 4 268 1 17,18 278 4 83-98
259 0 5 269 2 19-22 279 4 99-114
260 0 6 270 2 23-26 280 4 115-130
261 0 7 271 2 27-30 281 5 131-162
262 0 8 272 2 31-34 282 5 163-194
263 0 9 273 3 35-42 283 5 195-226
264 0 10 274 3 43-50 284 5 227-257
265 1 11,12 275 3 51-58 285 0 258
266 1 13,14 276 3 59-66
/*
Extra Extra Extra
Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
---- ---- ------ ---- ---- ------- ---- ---- -------
257 0 3 267 1 15,16 277 4 67-82
258 0 4 268 1 17,18 278 4 83-98
259 0 5 269 2 19-22 279 4 99-114
260 0 6 270 2 23-26 280 4 115-130
261 0 7 271 2 27-30 281 5 131-162
262 0 8 272 2 31-34 282 5 163-194
263 0 9 273 3 35-42 283 5 195-226
264 0 10 274 3 43-50 284 5 227-257
265 1 11,12 275 3 51-58 285 0 258
266 1 13,14 276 3 59-66
*/
*/
var LengthLookupTable = [
[0, 3], [0, 4], [0, 5], [0, 6],
[0, 7], [0, 8], [0, 9], [0, 10],
[1, 11], [1, 13], [1, 15], [1, 17],
[2, 19], [2, 23], [2, 27], [2, 31],
[3, 35], [3, 43], [3, 51], [3, 59],
[4, 67], [4, 83], [4, 99], [4, 115],
[5, 131], [5, 163], [5, 195], [5, 227],
[0, 258]
[0, 3], [0, 4], [0, 5], [0, 6],
[0, 7], [0, 8], [0, 9], [0, 10],
[1, 11], [1, 13], [1, 15], [1, 17],
[2, 19], [2, 23], [2, 27], [2, 31],
[3, 35], [3, 43], [3, 51], [3, 59],
[4, 67], [4, 83], [4, 99], [4, 115],
[5, 131], [5, 163], [5, 195], [5, 227],
[0, 258]
];
/*
Extra Extra Extra
@ -448,10 +450,10 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
stream, and copy length bytes from this
position to the output stream.
*/
var numSymbols = 0, blockSize = 0;
var blockSize = 0;
for (;;) {
var symbol = decodeSymbol(bstream, hcLiteralTable);
++numSymbols;
// ++numSymbols;
if (symbol < 256) {
// copy literal byte to output
buffer.insertByte(symbol);
@ -463,7 +465,7 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
break;
}
else {
var lengthLookup = LengthLookupTable[symbol-257],
var lengthLookup = LengthLookupTable[symbol - 257],
length = lengthLookup[1] + bstream.readBits(lengthLookup[0]),
distLookup = DistLookupTable[decodeSymbol(bstream, hcDistanceTable)],
distance = distLookup[1] + bstream.readBits(distLookup[0]);
@ -481,13 +483,13 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
// loop for each character
var ch = buffer.ptr - distance;
blockSize += length;
if(length > distance) {
var data = buffer.data;
while (length--) {
buffer.insertByte(data[ch++]);
}
if (length > distance) {
var data = buffer.data;
while (length--) {
buffer.insertByte(data[ch++]);
}
} else {
buffer.insertBytes(buffer.data.subarray(ch, ch + length))
buffer.insertBytes(buffer.data.subarray(ch, ch + length));
}
} // length-distance pair
@ -506,7 +508,7 @@ function inflate(compressedData, numDecompressedBytes) {
compressedData.byteOffset,
compressedData.byteLength);
var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
var numBlocks = 0;
//var numBlocks = 0;
var blockSize = 0;
// block format: http://tools.ietf.org/html/rfc1951#page-9
@ -514,9 +516,9 @@ function inflate(compressedData, numDecompressedBytes) {
var bFinal = bstream.readBits(1);
var bType = bstream.readBits(2);
blockSize = 0;
++numBlocks;
// ++numBlocks;
// no compression
if (bType == 0) {
if (bType === 0) {
// skip remaining bits in this byte
while (bstream.bitPtr != 0) bstream.readBits(1);
var len = bstream.readBits(16),

@ -2,15 +2,15 @@
{% block body %}
<h1>{{title}}</h1>
<div class="container">
<div class="col-sm-6">
<div class="col-xs-12 col-sm-6">
{% for entry in entries %}
{% if loop.index0 == (loop.length/2)|int and loop.length > 20 %}
</div>
<div class="col-sm-6">
<div class="col-xs-12 col-sm-6">
{% endif %}
<div class="row">
<div class="col-xs-1" align="left"><span class="badge">{{entry.count}}</span></div>
<div class="col-xs-6"><a id="list_{{loop.index0}}" href="{{url_for(folder, book_id=entry[0].id )}}">{{entry[0].name}}</a></div>
<div class="col-xs-2 col-sm-2 col-md-1" align="left"><span class="badge">{{entry.count}}</span></div>
<div class="col-xs-10 col-sm-10 col-md-11"><a id="list_{{loop.index0}}" href="{{url_for(folder, book_id=entry[0].id )}}">{{entry[0].name}}</a></div>
</div>
{% endfor %}
</div>

@ -5,17 +5,19 @@
{% block body %}
<div class="discover">
<h2>{{_('Tasks list')}}</h2>
<table class="table table-no-bordered" id="table" data-url="{{'/ajax/emailstat'}}">
<table class="table table-no-bordered" id="table" data-url="{{'/ajax/emailstat'}}" data-sort-name="starttime" data-sort-order="asc">
<thead>
<tr>
{% if g.user.role_admin() %}
<th data-field="user">{{_('User')}}</th>
<th data-halign="right" data-align="right" data-field="user" data-sortable="true">{{_('User')}}</th>
{% endif %}
<th data-field="type">{{_('Task')}}</th>
<th data-field="status">{{_('Status')}}</th>
<th data-field="progress">{{_('Progress')}}</th>
<th data-field="runtime">{{_('Runtime')}}</th>
<th data-field="starttime">{{_('Starttime')}}</th>
<th data-halign="right" data-align="right" data-field="type" data-sortable="true">{{_('Task')}}</th>
<th data-halign="right" data-align="right" data-field="status" data-sortable="true">{{_('Status')}}</th>
<th data-halign="right" data-align="right" data-field="progress" data-sortable="true" data-sorter="elementSorter">{{_('Progress')}}</th>
<th data-halign="right" data-align="right" data-field="runtime" data-sortable="true" data-sort-name="rt">{{_('Runtime')}}</th>
<th data-halign="right" data-align="right" data-field="starttime" data-sortable="true" data-sort-name="id">{{_('Starttime')}}</th>
<th data-field="id" data-visible="false"></th>
<th data-field="rt" data-visible="false"></th>
</tr>
</thead>
</table>
@ -43,6 +45,13 @@
}
});
}, 1000);
function elementSorter(a, b) {
a = +a.slice(0, -2);
b = +b.slice(0, -2);
if (a > b) return 1;
if (a < b) return -1;
return 0;
}
</script>
{% endblock %}
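Aside (editor's sketch, not part of the commit): the table above is filled by polling /ajax/emailstat; each row carries the visible fields (user, type, status, progress, runtime, starttime) plus the two hidden sort keys id and rt declared above — the same record shape as the commented-out sample data in web.py further down. One such record with illustrative values:

import json

task = {
    'user': 'Guest',                 # column only shown to admins
    'type': 'E-Mail',
    'status': 'Waiting',
    'progress': ' 0 %',
    'runtime': '0 s',
    'rt': 0,                         # runtime in seconds, numeric sort key for the Runtime column
    'starttime': '09.3.2018 14:23',
    'id': 1,                         # task id, hidden sort key for the Starttime column
}
print(json.dumps([task]))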

@ -29,14 +29,13 @@ import logging
from logging.handlers import RotatingFileHandler
from flask import (Flask, render_template, request, Response, redirect,
url_for, send_from_directory, make_response, g, flash,
abort, Markup, stream_with_context)
abort, Markup)
from flask import __version__ as flaskVersion
import cache_buster
import ub
from ub import config
import helper
import os
import errno
from sqlalchemy.sql.expression import func
from sqlalchemy.sql.expression import false
from sqlalchemy.exc import IntegrityError
@ -349,25 +348,20 @@ def remote_login_required(f):
def shortentitle_filter(s,nchar=20):
text = s.split()
res = "" # result
sum = 0 # overall length
suml = 0 # overall length
for line in text:
if sum >= 60:
if suml >= 60:
res += '...'
break
# if word longer than 20 chars truncate line and append '...', otherwise add whole word to result
# string, and summarize total length to stop at 60 chars
if len(line) > nchar:
res += line[:(nchar-3)] + '[..] '
sum += nchar+3
suml += nchar+3
else:
res += line + ' '
sum += len(line) + 1
suml += len(line) + 1
return res.strip()
#if len(s) > 20:
# s = s.split(':', 1)[0]
# if len(s) > 20:
# s = textwrap.wrap(s, 20, break_long_words=True)[0] + ' ...'
#return s
@app.template_filter('mimetype')
@ -784,7 +778,7 @@ def feed_series(book_id):
off = request.args.get("offset")
if not off:
off = 0
entries, random, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.series.any(db.Series.id == book_id), [db.Books.series_index])
xml = render_title_template('feed.xml', entries=entries, pagination=pagination)
response = make_response(xml)
@ -889,7 +883,7 @@ def get_metadata_calibre_companion(uuid):
@login_required
def get_email_status_json():
answer=list()
tasks=helper.global_eMailThread.get_taskstatus()
tasks=helper.global_WorkerThread.get_taskstatus()
if not current_user.role_admin():
for task in tasks:
if task['user'] == current_user.nickname:
@ -909,6 +903,32 @@ def get_email_status_json():
if 'starttime' not in task:
task['starttime'] = ""
answer = tasks
'''answer.append({'user': 'Test', 'starttime': '07.3.2018 15:23', 'progress': " 0 %", 'type': 'E-Mail',
'runtime': '0 s', 'rt': 0, 'status': _('Waiting'),'id':1 })
answer.append({'user': 'Admin', 'starttime': '07.3.2018 15:33', 'progress': " 11 %", 'type': 'E-Mail',
'runtime': '2 s', 'rt':2, 'status': _('Waiting'),'id':2})
answer.append({'user': 'Nanny', 'starttime': '8.3.2018 15:23', 'progress': " 2 %", 'type': 'E-Mail',
'runtime': '32 s','rt':32, 'status': _('Waiting'),'id':3})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '7 s','rt':7, 'status': _('Waiting'),'id':4})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '22 s','rt':22, 'status': _('Waiting'),'id':5})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '17 s','rt':17, 'status': _('Waiting'),'id':6})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '72 s','rt':72, 'status': _('Waiting'),'id':7})
answer.append({'user': 'Guest', 'starttime': '19.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '1:07 s','rt':67, 'status': _('Waiting'),'id':8})
answer.append({'user': 'Guest', 'starttime': '18.2.2018 12:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '2:07 s','rt':127, 'status': _('Waiting'),'id':9})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '27 s','rt':27, 'status': _('Waiting'),'id':10})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 16:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '73 s','rt':73, 'status': _('Waiting'),'id':11})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 14:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '71 s','rt':71, 'status': _('Waiting'),'id':12})
answer.append({'user': 'Guest', 'starttime': '09.3.2018 17:23', 'progress': " 44 %", 'type': 'E-Mail',
'runtime': '27 s','rt':27, 'status': _('Waiting'),'id':13})'''
js=json.dumps(answer)
response = make_response(js)
response.headers["Content-Type"] = "application/json; charset=utf-8"
@ -1184,7 +1204,7 @@ def author(book_id, page):
gc = GoodreadsClient(config.config_goodreads_api_key, config.config_goodreads_api_secret)
author_info = gc.find_author(author_name=name)
other_books = get_unique_other_books(entries.all(), author_info.books)
except:
except Exception:
# Skip goodreads, if site is down/inaccessible
app.logger.error('Goodreads website is down/inaccessible')
@ -1424,7 +1444,7 @@ def bookmark(book_id, book_format):
def get_tasks_status():
# if current user admin, show all email, otherwise only own emails
answer=list()
tasks=helper.global_eMailThread.get_taskstatus()
tasks=helper.global_WorkerThread.get_taskstatus()
if not current_user.role_admin():
for task in tasks:
if task['user'] == current_user.nickname:
@ -1492,9 +1512,7 @@ def delete_book(book_id, book_format):
# delete book from Shelfs, Downloads, Read list
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
# ToDo check Downloads.book right
ub.delete_download(book_id)
# ub.session.query(ub.Downloads).filter(ub.Downloads.book_id == book_id).delete()
ub.session.commit()
# check if only this book links to:
@ -2735,7 +2753,6 @@ def configuration_helper(origin):
gdriveError=gdriveError, goodreads=goodreads_support,
title=_(u"Basic Configuration"), page="config")
if reboot_required:
# db.engine.dispose() # ToDo verify correct
ub.session.close()
ub.engine.dispose()
# stop Server
@ -3066,7 +3083,6 @@ def edit_book(book_id):
if is_format:
# Format entry already exists, no need to update the database
app.logger.info('Book format already existing')
pass
else:
db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
db.session.add(db_format)

@ -0,0 +1,501 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import smtplib
import threading
from datetime import datetime
import logging
import time
import socket
import sys
import os
from email.generator import Generator
import web
from flask_babel import gettext as _
# from babel.dates import format_datetime
import re
import gdriveutils as gd
import subprocess
try:
from StringIO import StringIO
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
except ImportError:
from io import StringIO
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email import encoders
from email.utils import formatdate
from email.utils import make_msgid
chunksize = 8192
STAT_WAITING = 0
STAT_FAIL = 1
STAT_STARTED = 2
STAT_FINISH_SUCCESS = 3
TASK_EMAIL = 1
TASK_CONVERT = 2
RET_FAIL = 0
RET_SUCCESS = 1
# For Google Drive, download the book from gdrive to calibrepath (temp dir for books); in both cases read the
# file contents and attach them Base64-encoded to the MIME message
def get_attachment(bookpath, filename):
"""Get file as MIMEBase message"""
calibrepath = web.config.config_calibre_dir
if web.ub.config.config_use_google_drive:
df = gd.getFileFromEbooksFolder(bookpath, filename)
if df:
datafile = os.path.join(calibrepath, bookpath, filename)
if not os.path.exists(os.path.join(calibrepath, bookpath)):
os.makedirs(os.path.join(calibrepath, bookpath))
df.GetContentFile(datafile)
else:
return None
file_ = open(datafile, 'rb')
data = file_.read()
file_.close()
os.remove(datafile)
else:
try:
file_ = open(os.path.join(calibrepath, bookpath, filename), 'rb')
data = file_.read()
file_.close()
except IOError as e:
web.app.logger.exception(e) # traceback.print_exc()
web.app.logger.error(u'The requested file could not be read. Maybe wrong permissions?')
return None
attachment = MIMEBase('application', 'octet-stream')
attachment.set_payload(data)
encoders.encode_base64(attachment)
attachment.add_header('Content-Disposition', 'attachment',
filename=filename)
return attachment
# Class for sending email with the ability to report current transfer progress
class email(smtplib.SMTP):
transferSize = 0
progress = 0
def __init__(self, *args, **kwargs):
smtplib.SMTP.__init__(self, *args, **kwargs)
def data(self, msg):
self.transferSize = len(msg)
(code, resp) = smtplib.SMTP.data(self, msg)
self.progress = 0
return (code, resp)
def send(self, strg):
"""Send `strg' to the server."""
if self.debuglevel > 0:
print('send:', repr(strg), file=sys.stderr)
if hasattr(self, 'sock') and self.sock:
try:
if self.transferSize:
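# send the message in chunks so that self.progress can be updated and reported via getTransferStatus()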
lock=threading.Lock()
lock.acquire()
self.transferSize = len(strg)
lock.release()
for i in range(0, self.transferSize, chunksize):
self.sock.send(strg[i:i+chunksize])
lock.acquire()
self.progress = i
lock.release()
else:
self.sock.sendall(strg)
except socket.error:
self.close()
raise smtplib.SMTPServerDisconnected('Server not connected')
else:
raise smtplib.SMTPServerDisconnected('please run connect() first')
def getTransferStatus(self):
if self.transferSize:
lock2 = threading.Lock()
lock2.acquire()
value = round(float(self.progress) / float(self.transferSize),2)*100
lock2.release()
return str(value) + ' %'
else:
return "100 %"
# Class for sending SSL-encrypted email with the ability to report transfer progress;
# inheriting from smtplib.SMTP_SSL as well makes its __init__ callable on this class
class email_SSL(email, smtplib.SMTP_SSL):
def __init__(self, *args, **kwargs):
smtplib.SMTP_SSL.__init__(self, *args, **kwargs)
# Class for all worker tasks running in the background
class WorkerThread(threading.Thread):
def __init__(self):
self._stopevent = threading.Event()
threading.Thread.__init__(self)
self.status = 0
self.current = 0
self.last = 0
self.queue = list()
self.UIqueue = list()
self.asyncSMTP=None
self.id = 0
# Main thread loop starting the different tasks
def run(self):
while not self._stopevent.isSet():
doLock = threading.Lock()
doLock.acquire()
if self.current != self.last:
doLock.release()
if self.queue[self.current]['typ'] == TASK_EMAIL:
self.send_raw_email()
if self.queue[self.current]['typ'] == TASK_CONVERT:
self.convert_mobi()
self.current += 1
else:
doLock.release()
time.sleep(1)
def stop(self):
self._stopevent.set()
def get_send_status(self):
if self.asyncSMTP:
return self.asyncSMTP.getTransferStatus()
else:
return "0 %"
def delete_completed_tasks(self):
for index, task in reversed(list(enumerate(self.UIqueue))):
if task['progress'] == "100 %":
# delete tasks
self.queue.pop(index)
self.UIqueue.pop(index)
# if we are deleting entries before the current index, adjust the index
self.current -= 1
self.last = len(self.queue)
def get_taskstatus(self):
if self.current < len(self.queue):
if self.queue[self.current]['status'] == STAT_STARTED:
if self.queue[self.current]['typ'] != TASK_CONVERT:
self.UIqueue[self.current]['progress'] = self.get_send_status()
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
return self.UIqueue
def convert_mobi(self):
# convert book, and upload in case of google drive
self.queue[self.current]['status'] = STAT_STARTED
self.UIqueue[self.current]['status'] = _('Started')
self.queue[self.current]['starttime'] = datetime.now()
self.UIqueue[self.current]['formStarttime'] = self.queue[self.current]['starttime']
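# config_ebookconverter == 2 selects calibre's ebook-convert, any other value falls back to kindlegen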
if web.ub.config.config_ebookconverter == 2:
filename = self.convert_calibre()
else:
filename = self.convert_kindlegen()
if web.ub.config.config_use_google_drive:
gd.updateGdriveCalibreFromLocal()
if filename:
self.add_email(_(u'Send to Kindle'), self.queue[self.current]['path'], filename,
self.queue[self.current]['settings'], self.queue[self.current]['kindle'],
self.UIqueue[self.current]['user'], _(u"E-Mail: %s" % self.queue[self.current]['title']))
def convert_kindlegen(self):
error_message = None
file_path = self.queue[self.current]['file_path']
bookid = self.queue[self.current]['bookid']
if not os.path.exists(web.ub.config.config_converterpath):
error_message = _(u"kindlegen binary %(kindlepath)s not found", kindlepath=web.ub.config.config_converterpath)
web.app.logger.error("convert_kindlegen: " + error_message)
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
self.UIqueue[self.current]['message'] = error_message
return
try:
p = subprocess.Popen(
(web.ub.config.config_converterpath + " \"" + file_path + u".epub\"").encode(sys.getfilesystemencoding()),
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception:
error_message = _(u"kindlegen failed, no execution permissions")
web.app.logger.error("convert_kindlegen: " + error_message)
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
self.UIqueue[self.current]['message'] = error_message
return
# Poll process for new output until finished
while True:
nextline = p.stdout.readline()
if nextline == '' and p.poll() is not None:
break
if nextline != "\r\n":
# Format of error message (kindlegen translates its output texts):
# Error(prcgen):E23006: Language not recognized in metadata.The dc:Language field is mandatory.Aborting.
conv_error = re.search(r".*\(.*\):(E\d+):\s(.*)", nextline)
# If an error occurs, always log it
if conv_error:
error_message = _(u"Kindlegen failed with Error %(error)s. Message: %(message)s",
error=conv_error.group(1), message=conv_error.group(2).decode('utf-8'))
web.app.logger.info("convert_kindlegen: " + error_message)
web.app.logger.info(nextline.strip('\r\n'))
else:
web.app.logger.debug(nextline.strip('\r\n'))
check = p.returncode
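# kindlegen is commonly documented to exit with 0 on success, 1 on warnings and 2 on errors,
# so any return code below 2 is treated here as a usable .mobi result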
if not check or check < 2:
cur_book = web.db.session.query(web.db.Books).filter(web.db.Books.id == bookid).first()
new_format = web.db.Data(name=cur_book.data[0].name,book_format="MOBI",
book=bookid,uncompressed_size=os.path.getsize(file_path + ".mobi"))
cur_book.data.append(new_format)
web.db.session.commit()
self.queue[self.current]['path'] = cur_book.path
self.queue[self.current]['title'] = cur_book.title
if web.ub.config.config_use_google_drive:
os.remove(file_path + u".epub")
self.queue[self.current]['status'] = STAT_FINISH_SUCCESS
self.UIqueue[self.current]['status'] = _('Finished')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
return file_path + ".mobi" #, RET_SUCCESS
else:
web.app.logger.info("convert_kindlegen: kindlegen failed with error while converting book")
if not error_message:
error_message = 'kindlegen failed while converting the book'
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
self.UIqueue[self.current]['message'] = error_message
return # error_message, RET_FAIL
def convert_calibre(self):
error_message = None
file_path = self.queue[self.current]['file_path']
bookid = self.queue[self.current]['bookid']
if not os.path.exists(web.ub.config.config_converterpath):
error_message = _(u"Ebook-convert binary %(converterpath)s not found",
converterpath=web.ub.config.config_converterpath)
web.app.logger.error("convert_calibre: " + error_message)
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
self.UIqueue[self.current]['message'] = error_message
return
try:
command = ("\"" + web.ub.config.config_converterpath + "\" \"" + file_path + u".epub\" \""
+ file_path + u".mobi\" " + web.ub.config.config_calibre).encode(sys.getfilesystemencoding())
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
except Exception:
error_message = _(u"Ebook-convert failed, no execution permissions")
web.app.logger.error("convert_calibre: " + error_message)
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
self.UIqueue[self.current]['message'] = error_message
return # error_message, RET_FAIL
# Poll process for new output until finished
while True:
nextline = p.stdout.readline()
if nextline == '' and p.poll() is not None:
break
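# ebook-convert prints progress lines such as "34% ..."; extract the percentage for the UI queue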
progress = re.search(r"(\d+)%\s.*", nextline)
if progress:
self.UIqueue[self.current]['progress'] = progress.group(1) + '%'
web.app.logger.debug(nextline.strip('\r\n').decode(sys.getfilesystemencoding()))
check = p.returncode
if check == 0:
cur_book = web.db.session.query(web.db.Books).filter(web.db.Books.id == bookid).first()
new_format = web.db.Data(name=cur_book.data[0].name,book_format="MOBI",
book=bookid,uncompressed_size=os.path.getsize(file_path + ".mobi"))
cur_book.data.append(new_format)
web.db.session.commit()
self.queue[self.current]['path'] = cur_book.path
self.queue[self.current]['title'] = cur_book.title
if web.ub.config.config_use_google_drive:
os.remove(file_path + u".epub")
self.queue[self.current]['status'] = STAT_FINISH_SUCCESS
self.UIqueue[self.current]['status'] = _('Finished')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
return file_path + ".mobi" # , RET_SUCCESS
else:
web.app.logger.info("convert_calibre: Ebook-convert failed with error while converting book")
if not error_message:
error_message = 'Ebook-convert failed while converting the book'
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
self.UIqueue[self.current]['message'] = error_message
return # error_message, RET_FAIL
def add_convert(self, file_path, bookid, user_name, type, settings, kindle_mail):
addLock = threading.Lock()
addLock.acquire()
if self.last >= 20:
self.delete_completed_tasks()
# progress, runtime, and status = 0
self.queue.append({'file_path':file_path, 'bookid':bookid, 'starttime': 0, 'kindle':kindle_mail,
'status': STAT_WAITING, 'typ': TASK_CONVERT, 'settings':settings})
self.UIqueue.append({'user': user_name, 'formStarttime': '', 'progress': " 0 %", 'type': type,
'runtime': '0 s', 'status': _('Waiting'),'id': self.id } )
self.id += 1
self.last=len(self.queue)
addLock.release()
def add_email(self, subject, filepath, attachment, settings, recipient, user_name, typ):
# if more than 20 entries in the list, clean the list
addLock = threading.Lock()
addLock.acquire()
if self.last >= 20:
self.delete_completed_tasks()
# progress, runtime, and status = 0
self.queue.append({'subject':subject, 'attachment':attachment, 'filepath':filepath,
'settings':settings, 'recipent':recipient, 'starttime': 0,
'status': STAT_WAITING, 'typ': TASK_EMAIL})
self.UIqueue.append({'user': user_name, 'formStarttime': '', 'progress': " 0 %", 'type': typ,
'runtime': '0 s', 'status': _('Waiting'),'id': self.id })
self.id += 1
self.last=len(self.queue)
addLock.release()
def send_raw_email(self):
obj=self.queue[self.current]
# create MIME message
msg = MIMEMultipart()
msg['Subject'] = self.queue[self.current]['subject']
msg['Message-Id'] = make_msgid('calibre-web')
msg['Date'] = formatdate(localtime=True)
text = _(u'This email has been sent via calibre web.')
msg.attach(MIMEText(text.encode('UTF-8'), 'plain', 'UTF-8'))
if obj['attachment']:
result = get_attachment(obj['filepath'], obj['attachment'])
if result:
msg.attach(result)
else:
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.queue[self.current]['starttime'] = datetime.now()
self.UIqueue[self.current]['formStarttime'] = self.queue[self.current]['starttime']
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
return
msg['From'] = obj['settings']["mail_from"]
msg['To'] = obj['recipent']
use_ssl = int(obj['settings'].get('mail_use_ssl', 0))
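# mail_use_ssl: 2 = SSL/TLS connection, 1 = STARTTLS, any other value = plain SMTP (see branches below)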
# convert MIME message to string
fp = StringIO()
gen = Generator(fp, mangle_from_=False)
gen.flatten(msg)
msg = fp.getvalue()
# send email
try:
timeout = 600  # set timeout to 10 minutes
org_stderr = sys.stderr
sys.stderr = StderrLogger()
self.queue[self.current]['status'] = STAT_STARTED
self.UIqueue[self.current]['status'] = _('Started')
self.queue[self.current]['starttime'] = datetime.now()
self.UIqueue[self.current]['formStarttime'] = self.queue[self.current]['starttime']
if use_ssl == 2:
self.asyncSMTP = email_SSL(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout=timeout)
else:
self.asyncSMTP = email(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout=timeout)
# tie SMTP debug output to the configured logging level
if web.ub.config.config_log_level != logging.DEBUG:
self.asyncSMTP.set_debuglevel(0)
else:
self.asyncSMTP.set_debuglevel(1)
if use_ssl == 1:
self.asyncSMTP.starttls()
if obj['settings']["mail_password"]:
self.asyncSMTP.login(str(obj['settings']["mail_login"]), str(obj['settings']["mail_password"]))
self.asyncSMTP.sendmail(obj['settings']["mail_from"], obj['recipent'], msg)
self.asyncSMTP.quit()
self.queue[self.current]['status'] = STAT_FINISH_SUCCESS
self.UIqueue[self.current]['status'] = _('Finished')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
sys.stderr = org_stderr
except (socket.error, smtplib.SMTPRecipientsRefused, smtplib.SMTPException) as e:
self.queue[self.current]['status'] = STAT_FAIL
self.UIqueue[self.current]['status'] = _('Failed')
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
datetime.now() - self.queue[self.current]['starttime'])
web.app.logger.error(e)
sys.stderr = org_stderr
# return None
def _formatRuntime(self, runtime):
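# e.g. a runtime of 83 seconds: str(timedelta) is '0:01:23' -> parts ['0', '01', '23'] -> '1:23 s'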
self.UIqueue[self.current]['rt'] = runtime.total_seconds()
val = re.split(r'[:.]', str(runtime))[0:3]
erg = list()
for v in val:
if int(v) > 0:
erg.append(v)
retVal = (':'.join(erg)).lstrip('0') + ' s'
if retVal == ' s':
retVal = '0 s'
return retVal
class StderrLogger(object):
buffer = ''
def __init__(self):
self.logger = web.app.logger
def write(self, message):
if message == '\n':
self.logger.debug(self.buffer)
self.buffer = ''
else:
self.buffer += message
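# Illustrative usage sketch: a minimal example of how this worker could be driven, based only on
# the methods defined above; server settings, book id, path, user and Kindle address are made-up values
global_WorkerThread = WorkerThread()
global_WorkerThread.start()
settings = {'mail_server': 'smtp.example.com', 'mail_port': 25, 'mail_use_ssl': 0,
'mail_login': 'user', 'mail_password': '', 'mail_from': 'calibre-web@example.com'}
# queue a conversion; on success the finished .mobi is mailed automatically via add_email() in convert_mobi()
global_WorkerThread.add_convert(u'/books/Author/Title (42)/Title - Author', 42, u'admin',
_(u'Convert to Kindle'), settings, u'someone@kindle.com')
# the tasks page polls the UI queue; every entry carries user, type, status, progress, runtime and id
tasks = global_WorkerThread.get_taskstatus()
# stop the background thread, e.g. on server shutdown
global_WorkerThread.stop()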