NewsDownloader: Process articles sequentially

Lift the Trapper:wrap call out of the individual article processing code,
so that articles are processed one by one, in order to:
* Avoid concurrent progress updates fighting over the UI dialog
* Avoid trying to download many images at the same time
pull/4987/head
Tom Hall authored 5 years ago; committed by Frans de Jonge
parent cad4d25d24
commit 8e23d2a937
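
In outline, the change replaces one Trapper coroutine per article with a single coroutine around the whole batch, so the next article only starts once the previous one has finished. A minimal sketch of the before/after patterns, assuming a hypothetical articles list and downloadArticle() helper that are not part of this commit:

    local Trapper = require("ui/trapper")

    -- Before: each article got its own Trapper:wrap(), so several
    -- coroutines could interleave, each posting progress InfoMessages
    -- over the same dialog and fetching images concurrently.
    for _, article in ipairs(articles) do
        Trapper:wrap(function()
            downloadArticle(article) -- hypothetical helper
        end)
    end

    -- After: one Trapper:wrap() around the loop; articles (and their
    -- images) are downloaded strictly one at a time.
    Trapper:wrap(function()
        for _, article in ipairs(articles) do
            downloadArticle(article) -- hypothetical helper
        end
        Trapper:reset() -- close the last InfoMessage
    end)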

@@ -22,14 +22,7 @@ local max_redirects = 5; --prevent infinite redirects
 function EpubDownloadBackend:download(url, path, include_images)
     logger.dbg("EpubDownloadBackend:download")
-    -- self:createEpub(path, url)
-    self:createEpubWithUI(path, url, include_images, function(success)
-        if (success) then
-            logger.dbg("createEpubWithUI success")
-        else
-            logger.dbg("createEpubWithUI failure")
-        end
-    end)
+    self:createEpub(path, url, include_images)
 end
 
 -- Codes that getUrlContent may get from requester.request()
@@ -505,9 +498,6 @@ function EpubDownloadBackend:createEpub(epub_path, url, include_images)
         UI:info(_("Packing EPUB…"))
     end
     epub:close()
-    -- This was nearly a no-op, so sleep a bit to make that progress step seen
-    ffiutil.usleep(300000)
-    UI:reset() -- close last InfoMessage
 
     if cancelled then
         -- Build was cancelled, remove half created .epub
@@ -528,30 +518,4 @@ function EpubDownloadBackend:createEpub(epub_path, url, include_images)
     return true
 end
 
--- Wrap EpubDownloadBackend:createEpub() with UI progress info, provided
--- by Trapper module.
-function EpubDownloadBackend:createEpubWithUI(epub_path, url, include_images, result_callback)
-    logger.dbg("EpubDownloadBackend:createEpubWithUI(", epub_path, ",", url, ",", title, ", ...)")
-    -- To do any UI interaction while building the EPUB, we need
-    -- to use a coroutine, so that our code can be suspended while waiting
-    -- for user interaction, and resumed by UI widgets callbacks.
-    -- All this is hidden and done by Trapper with a simple API.
-    local Trapper = require("ui/trapper")
-    Trapper:wrap(function()
-        Trapper:setPausedText("Download paused")
-        -- If errors in EpubDownloadBackend:createEpub(), the coroutine (used by
-        -- Trapper) would just abort (no reader crash, no error logged).
-        -- So we use pcall to catch any errors, log it, and report
-        -- the failure via result_callback.
-        local ok, success = pcall(self.createEpub, self, epub_path, url, include_images)
-        if ok and success then
-            result_callback(true)
-        else
-            Trapper:reset() -- close any last widget not cleaned if error
-            logger.warn("EpubDownloadBackend.createEpub pcall:", ok, success)
-            result_callback(false)
-        end
-    end)
-end
-
 return EpubDownloadBackend
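
With createEpubWithUI() gone, its pcall protection no longer wraps createEpub(), so a caller that still wants the old error handling has to reproduce it inline, under whatever Trapper:wrap() coroutine now encloses it. A hedged sketch of that pattern; the surrounding caller and its locals (epub_path, url, include_images) are assumed, not shown in this diff:

    local logger = require("logger")
    local Trapper = require("ui/trapper")

    -- Assumed to run inside an enclosing Trapper:wrap() coroutine,
    -- such as the one processFeedSource() sets up below.
    local ok, success = pcall(EpubDownloadBackend.createEpub, EpubDownloadBackend,
        epub_path, url, include_images)
    if not (ok and success) then
        Trapper:reset() -- close any widget left open by the failed build
        logger.warn("EpubDownloadBackend.createEpub pcall:", ok, success)
    end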

@@ -232,18 +232,22 @@ function NewsDownloader:processFeedSource(url, limit, unsupported_feeds_urls, download_full_article, include_images)
     local is_rss = feeds.rss and feeds.rss.channel and feeds.rss.channel.title and feeds.rss.channel.item and feeds.rss.channel.item[1] and feeds.rss.channel.item[1].title and feeds.rss.channel.item[1].link
     local is_atom = feeds.feed and feeds.feed.title and feeds.feed.entry[1] and feeds.feed.entry[1].title and feeds.feed.entry[1].link
 
-    if is_atom then
-        ok = pcall(function()
-            return self:processAtom(feeds, limit, download_full_article, include_images)
-        end)
-    elseif is_rss then
-        ok = pcall(function()
-            return self:processRSS(feeds, limit, download_full_article, include_images)
-        end)
-    end
-    if not ok or (not is_rss and not is_atom) then
-        table.insert(unsupported_feeds_urls, url)
-    end
+    local Trapper = require("ui/trapper")
+    Trapper:wrap(function()
+        if is_atom then
+            ok = pcall(function()
+                return self:processAtom(feeds, limit, download_full_article, include_images)
+            end)
+        elseif is_rss then
+            ok = pcall(function()
+                return self:processRSS(feeds, limit, download_full_article, include_images)
+            end)
+        end
+        if not ok or (not is_rss and not is_atom) then
+            table.insert(unsupported_feeds_urls, url)
+        end
+        Trapper:reset() -- close last InfoMessage
+    end)
 end
 
 function NewsDownloader:deserializeXMLString(xml_str)
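
One detail the moved code preserves: inside a Trapper coroutine, an uncaught error simply aborts the coroutine, with no reader crash and nothing logged (as the removed createEpubWithUI() comments noted), which is why both branches keep their pcall so a failed feed still lands in unsupported_feeds_urls. A minimal sketch of that failure mode, with a deliberately failing body for illustration only:

    local logger = require("logger")
    local Trapper = require("ui/trapper")

    Trapper:wrap(function()
        -- Without pcall, error() here would silently kill the coroutine:
        -- no crash, no log, no chance to record the failure.
        local ok = pcall(function()
            error("simulated feed parsing failure") -- illustrative only
        end)
        if not ok then
            logger.warn("feed processing failed, recording as unsupported")
        end
        Trapper:reset() -- close last InfoMessage
    end)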
