diff --git a/plugins/newsdownloader.koplugin/epubdownloadbackend.lua b/plugins/newsdownloader.koplugin/epubdownloadbackend.lua index bffda222c..a7b908df6 100644 --- a/plugins/newsdownloader.koplugin/epubdownloadbackend.lua +++ b/plugins/newsdownloader.koplugin/epubdownloadbackend.lua @@ -20,10 +20,10 @@ local EpubDownloadBackend = { } local max_redirects = 5; --prevent infinite redirects -function EpubDownloadBackend:download(url, path) +function EpubDownloadBackend:download(url, path, include_images) logger.dbg("EpubDownloadBackend:download") -- self:createEpub(path, url) - self:createEpubWithUI(path, url, function(success) + self:createEpubWithUI(path, url, include_images, function(success) if (success) then logger.dbg("createEpubWithUI success") else @@ -189,9 +189,8 @@ local ext_to_mimetype = { } -- Create an epub file (with possibly images) -function EpubDownloadBackend:createEpub(epub_path, url) +function EpubDownloadBackend:createEpub(epub_path, url, include_images) logger.dbg("EpubDownloadBackend:createEpub(", epub_path, ",", url, ")") - local with_images = true -- Use Trapper to display progress and ask questions through the UI. -- We need to have been Trapper.wrap()'ed for UI to be used, otherwise -- Trapper:info() and Trapper:confirm() will just use logger. @@ -298,7 +297,6 @@ function EpubDownloadBackend:createEpub(epub_path, url) logger.dbg("Images found in html:", images) -- See what to do with images - local include_images = true local use_img_2x = false if not include_images then -- Remove img tags to avoid little blank squares of missing images @@ -532,7 +530,7 @@ end -- Wrap EpubDownloadBackend:createEpub() with UI progress info, provided -- by Trapper module. 
-function EpubDownloadBackend:createEpubWithUI(epub_path, url, result_callback) +function EpubDownloadBackend:createEpubWithUI(epub_path, url, include_images, result_callback) logger.dbg("EpubDownloadBackend:createEpubWithUI(", epub_path, ",", url, ",", title, ", ...)") -- To do any UI interaction while building the EPUB, we need -- to use a coroutine, so that our code can be suspended while waiting @@ -545,7 +543,7 @@ function EpubDownloadBackend:createEpubWithUI(epub_path, url, result_callback) -- Trapper) would just abort (no reader crash, no error logged). -- So we use pcall to catch any errors, log it, and report -- the failure via result_callback. - local ok, success = pcall(self.createEpub, self, epub_path, url) + local ok, success = pcall(self.createEpub, self, epub_path, url, include_images) if ok and success then result_callback(true) else diff --git a/plugins/newsdownloader.koplugin/feed_config.lua b/plugins/newsdownloader.koplugin/feed_config.lua index df265117b..1c17ced8f 100644 --- a/plugins/newsdownloader.koplugin/feed_config.lua +++ b/plugins/newsdownloader.koplugin/feed_config.lua @@ -17,6 +17,10 @@ return {--do NOT change this line -- 'download_full_article=false' - means use only feed description to create feeds (usually only beginning of the article) -- default value is 'true' (if no 'download_full_article' entry) + -- 'include_images=true' - means download any images on the page and include them in the article + -- 'include_images=false' - means ignore any images, only download the text (faster download, smaller file sizes) + -- default value is 'false' (if no 'include_images' entry) + -- comment out line ("--" at line start) to stop downloading source diff --git a/plugins/newsdownloader.koplugin/main.lua b/plugins/newsdownloader.koplugin/main.lua index d3f255331..6b6e26281 100644 --- a/plugins/newsdownloader.koplugin/main.lua +++ b/plugins/newsdownloader.koplugin/main.lua @@ -181,12 +181,13 @@ function NewsDownloader:loadConfigAndProcessFeeds()
local url = feed[1] local limit = feed.limit local download_full_article = feed.download_full_article == nil or feed.download_full_article + local include_images = feed.include_images if url and limit then info = InfoMessage:new{ text = T(_("Processing %1/%2:\n%3"), idx, total_feed_entries, url) } UIManager:show(info) -- processFeedSource is a blocking call, so manually force a UI refresh beforehand UIManager:forceRePaint() - NewsDownloader:processFeedSource(url, tonumber(limit), unsupported_feeds_urls, download_full_article) + NewsDownloader:processFeedSource(url, tonumber(limit), unsupported_feeds_urls, download_full_article, include_images) UIManager:close(info) else logger.warn('NewsDownloader: invalid feed config entry', feed) @@ -213,7 +214,7 @@ function NewsDownloader:loadConfigAndProcessFeeds() NewsDownloader:afterWifiAction() end -function NewsDownloader:processFeedSource(url, limit, unsupported_feeds_urls, download_full_article) +function NewsDownloader:processFeedSource(url, limit, unsupported_feeds_urls, download_full_article, include_images) local ok, response = pcall(function() return DownloadBackend:getResponseAsString(url) @@ -233,11 +234,11 @@ function NewsDownloader:processFeedSource(url, limit, unsupported_feeds_urls, do if is_atom then ok = pcall(function() - return self:processAtom(feeds, limit, download_full_article) + return self:processAtom(feeds, limit, download_full_article, include_images) end) elseif is_rss then ok = pcall(function() - return self:processRSS(feeds, limit, download_full_article) + return self:processRSS(feeds, limit, download_full_article, include_images) end) end if not ok or (not is_rss and not is_atom) then @@ -263,7 +264,7 @@ function NewsDownloader:deserializeXMLString(xml_str) return xmlhandler.root end -function NewsDownloader:processAtom(feeds, limit, download_full_article) +function NewsDownloader:processAtom(feeds, limit, download_full_article, include_images) local feed_output_dir = string.format("%s%s/", 
news_download_dir_path, util.replaceInvalidChars(getFeedTitle(feeds.feed.title))) @@ -276,14 +277,14 @@ function NewsDownloader:processAtom(feeds, limit, download_full_article) break end if download_full_article then - self:downloadFeed(feed, feed_output_dir) + self:downloadFeed(feed, feed_output_dir, include_images) else self:createFromDescription(feed, feed.context, feed_output_dir) end end end -function NewsDownloader:processRSS(feeds, limit, download_full_article) +function NewsDownloader:processRSS(feeds, limit, download_full_article, include_images) local feed_output_dir = ("%s%s/"):format( news_download_dir_path, util.replaceInvalidChars(util.htmlEntitiesToUtf8(feeds.rss.channel.title))) if not lfs.attributes(feed_output_dir, "mode") then @@ -295,7 +296,7 @@ function NewsDownloader:processRSS(feeds, limit, download_full_article) break end if download_full_article then - self:downloadFeed(feed, feed_output_dir) + self:downloadFeed(feed, feed_output_dir, include_images) else self:createFromDescription(feed, feed.description, feed_output_dir) end @@ -322,14 +323,14 @@ local function getTitleWithDate(feed) return title end -function NewsDownloader:downloadFeed(feed, feed_output_dir) +function NewsDownloader:downloadFeed(feed, feed_output_dir, include_images) local link = getFeedLink(feed.link) local news_dl_path = ("%s%s%s"):format(feed_output_dir, getTitleWithDate(feed), file_extension) logger.dbg("NewsDownloader: News file will be stored to :", news_dl_path) - DownloadBackend:download(link, news_dl_path) + DownloadBackend:download(link, news_dl_path, include_images) end function NewsDownloader:createFromDescription(feed, context, feed_output_dir)