From 205a0654c00275cbdd77efd00c4f39020fd53e62 Mon Sep 17 00:00:00 2001
From: Emanuel Hoogeveen <55082669+ehoogeveen-medweb@users.noreply.github.com>
Date: Thu, 23 Dec 2021 03:29:03 +0100
Subject: [PATCH] Add option `--file-access-retries` (#2066)

Closes #517
Authored by: ehoogeveen-medweb
---
 README.md                     |  2 ++
 yt_dlp/YoutubeDL.py           |  4 ++--
 yt_dlp/__init__.py            |  3 +++
 yt_dlp/downloader/common.py   | 18 ++++++++++++++++++
 yt_dlp/downloader/external.py |  7 +++----
 yt_dlp/downloader/fragment.py |  9 ++++-----
 yt_dlp/downloader/http.py     |  3 +--
 yt_dlp/options.py             |  4 ++++
 8 files changed, 37 insertions(+), 13 deletions(-)

diff --git a/README.md b/README.md
index cd54986d0..381e1b263 100644
--- a/README.md
+++ b/README.md
@@ -463,6 +463,8 @@ You can also fork the project on github and run your fork's [build workflow](.gi
                                     video data is re-extracted (e.g. 100K)
     -R, --retries RETRIES           Number of retries (default is 10), or
                                     "infinite"
+    --file-access-retries RETRIES   Number of times to retry on file access error
+                                    (default is 10), or "infinite"
     --fragment-retries RETRIES      Number of retries for a fragment (default
                                     is 10), or "infinite" (DASH, hlsnative and
                                     ISM)
diff --git a/yt_dlp/YoutubeDL.py b/yt_dlp/YoutubeDL.py
index 274a4a78a..3bbde9b00 100644
--- a/yt_dlp/YoutubeDL.py
+++ b/yt_dlp/YoutubeDL.py
@@ -450,8 +450,8 @@ class YoutubeDL(object):
     The following parameters are not used by YoutubeDL itself, they are used by
     the downloader (see yt_dlp/downloader/common.py):
     nopart, updatetime, buffersize, ratelimit, throttledratelimit, min_filesize,
-    max_filesize, test, noresizebuffer, retries, fragment_retries, continuedl,
-    noprogress, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
+    max_filesize, test, noresizebuffer, retries, file_access_retries, fragment_retries,
+    continuedl, noprogress, xattr_set_filesize, hls_use_mpegts, http_chunk_size,
     external_downloader_args, concurrent_fragment_downloads.
 
     The following options are used by the post processors:
diff --git a/yt_dlp/__init__.py b/yt_dlp/__init__.py
index 7de640b10..da7283300 100644
--- a/yt_dlp/__init__.py
+++ b/yt_dlp/__init__.py
@@ -222,6 +222,8 @@ def _real_main(argv=None):
         return parsed_retries
     if opts.retries is not None:
         opts.retries = parse_retries(opts.retries)
+    if opts.file_access_retries is not None:
+        opts.file_access_retries = parse_retries(opts.file_access_retries, 'file access ')
     if opts.fragment_retries is not None:
         opts.fragment_retries = parse_retries(opts.fragment_retries, 'fragment ')
     if opts.extractor_retries is not None:
@@ -673,6 +675,7 @@ def _real_main(argv=None):
         'throttledratelimit': opts.throttledratelimit,
         'overwrites': opts.overwrites,
         'retries': opts.retries,
+        'file_access_retries': opts.file_access_retries,
         'fragment_retries': opts.fragment_retries,
         'extractor_retries': opts.extractor_retries,
         'skip_unavailable_fragments': opts.skip_unavailable_fragments,
diff --git a/yt_dlp/downloader/common.py b/yt_dlp/downloader/common.py
index 9f6577a12..37321e34b 100644
--- a/yt_dlp/downloader/common.py
+++ b/yt_dlp/downloader/common.py
@@ -4,12 +4,14 @@ import os
 import re
 import time
 import random
+import errno
 
 from ..utils import (
     decodeArgument,
     encodeFilename,
     error_to_compat_str,
     format_bytes,
+    sanitize_open,
     shell_quote,
     timeconvert,
     timetuple_from_msec,
@@ -39,6 +41,7 @@ class FileDownloader(object):
     ratelimit:          Download speed limit, in bytes/sec.
     throttledratelimit: Assume the download is being throttled below this speed (bytes/sec)
     retries:            Number of times to retry for HTTP error 5xx
+    file_access_retries:   Number of times to retry on file access error
     buffersize:         Size of download buffer in bytes.
     noresizebuffer:     Do not automatically resize the download buffer.
     continuedl:         Try to continue downloads if possible.
@@ -207,6 +210,21 @@ class FileDownloader(object):
     def ytdl_filename(self, filename):
         return filename + '.ytdl'
 
+    def sanitize_open(self, filename, open_mode):
+        file_access_retries = self.params.get('file_access_retries', 10)
+        retry = 0
+        while True:
+            try:
+                return sanitize_open(filename, open_mode)
+            except (IOError, OSError) as err:
+                retry = retry + 1
+                if retry > file_access_retries or err.errno not in (errno.EACCES,):
+                    raise
+                self.to_screen(
+                    '[download] Got file access error. Retrying (attempt %d of %s) ...'
+                    % (retry, self.format_retries(file_access_retries)))
+                time.sleep(0.01)
+
     def try_rename(self, old_filename, new_filename):
         if old_filename == new_filename:
             return
diff --git a/yt_dlp/downloader/external.py b/yt_dlp/downloader/external.py
index da69423f7..84738221b 100644
--- a/yt_dlp/downloader/external.py
+++ b/yt_dlp/downloader/external.py
@@ -22,7 +22,6 @@ from ..utils import (
     handle_youtubedl_headers,
     check_executable,
     Popen,
-    sanitize_open,
 )
 
 
@@ -144,11 +143,11 @@ class ExternalFD(FragmentFD):
             return -1
 
         decrypt_fragment = self.decrypter(info_dict)
-        dest, _ = sanitize_open(tmpfilename, 'wb')
+        dest, _ = self.sanitize_open(tmpfilename, 'wb')
         for frag_index, fragment in enumerate(info_dict['fragments']):
             fragment_filename = '%s-Frag%d' % (tmpfilename, frag_index)
             try:
-                src, _ = sanitize_open(fragment_filename, 'rb')
+                src, _ = self.sanitize_open(fragment_filename, 'rb')
             except IOError as err:
                 if skip_unavailable_fragments and frag_index > 1:
                     self.report_skip_fragment(frag_index, err)
@@ -290,7 +289,7 @@ class Aria2cFD(ExternalFD):
             for frag_index, fragment in enumerate(info_dict['fragments']):
                 fragment_filename = '%s-Frag%d' % (os.path.basename(tmpfilename), frag_index)
                 url_list.append('%s\n\tout=%s' % (fragment['url'], fragment_filename))
-            stream, _ = sanitize_open(url_list_file, 'wb')
+            stream, _ = self.sanitize_open(url_list_file, 'wb')
             stream.write('\n'.join(url_list).encode('utf-8'))
             stream.close()
             cmd += ['-i', url_list_file]
diff --git a/yt_dlp/downloader/fragment.py b/yt_dlp/downloader/fragment.py
index 79c6561c7..c1b90e2a2 100644
--- a/yt_dlp/downloader/fragment.py
+++ b/yt_dlp/downloader/fragment.py
@@ -24,7 +24,6 @@ from ..utils import (
     DownloadError,
     error_to_compat_str,
     encodeFilename,
-    sanitize_open,
     sanitized_Request,
 )
 
@@ -96,7 +95,7 @@ class FragmentFD(FileDownloader):
 
     def _read_ytdl_file(self, ctx):
         assert 'ytdl_corrupt' not in ctx
-        stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
+        stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'r')
         try:
             ytdl_data = json.loads(stream.read())
             ctx['fragment_index'] = ytdl_data['downloader']['current_fragment']['index']
@@ -108,7 +107,7 @@ class FragmentFD(FileDownloader):
             stream.close()
 
     def _write_ytdl_file(self, ctx):
-        frag_index_stream, _ = sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
+        frag_index_stream, _ = self.sanitize_open(self.ytdl_filename(ctx['filename']), 'w')
         try:
             downloader = {
                 'current_fragment': {
@@ -140,7 +139,7 @@ class FragmentFD(FileDownloader):
         return True, self._read_fragment(ctx)
 
     def _read_fragment(self, ctx):
-        down, frag_sanitized = sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
+        down, frag_sanitized = self.sanitize_open(ctx['fragment_filename_sanitized'], 'rb')
         ctx['fragment_filename_sanitized'] = frag_sanitized
         frag_content = down.read()
         down.close()
@@ -216,7 +215,7 @@ class FragmentFD(FileDownloader):
                 self._write_ytdl_file(ctx)
                 assert ctx['fragment_index'] == 0
 
-        dest_stream, tmpfilename = sanitize_open(tmpfilename, open_mode)
+        dest_stream, tmpfilename = self.sanitize_open(tmpfilename, open_mode)
 
         ctx.update({
             'dl': dl,
diff --git a/yt_dlp/downloader/http.py b/yt_dlp/downloader/http.py
index 6290884a8..34a1eb59b 100644
--- a/yt_dlp/downloader/http.py
+++ b/yt_dlp/downloader/http.py
@@ -16,7 +16,6 @@ from ..utils import (
     ContentTooShortError,
     encodeFilename,
     int_or_none,
-    sanitize_open,
     sanitized_Request,
     ThrottledDownload,
     write_xattr,
@@ -263,7 +262,7 @@ class HttpFD(FileDownloader):
             # Open destination file just in time
             if ctx.stream is None:
                 try:
-                    ctx.stream, ctx.tmpfilename = sanitize_open(
+                    ctx.stream, ctx.tmpfilename = self.sanitize_open(
                         ctx.tmpfilename, ctx.open_mode)
                     assert ctx.stream is not None
                     ctx.filename = self.undo_temp_name(ctx.tmpfilename)
diff --git a/yt_dlp/options.py b/yt_dlp/options.py
index b9e41d23f..5be236c53 100644
--- a/yt_dlp/options.py
+++ b/yt_dlp/options.py
@@ -681,6 +681,10 @@ def parseOpts(overrideArguments=None):
         '-R', '--retries',
         dest='retries', metavar='RETRIES', default=10,
         help='Number of retries (default is %default), or "infinite"')
+    downloader.add_option(
+        '--file-access-retries',
+        dest='file_access_retries', metavar='RETRIES', default=10,
+        help='Number of times to retry on file access error (default is %default), or "infinite"')
    downloader.add_option(
         '--fragment-retries',
         dest='fragment_retries', metavar='RETRIES', default=10,
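
Usage note (not part of the patch): a minimal sketch of how the new option maps onto the
`file_access_retries` downloader parameter when embedding yt-dlp as a library, assuming a
build that includes this change. The URL below is a placeholder; on the command line the
equivalent would be `yt-dlp --file-access-retries 20 URL`.

# Sketch only, assumes a yt-dlp version containing this patch.
import yt_dlp

ydl_opts = {
    'retries': 10,                # existing option: retries for HTTP 5xx errors
    'file_access_retries': 20,    # new option: retries on file access errors (default is 10)
    # 'file_access_retries': float('inf'),  # what "--file-access-retries infinite" parses to
}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://example.com/video'])  # placeholder URL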