This commit is contained in:
Cyberes 2023-01-21 17:03:26 -07:00
parent f4cf3f7a9b
commit c8057e47d5
No known key found for this signature in database
GPG Key ID: 194A1C358AACFC39
3 changed files with 75 additions and 53 deletions

View File

@ -16,6 +16,9 @@ from process.funcs import get_silent_logger, remove_duplicates_from_playlist, re
from process.threads import download_video
from ydl.files import create_directories, resolve_path
# logging.basicConfig(level=1000)
# logging.getLogger().setLevel(1000)
urlRegex = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
@ -32,7 +35,7 @@ parser.add_argument('--max-size', type=int, default=1100, help='Max allowed size
parser.add_argument('--rm-cache', '-r', action='store_true', help='Delete the yt-dlp cache on start.')
parser.add_argument('--threads', type=int, default=cpu_count(), help='How many download processes to use.')
parser.add_argument('--daemon', '-d', action='store_true', help="Run in daemon mode. Disables progress bars sleeps for the amount of time specified in --sleep.")
parser.add_argument('--sleep', type=int, default=60, help='How many minutes to sleep when in daemon mode.')
parser.add_argument('--sleep', type=float, default=60, help='How many minutes to sleep when in daemon mode.')
args = parser.parse_args()
if args.threads <= 0:
@ -67,6 +70,9 @@ if not args.no_update:
if args.rm_cache:
subprocess.run('yt-dlp --rm-cache-dir', shell=True)
if args.daemon:
print('Running in daemon mode.')
log_dir = args.output / 'logs'
create_directories(args.output, log_dir)
@ -85,13 +91,20 @@ start_time = time.time()
manager = Manager()
# Find existing videos to skip.
downloaded_videos = set()
download_archive_file = args.output / 'download-archive.log'
if not download_archive_file.exists():
download_archive_file.touch()
with open(download_archive_file, 'r') as file:
downloaded_videos.update(([line.rstrip() for line in file]))
def load_existing_videos(archive=None):
    """Return the set of already-downloaded video IDs from the archive file.

    Creates the archive file if it does not exist yet, so the first run
    starts from an empty set instead of raising FileNotFoundError.

    Args:
        archive: Optional path-like object pointing at the archive file.
            Defaults to the module-level ``download_archive_file`` so
            existing callers are unaffected.

    Returns:
        set[str]: one entry per non-blank-stripped line of the archive.
    """
    # Fall back to the module global only when no explicit path is given,
    # keeping the original call signature working unchanged.
    target = download_archive_file if archive is None else archive
    if not target.exists():
        target.touch()
    with open(target, 'r') as archive_handle:
        # Set comprehension: no intermediate list, duplicates collapse for free.
        return {line.rstrip() for line in archive_handle}
downloaded_videos = load_existing_videos()
print('Found', len(downloaded_videos), 'downloaded videos.')
# Create this object AFTER reading in the download_archive.
@ -179,7 +192,7 @@ main_opts = dict(ydl_opts, **{'logger': ytdl_logger()})
yt_dlp = ydl.YDL(main_opts)
# Init bars
playlist_bar = tqdm(position=1, desc='Playlist', disable=args.daemon, miniters=0, mininterval=0, maxinterval=1)
playlist_bar = tqdm(position=1, desc='Playlist', disable=args.daemon)
video_bars = manager.list()
if not args.daemon:
for i in range(args.threads):
@ -188,51 +201,58 @@ if not args.daemon:
manager.Lock()
])
for i, target_url in tqdm(enumerate(url_list), total=len(url_list), position=0, desc='Inputs', disable=args.daemon):
logger.info('Fetching playlist...')
playlist = yt_dlp.playlist_contents(target_url)
playlist['entries'] = remove_duplicates_from_playlist(playlist['entries'])
while True:
for i, target_url in tqdm(enumerate(url_list), total=len(url_list), position=0, desc='Inputs', disable=args.daemon):
logger.info('Fetching playlist...')
playlist = yt_dlp.playlist_contents(target_url)
playlist['entries'] = remove_duplicates_from_playlist(playlist['entries'])
log_info_twice(f"Downloading item: '{playlist['title']}' {target_url}")
log_info_twice(f"Downloading item: '{playlist['title']}' {target_url}")
playlist_bar.total = len(playlist['entries'])
playlist_bar.set_description(playlist['title'])
playlist_bar.total = len(playlist['entries'])
playlist_bar.set_description(playlist['title'])
# Remove already downloaded files from the to-do list.
download_queue = []
s = set()
for p, video in enumerate(playlist['entries']):
if video['id'] not in downloaded_videos and video['id'] not in s:
download_queue.append(video)
s.add(video['id'])
playlist_bar.update(len(downloaded_videos))
# Remove already downloaded files from the to-do list.
download_queue = []
s = set()
for p, video in enumerate(playlist['entries']):
if video['id'] not in downloaded_videos and video['id'] not in s:
download_queue.append(video)
s.add(video['id'])
playlist_bar.update(len(downloaded_videos))
if len(download_queue): # Don't mess with multiprocessing if all videos are already downloaded
with Pool(processes=args.threads) as pool:
status_bar.set_description_str('=' * os.get_terminal_size()[0])
logger.info('Starting downloads...')
for result in pool.imap_unordered(download_video,
((video, {
'bars': video_bars,
'ydl_opts': ydl_opts,
'output_dir': args.output
}) for video in download_queue)):
# Save the video ID to the file
if result['downloaded_video_id']:
download_archive_logger.info(result['downloaded_video_id'])
if len(download_queue): # Don't mess with multiprocessing if all videos are already downloaded
with Pool(processes=args.threads) as pool:
status_bar.set_description_str('=' * os.get_terminal_size()[0])
logger.info('Starting downloads...')
for result in pool.imap_unordered(download_video,
((video, {
'bars': video_bars,
'ydl_opts': ydl_opts,
'output_dir': args.output
}) for video in download_queue)):
# Save the video ID to the file
if result['downloaded_video_id']:
download_archive_logger.info(result['downloaded_video_id'])
# Print stuff
for line in result['video_error_logger_msg']:
video_error_logger.info(line)
for line in result['status_msg']:
playlist_bar.write(line)
for line in result['logger_msg']:
log_info_twice(line)
playlist_bar.update()
# Print stuff
for line in result['video_error_logger_msg']:
video_error_logger.info(line)
for line in result['status_msg']:
playlist_bar.write(line)
for line in result['logger_msg']:
log_info_twice(line)
playlist_bar.update()
else:
playlist_bar.write(f"All videos already downloaded for '{playlist['title']}'.")
log_info_twice(f"Finished item: '{playlist['title']}' {target_url}")
log_info_twice(f"Finished process in {round(math.ceil(time.time() - start_time) / 60, 2)} min.")
if not args.daemon:
break
else:
playlist_bar.write(f"All videos already downloaded for '{playlist['title']}'.")
log_info_twice(f"Finished item: '{playlist['title']}' {target_url}")
log_info_twice(f"Finished process in {round(math.ceil(time.time() - start_time) / 60, 2)} min.")
logger.info(f'Sleeping for {args.sleep} min.')
time.sleep(args.sleep * 60)
downloaded_videos = load_existing_videos() # reload the videos that have already been downloaded
# Erase the status bar.
status_bar.set_description_str('\x1b[2KDone!')

View File

@ -33,16 +33,18 @@ def setup_file_logger(name, log_file, level=logging.INFO, format_str: str = '%(a
logger.addHandler(handler)
# Silence console logging
if no_console:
console = logging.StreamHandler(sys.stdout)
console.setLevel(100)
# if no_console:
# console = logging.StreamHandler()
# console.setLevel(100)
return logger
def get_silent_logger(name, level=logging.INFO, silent: bool = True):
def get_silent_logger(name, level=logging.INFO, format_str: str = '%(asctime)s - %(name)s - %(levelname)s - %(message)s', silent: bool = True):
logger = logging.getLogger(name)
logger.setLevel(level)
console = logging.StreamHandler()
console.setFormatter(logging.Formatter(format_str))
logger.addHandler(console)
if silent:
logger.setLevel(100)
else:

View File

@ -83,7 +83,7 @@ def download_video(args) -> dict:
error_code = yt_dlp(video['url']) # Do the download
if not error_code:
elapsed = round(math.ceil(time.time() - start_time) / 60, 2)
output_dict['logger_msg'].append(f"{video['id']} downloaded in {elapsed} min.")
output_dict['logger_msg'].append(f"{video['id']} '{video['title']}' downloaded in {elapsed} min.")
output_dict['downloaded_video_id'] = video['id']
else:
m = f'{video["id"]} {video["title"]} -> Failed to download, error code: {error_code}'