From e4368e142c0d66282529fd585e55d4c5fa4c5e85 Mon Sep 17 00:00:00 2001 From: MrTyton Date: Tue, 15 Aug 2017 19:56:05 -0400 Subject: [PATCH] Minor text bugfix. Stopped from spawning multiple processes when only 1 link to be downloaded. --- fanficdownload.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/fanficdownload.py b/fanficdownload.py index 66c371a..cd70e24 100644 --- a/fanficdownload.py +++ b/fanficdownload.py @@ -136,6 +136,7 @@ def downloader(args): try: res = check_output('calibredb remove {} {}'.format(path, storyId), shell=True,stderr=STDOUT,stdin=PIPE, ) except: + if not live: print output.strip() raise output += log("\tAdding {} to library".format(cur), 'BLUE', live) @@ -143,6 +144,7 @@ def downloader(args): res = check_output('calibredb add -d {} "{}"'.format(path, cur), shell=True,stderr=STDOUT,stdin=PIPE, ) except Exception as e: output += log(e) + if not live: print output.strip() raise try: res = check_output('calibredb search "Identifiers:{}" {}'.format(url, path), shell=True, stderr=STDOUT,stdin=PIPE, ) @@ -207,9 +209,11 @@ def main(user, password, server, label, inout_file, path, live ): log("URLs to parse ({}):".format(len(urls)), 'HEADER') for url in urls: log("\t{}".format(url), 'BLUE') - - p = Pool() - p.map(downloader, [[url, inout_file, path, live] for url in urls]) + if len(urls) == 1: + downloader([urls[0], inout_file, path, live]) + else: + p = Pool() + p.map(downloader, [[url, inout_file, path, live] for url in urls]) return