mirror of
				https://github.com/KevinMidboe/bulk-downloader-for-reddit.git
				synced 2025-10-29 17:40:15 +00:00 
			
		
		
		
	remove exclude mode
This commit is contained in:
		| @@ -55,6 +55,8 @@ It should redirect to a page which shows your **imgur_client_id** and **imgur_cl | ||||
| ## Changes on *master* | ||||
| ### [23/07/2018](https://github.com/aliparlakci/bulk-downloader-for-reddit/tree/bcae177b1e2d4e951db0fad26863b956fa920132) | ||||
| - Split download() function | ||||
| - Remove exclude feature | ||||
| - Bug fix | ||||
|  | ||||
| ### [22/07/2018](https://github.com/aliparlakci/bulk-downloader-for-reddit/tree/a67da461d2fcd70672effcb20c8179e3224091bb) | ||||
| - Put log files in a folder named "LOG_FILES" | ||||
|   | ||||
| @@ -40,8 +40,6 @@ optional arguments: | ||||
|                         all | ||||
|   --NoDownload          Just gets the posts and store them in a file for | ||||
|                         downloading later | ||||
|   --exclude {imgur,gfycat,direct,self} [{imgur,gfycat,direct,self} ...] | ||||
|                         Do not download specified links | ||||
| ``` | ||||
|  | ||||
| # Examples | ||||
|   | ||||
							
								
								
									
										46
									
								
								script.py
									
									
									
									
									
								
							
							
						
						
									
										46
									
								
								script.py
									
									
									
									
									
								
							| @@ -144,11 +144,6 @@ def parseArguments(arguments=[]): | ||||
|                         action="store_true", | ||||
|                         default=False) | ||||
|      | ||||
|     parser.add_argument("--exclude", | ||||
|                         nargs="+", | ||||
|                         help="Do not download specified links", | ||||
|                         choices=["imgur","gfycat","direct","self"], | ||||
|                         type=str) | ||||
|  | ||||
|     if arguments == []: | ||||
|         return parser.parse_args() | ||||
| @@ -253,7 +248,8 @@ class PromptUser: | ||||
|                 GLOBAL.arguments.subreddit = "+".join(GLOBAL.arguments.subreddit.split()) | ||||
|  | ||||
|             # DELETE THE PLUS (+) AT THE END | ||||
|             GLOBAL.arguments.subreddit = GLOBAL.arguments.subreddit[:-1] | ||||
|             if not subredditInput.lower() == "frontpage": | ||||
|                 GLOBAL.arguments.subreddit = GLOBAL.arguments.subreddit[:-1] | ||||
|  | ||||
|             print("\nselect sort type:") | ||||
|             sortTypes = [ | ||||
| @@ -327,32 +323,6 @@ class PromptUser: | ||||
|                 if Path(GLOBAL.arguments.log ).is_file(): | ||||
|                     break  | ||||
|  | ||||
|         GLOBAL.arguments.exclude = [] | ||||
|  | ||||
|         sites = ["imgur","gfycat","direct","self"] | ||||
|                  | ||||
|         excludeInput = input("exclude: ").lower() | ||||
|         if excludeInput in sites and excludeInput != "": | ||||
|             GLOBAL.arguments.exclude = [excludeInput] | ||||
|  | ||||
|         while not excludeInput == "": | ||||
|             while True: | ||||
|                 excludeInput = input("exclude: ").lower() | ||||
|                 if not excludeInput in sites or excludeInput in GLOBAL.arguments.exclude: | ||||
|                     break | ||||
|                 elif excludeInput == "": | ||||
|                     break | ||||
|                 else: | ||||
|                     GLOBAL.arguments.exclude.append(excludeInput) | ||||
|  | ||||
|         for i in range(len(GLOBAL.arguments.exclude)): | ||||
|             if " " in GLOBAL.arguments.exclude[i]: | ||||
|                 inputWithWhitespace = GLOBAL.arguments.exclude[i] | ||||
|                 del GLOBAL.arguments.exclude[i] | ||||
|                 for siteInput in inputWithWhitespace.split(): | ||||
|                     if siteInput in sites and siteInput not in GLOBAL.arguments.exclude: | ||||
|                         GLOBAL.arguments.exclude.append(siteInput) | ||||
|  | ||||
|         while True: | ||||
|             try: | ||||
|                 GLOBAL.arguments.limit = int(input("\nlimit (0 for none): ")) | ||||
| @@ -472,15 +442,14 @@ def postExists(POST): | ||||
|     else: | ||||
|         return False | ||||
|  | ||||
| def downloadPost(SUBMISSION,EXCLUDE): | ||||
| def downloadPost(SUBMISSION): | ||||
|     directory = GLOBAL.directory / SUBMISSION['postSubreddit'] | ||||
|  | ||||
|     global lastRequestTime | ||||
|  | ||||
|     downloaders = {"imgur":Imgur,"gfycat":Gfycat,"direct":Direct,"self":Self} | ||||
|  | ||||
|     if SUBMISSION['postType'] in downloaders and \ | ||||
|        not SUBMISSION['postType'] in EXCLUDE: | ||||
|     if SUBMISSION['postType'] in downloaders: | ||||
|  | ||||
|         print(SUBMISSION['postType'].upper()) | ||||
|  | ||||
| @@ -542,11 +511,6 @@ def download(submissions): | ||||
|     downloadedCount = subsLenght | ||||
|     duplicates = 0 | ||||
|  | ||||
|     if GLOBAL.arguments.exclude is not None: | ||||
|         DoNotDownload = GLOBAL.arguments.exclude | ||||
|     else: | ||||
|         DoNotDownload = [] | ||||
|  | ||||
|     FAILED_FILE = createLogFile("FAILED") | ||||
|  | ||||
|     for i in range(subsLenght): | ||||
| @@ -566,7 +530,7 @@ def download(submissions): | ||||
|             continue | ||||
|  | ||||
|         try: | ||||
|             downloadPost(submissions[i],DoNotDownload) | ||||
|             downloadPost(submissions[i]) | ||||
|          | ||||
|         except FileAlreadyExistsError: | ||||
|             print("It already exists") | ||||
|   | ||||
		Reference in New Issue
	
	Block a user