mirror of
https://github.com/KevinMidboe/bulk-downloader-for-reddit.git
synced 2026-01-10 19:25:41 +00:00
Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
27532408c1 | ||
|
|
32647beee9 | ||
|
|
a67da461d2 | ||
|
|
8c6f593496 | ||
|
|
b60ce8a71e | ||
|
|
49920cc457 | ||
|
|
c70e7c2ebb | ||
|
|
3931dfff54 | ||
|
|
4a8c2377f9 | ||
|
|
8a18a42a9a | ||
|
|
6c2d748fbc | ||
|
|
8c966df105 |
@@ -53,6 +53,13 @@ It should redirect to a page which shows your **imgur_client_id** and **imgur_cl
|
||||
them, there.
|
||||
|
||||
## Changelog
|
||||
### [22/07/2018](https://github.com/aliparlakci/bulk-downloader-for-reddit/tree/a67da461d2fcd70672effcb20c8179e3224091bb)
|
||||
- Put log files in a folder named "LOG_FILES"
|
||||
- Fixed the bug that makes multireddit mode unusable
|
||||
|
||||
### [21/07/2018](https://github.com/aliparlakci/bulk-downloader-for-reddit/tree/4a8c2377f9fb4d60ed7eeb8d50aaf9a26492462a)
|
||||
- Added exclude mode
|
||||
|
||||
### [20/07/2018](https://github.com/aliparlakci/bulk-downloader-for-reddit/commit/7548a010198fb693841ca03654d2c9bdf5742139)
|
||||
- "0" input for no limit
|
||||
- Fixed the bug that recognizes non-image direct links as image links
|
||||
|
||||
@@ -23,7 +23,8 @@ optional arguments:
|
||||
--saved Triggers saved mode
|
||||
--submitted Gets posts of --user
|
||||
--upvoted Gets upvoted posts of --user
|
||||
--log LOG FILE Triggers log read mode and takes a log file
|
||||
--log LOG FILE Takes a log file which was created by itself (json files),
|
||||
reads posts and tries downloading them again.
|
||||
--subreddit SUBREDDIT [SUBREDDIT ...]
|
||||
Triggers subreddit mode and takes subreddit's name
|
||||
without r/. use "frontpage" for frontpage
|
||||
@@ -39,6 +40,8 @@ optional arguments:
|
||||
all
|
||||
--NoDownload Just gets the posts and store them in a file for
|
||||
downloading later
|
||||
--exclude {imgur,gfycat,direct,self} [{imgur,gfycat,direct,self} ...]
|
||||
Do not download specified links
|
||||
```
|
||||
|
||||
# Examples
|
||||
|
||||
61
script.py
61
script.py
@@ -22,7 +22,7 @@ from src.tools import (GLOBAL, createLogFile, jsonFile, nameCorrector,
|
||||
|
||||
__author__ = "Ali Parlakci"
|
||||
__license__ = "GPL"
|
||||
__version__ = "1.2.1"
|
||||
__version__ = "1.3.1"
|
||||
__maintainer__ = "Ali Parlakci"
|
||||
__email__ = "parlakciali@gmail.com"
|
||||
|
||||
@@ -143,6 +143,12 @@ def parseArguments(arguments=[]):
|
||||
" for downloading later",
|
||||
action="store_true",
|
||||
default=False)
|
||||
|
||||
parser.add_argument("--exclude",
|
||||
nargs="+",
|
||||
help="Do not download specified links",
|
||||
choices=["imgur","gfycat","direct","self"],
|
||||
type=str)
|
||||
|
||||
if arguments == []:
|
||||
return parser.parse_args()
|
||||
@@ -159,7 +165,10 @@ def checkConflicts():
|
||||
else:
|
||||
user = 1
|
||||
|
||||
modes = ["saved","subreddit","submitted","search","log","link","upvoted"]
|
||||
modes = [
|
||||
"saved","subreddit","submitted","search","log","link","upvoted",
|
||||
"multireddit"
|
||||
]
|
||||
|
||||
values = {
|
||||
x: 0 if getattr(GLOBAL.arguments,x) is None or \
|
||||
@@ -265,7 +274,7 @@ class PromptUser:
|
||||
|
||||
elif programMode == "multireddit":
|
||||
GLOBAL.arguments.user = input("\nredditor: ")
|
||||
GLOBAL.arguments.subreddit = input("\nmultireddit: ")
|
||||
GLOBAL.arguments.multireddit = input("\nmultireddit: ")
|
||||
|
||||
print("\nselect sort type:")
|
||||
sortTypes = [
|
||||
@@ -318,6 +327,32 @@ class PromptUser:
|
||||
if Path(GLOBAL.arguments.log ).is_file():
|
||||
break
|
||||
|
||||
GLOBAL.arguments.exclude = []
|
||||
|
||||
sites = ["imgur","gfycat","direct","self"]
|
||||
|
||||
excludeInput = input("exclude: ").lower()
|
||||
if excludeInput in sites and excludeInput != "":
|
||||
GLOBAL.arguments.exclude = [excludeInput]
|
||||
|
||||
while not excludeInput == "":
|
||||
while True:
|
||||
excludeInput = input("exclude: ").lower()
|
||||
if not excludeInput in sites or excludeInput in GLOBAL.arguments.exclude:
|
||||
break
|
||||
elif excludeInput == "":
|
||||
break
|
||||
else:
|
||||
GLOBAL.arguments.exclude.append(excludeInput)
|
||||
|
||||
for i in range(len(GLOBAL.arguments.exclude)):
|
||||
if " " in GLOBAL.arguments.exclude[i]:
|
||||
inputWithWhitespace = GLOBAL.arguments.exclude[i]
|
||||
del GLOBAL.arguments.exclude[i]
|
||||
for siteInput in inputWithWhitespace.split():
|
||||
if siteInput in sites and siteInput not in GLOBAL.arguments.exclude:
|
||||
GLOBAL.arguments.exclude.append(siteInput)
|
||||
|
||||
while True:
|
||||
try:
|
||||
GLOBAL.arguments.limit = int(input("\nlimit (0 for none): "))
|
||||
@@ -377,6 +412,9 @@ def prepareAttributes():
|
||||
|
||||
ATTRIBUTES["subreddit"] = GLOBAL.arguments.subreddit
|
||||
|
||||
elif GLOBAL.arguments.multireddit is not None:
|
||||
ATTRIBUTES["multireddit"] = GLOBAL.arguments.multireddit
|
||||
|
||||
elif GLOBAL.arguments.saved is True:
|
||||
ATTRIBUTES["saved"] = True
|
||||
|
||||
@@ -444,6 +482,10 @@ def download(submissions):
|
||||
downloadedCount = subsLenght
|
||||
duplicates = 0
|
||||
BACKUP = {}
|
||||
if GLOBAL.arguments.exclude is not None:
|
||||
ToBeDownloaded = GLOBAL.arguments.exclude
|
||||
else:
|
||||
ToBeDownloaded = []
|
||||
|
||||
FAILED_FILE = createLogFile("FAILED")
|
||||
|
||||
@@ -466,7 +508,7 @@ def download(submissions):
|
||||
|
||||
directory = GLOBAL.directory / submissions[i]['postSubreddit']
|
||||
|
||||
if submissions[i]['postType'] == 'imgur':
|
||||
if submissions[i]['postType'] == 'imgur' and not 'imgur' in ToBeDownloaded:
|
||||
print("IMGUR",end="")
|
||||
|
||||
while int(time.time() - lastRequestTime) <= 2:
|
||||
@@ -529,7 +571,7 @@ def download(submissions):
|
||||
)
|
||||
downloadedCount -= 1
|
||||
|
||||
elif submissions[i]['postType'] == 'gfycat':
|
||||
elif submissions[i]['postType'] == 'gfycat' and not 'gfycat' in ToBeDownloaded:
|
||||
print("GFYCAT")
|
||||
try:
|
||||
Gfycat(directory,submissions[i])
|
||||
@@ -549,7 +591,7 @@ def download(submissions):
|
||||
FAILED_FILE.add({int(i+1):[str(exception),submissions[i]]})
|
||||
downloadedCount -= 1
|
||||
|
||||
elif submissions[i]['postType'] == 'direct':
|
||||
elif submissions[i]['postType'] == 'direct' and not 'direct' in ToBeDownloaded:
|
||||
print("DIRECT")
|
||||
try:
|
||||
Direct(directory,submissions[i])
|
||||
@@ -564,7 +606,7 @@ def download(submissions):
|
||||
FAILED_FILE.add({int(i+1):[str(exception),submissions[i]]})
|
||||
downloadedCount -= 1
|
||||
|
||||
elif submissions[i]['postType'] == 'self':
|
||||
elif submissions[i]['postType'] == 'self' and not 'self' in ToBeDownloaded:
|
||||
print("SELF")
|
||||
try:
|
||||
Self(directory,submissions[i])
|
||||
@@ -668,7 +710,10 @@ if __name__ == "__main__":
|
||||
GLOBAL.directory = Path(".\\")
|
||||
print("\nQUITTING...")
|
||||
except Exception as exception:
|
||||
logging.error("Runtime error!", exc_info=full_exc_info(sys.exc_info()))
|
||||
if GLOBAL.directory is None:
|
||||
GLOBAL.directory = Path(".\\")
|
||||
logging.error(sys.exc_info()[0].__name__,
|
||||
exc_info=full_exc_info(sys.exc_info()))
|
||||
print(log_stream.getvalue())
|
||||
|
||||
input("Press enter to quit\n")
|
||||
|
||||
@@ -75,8 +75,10 @@ def createLogFile(TITLE):
|
||||
put given arguments inside \"HEADER\" key
|
||||
"""
|
||||
|
||||
folderDirectory = GLOBAL.directory / str(time.strftime("%d-%m-%Y_%H-%M-%S",
|
||||
time.localtime(GLOBAL.RUN_TIME)))
|
||||
folderDirectory = GLOBAL.directory / "LOG_FILES" / \
|
||||
str(time.strftime(
|
||||
"%d-%m-%Y_%H-%M-%S",time.localtime(GLOBAL.RUN_TIME)
|
||||
))
|
||||
logFilename = TITLE.upper()+'.json'
|
||||
|
||||
if not path.exists(folderDirectory):
|
||||
@@ -95,7 +97,7 @@ def printToFile(*args, **kwargs):
|
||||
|
||||
TIME = str(time.strftime("%d-%m-%Y_%H-%M-%S",
|
||||
time.localtime(GLOBAL.RUN_TIME)))
|
||||
folderDirectory = GLOBAL.directory / TIME
|
||||
folderDirectory = GLOBAL.directory / "LOG_FILES" / TIME
|
||||
print(*args,**kwargs)
|
||||
|
||||
if not path.exists(folderDirectory):
|
||||
|
||||
Reference in New Issue
Block a user