Merge branch 'master' of github.com:KevinMidboe/seasonedShows

This commit is contained in:
2017-04-05 18:57:36 +02:00
8 changed files with 194 additions and 96 deletions

View File

@@ -1,10 +1,10 @@
# *Seasoned*: an intelligent organizer for your shows
*Seasoned* is an intelligent organizer for your TV show episodes. It is made to automate and simplify the process of renaming and moving newly downloaded TV show episodes, following the Plex file naming and placement conventions.

## Architecture
The system first checks for new folders in your TV shows directory. When a new folder is found, its contents are analyzed and stored, and the suggested changes are tweeted to user_admin.
A second script looks for replies on Twitter from user_admin; if changes are needed, it applies the specified changes and updates the database.
After approval by the user, the files are renamed and moved to their respective folders. If an error occurs, a Paste.ee link to the log is sent to the user.
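To make the flow above concrete, here is a rough sketch of how the pieces touched by this commit could be chained together. The module names and the polling loop are assumptions for illustration; only the function names appear in the diffs below.

# Illustrative orchestration only -- not part of this commit.
# Module names are assumptions; the functions are the ones changed below.
import time

from strayFinder import findStray                           # assumed module name
from strayTweeter import lookForNewEpisodes, checkForReply  # assumed module name
from strayMover import unpackEpisodes, moveFiles            # assumed module name

def runOnce():
    findStray()             # scan the show directory and store new stray episodes
    lookForNewEpisodes()    # tweet suggested changes for episodes without a tweet_id
    checkForReply()         # mark episodes as verified when the reply is a thumbs up
    for episode in unpackEpisodes():  # verified = 1 AND moved = 0
        moveFiles(episode)  # rename and move into the Plex folder layout

if __name__ == '__main__':
    while True:
        runOnce()
        time.sleep(300)     # arbitrary polling interval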

View File

@@ -52,26 +52,11 @@ def XOR(list1, list2):
    return set(list1) ^ set(list2)
def getNewFolderContents():
    showNames = getShowNames().keys()
    folderContents = filter( lambda f: not f.startswith('.'), os.listdir(showDir))
    return XOR(folderContents, showNames)

def checkForSingleEpisodes(folderItem):
    showName, hit = getFuzzyName(folderItem)
    episodeMatch = re.findall(re.sub(' ', '.', showName)+'\.S[0-9]{1,2}E[0-9]{1,2}\.', folderItem)
    if episodeMatch:
        return True

def getByIdentifier(folderItem, identifier):
    itemMatch = re.findall(identifier + '[0-9]{1,2}', folderItem)
    # TODO Should be more precise than first item in list
    item = re.sub(identifier, '', itemMatch[0])
    # TODO Should be checking for errors
    return item

def getItemChildren(folder):
@@ -90,31 +75,7 @@ def getItemChildren(folder):
    return media_items, subtitles, trash

def getEpisodeInfo(folderItem):
    showName, hit = getFuzzyName(folderItem)
    season = getByIdentifier(folderItem, 'S')
    episode = getByIdentifier(folderItem, 'E')
    media_items, subtitles, trash = getItemChildren(folderItem)

    episodeInfo = []
    episodeInfo = {'original': folderItem,
                   'full_path': showDir + folderItem,
                   'name': showName,
                   'season': season,
                   'episode': episode,
                   'media_items': media_items,
                   'subtitles': subtitles,
                   'trash': trash,
                   'tweet_id': None,
                   'reponse_id': None,
                   'verified': '0',
                   'moved': '0'}
    addToDB(episodeInfo)
    return episodeInfo

def addToDB(episodeInfo):
    conn = sqlite3.connect(dbPath)
    c = conn.cursor()
@@ -143,7 +104,56 @@ def addToDB(episodeInfo):
    conn.commit()
    conn.close()

def getNewFolderContents():
    # TODO Should not do on keys, if empty.
    showNames = getShowNames().keys()
    # TODO Better way to filter non dotfiles, dirread in filter?
    # Should maybe all dirs be checked at start?
    folderContents = filter( lambda f: not f.startswith('.'), os.listdir(showDir))
    return XOR(folderContents, showNames) # OK

def checkForSingleEpisodes(folderItem):
    # TODO also if empty, should be checked earlier
    showName, hit = getFuzzyName(folderItem)
    episodeMatch = re.findall(re.sub(' ', '.', showName)+'\.S[0-9]{1,2}E[0-9]{1,2}\.', folderItem)
    if episodeMatch:
        return True # OK

def getEpisodeInfo(folderItem):
    showName, hit = getFuzzyName(folderItem)
    season = getByIdentifier(folderItem, 'S')
    episode = getByIdentifier(folderItem, 'E')
    media_items, subtitles, trash = getItemChildren(folderItem)

    episodeInfo = []
    episodeInfo = {'original': folderItem,
                   'full_path': showDir + folderItem,
                   'name': showName,
                   'season': season,
                   'episode': episode,
                   'media_items': media_items,
                   'subtitles': subtitles,
                   'trash': trash,
                   'tweet_id': None,
                   'reponse_id': None,
                   'verified': '0',
                   'moved': '0'}
    addToDB(episodeInfo)
    return episodeInfo
def findStray():
    # TODO What if null or tries to pass down error
    for item in getNewFolderContents():
        if checkForSingleEpisodes(item):
            pprint(getEpisodeInfo(item))
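The TODOs on getByIdentifier above note that taking itemMatch[0] is imprecise and never checked for errors. One way it could be tightened is a single SxxEyy match that fails loudly; a sketch only, not part of this commit, and the function name is made up:

# Illustrative alternative to getByIdentifier -- parses season and episode
# in one pass and raises instead of silently indexing itemMatch[0].
import re

def parseSeasonEpisode(folderItem):
    match = re.search(r'S(?P<season>[0-9]{1,2})E(?P<episode>[0-9]{1,2})', folderItem, re.IGNORECASE)
    if match is None:
        raise ValueError('No SxxEyy tag found in %r' % folderItem)
    return match.group('season'), match.group('episode')

# parseSeasonEpisode('New.Girl.S06E18.720p.HDTV.x264-FLEET') -> ('06', '18')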

View File

@@ -17,40 +17,6 @@ auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

def unpackEpisodes():
    conn = sqlite3.connect(dbPath)
    c = conn.cursor()
    cursor = c.execute('SELECT * FROM stray_episodes WHERE verified = 1 AND moved = 0')

    episodeList = []
    for row in c.fetchall():
        columnNames = [description[0] for description in cursor.description]
        episodeDict = dict.fromkeys(columnNames)
        for i, key in enumerate(episodeDict.keys()):
            episodeDict[key] = row[i]
        episodeList.append(episodeDict)

    conn.close()
    return episodeList

def createFolders(episode):
    showDir = '/media/hdd1/tv/%s/'% episode['name']
    episodeFormat = '%s S%sE%s/'% (episode['name'], episode['season'], episode['episode'])
    seasonFormat = '%s Season %s/'% (episode['name'], episode['season'])

    if not os.path.isdir(showDir + seasonFormat):
        os.makedirs(showDir + seasonFormat)
    if not os.path.isdir(showDir + seasonFormat + episodeFormat):
        os.makedirs(showDir + seasonFormat + episodeFormat)

def newnameMediaitems(media_items):
    # media_items = [['New.Girl.S06E18.720p.HDTV.x264-EZTV.srt', '-EZTV', 'nl'], ['New.Girl.S06E18.720p.HDTV.x264-FLEET.srt', '-FLEET', 'en']]
    media_items = json.loads(media_items)
@@ -80,11 +46,46 @@ def updateMovedStatus(episodeDict):
    conn.commit()
    conn.close()

def unpackEpisodes():
    conn = sqlite3.connect(dbPath)
    c = conn.cursor()
    cursor = c.execute('SELECT * FROM stray_episodes WHERE verified = 1 AND moved = 0')

    episodeList = []
    for row in c.fetchall():
        columnNames = [description[0] for description in cursor.description]
        episodeDict = dict.fromkeys(columnNames)
        for i, key in enumerate(episodeDict.keys()):
            episodeDict[key] = row[i]
        episodeList.append(episodeDict)

    conn.close()
    return episodeList

def createFolders(episode):
    showDir = '/media/hdd1/tv/%s/'% episode['name']
    episodeFormat = '%s S%sE%s/'% (episode['name'], episode['season'], episode['episode'])
    seasonFormat = '%s Season %s/'% (episode['name'], episode['season'])

    if not os.path.isdir(showDir + seasonFormat):
        os.makedirs(showDir + seasonFormat)
    if not os.path.isdir(showDir + seasonFormat + episodeFormat):
        os.makedirs(showDir + seasonFormat + episodeFormat)
def moveFiles(episode):
    # TODO All this should be imported from config file
    showDir = '/media/hdd1/tv/'
    episodeFormat = '%s S%sE%s/'% (episode['name'], episode['season'], episode['episode'])
    seasonFormat = '%s/%s Season %s/'% (episode['name'], episode['name'], episode['season'])
    # TODO All this is pretty ballsy to do this hard/strict.
    newMediaitems = newnameMediaitems(episode['media_items'])
    for item in newMediaitems:
        old_location = showDir + episode['original'] + '/' + item[0]
@@ -103,6 +104,7 @@ def moveFiles(episode):
    for trash in json.loads(episode['trash']):
        os.remove(showDir + episode['original'] + '/'+ trash)
    # TODO Maybe move to a delete folder instead, then the user can dump it.
    os.rmdir(showDir + episode['original'])
    updateMovedStatus(episode)
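Several of the TODOs in this file point at the hard-coded '/media/hdd1/tv/' paths. A minimal sketch of reading them from a config file instead; the seasoned.ini file name and its keys are hypothetical and not part of this commit:

# Hypothetical config loading -- assumes a seasoned.ini next to the scripts, e.g.:
#   [paths]
#   show_dir = /media/hdd1/tv/
#   db_path = /path/to/seasoned.db
import configparser
import os

def loadConfig(path='seasoned.ini'):
    config = configparser.ConfigParser()
    config.read(path)
    return config

config = loadConfig()
showDir = config.get('paths', 'show_dir', fallback='/media/hdd1/tv/')
dbPath = config.get('paths', 'db_path', fallback=os.path.expanduser('~/seasoned.db'))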

modules/classedStray.py (new executable file, 33 lines added)

@@ -0,0 +1,33 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: KevinMidboe
# @Date:   2017-04-05 18:40:11
# @Last Modified by:   KevinMidboe
# @Last Modified time: 2017-04-05 18:51:32

import os, hashlib
from functools import reduce
import time, glob

dirHash = None

def directoryChecksum():
    dirList = os.listdir('/Volumes/media/tv')
    concat = reduce(lambda x, y: x + y, dirList, "")
    m = hashlib.md5()
    m.update(bytes(concat, 'utf-16be'))
    return m.digest()

def blober():
    for filename in glob.iglob('/Volumes/media/tv/*'):
        pass

def main():
    start_time = time.time()
    if dirHash is None:
        blober()
    print("--- %s seconds ---" % (time.time() - start_time))

if __name__ == '__main__':
    main()

modules/createPasteee.py (new executable file, 16 lines added)

@@ -0,0 +1,16 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: KevinMidboe
# @Date:   2017-02-23 21:41:40
# @Last Modified by:   KevinMidboe
# @Last Modified time: 2017-03-05 19:35:10

from pasteee import Paste

def createPasteee():
    paste = Paste('Test pastee', views=10)
    print(paste)
    print(paste['raw'])

if __name__ == '__main__':
    createPasteee()

modules/dirHash.py (new executable file, 23 lines added)

@@ -0,0 +1,23 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: KevinMidboe
# @Date:   2017-04-05 15:24:17
# @Last Modified by:   KevinMidboe
# @Last Modified time: 2017-04-05 18:22:13

import os, hashlib
from functools import reduce

hashDir = '/Volumes/media/tv'

def main():
    dirList = os.listdir(hashDir)
    concat = reduce(lambda x, y: x + y, dirList, "")
    m = hashlib.md5()
    m.update(bytes(concat, 'utf-16be'))
    return m.digest()

if __name__ == '__main__':
    print(main())
    # TODO The hash value should be saved in a global manner
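Regarding the TODO above, one way the digest could be kept "in a global manner" is to cache it on disk and compare it on the next run. A sketch under that assumption; the cache path and function names are made up:

# Illustrative only -- persists the directory digest between runs.
import os, hashlib
from functools import reduce

HASH_CACHE = os.path.expanduser('~/.seasoned_dirhash')  # assumed location

def directoryDigest(path='/Volumes/media/tv'):
    dirList = os.listdir(path)
    concat = reduce(lambda x, y: x + y, dirList, "")
    return hashlib.md5(bytes(concat, 'utf-16be')).hexdigest()

def directoryChanged(path='/Volumes/media/tv'):
    digest = directoryDigest(path)
    previous = None
    if os.path.exists(HASH_CACHE):
        with open(HASH_CACHE) as f:
            previous = f.read().strip()
    with open(HASH_CACHE, 'w') as f:
        f.write(digest)
    return digest != previous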

modules/subLangfinder.py (new executable file, 12 lines added)

@@ -0,0 +1,12 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from langdetect import detect

def main():
    f = open('/Volumes/media/movies/The Man from UNCLE (2015)/The.Man.from.U.N.C.L.E.2015.1080p.nl.srt', 'r', encoding = "ISO-8859-15")
    print(detect(f.read()))
    f.close()

if __name__ == '__main__':
    main()
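subLangfinder.py detects the language of a single hard-coded subtitle. A sketch of how the same call might tag every .srt in an episode folder; the helper name and the folder argument are illustrative, not part of this commit:

# Illustrative only -- pairs each .srt with a detected language code, similar
# to the [filename, tag, lang] lists shown in the newnameMediaitems comment above.
import glob, os
from langdetect import detect

def tagSubtitleLanguages(folder):
    tagged = []
    for path in glob.iglob(os.path.join(folder, '*.srt')):
        with open(path, 'r', encoding='ISO-8859-15') as f:
            lang = detect(f.read())
        tagged.append([os.path.basename(path), lang])
    return tagged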

View File

@@ -73,24 +73,11 @@ def tweetEpisode(episode):
    updateTweetID(episode, tweet_id)

def lookForNewEpisodes():
    conn = sqlite3.connect(dbPath)
    c = conn.cursor()
    c.execute('SELECT * FROM stray_episodes WHERE tweet_id is NULL')

    for row in c.fetchall():
        episode = unpackEpisode(row)
        tweetEpisode(episode)
    conn.close()

def getLastTweets(user, count=1):
    return api.user_timeline(screen_name=user,count=count)

def verifyByID(id, reponse_id):
    conn = sqlite3.connect(dbPath)
    c = conn.cursor()
@@ -99,6 +86,7 @@ def verifyByID(id, reponse_id):
    conn.commit()
    conn.close()

# TODO Add more parsing than confirm
def parseReply(tweet):
    if b'\xf0\x9f\x91\x8d' in tweet.text.encode('utf-8'):
        print('Verified!')
@@ -119,7 +107,21 @@ def getReply(tweet):
    parseReply(tweet)
    conn.close()

def lookForNewEpisodes():
    conn = sqlite3.connect(dbPath)
    c = conn.cursor()
    c.execute('SELECT * FROM stray_episodes WHERE tweet_id is NULL')

    for row in c.fetchall():
        episode = unpackEpisode(row)
        tweetEpisode(episode)
    conn.close()

def checkForReply():
    for tweet in getLastTweets('KevinMidboe', 10):
        if tweet.in_reply_to_status_id_str != None:
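parseReply above only recognizes a thumbs-up confirmation, and the TODO asks for more parsing. A sketch of how a rejection or a corrected show name could also be handled; the thumbs-down branch and the "name:" convention are assumptions, not part of this commit:

# Illustrative extension of parseReply -- the emoji handling for rejection and
# the "name: ..." correction convention are assumptions.
def parseReplyText(text):
    encoded = text.encode('utf-8')
    if b'\xf0\x9f\x91\x8d' in encoded:        # thumbs up -> approve
        return {'action': 'verify'}
    if b'\xf0\x9f\x91\x8e' in encoded:        # thumbs down -> reject
        return {'action': 'reject'}
    if 'name:' in text.lower():               # e.g. "name: New Girl"
        corrected = text.split(':', 1)[1].strip()
        return {'action': 'rename', 'name': corrected}
    return {'action': 'ignore'}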