#! python2
"""
uTorrent resume.dat => qbittorrent
Author: robot

Largely based on resumedata_2015.rb; see https://github.com/qbittorrent/qBittorrent/issues/1826 for discussion.
Zero error tolerance is assumed; see error.log for warnings as well.
Feel free to change the defaults in mkfr().

Trial mode (read-only):
    --test
Easy mode (continue on fatal errors):
    --easy
See the debug option --match (match mode) to apply an arbitrary caption filter.

Pending:
    * Review complete file relocation
    * Correct status / summary
    * Piece priorities / no recheck required
"""
import os, sys, time, logging, argparse, traceback, re
import bencode, codecs, binascii, hashlib
from shutil import copyfile
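# Note: 'bencode' is a third-party module (e.g. the legacy "bencode" package on
# PyPI); the bencode()/bdecode() calls below assume that interface.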

# GLOBALS
app = "uTorrent resume.dat => qbittorrent : v.0.1.0"
scriptPath = None
inPath = None
outPath = None
resumeDAT = None
logger = None
M_debug = False  # trial mode
M_easy = False   # easy mode
M_match = None   # match caption


class BEncodeError(Exception):
    pass


def printl(txt, alt=None):
    ''' print and log to file '''
    try:
        if isinstance(txt, str): txt = unicode(txt, 'utf-8')
        logger.error(txt)
        print(txt)
    except:
        if alt is None: alt = 'Error - failed to print error details'
        logger.error(alt)
        print(alt)


def getScriptPath():
    return os.path.dirname(os.path.realpath(sys.argv[0]))


def initprompt():
    print("Copy resume.dat and all *.torrent files (typically from %appdata%/uTorrent) to the ./in folder")
    print("qbt exports will be written to the ./out folder")
    print("Press return to continue or ctrl+c to exit")
    try:
        response = raw_input("> ")
    except KeyboardInterrupt:
        sys.exit(0)


def continueprompt():
    print("Press return to continue or ctrl+c to exit")
    try:
        response = raw_input("> ")
    except KeyboardInterrupt:
        sys.exit(0)


def priogen(s):
    '''
    per-file priority generator
    @param s : hexlified priority string
    '''
    while True:
        try:
            a = s[0]
            b = s[1]
            s = s[2:]
        except IndexError:
            return
        yield int("%s%s" % (a, b), 16)
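
# Example (hypothetical values): uTorrent stores one priority byte per file, so
# binascii.hexlify('\x08\x00\x0c') == '08000c' and
# list(priogen('08000c')) == [8, 0, 12].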


def mkfr(res, tor, inf):
    '''
    make fastresume
    @param res : resume data object
    @param tor : torrent data object
    @param inf : info hash string
    '''
    fr = {
        'active_time': 0,  # runtime
        'added_time': 0,  # added_on
        'allocation': 'full',
        'announce_to_dht': 0,
        'announce_to_lsd': 0,
        'announce_to_trackers': 1,
        'auto_managed': 1,
        'banned_peers': '',
        'banned_peers6': '',
        'blocks per piece': 0,  # ("piece length" from .torrent) / ("block size" from resume.dat) - range [1,256]
        'completed_time': 0,  # completed_on
        'download_rate_limit': 0,
        'file sizes': [
            [
                0,  # File 1, size in bytes
                0   # File 1, modified date (timestamp) or (modtimes array in resume)
            ],
            [
                0,  # File 2, size in bytes
                0   # File 2, mtime (ts)
            ],
            [
                0,
                0
            ]
        ],
        'file-format': 'libtorrent resume file',  # req
        'file-version': 1,  # req
        'file_priority': [  # prio bitfield
            2,  # File 1, High
            0,  # File 2, Do not download
            1   # File 3, Normal
        ],
        'finished_time': 0,
        'info-hash': '',  # rb: Digest::SHA1.digest('infohashbinarydata') , # tdb['info'].bencode
        'last_download': 0,
        'last_scrape': 0,
        'last_seen_complete': 0,
        'last_upload': 0,
        'libtorrent-version': '1.0.6.0',
        'mapped_files': [
            r'relative\path\to\file1.ext',  # File 1 (placeholder; raw string so \t, \f are not treated as escapes)
            r'r\p\t\file2.ext',             # File 2
            'file3.ext'                     # File 3
        ],
        'max_connections': 100,
        'max_uploads': 16777215,
        'num_downloaders': 16777215,
        'num_incomplete': 0,
        'num_seeds': 0,
        'paused': 0,  # 'started' - 0 = stopped, 1 = force, 2 = start
        'peers': '',
        'peers6': '',
        'piece_priority': '',  # "\x01"*1399 , # * num pieces?
        'pieces': '',  # "\x01"*1399 , # * num pieces?
        'seed_mode': 0,
        'seeding_time': 0,
        'sequential_download': 0,
        'super_seeding': 0,
        'total_downloaded': 0,  # downloaded field
        'total_uploaded': 0,  # uploaded field
        'upload_rate_limit': 0,  # upspeed
        'trackers': []
    }
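    # Everything above is just a template; the real values are filled in below
    # from the resume.dat entry (res) and the decoded .torrent (tor).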
    npieces = len(tor['info']['pieces']) / 20  # a SHA1 hash is 20 bytes
    fr['added_time'] = int(res['added_on'])
    fr['completed_time'] = int(res['completed_on'])
    fr['active_time'] = int(res['runtime'])
    fr['seeding_time'] = fr['active_time']
    fr['blocks per piece'] = int(tor['info']['piece length']) / int(res['blocksize'])
    fr['info-hash'] = hashlib.sha1(bencode.bencode(tor['info'])).digest()
    fr['paused'] = 1  # Always add torrents in paused state
    fr['auto_managed'] = 0
    fr['total_downloaded'] = int(res['downloaded'])
    fr['total_uploaded'] = int(res['uploaded'])
    fr['upload_rate_limit'] = int(res['upspeed'])
    for obj in res['trackers']:
        if isinstance(obj, list): fr['trackers'].append(obj)
        else: fr['trackers'].append([obj])
    # https://idiosyncratic-ruby.com/4-what-the-pack.html
    # KISS: recheck required
    fr['piece_priority'] = npieces * "\x01"
    fr['pieces'] = npieces * "\x00"
    fr['finished_time'] = int(round(time.time())) - fr['completed_time']  # ?!
    if (fr['finished_time'] > 0):
        fr['last_seen_complete'] = int(round(time.time()))
        fr['last_download'] = fr['finished_time']
        fr['last_scrape'] = fr['finished_time']
        fr['last_upload'] = fr['finished_time']
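    # Note: last_download / last_scrape / last_upload are all filled with the
    # same "seconds since completion" value as a rough approximation.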
    # Per file fields:
    #   mapped_files
    #   file_priority
    #   file sizes
    fr['file_priority'] = []
    for prio in priogen(binascii.hexlify(res['prio'])):  # (str)"0808.." => (int)8 (int)8 ..
        if prio in range(1, 9): fr['file_priority'].append(1)
        elif prio in range(9, 16): fr['file_priority'].append(2)
        else: fr['file_priority'].append(0)
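    # Mapping used above: uTorrent per-file priority 1-8 => normal (1),
    # 9-15 => high (2), anything else (0 / unknown) => do not download (0).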
    fr['mapped_files'] = []
    fr['file sizes'] = []
    fmt = 0  # file time to avoid checking / not present in ut2.2
    fmtSupported = True if ('modtimes' in res and len(res['modtimes'])) else False
    if 'files' in tor['info'] and len(tor['info']['files']) >= 1:  # multiple files, or single file in a folder
        uSavePath = unicode(res['path'], "utf-8", errors="ignore")
        uSavePath = os.path.join(uSavePath, u"")  # ... and append os.sep
        fr['save_path'] = os.path.join(res['path'], "")  # ASCII-8BIT
        fr['qBt-savePath'] = fr['save_path']
        if not os.path.isdir(uSavePath):
            # something is wrong or the torrent has never been started
            printl("warning - save_path does not exist\n\t%s" % (res['path'],), "warning - save_path does not exist\n\t%s" % (inf,))
            touched = False
        else:
            touched = True
        # iterate files
        ix = -1
        for f in tor['info']['files']:
            ix += 1
            fmt = int(res['modtimes'][ix]) if (fmtSupported) else 0
            uFPath = unicode(os.path.join(*f['path']), "utf-8", errors="ignore")
            fr['mapped_files'].append(os.path.join(*f['path']))  # ASCII-8BIT
            if touched:
                uPath = os.path.join(uSavePath, uFPath)
                if fmt == 0 and os.path.isfile(uPath): fmt = int(os.path.getmtime(uPath))
            if (fr['file_priority'][ix] != 0):
                fr['file sizes'].append([int(f['length']), fmt])
            else:
                fr['file sizes'].append([0, 0])
        # END multiple files
    else:  # single file (not in a folder)
        # qbt wants the parent dir as savePath, then we map the single file onto it
        uSavePath = unicode(res['path'], "utf-8", errors="ignore")  # file path
        fr['qBt-savePath'] = os.path.join(os.path.abspath(os.path.join(res['path'], os.pardir)), "")  # ASCII-8BIT
        fr['mapped_files'].append(os.path.basename(res['path']))  # ASCII-8BIT
        if not os.path.isfile(uSavePath):
            # something is wrong or the torrent has never been started
            printl("warning - save_path does not exist\n\t%s" % (res['path'],), "warning - save_path does not exist\n\t%s" % (inf,))
            touched = False
        else:
            touched = True
        fmt = int(res['modtimes'][0]) if (fmtSupported) else 0
        if touched and fmt == 0: fmt = int(os.path.getmtime(uSavePath))
        if (fr['file_priority'][0] != 0):
            if 'length' not in tor['info']:
                fr['file sizes'].append([int(tor['info']['files'][0]['length']), fmt])
            else:
                fr['file sizes'].append([int(tor['info']['length']), fmt])
        else:
            fr['file sizes'].append([0, 0])
        # END single file
    # qBittorrent 3.1+ fields
    fr['qBt-queuePosition'] = -1  # -1 for completed
    fr['qBt-seedDate'] = fr['completed_time']
    fr['qBt-ratioLimit'] = '-2'  # -2 = use global, -1 = no limit, other number = actual ratio?
    if 'label' in res and len(res['label']):
        fr['qBt-label'] = res['label']
    # additions
    if 'caption' in res and len(res['caption']):
        fr['qBt-name'] = res['caption']
    return fr
# END mkfr()
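
# Quick sanity check for a generated file (hypothetical path; run in a python2
# shell with the same bencode module importable):
#   import bencode
#   with open('out/<infohash>.fastresume', 'rb') as f:
#       fr = bencode.bdecode(f.read())
#   print fr['qBt-savePath'], fr['paused'], len(fr['file_priority'])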


def argHandler(args):
    global M_debug, M_easy, M_match
    if args['test']:
        print('********* TRIAL MODE *********')
        M_debug = True
    if args['easy']:
        print('********* EASY MODE *********')
        M_easy = True
    if args['match']:
        print('********* MATCH MODE *********')
        M_match = args['match']


def main():
    parser = argparse.ArgumentParser(description=app)
    parser.add_argument('-t', '--test', help='Debug: Trial mode, read-only', required=False, action='store_true')
    parser.add_argument('-m', '--match', help='Debug: Match caption', required=False)
    parser.add_argument('-e', '--easy', help='Easy mode, continue on fatal error', required=False, action='store_true')
    args = vars(parser.parse_args())
    argHandler(args)
    initprompt()
    scriptPath = getScriptPath()
    errorLogFile = os.path.join(scriptPath, 'ut2qbt-error.log')
    # clean up
    if os.path.isfile(errorLogFile): os.remove(errorLogFile)
    # setup logger
    with open(errorLogFile, 'w') as f:  # don't ask
        f.write(codecs.BOM_UTF8)
        f.write("")
    global logger
    handler = logging.FileHandler(errorLogFile, "a", encoding="utf-8")
    formatter = logging.Formatter(u'%(asctime)s %(message)s')
    handler.setFormatter(formatter)
    logger = logging.getLogger(__name__)
    logger.addHandler(handler)
    logger.setLevel(logging.ERROR)
    # check paths, assign globals
    inPath = os.path.join(scriptPath, 'in')
    outPath = os.path.join(scriptPath, 'out')
    resumeDAT = os.path.join(inPath, 'resume.dat')
    if not os.path.isdir(inPath):
        printl("directory error: %s" % (inPath,))
        if not M_easy: sys.exit(1)
    if not os.path.isdir(outPath):
        printl("directory error: %s" % (outPath,))
        if not M_easy: sys.exit(1)
    if not os.path.isfile(resumeDAT):
        printl("resume.dat error: %s" % (resumeDAT,))
        if not M_easy: sys.exit(1)
    # load resume.dat, sanity checks
    print("Parsing dat file...")
    with open(resumeDAT, 'rb') as f:
        data = f.read()
    ecnt = 0
    infoHashes = []
    torrents = {}
    try:
        data = bencode.bdecode(data)
        for item in data:
            if item == ".fileguard": continue
            if "info" in data[item]:
                try:
                    inf = binascii.hexlify(data[item]['info'])
                    if 'caption' not in data[item]: raise BEncodeError
                    if 'added_on' not in data[item]: raise BEncodeError
                    if 'completed_on' not in data[item]: raise BEncodeError
                except BEncodeError:
                    ecnt += 1
                    continue
                except:  # info hash already stored as hex?
                    inf = data[item]['info']
                if (M_match is not None):
                    m = re.search(str(M_match), data[item]['caption'], re.I | re.M)
                    if not m: continue
                infoHashes.append(inf.lower())
                data[item]['__torrentfile'] = item  # cleaner
                torrents[inf.lower()] = data[item]
            else:
                printl("error\r\n\t%s\r\n\t%s" % (data[item], item))
                ecnt += 1
    except:
        pass
    print("torrents: %s" % (str(len(infoHashes)),))
    if ecnt:
        printl("errors: %s" % (str(ecnt),))
        continueprompt()
print("Processing, please wait...") | |
infoHashesOK = [] | |
# load .torrent files | |
for inf in torrents: | |
# out paths | |
utoPath = unicode(os.path.join(outPath, "%s.torrent" % (inf,)), "utf-8", errors="ignore") | |
uroPath = unicode(os.path.join(outPath, "%s.resume" % (inf,)), "utf-8", errors="ignore") | |
# .torrent or .fastresume exists => already processed | |
if os.path.isfile(utoPath) or os.path.isfile(uroPath): continue | |
tPath = os.path.join(inPath, torrents[inf]['__torrentfile']) | |
utPath = unicode(tPath, "utf-8", errors="ignore") | |
if not os.path.isfile(utPath): | |
printl("error loading item: %s\n\t%s" % (inf, torrents[inf]['__torrentfile'])) | |
if not M_easy: sys.exit(1) | |
try: | |
with open(utPath, 'rb') as f: | |
tdata = f.read() | |
tdata = bencode.bdecode(tdata) | |
except Exception as e: | |
exc_info = sys.exc_info() | |
printl("error decoding item: %s\n\t%s\n\t%s\n\t%s" % (inf, torrents[inf]['__torrentfile'], e, traceback.format_exception(*exc_info))) | |
if not M_easy: sys.exit(1) | |
else: del exc_info | |
try: | |
frfile = mkfr(torrents[inf], tdata, inf) | |
except Exception as e: | |
exc_info = sys.exc_info() | |
printl("error making fastresume: %s\n\t%s\n\t%s\n\t%s" % (inf, torrents[inf]['__torrentfile'], e, traceback.format_exception(*exc_info))) | |
if not M_easy: sys.exit(1) | |
else: del exc_info | |
if M_debug: continue | |
# copy torrent | |
try: | |
copyfile(utPath, utoPath) | |
except: | |
printl("copy torrent failed: %s\n\t%s" % (inf, torrents[inf]['__torrentfile'])) | |
if not M_easy: sys.exit(1) | |
# write fr | |
try: | |
ufrPath = unicode(os.path.join(outPath, "%s.fastresume" % (inf,)), "utf-8", errors="ignore") | |
with open(ufrPath, 'wb') as f: | |
f.write(bencode.bencode(frfile)) | |
except: | |
printl("write fastresume failed: %s\n\t%s" % (inf, torrents[inf]['__torrentfile'])) | |
if not M_easy: sys.exit(1) | |
infoHashesOK.append(inf) | |
    if len(infoHashesOK) < len(infoHashes):
        logger.error("\n\nFailed to process summary:")
        for inf in infoHashes:
            if inf not in infoHashesOK: logger.error(inf)
    print("%s of %s torrents processed" % (str(len(infoHashesOK)), str(len(infoHashes))))
    print("Finished.")
    sys.exit(0)
# END main()


if __name__ == '__main__':
    main()