def downloadResult(self, result):
    """
    Save the result to disk.

    Tries each candidate URL in turn; returns True as soon as one
    download passes verification, otherwise False.
    """
    # check for auth
    if not self._doLogin():
        return False

    urls, filename = self._makeURL(result)

    for url in urls:
        logger.log(u"Downloading a result from " + self.name + " at " + url)

        # Fetch failed entirely -> silently try the next URL.
        if not helpers.download_file(url, filename, session=self.session):
            continue

        if self._verify_download(filename):
            logger.log(u"Saved result to " + filename, logger.INFO)
            return True

        # Downloaded but did not verify: discard the partial file.
        logger.log(u"Could not download %s" % url, logger.WARNING)
        helpers._remove_file_failed(filename)

    if len(urls):
        logger.log(u"Failed to download any results", logger.WARNING)

    return False
def download_result(self, result):
    """Save the result to disk; return True once a download verifies OK."""
    if not self.login():
        return False

    urls, filename = self._make_url(result)

    for url in urls:
        # Placeholder entries carry nothing downloadable.
        if 'NO_DOWNLOAD_NAME' in url:
            continue

        if url.startswith('http'):
            # Referer = scheme://host/ of the download URL, for picky trackers.
            self.headers.update({
                'Referer': '/'.join(url.split('/')[:3]) + '/'
            })

        logger.log(u'Downloading a result from {0} at {1}'.format(self.name, url))

        # Jackett/TorzNab may serve a torrent payload behind an .nzb name.
        if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
            filename = replace_extension(filename, GenericProvider.TORRENT)

        fetched = download_file(url, filename, session=self.session,
                                headers=self.headers,
                                hooks={'response': self.get_url_hook})
        if fetched and self._verify_download(filename):
            logger.log(u'Saved result to {0}'.format(filename), logger.INFO)
            return True

        logger.log(u'Could not download {0}'.format(url), logger.WARNING)
        remove_file_failed(filename)

    if urls:
        logger.log(u'Failed to download any results', logger.WARNING)

    return False
def download_result(self, result):
    """
    Save the result to disk.

    This provider's search results point at show sheets rather than
    .torrent files, so each sheet page is fetched and scraped for the
    real torrent URL before downloading.

    :param result: search result whose URL(s) should be downloaded
    :return: True once a download verifies OK, otherwise False
    """
    # check for auth
    if not self.login():
        return False

    urls, filename = self._make_url(result)

    for url in urls:
        # Search results don't return torrent files directly, it returns show sheets so we must parse showSheet to access torrent.
        data = self.get_url(url, returns='text')
        if not data:
            # Page fetch failed (None/empty) -- re.search would raise on
            # None, so skip to the next candidate URL instead.
            continue

        match = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL)
        if not match:
            # No torrent link on this sheet; previously .group() on None
            # raised AttributeError here.
            continue
        url_torrent = match.group()

        if url_torrent.startswith('http'):
            self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'})

        logger.log('Downloading a result from {}'.format(url))

        if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers):
            if self._verify_download(filename):
                logger.log('Saved result to {}'.format(filename), logger.INFO)
                return True
            logger.log('Could not download {}'.format(url), logger.WARNING)
            helpers.remove_file_failed(filename)

    if len(urls):
        logger.log('Failed to download any results', logger.WARNING)

    return False
def download_result(self, result):
    """Fetch a search result and store it on disk.

    Returns True when a downloaded file passes verification, else False.
    """
    if not self.login():
        return False

    urls, filename = self._make_url(result)

    for url in urls:
        # Skip placeholder entries that have no downloadable payload.
        if "NO_DOWNLOAD_NAME" in url:
            continue

        if url.startswith("http"):
            # Send scheme://host/ as Referer for providers that check it.
            referer = "/".join(url.split("/")[:3]) + "/"
            self.headers.update({"Referer": referer})

        logger.log(u"Downloading a result from {0} at {1}".format(self.name, url))

        # Jackett/TorzNab can hand back a torrent behind an .nzb filename.
        if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
            filename = replace_extension(filename, GenericProvider.TORRENT)

        ok = download_file(url, filename, session=self.session,
                           headers=self.headers,
                           hooks={"response": self.get_url_hook})
        if ok and self._verify_download(filename):
            logger.log(u"Saved result to {0}".format(filename), logger.INFO)
            return True

        logger.log(u"Could not download {0}".format(url), logger.WARNING)
        remove_file_failed(filename)

    if urls:
        logger.log(u"Failed to download any results", logger.WARNING)

    return False
def downloadResult(self, result):
    """
    Save the result to disk.

    Downloads through the configured proxy when one is enabled.
    """
    # check for auth
    if not self._doLogin():
        return False

    urls, filename = self._makeURL(result)

    # With an active proxy, advertise its URL as Referer; otherwise make
    # sure no stale Referer header lingers from a previous request.
    if self.proxy.isEnabled():
        self.headers.update({"Referer": self.proxy.getProxyURL()})
    elif "Referer" in self.headers:
        self.headers.pop("Referer")

    for url in urls:
        # Placeholder entry with nothing to fetch.
        if "NO_DOWNLOAD_NAME" in url:
            continue

        logger.log(u"Downloading a result from " + self.name + " at " + url)

        if not helpers.download_file(self.proxy._buildURL(url), filename,
                                     session=self.session, headers=self.headers):
            continue

        if self._verify_download(filename):
            logger.log(u"Saved result to " + filename, logger.INFO)
            return True

        # Downloaded but failed verification: remove the bad file.
        logger.log(u"Could not download %s" % url, logger.WARNING)
        helpers._remove_file_failed(filename)

    if len(urls):
        logger.log(u"Failed to download any results", logger.WARNING)

    return False
def downloadResult(self, result):
    """
    Save the result to disk.

    Handles proxying, Referer headers and Jackett/TorzNab torrents that
    arrive under an .nzb filename.
    """
    # check for auth
    if not self._doLogin():
        return False

    urls, filename = self._makeURL(result)

    # Route the Referer through the proxy when enabled; otherwise drop
    # any stale Referer header before per-URL handling below.
    if self.proxy.isEnabled():
        self.headers.update({'Referer': self.proxy.getProxyURL()})
    elif 'Referer' in self.headers:
        self.headers.pop('Referer')

    for url in urls:
        if 'NO_DOWNLOAD_NAME' in url:
            continue

        if not self.proxy.isEnabled() and url.startswith('http'):
            # Let's just set a referer for every .torrent/.nzb, should work as a cover-all without side-effects
            self.headers.update({'Referer': '/'.join(url.split('/')[:3]) + '/'})

        logger.log(u"Downloading a result from " + self.name + " at " + url)

        # Support for Jackett/TorzNab
        if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
            filename = filename.rsplit('.', 1)[0] + '.' + GenericProvider.TORRENT

        if not helpers.download_file(self.proxy._buildURL(url), filename,
                                     session=self.session, headers=self.headers):
            continue

        if self._verify_download(filename):
            logger.log(u"Saved result to " + filename, logger.INFO)
            return True

        logger.log(u"Could not download %s" % url, logger.WARNING)
        helpers._remove_file_failed(filename)

    if len(urls):
        logger.log(u"Failed to download any results", logger.WARNING)

    return False
def downloadResult(self, result):
    """
    Save the result to disk.

    Walks the candidate URLs and returns True on the first verified
    download, False when everything fails.
    """
    # check for auth
    if not self._doLogin():
        return False

    urls, filename = self._makeURL(result)

    for url in urls:
        if 'NO_DOWNLOAD_NAME' in url:
            continue

        if url.startswith('http'):
            # Cover-all Referer: the scheme://host/ of the download URL.
            self.headers.update({'Referer': '/'.join(url.split('/')[:3]) + '/'})

        logger.log(u"Downloading a result from " + self.name + " at " + url)

        # Support for Jackett/TorzNab
        if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
            filename = filename.rsplit('.', 1)[0] + '.' + GenericProvider.TORRENT

        if not helpers.download_file(url, filename, session=self.session,
                                     headers=self.headers):
            continue

        if self._verify_download(filename):
            logger.log(u"Saved result to " + filename, logger.INFO)
            return True

        logger.log(u"Could not download %s" % url, logger.WARNING)
        helpers.remove_file_failed(filename)

    if len(urls):
        logger.log(u"Failed to download any results", logger.WARNING)

    return False
def cache_image(self, image_url, image_path):
    """Download image_url to image_path unless a cached copy already exists."""
    # Only cache if the file does not exist yet
    if ek(os.path.isfile, image_path):
        return
    helpers.download_file(image_url, image_path, session=self.session)
#......... part of the code is omitted here .........
'#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
del found_results[provider_id][ep_num]
# of all the single ep results narrow it down to the best one for each episode
final_results += set(multi_results.values())
quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
for cur_ep in found_results[provider_id]:
if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
continue
if 0 == len(found_results[provider_id][cur_ep]):
continue
best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
filter_rls=orig_thread_name)
# if all results were rejected move on to the next episode
if not best_result:
continue
# filter out possible bad torrents from providers
if 'torrent' == best_result.resultType:
if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
best_result.url = best_result.get_data_func(best_result.url)
best_result.get_data_func = None # consume only once
if not best_result.url:
continue
if best_result.url.startswith('magnet'):
if 'blackhole' != sickbeard.TORRENT_METHOD:
best_result.content = None
else:
cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
'%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
continue
try:
with open(cache_file, 'rb') as fh:
td = fh.read()
setattr(best_result, 'cache_file', cache_file)
except (StandardError, Exception):
continue
if getattr(best_result.provider, 'chk_td', None):
name = None
try:
hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
x, v = len(hdr[0]), int(hdr[1])
while x < len(td):
y = x + v
name = 'name' == td[x: y]
w = re.findall('((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y: y + 32])[0]
x, v = y + len(w[0]), int(w[1])
if name:
name = td[x: x + v]
break
except (StandardError, Exception):
continue
if name:
if not pass_show_wordlist_checks(name, show):
continue
if not show_name_helpers.pass_wordlist_checks(name, indexer_lookup=False):
logger.log('Ignored: %s (debug log has detail)' % name)
continue
best_result.name = name
def change_unrar_tool(unrar_tool, alt_unrar_tool):
    """
    Validate or locate a working unrar executable and store it in settings.

    Order of attempts: the supplied tool, rarfile's own defaults, known
    WinRAR install locations (Windows only), and finally a downloaded
    bundled unrar.exe. ``sickbeard.UNRAR_TOOL`` and
    ``sickbeard.ALT_UNRAR_TOOL`` are always set before returning.

    :param unrar_tool: path of the preferred unrar executable
    :param alt_unrar_tool: path of the fallback unrar executable
    :return: truthy when a working unrar tool was configured, else False
    """
    # Check for failed unrar attempt, and remove it
    # Must be done before unrar is ever called or the self-extractor opens and locks startup
    bad_unrar = os.path.join(sickbeard.DATA_DIR, 'unrar.exe')
    if os.path.exists(bad_unrar) and os.path.getsize(bad_unrar) == 447440:
        try:
            os.remove(bad_unrar)
        except OSError as e:
            logger.log("Unable to delete bad unrar.exe file {0}: {1}. You should delete it manually".format(bad_unrar, e.strerror), logger.WARNING)

    try:
        rarfile.custom_check(unrar_tool)
    except (rarfile.RarCannotExec, rarfile.RarExecError, OSError, IOError):
        # Let's just return right now if the defaults work
        try:
            # noinspection PyProtectedMember
            test = rarfile._check_unrar_tool()
            if test:
                # These must always be set to something before returning
                sickbeard.UNRAR_TOOL = rarfile.UNRAR_TOOL
                sickbeard.ALT_UNRAR_TOOL = rarfile.ALT_TOOL
                return True
        except (rarfile.RarCannotExec, rarfile.RarExecError, OSError, IOError):
            pass

        if platform.system() == 'Windows':
            # Look for WinRAR installations
            found = False
            winrar_path = 'WinRAR\\UnRAR.exe'
            # Make a set of unique paths to check from existing environment variables.
            # Use .get() for every variable so a missing one cannot raise KeyError
            # (the original indexed os.environ["ProgramFiles"] directly).
            program_files = os.environ.get("ProgramFiles", "")
            check_locations = {
                os.path.join(location, winrar_path) for location in (
                    os.environ.get("ProgramW6432"), os.environ.get("ProgramFiles(x86)"),
                    program_files, re.sub(r'\s?\(x86\)', '', program_files)
                ) if location
            }
            check_locations.add(os.path.join(sickbeard.PROG_DIR, 'unrar\\unrar.exe'))

            for check in check_locations:
                if ek(os.path.isfile, check):
                    # Can use it?
                    try:
                        rarfile.custom_check(check)
                        unrar_tool = check
                        found = True
                        break
                    except (rarfile.RarCannotExec, rarfile.RarExecError, OSError, IOError):
                        found = False

            # Download a bundled unrar.exe as a last resort
            if not found:
                logger.log('Trying to download unrar.exe and set the path')
                unrar_store = ek(os.path.join, sickbeard.PROG_DIR, 'unrar')  # ./unrar (folder)
                unrar_zip = ek(os.path.join, sickbeard.PROG_DIR, 'unrar_win.zip')  # file download

                if (helpers.download_file(
                    "http://sickrage.github.io/unrar/unrar_win.zip", filename=unrar_zip, session=helpers.make_session()
                ) and helpers.extractZip(archive=unrar_zip, targetDir=unrar_store)):
                    try:
                        ek(os.remove, unrar_zip)
                    except OSError as e:
                        logger.log("Unable to delete downloaded file {0}: {1}. You may delete it manually".format(unrar_zip, e.strerror))

                    check = os.path.join(unrar_store, "unrar.exe")
                    try:
                        rarfile.custom_check(check)
                        unrar_tool = check
                        logger.log('Successfully downloaded unrar.exe and set as unrar tool')
                    except (rarfile.RarCannotExec, rarfile.RarExecError, OSError, IOError):
                        logger.log('Sorry, unrar was not set up correctly. Try installing WinRAR and make sure it is on the system PATH')
                else:
                    logger.log('Unable to download unrar.exe')

    # These must always be set to something before returning
    sickbeard.UNRAR_TOOL = rarfile.UNRAR_TOOL = rarfile.ORIG_UNRAR_TOOL = unrar_tool
    sickbeard.ALT_UNRAR_TOOL = rarfile.ALT_TOOL = alt_unrar_tool

    try:
        # noinspection PyProtectedMember
        test = rarfile._check_unrar_tool()
    except (rarfile.RarCannotExec, rarfile.RarExecError, OSError, IOError):
        if sickbeard.UNPACK == 1:
            logger.log('Disabling UNPACK setting because no unrar is installed.')
            sickbeard.UNPACK = 0
        test = False

    return test
def _update_zoneinfo():
    """
    Request new zoneinfo directly from repository.

    Compares the remote zoneinfo tarball name/hash against the installed
    dateutil zoneinfo file and, when they differ and the MD5 checksum
    matches, swaps the new file in and reloads the zoneinfo module.
    Updates the module-global ``sb_timezone`` as a side effect.
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()

    url_zv = 'http://sickragetv.github.io/network_timezones/zoneinfo.txt'

    try:
        url_data = helpers.getURL(url_zv, session=requests.Session())
        if not url_data:
            # Bare `raise` with no active exception is invalid; raise an
            # explicit error so the shared handler below logs and returns.
            raise ValueError('Empty response from %s' % url_zv)

        # Filename of existing zoneinfo
        if zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek(basename, zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # Filename and hash of new zoneinfo
        (new_zoneinfo, zoneinfo_md5) = url_data.strip().rsplit(u' ')
    except Exception:
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' %
                   url_zv, logger.WARNING)
        return

    # Nothing to do when the remote file matches what is installed.
    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    url_tar = u'http://sickragetv.github.io/network_timezones/%s' % new_zoneinfo
    zonefile = helpers.real_path(ek(join, ek(os.path.dirname, zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek(os.path.exists, zonefile_tmp):
        try:
            # BUGFIX: was 'ekk(...)' -- a NameError; the helper is 'ek'.
            ek(os.remove, zonefile_tmp)
        except OSError:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.WARNING)
            return

    if not helpers.download_file(url_tar, zonefile_tmp, session=requests.Session()):
        return

    if not ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.WARNING)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek(join, ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                if ek(os.path.exists, old_file):
                    ek(os.remove, old_file)
            # rename downloaded file
            ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(zoneinfo)
            sb_timezone = tz.tzlocal()
        except Exception:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.WARNING)
        return
Please leave a comment