antizapret

parent fbd60a9910
commit 219431b312

47  Core.py
@@ -254,28 +254,33 @@ class Core:
        #playlist.add(path, listitem)
        #xbmc.Player().play(playlist)

        try:
            import cherrytorrent
            http_config = {
                'port': 8089,
            }
            from resources.proxy import antizapret
            config = antizapret.config()
            log('[antizapret]: '+str(config["domains"]))
            log('[antizapret]: '+str(config["server"]))

            torrent_config = {
                'port': 6900,
                'max_download_rate': 0,
                'max_upload_rate': 0,
                'keep_files': False
            }
            server = cherrytorrent.Server(http_config, torrent_config)
            server.run()
            url="http://localhost:8089/add?uri=magnet%3A%3Fxt%3Durn%3Abtih%3Ac39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab%26dn%3Dbig%2Bbuck%2Bbunny%2Bbdrip%2Bxvid%2Bmedic%26tr%3Dudp%253A%252F%252Ftracker.publicbt.com%253A80%252Fannounce%26tr%3Dudp%253A%252F%252Fopen.demonii.com%253A1337"
            print str(get_url('',url))
            xbmc.sleep(3000)
            path="http://localhost:8089/video"#?info_hash=c39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab
            xbmc.Player().play(path)
            xbmc.sleep(30000)
        finally:
            get_url('',"http://localhost:8089/shutdown")
        #try:
        # import cherrytorrent
        # http_config = {
        # 'port': 8089,
        # }

        # torrent_config = {
        # 'port': 6900,
        # 'max_download_rate': 0,
        # 'max_upload_rate': 0,
        # 'keep_files': False
        # }
        # server = cherrytorrent.Server(http_config, torrent_config)
        # server.run()
        # url="http://localhost:8089/add?uri=magnet%3A%3Fxt%3Durn%3Abtih%3Ac39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab%26dn%3Dbig%2Bbuck%2Bbunny%2Bbdrip%2Bxvid%2Bmedic%26tr%3Dudp%253A%252F%252Ftracker.publicbt.com%253A80%252Fannounce%26tr%3Dudp%253A%252F%252Fopen.demonii.com%253A1337"
        # print str(get_url('',url))
        # xbmc.sleep(3000)
        # path="http://localhost:8089/video"#?info_hash=c39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab
        # xbmc.Player().play(path)
        # xbmc.sleep(30000)
        #finally:
        # get_url('',"http://localhost:8089/shutdown")

    def DownloadStatus(self, params={}):
        db = DownloadDB()
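Note: the Core.py hunk above drives cherrytorrent entirely over its local HTTP API: start the server, hand it a magnet URI via /add, play the /video stream in Kodi, and always hit /shutdown in the finally block. A minimal sketch of the same flow against an already-running server, assuming only the endpoints used above (the helper name and magnet handling are illustrative, not plugin code):

# Hedged sketch: reuses only the /add, /video and /shutdown endpoints
# that the commit above talks to on localhost:8089.
import urllib, urllib2
import xbmc

def play_via_cherrytorrent(magnet, port=8089):
    base = "http://localhost:%d" % port
    try:
        # register the magnet with the running cherrytorrent server
        urllib2.urlopen("%s/add?%s" % (base, urllib.urlencode({"uri": magnet}))).read()
        xbmc.sleep(3000)                        # give it a moment to fetch metadata
        xbmc.Player().play("%s/video" % base)   # stream the buffered data over HTTP
        xbmc.sleep(30000)
    finally:
        urllib2.urlopen("%s/shutdown" % base).read()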
@@ -416,7 +416,7 @@ class Libtorrent:
            'save_path': self.storageDirectory,
            #'storage_mode': self.lt.storage_mode_t(1),
            'paused': False,
            #'auto_managed': False,
            'auto_managed': False,
            #'duplicate_is_error': True
        }
        self.torrentHandle = self.session.add_torrent(torrent_info)
@@ -452,17 +452,6 @@ class Libtorrent:
            self.torrentHandle.piece_priority(self.endPart - i, 7)
            # print str(i)

    def fetchParts(self):
        priorities = self.torrentHandle.piece_priorities()
        status = self.torrentHandle.status()
        if len(status.pieces) == 0:
            return
        if priorities[self.startPart] == 0:
            self.torrentHandle.piece_priority(self.startPart, 2)
        for part in range(self.startPart, self.endPart + 1):
            if priorities[part] == 0:
                self.torrentHandle.piece_priority(part, 1)

    def checkThread(self):
        if self.threadComplete == True:
            log('checkThread KIIIIIIIIIIILLLLLLLLLLLLLLL')
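Note: fetchParts() above is a windowed re-prioritisation pass over the pieces backing the requested byte range: any piece between startPart and endPart whose priority has dropped to 0 is bumped back to 1, with the head of the window nudged to 2. A standalone sketch of the same idea, assuming an existing python-libtorrent torrent_handle (the function name and the polling loop are illustrative):

# Hedged sketch of the re-prioritisation done by fetchParts(); `handle` is an
# assumed libtorrent torrent_handle, start_piece/end_piece bound the window.
def refresh_window(handle, start_piece, end_piece):
    if len(handle.status().pieces) == 0:          # bitfield not available yet
        return
    priorities = handle.piece_priorities()
    if priorities[start_piece] == 0:
        handle.piece_priority(start_piece, 2)     # head of the window first
    for piece in range(start_piece, end_piece + 1):
        if priorities[piece] == 0:
            handle.piece_priority(piece, 1)       # nothing in the window stays skipped

# e.g. called once a second while the player is busy:
#     while player.isPlaying():
#         refresh_window(handle, first_piece, last_piece)
#         xbmc.sleep(1000)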
@@ -476,19 +465,20 @@ class Libtorrent:
        self.session.stop_dht()

    def debug(self):
        try:
        #try:
        if 1==1:
            # print str(self.getFilePath(0))
            s = self.torrentHandle.status()
            #get_cache_status=self.session.get_cache_status()
            #log('get_cache_status - %s/%s' % (str(get_cache_status.blocks_written), str(get_cache_status.blocks_read)))
            # get_settings=self.torrentHandle.status
            # print s.num_pieces
            # priorities = self.torrentHandle.piece_priorities()
            # self.dump(priorities)
            priorities = self.torrentHandle.piece_priorities()
            str(priorities)
            # print str('anonymous_mode '+str(get_settings['anonymous_mode']))

            state_str = ['queued', 'checking', 'downloading metadata',
                         'downloading', 'finished', 'seeding', 'allocating']
                         'downloading', 'finished', 'seeding', 'allocating', 'checking fastresume']
            log('[%s] %.2f%% complete (down: %.1f kb/s up: %.1f kB/s peers: %d) %s' % \
                (self.lt.version, s.progress * 100, s.download_rate / 1000,
                 s.upload_rate / 1000, s.num_peers, state_str[s.state]))
@@ -512,7 +502,8 @@ class Libtorrent:
            # print 'True pieces: %d' % i
            # print s.current_tracker
            # print str(s.pieces)
        except:
        #except:
        else:
            print 'debug error'
            pass

@@ -277,7 +277,6 @@ class TorrentPlayer(xbmc.Player):
                self.torrent.checkThread()
                return
            xbmc.sleep(1000)
        #self.torrent.torrentHandle.flush_cache()
        self.torrent.session.remove_torrent(self.torrent.torrentHandle)
        progressBar.update(0)
        progressBar.close()
@@ -41,6 +41,7 @@ class SearcherABC:
    sourceWeight = 1
    cookieJar = None
    timeout_multi=int(sys.modules["__main__"].__settings__.getSetting("timeout"))
    proxy=int(sys.modules["__main__"].__settings__.getSetting("proxy"))
    __plugin__='Empty v 0 0 0'
    baseurl = 'site.com'

@@ -107,6 +108,19 @@ class SearcherABC:

    def makeRequest(self, url, data={}, headers={}):
        self.load_cookie()
        if self.proxy == 1:
            from resources.proxy import antizapret
            opener = urllib2.build_opener(antizapret.AntizapretProxyHandler(), urllib2.HTTPCookieProcessor(self.cookieJar))
            config = antizapret.config()
            self.debug('[antizapret]: '+str(config["domains"]))
            self.debug('[antizapret]: '+str(config["server"]))
        elif self.proxy == 2:
            from resources.proxy import immunicity
            opener = urllib2.build_opener(immunicity.ImmunicityProxyHandler(), urllib2.HTTPCookieProcessor(self.cookieJar))
            config = immunicity.config()
            self.debug('[immunicity]: '+str(config["domains"]))
            self.debug('[immunicity]: '+str(config["server"]))
        else:
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookieJar))
        opener.addheaders = headers
        if 0 < len(data):
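Note: makeRequest() above selects the opener from the proxy setting: 1 builds the Anti-zapret handler chain, 2 the Immunicity one, anything else a plain cookie-aware opener. A hedged sketch of building the same chain outside the searcher class, assuming the resources.proxy.antizapret module added later in this commit (the target URL and User-Agent are placeholders):

# Sketch only: the same opener chain as the proxy == 1 branch above.
import cookielib, urllib2
from resources.proxy import antizapret

cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(antizapret.AntizapretProxyHandler(),
                              urllib2.HTTPCookieProcessor(cookie_jar))
opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
# hosts matching the PAC list are meant to go through the proxy, the rest direct
html = opener.open('http://example.com/').read()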
@@ -44,6 +44,10 @@
    <string id="30044">Save files</string>
    <string id="30045">Ask to save</string>
    <string id="30046">BTclient (python-libtorrent via http)</string>
    <string id="30047">Auto-unblocking proxy</string>
    <string id="30048">None</string>
    <string id="30049">Anti-zapret</string>
    <string id="30050">Immunicity</string>
    <string id="30101">Interface</string>
    <string id="30102">P2P Network</string>
    <string id="30103">Advanced</string>
@@ -69,4 +73,5 @@
    <string id="30418">Choose searcher</string>
    <string id="30419">You don't have external searcher. Please install it first.</string>

</strings>
@@ -44,6 +44,10 @@
    <string id="30044">Сохранять файлы</string>
    <string id="30045">Спросить о сохранении</string>
    <string id="30046">BTclient (python-libtorrent по http)</string>
    <string id="30047">Антизапрет (прокси)</string>
    <string id="30048">Не использовать</string>
    <string id="30049">Anti-zapret</string>
    <string id="30050">Immunicity</string>
    <string id="30101">Интерфейс</string>
    <string id="30102">P2P Сеть</string>
    <string id="30103">Дополнительные</string>
@@ -0,0 +1,99 @@
# -*- coding: utf-8 -*-

import os, re, fnmatch, threading, urllib2
from contextlib import contextmanager, closing
from functions import log, debug, tempdir

LOCKS = {}
PAC_URL = "http://antizapret.prostovpn.org/proxy.pac"
CACHE_DIR = tempdir()
USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36"

if not os.path.exists(CACHE_DIR):
    os.makedirs(CACHE_DIR)

CACHE = 24 * 3600 # 24 hour caching

@contextmanager
def shelf(filename, ttl=0):
    import shelve
    filename = os.path.join(CACHE_DIR, filename)
    with LOCKS.get(filename, threading.RLock()):
        with closing(shelve.open(filename, writeback=True)) as d:
            import time
            if not d:
                d.update({
                    "created_at": time.time(),
                    "data": {},
                })
            elif ttl > 0 and (time.time() - d["created_at"]) > ttl:
                d["data"] = {}
            yield d["data"]

_config = {}

def config():
    global _config
    if not _config:
        with shelf("antizapret.pac_config", ttl=CACHE) as pac_config:
            if not pac_config:
                log("[antizapret]: Fetching Antizapret PAC file on %s" %PAC_URL)
                try:
                    pac_data = urllib2.urlopen(PAC_URL).read()
                except:
                    pac_data = ""

                r = re.search(r"\"PROXY (.*); DIRECT", pac_data)
                if r:
                    pac_config["server"] = r.group(1)
                    pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", "").replace("\\", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
                else:
                    pac_config["server"] = None
                    pac_config["domains"] = []
            _config = pac_config
    return _config

class AntizapretProxyHandler(urllib2.ProxyHandler, object):
    def __init__(self):
        self.config = config()
        urllib2.ProxyHandler.__init__(self, {
            "http" : "<empty>",
            "https": "<empty>",
            "ftp" : "<empty>",
        })
    def proxy_open(self, req, proxy, type):
        import socket

        if socket.gethostbyname(req.get_host().split(":")[0]) in self.config["domains"]:
            debug("[antizapret]: Pass request through proxy " + self.config["server"])
            return urllib2.ProxyHandler.proxy_open(self, req, self.config["server"], type)

        return None

def url_get(url, params={}, headers={}, post = None):

    if params:
        import urllib
        url = "%s?%s" % (url, urllib.urlencode(params))

    if post:
        import urllib
        post = urllib.urlencode(post)

    req = urllib2.Request(url, post)
    req.add_header("User-Agent", USER_AGENT)

    for k, v in headers.items():
        req.add_header(k, v)

    try:
        with closing(urllib2.urlopen(req)) as response:
            data = response.read()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                return zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(data)
            return data
    except urllib2.HTTPError as e:
        log("[antizapret]: HTTP Error(%s): %s" % (e.errno, e.strerror))
        return None
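Note: in the new module above, config() downloads the PAC file at most once per CACHE seconds, caches it in a shelve file under the add-on temp dir, and extracts the proxy server from the "PROXY ...; DIRECT" clause together with the domain patterns; url_get() is a small urllib2 wrapper that transparently inflates gzip responses. A hedged usage sketch (the query URL and parameters are placeholders, not plugin code):

# Sketch only: exercising the module above.
from resources.proxy import antizapret

cfg = antizapret.config()
print "[antizapret] proxy %s, %d domain patterns" % (cfg["server"], len(cfg["domains"]))

# plain GET through the helper; gzip responses are decompressed transparently
page = antizapret.url_get("http://example.com/search",
                          params={"q": "big buck bunny"},
                          headers={"Accept-Encoding": "gzip"})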
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-

import os, re, fnmatch, threading, urllib2
from contextlib import contextmanager, closing
from functions import log, debug, tempdir

LOCKS = {}
PAC_URL = "http://clientconfig.immunicity.org/pacs/all.pac"
CACHE_DIR = tempdir()
USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36"

if not os.path.exists(CACHE_DIR):
    os.makedirs(CACHE_DIR)

CACHE = 24 * 3600 # 24 hour caching

@contextmanager
def shelf(filename, ttl=0):
    import shelve
    filename = os.path.join(CACHE_DIR, filename)
    with LOCKS.get(filename, threading.RLock()):
        with closing(shelve.open(filename, writeback=True)) as d:
            import time
            if not d:
                d.update({
                    "created_at": time.time(),
                    "data": {},
                })
            elif ttl > 0 and (time.time() - d["created_at"]) > ttl:
                d["data"] = {}
            yield d["data"]

_config = {}

def config():
    global _config
    if not _config:
        with shelf("xbmctorrent.immunicity.pac_config", ttl=CACHE) as pac_config:
            log("Fetching Immunicity PAC file")
            pac_data = urllib2.urlopen(PAC_URL).read()
            pac_config["server"] = re.search(r"var proxyserver = '(.*)'", pac_data).group(1)
            pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
            _config = pac_config
    return _config

class ImmunicityProxyHandler(urllib2.ProxyHandler, object):
    def __init__(self):
        self.config = config()
        urllib2.ProxyHandler.__init__(self, {
            "http" : "<empty>",
            "https": "<empty>",
            "ftp" : "<empty>",
        })
    def proxy_open(self, req, proxy, type):
        import socket

        if socket.gethostbyname(req.get_host().split(":")[0]) in self.config["domains"]:
            debug("[immunicity]: Pass request through proxy " + self.config["server"])
            return urllib2.ProxyHandler.proxy_open(self, req, self.config["server"], type)

        return None

def url_get(url, params={}, headers={}, post = None):

    if params:
        import urllib
        url = "%s?%s" % (url, urllib.urlencode(params))

    if post:
        import urllib
        post = urllib.urlencode(post)

    req = urllib2.Request(url, post)
    req.add_header("User-Agent", USER_AGENT)

    for k, v in headers.items():
        req.add_header(k, v)

    try:
        with closing(urllib2.urlopen(req)) as response:
            data = response.read()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                return zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(data)
            return data
    except urllib2.HTTPError as e:
        log("[immunicity]: HTTP Error(%s): %s" % (e.errno, e.strerror))
        return None
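Note: both proxy modules share the same shelf() helper: a shelve file in the add-on temp dir whose contents are discarded once they are older than ttl seconds. A hedged sketch of reusing it as a generic small cache (the cache name and stored values are made up for the example):

# Sketch only: the shelf() TTL cache from the module above, used for arbitrary data.
from resources.proxy import immunicity

with immunicity.shelf("mirror_list", ttl=3600) as cache:   # wiped after an hour
    if "mirrors" not in cache:
        cache["mirrors"] = ["http://example.org", "http://example.net"]
    mirrors = cache["mirrors"]
# the shelve file lives under tempdir() and is re-read on the next call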
@@ -11,6 +11,7 @@
        <setting id="timeout" type="enum" lvalues="30026|30027|30028" label="30025" default="1"/>
        <setting id="search_phrase" type="text" label="30040"/>
        <setting id="num_threads" type="slider" label="30042" default="3" range="1,1,9" option="int"/>
        <setting id="proxy" type="enum" lvalues="30048|30049|" label="30047" default="0"/>
        <setting id="debug" type="bool" label="30015" default="false"/>
    </category>
    <category label="30102">