platform fix

parent 30df420b4f
commit ca92203b75

.idea/workspace.xml (1682)
File diff suppressed because it is too large
AceStream.py (12)
|
@@ -18,8 +18,6 @@
|
|||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
import urllib2
|
||||
import urllib
|
||||
|
@@ -28,8 +26,8 @@ import re
|
|||
import base64
|
||||
from StringIO import StringIO
|
||||
import gzip
|
||||
from functions import file_decode, file_encode
|
||||
|
||||
from functions import file_decode, file_encode
|
||||
from functions import magnet_alert
|
||||
import xbmcvfs
|
||||
|
||||
|
@@ -67,12 +65,11 @@ class AceStream:
|
|||
print 'Error importing TSengine from ASCore. Exception: ' + str(e)
|
||||
return
|
||||
|
||||
|
||||
self.TSplayer = tsengine()
|
||||
del tsengine
|
||||
self.torrentFilesDirectory = torrentFilesDirectory
|
||||
self.storageDirectory = storageDirectory
|
||||
_path=os.path.join(self.storageDirectory, self.torrentFilesDirectory)+os.sep
|
||||
_path = os.path.join(self.storageDirectory, self.torrentFilesDirectory) + os.sep
|
||||
if not xbmcvfs.exists(_path):
|
||||
xbmcvfs.mkdirs(_path)
|
||||
if xbmcvfs.exists(torrentFile):
|
||||
|
@@ -115,7 +112,8 @@ class AceStream:
|
|||
localFile.write(content)
|
||||
localFile.close()
|
||||
except Exception, e:
|
||||
print 'Unable to save torrent file from "' + torrentUrl + '" to "' + torrentFile + '" in Torrent::saveTorrent' + '. Exception: ' + str(e)
|
||||
print 'Unable to save torrent file from "' + torrentUrl + '" to "' + torrentFile + '" in Torrent::saveTorrent' + '. Exception: ' + str(
|
||||
e)
|
||||
return
|
||||
if xbmcvfs.exists(torrentFile):
|
||||
self.torrentFile = torrentFile
|
||||
|
@@ -134,7 +132,7 @@ class AceStream:
|
|||
fileList = self.getContentList()
|
||||
for i in fileList:
|
||||
if i['ind'] == contentId:
|
||||
return os.path.join(file_encode(self.storageDirectory),i['title'])
|
||||
return os.path.join(file_encode(self.storageDirectory), i['title'])
|
||||
|
||||
def getContentList(self):
|
||||
filelist = []
BeautifulSoup.py (117)
|
@@ -86,7 +86,6 @@ __license__ = "New-style BSD"
|
|||
from sgmllib import SGMLParser, SGMLParseError
|
||||
import codecs
|
||||
import markupbase
|
||||
import types
|
||||
import re
|
||||
import sgmllib
|
||||
|
||||
|
@@ -176,9 +175,9 @@ class PageElement(object):
|
|||
except ValueError:
|
||||
pass
|
||||
|
||||
#Find the two elements that would be next to each other if
|
||||
#this element (and any children) hadn't been parsed. Connect
|
||||
#the two.
|
||||
# Find the two elements that would be next to each other if
|
||||
# this element (and any children) hadn't been parsed. Connect
|
||||
# the two.
|
||||
lastChild = self._lastRecursiveChild()
|
||||
nextElement = lastChild.next
|
||||
|
||||
|
@@ -343,7 +342,7 @@ class PageElement(object):
|
|||
|
||||
fetchParents = findParents # Compatibility with pre-3.x
|
||||
|
||||
#These methods do the real heavy lifting.
|
||||
# These methods do the real heavy lifting.
|
||||
|
||||
def _findOne(self, method, name, attrs, text, **kwargs):
|
||||
r = None
|
||||
|
@@ -388,8 +387,8 @@ class PageElement(object):
|
|||
break
|
||||
return results
|
||||
|
||||
#These Generators can be used to navigate starting from both
|
||||
#NavigableStrings and Tags.
|
||||
# These Generators can be used to navigate starting from both
|
||||
# NavigableStrings and Tags.
|
||||
def nextGenerator(self):
|
||||
i = self
|
||||
while i is not None:
|
||||
|
@@ -655,8 +654,8 @@ class Tag(PageElement):
|
|||
for item in self.attrs:
|
||||
if item[0] == key:
|
||||
self.attrs.remove(item)
|
||||
#We don't break because bad HTML can define the same
|
||||
#attribute multiple times.
|
||||
# We don't break because bad HTML can define the same
|
||||
# attribute multiple times.
|
||||
self._getAttrMap()
|
||||
if self.attrMap.has_key(key):
|
||||
del self.attrMap[key]
|
||||
|
@@ -668,7 +667,7 @@ class Tag(PageElement):
|
|||
return apply(self.findAll, args, kwargs)
|
||||
|
||||
def __getattr__(self, tag):
|
||||
#print "Getattr %s.%s" % (self.__class__, tag)
|
||||
# print "Getattr %s.%s" % (self.__class__, tag)
|
||||
if len(tag) > 3 and tag.rfind('Tag') == len(tag) - 3:
|
||||
return self.find(tag[:-3])
|
||||
elif tag.find('__') != 0:
|
||||
|
@@ -830,7 +829,7 @@ class Tag(PageElement):
|
|||
s.append("\n")
|
||||
return ''.join(s)
|
||||
|
||||
#Soup methods
|
||||
# Soup methods
|
||||
|
||||
def find(self, name=None, attrs={}, recursive=True, text=None,
|
||||
**kwargs):
|
||||
|
@@ -872,7 +871,7 @@ class Tag(PageElement):
|
|||
def firstText(self, text=None, recursive=True):
|
||||
return self.find(text=text, recursive=recursive)
|
||||
|
||||
#Private methods
|
||||
# Private methods
|
||||
|
||||
def _getAttrMap(self):
|
||||
"""Initializes a map representation of this tag's attributes,
|
||||
|
@@ -883,7 +882,7 @@ class Tag(PageElement):
|
|||
self.attrMap[key] = value
|
||||
return self.attrMap
|
||||
|
||||
#Generator methods
|
||||
# Generator methods
|
||||
def childGenerator(self):
|
||||
# Just use the iterator from the contents
|
||||
return iter(self.contents)
|
||||
|
@@ -961,7 +960,7 @@ class SoupStrainer:
|
|||
return found
|
||||
|
||||
def search(self, markup):
|
||||
#print 'looking for %s in %s' % (self, markup)
|
||||
# print 'looking for %s in %s' % (self, markup)
|
||||
found = None
|
||||
# If given a list of items, scan it for a text element that
|
||||
# matches.
|
||||
|
@@ -988,20 +987,20 @@ class SoupStrainer:
|
|||
return found
|
||||
|
||||
def _matches(self, markup, matchAgainst):
|
||||
#print "Matching %s against %s" % (markup, matchAgainst)
|
||||
# print "Matching %s against %s" % (markup, matchAgainst)
|
||||
result = False
|
||||
if matchAgainst is True:
|
||||
result = markup is not None
|
||||
elif callable(matchAgainst):
|
||||
result = matchAgainst(markup)
|
||||
else:
|
||||
#Custom match methods take the tag as an argument, but all
|
||||
#other ways of matching match the tag name as a string.
|
||||
# Custom match methods take the tag as an argument, but all
|
||||
# other ways of matching match the tag name as a string.
|
||||
if isinstance(markup, Tag):
|
||||
markup = markup.name
|
||||
if markup and not isinstance(markup, basestring):
|
||||
markup = unicode(markup)
|
||||
#Now we know that chunk is either a string, or None.
|
||||
# Now we know that chunk is either a string, or None.
|
||||
if hasattr(matchAgainst, 'match'):
|
||||
# It's a regexp object.
|
||||
result = markup and matchAgainst.search(markup)
|
||||
|
@@ -1038,15 +1037,15 @@ def buildTagMap(default, *args):
|
|||
built = {}
|
||||
for portion in args:
|
||||
if hasattr(portion, 'items'):
|
||||
#It's a map. Merge it.
|
||||
# It's a map. Merge it.
|
||||
for k, v in portion.items():
|
||||
built[k] = v
|
||||
elif hasattr(portion, '__iter__'): # is a list
|
||||
#It's a list. Map each item to the default.
|
||||
# It's a list. Map each item to the default.
|
||||
for k in portion:
|
||||
built[k] = default
|
||||
else:
|
||||
#It's a scalar. Map it to the default.
|
||||
# It's a scalar. Map it to the default.
|
||||
built[portion] = default
|
||||
return built
|
||||
|
||||
|
@@ -1211,7 +1210,7 @@ class BeautifulStoneSoup(Tag, SGMLParser):
|
|||
def __getattr__(self, methodName):
|
||||
"""This method routes method call requests to either the SGMLParser
|
||||
superclass or the Tag superclass, depending on the method name."""
|
||||
#print "__getattr__ called on %s.%s" % (self.__class__, methodName)
|
||||
# print "__getattr__ called on %s.%s" % (self.__class__, methodName)
|
||||
|
||||
if methodName.startswith('start_') or methodName.startswith('end_') \
|
||||
or methodName.startswith('do_'):
|
||||
|
@@ -1240,13 +1239,13 @@ class BeautifulStoneSoup(Tag, SGMLParser):
|
|||
def popTag(self):
|
||||
tag = self.tagStack.pop()
|
||||
|
||||
#print "Pop", tag.name
|
||||
# print "Pop", tag.name
|
||||
if self.tagStack:
|
||||
self.currentTag = self.tagStack[-1]
|
||||
return self.currentTag
|
||||
|
||||
def pushTag(self, tag):
|
||||
#print "Push", tag.name
|
||||
# print "Push", tag.name
|
||||
if self.currentTag:
|
||||
self.currentTag.contents.append(tag)
|
||||
self.tagStack.append(tag)
|
||||
|
@@ -1274,13 +1273,12 @@ class BeautifulStoneSoup(Tag, SGMLParser):
|
|||
self.previous = o
|
||||
self.currentTag.contents.append(o)
|
||||
|
||||
|
||||
def _popToTag(self, name, inclusivePop=True):
|
||||
"""Pops the tag stack up to and including the most recent
|
||||
instance of the given tag. If inclusivePop is false, pops the tag
|
||||
stack up to but *not* including the most recent instqance of
|
||||
the given tag."""
|
||||
#print "Popping to %s" % name
|
||||
# print "Popping to %s" % name
|
||||
if name == self.ROOT_TAG_NAME:
|
||||
return
|
||||
|
||||
|
@@ -1323,18 +1321,18 @@ class BeautifulStoneSoup(Tag, SGMLParser):
|
|||
for i in range(len(self.tagStack) - 1, 0, -1):
|
||||
p = self.tagStack[i]
|
||||
if (not p or p.name == name) and not isNestable:
|
||||
#Non-nestable tags get popped to the top or to their
|
||||
#last occurance.
|
||||
# Non-nestable tags get popped to the top or to their
|
||||
# last occurance.
|
||||
popTo = name
|
||||
break
|
||||
if (nestingResetTriggers is not None
|
||||
and p.name in nestingResetTriggers) \
|
||||
or (nestingResetTriggers is None and isResetNesting
|
||||
and self.RESET_NESTING_TAGS.has_key(p.name)):
|
||||
#If we encounter one of the nesting reset triggers
|
||||
#peculiar to this tag, or we encounter another tag
|
||||
#that causes nesting to reset, pop up to but not
|
||||
#including that tag.
|
||||
# If we encounter one of the nesting reset triggers
|
||||
# peculiar to this tag, or we encounter another tag
|
||||
# that causes nesting to reset, pop up to but not
|
||||
# including that tag.
|
||||
popTo = p.name
|
||||
inclusive = False
|
||||
break
|
||||
|
@@ -1343,10 +1341,10 @@ class BeautifulStoneSoup(Tag, SGMLParser):
|
|||
self._popToTag(popTo, inclusive)
|
||||
|
||||
def unknown_starttag(self, name, attrs, selfClosing=0):
|
||||
#print "Start tag %s: %s" % (name, attrs)
|
||||
# print "Start tag %s: %s" % (name, attrs)
|
||||
if self.quoteStack:
|
||||
#This is not a real tag.
|
||||
#print "<%s> is not real!" % name
|
||||
# This is not a real tag.
|
||||
# print "<%s> is not real!" % name
|
||||
attrs = ''.join([' %s="%s"' % (x, y) for x, y in attrs])
|
||||
self.handle_data('<%s%s>' % (name, attrs))
|
||||
return
|
||||
|
@@ -1367,16 +1365,16 @@ class BeautifulStoneSoup(Tag, SGMLParser):
|
|||
if selfClosing or self.isSelfClosingTag(name):
|
||||
self.popTag()
|
||||
if name in self.QUOTE_TAGS:
|
||||
#print "Beginning quote (%s)" % name
|
||||
# print "Beginning quote (%s)" % name
|
||||
self.quoteStack.append(name)
|
||||
self.literal = 1
|
||||
return tag
|
||||
|
||||
def unknown_endtag(self, name):
|
||||
#print "End tag %s" % name
|
||||
# print "End tag %s" % name
|
||||
if self.quoteStack and self.quoteStack[-1] != name:
|
||||
#This is not a real end tag.
|
||||
#print "</%s> is not real!" % name
|
||||
# This is not a real end tag.
|
||||
# print "</%s> is not real!" % name
|
||||
self.handle_data('</%s>' % name)
|
||||
return
|
||||
self.endData()
|
||||
|
@@ -1544,18 +1542,18 @@ class BeautifulSoup(BeautifulStoneSoup):
|
|||
|
||||
QUOTE_TAGS = {'script': None, 'textarea': None}
|
||||
|
||||
#According to the HTML standard, each of these inline tags can
|
||||
#contain another tag of the same type. Furthermore, it's common
|
||||
#to actually use these tags this way.
|
||||
# According to the HTML standard, each of these inline tags can
|
||||
# contain another tag of the same type. Furthermore, it's common
|
||||
# to actually use these tags this way.
|
||||
NESTABLE_INLINE_TAGS = ('span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
|
||||
'center')
|
||||
|
||||
#According to the HTML standard, these block tags can contain
|
||||
#another tag of the same type. Furthermore, it's common
|
||||
#to actually use these tags this way.
|
||||
# According to the HTML standard, these block tags can contain
|
||||
# another tag of the same type. Furthermore, it's common
|
||||
# to actually use these tags this way.
|
||||
NESTABLE_BLOCK_TAGS = ('blockquote', 'div', 'fieldset', 'ins', 'del')
|
||||
|
||||
#Lists can contain other lists, but there are restrictions.
|
||||
# Lists can contain other lists, but there are restrictions.
|
||||
NESTABLE_LIST_TAGS = {'ol': [],
|
||||
'ul': [],
|
||||
'li': ['ul', 'ol'],
|
||||
|
@@ -1563,7 +1561,7 @@ class BeautifulSoup(BeautifulStoneSoup):
|
|||
'dd': ['dl'],
|
||||
'dt': ['dl']}
|
||||
|
||||
#Tables can contain other tables, but there are restrictions.
|
||||
# Tables can contain other tables, but there are restrictions.
|
||||
NESTABLE_TABLE_TAGS = {'table': [],
|
||||
'tr': ['table', 'tbody', 'tfoot', 'thead'],
|
||||
'td': ['tr'],
|
||||
|
@@ -1575,8 +1573,8 @@ class BeautifulSoup(BeautifulStoneSoup):
|
|||
|
||||
NON_NESTABLE_BLOCK_TAGS = ('address', 'form', 'p', 'pre')
|
||||
|
||||
#If one of these tags is encountered, all tags up to the next tag of
|
||||
#this type are popped.
|
||||
# If one of these tags is encountered, all tags up to the next tag of
|
||||
# this type are popped.
|
||||
RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
|
||||
NON_NESTABLE_BLOCK_TAGS,
|
||||
NESTABLE_LIST_TAGS,
|
||||
|
@@ -1723,14 +1721,14 @@ class BeautifulSOAP(BeautifulStoneSoup):
|
|||
BeautifulStoneSoup.popTag(self)
|
||||
|
||||
|
||||
#Enterprise class names! It has come to our attention that some people
|
||||
#think the names of the Beautiful Soup parser classes are too silly
|
||||
#and "unprofessional" for use in enterprise screen-scraping. We feel
|
||||
#your pain! For such-minded folk, the Beautiful Soup Consortium And
|
||||
#All-Night Kosher Bakery recommends renaming this file to
|
||||
#"RobustParser.py" (or, in cases of extreme enterprisiness,
|
||||
#"RobustParserBeanInterface.class") and using the following
|
||||
#enterprise-friendly class aliases:
|
||||
# Enterprise class names! It has come to our attention that some people
|
||||
# think the names of the Beautiful Soup parser classes are too silly
|
||||
# and "unprofessional" for use in enterprise screen-scraping. We feel
|
||||
# your pain! For such-minded folk, the Beautiful Soup Consortium And
|
||||
# All-Night Kosher Bakery recommends renaming this file to
|
||||
# "RobustParser.py" (or, in cases of extreme enterprisiness,
|
||||
# "RobustParserBeanInterface.class") and using the following
|
||||
# enterprise-friendly class aliases:
|
||||
class RobustXMLParser(BeautifulStoneSoup):
|
||||
pass
|
||||
|
||||
|
@@ -1865,7 +1863,7 @@ class UnicodeDammit:
|
|||
# print "That didn't work!"
|
||||
# print e
|
||||
return None
|
||||
#print "Correct encoding: %s" % proposed
|
||||
# print "Correct encoding: %s" % proposed
|
||||
return self.markup
|
||||
|
||||
def _toUnicode(self, data, encoding):
|
||||
|
@@ -1960,7 +1958,6 @@ class UnicodeDammit:
|
|||
xml_encoding = sniffed_xml_encoding
|
||||
return xml_data, xml_encoding, sniffed_xml_encoding
|
||||
|
||||
|
||||
def find_codec(self, charset):
|
||||
return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
|
||||
or (charset and self._codec(charset.replace("-", ""))) \
|
||||
|
@@ -2041,7 +2038,7 @@ class UnicodeDammit:
|
|||
#######################################################################
|
||||
|
||||
|
||||
#By default, act as an HTML pretty-printer.
|
||||
# By default, act as an HTML pretty-printer.
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
|
||||
@@ -18,7 +18,8 @@
|
|||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
|
||||
import hashlib, sys
|
||||
import hashlib
|
||||
import sys
|
||||
|
||||
import Libtorrent
|
||||
import AceStream
|
||||
|
@@ -27,7 +28,6 @@ import AceStream
|
|||
class Torrent():
|
||||
__settings__ = sys.modules["__main__"].__settings__
|
||||
|
||||
|
||||
def __init__(self, storageDirectory='', torrentFile='', torrentFilesDirectory='torrents'):
|
||||
self.get_torrent_client()
|
||||
if self.player == 'libtorrent':
|
||||
|
@@ -77,7 +77,7 @@ class Torrent():
|
|||
return self.player.getFilePath(contentId)
|
||||
|
||||
def getContentList(self):
|
||||
#print str(self.player.getContentList())
|
||||
# print str(self.player.getContentList())
|
||||
return self.player.getContentList()
|
||||
|
||||
def setUploadLimit(self, bytesPerSecond):
|
||||
Libtorrent.py (147)
|
@@ -18,22 +18,23 @@
|
|||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
|
||||
#import time
|
||||
# import time
|
||||
import thread
|
||||
import os
|
||||
import urllib2
|
||||
import hashlib
|
||||
import re
|
||||
import sys
|
||||
from platform import get_platform
|
||||
from StringIO import StringIO
|
||||
import gzip
|
||||
from functions import file_decode, file_encode, isSubtitle, DownloadDB
|
||||
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
import xbmcvfs
|
||||
import Localization
|
||||
from platform_pulsar import get_platform
|
||||
from functions import file_decode, file_encode, isSubtitle, DownloadDB
|
||||
|
||||
|
||||
class Libtorrent:
|
||||
torrentFile = None
|
||||
|
@@ -102,12 +103,13 @@ class Libtorrent:
|
|||
#from ctypes import *
|
||||
#cdll.LoadLibrary(dirname + '/libtorrent-rasterbar.so.7')'''
|
||||
|
||||
self.platform=get_platform()
|
||||
self.platform = get_platform()
|
||||
|
||||
print '[Libtorrent] self.platform: '+str(self.platform)
|
||||
print '[Libtorrent] self.platform: ' + str(self.platform)
|
||||
|
||||
try:
|
||||
import libtorrent
|
||||
|
||||
print 'Imported libtorrent v' + libtorrent.version + ' from system'
|
||||
except Exception, e:
|
||||
print 'Error importing from system. Exception: ' + str(e)
|
||||
|
@@ -117,7 +119,9 @@ class Libtorrent:
|
|||
'python_libtorrent', self.platform['system'])
|
||||
sys.path.insert(0, dirname)
|
||||
import libtorrent
|
||||
print 'Imported libtorrent v' + libtorrent.version + ' from python_libtorrent.' + self.platform['system']
|
||||
|
||||
print 'Imported libtorrent v' + libtorrent.version + ' from python_libtorrent.' + self.platform[
|
||||
'system']
|
||||
except Exception, e:
|
||||
print 'Error importing python_libtorrent.' + self.platform['system'] + '. Exception: ' + str(e)
|
||||
pass
|
||||
|
@@ -127,11 +131,12 @@ class Libtorrent:
|
|||
del libtorrent
|
||||
except:
|
||||
xbmcgui.Dialog().ok(Localization.localize('Python-Libtorrent Not Found'),
|
||||
Localization.localize(self.platform["message"][0]),Localization.localize(self.platform["message"][1]))
|
||||
Localization.localize(self.platform["message"][0]),
|
||||
Localization.localize(self.platform["message"][1]))
|
||||
return
|
||||
|
||||
self.storageDirectory = storageDirectory
|
||||
self.torrentFilesPath=os.path.join(self.storageDirectory, torrentFilesDirectory)+os.sep
|
||||
self.torrentFilesPath = os.path.join(self.storageDirectory, torrentFilesDirectory) + os.sep
|
||||
if xbmcvfs.exists(torrentFile):
|
||||
self.torrentFile = torrentFile
|
||||
self.torrentFileInfo = self.lt.torrent_info(file_decode(self.torrentFile))
|
||||
|
@@ -142,7 +147,7 @@ class Libtorrent:
|
|||
if re.match("^magnet\:.+$", torrentUrl):
|
||||
self.magnetLink = torrentUrl
|
||||
self.magnetToTorrent(torrentUrl)
|
||||
self.magnetLink=None
|
||||
self.magnetLink = None
|
||||
return self.torrentFile
|
||||
else:
|
||||
if not xbmcvfs.exists(self.torrentFilesPath):
|
||||
|
@@ -168,7 +173,8 @@ class Libtorrent:
|
|||
localFile.write(content)
|
||||
localFile.close()
|
||||
except Exception, e:
|
||||
print 'Unable to save torrent file from "' + torrentUrl + '" to "' + torrentFile + '" in Torrent::saveTorrent' + '. Exception: ' + str(e)
|
||||
print 'Unable to save torrent file from "' + torrentUrl + '" to "' + torrentFile + '" in Torrent::saveTorrent' + '. Exception: ' + str(
|
||||
e)
|
||||
return
|
||||
if xbmcvfs.exists(torrentFile):
|
||||
try:
|
||||
|
@@ -180,14 +186,16 @@ class Libtorrent:
|
|||
baseName = file_encode(os.path.basename(self.getFilePath()))
|
||||
if not xbmcvfs.exists(self.torrentFilesPath):
|
||||
xbmcvfs.mkdirs(self.torrentFilesPath)
|
||||
newFile = self.torrentFilesPath+self.md5(baseName)+'.'+self.md5(torrentUrl)+'.torrent'# + '.'+ baseName
|
||||
newFile = self.torrentFilesPath + self.md5(baseName) + '.' + self.md5(
|
||||
torrentUrl) + '.torrent' # + '.'+ baseName
|
||||
if not xbmcvfs.exists(newFile):
|
||||
xbmcvfs.delete(newFile)
|
||||
if not xbmcvfs.exists(newFile):
|
||||
try:
|
||||
xbmcvfs.rename(torrentFile, newFile)
|
||||
except Exception, e:
|
||||
print 'Unable to rename torrent file from "' + torrentFile + '" to "' + newFile + '" in Torrent::renameTorrent'+ '. Exception: ' + str(e)
|
||||
print 'Unable to rename torrent file from "' + torrentFile + '" to "' + newFile + '" in Torrent::renameTorrent' + '. Exception: ' + str(
|
||||
e)
|
||||
return
|
||||
self.torrentFile = newFile
|
||||
if not self.torrentFileInfo:
|
||||
|
@@ -266,7 +274,7 @@ class Libtorrent:
|
|||
return self.getContentList()[contentId]['size']
|
||||
|
||||
def getFilePath(self, contentId=0):
|
||||
return os.path.join(self.storageDirectory,self.getContentList()[contentId]['title'])#.decode('utf8')
|
||||
return os.path.join(self.storageDirectory, self.getContentList()[contentId]['title']) # .decode('utf8')
|
||||
|
||||
def getContentList(self):
|
||||
filelist = []
|
||||
|
@@ -278,12 +286,12 @@ class Libtorrent:
|
|||
return filelist
|
||||
except:
|
||||
xbmcgui.Dialog().ok(Localization.localize('Python-Libtorrent Not Found'),
|
||||
Localization.localize(self.platform["message"][0]),Localization.localize(self.platform["message"][1]))
|
||||
Localization.localize(self.platform["message"][0]),
|
||||
Localization.localize(self.platform["message"][1]))
|
||||
return
|
||||
|
||||
|
||||
def getSubsIds(self, filename):
|
||||
subs=[]
|
||||
subs = []
|
||||
for i in self.getContentList():
|
||||
if isSubtitle(filename, i['title']):
|
||||
subs.append((i['ind'], i['title']))
|
||||
|
@@ -305,25 +313,26 @@ class Libtorrent:
|
|||
|
||||
def downloadProcess(self, contentId):
|
||||
self.startSession()
|
||||
self.paused=False
|
||||
db=DownloadDB()
|
||||
ContentList=self.getContentList()
|
||||
if contentId!=None: contentId=int(contentId)
|
||||
if len(ContentList)==1 or contentId not in [None, -1]:
|
||||
if not contentId: contentId=0
|
||||
title=os.path.basename(ContentList[contentId]['title'])
|
||||
path=os.path.join(self.storageDirectory, ContentList[contentId]['title'])
|
||||
type='file'
|
||||
self.paused = False
|
||||
db = DownloadDB()
|
||||
ContentList = self.getContentList()
|
||||
if contentId != None: contentId = int(contentId)
|
||||
if len(ContentList) == 1 or contentId not in [None, -1]:
|
||||
if not contentId: contentId = 0
|
||||
title = os.path.basename(ContentList[contentId]['title'])
|
||||
path = os.path.join(self.storageDirectory, ContentList[contentId]['title'])
|
||||
type = 'file'
|
||||
else:
|
||||
contentId=-1
|
||||
title=ContentList[0]['title'].split('\\')[0]
|
||||
path=os.path.join(self.storageDirectory, title)
|
||||
type='folder'
|
||||
contentId = -1
|
||||
title = ContentList[0]['title'].split('\\')[0]
|
||||
path = os.path.join(self.storageDirectory, title)
|
||||
type = 'folder'
|
||||
|
||||
add=db.add(title, path, type, {'progress':0}, 'downloading', self.torrentFile, contentId, self.storageDirectory)
|
||||
get=db.get(title)
|
||||
if add or get[5]=='stopped':
|
||||
if get[5]=='stopped':
|
||||
add = db.add(title, path, type, {'progress': 0}, 'downloading', self.torrentFile, contentId,
|
||||
self.storageDirectory)
|
||||
get = db.get(title)
|
||||
if add or get[5] == 'stopped':
|
||||
if get[5] == 'stopped':
|
||||
db.update_status(get[0], 'downloading')
|
||||
if contentId not in [None, -1]:
|
||||
self.continueSession(int(contentId), Offset=0, seeding=False)
|
||||
|
@@ -333,27 +342,27 @@ class Libtorrent:
|
|||
thread.start_new_thread(self.downloadLoop, (title,))
|
||||
|
||||
def downloadLoop(self, title):
|
||||
db=DownloadDB()
|
||||
status='downloading'
|
||||
while db.get(title) and status!='stopped':
|
||||
db = DownloadDB()
|
||||
status = 'downloading'
|
||||
while db.get(title) and status != 'stopped':
|
||||
xbmc.sleep(3000)
|
||||
status=db.get_status(title)
|
||||
status = db.get_status(title)
|
||||
if not self.paused:
|
||||
if status=='pause':
|
||||
self.paused=True
|
||||
if status == 'pause':
|
||||
self.paused = True
|
||||
self.session.pause()
|
||||
else:
|
||||
if status!='pause':
|
||||
self.paused=False
|
||||
if status != 'pause':
|
||||
self.paused = False
|
||||
self.session.resume()
|
||||
s = self.torrentHandle.status()
|
||||
info={}
|
||||
info['upload']=s.upload_payload_rate
|
||||
info['download']=s.download_payload_rate
|
||||
info['peers']=s.num_peers
|
||||
info['seeds']=s.num_seeds
|
||||
info = {}
|
||||
info['upload'] = s.upload_payload_rate
|
||||
info['download'] = s.download_payload_rate
|
||||
info['peers'] = s.num_peers
|
||||
info['seeds'] = s.num_seeds
|
||||
iterator = int(s.progress * 100)
|
||||
info['progress']=iterator
|
||||
info['progress'] = iterator
|
||||
db.update(title, info)
|
||||
self.debug()
|
||||
self.session.remove_torrent(self.torrentHandle)
|
||||
|
@@ -378,7 +387,7 @@ class Libtorrent:
|
|||
self.torrentHandle = self.session.add_torrent({'ti': self.torrentFileInfo,
|
||||
'save_path': self.storageDirectory,
|
||||
'flags': 0x300,
|
||||
#'storage_mode': self.lt.storage_mode_t.storage_mode_allocate,
|
||||
# 'storage_mode': self.lt.storage_mode_t.storage_mode_allocate,
|
||||
})
|
||||
else:
|
||||
self.torrentFileInfo = self.getMagnetInfo()
|
||||
|
@@ -395,21 +404,21 @@ class Libtorrent:
|
|||
if not Offset:
|
||||
Offset = selectedFileInfo['size'] / (1024 * 1024)
|
||||
self.partOffset = (Offset * 1024 * 1024 / self.piece_length) + 1
|
||||
#print 'partOffset ' + str(self.partOffset)+str(' ')
|
||||
# print 'partOffset ' + str(self.partOffset)+str(' ')
|
||||
self.startPart = selectedFileInfo['offset'] / self.piece_length
|
||||
self.endPart = int((selectedFileInfo['offset'] + selectedFileInfo['size']) / self.piece_length)
|
||||
#print 'part ' + str(self.startPart)+ str(' ')+ str(self.endPart)
|
||||
multiplier=self.partOffset/5
|
||||
print 'continueSession: multiplier '+str(multiplier)
|
||||
# print 'part ' + str(self.startPart)+ str(' ')+ str(self.endPart)
|
||||
multiplier = self.partOffset / 5
|
||||
print 'continueSession: multiplier ' + str(multiplier)
|
||||
for i in range(self.startPart, self.startPart + self.partOffset):
|
||||
if i <= self.endPart:
|
||||
self.torrentHandle.piece_priority(i, 7)
|
||||
if isMP4 and i%multiplier==0:
|
||||
self.torrentHandle.piece_priority(self.endPart - i/multiplier, 7)
|
||||
#print str(i)
|
||||
if multiplier>=i:
|
||||
if isMP4 and i % multiplier == 0:
|
||||
self.torrentHandle.piece_priority(self.endPart - i / multiplier, 7)
|
||||
# print str(i)
|
||||
if multiplier >= i:
|
||||
self.torrentHandle.piece_priority(self.endPart - i, 7)
|
||||
#print str(i)
|
||||
# print str(i)
|
||||
|
||||
def fetchParts(self):
|
||||
priorities = self.torrentHandle.piece_priorities()
|
||||
|
@@ -429,13 +438,13 @@ class Libtorrent:
|
|||
|
||||
def debug(self):
|
||||
try:
|
||||
#print str(self.getFilePath(0))
|
||||
# print str(self.getFilePath(0))
|
||||
s = self.torrentHandle.status()
|
||||
#get_settings=self.torrentHandle.status
|
||||
#print s.num_pieces
|
||||
#priorities = self.torrentHandle.piece_priorities()
|
||||
#self.dump(priorities)
|
||||
#print str('anonymous_mode '+str(get_settings['anonymous_mode']))
|
||||
# get_settings=self.torrentHandle.status
|
||||
# print s.num_pieces
|
||||
# priorities = self.torrentHandle.piece_priorities()
|
||||
# self.dump(priorities)
|
||||
# print str('anonymous_mode '+str(get_settings['anonymous_mode']))
|
||||
|
||||
state_str = ['queued', 'checking', 'downloading metadata',
|
||||
'downloading', 'finished', 'seeding', 'allocating']
|
||||
|
@@ -444,13 +453,13 @@ class Libtorrent:
|
|||
s.upload_rate / 1000,
|
||||
s.num_peers, state_str[s.state])
|
||||
i = 0
|
||||
#for t in s.pieces:
|
||||
# for t in s.pieces:
|
||||
# if t: i=i+1
|
||||
#print str(self.session.pop_alert())
|
||||
#print str(s.pieces[self.startPart:self.endPart])
|
||||
#print 'True pieces: %d' % i
|
||||
#print s.current_tracker
|
||||
#print str(s.pieces)
|
||||
# print str(self.session.pop_alert())
|
||||
# print str(s.pieces[self.startPart:self.endPart])
|
||||
# print 'True pieces: %d' % i
|
||||
# print s.current_tracker
|
||||
# print str(s.pieces)
|
||||
except:
|
||||
print 'debug error'
|
||||
pass
|
||||
Player.py (115)
|
@@ -13,7 +13,6 @@ import xbmcvfs
|
|||
import Localization
|
||||
from functions import calculate, showMessage, clearStorage, DownloadDB, get_ids_video
|
||||
|
||||
|
||||
ROOT = sys.modules["__main__"].__root__
|
||||
RESOURCES_PATH = os.path.join(ROOT, 'resources')
|
||||
TORRENT2HTTP_TIMEOUT = 20
|
||||
|
@@ -106,20 +105,20 @@ class OverlayText(object):
|
|||
class TorrentPlayer(xbmc.Player):
|
||||
__plugin__ = sys.modules["__main__"].__plugin__
|
||||
__settings__ = sys.modules["__main__"].__settings__
|
||||
ROOT = sys.modules["__main__"].__root__ #.decode('utf-8').encode(sys.getfilesystemencoding())
|
||||
ROOT = sys.modules["__main__"].__root__ # .decode('utf-8').encode(sys.getfilesystemencoding())
|
||||
USERAGENT = "Mozilla/5.0 (Windows NT 6.1; rv:5.0) Gecko/20100101 Firefox/5.0"
|
||||
torrentFilesDirectory = 'torrents'
|
||||
debug = __settings__.getSetting('debug') == 'true'
|
||||
subs_dl = __settings__.getSetting('subs_dl') == 'true'
|
||||
seeding = __settings__.getSetting('keep_seeding') == 'true' and __settings__.getSetting('keep_files') == 'true'
|
||||
seeding_status=False
|
||||
seeding_run=False
|
||||
seeding_status = False
|
||||
seeding_run = False
|
||||
ids_video = None
|
||||
episodeId = None
|
||||
|
||||
def __init__(self, userStorageDirectory, torrentUrl, params={}):
|
||||
self.userStorageDirectory = userStorageDirectory
|
||||
self.torrentUrl=torrentUrl
|
||||
self.torrentUrl = torrentUrl
|
||||
xbmc.Player.__init__(self)
|
||||
print ("[TorrentPlayer] Initalized")
|
||||
self.params = params
|
||||
|
@@ -138,16 +137,16 @@ class TorrentPlayer(xbmc.Player):
|
|||
if self.buffer():
|
||||
while True:
|
||||
if self.setup_play():
|
||||
#print '************************************* GOING LOOP'
|
||||
# print '************************************* GOING LOOP'
|
||||
self.torrent.continueSession(self.contentId)
|
||||
self.loop()
|
||||
else:
|
||||
break
|
||||
#print '************************************* GO NEXT?'
|
||||
# print '************************************* GO NEXT?'
|
||||
if self.next_dl and self.next_dling and isinstance(self.next_contentId, int) and self.iterator == 100:
|
||||
self.contentId = self.next_contentId
|
||||
continue
|
||||
#print '************************************* NO! break'
|
||||
# print '************************************* NO! break'
|
||||
break
|
||||
|
||||
self.torrent.stopSession()
|
||||
|
@@ -166,7 +165,7 @@ class TorrentPlayer(xbmc.Player):
|
|||
|
||||
def init(self):
|
||||
self.next_dl = True if self.__settings__.getSetting('next_dl') == 'true' and self.ids_video else False
|
||||
print '[TorrentPlayer]: init - '+str(self.next_dl)
|
||||
print '[TorrentPlayer]: init - ' + str(self.next_dl)
|
||||
self.next_contentId = False
|
||||
self.display_name = ''
|
||||
self.downloadedSize = 0
|
||||
|
@@ -178,26 +177,28 @@ class TorrentPlayer(xbmc.Player):
|
|||
|
||||
def setup_torrent(self):
|
||||
self.torrent.startSession()
|
||||
upload_limit=self.__settings__.getSetting("upload_limit") if self.__settings__.getSetting("upload_limit")!="" else 0
|
||||
upload_limit = self.__settings__.getSetting("upload_limit") if self.__settings__.getSetting(
|
||||
"upload_limit") != "" else 0
|
||||
if 0 < int(upload_limit):
|
||||
self.torrent.setUploadLimit(int(upload_limit) * 1000000 / 8) #MBits/second
|
||||
download_limit=self.__settings__.getSetting("download_limit") if self.__settings__.getSetting("download_limit")!="" else 0
|
||||
self.torrent.setUploadLimit(int(upload_limit) * 1000000 / 8) # MBits/second
|
||||
download_limit = self.__settings__.getSetting("download_limit") if self.__settings__.getSetting(
|
||||
"download_limit") != "" else 0
|
||||
if 0 < int(download_limit):
|
||||
self.torrent.setDownloadLimit(
|
||||
int(download_limit) * 1000000 / 8) #MBits/second
|
||||
int(download_limit) * 1000000 / 8) # MBits/second
|
||||
self.torrent.status = False
|
||||
self.fullSize = self.torrent.getFileSize(self.contentId)
|
||||
Offset = calculate(self.fullSize)
|
||||
#print 'Offset: '+str(Offset)
|
||||
# print 'Offset: '+str(Offset)
|
||||
|
||||
#mp4 fix
|
||||
# mp4 fix
|
||||
label = os.path.basename(self.torrent.getFilePath(self.contentId))
|
||||
isMP4=False
|
||||
isMP4 = False
|
||||
if '.' in label:
|
||||
ext=label.split('.')[-1]
|
||||
if ext.lower()=='mp4':
|
||||
isMP4=True
|
||||
#print 'setup_torrent: '+str((self.contentId, Offset, isMP4, label, ext))
|
||||
ext = label.split('.')[-1]
|
||||
if ext.lower() == 'mp4':
|
||||
isMP4 = True
|
||||
# print 'setup_torrent: '+str((self.contentId, Offset, isMP4, label, ext))
|
||||
self.torrent.continueSession(self.contentId, Offset=Offset, isMP4=isMP4)
|
||||
|
||||
def buffer(self):
|
||||
|
@@ -206,11 +207,11 @@ class TorrentPlayer(xbmc.Player):
|
|||
progressBar.create(Localization.localize('Please Wait') + str(' [%s]' % str(self.torrent.lt.version)),
|
||||
Localization.localize('Seeds searching.'))
|
||||
if self.subs_dl:
|
||||
subs=self.torrent.getSubsIds(os.path.basename(self.torrent.getFilePath(self.contentId)))
|
||||
if len(subs)>0:
|
||||
subs = self.torrent.getSubsIds(os.path.basename(self.torrent.getFilePath(self.contentId)))
|
||||
if len(subs) > 0:
|
||||
for ind, title in subs:
|
||||
self.torrent.continueSession(ind)
|
||||
num_pieces=int(self.torrent.torrentFileInfo.num_pieces())
|
||||
num_pieces = int(self.torrent.torrentFileInfo.num_pieces())
|
||||
while iterator < 100:
|
||||
xbmc.sleep(1000)
|
||||
self.torrent.debug()
|
||||
|
@@ -218,7 +219,7 @@ class TorrentPlayer(xbmc.Player):
|
|||
status = self.torrent.torrentHandle.status()
|
||||
iterator = int(status.progress * 100)
|
||||
if status.state == 0 or (status.progress == 0 and status.num_pieces > 0):
|
||||
iterator = int(status.num_pieces*100/num_pieces)
|
||||
iterator = int(status.num_pieces * 100 / num_pieces)
|
||||
if iterator > 99: iterator = 99
|
||||
progressBar.update(iterator, Localization.localize('Checking preloaded files...'), ' ', ' ')
|
||||
elif status.state == 3:
|
||||
|
@@ -245,10 +246,10 @@ class TorrentPlayer(xbmc.Player):
|
|||
return True
|
||||
|
||||
def setup_subs(self, label, path):
|
||||
iterator=0
|
||||
subs=self.torrent.getSubsIds(label)
|
||||
#print str(subs)
|
||||
if len(subs)>0:
|
||||
iterator = 0
|
||||
subs = self.torrent.getSubsIds(label)
|
||||
# print str(subs)
|
||||
if len(subs) > 0:
|
||||
showMessage(Localization.localize('Information'),
|
||||
Localization.localize('Downloading and copy subtitles. Please wait.'), forced=True)
|
||||
for ind, title in subs:
|
||||
|
@@ -258,38 +259,39 @@ class TorrentPlayer(xbmc.Player):
|
|||
self.torrent.debug()
|
||||
status = self.torrent.torrentHandle.status()
|
||||
iterator = int(status.progress * 100)
|
||||
#xbmc.sleep(2000)
|
||||
# xbmc.sleep(2000)
|
||||
for ind, title in subs:
|
||||
folder=title.split(os.sep)[0]
|
||||
temp=os.path.basename(title)
|
||||
addition=os.path.dirname(title).lstrip(folder+os.sep).replace(os.sep,'.').replace(' ','_').strip()
|
||||
ext=temp.split('.')[-1]
|
||||
temp = temp[:len(temp) - len(ext) - 1]+'.'+addition+'.'+ext
|
||||
newFileName=os.path.join(os.path.dirname(path),temp)
|
||||
#print str((os.path.join(os.path.dirname(os.path.dirname(path)),title),newFileName))
|
||||
folder = title.split(os.sep)[0]
|
||||
temp = os.path.basename(title)
|
||||
addition = os.path.dirname(title).lstrip(folder + os.sep).replace(os.sep, '.').replace(' ', '_').strip()
|
||||
ext = temp.split('.')[-1]
|
||||
temp = temp[:len(temp) - len(ext) - 1] + '.' + addition + '.' + ext
|
||||
newFileName = os.path.join(os.path.dirname(path), temp)
|
||||
# print str((os.path.join(os.path.dirname(os.path.dirname(path)),title),newFileName))
|
||||
if not xbmcvfs.exists(newFileName):
|
||||
xbmcvfs.copy(os.path.join(os.path.dirname(os.path.dirname(path)),title),newFileName)
|
||||
xbmcvfs.copy(os.path.join(os.path.dirname(os.path.dirname(path)), title), newFileName)
|
||||
|
||||
def setup_play(self):
|
||||
self.next_dling = False
|
||||
self.iterator=0
|
||||
self.iterator = 0
|
||||
path = self.torrent.getFilePath(self.contentId)
|
||||
label = os.path.basename(path)
|
||||
self.basename=label
|
||||
self.seeding_run=False
|
||||
self.basename = label
|
||||
self.seeding_run = False
|
||||
listitem = xbmcgui.ListItem(label, path=path)
|
||||
|
||||
if self.subs_dl:
|
||||
self.setup_subs(label, path)
|
||||
try:
|
||||
seasonId = self.get("seasonId")
|
||||
self.episodeId = self.get("episodeId") if not self.episodeId else int(self.episodeId)+1
|
||||
self.episodeId = self.get("episodeId") if not self.episodeId else int(self.episodeId) + 1
|
||||
title = urllib.unquote_plus(self.get("title")) if self.get("title") else None
|
||||
|
||||
if self.get("label") and self.episodeId == self.get("episodeId"):
|
||||
label = urllib.unquote_plus(self.get("label"))
|
||||
elif seasonId and self.episodeId and title:
|
||||
label = '%s S%02dE%02d.%s (%s)' % (title, int(seasonId), int(self.episodeId), self.basename.split('.')[-1], self.basename)
|
||||
label = '%s S%02dE%02d.%s (%s)' % (
|
||||
title, int(seasonId), int(self.episodeId), self.basename.split('.')[-1], self.basename)
|
||||
|
||||
if seasonId and self.episodeId and label and title:
|
||||
listitem = xbmcgui.ListItem(label, path=path)
|
||||
|
@@ -306,7 +308,7 @@ class TorrentPlayer(xbmc.Player):
|
|||
listitem.setThumbnailImage(urllib.unquote_plus(thumbnail))
|
||||
self.display_name = label
|
||||
|
||||
#мегакостыль!
|
||||
# мегакостыль!
|
||||
rpc = ({'jsonrpc': '2.0', 'method': 'Files.GetDirectory', 'params': {
|
||||
'media': 'video', 'directory': os.path.dirname(path)}, 'id': 0})
|
||||
data = json.dumps(rpc)
|
||||
|
@@ -315,13 +317,13 @@ class TorrentPlayer(xbmc.Player):
|
|||
xbmc.sleep(300)
|
||||
|
||||
if response:
|
||||
#xbmc.Player().play(path, listitem)
|
||||
# xbmc.Player().play(path, listitem)
|
||||
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
|
||||
playlist.clear()
|
||||
playlist.add(path, listitem)
|
||||
xbmc.Player().play(playlist)
|
||||
|
||||
xbmc.sleep(3000)#very important, do not edit this, podavan
|
||||
xbmc.sleep(3000) # very important, do not edit this, podavan
|
||||
return True
|
||||
|
||||
def onPlayBackStarted(self):
|
||||
|
@@ -362,7 +364,7 @@ class TorrentPlayer(xbmc.Player):
|
|||
self.torrent.debug()
|
||||
status = self.torrent.torrentHandle.status()
|
||||
overlay.text = "\n".join(self._get_status_lines(status))
|
||||
#downloadedSize = torrent.torrentHandle.file_progress()[contentId]
|
||||
# downloadedSize = torrent.torrentHandle.file_progress()[contentId]
|
||||
self.iterator = int(status.progress * 100)
|
||||
xbmc.sleep(1000)
|
||||
if self.iterator == 100 and self.next_dl:
|
||||
|
@@ -371,19 +373,20 @@ class TorrentPlayer(xbmc.Player):
|
|||
self.next_contentId = int(self.ids_video[next_contentId_index])
|
||||
else:
|
||||
self.next_contentId = False
|
||||
#print str(self.next_contentId)+'xxxxxx23'
|
||||
# print str(self.next_contentId)+'xxxxxx23'
|
||||
if not self.seeding_run and self.iterator == 100 and self.seeding:
|
||||
self.seeding_run=True
|
||||
self.seeding_run = True
|
||||
self.seed(self.contentId)
|
||||
self.seeding_status=True
|
||||
#xbmc.sleep(7000)
|
||||
if self.iterator == 100 and self.next_dl and not self.next_dling and isinstance(self.next_contentId, int) and self.next_contentId!=False:
|
||||
self.seeding_status = True
|
||||
# xbmc.sleep(7000)
|
||||
if self.iterator == 100 and self.next_dl and not self.next_dling and isinstance(self.next_contentId,
|
||||
int) and self.next_contentId != False:
|
||||
showMessage(Localization.localize('Torrent Downloading'),
|
||||
Localization.localize('Starting download next episode!'), forced=True)
|
||||
self.torrent.stopSession()
|
||||
#xbmc.sleep(1000)
|
||||
# xbmc.sleep(1000)
|
||||
path = self.torrent.getFilePath(self.next_contentId)
|
||||
self.basename=self.display_name = os.path.basename(path)
|
||||
self.basename = self.display_name = os.path.basename(path)
|
||||
self.torrent.continueSession(self.next_contentId)
|
||||
self.next_dling = True
|
||||
|
||||
|
@@ -397,14 +400,14 @@ class TorrentPlayer(xbmc.Player):
|
|||
]
|
||||
|
||||
def db_delete(self):
|
||||
db=DownloadDB()
|
||||
get=db.get(self.basename)
|
||||
db = DownloadDB()
|
||||
get = db.get(self.basename)
|
||||
if get:
|
||||
db.delete(get[0])
|
||||
|
||||
def seed(self, contentId):
|
||||
self.db_delete()
|
||||
exec_str='XBMC.RunPlugin(%s)' % \
|
||||
exec_str = 'XBMC.RunPlugin(%s)' % \
|
||||
('%s?action=%s&url=%s&storage=%s&ind=%s') % \
|
||||
(sys.argv[0], 'downloadLibtorrent', urllib.quote_plus(self.torrentUrl),
|
||||
urllib.quote_plus(self.userStorageDirectory), str(contentId))
|
||||
cal.py (14)
|
@@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
import os, json, re
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
import os
|
||||
import re
|
||||
|
||||
ROOT = os.path.dirname(sys.modules["__main__"].sys.argv[0])
|
||||
searcherObject = {}
|
||||
|
@@ -10,11 +10,11 @@ if ROOT + os.sep + 'resources' + os.sep + 'contenters' not in sys.path:
|
|||
sys.path.insert(0, ROOT + os.sep + 'resources' + os.sep + 'contenters')
|
||||
searcherObject[searcher] = getattr(__import__(searcher), searcher)()
|
||||
|
||||
#print str(searcherObject[searcher].get_info('http://kickass.so/greys-anatomy-s11e09-hdtv-x264-lol-ettv-t10144556.html'))
|
||||
# print str(searcherObject[searcher].get_info('http://kickass.so/greys-anatomy-s11e09-hdtv-x264-lol-ettv-t10144556.html'))
|
||||
|
||||
|
||||
x='<a href="http://swesub.tv/action/">Action</a></li><li><a href="http://swesub.tv/animerat/">Animerat</a></li><li><a href="http://swesub.tv/dans/">Dans</a></li><li><a href="http://swesub.tv/dokumentar/">Dokumentär</a></li><li><a href="http://swesub.tv/drama/">Drama</a></li><li><a href="http://swesub.tv/familj/">Familj</a></li><li><a href="http://swesub.tv/fantasy/">Fantasy</a></li><li><a href="http://swesub.tv/komedi/">Komedi</a></li><li><a href="http://swesub.tv/krig/">Krig</a></li><li><a href="http://swesub.tv/kriminal/">Kriminal</a></li><li><a href="http://swesub.tv/musikal/">Musikal</a></li><li><a href="http://swesub.tv/romantik/">Romantik</a></li><li><a href="http://swesub.tv/sci-fi/">Sci-Fi</a></li><li><a href="http://swesub.tv/skrack/">Skräck</a></li><li><a href="http://swesub.tv/sport/">Sport</a></li><li><a href="http://swesub.tv/thriller/">Thriller</a></li><li><a href="http://swesub.tv/western/">Western</a></li><li><a href="http://swesub.tv/aventyr/">Äventyr</a></li>'
|
||||
y='href="http://swesub.tv/(.+?)/">(.+?)<'
|
||||
for u,t in re.findall(y,x):
|
||||
#print ", '/"+u+"/', {'page': '/"+u+"/?page=%d', 'increase': 1, 'second_page': 2,}),"
|
||||
x = '<a href="http://swesub.tv/action/">Action</a></li><li><a href="http://swesub.tv/animerat/">Animerat</a></li><li><a href="http://swesub.tv/dans/">Dans</a></li><li><a href="http://swesub.tv/dokumentar/">Dokumentär</a></li><li><a href="http://swesub.tv/drama/">Drama</a></li><li><a href="http://swesub.tv/familj/">Familj</a></li><li><a href="http://swesub.tv/fantasy/">Fantasy</a></li><li><a href="http://swesub.tv/komedi/">Komedi</a></li><li><a href="http://swesub.tv/krig/">Krig</a></li><li><a href="http://swesub.tv/kriminal/">Kriminal</a></li><li><a href="http://swesub.tv/musikal/">Musikal</a></li><li><a href="http://swesub.tv/romantik/">Romantik</a></li><li><a href="http://swesub.tv/sci-fi/">Sci-Fi</a></li><li><a href="http://swesub.tv/skrack/">Skräck</a></li><li><a href="http://swesub.tv/sport/">Sport</a></li><li><a href="http://swesub.tv/thriller/">Thriller</a></li><li><a href="http://swesub.tv/western/">Western</a></li><li><a href="http://swesub.tv/aventyr/">Äventyr</a></li>'
|
||||
y = 'href="http://swesub.tv/(.+?)/">(.+?)<'
|
||||
for u, t in re.findall(y, x):
|
||||
# print ", '/"+u+"/', {'page': '/"+u+"/?page=%d', 'increase': 1, 'second_page': 2,}),"
|
||||
print t
@@ -1,13 +1,13 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import xbmcaddon
|
||||
import xbmc, sys
|
||||
from functions import getParameters, HistoryDB
|
||||
import sys
|
||||
|
||||
import xbmcaddon
|
||||
import xbmc
|
||||
from functions import getParameters, HistoryDB
|
||||
from resources.pyxbmct.addonwindow import *
|
||||
from functions import Searchers
|
||||
|
||||
|
||||
__settings__ = xbmcaddon.Addon(id='plugin.video.torrenter')
|
||||
__language__ = __settings__.getLocalizedString
|
||||
__version__ = __settings__.getAddonInfo('version')
|
||||
|
@@ -16,10 +16,11 @@ __root__ = __settings__.getAddonInfo('path')
|
|||
|
||||
print 'SYS ARGV: ' + str(sys.argv)
|
||||
|
||||
if len(sys.argv)>1:
|
||||
if len(sys.argv) > 1:
|
||||
params = getParameters(sys.argv[1])
|
||||
else:
|
||||
params={}
|
||||
params = {}
|
||||
|
||||
|
||||
class MyAddon(AddonDialogWindow):
|
||||
def __init__(self, title=''):
|
||||
|
@@ -170,22 +171,22 @@ class ControlCenter(AddonDialogWindow):
|
|||
super(ControlCenter, self).__init__(title)
|
||||
|
||||
self.dic = Searchers().dic()
|
||||
self.db=None
|
||||
self.addtime=None
|
||||
self.db = None
|
||||
self.addtime = None
|
||||
self.keys = self.dic.keys()
|
||||
if addtime:
|
||||
self.addtime=addtime
|
||||
self.addtime = addtime
|
||||
self.db = HistoryDB()
|
||||
providers = self.db.get_providers(addtime)
|
||||
if not providers:
|
||||
self.db.set_providers(addtime, self.dic)
|
||||
else:
|
||||
for searcher in self.keys:
|
||||
self.dic[searcher]=False
|
||||
self.dic[searcher] = False
|
||||
for searcher in providers:
|
||||
try:
|
||||
if searcher in self.keys:
|
||||
self.dic[searcher]=True
|
||||
self.dic[searcher] = True
|
||||
except:
|
||||
pass
|
||||
|
||||
|
@@ -215,16 +216,16 @@ class ControlCenter(AddonDialogWindow):
|
|||
def set_info_controls(self):
|
||||
# Demo for PyXBMCt UI controls.
|
||||
# no_int_label = Label(__language__(30146), alignment=ALIGN_CENTER)
|
||||
#self.placeControl(no_int_label, 0, 0, 1, 3)
|
||||
# self.placeControl(no_int_label, 0, 0, 1, 3)
|
||||
#
|
||||
#label_timeout = Label(__language__(30410))
|
||||
#self.placeControl(label_timeout, 1, 0)
|
||||
# label_timeout = Label(__language__(30410))
|
||||
# self.placeControl(label_timeout, 1, 0)
|
||||
# Label
|
||||
#self.label = Label(__language__(30545) % TimeOut().timeout())
|
||||
#self.placeControl(self.label, 1, 1)
|
||||
# self.label = Label(__language__(30545) % TimeOut().timeout())
|
||||
# self.placeControl(self.label, 1, 1)
|
||||
#
|
||||
#label_watched = Label(__language__(30414) % (WatchedDB().count()))
|
||||
#self.placeControl(label_watched, 2, 0)
|
||||
# label_watched = Label(__language__(30414) % (WatchedDB().count()))
|
||||
# self.placeControl(label_watched, 2, 0)
|
||||
pass
|
||||
|
||||
def set_active_controls(self):
|
||||
|
@@ -286,8 +287,8 @@ class ControlCenter(AddonDialogWindow):
|
|||
ser = placed_keys[placed_values.index((place[0], place[1] - 1))]
|
||||
self.radiobutton[searcher].controlLeft(self.radiobutton[ser])
|
||||
|
||||
#print str((self.button_columns, self.last_column_row))
|
||||
#print searcher
|
||||
# print str((self.button_columns, self.last_column_row))
|
||||
# print searcher
|
||||
|
||||
if place == (self.button_columns, self.last_column_row) and self.last_column_row < 2:
|
||||
ser = placed_keys[placed_values.index((place[0] - 1, place[1] + 1))]
|
||||
|
@@ -363,11 +364,11 @@ class ControlCenter(AddonDialogWindow):
|
|||
|
||||
|
||||
def main():
|
||||
title='Torrenter Global Control Center'
|
||||
addtime=None
|
||||
title = 'Torrenter Global Control Center'
|
||||
addtime = None
|
||||
if params.get('title'):
|
||||
title=str(params.get('title'))
|
||||
addtime=str(params.get('addtime'))
|
||||
title = str(params.get('title'))
|
||||
addtime = str(params.get('addtime'))
|
||||
|
||||
window = ControlCenter(title, addtime)
|
||||
window.doModal()
|
||||
functions.py (149)
|
@@ -19,7 +19,6 @@ import xbmcvfs
|
|||
import Localization
|
||||
from resources.scrapers.scrapers import Scrapers
|
||||
|
||||
|
||||
try:
|
||||
from hashlib import md5
|
||||
except ImportError:
|
||||
|
@@ -50,10 +49,10 @@ def clearStorage(userStorageDirectory):
|
|||
import shutil
|
||||
|
||||
temp = userStorageDirectory.rstrip('Torrenter').rstrip('/\\')
|
||||
torrents_temp,i=None,0
|
||||
torrents_temp, i = None, 0
|
||||
while not torrents_temp or xbmcvfs.exists(torrents_temp):
|
||||
torrents_temp=os.path.join(temp, 'torrents'+str(i))+os.sep
|
||||
i+=1
|
||||
torrents_temp = os.path.join(temp, 'torrents' + str(i)) + os.sep
|
||||
i += 1
|
||||
shutil.move(os.path.join(userStorageDirectory, 'torrents'), torrents_temp)
|
||||
shutil.rmtree(userStorageDirectory, ignore_errors=True)
|
||||
xbmcvfs.mkdir(userStorageDirectory)
|
||||
|
@@ -83,7 +82,7 @@ def md5(string):
|
|||
|
||||
|
||||
def Debug(msg, force=False):
|
||||
if (1==1 or debug == 'true' or force):
|
||||
if (1 == 1 or debug == 'true' or force):
|
||||
try:
|
||||
print "[Torrenter v2] " + msg
|
||||
except UnicodeEncodeError:
|
||||
|
@@ -264,6 +263,7 @@ def calculate(full):
|
|||
|
||||
return repl_const
|
||||
|
||||
|
||||
def getDirList(path, newl=None):
|
||||
l = []
|
||||
try:
|
||||
|
@@ -371,13 +371,13 @@ def cutFolder(contentList, tdir=None):
|
|||
common = False
|
||||
break
|
||||
|
||||
#print common_folder
|
||||
# print common_folder
|
||||
for fileTitle, contentId in contentList:
|
||||
dir = None
|
||||
if common:
|
||||
fileTitle = fileTitle[len(common_folder) + 1:]
|
||||
|
||||
#print fileTitle
|
||||
# print fileTitle
|
||||
|
||||
if '\\' in fileTitle:
|
||||
dir = fileTitle.split('\\')[0]
|
||||
|
@@ -396,6 +396,7 @@ def cutFolder(contentList, tdir=None):
|
|||
else:
|
||||
return dirList, contentList
|
||||
|
||||
|
||||
def sweetpair(l):
|
||||
from difflib import SequenceMatcher
|
||||
|
||||
|
@@ -836,6 +837,7 @@ class TimeOut():
|
|||
# Debug('[TimeOut]: '+str(to))
|
||||
return to
|
||||
|
||||
|
||||
class ListDB:
|
||||
def __init__(self, version=1.0):
|
||||
self.dbname = 'list' + '.db3'
|
||||
|
@@ -854,7 +856,7 @@ class ListDB:
|
|||
# self.cur.execute('create table list(addtime integer PRIMARY KEY, title varchar(32), originaltitle varchar(32)'
|
||||
# ', year integer, category varchar(32), subcategory varchar(32))')
|
||||
self.cur.execute('create table list(addtime integer PRIMARY KEY, info varchar(32))')
|
||||
self.cur.execute('insert into db_ver(version) values(?)', (self.version, ))
|
||||
self.cur.execute('insert into db_ver(version) values(?)', (self.version,))
|
||||
self.db.commit()
|
||||
self._close()
|
||||
|
||||
|
@@ -919,27 +921,27 @@ class HistoryDB:
|
|||
self.cur.execute('select providers from history where addtime="' + addtime + '"')
|
||||
x = self.cur.fetchone()
|
||||
self._close()
|
||||
#print 'get_providers: '+str(x[0].split(',') if x and x[0]!='' else None)
|
||||
return x[0].split(',') if x and x[0]!='' else None
|
||||
# print 'get_providers: '+str(x[0].split(',') if x and x[0]!='' else None)
|
||||
return x[0].split(',') if x and x[0] != '' else None
|
||||
|
||||
def set_providers(self, addtime, providers):
|
||||
self._connect()
|
||||
if isinstance(providers, dict):
|
||||
temp=[]
|
||||
temp = []
|
||||
for i in providers.keys():
|
||||
if providers.get(i):
|
||||
temp.append(i)
|
||||
providers=temp
|
||||
str_p=','.join(providers)
|
||||
providers = temp
|
||||
str_p = ','.join(providers)
|
||||
self.cur.execute('UPDATE history SET providers = "' + str_p + '" where addtime=' + addtime)
|
||||
self.db.commit()
|
||||
self._close()
|
||||
|
||||
def change_providers(self, addtime, searcher):
|
||||
self._connect()
|
||||
providers=self.get_providers(addtime)
|
||||
keys=Searchers().dic().keys()
|
||||
if providers and len(providers)>0:
|
||||
providers = self.get_providers(addtime)
|
||||
keys = Searchers().dic().keys()
|
||||
if providers and len(providers) > 0:
|
||||
if searcher in providers:
|
||||
providers.remove(searcher)
|
||||
else:
|
||||
|
@@ -955,8 +957,8 @@ class HistoryDB:
|
|||
self._connect()
|
||||
self.cur.execute('select fav from history where string="' + url + '"')
|
||||
x = self.cur.fetchone()
|
||||
if x: x=int(x[0])
|
||||
fav=True if x else False
|
||||
if x: x = int(x[0])
|
||||
fav = True if x else False
|
||||
if not fav:
|
||||
self.cur.execute('delete from history where string="' + decode(url) + '"')
|
||||
self.cur.execute('insert into history(addtime,string,fav,providers)'
|
||||
|
@@ -1025,7 +1027,7 @@ class HistoryDB:
|
|||
cur.execute('create table db_ver(version real)')
|
||||
cur.execute(
|
||||
'create table history(addtime integer PRIMARY KEY, string varchar(32), providers varchar(32), fav integer)')
|
||||
cur.execute('insert into db_ver(version) values(?)', (self.version, ))
|
||||
cur.execute('insert into db_ver(version) values(?)', (self.version,))
|
||||
self.db.commit()
|
||||
cur.close()
|
||||
self.cur = self.db.cursor()
|
||||
|
@@ -1034,6 +1036,7 @@ class HistoryDB:
|
|||
self.cur.close()
|
||||
self.db.close()
|
||||
|
||||
|
||||
class Searchers():
|
||||
def __init__(self):
|
||||
pass
|
||||
|
@@ -1071,7 +1074,7 @@ class Searchers():
|
|||
get_active = []
|
||||
for searcher in self.list():
|
||||
if self.old(searcher): get_active.append(searcher + '.py')
|
||||
print 'Active Searchers: '+str(get_active)
|
||||
print 'Active Searchers: ' + str(get_active)
|
||||
return get_active
|
||||
|
||||
|
||||
|
@@ -1082,9 +1085,9 @@ class Contenters():
|
|||
def first_time(self, scrapperDB_ver, language='ru'):
|
||||
searcher = 'metadata'
|
||||
redl = False
|
||||
scrapperDB_ver=scrapperDB_ver[language]
|
||||
if scrapperDB_ver != __settings__.getSetting('scrapperDB_ver'+language) and self.getBoolSetting(searcher):
|
||||
__settings__.setSetting('scrapperDB_ver'+language, scrapperDB_ver)
|
||||
scrapperDB_ver = scrapperDB_ver[language]
|
||||
if scrapperDB_ver != __settings__.getSetting('scrapperDB_ver' + language) and self.getBoolSetting(searcher):
|
||||
__settings__.setSetting('scrapperDB_ver' + language, scrapperDB_ver)
|
||||
ok = xbmcgui.Dialog().yesno('< %s >' % Localization.localize('Content Lists'),
|
||||
Localization.localize('Your preloaded databases are outdated!'),
|
||||
Localization.localize('Do you want to download new ones right now?'))
|
||||
|
@@ -1093,7 +1096,7 @@ class Contenters():
|
|||
dirname = os.path.join(dirname, 'xbmcup', 'plugin.video.torrenter')
|
||||
scrapers = {'tvdb': 'TheTVDB.com', 'tmdb': 'TheMovieDB.org', 'kinopoisk': 'KinoPoisk.ru'}
|
||||
for i in scrapers.keys():
|
||||
xbmcvfs.delete(os.path.join(dirname, i+'.'+language + '.db'))
|
||||
xbmcvfs.delete(os.path.join(dirname, i + '.' + language + '.db'))
|
||||
showMessage(Localization.localize('Reset All Cache DBs'), Localization.localize('Deleted!'))
|
||||
redl = True
|
||||
else:
|
||||
|
@@ -1102,8 +1105,8 @@ class Contenters():
|
|||
'You can always restart this by deleting DBs via Context Menu'), )
|
||||
|
||||
if not self.getBoolSetting('oldc_' + searcher + language):
|
||||
self.setBoolSetting('oldc_' + searcher+ language, True)
|
||||
__settings__.setSetting('scrapperDB_ver'+language, scrapperDB_ver)
|
||||
self.setBoolSetting('oldc_' + searcher + language, True)
|
||||
__settings__.setSetting('scrapperDB_ver' + language, scrapperDB_ver)
|
||||
ok = xbmcgui.Dialog().yesno('< %s >' % Localization.localize('Content Lists'),
|
||||
Localization.localize('Do you want to search and cache full metadata + arts?'),
|
||||
Localization.localize(
|
||||
|
@@ -1118,7 +1121,7 @@ class Contenters():
|
|||
self.Scraper = Scrapers()
|
||||
scrapers = {'tvdb': 'TheTVDB.com', 'tmdb': 'TheMovieDB.org', 'kinopoisk': 'KinoPoisk.ru'}
|
||||
for scraper in scrapers.keys():
|
||||
if scraper!='kinopoisk' or language=='ru':
|
||||
if scraper != 'kinopoisk' or language == 'ru':
|
||||
self.Scraper.scraper(scraper, {'label': 'Мстители', 'search': [u'Мстители', u'The Avengers'],
|
||||
'year': 2012}, language)
|
||||
|
||||
|
@@ -1341,26 +1344,30 @@ def fetchData(url, referer=None):
|
|||
print " fetchData(" + url + ") exception: " + str(e)
|
||||
return
|
||||
|
||||
|
||||
def file_decode(filename):
|
||||
if not __settings__.getSetting('delete_russian')=='true':
|
||||
if not __settings__.getSetting('delete_russian') == 'true':
|
||||
try:
|
||||
filename=filename.decode('utf-8')#,'ignore')
|
||||
filename = filename.decode('utf-8') # ,'ignore')
|
||||
except:
|
||||
pass
|
||||
return filename
|
||||
|
||||
|
||||
def file_encode(filename):
|
||||
if not __settings__.getSetting('delete_russian')=='true':
|
||||
if sys.getfilesystemencoding()=='mbcs' and isAsciiString(filename):
|
||||
filename=filename.decode('cp1251').encode('utf-8')
|
||||
if not __settings__.getSetting('delete_russian') == 'true':
|
||||
if sys.getfilesystemencoding() == 'mbcs' and isAsciiString(filename):
|
||||
filename = filename.decode('cp1251').encode('utf-8')
|
||||
return filename
|
||||
|
||||
|
||||
def isAsciiString(mediaName):
|
||||
for index, char in enumerate(mediaName):
|
||||
if ord(char) >= 128:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
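For reference, a minimal usage sketch of the helpers above (not part of the commit; it assumes the 'delete_russian' setting is off and that this module is importable as functions, which is only an illustrative name here):

# -*- coding: utf-8 -*-
# Sketch only: import name and file names are assumptions, not taken from this diff.
import functions

print functions.isAsciiString('Movie.2012.mkv')      # True: every code point is below 128
print functions.isAsciiString(u'Мстители.2012.mkv')  # False: contains Cyrillic characters
# On Windows builds where sys.getfilesystemencoding() == 'mbcs', file_encode()
# re-encodes a cp1251 byte string to UTF-8 before the path reaches xbmcvfs.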
def getParameters(parameterString):
commands = {}
splitCommands = parameterString[parameterString.find('?') + 1:].split('&')

@@ -1373,51 +1380,55 @@ def getParameters(parameterString):
commands[name] = value
return commands
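A short illustration of getParameters() with a made-up plugin URL (not from the diff): everything after '?' is split on '&' and returned as a name/value dict, which is how the add-on decodes its own plugin:// URLs.

# Sketch only: the URL and keys below are illustrative.
params = getParameters('plugin://plugin.video.torrenter/?action=search&query=avatar')
print params  # {'action': 'search', 'query': 'avatar'}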
def isSubtitle(filename, filename2):
filename_if = filename[:len(filename) - len(filename.split('.')[-1]) - 1]
filename_if = filename_if.split('/')[-1].split('\\')[-1]
filename_if2 = filename2.split('/')[-1].split('\\')[-1][:len(filename_if)]
#Debug('Compare ' + filename_if.lower() + ' and ' + filename_if2.lower() + ' and ' + filename2.lower().split('.')[-1])
# Debug('Compare ' + filename_if.lower() + ' and ' + filename_if2.lower() + ' and ' + filename2.lower().split('.')[-1])
ext = ['ass', 'mpsub', 'rum', 'sbt', 'sbv', 'srt', 'ssa', 'sub', 'sup', 'w32']
if filename2.lower().split('.')[-1] in ext and \
filename_if.lower() == filename_if2.lower():
return True
return False
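Illustrative behaviour of isSubtitle() (file names are made up): a candidate counts as a subtitle when its extension is in the list above and its basename starts with the video's basename.

# Sketch only: paths are invented for the example.
print isSubtitle('/movies/Avatar.2009.mkv', '/movies/Avatar.2009.srt')  # True
print isSubtitle('/movies/Avatar.2009.mkv', '/movies/Other.Film.srt')   # False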
def delete_russian(ok=False, action='delete'):
i=0
i = 0
if not ok:
ok = xbmcgui.Dialog().yesno('< %s >' % Localization.localize('International Check - First Run'),
'Delete Russian stuff?',
Localization.localize('Delete Russian stuff?'))
if ok:
fileList={
'contenters':['CXZ.py','FastTorrent.py','KinoPoisk.py','RiperAM.py'],
'searchers':['NNMClubRu.py','OpenSharing.py','RiperAM.py','RuTorOrg.py','RuTrackerOrg.py','TFileME.py']
fileList = {
'contenters': ['CXZ.py', 'FastTorrent.py', 'KinoPoisk.py', 'RiperAM.py'],
'searchers': ['NNMClubRu.py', 'OpenSharing.py', 'RiperAM.py', 'RuTorOrg.py', 'RuTrackerOrg.py',
'TFileME.py']
}

for path in fileList.keys():
for filename in fileList[path]:
if action=='delete':
filepath=os.path.join(ROOT,'resources', path,filename)
if action == 'delete':
filepath = os.path.join(ROOT, 'resources', path, filename)
if xbmcvfs.exists(filepath):
newfilepath=os.path.join(ROOT,'resources', path,'unused',filename)
newfilepath = os.path.join(ROOT, 'resources', path, 'unused', filename)
xbmcvfs.copy(filepath, newfilepath)
xbmcvfs.delete(filepath)
elif action=='return':
filepath=os.path.join(ROOT,'resources', path,'unused',filename)
elif action == 'return':
filepath = os.path.join(ROOT, 'resources', path, 'unused', filename)
if xbmcvfs.exists(filepath):
newfilepath=os.path.join(ROOT,'resources',path,filename)
newfilepath = os.path.join(ROOT, 'resources', path, filename)
xbmcvfs.copy(filepath, newfilepath)
xbmcvfs.delete(filepath)
i=i+1
i = i + 1

if action=='return':
if action == 'return':
return i
return True
else:
return False
class DownloadDB:
def __init__(self, version=1.41):
self.name = 'download.db3'

@@ -1426,26 +1437,32 @@ class DownloadDB:
def get_all(self):
self._connect()
try:
self.cur.execute('select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads order by addtime DESC')
self.cur.execute(
'select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads order by addtime DESC')
except:
Debug('[DownloadDB]: DELETE '+str(self.filename))
Debug('[DownloadDB]: DELETE ' + str(self.filename))
xbmcvfs.delete(self.filename)
self._connect()
self.cur.execute('select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads order by addtime DESC')
self.cur.execute(
'select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads order by addtime DESC')
x = self.cur.fetchall()
self._close()
return x if x else None

def get(self, title):
self._connect()
self.cur.execute('select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads where title="' + decode(title) + '"')
self.cur.execute(
'select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads where title="' + decode(
title) + '"')
x = self.cur.fetchone()
self._close()
return x if x else None

def get_byaddtime(self, addtime):
self._connect()
self.cur.execute('select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads where addtime="' + str(addtime) + '"')
self.cur.execute(
'select addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage from downloads where addtime="' + str(
addtime) + '"')
x = self.cur.fetchone()
self._close()
return x if x else None
@@ -1460,8 +1477,11 @@ class DownloadDB:
def add(self, title, path, type, info, status, torrent, ind, storage):
if not self.get(title):
self._connect()
self.cur.execute('insert into downloads(addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage)'
' values(?,?,?,?,?,?,?,?,?,?)', (int(time.time()), decode(title), decode(path), type, json.dumps(info), status, decode(torrent), ind, int(time.time()), decode(storage)))
self.cur.execute(
'insert into downloads(addtime, title, path, type, jsoninfo, status, torrent, ind, lastupdate, storage)'
' values(?,?,?,?,?,?,?,?,?,?)', (
int(time.time()), decode(title), decode(path), type, json.dumps(info), status, decode(torrent), ind,
int(time.time()), decode(storage)))
self.db.commit()
self._close()
return True
@@ -1474,13 +1494,15 @@ class DownloadDB:
except:
pass
self._connect()
self.cur.execute('UPDATE downloads SET jsoninfo = "' + urllib.quote_plus(json.dumps(info)) + '", lastupdate='+str(int(time.time()))+' where title="' + title+'"')
self.cur.execute(
'UPDATE downloads SET jsoninfo = "' + urllib.quote_plus(json.dumps(info)) + '", lastupdate=' + str(
int(time.time())) + ' where title="' + title + '"')
self.db.commit()
self._close()

def update_status(self, addtime, status):
self._connect()
self.cur.execute('UPDATE downloads SET status = "' + status + '" where addtime="' + str(addtime)+'"')
self.cur.execute('UPDATE downloads SET status = "' + status + '" where addtime="' + str(addtime) + '"')
self.db.commit()
self._close()
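One editorial note on the two UPDATE statements above (a sketch, not part of the commit): they splice title, status and addtime straight into the SQL text, whereas add() already binds its values with ? placeholders. An equivalent parameterized form against the same downloads table would be:

# Sketch only: same columns as above, values bound instead of concatenated.
self.cur.execute('UPDATE downloads SET jsoninfo = ?, lastupdate = ? WHERE title = ?',
                 (urllib.quote_plus(json.dumps(info)), int(time.time()), title))
self.cur.execute('UPDATE downloads SET status = ? WHERE addtime = ?', (status, addtime))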
@@ -1533,7 +1555,7 @@ class DownloadDB:
cur.execute('create table db_ver(version real)')
cur.execute(
'create table downloads(addtime integer PRIMARY KEY, title varchar(32), path varchar(32), type varchar(32), jsoninfo varchar(32), status varchar(32), torrent varchar(32), ind integer, lastupdate integer, storage varchar(32))')
cur.execute('insert into db_ver(version) values(?)', (self.version, ))
cur.execute('insert into db_ver(version) values(?)', (self.version,))
self.db.commit()
cur.close()
self.cur = self.db.cursor()

@@ -1542,6 +1564,7 @@ class DownloadDB:
self.cur.close()
self.db.close()
def decode(string, ret=None):
try:
string = string.decode('utf-8')

@@ -1552,6 +1575,7 @@ def decode(string, ret=None):
else:
return string

def unquote(string, ret=None):
try:
return urllib.unquote_plus(string)

@@ -1561,6 +1585,7 @@ def unquote(string, ret=None):
else:
return string

def itemScrap(item, kwarg):
# Debug('[itemTVDB]:meta '+str(kwarg))
if 'title' in kwarg and kwarg['title']:
@@ -1584,11 +1609,13 @@ def itemScrap(item, kwarg):

return item

def get_ids_video(contentList):
ids_video=[]
allowed_video_ext=['avi','mp4','mkv','flv','mov','vob','wmv','ogm','asx','mpg','mpeg','avc','vp3','fli','flc','m4v','iso']
allowed_music_ext=['mp3','flac','wma','ogg','m4a','aac','m4p','rm','ra']
for extlist in [allowed_video_ext,allowed_music_ext]:
ids_video = []
allowed_video_ext = ['avi', 'mp4', 'mkv', 'flv', 'mov', 'vob', 'wmv', 'ogm', 'asx', 'mpg', 'mpeg', 'avc', 'vp3',
'fli', 'flc', 'm4v', 'iso']
allowed_music_ext = ['mp3', 'flac', 'wma', 'ogg', 'm4a', 'aac', 'm4p', 'rm', 'ra']
for extlist in [allowed_video_ext, allowed_music_ext]:
for title, identifier in contentList:
try:
ext = title.split('.')[-1]

@@ -1596,7 +1623,7 @@ def get_ids_video(contentList):
ids_video.append(str(identifier))
except:
pass
if len(ids_video)>1:
if len(ids_video) > 1:
break
#print Debug('[get_ids_video]:'+str(ids_video))
# print Debug('[get_ids_video]:'+str(ids_video))
return ids_video
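A small usage sketch for get_ids_video() (the content list is made up): it collects the indices whose titles end in a known video or music extension and stops early once more than one has been found.

# Sketch only: (title, identifier) pairs are illustrative.
contentList = [('Avatar.2009.mkv', 0), ('readme.txt', 1), ('sample.avi', 2)]
print get_ids_video(contentList)  # ['0', '2']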
64 platform.py
@@ -1,64 +0,0 @@
import xbmc
import sys
import os


def get_platform():
    ret = {
        "arch": sys.maxsize > 2**32 and "x64" or "x86",
    }
    if xbmc.getCondVisibility("system.platform.android"):
        ret["os"] = "android"
        if "arm" in os.uname()[4]:
            ret["arch"] = "arm"
    elif xbmc.getCondVisibility("system.platform.linux"):
        ret["os"] = "linux"
        if "arm" in os.uname()[4]:
            ret["arch"] = "arm"
    elif xbmc.getCondVisibility("system.platform.xbox"):
        system_platform = "xbox"
        ret["arch"] = ""
    elif xbmc.getCondVisibility("system.platform.windows"):
        ret["os"] = "windows"
    elif xbmc.getCondVisibility("system.platform.osx"):
        ret["os"] = "darwin"
    elif xbmc.getCondVisibility("system.platform.ios"):
        ret["os"] = "ios"
        ret["arch"] = "arm"

    ret["system"] = ''
    ret["message"]=['','']

    if ret["os"]=='windows':
        ret["system"] = 'windows'
        ret["message"]=['Windows has static compiled python-libtorrent included.',
                        'You should install "script.module.libtorrent" from "MyShows.me Kodi Repo"']
    elif ret["os"] == "linux" and ret["arch"] == "x64":
        ret["system"] = 'linux_x86_64'
        ret["message"]=['Linux x64 has not static compiled python-libtorrent included.',
                        'You should install it by "sudo apt-get install python-libtorrent"']
    elif ret["os"] == "linux" and ret["arch"] == "x86":
        ret["system"] = 'linux_x86'
        ret["message"]=['Linux has static compiled python-libtorrent included but it didn\'t work.',
                        'You should install it by "sudo apt-get install python-libtorrent"']
    elif ret["os"] == "linux" and ret["arch"] == "arm":
        ret["system"] = 'linux_arm'
        ret["message"]=['As far as I know you can compile python-libtorrent for ARMv6-7.',
                        'You should search for "OneEvil\'s OpenELEC libtorrent" or use Ace Stream.']
    elif ret["os"] == "android":
        ret["system"] = 'android'
        ret["message"]=['Please use install Ace Stream APK and choose it in Settings.',
                        'It is possible to compile python-libtorrent for Android, but I don\'t know how.']
    elif ret["os"] == "darwin":
        ret["system"] = 'darwin'
        ret["message"]=['It is possible to compile python-libtorrent for OS X.',
                        'But you would have to do it by yourself, there is some info on github.com.']
    elif ret["os"] == "ios":
        ret["system"] = 'ios'
        ret["message"]=['It is NOT possible to compile python-libtorrent for iOS.',
                        'But you can use torrent-client control functions.']

    return ret


PLATFORM = get_platform()
@@ -0,0 +1,65 @@
import sys
import os

import xbmc


def get_platform():
    ret = {
        "arch": sys.maxsize > 2 ** 32 and "x64" or "x86",
    }
    if xbmc.getCondVisibility("system.platform.android"):
        ret["os"] = "android"
        if "arm" in os.uname()[4]:
            ret["arch"] = "arm"
    elif xbmc.getCondVisibility("system.platform.linux"):
        ret["os"] = "linux"
        if "arm" in os.uname()[4]:
            ret["arch"] = "arm"
    elif xbmc.getCondVisibility("system.platform.xbox"):
        system_platform = "xbox"
        ret["arch"] = ""
    elif xbmc.getCondVisibility("system.platform.windows"):
        ret["os"] = "windows"
    elif xbmc.getCondVisibility("system.platform.osx"):
        ret["os"] = "darwin"
    elif xbmc.getCondVisibility("system.platform.ios"):
        ret["os"] = "ios"
        ret["arch"] = "arm"

    ret["system"] = ''
    ret["message"] = ['', '']

    if ret["os"] == 'windows':
        ret["system"] = 'windows'
        ret["message"] = ['Windows has static compiled python-libtorrent included.',
                          'You should install "script.module.libtorrent" from "MyShows.me Kodi Repo"']
    elif ret["os"] == "linux" and ret["arch"] == "x64":
        ret["system"] = 'linux_x86_64'
        ret["message"] = ['Linux x64 has not static compiled python-libtorrent included.',
                          'You should install it by "sudo apt-get install python-libtorrent"']
    elif ret["os"] == "linux" and ret["arch"] == "x86":
        ret["system"] = 'linux_x86'
        ret["message"] = ['Linux has static compiled python-libtorrent included but it didn\'t work.',
                          'You should install it by "sudo apt-get install python-libtorrent"']
    elif ret["os"] == "linux" and ret["arch"] == "arm":
        ret["system"] = 'linux_arm'
        ret["message"] = ['As far as I know you can compile python-libtorrent for ARMv6-7.',
                          'You should search for "OneEvil\'s OpenELEC libtorrent" or use Ace Stream.']
    elif ret["os"] == "android":
        ret["system"] = 'android'
        ret["message"] = ['Please use install Ace Stream APK and choose it in Settings.',
                          'It is possible to compile python-libtorrent for Android, but I don\'t know how.']
    elif ret["os"] == "darwin":
        ret["system"] = 'darwin'
        ret["message"] = ['It is possible to compile python-libtorrent for OS X.',
                          'But you would have to do it by yourself, there is some info on github.com.']
    elif ret["os"] == "ios":
        ret["system"] = 'ios'
        ret["message"] = ['It is NOT possible to compile python-libtorrent for iOS.',
                          'But you can use torrent-client control functions.']

    return ret


PLATFORM = get_platform()
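The new module evaluates get_platform() once at import time and exposes the result as PLATFORM. A hedged sketch of how a caller might branch on it (the import path and the branching policy are assumptions and are not shown in this diff):

# Sketch only: assumes the new platform.py above is importable from the add-on directory,
# which shadows the standard-library module of the same name there.
from platform import PLATFORM

if PLATFORM['system'] in ('windows', 'linux_x86', 'linux_x86_64'):
    print 'Expecting bundled or system python-libtorrent: ' + PLATFORM['system']
else:
    # PLATFORM['message'] holds the two dialog lines prepared above.
    print PLATFORM['message'][0]
    print PLATFORM['message'][1]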
@ -18,7 +18,10 @@
|
|||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
|
||||
import Content, re
|
||||
import re
|
||||
|
||||
import Content
|
||||
|
||||
|
||||
class EZTV(Content.Content):
|
||||
category_dict = {
|
||||
|
@ -59,31 +62,31 @@ class EZTV(Content.Content):
|
|||
response = self.makeRequest(url, headers=self.headers)
|
||||
|
||||
if None != response and 0 < len(response):
|
||||
#print response
|
||||
# print response
|
||||
if category in ['hot']:
|
||||
contentList = self.mode(response)
|
||||
#print str(contentList)
|
||||
# print str(contentList)
|
||||
return contentList
|
||||
|
||||
def mode(self, response):
|
||||
contentList = []
|
||||
#print str(result)
|
||||
# print str(result)
|
||||
num = 51
|
||||
result = re.compile(
|
||||
r'''class="epinfo">(.+?)</a>.+?<a href="(magnet.+?)".+?<td align="center" class="forum_thread_post">(.+?) </td>''',
|
||||
re.DOTALL).findall(response)
|
||||
for title, link, date in result:
|
||||
#main
|
||||
# main
|
||||
info = {}
|
||||
num = num - 1
|
||||
original_title = None
|
||||
year = 0
|
||||
img = ''
|
||||
#info
|
||||
# info
|
||||
|
||||
info['label'] = info['title'] = title
|
||||
info['link'] = link
|
||||
info['plot'] = info['title']+'\r\nAge: %s' % (date)
|
||||
info['plot'] = info['title'] + '\r\nAge: %s' % (date)
|
||||
|
||||
contentList.append((
|
||||
int(int(self.sourceWeight) * (int(num))),
|
||||
|
|
|
@ -18,19 +18,24 @@
|
|||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
import re
|
||||
|
||||
import Content
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
|
||||
|
||||
class KickAssSo(Content.Content):
|
||||
category_dict = {
|
||||
'hot': ('Most Recent', '/new/?field=seeders&sorder=desc', {'page': '/new/%d/?field=seeders&sorder=desc', 'increase': 1, 'second_page': 2,
|
||||
' ':[{'name':' ', 'url_after':'?field=seeders&sorder=desc'}]}),
|
||||
'hot': ('Most Recent', '/new/?field=seeders&sorder=desc',
|
||||
{'page': '/new/%d/?field=seeders&sorder=desc', 'increase': 1, 'second_page': 2,
|
||||
' ': [{'name': ' ', 'url_after': '?field=seeders&sorder=desc'}]}),
|
||||
'anime': ('Anime', '/anime/', {'page': '/anime/%d/', 'increase': 1, 'second_page': 2,
|
||||
' ':[{'name':' ', 'url_after':'?field=seeders&sorder=desc'}]}),
|
||||
'tvshows': ('TV Shows', '/tv/?field=seeders&sorder=desc', {'page': '/tv/%d/?field=seeders&sorder=desc', 'increase': 1, 'second_page': 2,
|
||||
' ':[{'name':' ', 'url_after':'?field=seeders&sorder=desc'}]}),
|
||||
'movies': ('Movies', '/movies/?field=seeders&sorder=desc', {'page': '/movies/%d/?field=seeders&sorder=desc', 'increase': 1, 'second_page': 2,
|
||||
' ':[{'name':' ', 'url_after':'?field=seeders&sorder=desc'}]}),
|
||||
' ': [{'name': ' ', 'url_after': '?field=seeders&sorder=desc'}]}),
|
||||
'tvshows': ('TV Shows', '/tv/?field=seeders&sorder=desc',
|
||||
{'page': '/tv/%d/?field=seeders&sorder=desc', 'increase': 1, 'second_page': 2,
|
||||
' ': [{'name': ' ', 'url_after': '?field=seeders&sorder=desc'}]}),
|
||||
'movies': ('Movies', '/movies/?field=seeders&sorder=desc',
|
||||
{'page': '/movies/%d/?field=seeders&sorder=desc', 'increase': 1, 'second_page': 2,
|
||||
' ': [{'name': ' ', 'url_after': '?field=seeders&sorder=desc'}]}),
|
||||
}
|
||||
|
||||
baseurl = "http://kat.cr"
|
||||
|
@ -70,36 +75,37 @@ class KickAssSo(Content.Content):
|
|||
response = self.makeRequest(url, headers=self.headers)
|
||||
|
||||
if None != response and 0 < len(response):
|
||||
#print response
|
||||
# print response
|
||||
if category:
|
||||
contentList = self.mode(response)
|
||||
#print str(contentList)
|
||||
# print str(contentList)
|
||||
return contentList
|
||||
|
||||
def mode(self, response):
|
||||
contentList = []
|
||||
#print str(result)
|
||||
# print str(result)
|
||||
num = 51
|
||||
good_forums=['TV','Anime','Movies']
|
||||
good_forums = ['TV', 'Anime', 'Movies']
|
||||
result = re.compile(
|
||||
r'''title="Download torrent file" href="(.+?\.torrent).+?" class=".+?"><i.+?<a.+?<a.+?<a href="(.+?html)" class=".+?">(.+?)</a>.+? in <span.+?"><strong>.+?">(.+?)</a>.+?<td class="nobr center">(.+?)</td>.+?<td class="center">(\d+ .+?)</td>.+?<td class="green center">(\d+?)</td>.+?<td class="red lasttd center">(\d+?)</td>''',
|
||||
re.DOTALL).findall(response)
|
||||
for link,infolink,title,forum,size,date,seeds,leechers in result:
|
||||
#main
|
||||
for link, infolink, title, forum, size, date, seeds, leechers in result:
|
||||
# main
|
||||
if forum in good_forums:
|
||||
info = {}
|
||||
num = num - 1
|
||||
original_title = None
|
||||
year = 0
|
||||
img = ''
|
||||
#info
|
||||
# info
|
||||
|
||||
info['label'] = info['title'] = self.unescape(title)
|
||||
info['link'] = link
|
||||
info['infolink']=self.baseurl+infolink
|
||||
info['infolink'] = self.baseurl + infolink
|
||||
size = self.unescape(self.stripHtml(size))
|
||||
date=self.unescape(self.stripHtml(date))
|
||||
info['plot'] = info['title']+'\r\n[I](%s) [S/L: %s/%s] [/I]\r\nAge: %s' % (size, seeds, leechers, date)
|
||||
date = self.unescape(self.stripHtml(date))
|
||||
info['plot'] = info['title'] + '\r\n[I](%s) [S/L: %s/%s] [/I]\r\nAge: %s' % (
|
||||
size, seeds, leechers, date)
|
||||
|
||||
contentList.append((
|
||||
int(int(self.sourceWeight) * (int(num))),
|
||||
|
@ -108,8 +114,8 @@ class KickAssSo(Content.Content):
|
|||
return contentList
|
||||
|
||||
def get_info(self, url):
|
||||
movieInfo={}
|
||||
color='[COLOR blue]%s:[/COLOR] %s\r\n'
|
||||
movieInfo = {}
|
||||
color = '[COLOR blue]%s:[/COLOR] %s\r\n'
|
||||
response = self.makeRequest(url, headers=self.headers)
|
||||
|
||||
if None != response and 0 < len(response):
|
||||
|
@ -117,57 +123,59 @@ class KickAssSo(Content.Content):
|
|||
result = Soup.find('div', 'torrentMediaInfo')
|
||||
if not result:
|
||||
return None
|
||||
li=result.findAll('li')
|
||||
info,movieInfo={'Cast':''},{'desc':'','poster':'','title':'','views':'0','rating':'50','kinopoisk':''}
|
||||
li = result.findAll('li')
|
||||
info, movieInfo = {'Cast': ''}, {'desc': '', 'poster': '', 'title': '', 'views': '0', 'rating': '50',
|
||||
'kinopoisk': ''}
|
||||
try:
|
||||
img=result.find('a',{'class':'movieCover'}).find('img').get('src')
|
||||
movieInfo['poster']='http:'+img
|
||||
img = result.find('a', {'class': 'movieCover'}).find('img').get('src')
|
||||
movieInfo['poster'] = 'http:' + img
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
movie=re.compile('View all <strong>(.+?)</strong> episodes</a>').match(str(result))
|
||||
movie = re.compile('View all <strong>(.+?)</strong> episodes</a>').match(str(result))
|
||||
if movie:
|
||||
info['Movie']=movie.group(1)
|
||||
info['Movie'] = movie.group(1)
|
||||
except:
|
||||
pass
|
||||
for i in li:
|
||||
name=i.find('strong').text
|
||||
name = i.find('strong').text
|
||||
if name:
|
||||
info[name.rstrip(':')]=i.text.replace(name,'',1)
|
||||
plot=result.find('div',{'id':'summary'})
|
||||
info[name.rstrip(':')] = i.text.replace(name, '', 1)
|
||||
plot = result.find('div', {'id': 'summary'})
|
||||
if plot:
|
||||
cut=plot.find('strong').text
|
||||
info['plot']=plot.text.replace(cut,'',1).replace('report summary','')
|
||||
#print str(result)
|
||||
cast=re.compile('<a href="/movies/actor/.+?">(.+?)</a>').findall(str(result))
|
||||
cut = plot.find('strong').text
|
||||
info['plot'] = plot.text.replace(cut, '', 1).replace('report summary', '')
|
||||
# print str(result)
|
||||
cast = re.compile('<a href="/movies/actor/.+?">(.+?)</a>').findall(str(result))
|
||||
if cast:
|
||||
for actor in cast:
|
||||
info['Cast']+=actor+", "
|
||||
info['Cast'] += actor + ", "
|
||||
if 'Genres' in info:
|
||||
info['Genres']=info['Genres'].replace(', ',',').replace(',',', ')
|
||||
info['Genres'] = info['Genres'].replace(', ', ',').replace(',', ', ')
|
||||
for key in info.keys():
|
||||
if not 'Movie' in info and info[key]=='addto bookmarks':
|
||||
movieInfo['title']=self.unescape(key)
|
||||
info['TV Show']=self.unescape(key)
|
||||
if not 'Movie' in info and info[key] == 'addto bookmarks':
|
||||
movieInfo['title'] = self.unescape(key)
|
||||
info['TV Show'] = self.unescape(key)
|
||||
if not 'plot' in info and 'Summary' in key:
|
||||
info['plot']=info[key]
|
||||
info['plot'] = info[key]
|
||||
|
||||
for i in ['Movie','TV Show','Release date','Original run','Episode','Air date','Genres','Language','Director','Writers','Cast','Original run','IMDb rating','AniDB rating']:
|
||||
for i in ['Movie', 'TV Show', 'Release date', 'Original run', 'Episode', 'Air date', 'Genres', 'Language',
|
||||
'Director', 'Writers', 'Cast', 'Original run', 'IMDb rating', 'AniDB rating']:
|
||||
if info.get(i) and info.get(i) not in ['']:
|
||||
movieInfo['desc']+=color %(i,info.get(i))
|
||||
if i=='Movie':
|
||||
movieInfo['title']=info.get(i)
|
||||
movieInfo['desc'] += color % (i, info.get(i))
|
||||
if i == 'Movie':
|
||||
movieInfo['title'] = info.get(i)
|
||||
|
||||
for i in ['plot','IMDb link','RottenTomatoes']:
|
||||
for i in ['plot', 'IMDb link', 'RottenTomatoes']:
|
||||
if info.get(i) and info.get(i) not in ['']:
|
||||
if i=='plot':
|
||||
movieInfo['desc']+='\r\n[COLOR blue]Plot:[/COLOR]\r\n'+info.get(i)
|
||||
if i=='RottenTomatoes':
|
||||
movieInfo['rating']=str(info.get(i).split('%')[0])
|
||||
if i=='IMDb link':
|
||||
movieInfo['kinopoisk']='http://imdb.snick.ru/ratefor/02/tt%s.png' % info.get(i)
|
||||
if i == 'plot':
|
||||
movieInfo['desc'] += '\r\n[COLOR blue]Plot:[/COLOR]\r\n' + info.get(i)
|
||||
if i == 'RottenTomatoes':
|
||||
movieInfo['rating'] = str(info.get(i).split('%')[0])
|
||||
if i == 'IMDb link':
|
||||
movieInfo['kinopoisk'] = 'http://imdb.snick.ru/ratefor/02/tt%s.png' % info.get(i)
|
||||
|
||||
|
||||
#print str(info)
|
||||
# print str(info)
|
||||
|
||||
return movieInfo
|
|
@ -17,34 +17,40 @@
|
|||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
import re
|
||||
import Content
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
|
||||
|
||||
class SWESUB(Content.Content):
|
||||
category_dict = {
|
||||
'tvshows': ('TV Shows', '/senaste-tv-serier/', {'page': '/senaste-tv-serier/?page=%d',
|
||||
'increase': 1, 'second_page': 2,}),
|
||||
'movies': ('Movies', '/senaste-filmer/'),#, {'page': '/senaste-filmer/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'increase': 1, 'second_page': 2, }),
|
||||
'movies': ('Movies', '/senaste-filmer/'),
|
||||
# , {'page': '/senaste-filmer/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'genre': {'genre': 'by Genre',
|
||||
'action': ('Action', '/action/', {'page': '/action/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'adventure': ('Adventure', '/aventyr/', {'page': '/aventyr/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'animation': ('Animation', '/animerat/', {'page': '/animerat/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'comedy': ('Comedy', '/komedi/', {'page': '/komedi/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'crime': ('Crime', '/kriminal/', {'page': '/kriminal/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'documentary': ('Documentary', '/dokumentar/', {'page': '/dokumentar/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'drama': ('Drama', '/drama/', {'page': '/drama/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'family': ('Family', '/familj/', {'page': '/familj/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'fantasy': ('Fantasy', '/fantasy/', {'page': '/fantasy/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'horror': ('Horror', '/skrack/', {'page': '/skrack/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'music': ('Music', '/dans/', {'page': '/dans/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'musical': ('Musical', '/musikal/', {'page': '/musikal/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'romance': ('Romance', '/romantik/', {'page': '/romantik/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'sci_fi': ('Sci-Fi', '/sci-fi/', {'page': '/sci-fi/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'sport': ('Sport', '/sport/', {'page': '/sport/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'thriller': ('Thriller', '/thriller/', {'page': '/thriller/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'war': ('War', '/krig/', {'page': '/krig/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'western': ('Western', '/western/', {'page': '/western/?page=%d', 'increase': 1, 'second_page': 2,}),
|
||||
'action': ('Action', '/action/', {'page': '/action/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'adventure': (
|
||||
'Adventure', '/aventyr/', {'page': '/aventyr/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'animation': (
|
||||
'Animation', '/animerat/', {'page': '/animerat/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'comedy': ('Comedy', '/komedi/', {'page': '/komedi/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'crime': ('Crime', '/kriminal/', {'page': '/kriminal/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'documentary': (
|
||||
'Documentary', '/dokumentar/', {'page': '/dokumentar/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'drama': ('Drama', '/drama/', {'page': '/drama/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'family': ('Family', '/familj/', {'page': '/familj/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'fantasy': ('Fantasy', '/fantasy/', {'page': '/fantasy/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'horror': ('Horror', '/skrack/', {'page': '/skrack/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'music': ('Music', '/dans/', {'page': '/dans/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'musical': ('Musical', '/musikal/', {'page': '/musikal/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'romance': (
|
||||
'Romance', '/romantik/', {'page': '/romantik/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'sci_fi': ('Sci-Fi', '/sci-fi/', {'page': '/sci-fi/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'sport': ('Sport', '/sport/', {'page': '/sport/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'thriller': (
|
||||
'Thriller', '/thriller/', {'page': '/thriller/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'war': ('War', '/krig/', {'page': '/krig/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
'western': ('Western', '/western/', {'page': '/western/?page=%d', 'increase': 1, 'second_page': 2, }),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -83,39 +89,39 @@ class SWESUB(Content.Content):
|
|||
url = self.get_url(category, subcategory, apps_property)
|
||||
|
||||
response = self.makeRequest(url, headers=self.headers)
|
||||
response=response.decode('iso-8859-1')
|
||||
response = response.decode('iso-8859-1')
|
||||
|
||||
if None != response and 0 < len(response):
|
||||
if category:
|
||||
contentList = self.mode(response)
|
||||
#print str(contentList)
|
||||
# print str(contentList)
|
||||
return contentList
|
||||
|
||||
def mode(self, response):
|
||||
contentList = []
|
||||
num = 51
|
||||
Soup = BeautifulSoup(response)
|
||||
result = Soup.findAll('article', {'class':'box'})
|
||||
#print str(result)
|
||||
result = Soup.findAll('article', {'class': 'box'})
|
||||
# print str(result)
|
||||
for article in result:
|
||||
#main
|
||||
# main
|
||||
info = {}
|
||||
num = num - 1
|
||||
original_title = None
|
||||
year = 0
|
||||
|
||||
div=article.find('div', {'class':'box-img'})
|
||||
title=div.find('img').get('alt')
|
||||
img=div.find('img').get('src')
|
||||
link=div.find('a').get('href').replace(self.baseurl,'').replace('.html','')
|
||||
div = article.find('div', {'class': 'box-img'})
|
||||
title = div.find('img').get('alt')
|
||||
img = div.find('img').get('src')
|
||||
link = div.find('a').get('href').replace(self.baseurl, '').replace('.html', '')
|
||||
|
||||
#info
|
||||
# info
|
||||
|
||||
info['label'] = info['title'] = self.unescape(title)
|
||||
info['link'] = '%s::%s' % ('Nyaa', self.baseurl+'/downloads'+link+'/')
|
||||
info['infolink']=self.baseurl+link+'.html'
|
||||
info['link'] = '%s::%s' % ('Nyaa', self.baseurl + '/downloads' + link + '/')
|
||||
info['infolink'] = self.baseurl + link + '.html'
|
||||
|
||||
info['plot'] = article.find('div',{'class':'item-content'}).text
|
||||
info['plot'] = article.find('div', {'class': 'item-content'}).text
|
||||
|
||||
contentList.append((
|
||||
int(int(self.sourceWeight) * (int(num))),
|
||||
|
|
|
@ -18,22 +18,24 @@
|
|||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
'''
|
||||
import re
|
||||
|
||||
import Content
|
||||
|
||||
|
||||
class ThePirateBaySe(Content.Content):
|
||||
category_dict = {
|
||||
'tvshows': ('TV Shows', '/browse/205', {'page': '/browse/208/%d', 'increase': 1, 'second_page': 1,
|
||||
'sort':[{'name':'by Seeders', 'url_after':'/0/7/0'},
|
||||
{'name':'by Date', 'url_after':'/0/3/0'}]}),
|
||||
'sort': [{'name': 'by Seeders', 'url_after': '/0/7/0'},
|
||||
{'name': 'by Date', 'url_after': '/0/3/0'}]}),
|
||||
'tvshowshd': ('TV Shows [HD]', '/browse/208', {'page': '/browse/208/%d', 'increase': 1, 'second_page': 1,
|
||||
'sort':[{'name':'by Seeders', 'url_after':'/0/7/0'},
|
||||
{'name':'by Date', 'url_after':'/0/3/0'}]}),
|
||||
'sort': [{'name': 'by Seeders', 'url_after': '/0/7/0'},
|
||||
{'name': 'by Date', 'url_after': '/0/3/0'}]}),
|
||||
'movies': ('Movies', '/browse/201', {'page': '/browse/208/%d', 'increase': 1, 'second_page': 1,
|
||||
'sort':[{'name':'by Seeders', 'url_after':'/0/7/0'},
|
||||
{'name':'by Date', 'url_after':'/0/3/0'}]}),
|
||||
'sort': [{'name': 'by Seeders', 'url_after': '/0/7/0'},
|
||||
{'name': 'by Date', 'url_after': '/0/3/0'}]}),
|
||||
'movieshd': ('Movies [HD]', '/browse/207', {'page': '/browse/208/%d', 'increase': 1, 'second_page': 1,
|
||||
'sort':[{'name':'by Seeders', 'url_after':'/0/7/0'},
|
||||
{'name':'by Date', 'url_after':'/0/3/0'}]}),
|
||||
'sort': [{'name': 'by Seeders', 'url_after': '/0/7/0'},
|
||||
{'name': 'by Date', 'url_after': '/0/3/0'}]}),
|
||||
}
|
||||
|
||||
baseurl = "https://thepiratebay.se"
|
||||
|
@ -73,15 +75,15 @@ class ThePirateBaySe(Content.Content):
|
|||
response = self.makeRequest(url, headers=self.headers)
|
||||
|
||||
if None != response and 0 < len(response):
|
||||
#print response
|
||||
# print response
|
||||
if category:
|
||||
contentList = self.mode(response)
|
||||
#print str(contentList)
|
||||
# print str(contentList)
|
||||
return contentList
|
||||
|
||||
def mode(self, response):
|
||||
contentList = []
|
||||
#print str(result)
|
||||
# print str(result)
|
||||
num = 31
|
||||
result = re.compile(
|
||||
r'''<div class="detName">.+?">(.+?)</a>.+?<a href="(.+?)".+?<font class="detDesc">Uploaded (.+?), Size (.+?), .+?</font>.+?<td align="right">(\d+?)</td>.+?<td align="right">(\d+?)</td>''',
|
||||
|
@ -95,11 +97,11 @@ class ThePirateBaySe(Content.Content):
|
|||
size = size.replace(' ', ' ')
|
||||
date = self.stripHtml(date.replace(' ', ' '))
|
||||
|
||||
#info
|
||||
# info
|
||||
|
||||
info['label'] = info['title'] = self.unescape(title)
|
||||
info['link'] = link
|
||||
info['plot'] = info['title']+'\r\n[I](%s) [S/L: %s/%s] [/I]\r\n%s' % (size, seeds, leechers, date)
|
||||
info['plot'] = info['title'] + '\r\n[I](%s) [S/L: %s/%s] [/I]\r\n%s' % (size, seeds, leechers, date)
|
||||
contentList.append((
|
||||
int(int(self.sourceWeight) * (int(num))),
|
||||
original_title, title, int(year), img, info,
|
||||
|
|
|
@ -11,9 +11,12 @@
|
|||
# PyXBMCt framework module
|
||||
|
||||
import os
|
||||
import xbmc, xbmcgui, xbmcaddon
|
||||
|
||||
#_addon = xbmcaddon.Addon()
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
|
||||
|
||||
# _addon = xbmcaddon.Addon()
|
||||
_images = os.path.join(os.path.dirname(__file__), 'textures', 'default')
|
||||
|
||||
|
||||
|
@ -82,6 +85,7 @@ class Label(xbmcgui.ControlLabel):
|
|||
Example:
|
||||
self.label = Label('Status', angle=45)
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
return super(Label, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs)
|
||||
|
||||
|
@ -100,6 +104,7 @@ class FadeLabel(xbmcgui.ControlFadeLabel):
|
|||
Example:
|
||||
self.fadelabel = FadeLabel(textColor='0xFFFFFFFF')
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
return super(FadeLabel, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs)
|
||||
|
||||
|
@ -117,6 +122,7 @@ class TextBox(xbmcgui.ControlTextBox):
|
|||
Example:
|
||||
self.textbox = TextBox(textColor='0xFFFFFFFF')
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
return super(TextBox, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs)
|
||||
|
||||
|
@ -136,6 +142,7 @@ class Image(xbmcgui.ControlImage):
|
|||
Example:
|
||||
self.image = Image('d:\images\picture.jpg', aspectRatio=2)
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
return super(Image, cls).__new__(cls, -10, -10, 1, 1, *args, **kwargs)
|
||||
|
||||
|
@ -163,6 +170,7 @@ class Button(xbmcgui.ControlButton):
|
|||
Example:
|
||||
self.button = Button('Status', font='font14')
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
textures = {'focusTexture': os.path.join(_images, 'Button', 'KeyboardKey.png'),
|
||||
'noFocusTexture': os.path.join(_images, 'Button', 'KeyboardKeyNF.png')}
|
||||
|
@ -202,6 +210,7 @@ class RadioButton(xbmcgui.ControlRadioButton):
|
|||
Example:
|
||||
self.radiobutton = RadioButton('Status', font='font14')
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if int(xbmc.getInfoLabel('System.BuildVersion')[:2]) >= 13:
|
||||
textures = {'focusTexture': os.path.join(_images, 'RadioButton', 'MenuItemFO.png'),
|
||||
|
@ -242,6 +251,7 @@ class Edit(xbmcgui.ControlEdit):
|
|||
example:
|
||||
- self.edit = Edit('Status')
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
textures = {'focusTexture': os.path.join(_images, 'Edit', 'button-focus.png'),
|
||||
'noFocusTexture': os.path.join(_images, 'Edit', 'black-back2.png')}
|
||||
|
@ -272,6 +282,7 @@ class List(xbmcgui.ControlList):
|
|||
Example:
|
||||
self.cList = List('font14', space=5)
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
textures = {'buttonTexture': os.path.join(_images, 'List', 'MenuItemNF.png'),
|
||||
'buttonFocusTexture': os.path.join(_images, 'List', 'MenuItemFO.png')}
|
||||
|
@ -293,6 +304,7 @@ class Slider(xbmcgui.ControlSlider):
|
|||
Example:
|
||||
self.slider = Slider()
|
||||
"""
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
textures = {'textureback': os.path.join(_images, 'Slider', 'osd_slider_bg.png'),
|
||||
'texture': os.path.join(_images, 'Slider', 'osd_slider_nibNF.png'),
|
||||
|
@ -302,7 +314,6 @@ class Slider(xbmcgui.ControlSlider):
|
|||
|
||||
|
||||
class _AbstractWindow(object):
|
||||
|
||||
"""
|
||||
Top-level control window.
|
||||
|
||||
|
@ -340,8 +351,8 @@ class _AbstractWindow(object):
|
|||
self.x = pos_x
|
||||
self.y = pos_y
|
||||
else:
|
||||
self.x = 640 - self.width/2
|
||||
self.y = 360 - self.height/2
|
||||
self.x = 640 - self.width / 2
|
||||
self.y = 360 - self.height / 2
|
||||
self.setGrid()
|
||||
|
||||
def setGrid(self):
|
||||
|
@ -351,8 +362,8 @@ class _AbstractWindow(object):
|
|||
"""
|
||||
self.grid_x = self.x
|
||||
self.grid_y = self.y
|
||||
self.tile_width = self.width/self.columns
|
||||
self.tile_height = self.height/self.rows
|
||||
self.tile_width = self.width / self.columns
|
||||
self.tile_height = self.height / self.rows
|
||||
|
||||
def placeControl(self, control, row, column, rowspan=1, columnspan=1, pad_x=5, pad_y=5):
|
||||
"""
|
||||
|
@ -525,7 +536,6 @@ class _AbstractWindow(object):
|
|||
|
||||
|
||||
class _AddonWindow(_AbstractWindow):
|
||||
|
||||
"""
|
||||
Top-level control window.
|
||||
|
||||
|
@ -574,8 +584,10 @@ class _AddonWindow(_AbstractWindow):
|
|||
self.addControl(self.title_bar)
|
||||
self.setAnimation(self.title_bar)
|
||||
self.window_close_button = xbmcgui.ControlButton(-100, -100, 60, 30, '',
|
||||
focusTexture=os.path.join(_images, 'AddonWindow', 'DialogCloseButton-focus.png'),
|
||||
noFocusTexture=os.path.join(_images, 'AddonWindow', 'DialogCloseButton.png'))
|
||||
focusTexture=os.path.join(_images, 'AddonWindow',
|
||||
'DialogCloseButton-focus.png'),
|
||||
noFocusTexture=os.path.join(_images, 'AddonWindow',
|
||||
'DialogCloseButton.png'))
|
||||
self.addControl(self.window_close_button)
|
||||
self.setAnimation(self.window_close_button)
|
||||
|
||||
|
@ -614,9 +626,10 @@ class _AddonWindow(_AbstractWindow):
|
|||
"""
|
||||
self.grid_x = self.x + self.X_MARGIN + self.win_padding
|
||||
self.grid_y = self.y + self.Y_MARGIN + self.Y_SHIFT + self.HEADER_HEIGHT + self.win_padding
|
||||
self.tile_width = (self.width - 2 * (self.X_MARGIN + self.win_padding))/self.columns
|
||||
self.tile_width = (self.width - 2 * (self.X_MARGIN + self.win_padding)) / self.columns
|
||||
self.tile_height = (
|
||||
self.height - self.HEADER_HEIGHT - self.Y_SHIFT - 2 * (self.Y_MARGIN + self.win_padding))/self.rows
|
||||
self.height - self.HEADER_HEIGHT - self.Y_SHIFT - 2 * (
|
||||
self.Y_MARGIN + self.win_padding)) / self.rows
|
||||
|
||||
def setWindowTitle(self, title=''):
|
||||
"""
|
||||
|
@ -632,8 +645,8 @@ class _AddonWindow(_AbstractWindow):
|
|||
"""Get window title."""
|
||||
return self.title_bar.getLabel()
|
||||
|
||||
class _FullWindow(xbmcgui.Window):
|
||||
|
||||
class _FullWindow(xbmcgui.Window):
|
||||
"""An abstract class to define window event processing."""
|
||||
|
||||
def onAction(self, action):
|
||||
|
@ -659,7 +672,6 @@ class _FullWindow(xbmcgui.Window):
|
|||
|
||||
|
||||
class _DialogWindow(xbmcgui.WindowDialog):
|
||||
|
||||
"""An abstract class to define window event processing."""
|
||||
|
||||
def onAction(self, action):
|
||||
|
@ -718,8 +730,8 @@ class BlankDialogWindow(_DialogWindow, _AbstractWindow):
|
|||
"""
|
||||
pass
|
||||
|
||||
class AddonFullWindow(_FullWindow, _AddonWindow):
|
||||
|
||||
class AddonFullWindow(_FullWindow, _AddonWindow):
|
||||
"""
|
||||
Addon UI container with a solid background.
|
||||
Control window is displayed on top of the main background image - self.main_bg.
|
||||
|
|
|
@ -12,7 +12,6 @@ import xbmcgui
|
|||
import Localization
|
||||
from net import HTTP
|
||||
|
||||
|
||||
try:
|
||||
from sqlite3 import dbapi2 as sqlite
|
||||
except:
|
||||
|
@ -33,7 +32,7 @@ class Cache:
|
|||
|
||||
def get(self, token, callback, *param):
|
||||
cur = self.db.cursor()
|
||||
cur.execute('select expire,data from cache where id=? limit 1', (token, ))
|
||||
cur.execute('select expire,data from cache where id=? limit 1', (token,))
|
||||
row = cur.fetchone()
|
||||
cur.close()
|
||||
|
||||
|
@ -67,7 +66,7 @@ class Cache:
|
|||
def expire(self, expire):
|
||||
# with rtrCache_lock:
|
||||
cur = self.db.cursor()
|
||||
cur.execute('delete from cache where addtime<?', (int(time.time()) - expire, ))
|
||||
cur.execute('delete from cache where addtime<?', (int(time.time()) - expire,))
|
||||
self.db.commit()
|
||||
cur.close()
|
||||
|
||||
|
@ -77,7 +76,7 @@ class Cache:
|
|||
if os.path.getsize(self.filename) < size:
|
||||
break
|
||||
cur = self.db.cursor()
|
||||
cur.execute('select id from cache order by addtime asc limit ?', (step, ))
|
||||
cur.execute('select id from cache order by addtime asc limit ?', (step,))
|
||||
rows = cur.fetchall()
|
||||
if not rows:
|
||||
cur.close()
|
||||
|
@ -129,7 +128,7 @@ class Cache:
|
|||
cur.execute('create table cache(id varchar(255) unique, addtime integer, expire integer, data blob)')
|
||||
cur.execute('create index time on cache(addtime asc)')
|
||||
cur.execute('create table db_ver(version real)')
|
||||
cur.execute('insert into db_ver(version) values(?)', (self.version, ))
|
||||
cur.execute('insert into db_ver(version) values(?)', (self.version,))
|
||||
self.db.commit()
|
||||
cur.close()
|
||||
|
||||
|
@ -146,7 +145,7 @@ class Cache:
|
|||
|
||||
def download(self):
|
||||
dirname = os.path.dirname(self.filename)
|
||||
zipname = os.path.basename(self.filename).replace('.db','') + '.zip'
|
||||
zipname = os.path.basename(self.filename).replace('.db', '') + '.zip'
|
||||
url = 'http://www.tat-store.ru/torrenter/' + zipname
|
||||
self.http = HTTP()
|
||||
response = self.http.fetch(url, download=os.path.join(dirname, zipname), progress=True)
|
||||
|
|
|
@ -27,10 +27,8 @@ import math
|
|||
import difflib
|
||||
|
||||
import translit
|
||||
import LOGGER as Log
|
||||
from HTTP import *
|
||||
|
||||
|
||||
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.172 Safari/537.22'
|
||||
# USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/534.51.22 (KHTML, like Gecko) Version/5.1.1 Safari/534.51.22'
|
||||
ENCODING_PLEX = 'utf-8'
|
||||
|
@ -230,7 +228,7 @@ def scoreMediaTitleMatch(mediaName, mediaYear, title, altTitle, year, itemIndex)
|
|||
elif yearDiff == 2:
|
||||
yearPenalty = 25
|
||||
score = score - yearPenalty
|
||||
#print str(mediaYear)+' '+str(year)+' '+str(yearPenalty)
|
||||
# print str(mediaYear)+' '+str(year)+' '+str(yearPenalty)
|
||||
|
||||
# Compute title penalty.
|
||||
titlePenalty = computeTitlePenalty(mediaName, title)
|
||||
|
|
|
@ -32,9 +32,11 @@ import pluginsettings as S
|
|||
import translit
|
||||
|
||||
|
||||
|
||||
|
||||
# MATCHER_MOVIE_DURATION = re.compile('\s*(\d+).*?', re.UNICODE | re.DOTALL)
|
||||
#MATCHER_IMDB_RATING = re.compile('IMDb:\s*(\d+\.?\d*)\s*\(\s*([\s\d]+)\s*\)', re.UNICODE | re.DOTALL)
|
||||
#MATCHER_IMDB_RATING = re.compile('IMDb:\s*(\d+\.?\d*)\s?\((.*)\)', re.UNICODE)
|
||||
# MATCHER_IMDB_RATING = re.compile('IMDb:\s*(\d+\.?\d*)\s*\(\s*([\s\d]+)\s*\)', re.UNICODE | re.DOTALL)
|
||||
# MATCHER_IMDB_RATING = re.compile('IMDb:\s*(\d+\.?\d*)\s?\((.*)\)', re.UNICODE)
|
||||
|
||||
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.172 Safari/537.22'
|
||||
'''
|
||||
|
@ -171,5 +173,3 @@ class PageParser:
|
|||
results.append([itemKinoPoiskId, itemTitle, itemYear, itemScore])
|
||||
|
||||
return results
|
||||
|
||||
|
||||
|
|
|
@ -19,11 +19,9 @@ Simple transliteration
|
|||
"""
|
||||
|
||||
# __id__ = __revision__ = "$Id: translit.py 102 2007-07-12 12:33:36Z the.pythy $"
|
||||
#__url__ = "$URL: https://pythy.googlecode.com/svn/tags/pytils/0_2_2/pytils/translit.py $"
|
||||
# __url__ = "$URL: https://pythy.googlecode.com/svn/tags/pytils/0_2_2/pytils/translit.py $"
|
||||
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
TRANSTABLE = (
|
||||
(u"'", u"'"),
|
||||
|
|
|
@ -11,7 +11,6 @@ import kinopoisk.LOGGER
|
|||
import kinopoisk.pageparser
|
||||
import kinopoisk.common
|
||||
|
||||
|
||||
GENRE = {
|
||||
'anime': 1750,
|
||||
'biography': 22,
|
||||
|
@ -295,7 +294,7 @@ class KinoPoisk:
|
|||
"""
|
||||
|
||||
def __init__(self, language='ru'):
|
||||
dbname='kinopoisk.%s.db' % language
|
||||
dbname = 'kinopoisk.%s.db' % language
|
||||
self.cache = Cache(dbname, 1.0)
|
||||
self.html = Clear()
|
||||
|
||||
|
@ -463,7 +462,8 @@ class KinoPoisk:
|
|||
if len(data) == 3:
|
||||
i = 0
|
||||
for mon in (
|
||||
u'января', u'февраля', u'марта', u'апреля', u'мая', u'июня', u'июля', u'августа', u'сентября',
|
||||
u'января', u'февраля', u'марта', u'апреля', u'мая', u'июня', u'июля', u'августа',
|
||||
u'сентября',
|
||||
u'октября', u'ноября', u'декабря'):
|
||||
i += 1
|
||||
if mon == data[1]:
|
||||
|
|
|
@ -12,10 +12,8 @@ import itertools
|
|||
|
||||
import xbmc
|
||||
import xbmcgui
|
||||
import xbmcaddon
|
||||
import xbmcvfs
|
||||
|
||||
|
||||
RE = {
|
||||
'content-disposition': re.compile('attachment;\sfilename="*([^"\s]+)"|\s')
|
||||
}
|
||||
|
@ -34,7 +32,6 @@ class HTTP:
|
|||
if not xbmcvfs.exists(self._dirname):
|
||||
xbmcvfs.mkdir(self._dirname)
|
||||
|
||||
|
||||
def fetch(self, request, **kwargs):
|
||||
self.con, self.fd, self.progress, self.cookies, self.request = None, None, None, None, request
|
||||
|
||||
|
@ -74,7 +71,6 @@ class HTTP:
|
|||
|
||||
return self.response
|
||||
|
||||
|
||||
def _opener(self):
|
||||
|
||||
build = [urllib2.HTTPHandler()]
|
||||
|
@ -101,7 +97,6 @@ class HTTP:
|
|||
|
||||
urllib2.install_opener(urllib2.build_opener(*build))
|
||||
|
||||
|
||||
def _fetch(self):
|
||||
params = {} if self.request.params is None else self.request.params
|
||||
|
||||
|
@ -130,7 +125,7 @@ class HTTP:
|
|||
':'.join([self.request.auth_username, self.request.auth_password])).strip())
|
||||
|
||||
self.con = urllib2.urlopen(req, timeout=self.request.timeout)
|
||||
#self.con = urllib2.urlopen(req)
|
||||
# self.con = urllib2.urlopen(req)
|
||||
self.response.headers = self._headers(self.con.info())
|
||||
|
||||
if self.request.download:
|
||||
|
@ -141,7 +136,6 @@ class HTTP:
|
|||
if self.request.cookies:
|
||||
self.cookies.save(self.request.cookies)
|
||||
|
||||
|
||||
def _download(self):
|
||||
fd = open(self.request.download, 'wb')
|
||||
if self.request.progress:
|
||||
|
@ -173,7 +167,6 @@ class HTTP:
|
|||
|
||||
self.response.filename = self.request.download
|
||||
|
||||
|
||||
def _upload(self, upload, params):
|
||||
res = []
|
||||
boundary = mimetools.choose_boundary()
|
||||
|
@ -209,7 +202,6 @@ class HTTP:
|
|||
result.append('')
|
||||
return boundary, '\r\n'.join(result)
|
||||
|
||||
|
||||
def _headers(self, raw):
|
||||
headers = {}
|
||||
for line in raw.headers:
|
||||
|
@ -221,7 +213,6 @@ class HTTP:
|
|||
headers[tag] = value
|
||||
return headers
|
||||
|
||||
|
||||
def _progress(self, read, size, name):
|
||||
res = []
|
||||
if size < 0:
|
||||
|
@ -304,4 +295,3 @@ class HTTPResponse:
|
|||
else:
|
||||
args += ',body=None'
|
||||
return '%s(%s)' % (self.__class__.__name__, args)
|
||||
|
||||
|
|
|
@ -383,7 +383,6 @@ def get_unicode_from_response(r):
|
|||
except TypeError:
|
||||
return r.content
|
||||
|
||||
|
||||
# The unreserved URI characters (RFC 3986)
|
||||
UNRESERVED_SET = frozenset(
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
|
@ -519,6 +518,7 @@ def should_bypass_proxies(url):
|
|||
|
||||
return False
|
||||
|
||||
|
||||
def get_environ_proxies(url):
|
||||
"""Return a dict of environment proxies."""
|
||||
if should_bypass_proxies(url):
|
||||
|
@ -600,7 +600,6 @@ def parse_header_links(value):
|
|||
|
||||
return links
|
||||
|
||||
|
||||
# Null bytes; no need to recreate these on each call to guess_json_utf
|
||||
_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
|
||||
_null2 = _null * 2
|
||||
|
|
|
@ -2,13 +2,11 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
#import xbmc, xbmcgui, xbmcplugin, xbmcvfs
|
||||
import xbmcaddon
|
||||
# import xbmc, xbmcgui, xbmcplugin, xbmcvfs
|
||||
from tvdb import TvDb
|
||||
from tmdbs import TmDb
|
||||
from kinopoisks import KinoPoisk
|
||||
|
||||
|
||||
STATUS = {
|
||||
'moder': (40501, 'FFFF0000'),
|
||||
'check': (40502, 'FFFF0000'),
|
||||
|
@ -90,7 +88,7 @@ class Scrapers():
|
|||
scraper = TvDb(language)
|
||||
elif content == 'tmdb':
|
||||
scraper = TmDb(language)
|
||||
else: #if content == 'kinopoisk':
|
||||
else: # if content == 'kinopoisk':
|
||||
scraper = KinoPoisk(language)
|
||||
|
||||
name, search, year = item['label'], item['search'], item['year']
|
||||
|
|
|
@ -21,9 +21,6 @@
|
|||
import urllib
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import os
|
||||
import time
|
||||
|
||||
import SearcherABC
|
||||
|
||||
|
@ -66,7 +63,7 @@ class RuTrackerOrg(SearcherABC.SearcherABC):
|
|||
|
||||
def search(self, keyword):
|
||||
filesList = []
|
||||
url='http://rutracker.org/forum/tracker.php?nm=' + urllib.quote_plus(keyword)
|
||||
url = 'http://rutracker.org/forum/tracker.php?nm=' + urllib.quote_plus(keyword)
|
||||
|
||||
data = {'prev_my': '0',
|
||||
'prev_new': '0',
|
||||
|
@ -79,11 +76,11 @@ class RuTrackerOrg(SearcherABC.SearcherABC):
|
|||
response = self.makeRequest(url, data=data)
|
||||
if None != response and 0 < len(response):
|
||||
response = response.decode('cp1251').encode('utf8')
|
||||
#print response
|
||||
# print response
|
||||
if not self.check_login(response):
|
||||
response = self.makeRequest(url, data=data)
|
||||
response = response.decode('cp1251').encode('utf8')
|
||||
#print response
|
||||
# print response
|
||||
forums = [7, 187, 2090, 2221, 2091, 2092, 2093, 934, 505, 212, 2459, 1235, 185, 22, 941, 1666, 124, 1543,
|
||||
376, 709, 1577, 511, 656, 93, 905, 1576, 101, 100, 103, 572, 1670, 2198, 2199, 313, 2201, 312,
|
||||
2339, 314, 352, 549, 1213, 2109, 514, 2097, 4, 930, 2365, 1900, 521, 2258, 208, 539, 209, 484,
|
||||
|
@ -128,19 +125,19 @@ class RuTrackerOrg(SearcherABC.SearcherABC):
|
|||
|
||||
def getTorrentFile(self, url):
|
||||
self.load_cookie()
|
||||
cookie=None
|
||||
cookie = None
|
||||
for cookie in self.cookieJar:
|
||||
if cookie.name=='bb_data' and cookie.domain=='.rutracker.org':
|
||||
cookie = 'bb_data=' + cookie.value + '; bb_dl=' + re.search('(\d+)$',url).group(1)
|
||||
if cookie.name == 'bb_data' and cookie.domain == '.rutracker.org':
|
||||
cookie = 'bb_data=' + cookie.value + '; bb_dl=' + re.search('(\d+)$', url).group(1)
|
||||
break
|
||||
if not cookie:
|
||||
cookie = self.login() + '; bb_dl=' + re.search('(\d+)$',url).group(1)
|
||||
cookie = self.login() + '; bb_dl=' + re.search('(\d+)$', url).group(1)
|
||||
|
||||
referer = 'http://rutracker.org/forum/viewtopic.php?t=' + re.search('(\d+)$', url).group(1)
headers=[('Cookie', cookie), ('Referer', referer)]
headers = [('Cookie', cookie), ('Referer', referer)]
content = self.makeRequest(url, headers=headers)
if not self.check_login(content):
cookie = self.login() + '; bb_dl=' + re.search('(\d+)$',url).group(1)
cookie = self.login() + '; bb_dl=' + re.search('(\d+)$', url).group(1)
content = self.makeRequest(url, headers=[('Cookie', cookie), ('Referer', referer)])

return self.saveTorrentFile(url, content)

@@ -19,9 +19,7 @@
'''

import re
import os
import urllib
import tempfile
import sys

import SearcherABC

@@ -65,23 +63,24 @@ class T411FR(SearcherABC.SearcherABC):
headers = {('Origin', 'http://t411.io'),
('User-Agent',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 YaBrowser/14.10.2062.12061 Safari/537.36'),
('Referer', 'http://t411.io/'),('X-NewRelic-ID','x='),
('X-Requested-With','XMLHttpRequest'),}
('Referer', 'http://t411.io/'), ('X-NewRelic-ID', 'x='),
('X-Requested-With', 'XMLHttpRequest'), }

def search(self, keyword):
filesList = []
url='http://www.t411.io/torrents/search/?search=%s' % urllib.quote_plus(keyword.decode('utf-8').encode('cp1251'))
url+='&order=seeders&type=desc'
url = 'http://www.t411.io/torrents/search/?search=%s' % urllib.quote_plus(
keyword.decode('utf-8').encode('cp1251'))
url += '&order=seeders&type=desc'
response = self.makeRequest(url, headers=self.headers)
if None != response and 0 < len(response):
#self.cookieJar.save(ignore_discard=True)
#self.check_login(response)
#print response
# self.cookieJar.save(ignore_discard=True)
# self.check_login(response)
# print response
regex = '''<a href="//.+?" title="(.+?)">.+?<span class="up">.+?<a href="/torrents/nfo/\?id=(\d+)" class="ajax nfo"></a>.+?</td>.+?<td align="center">.+?</td>.+?<td align="center">.+?</td>.+?<td align="center">(.+?)</td>.+?<td align="center" class="up">(\d+)</td>.+?<td align="center" class="down">(\d+)</td>'''
for (title, link, size, seeds, leechers) in re.compile(regex, re.DOTALL).findall(response):
title=self.clear_title(title)
title = self.clear_title(title)
image = sys.modules["__main__"].__root__ + self.searchIcon
link = 'http://www.t411.io/torrents/download/?id='+link
link = 'http://www.t411.io/torrents/download/?id=' + link
filesList.append((
int(int(self.sourceWeight) * int(seeds)),
int(seeds), int(leechers), size,

@@ -92,11 +91,11 @@ class T411FR(SearcherABC.SearcherABC):
return filesList

def clear_title(self, s):
return self.stripHtml(self.unescape(s)).replace(' ',' ').replace(' ',' ').strip()
return self.stripHtml(self.unescape(s)).replace(' ', ' ').replace(' ', ' ').strip()

def check_login(self, response=None):
if None != response and 0 < len(response):
#print response
# print response
if re.compile('<input class="userInput"').search(response) or \
re.compile('start cache').search(response):
print 'T411FR Not logged!'

@@ -106,24 +105,24 @@ class T411FR(SearcherABC.SearcherABC):

def getTorrentFile(self, url):
content = self.makeRequest(url, headers=self.headers)
#print content
# print content
if not self.check_login(content):
content = self.makeRequest(url, headers=self.headers)
#return url
# return url
return self.saveTorrentFile(url, content)

def login(self):
data = {
'password': 'toraddon20',
'login': 'zombitorrent',
'remember':'1'
'remember': '1'
}
x=self.makeRequest(
'http://www.t411.io/users/auth/',data=data, headers=self.headers)
if re.search('{"status":"OK"',x):
x = self.makeRequest(
'http://www.t411.io/users/auth/', data=data, headers=self.headers)
if re.search('{"status":"OK"', x):
print 'LOGGED T411FR'
self.cookieJar.save(ignore_discard=True)
for cookie in self.cookieJar:
if cookie.name == 'authKey' and cookie.domain=='.t411.io':
if cookie.name == 'authKey' and cookie.domain == '.t411.io':
return 'authKey=' + cookie.value
return False
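For reference, a minimal standalone sketch of the cookie-based login flow that T411FR.login() implements: POST the form, then read the 'authKey' cookie back out of the jar. Only stdlib urllib/urllib2/cookielib are assumed; the credentials here are placeholders, not part of this commit.

import urllib
import urllib2
import cookielib

def t411_auth_key(user, password):
    # POST the login form; the opener keeps whatever cookies the site sets
    jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    data = urllib.urlencode({'login': user, 'password': password, 'remember': '1'})
    opener.open('http://www.t411.io/users/auth/', data).read()
    # the add-on treats the 'authKey' cookie as the whole session
    for cookie in jar:
        if cookie.name == 'authKey' and cookie.domain == '.t411.io':
            return 'authKey=' + cookie.value
    return False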
@@ -19,9 +19,7 @@
'''

import re
import os
import urllib
import tempfile
import sys

import SearcherABC

@@ -64,8 +62,8 @@ class TFileME(SearcherABC.SearcherABC):

def search(self, keyword):
filesList = []
url='http://tfile.me/forum/ssearch.php?q=%s' % urllib.quote_plus(keyword.decode('utf-8').encode('cp1251'))
url+='&c=2&f=4&f=1488&f=1379&f=1225&f=1331&f=1248&f=1197&f=1026&f=293&f=1227&f=577&f=298&f=297&f=290&f=299&f=230&f=303&f=292&f=1240&f=304&f=296&f=300&f=1332&f=1324&f=691&f=301&f=294&f=1241&f=498&f=367&f=574&f=1226&f=295&f=189&f=1525&f=1224&f=1388&f=1387&f=1276&f=1889&f=1917&f=1907&f=1908&f=1909&f=1910&f=1911&f=1890&f=1891&f=1892&f=1893&f=1912&f=1899&f=1894&f=1895&f=1903&f=1896&f=1897&f=1898&f=1900&f=1902&f=1901&f=1904&f=1905&f=1906&f=1913&f=15&f=1918&f=1374&f=1946&f=1579&f=1947&f=1242&f=1508&f=1165&f=1166&f=1245&f=1158&f=532&f=1167&f=1159&f=1244&f=1160&f=1173&f=1238&f=1678&f=1161&f=1320&f=1162&f=1246&f=496&f=1164&f=1163&f=1172&f=1243&f=1386&f=1312&f=1536&f=1919&f=1577&f=1989&f=1578&f=1554&f=1537&f=1538&f=1539&f=1540&f=1541&f=1542&f=1543&f=1555&f=1680&f=1544&f=1556&f=1545&f=1546&f=1547&f=1848&f=1548&f=1550&f=1620&f=1920&f=193&f=1968&f=1237&f=1420&f=1036&f=449&f=448&f=447&f=537&f=1170&f=37&f=1921&f=1323&f=1252&f=1685&f=697&f=172&f=311&f=183&f=130&f=1024&f=139&f=1023&f=179&f=392&f=308&f=342&f=1612&f=1015&f=96&f=353&f=997&f=285&f=154&f=1613&f=975&f=168&f=1849&f=1020&f=265&f=123&f=1614&f=1615&f=117&f=155&f=1611&f=1616&f=1617&f=152&f=105&f=312&f=127&f=1030&f=150&f=328&f=305&f=149&f=136&f=134&f=158&f=169&f=1421&f=768&f=767&f=309&f=377&f=1017&f=1590&f=1923&f=1591&f=1966&f=1592&f=1607&f=1593&f=1594&f=1595&f=1596&f=1597&f=1598&f=1599&f=1600&f=1844&f=1601&f=1602&f=1603&f=1604&f=1605&f=1681&f=17&f=1924&f=1415&f=1964&f=1416&f=1304&f=1146&f=1147&f=1156&f=1534&f=1142&f=29&f=85&f=1514&f=1148&f=1515&f=384&f=216&f=1149&f=232&f=1535&f=506&f=1517&f=1516&f=1000&f=1518&f=237&f=243&f=1150&f=244&f=239&f=197&f=236&f=1151&f=235&f=1152&f=234&f=1153&f=1018&f=1143&f=1563&f=1925&f=1564&f=1565&f=1566&f=1567&f=1568&f=1569&f=1570&f=1571&f=1572&f=1574&f=1575&f=1576&f=1926&f=175&f=1881&f=1256&f=1145&f=1140&f=1253&f=1157&f=727&f=1551&f=567&f=1254&f=219&f=568&f=974&f=495&f=743&f=494&f=401&f=731&f=499&f=500&f=538&f=206&f=1040&f=446&f=1005&f=210&f=203&f=207&f=204&f=1255&f=202&f=1141&f=16&f=1927&f=1380&f=1425&f=1438&f=1333&f=187&f=1062&f=1310&f=1059&f=1033&f=1509&f=1193&f=1195&f=1064&f=1063&f=1028&f=1058&f=1019&f=490&f=1397&f=1065&f=1419&f=1194&f=1070&f=274&f=1383&f=1334&f=1067&f=1068&f=1066&f=1069&f=1060&f=1282&f=19&f=1915&f=1872&f=1922&f=1284&f=1294&f=1301&f=1288&f=1291&f=1309&f=39&f=1285&f=1290&f=1306&f=1295&f=1300&f=1302&f=1287&f=1307&f=1292&f=1299&f=1297&f=1293&f=1888&f=1286&f=1298&f=1296&f=1519&f=1303&f=1527&g=&act=&y=&ql=&a=&d=&o=&size_min=0&size_max=0'
url = 'http://tfile.me/forum/ssearch.php?q=%s' % urllib.quote_plus(keyword.decode('utf-8').encode('cp1251'))
url += '&c=2&f=4&f=1488&f=1379&f=1225&f=1331&f=1248&f=1197&f=1026&f=293&f=1227&f=577&f=298&f=297&f=290&f=299&f=230&f=303&f=292&f=1240&f=304&f=296&f=300&f=1332&f=1324&f=691&f=301&f=294&f=1241&f=498&f=367&f=574&f=1226&f=295&f=189&f=1525&f=1224&f=1388&f=1387&f=1276&f=1889&f=1917&f=1907&f=1908&f=1909&f=1910&f=1911&f=1890&f=1891&f=1892&f=1893&f=1912&f=1899&f=1894&f=1895&f=1903&f=1896&f=1897&f=1898&f=1900&f=1902&f=1901&f=1904&f=1905&f=1906&f=1913&f=15&f=1918&f=1374&f=1946&f=1579&f=1947&f=1242&f=1508&f=1165&f=1166&f=1245&f=1158&f=532&f=1167&f=1159&f=1244&f=1160&f=1173&f=1238&f=1678&f=1161&f=1320&f=1162&f=1246&f=496&f=1164&f=1163&f=1172&f=1243&f=1386&f=1312&f=1536&f=1919&f=1577&f=1989&f=1578&f=1554&f=1537&f=1538&f=1539&f=1540&f=1541&f=1542&f=1543&f=1555&f=1680&f=1544&f=1556&f=1545&f=1546&f=1547&f=1848&f=1548&f=1550&f=1620&f=1920&f=193&f=1968&f=1237&f=1420&f=1036&f=449&f=448&f=447&f=537&f=1170&f=37&f=1921&f=1323&f=1252&f=1685&f=697&f=172&f=311&f=183&f=130&f=1024&f=139&f=1023&f=179&f=392&f=308&f=342&f=1612&f=1015&f=96&f=353&f=997&f=285&f=154&f=1613&f=975&f=168&f=1849&f=1020&f=265&f=123&f=1614&f=1615&f=117&f=155&f=1611&f=1616&f=1617&f=152&f=105&f=312&f=127&f=1030&f=150&f=328&f=305&f=149&f=136&f=134&f=158&f=169&f=1421&f=768&f=767&f=309&f=377&f=1017&f=1590&f=1923&f=1591&f=1966&f=1592&f=1607&f=1593&f=1594&f=1595&f=1596&f=1597&f=1598&f=1599&f=1600&f=1844&f=1601&f=1602&f=1603&f=1604&f=1605&f=1681&f=17&f=1924&f=1415&f=1964&f=1416&f=1304&f=1146&f=1147&f=1156&f=1534&f=1142&f=29&f=85&f=1514&f=1148&f=1515&f=384&f=216&f=1149&f=232&f=1535&f=506&f=1517&f=1516&f=1000&f=1518&f=237&f=243&f=1150&f=244&f=239&f=197&f=236&f=1151&f=235&f=1152&f=234&f=1153&f=1018&f=1143&f=1563&f=1925&f=1564&f=1565&f=1566&f=1567&f=1568&f=1569&f=1570&f=1571&f=1572&f=1574&f=1575&f=1576&f=1926&f=175&f=1881&f=1256&f=1145&f=1140&f=1253&f=1157&f=727&f=1551&f=567&f=1254&f=219&f=568&f=974&f=495&f=743&f=494&f=401&f=731&f=499&f=500&f=538&f=206&f=1040&f=446&f=1005&f=210&f=203&f=207&f=204&f=1255&f=202&f=1141&f=16&f=1927&f=1380&f=1425&f=1438&f=1333&f=187&f=1062&f=1310&f=1059&f=1033&f=1509&f=1193&f=1195&f=1064&f=1063&f=1028&f=1058&f=1019&f=490&f=1397&f=1065&f=1419&f=1194&f=1070&f=274&f=1383&f=1334&f=1067&f=1068&f=1066&f=1069&f=1060&f=1282&f=19&f=1915&f=1872&f=1922&f=1284&f=1294&f=1301&f=1288&f=1291&f=1309&f=39&f=1285&f=1290&f=1306&f=1295&f=1300&f=1302&f=1287&f=1307&f=1292&f=1299&f=1297&f=1293&f=1888&f=1286&f=1298&f=1296&f=1519&f=1303&f=1527&g=&act=&y=&ql=&a=&d=&o=&size_min=0&size_max=0'
headers = {('Origin', 'http://tfile.me'),
('User-Agent',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 YaBrowser/14.10.2062.12061 Safari/537.36'),

@@ -75,15 +73,15 @@ class TFileME(SearcherABC.SearcherABC):
if None != response and 0 < len(response):
response = response.decode('cp1251').encode('utf-8')
self.cookieJar.save(ignore_discard=True)
#self.check_login(response)
#print response
#bad_forums = [2,1,23,32,40,41]
# self.check_login(response)
# print response
# bad_forums = [2,1,23,32,40,41]
regex = '''<a href="/forum/viewforum\.php\?f=(\d+)">.+?<a href="/forum/viewtopic\.php\?t=.+?">(.+?)</a>.+?<a href="/forum/download\.php\?id=(\d+)">(.+?)</a>.+?class="sd">(\d+)</b>.+?class="lc">(\d+)'''
for (forum, title, link, size, seeds, leechers) in re.compile(regex, re.DOTALL).findall(response):
#if int(forum) not in bad_forums:
title=self.clear_title(title)
# if int(forum) not in bad_forums:
title = self.clear_title(title)
image = sys.modules["__main__"].__root__ + self.searchIcon
link = 'http://tfile.me/forum/download.php?id='+link
link = 'http://tfile.me/forum/download.php?id=' + link
filesList.append((
int(int(self.sourceWeight) * int(seeds)),
int(seeds), int(leechers), size,

@@ -94,11 +92,11 @@ class TFileME(SearcherABC.SearcherABC):
return filesList

def clear_title(self, s):
return self.stripHtml(self.unescape(s)).replace(' ',' ').replace(' ',' ').strip()
return self.stripHtml(self.unescape(s)).replace(' ', ' ').replace(' ', ' ').strip()

def check_login(self, response=None):
if None != response and 0 < len(response):
#print response
# print response
if re.compile('<input class="text" type="text" name="username"').search(response):
print 'TFileME Not logged!'
self.login()

@@ -109,21 +107,21 @@ class TFileME(SearcherABC.SearcherABC):
self.timeout(5)
self.check_login(self.makeRequest('http://tfile.me/'))
content = self.makeRequest(url)
#return url
# return url
return self.saveTorrentFile(url, content)

def login(self):
data = {
'password': 'torrenter',
'username': 'torrenterpl',
'login':'Вход'
'login': 'Вход'
}
x=self.makeRequest(
x = self.makeRequest(
'http://tfile.me/login/',
data
)
self.cookieJar.save(ignore_discard=True)
for cookie in self.cookieJar:
if cookie.name == 'phpbb2mysql_data' and cookie.domain=='.tfile.me':
if cookie.name == 'phpbb2mysql_data' and cookie.domain == '.tfile.me':
return 'phpbb2mysql_data=' + cookie.value
return False
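Both of these searchers build their query string the same way, so here is a short illustrative sketch of the keyword transcoding they rely on: the incoming UTF-8 keyword is re-encoded to cp1251 (the encoding these trackers expect) before being percent-encoded into the URL. The sample keyword is made up.

# -*- coding: utf-8 -*-
import urllib

keyword = 'название фильма'   # UTF-8 byte string, as Kodi hands it over
query = urllib.quote_plus(keyword.decode('utf-8').encode('cp1251'))
url = 'http://tfile.me/forum/ssearch.php?q=%s' % query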
@@ -21,7 +21,6 @@
import urllib
import re
import sys
import urllib2

import SearcherABC

@@ -69,12 +68,12 @@ class ThePirateBaySe(SearcherABC.SearcherABC):
response = self.makeRequest(url)

if None != response and 0 < len(response):
#print response
# print response
dat = re.compile(
r'<div class="detName">.+?">(.+?)</a>.+?<a href="(.+?)".+?<font class="detDesc">Uploaded .+?, Size (.+?), .+?</font>.+?<td align="right">(\d+?)</td>.+?<td align="right">(\d+?)</td>',
re.DOTALL).findall(response)
for (title, link, size, seeds, leechers) in dat:
torrentTitle = title #"%s [S\L: %s\%s]" % (title, seeds, leechers)
torrentTitle = title # "%s [S\L: %s\%s]" % (title, seeds, leechers)
size = size.replace(' ', ' ')
image = sys.modules["__main__"].__root__ + self.searchIcon
if not re.match('^https?\://.+', link) and not re.match('^magnet\:.+', link):
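For reference, a toy demonstration of how the re.DOTALL scrape above behaves; the sample HTML is invented here, and re.DOTALL is what lets the lazy '.+?' groups span the newlines inside a result row.

import re

html = '''<div class="detName"><a class="detLink" href="/torrent/1">Some.Movie.2014</a></div>
<a href="magnet:?xt=urn:btih:abc"></a>
<font class="detDesc">Uploaded 01-01 2014, Size 700 MiB, ULed by x</font>
<td align="right">12</td>
<td align="right">3</td>'''

regex = r'<div class="detName">.+?">(.+?)</a>.+?<a href="(.+?)".+?<font class="detDesc">Uploaded .+?, Size (.+?), .+?</font>.+?<td align="right">(\d+?)</td>.+?<td align="right">(\d+?)</td>'
print re.compile(regex, re.DOTALL).findall(html)
# -> [('Some.Movie.2014', 'magnet:?xt=urn:btih:abc', '700 MiB', '12', '3')]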
@@ -19,9 +19,7 @@
'''

import re
import os
import urllib
import tempfile
import sys

import SearcherABC

@@ -64,7 +62,8 @@ class KinoZalTV(SearcherABC.SearcherABC):

def search(self, keyword):
filesList = []
url='http://kinozal.tv/browse.php?s=%s&g=0&c=0&v=0&d=0&w=0&t=1&f=0' % urllib.quote_plus(keyword.decode('utf-8').encode('cp1251'))
url = 'http://kinozal.tv/browse.php?s=%s&g=0&c=0&v=0&d=0&w=0&t=1&f=0' % urllib.quote_plus(
keyword.decode('utf-8').encode('cp1251'))

headers = {('Origin', 'http://kinozal.tv'),
('User-Agent',

@@ -74,8 +73,8 @@ class KinoZalTV(SearcherABC.SearcherABC):
response = self.makeRequest(url, headers=headers)
if None != response and 0 < len(response):
response = response.decode('cp1251').encode('utf-8')
#print response
bad_forums = [2,1,23,32,40,41]
# print response
bad_forums = [2, 1, 23, 32, 40, 41]
regex = '''onclick="cat\((\d+)\);".+?<a href="/details\.php\?id=(\d+)".+?>(.+?)</a>.+?<td class='s'>(.+?)</td>.+?class='sl_s'>(\d+)</td>.+?class='sl_p'>(\d+)</td>'''
for (forum, topic, title, size, seeds, leechers) in re.compile(regex, re.DOTALL).findall(response):
if int(forum) not in bad_forums:

@@ -102,10 +101,10 @@ class KinoZalTV(SearcherABC.SearcherABC):
self.timeout(5)

content = self.makeRequest(url)
#print content
# print content
if not self.check_login(content):
content = self.makeRequest(url)
#print content
# print content

return self.saveTorrentFile(url, content)

@@ -113,7 +112,7 @@ class KinoZalTV(SearcherABC.SearcherABC):
data = {
'password': 'torrenter',
'username': 'torrenterpl',
'returnto:':''
'returnto:': ''
}
self.makeRequest(
'http://kinozal.tv/takelogin.php',

@@ -121,11 +120,11 @@ class KinoZalTV(SearcherABC.SearcherABC):
)
self.cookieJar.save(ignore_discard=True)
for cookie in self.cookieJar:
uid,passed=None,None
uid, passed = None, None
if cookie.name == 'uid':
uid=cookie.value
uid = cookie.value
if cookie.name == 'pass':
passed=cookie.value
passed = cookie.value
if uid and passed:
return 'uid=' + uid+'; pass='+ passed
return 'uid=' + uid + '; pass=' + passed
return False
@@ -1,28 +1,35 @@
# -*- coding: utf-8 -*-
import xbmcgui, Localization, sys, xbmc
import sys

import xbmcgui
import Localization
import xbmc

KEY_BUTTON_BACK = 275
KEY_KEYBOARD_ESC = 61467
ACTION_PREVIOUS_MENU = 10
ACTION_NAV_BACK = 92


class DialogXml(xbmcgui.WindowXMLDialog):
def onInit(self):
print "onInit(): Window Initialized"
localize=Localization.localize
color='[COLOR %s]%s[/COLOR]'
localize = Localization.localize
color = '[COLOR %s]%s[/COLOR]'
self.movie_label = self.getControl(32)
self.movie_label.setText(self.movieInfo['desc'])

if self.movieInfo.get('views'):
self.view_label = self.getControl(34)
self.view_label.setLabel(color % ('blue', localize('Views:'))+self.movieInfo['views'])
self.view_label.setLabel(color % ('blue', localize('Views:')) + self.movieInfo['views'])

self.view_label = self.getControl(35)
self.ratingcolor = 'green'
self.ratingint = int(self.movieInfo['rating'])
if(self.ratingint < 70):
if (self.ratingint < 70):
self.ratingcolor = 'red'
self.view_label.setLabel(color % ('blue', localize('Rating:'))+color % (self.ratingcolor, self.movieInfo['rating']))
self.view_label.setLabel(
color % ('blue', localize('Rating:')) + color % (self.ratingcolor, self.movieInfo['rating']))

self.movie_label = self.getControl(1)
self.movie_label.setLabel(self.movieInfo['title'])

@@ -61,18 +68,16 @@ class DialogXml(xbmcgui.WindowXMLDialog):

def RunPlugin(self, action):
if self.link:
exec_str='XBMC.RunPlugin(%s)' % \
exec_str = 'XBMC.RunPlugin(%s)' % \
('%s?action=%s&url=%s') % \
(sys.argv[0], action, self.link)
xbmc.executebuiltin(exec_str)


def onFocus(self, controlID):
#print "onFocus(): control %i" % controlID
# print "onFocus(): control %i" % controlID
pass


def doModal(self, movieInfo, url):
self.movieInfo = movieInfo
self.link=url
self.link = url
xbmcgui.WindowXMLDialog.doModal(self)
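The dialog above follows a common Kodi pattern: doModal() is overridden to stash extra data on the instance before the stock modal loop runs, so onInit() can read it once the window controls exist. A minimal sketch of the pattern; the window XML name, control id and payload below are hypothetical.

import xbmcgui

class InfoDialog(xbmcgui.WindowXMLDialog):
    def doModal(self, movieInfo, url):
        # stash the payload before the stock modal loop takes over
        self.movieInfo = movieInfo
        self.link = url
        xbmcgui.WindowXMLDialog.doModal(self)

    def onInit(self):
        # controls only exist once the window has been initialised
        self.getControl(32).setText(self.movieInfo['desc'])

# dialog = InfoDialog('DialogInfo.xml', addon_path)   # hypothetical skin file and path
# dialog.doModal({'desc': 'plot text'}, 'plugin://...?action=play')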
@@ -842,7 +842,7 @@ class AzureusConnection(AzureusLink):
raise NoEstablishedConnectionError

from xml.dom.minidom import parseString
from dopal.xmlutils import normalise_xml_structure, get_text_content
from dopal.xmlutils import normalise_xml_structure

# First step, convert the method data to XML.
xml_data = remote_method_call_as_xml(method_name, method_args,

@@ -1018,7 +1018,6 @@ class ExtendedAzureusConnection(AzureusConnection):

return dopal.utils.parse_azureus_version_string(az_version)


# Use of this name is deprecated, and this alias will be removed in later
# versions of DOPAL.
ReusableAzureusConnection = ExtendedAzureusConnection

@@ -22,7 +22,7 @@ from dopal.core import ExtendedAzureusConnection
from dopal.errors import AzMethodError, InvalidObjectIDError, \
RemoteMethodError, StaleObjectReferenceError, ConnectionlessObjectError, \
NonRefreshableConnectionlessObjectError, MissingRemoteAttributeError, \
NonRefreshableIncompleteObjectError, NonRefreshableObjectError
NonRefreshableObjectError
import dopal.utils


@@ -487,7 +487,6 @@ class RemoteAttributesMixin(object):
text = "'%s' object has no attribute '%s'"
raise AttributeError, text % (type(self).__name__, name)


def __setattr__(self, name, value):
if self.__protect_remote_attributes__ and not name.startswith('__'):
if name in self.__remote_attribute_names__:

@@ -18,7 +18,6 @@ import xbmc
import xbmcgui
import xbmcvfs


os.sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__))))

import dopal.main

@@ -51,7 +50,6 @@ class HTTP:
if not xbmcvfs.exists(self._dirname):
xbmcvfs.mkdir(self._dirname)


def fetch(self, request, **kwargs):
self.con, self.fd, self.progress, self.cookies, self.request = None, None, None, None, request

@@ -91,7 +89,6 @@ class HTTP:

return self.response


def _opener(self):

build = [urllib2.HTTPHandler()]

@@ -118,7 +115,6 @@ class HTTP:

urllib2.install_opener(urllib2.build_opener(*build))


def _fetch(self):
params = {} if self.request.params is None else self.request.params


@@ -153,7 +149,7 @@ class HTTP:
if self.request.download:
self._download()
else:
if not self.response.headers.get('content-encoding')=='gzip':
if not self.response.headers.get('content-encoding') == 'gzip':
self.response.body = self.con.read()
else:
buf = StringIO(self.con.read())

@@ -163,7 +159,6 @@ class HTTP:
if self.request.cookies:
self.cookies.save(self.request.cookies)


def _download(self):
fd = open(self.request.download, 'wb')
if self.request.progress:

@@ -195,7 +190,6 @@ class HTTP:

self.response.filename = self.request.download


def _upload(self, upload, params):
res = []
boundary = mimetools.choose_boundary()

@@ -231,7 +225,6 @@ class HTTP:
result.append('')
return boundary, '\r\n'.join(result)


def _headers(self, raw):
headers = {}
for line in raw.headers:

@@ -243,7 +236,6 @@ class HTTP:
headers[tag] = value
return headers


def _progress(self, read, size, name):
res = []
if size < 0:
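The content-encoding branch touched above boils down to the following sketch, assuming a urllib2-style response object: when the server answers with Content-Encoding: gzip, the raw body is wrapped in a StringIO buffer and decompressed with gzip.GzipFile instead of being used as-is.

import gzip
from StringIO import StringIO

def read_body(response):
    # response is a urllib2-style object: read() plus RFC822-style headers
    raw = response.read()
    if response.info().getheader('Content-Encoding') == 'gzip':
        return gzip.GzipFile(fileobj=StringIO(raw)).read()
    return raw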
@@ -756,11 +748,11 @@ class Transmission:
return True if res else None

def setprio_simple_multi(self, menu):
id=menu[0][0]
prio=menu[0][1]
res=None
id = menu[0][0]
prio = menu[0][1]
res = None

inds=[]
inds = []
for hash, action, ind in menu:
inds.append(int(ind))

@@ -857,27 +849,27 @@ class Deluge:
self.login = login
self.password = password

self.url = ['http://','https://'][int(url)] + host
self.url = ['http://', 'https://'][int(url)] + host
if port:
self.url += ':' + str(port)

self.http = HTTP()

def get_info(self):
obj = self.action({"method":"web.update_ui",
"params":[[],{}],"id":1})
obj = self.action({"method": "web.update_ui",
"params": [[], {}], "id": 1})
return obj

def list(self):
obj=self.get_info()
obj = self.get_info()
if obj is None:
return False

res = []
if len(obj['result'].get('torrents'))>0:
if len(obj['result'].get('torrents')) > 0:
for k in obj['result'].get('torrents').keys():
r=obj['result']['torrents'][k]
add={
r = obj['result']['torrents'][k]
add = {
'id': str(k),
'status': self.get_status(r['state']),
'name': r['name'],

@@ -895,34 +887,34 @@ class Deluge:
'add': r['time_added'],
'dir': r['save_path']
}
if len(r['files'])>1: add['dir']=os.path.join(r['save_path'],r['name'])
if len(r['files']) > 1: add['dir'] = os.path.join(r['save_path'], r['name'])
res.append(add)
return res

def listdirs(self):
obj = self.action({"method":"core.get_config","params":[],"id":5})
obj = self.action({"method": "core.get_config", "params": [], "id": 5})
if obj is None:
return False

try:
res = [obj['result'].get('download_location')]
except:
res=[None]
res = [None]
return res, res

def listfiles(self, id):
obj = self.get_info()
i=0
i = 0
if obj is None:
return None

res = []
obj=obj['result']['torrents'][id]
#print str(obj)
if len(obj['files'])==1:
strip_path=None
obj = obj['result']['torrents'][id]
# print str(obj)
if len(obj['files']) == 1:
strip_path = None
else:
strip_path=obj['name']
strip_path = obj['name']

for x in obj['files']:
if x['size'] >= 1024 * 1024 * 1024:

@@ -934,17 +926,18 @@ class Deluge:
else:
size = str(x['size']) + 'B'
if strip_path:
path=x['path'].lstrip(strip_path).lstrip('/')
path = x['path'].lstrip(strip_path).lstrip('/')
else:
path=x['path']
path = x['path']

if x.get('progress'):
percent=int(x['progress']*100)
elif obj.get('file_progress') and len(obj['file_progress'])>=i:
percent=int(obj['file_progress'][i]*100)
else:percent=0
percent = int(x['progress'] * 100)
elif obj.get('file_progress') and len(obj['file_progress']) >= i:
percent = int(obj['file_progress'][i] * 100)
else:
percent = 0

i+=1
i += 1
res.append([path, percent, x['index'], size])

return res

@@ -953,24 +946,26 @@ class Deluge:
obj = self.get_info()
if obj is None:
return None
res=obj['result']['torrents'][id]['file_priorities']
res = obj['result']['torrents'][id]['file_priorities']
return res

def add(self, torrent, dirname):
torrentFile=os.path.join(self.http._dirname,'deluge.torrent')
torrentFile = os.path.join(self.http._dirname, 'deluge.torrent')
if self.action({'method': 'core.add_torrent_file',
'params': [torrentFile,
base64.b64encode(torrent), {"download_path": dirname}],"id":3}) is None:
base64.b64encode(torrent), {"download_path": dirname}], "id": 3}) is None:
return None
return True

def add_url(self, torrent, dirname):
if re.match("^magnet\:.+$", torrent):
if self.action({'method': 'core.add_torrent_magnet', 'params':[torrent,
{'download_path': dirname}],"id":3}) is None:
if self.action({'method': 'core.add_torrent_magnet', 'params': [torrent,
{'download_path': dirname}],
"id": 3}) is None:
return None
else:
if self.action({"method": "core.add_torrent_url", "params":[torrent, {'download_path': dirname}],"id":3}) is None:
if self.action({"method": "core.add_torrent_url", "params": [torrent, {'download_path': dirname}],
"id": 3}) is None:
return None
return True
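For context, a sketch of the JSON-RPC payload that add() builds: Deluge's web API takes the raw .torrent body base64-encoded as the second core.add_torrent_file parameter. The helper name and paths below are placeholders, not part of the add-on.

import base64
import json

def add_torrent_payload(name, torrent_bytes, download_dir):
    # second parameter is the base64-encoded .torrent body
    return json.dumps({'method': 'core.add_torrent_file',
                       'params': [name, base64.b64encode(torrent_bytes), {'download_path': download_dir}],
                       'id': 3})

# payload = add_torrent_payload('deluge.torrent', open('some.torrent', 'rb').read(), '/downloads')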
@@ -979,48 +974,48 @@ class Deluge:

def setprio(self, id, ind):
i = -1
prios=self.get_prio(id)
prios = self.get_prio(id)

for p in prios:
i=i+1
if p==1:
i = i + 1
if p == 1:
prios.pop(i)
prios.insert(i,0)
prios.insert(i, 0)

prios.pop(int(ind))
prios.insert(int(ind),7)
prios.insert(int(ind), 7)

if self.action({"method": "core.set_torrent_file_priorities", "params":[id, prios],"id":6}) is None:
if self.action({"method": "core.set_torrent_file_priorities", "params": [id, prios], "id": 6}) is None:
return None

return True

def setprio_simple(self, id, prio, ind):
prios=self.get_prio(id)
prios = self.get_prio(id)

if ind!=None:
if ind != None:
prios.pop(int(ind))
if prio == '3':
prios.insert(int(ind),7)
prios.insert(int(ind), 7)
elif prio == '0':
prios.insert(int(ind),0)
prios.insert(int(ind), 0)

if self.action({"method": "core.set_torrent_file_priorities", "params":[id, prios],"id":6}) is None:
if self.action({"method": "core.set_torrent_file_priorities", "params": [id, prios], "id": 6}) is None:
return None
return True

def setprio_simple_multi(self, menu):
id=menu[0][0]
prios=self.get_prio(id)
id = menu[0][0]
prios = self.get_prio(id)

for hash, action, ind in menu:
prios.pop(int(ind))
if action == '3':
prios.insert(int(ind),7)
prios.insert(int(ind), 7)
elif action == '0':
prios.insert(int(ind),0)
prios.insert(int(ind), 0)

if self.action({"method": "core.set_torrent_file_priorities", "params":[id, prios],"id":6}) is None:
if self.action({"method": "core.set_torrent_file_priorities", "params": [id, prios], "id": 6}) is None:
return None

def action(self, request):

@@ -1049,23 +1044,23 @@ class Deluge:
return obj

def action_simple(self, action, id):
actions = {'start': {"method":"core.resume_torrent","params":[[id]],"id":4},
'stop': {"method":"core.pause_torrent","params":[[id]],"id":4},
'remove': {"method":"core.remove_torrent","params":[id, False],"id":4},
'removedata': {"method":"core.remove_torrent", "params":[id, True],"id":4}}
actions = {'start': {"method": "core.resume_torrent", "params": [[id]], "id": 4},
'stop': {"method": "core.pause_torrent", "params": [[id]], "id": 4},
'remove': {"method": "core.remove_torrent", "params": [id, False], "id": 4},
'removedata': {"method": "core.remove_torrent", "params": [id, True], "id": 4}}
obj = self.action(actions[action])
return True if obj else None

def get_auth(self):
params=json.dumps({"method":"auth.login","params":[self.password],"id":0})
params = json.dumps({"method": "auth.login", "params": [self.password], "id": 0})
response = self.http.fetch(self.url + '/json', method='POST', params=params, gzip=True,
headers={'X-Requested-With': 'XMLHttpRequest',
'Content-Type': 'application/json; charset=UTF-8'})
if response.error:
return None

auth=json.loads(response.body)
if auth["result"]==False:
auth = json.loads(response.body)
if auth["result"] == False:
return False
else:
r = re.compile('_session_id=([^;]+);').search(response.headers.get('set-cookie', ''))
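get_auth() above amounts to the following standalone sketch of the Deluge web UI handshake: POST auth.login with the web password, then pull the _session_id cookie out of the Set-Cookie header for later /json calls. Only stdlib urllib2/json/re are assumed; host and password are placeholders.

import json
import re
import urllib2

def deluge_session_id(base_url, password):
    params = json.dumps({'method': 'auth.login', 'params': [password], 'id': 0})
    request = urllib2.Request(base_url + '/json', params,
                              {'X-Requested-With': 'XMLHttpRequest',
                               'Content-Type': 'application/json; charset=UTF-8'})
    response = urllib2.urlopen(request)
    if not json.loads(response.read())['result']:
        return False
    # the web UI hands the session back as a _session_id cookie
    match = re.search('_session_id=([^;]+);', response.info().getheader('Set-Cookie', ''))
    return match.group(1) if match else None

# session_id = deluge_session_id('http://127.0.0.1:8112', 'deluge')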
@@ -1118,7 +1113,7 @@ class Vuze:
'download': float(getattr(getattr(r, 'stats'), 'downloaded')),
'upload': getattr(getattr(r, 'stats'), 'uploaded'),
# 'upspeed': r['rateUpload'],
#'downspeed': r['rateDownload'],
# 'downspeed': r['rateDownload'],
'ratio': float(r.stats.share_ratio) / 1000,
'eta': getattr(getattr(r, 'stats'), 'eta'),
'peer': getattr(getattr(r, 'scrape_result'), 'non_seed_count') + getattr(getattr(r, 'scrape_result'),