plugin.video.torrenter/resources/scrapers/cache.py

# -*- coding: utf-8 -*-
import os
import time
import pickle
import threading
import zipfile
import xbmc
import xbmcvfs
import xbmcgui
import Localization
from net import HTTP
try:
    from sqlite3 import dbapi2 as sqlite
except ImportError:
    from pysqlite2 import dbapi2 as sqlite
rtrCache_lock = threading.RLock()
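# rtrCache_lock serialises database setup in _connect(); the commented-out
# "with rtrCache_lock" guards in expire(), size() and flush() suggest the same
# lock was intended for those maintenance paths as well.

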
class Cache:
    def __init__(self, name, version, expire=0, size=0, step=100):
        self.name = name
        self.version = version
        self._connect()
        if expire:
            self.expire(expire)
        if size:
            self.size(size, step)
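
    # Contract used by get(): the callback must return a 2-tuple (status, data).
    # A boolean True status caches data with no expiry, a numeric status is a
    # time-to-live in seconds, and a falsy status returns data without caching.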
    def get(self, token, callback, *param):
        cur = self.db.cursor()
        cur.execute('select expire,data from cache where id=? limit 1', (token,))
        row = cur.fetchone()
        cur.close()
        if row:
            if row[0] and row[0] < int(time.time()):
                # entry has expired, fall through and refresh it
                pass
            else:
                try:
                    obj = pickle.loads(row[1])
                except:
                    # corrupt pickle, fall through and refresh it
                    pass
                else:
                    return obj
        response = callback(*param)
        if response[0]:
            obj = sqlite.Binary(pickle.dumps(response[1]))
            curtime = int(time.time())
            cur = self.db.cursor()
            if isinstance(response[0], bool):
                # boolean status: store without an expiry time
                cur.execute('replace into cache(id,addtime,expire,data) values(?,?,?,?)',
                            (token, curtime, None, obj))
            else:
                # numeric status: treat it as a time-to-live in seconds
                cur.execute('replace into cache(id,addtime,expire,data) values(?,?,?,?)',
                            (token, curtime, curtime + response[0], obj))
            self.db.commit()
            cur.close()
        return response[1]

    def expire(self, expire):
        # with rtrCache_lock:
        cur = self.db.cursor()
        cur.execute('delete from cache where addtime<?', (int(time.time()) - expire,))
        self.db.commit()
        cur.close()

    def size(self, size, step=100):
        # with rtrCache_lock:
        while True:
            if os.path.getsize(self.filename) < size:
                break
            cur = self.db.cursor()
            cur.execute('select id from cache order by addtime asc limit ?', (step,))
            rows = cur.fetchall()
            if not rows:
                cur.close()
                break
            cur.execute('delete from cache where id in (' + ','.join(len(rows) * '?') + ')', [x[0] for x in rows])
            self.db.commit()
            cur.close()

    def flush(self):
        # with rtrCache_lock:
        cur = self.db.cursor()
        cur.execute('delete from cache')
        self.db.commit()
        cur.close()
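
    # _connect() builds the cache directories under special://temp, opens the
    # SQLite file and checks the schema version stored in db_ver; on a fresh or
    # mismatched database it either downloads a prebuilt copy via first_time()
    # or creates an empty schema.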
    def _connect(self):
        with rtrCache_lock:
            dirname = xbmc.translatePath('special://temp')
            for subdir in ('xbmcup', 'plugin.video.torrenter'):
                dirname = os.path.join(dirname, subdir)
                if not xbmcvfs.exists(dirname):
                    xbmcvfs.mkdir(dirname)
            self.filename = os.path.join(dirname, self.name)
            first = False
            if not xbmcvfs.exists(self.filename):
                first = True
            self.db = sqlite.connect(self.filename, check_same_thread=False)
            if not first:
                # existing database: drop it if the stored schema version differs
                cur = self.db.cursor()
                try:
                    cur.execute('select version from db_ver')
                    row = cur.fetchone()
                    if not row or float(row[0]) != self.version:
                        cur.execute('drop table cache')
                        cur.execute('drop table if exists db_ver')
                        first = True
                except:
                    cur.execute('drop table cache')
                    first = True
                self.db.commit()
                cur.close()
            if first and not self.first_time():
                # no prebuilt database was downloaded, create an empty schema
                cur = self.db.cursor()
                cur.execute('pragma auto_vacuum=1')
                cur.execute('create table cache(id varchar(255) unique, addtime integer, expire integer, data blob)')
                cur.execute('create index time on cache(addtime asc)')
                cur.execute('create table db_ver(version real)')
                cur.execute('insert into db_ver(version) values(?)', (self.version,))
                self.db.commit()
                cur.close()

    def first_time(self):
        scrapers = {'tvdb': 'TheTVDB.com', 'tmdb': 'TheMovieDB.org', 'kinopoisk': 'KinoPoisk.ru'}
        ok = xbmcgui.Dialog().yesno(Localization.localize('Content Lists'),
                                    Localization.localize('Do you want to preload full metadata?') + ' (%s)' % (
                                        scrapers[os.path.basename(self.filename).split('.')[0]]),
                                    Localization.localize('It is highly recommended!'))
        if ok:
            return self.download()
        else:
            return False

    def download(self):
        dirname = os.path.dirname(self.filename)
        zipname = os.path.basename(self.filename).replace('.db', '') + '.zip'
        url = 'http://www.tat-store.ru/torrenter/' + zipname
        self.http = HTTP()
        response = self.http.fetch(url, download=os.path.join(dirname, zipname), progress=True)
        if response.error:
            return False
        try:
            filezip = zipfile.ZipFile(os.path.join(dirname, zipname), 'r')
            filezip.extractall(dirname)
            filezip.close()
        except:
            return False
        return True
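

# Example usage (illustrative sketch only; the database name, TTL values and
# the fetch helper below are hypothetical, not part of this module):
#
#   def fetch_movie(movie_id):
#       data = remote_api_lookup(movie_id)          # hypothetical network call
#       return (7 * 24 * 3600, data) if data else (False, None)
#
#   cache = Cache('tmdb.db', 1.0, expire=30 * 24 * 3600, size=10 * 1024 * 1024)
#   info = cache.get('movie:%s' % movie_id, fetch_movie, movie_id)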