fix nothing

pull/1/head
DiMartinoXBMC 2015-12-14 23:08:38 +03:00
parent ed8e9b201c
commit 43b53abba7
10 changed files with 672 additions and 4852 deletions

View File

@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
</project>
<component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false">
<file url="PROJECT" charset="UTF-8" />
</component>
</project>

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="plugin.video.torrenter" name="Torrenter" version="2.3.8c" provider-name="vadim.skorba, DiMartino">
<addon id="plugin.video.torrenter" name="Torrenter" version="2.3.8g" provider-name="vadim.skorba, DiMartino">
<requires>
<import addon="xbmc.python" version="2.1.0"/>
<import addon="script.module.libtorrent"/>

File diff suppressed because it is too large Load Diff

View File

@ -1,828 +0,0 @@
#!/usr/bin/env python
import time
import sys
import argparse
import os.path
from threading import Thread
import re
import urlparse
import BaseHTTPServer as htserver
import types
import logging
import logging.handlers
import traceback
import urllib
import SocketServer
import socket
import pickle
import json
import shutil
from cachebt import CacheBT
from common import AbstractFile, Hasher, BaseMonitor, BaseClient, Resolver
logging.basicConfig()
logger = logging.getLogger()
INITIAL_TRACKERS = ['udp://tracker.openbittorrent.com:80',
'udp://tracker.istole.it:80',
'udp://open.demonii.com:80',
'udp://tracker.coppersurfer.tk:80',
'udp://tracker.leechers-paradise.org:6969',
'udp://exodus.desync.com:6969',
'udp://tracker.publicbt.com:80']
VIDEO_EXTS = {'.avi': 'video/x-msvideo', '.mp4': 'video/mp4', '.mkv': 'video/x-matroska',
'.m4v': 'video/mp4', '.mov': 'video/quicktime', '.mpg': 'video/mpeg', '.ogv': 'video/ogg',
'.ogg': 'video/ogg', '.webm': 'video/webm', '.ts': 'video/mp2t', '.3gp': 'video/3gpp'}
RANGE_RE = re.compile(r'bytes=(\d+)-')
# offset from end to download first
FILE_TAIL = 10000
class x:
    # NOTE(review): dead placeholder class — never referenced anywhere in
    # this file; presumably left over from debugging. Candidate for removal.
    lol=''
def parse_range(range):  # @ReservedAssignment
    """Parse an HTTP Range header value of the form 'bytes=N-'.

    Returns the start offset N, or 0 when the header is missing,
    malformed, or does not specify a start offset.
    """
    if range:
        # same pattern as module-level RANGE_RE; matched inline so the
        # function is self-contained (re caches compiled patterns)
        m = re.match(r'bytes=(\d+)-', range)
        if m:
            try:
                return int(m.group(1))
            except ValueError:
                # FIX: was a bare 'except:' which would also swallow
                # KeyboardInterrupt/SystemExit
                pass
    return 0
class StreamServer(SocketServer.ThreadingMixIn, htserver.HTTPServer):
    """Threaded HTTP server that streams a single torrent-backed file.

    Runs handle_request in a polling loop on a daemon thread so it can
    be shut down cooperatively via stop().
    """
    daemon_threads = True

    def __init__(self, address, handler_class, tfile=None, allow_range=True, status_fn=None):
        # tfile - file object to serve (may be supplied later via set_file)
        # allow_range - advertise and honour HTTP Range requests
        # status_fn - optional callable returning a dict for the /status endpoint
        htserver.HTTPServer.__init__(self, address, handler_class)
        self.file = tfile
        self._running = True
        self.allow_range = allow_range
        self.status_fn = status_fn

    def stop(self):
        # Cooperative shutdown: the serve() loop checks _running each pass.
        self._running = False

    def set_file(self, f):
        # Set/replace the file being served.
        self.file = f

    def serve(self, w):
        # Poll for requests (handle_request honours self.timeout) until stopped;
        # w is the pause between polls.
        while self._running:
            try:
                self.handle_request()
                time.sleep(w)
            except Exception, e:
                print >> sys.stderr, str(e)

    def run(self):
        # Start serving on a background daemon thread.
        self.timeout = 0.5
        t = Thread(target=self.serve, args=[self.timeout], name='HTTP Server')
        t.daemon = True
        t.start()

    def handle_error(self, request, client_address):
        """Handle an error gracefully. May be overridden.
        The default is to print a traceback and continue.
        """
        _, e, _ = sys.exc_info()
        if isinstance(e, socket.error) and e.errno == 32:
            # errno 32 == EPIPE: the client closed the connection mid-stream,
            # which is routine for media players seeking — log quietly.
            logger.debug("Socket disconnect for client %s", client_address)
            # pprint.pprint(e)
        else:
            logger.exception("HTTP Server Error")
            traceback.print_exc()
class BTFileHandler(htserver.BaseHTTPRequestHandler):
    """Request handler streaming the server's single file.

    Serves GET/HEAD on '/<file path>' (with optional Range support) plus
    a JSON '/status' endpoint when the server was given a status_fn.
    """
    protocol_version = 'HTTP/1.1'

    def do_GET(self):
        # do_HEAD(only_header=False) sends the headers (and the /status
        # body, if that was the request) and returns True only when the
        # served file's body should be streamed.
        if self.do_HEAD(only_header=False):
            with self.server.file.create_cursor(self._offset) as f:
                send_something = False
                while True:
                    buf = f.read(1024)
                    if not send_something and logger.level <= logging.DEBUG:
                        logger.debug('Start sending data')
                        send_something = True
                    if buf:
                        self.wfile.write(buf)
                    else:
                        # empty read == end of file reached
                        if logger.level <= logging.DEBUG:
                            logger.debug('Finished sending data')
                        break

    def _file_info(self):
        # Return (size, mime) for the served file; falls back to
        # application/octet-stream when the extension is unknown.
        size = self.server.file.size
        ext = os.path.splitext(self.server.file.path)[1]
        mime = (self.server.file.mime if hasattr(self.server.file, 'mime') else None) or VIDEO_EXTS.get(ext)
        if not mime:
            mime = 'application/octet-stream'
        return size, mime

    def do_HEAD(self, only_header=True):
        """Send response headers for the current request.

        Returns True when the request addresses the served file (caller
        may then stream the body), False for /status, None (falsy) for
        unknown paths.
        """
        parsed_url = urlparse.urlparse(self.path)
        if parsed_url.path == "/status" and self.server.status_fn:
            s = self.server.status_fn()
            status = json.dumps(s)
            self.send_response(200, 'OK')
            self.send_header('Content-Type', 'application/json')
            self.send_header('Content-Length', len(status))
            self._finish_header(only_header)
            if not only_header:
                self.wfile.write(status)
            return False
        elif self.server.file and urllib.unquote(parsed_url.path) == '/' + self.server.file.path:
            self._offset = 0
            size, mime = self._file_info()
            range = None  # @ReservedAssignment
            if self.server.allow_range:
                # parse_range returns 0 when no/invalid Range header; 0 is
                # treated as "no range" below because 0 == False in Python.
                range = parse_range(self.headers.get('Range', None))  # @ReservedAssignment
                if range not in [None, False]:
                    self._offset = range
                    range = (range, size - 1, size)  # @ReservedAssignment
                    logger.debug('Request range %s - (header is %s', range, self.headers.get('Range', None))
            self.send_resp_header(mime, size, range, only_header)
            return True
        else:
            logger.error('Requesting wrong path %s, but file is %s', parsed_url.path, '/' + self.server.file.path)
            self.send_error(404, 'Not Found')

    def send_resp_header(self, cont_type, cont_length, range=False, only_header=False):  # @ReservedAssignment
        # Send status line plus content/DLNA headers; 'range' is either
        # falsy (full content, 200) or a (start, end, size) tuple (206).
        logger.debug('range is %s' % str(range))
        if self.server.allow_range and range not in [None, False]:
            self.send_response(206, 'Partial Content')
        else:
            self.send_response(200, 'OK')
        self.send_header('Content-Type', cont_type)
        # DLNA headers so smart TVs / renderers accept the stream
        self.send_header('transferMode.dlna.org', 'Streaming')
        self.send_header('contentFeatures.dlna.org',
                         'DLNA.ORG_OP=01;DLNA.ORG_CI=0;DLNA.ORG_FLAGS=01700000000000000000000000000000')
        if self.server.allow_range:
            self.send_header('Accept-Ranges', 'bytes')
        else:
            self.send_header('Accept-Ranges', 'none')
        if self.server.allow_range and range not in [None, False]:
            if isinstance(range, (types.TupleType, types.ListType)) and len(range) == 3:
                self.send_header('Content-Range', 'bytes %d-%d/%d' % range)
                self.send_header('Content-Length', range[1] - range[0] + 1)
            else:
                raise ValueError('Invalid range value')
        else:
            # NOTE(review): this branch formats 'range' (False/None) with %d —
            # False renders as 0 but None would raise TypeError; sending a
            # Content-Range header with a 200 response is also unusual. Confirm.
            self.send_header('Content-Range', 'bytes %d-%d/%d' % (range, cont_length-1, cont_length))
            self.send_header('Content-Length', cont_length)
        self._finish_header(only_header)

    def _finish_header(self, only_header):
        self.send_header('Connection', 'close')
        # NOTE(review): end_headers() is skipped when only_header is True,
        # i.e. for plain HEAD requests the header block is never terminated —
        # looks inverted; confirm intended behavior.
        if not only_header: self.end_headers()

    def log_message(self, format, *args):  # @ReservedAssignment
        # Route BaseHTTPServer's access logging into our debug logger.
        logger.debug(format, *args)
class BTClient(BaseClient):
def __init__(self, path_to_store,
args=None,
state_file="",
lt=None,
**kwargs):
super(BTClient, self).__init__(path_to_store, args=args)
self.lt=lt
self._cache = CacheBT(path_to_store, self.lt)
self._torrent_params = {'save_path': path_to_store,
'storage_mode': self.lt.storage_mode_t.storage_mode_sparse
}
if not state_file:
state_file=os.path.join(path_to_store,'.btclient_state')
self._state_file = os.path.expanduser(state_file)
self._ses = self.lt.session()
if os.path.exists(self._state_file):
with open(self._state_file) as f:
state = pickle.load(f)
self._ses.load_state(state)
# self._ses.set_alert_mask(self.lt.alert.category_t.progress_notification)
if args:
s = self._ses.get_settings()
s['download_rate_limit'] = int(round(args.bt_download_limit * 1024))
s['upload_rate_limit'] = int(round(args.bt_upload_limit * 1024))
self._ses.set_settings(s)
self._ses.listen_on(args.listen_port_min, args.listen_port_max)
self.content_id=args.content_id
else:
self._ses.listen_on(6881, 6891)
self._start_services()
self._th = None
self._monitor.add_listener(self._check_ready)
self._dispatcher = BTClient.Dispatcher(self, lt=self.lt)
self._dispatcher.add_listener(self._update_ready_pieces)
self._hash = None
self._url = None
if args and args.debug_log and args.trace:
self.add_monitor_listener(self.debug_download_queue)
self.add_dispatcher_listener(self.debug_alerts)
@property
def is_file_complete(self):
pcs = self._th.status().pieces[self._file.first_piece:self._file.last_piece + 1]
return all(pcs)
def _update_ready_pieces(self, alert_type, alert):
if alert_type == 'read_piece_alert' and self._file:
self._file.update_piece(alert.piece, alert.buffer)
def _check_ready(self, s, **kwargs):
if s.state in [3, 4, 5] and not self._file and s.progress > 0:
try:
self._meta_ready(self._th.torrent_file())
except:
self._meta_ready(self._th.get_torrent_info())
logger.debug('Got torrent metadata and start download')
self.hash = True
self.hash = Hasher(self._file, self._on_file_ready)
def _choose_file(self, files, i):
if not i and i!=0:
videos = filter(lambda f: VIDEO_EXTS.has_key(os.path.splitext(f.path)[1]), files)
if not videos:
raise Exception('No video files in torrent')
f = sorted(videos, key=lambda f: f.size)[-1]
i = files.index(f)
f.index = i
f=files[i]
f.index = i
return f
def _meta_ready(self, meta):
fs = meta.files()
files = fs if isinstance(fs, list) else [fs.at(i) for i in xrange(fs.num_files())]
f = self._choose_file(files, self.content_id)
fmap = meta.map_file(f.index, 0, 1)
self._file = BTFile(f.path, self._base_path, f.index, f.size, fmap, meta.piece_length(),
self.prioritize_piece)
self.prioritize_file()
print ('File %s pieces (pc=%d, ofs=%d, sz=%d), total_pieces=%d, pc_length=%d' %
(f.path, fmap.piece, fmap.start, fmap.length,
meta.num_pieces(), meta.piece_length()))
try:
meta = self._th.torrent_file()
except:
meta=self._th.get_torrent_info()
self._cache.file_complete(meta,
self._url if self._url and self._url.startswith('http') else None)
def prioritize_piece(self, pc, idx):
piece_duration = 1000
min_deadline = 2000
dl = idx * piece_duration + min_deadline
self._th.set_piece_deadline(pc, dl, self.lt.deadline_flags.alert_when_available)
logger.debug("Set deadline %d for piece %d", dl, pc)
# we do not need to download pieces that are lower then current index, but last two pieces are special because players sometime look at end of file
if idx == 0 and (self._file.last_piece - pc) > 2:
for i in xrange(pc - 1):
self._th.piece_priority(i, 0)
self._th.reset_piece_deadline(i)
def prioritize_file(self):
try:
meta = self._th.torrent_file()
except:
meta=self._th.get_torrent_info()
priorities = [1 if i >= self._file.first_piece and i <= self.file.last_piece else 0 \
for i in xrange(meta.num_pieces())]
self._th.prioritize_pieces(priorities)
def encrypt(self):
# Encryption settings
print 'Encryption enabling...'
try:
encryption_settings = self.lt.pe_settings()
encryption_settings.out_enc_policy = self.lt.enc_policy(self.lt.enc_policy.forced)
encryption_settings.in_enc_policy = self.lt.enc_policy(self.lt.enc_policy.forced)
encryption_settings.allowed_enc_level = self.lt.enc_level.both
encryption_settings.prefer_rc4 = True
self._ses.set_pe_settings(encryption_settings)
print 'Encryption on!'
except Exception, e:
print 'Encryption failed! Exception: ' + str(e)
pass
@property
def unique_file_id(self):
try:
meta = self._th.torrent_file()
except:
meta=self._th.get_torrent_info()
return str(meta.info_hash())
@property
def pieces(self):
return self._th.status().pieces
def add_dispatcher_listener(self, cb):
self._dispatcher.add_listener(cb)
def remove_dispacher_listener(self, cb):
self._dispatcher.remove_listener(cb)
def remove_all_dispatcher_listeners(self):
self._dispatcher.remove_all_listeners()
def info_from_file(self, uri):
if os.access(uri, os.R_OK):
e = self.lt.bdecode(open(uri, 'rb').read())
info = self.lt.torrent_info(e)
tp = {'ti': info}
resume_data = self._cache.get_resume(info_hash=str(info.info_hash()))
if resume_data:
tp['resume_data'] = resume_data
return tp
raise ValueError('Invalid torrent path %s' % uri)
def start_url(self, uri):
if self._th:
raise Exception('Torrent is already started')
if uri.startswith('http://') or uri.startswith('https://'):
self._url = uri
stored = self._cache.get_torrent(url=uri)
if stored:
tp = self.info_from_file(stored)
else:
tp = {'url': uri}
resume_data = self._cache.get_resume(url=uri)
if resume_data:
tp['resume_data'] = resume_data
elif uri.startswith('magnet:'):
self._url = uri
stored = self._cache.get_torrent(info_hash=CacheBT.hash_from_magnet(uri))
if stored:
tp = self.info_from_file(stored)
else:
tp = {'url': uri}
resume_data = self._cache.get_resume(info_hash=CacheBT.hash_from_magnet(uri))
if resume_data:
tp['resume_data'] = resume_data
elif os.path.isfile(uri):
tp = self.info_from_file(uri)
else:
raise ValueError("Invalid torrent %s" % uri)
tp.update(self._torrent_params)
self._th = self._ses.add_torrent(tp)
for tr in INITIAL_TRACKERS:
self._th.add_tracker({'url': tr})
self._th.set_sequential_download(True)
time.sleep(1)
self._th.force_dht_announce()
self._monitor.start()
self._dispatcher.do_start(self._th, self._ses)
def stop(self):
BaseClient.stop(self)(self)
self._dispatcher.stop()
self._dispatcher.join()
def _start_services(self):
self._ses.add_dht_router('router.bittorrent.com', 6881)
self._ses.add_dht_router('router.utorrent.com', 6881)
self._ses.add_dht_router('router.bitcomet.com', 6881)
self._ses.start_dht()
self._ses.start_lsd()
self._ses.start_upnp()
self._ses.start_natpmp()
def _stop_services(self):
self._ses.stop_natpmp()
self._ses.stop_upnp()
self._ses.stop_lsd()
self._ses.stop_dht()
def save_state(self):
state = self._ses.save_state()
with open(self._state_file, 'wb') as f:
pickle.dump(state, f)
def save_resume(self):
if self._th.need_save_resume_data() and self._th.is_valid() and self._th.status().has_metadata:
r = BTClient.ResumeData(self)
start = time.time()
while (time.time() - start) <= 5:
if r.data or r.failed:
break
time.sleep(0.1)
if r.data:
logger.debug('Savig fast resume data')
self._cache.save_resume(self.unique_file_id, self.lt.bencode(r.data))
else:
logger.warn('Fast resume data not available')
def close(self):
self.remove_all_dispatcher_listeners()
self._monitor.stop()
self._cache.close()
if self._ses:
self._ses.pause()
if self._th:
self.save_resume()
self.save_state()
self._stop_services()
try:
self._ses.remove_torrent(self._th)
except:
print 'RuntimeError: invalid torrent handle used'
BaseClient.close(self)
@property
def status(self):
if self._th:
s = self._th.status()
if self._file:
pieces = s.pieces[self._file.first_piece:self._file.last_piece]
if len(pieces)>0:
progress = float(sum(pieces)) / len(pieces)
else:
progress = 0
else:
progress = 0
size = self._file.size if self._file else 0
s.desired_rate = self._file.byte_rate if self._file and progress > 0.003 else 0
s.progress_file = progress
s.file_size = size
return s
class ResumeData(object):
def __init__(self, client):
self.data = None
self.failed = False
client.add_dispatcher_listener(self._process_alert)
client._th.save_resume_data()
def _process_alert(self, t, alert):
if t == 'save_resume_data_failed_alert':
logger.debug('Fast resume data generation failed')
self.failed = True
elif t == 'save_resume_data_alert':
self.data = alert.resume_data
class Dispatcher(BaseMonitor):
def __init__(self, client, lt=None):
super(BTClient.Dispatcher, self).__init__(client, name='Torrent Events Dispatcher')
self.lt=lt
def do_start(self, th, ses):
self._th = th
self._ses = ses
self.start()
def run(self):
if not self._ses:
raise Exception('Invalid state, session is not initialized')
while (self._running):
a = self._ses.wait_for_alert(1000)
if a:
alerts = self._ses.pop_alerts()
for alert in alerts:
with self._lock:
for cb in self._listeners:
if "udp_error_alert" not in self.lt.alert.what(alert):
cb(self.lt.alert.what(alert), alert)
STATE_STR = ['queued', 'checking', 'downloading metadata',
'downloading', 'finished', 'seeding', 'allocating', 'checking fastresume']
def print_status(self, s, client):
if self._th:
state_str = ['queued', 'checking', 'downloading metadata',
'downloading', 'finished', 'seeding', 'allocating', 'checking fastresume']
print('[%s] %.2f%% complete (down: %.1f kb/s up: %.1f kB/s peers: %d) %s' %
(self.lt.version, s.progress * 100, s.download_rate / 1000, s.upload_rate / 1000,
s.num_peers, state_str[s.state]))
def get_normalized_status(self):
s = self.status
if self._file:
pieces = s.pieces[self._file.first_piece: self._file.last_piece + 1]
downloaded = reduce(lambda s, x: s + (x and 1 or 0) * self._file.piece_size, pieces[:-1], 0)
if pieces[-1]:
rem = self._file.size % self._file.piece_size
downloaded += rem if rem else self._file.piece_size
else:
downloaded = 0
return {'source_type': 'bittorrent',
'state': BTClient.STATE_STR[s.state],
'downloaded': downloaded,
'total_size': s.file_size,
'download_rate': s.download_rate,
'upload_rate': s.upload_rate,
'desired_rate': s.desired_rate,
'piece_size': self._file.piece_size if self._file else 0,
'progress': s.progress_file,
# BT specific
'seeds_connected': s.num_seeds,
'seeds_total': s.num_complete,
'peers_connected': s.num_peers,
'peers_total': s.num_incomplete,
'num_pieces': s.num_pieces,
}
def debug_download_queue(self, s, client):
if s.state != 3:
return
download_queue = self._th.get_download_queue()
if self.file:
first = self.file.first_piece
else:
first = 0
q = map(lambda x: x['piece_index'] + first, download_queue)
logger.debug('Download queue: %s', q)
def debug_alerts(self, type, alert):
logger.debug("Alert %s - %s", type, alert)
class BTFile(AbstractFile):
    """The file selected for playback inside the torrent.

    Knows which global torrent pieces cover this file and forwards
    piece-prioritization requests to the client-supplied callback.
    """

    def __init__(self, path, base, index, size, fmap, piece_size, prioritize_fn):
        AbstractFile.__init__(self, path, base, size, piece_size)
        self.index = index
        # global piece numbers of the first and last pieces of this file
        self.first_piece = fmap.piece
        span = max(size - 1 + fmap.start, 0)
        self.last_piece = self.first_piece + span // piece_size
        self.offset = fmap.start
        self._prioritize_fn = prioritize_fn

    def prioritize_piece(self, n, idx):
        # delegate to the client (BTClient.prioritize_piece)
        self._prioritize_fn(n, idx)
class LangAction(argparse.Action):
    """argparse action validating a 3-letter subtitles language code."""

    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        # a language code is a single value; multiple values make no sense
        if nargs is not None:
            raise ValueError("nargs not allowed")
        argparse.Action.__init__(self, option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        if len(values) == 3:
            setattr(namespace, self.dest, values)
        else:
            raise ValueError('subtitles language should be 3 letters code')
def main(args=None):
    """Command-line entry point.

    args may be a pre-built argparse.Namespace; when falsy, options are
    parsed from sys.argv. Returns the effective namespace. The actual
    stream() calls are commented out in this revision.
    """
    # (removed a large commented-out debug Namespace example here)
    if not args:
        p = argparse.ArgumentParser()
        p.add_argument("url", help="Torrent file, link to file or magnet link")
        p.add_argument("-d", "--directory", default="./", help="directory to save download files")
        p.add_argument("-p", "--player", default="mplayer", choices=["mplayer", "vlc"], help="Video player")
        p.add_argument("--port", type=int, default=5001, help="Port for http server")
        p.add_argument("--debug-log", default='', help="File for debug logging")
        p.add_argument("--stdin", action='store_true', help='sends video to player via stdin (no seek then)')
        p.add_argument("--print-pieces", action="store_true",
                       help="Prints map of downloaded pieces and ends (X is downloaded piece, O is not downloaded)")
        p.add_argument("-s", "--subtitles", action=LangAction,
                       help="language for subtitle 3 letter code eng,cze ... (will try to get subtitles from opensubtitles.org)")
        p.add_argument("--stream", action="store_true", help="just file streaming, but will not start player")
        p.add_argument("--no-resume", action="store_true", help="Do not resume from last known position")
        p.add_argument("-q", "--quiet", action="store_true", help="Quiet - did not print progress to stdout")
        p.add_argument('--delete-on-finish', action="store_true", help="Delete downloaded file when program finishes")
        p.add_argument('--clear-older', type=int, default=0,
                       help="Deletes files older then x days from download directory, if set will slowdown start of client")
        p.add_argument('--bt-download-limit', type=int, default=0, help='Download limit for torrents kB/s')
        p.add_argument('--bt-upload-limit', type=int, default=0, help='Upload limit for torrents kB/s')
        p.add_argument('--listen-port-min', type=int, default=6881, help='Bitorrent input port range - minimum port')
        p.add_argument('--listen-port-max', type=int, default=6891, help='Bitorrent input port range - maximum port')
        p.add_argument('--choose-subtitles', action="store_true",
                       help="Always manually choose subtitles (otherwise will try to use best match in many cases)")
        p.add_argument('--trace', action='store_true', help='More detailed debug logging')
        args = p.parse_args(args)
    # str(args)
    # configure logging: file handler at DEBUG, otherwise console at CRITICAL
    if args.debug_log:
        logger.setLevel(logging.DEBUG)
        h = logging.handlers.RotatingFileHandler(args.debug_log)
        logger.addHandler(h)
    else:
        logger.setLevel(logging.CRITICAL)
        logger.addHandler(logging.StreamHandler())

    if args.clear_older:
        # housekeeping: purge downloads older than --clear-older days,
        # keeping the cache directory itself
        days = args.clear_older
        items = os.listdir(args.directory)
        now = time.time()
        for item in items:
            if item != CacheBT.CACHE_DIR:
                full_path = os.path.join(args.directory, item)
                if now - os.path.getctime(full_path) > days * 24 * 3600:
                    logger.debug('Deleting path %s', full_path)
                    if os.path.isdir(full_path):
                        shutil.rmtree(full_path, ignore_errors=True)
                    else:
                        os.unlink(full_path)

    if args.print_pieces:
        print_pieces(args)
    elif re.match('https?://localhost', args.url):
        # local test mode with throttled resolver
        class TestResolver(Resolver):
            SPEED_LIMIT = 300
            THREADS = 2
        # stream(args, HTClient, TestResolver)
    else:
        # rclass = plugins.find_matching_plugin(args.url)
        # if rclass:
        #     stream(args, HTClient, rclass)
        # else:
        #     stream(args, BTClient)
        # FIX: an 'else' clause containing only comments is a syntax
        # error; 'pass' keeps the branch valid while streaming is disabled
        pass
    return args
#def stream(args, client_class, resolver_class=None):
# c = client_class(args.directory, args=args, resolver_class=resolver_class)
# player = None
#
# def on_exit(sig=None, frame=None):
# c.close()
# if player:
# player.terminate()
# if sig:
# logger.info('Exiting by signal %d', sig)
# sys.exit(0)
#
# try:
#
# if not args.stream:
# player = Player.create(args.player, c.update_play_time)
#
# server = None
# # test if port if free, otherwise find free
# free_port = args.port
# while True:
#
# try:
# s = socket.socket()
# res = s.connect_ex(('127.0.0.1', free_port))
# if res:
# break
# finally:
# s.close()
# free_port += 1
# if not args.stdin:
# server = StreamServer(('127.0.0.1', free_port), BTFileHandler, allow_range=True,
# status_fn=c.get_normalized_status)
# logger.debug('Started http server on port %d', free_port)
# server.run()
# #thread.start_new_thread(server.run, ())
# if player:
# def start_play(f, finished):
# base = None
# if not args.stdin:
# server.set_file(f)
# base = 'http://127.0.0.1:' + str(free_port) + '/'
# sin = args.stdin
# if finished:
# base = args.directory
# sin = False
# logger.debug('File is already downloaded, will play it directly')
# args.play_file = True
#
# if args.no_resume:
# start_time = 0
# else:
# start_time = c.last_play_time or 0
# player.start(f, base, stdin=sin, sub_lang=args.subtitles, start_time=start_time,
# always_choose_subtitles=args.choose_subtitles)
# logger.debug('Started media player for %s', f)
#
# c.set_on_file_ready(start_play)
# else:
# def print_url(f, done):
# server.set_file(f)
# base = 'http://127.0.0.1:' + str(free_port) + '/'
# url = urlparse.urljoin(base, urllib.quote(f.path))
# print "\nServing file on %s" % url
# sys.stdout.flush()
#
# c.set_on_file_ready(print_url)
#
# logger.debug('Starting btclient - libtorrent version %s', self.lt.version)
# c.start_url(args.url)
# while not c.is_file_ready:
# time.sleep(1)
# if not args.stdin or hasattr(args, 'play_file') and args.play_file:
# f = None
# else:
# f = c.file.create_cursor()
#
# while True:
# if player and not player.is_playing():
# break
# if not f:
# time.sleep(1)
# else:
# buf = f.read(1024)
# if buf:
# try:
# player.write(buf)
# logger.debug("written to stdin")
# except IOError:
# pass
# else:
# player.close()
# if f:
# f.close()
# logger.debug('Play ended')
# if server:
# server.stop()
# if player:
# if player.rcode != 0:
# msg = 'Player ended with error %d\n' % (player.rcode or 0)
# sys.stderr.write(msg)
# logger.error(msg)
#
# logger.debug("Player output:\n %s", player.log)
# finally:
# on_exit()
# # logger.debug("Remaining threads %s", list(threading.enumerate()))
def pieces_map(pieces, w):
    """Write an ASCII map of downloaded pieces via writer callback w.

    Prints a 0-9 column header, then one row of up to ten pieces per
    line prefixed with the row number ('X' = downloaded, 'O' = missing).
    """
    idx = 0
    sz = len(pieces)
    w(" " * 4)
    # column header 0..9
    for i in range(10):  # FIX: xrange -> range (identical on Py2, works on Py3)
        w("%d " % i)
    w('\n')
    while idx < sz:
        # FIX: '//' floor division ('/' yields a float row label on Py3)
        w("%3d " % (idx // 10))
        for _c in range(min(10, sz - idx)):
            if pieces[idx]:
                w('X ')
            else:
                w('O ')
            idx += 1
        w('\n')
def print_pieces(args):
    """Start the torrent, wait up to 60 s for metadata, then print an
    ASCII map of downloaded pieces to stdout and return."""
    def w(x):
        sys.stdout.write(x)

    # NOTE(review): BTClient is constructed without the 'lt' module here,
    # so self.lt would be None — confirm this code path still works.
    c = BTClient(args.directory)
    c.start_url(args.url)
    # c.add_listener(print_status)
    start = time.time()
    while time.time() - start < 60:
        if c.file:
            # metadata arrived — the file is selected and pieces are known
            print "Pieces (each %.0f k) for file: %s" % (c.file.piece_size / 1024.0, c.file.path)
            pieces = c.pieces
            pieces_map(pieces, w)
            return
        time.sleep(1)
    print >> sys.stderr, "Cannot get metadata"
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C: exit quietly with a short notice
        print >> sys.stderr, '\nProgram interrupted, exiting'
        logger.info('Exiting by SIGINT')
    except Exception:
        # last-resort handler: dump the traceback and log it before exit
        traceback.print_exc()
        logger.exception('General error')

View File

@ -1,100 +0,0 @@
'''
Created on Apr 28, 2015
@author: ivan
'''
import os.path
import shelve
import re
import logging
import base64
logger = logging.getLogger('cache')
class CacheBT(object):
    """Persistent cache kept under <base>/.cache.

    Stores cached .torrent files and libtorrent fast-resume blobs (both
    keyed by info-hash) plus last playback positions; a shelve index
    maps source URLs to info-hashes.
    """

    CACHE_DIR = '.cache'
    magnet_re = re.compile('xt=urn:btih:([0-9A-Za-z]+)')
    hexa_chars = re.compile('^[0-9A-F]+$')

    def __init__(self, path, lt):
        if not os.path.isdir(path):
            raise ValueError('Invalid base directory')
        self.path = os.path.join(path, CacheBT.CACHE_DIR)
        if not os.path.isdir(self.path):
            os.mkdir(self.path)
        self._index_path = os.path.join(self.path, 'index')
        self._index = shelve.open(self._index_path)
        self._last_pos_path = os.path.join(self.path, 'last_position')
        self._last_pos = shelve.open(self._last_pos_path)
        self.lt = lt

    def save(self, url, info_hash):
        """Remember which info-hash a source URL resolved to."""
        self._index[url] = info_hash
        self._index.sync()

    def close(self):
        self._index.close()
        self._last_pos.close()

    def _tname(self, info_hash):
        # path of the cached .torrent file for this info-hash
        return os.path.join(self.path, info_hash.upper() + '.torrent')

    def _rname(self, info_hash):
        # path of the fast-resume blob for this info-hash
        return os.path.join(self.path, info_hash.upper() + '.resume')

    def save_resume(self, info_hash, data):
        """Persist a libtorrent fast-resume blob."""
        with open(self._rname(info_hash), 'wb') as out:
            out.write(data)

    def get_resume(self, url=None, info_hash=None):
        """Return the fast-resume blob for a URL or info-hash, or None."""
        if url:
            info_hash = self._index.get(url)
        if not info_hash:
            return None
        rname = self._rname(info_hash)
        if not os.access(rname, os.R_OK):
            return None
        with open(rname, 'rb') as inp:
            return inp.read()

    def file_complete(self, torrent, url=None):
        """Store the finished torrent's metadata as a .torrent file."""
        info_hash = str(torrent.info_hash())
        nt = self.lt.create_torrent(torrent)
        with open(self._tname(info_hash), 'wb') as out:
            out.write(self.lt.bencode(nt.generate()))
        if url:
            self.save(url, info_hash)

    def get_torrent(self, url=None, info_hash=None):
        """Return the cached .torrent path for a URL or info-hash, or None."""
        if url:
            info_hash = self._index.get(url)
        if not info_hash:
            return None
        tname = self._tname(info_hash)
        if not os.access(tname, os.R_OK):
            return None
        logger.debug('Torrent is cached')
        return tname

    @staticmethod
    def hash_from_magnet(m):
        """Extract the upper-case info-hash from a magnet URI.

        Accepts 40-char hex and 32-char base32 forms; raises ValueError
        for anything else.
        """
        found = CacheBT.magnet_re.search(m)
        if not found:
            raise ValueError('Not BT magnet link')
        ih = found.group(1).upper()
        if len(ih) == 40 and CacheBT.hexa_chars.match(ih):
            return ih
        if len(ih) == 32:
            decoded = base64.b32decode(ih)
            return "".join("{:02X}".format(ord(c)) for c in decoded)
        raise ValueError('Not BT magnet link')

    def play_position(self, info_hash, secs):
        """Store the last playback position (seconds)."""
        self._last_pos[info_hash] = secs

    def get_last_position(self, info_hash):
        """Return the last playback position, or 0 when unknown."""
        return self._last_pos.get(info_hash) or 0

View File

@ -1,505 +0,0 @@
'''
Created on May 3, 2015
@author: ivan
'''
import os
from collections import deque
import logging
import copy
import threading
import traceback
import shutil
import urlparse
from StringIO import StringIO
from hachoir_metadata import extractMetadata
from hachoir_parser import guessParser
import hachoir_core.config as hachoir_config
from hachoir_core.stream.input import InputIOStream
logger = logging.getLogger('common')
hachoir_config.quiet = True
def enum(**enums):
    """Build a lightweight enumeration: each keyword argument becomes a
    class attribute on a freshly created 'Enum' type."""
    enum_cls = type('Enum', (), enums)
    return enum_cls


TerminalColor = enum(default='\033[39m', green='\033[32m', red='\033[31m', yellow='\033[33m')
def get_duration(fn):
    """Return the media duration of file fn via hachoir, or None when no
    metadata (or no duration item) can be extracted."""
    # We need to provide just beginning of file otherwise hachoir might try to read all file
    with open(fn, 'rb') as f:
        s = StringIO(f.read(1024 * 64))
    p = guessParser(InputIOStream(s, filename=unicode(fn), tags=[]))
    m = extractMetadata(p)
    if m:
        # getItem returns None when 'duration' is absent; guard before .value
        return m.getItem('duration', 0) and m.getItem('duration', 0).value
def debug_fn(fn):
    """Debugging decorator: trace entry/exit of fn with the current
    thread name and dump the call stack on entry."""
    def _fn(*args, **kwargs):
        print "Entering %s, thread %s" % (fn.__name__, threading.current_thread().name)
        traceback.print_stack()
        ret = fn(*args, **kwargs)
        print "Leaving %s, thread %s" % (fn.__name__, threading.current_thread().name)
        return ret
    return _fn
class Hasher(threading.Thread):
    """Daemon thread computing the OpenSubtitles hash of a torrent-backed
    file; the result is stored in self.hash and passed to hash_cb."""

    def __init__(self, btfile, hash_cb):
        # hash_cb - callable(filehash) invoked when hashing completes
        threading.Thread.__init__(self, name="Hasher")
        if btfile is None:
            raise ValueError('BTFile is None!')
        self._btfile = btfile
        self._hash_cb = hash_cb
        self.hash = None
        self.daemon = True
        # NOTE: the thread starts itself; callers never call start()
        self.start()

    def run(self):
        with self._btfile.create_cursor() as c:
            filehash = OpenSubtitles.hash_file(c, self._btfile.size)
            self.hash = filehash
            self._hash_cb(filehash)
class OpenSubtitles(object):
    """Helper for opensubtitles.org; only the static file-hash routine is
    used by this module."""
    USER_AGENT = 'BTClient'

    def __init__(self, lang, user='', pwd=''):
        # lang - preferred subtitles language code
        # user/pwd - optional credentials (login not implemented here)
        self._lang = lang
        self._token = None
        self._user = user
        self._pwd = pwd

    @staticmethod
    def hash_file(f, filesize):
        """Compute the 64-bit OpenSubtitles hash of file-like object f.

        The hash is the file size plus the little-endian 64-bit words of
        the first and last 64 KiB, truncated to 64 bits and returned as
        a 16-digit lowercase hex string. Raises ValueError("SizeError")
        for files smaller than 128 KiB.
        """
        import struct

        longlongformat = '<q'  # little-endian signed long long
        bytesize = struct.calcsize(longlongformat)

        hash = filesize  # @ReservedAssignment

        if filesize < 65536 * 2:
            raise ValueError("SizeError")

        # FIX: use floor division — '/' yields a float on Python 3 and
        # range() would then raise TypeError (result identical on Python 2)
        for _x in range(65536 // bytesize):
            buffer = f.read(bytesize)  # @ReservedAssignment
            (l_value,) = struct.unpack(longlongformat, buffer)
            hash += l_value
            hash = hash & 0xFFFFFFFFFFFFFFFF  # keep it a 64-bit number @ReservedAssignment

        f.seek(max(0, filesize - 65536))
        for _x in range(65536 // bytesize):
            buffer = f.read(bytesize)  # @ReservedAssignment
            (l_value,) = struct.unpack(longlongformat, buffer)
            hash += l_value
            hash = hash & 0xFFFFFFFFFFFFFFFF  # @ReservedAssignment

        returnedhash = "%016x" % hash
        return returnedhash
class BaseMonitor(threading.Thread):
    """Base daemon thread holding a lock-guarded list of listener
    callbacks; subclasses implement run() and invoke the listeners."""

    def __init__(self, client, name):
        threading.Thread.__init__(self, name=name)
        self.daemon = True
        self._listeners = []
        self._lock = threading.Lock()
        self._wait_event = threading.Event()
        self._running = True
        self._client = client
        self._ses = None

    def stop(self):
        # Cooperative shutdown: clear the run flag and wake the loop.
        self._running = False
        self._wait_event.set()

    def add_listener(self, cb):
        """Register cb; duplicates are silently ignored."""
        with self._lock:
            if cb not in self._listeners:
                self._listeners.append(cb)

    def remove_listener(self, cb):
        """Unregister cb; unknown callbacks are silently ignored."""
        with self._lock:
            try:
                self._listeners.remove(cb)
            except ValueError:
                pass

    def remove_all_listeners(self):
        with self._lock:
            self._listeners = []
class BaseClient(object):
    """Abstract base for download clients.

    Owns the selected file, a once-a-second status-monitor thread, and
    the 'file ready' callback plumbing. Subclasses must implement
    status, print_status, is_file_complete, start_url and unique_file_id.
    """

    class Monitor(BaseMonitor):
        # Polls client.status once a second and fans it out to listeners.
        def __init__(self, client):
            super(BaseClient.Monitor, self).__init__(client, name="Status Monitor")
            self._client = client

        def run(self):
            while (self._running):
                s = self._client.status
                with self._lock:
                    for cb in self._listeners:
                        cb(s, client=self._client)
                self._wait_event.wait(1.0)

    def __init__(self, path_to_store, args=None):
        # path_to_store - directory for downloaded data
        # args - optional parsed options (quiet, delete_on_finish)
        self._base_path = path_to_store
        self._ready = False
        self._file = None
        self._on_ready_action = None
        self._monitor = BaseClient.Monitor(self)
        if not args or not args.quiet:
            self.add_monitor_listener(self.print_status)
        self._delete_on_close = True if args and args.delete_on_finish else False

    def _on_file_ready(self, filehash):
        # Called by the Hasher when hashing finishes; marks the file
        # playable and fires the user-supplied on-ready action.
        #self._file.filehash = filehash
        self._ready = True
        if self._on_ready_action:
            self._on_ready_action(self._file, self.is_file_complete)

    @property
    def status(self):
        """Current status object; provided by subclasses."""
        raise NotImplementedError()

    def get_normalized_status(self):
        """Status as a plain dict in a schema shared by all clients."""
        s = self.status
        return {'source_type': 'base',
                'state': s.state,
                'downloaded': s.downloaded,
                'total_size': s.total_size,
                'download_rate': s.download_rate,
                'desired_rate': s.desired_rate,
                'progress': s.progress,
                'piece_size': self._file.piece_size if self._file else 0
                }

    @property
    def file(self):
        # the selected file, or None until metadata is known
        return self._file

    def set_on_file_ready(self, action):
        """Register callback(file, is_complete) fired once streamable."""
        self._on_ready_action = action

    @property
    def is_file_ready(self):
        return self._ready

    def print_status(self, s, client):
        """Progress reporting; provided by subclasses."""
        raise NotImplementedError()

    @property
    def is_file_complete(self):
        raise NotImplementedError()

    def start_url(self, uri):
        raise NotImplementedError()

    def close(self):
        # NOTE(review): self._cache is created by subclasses, not here —
        # using BaseClient directly would raise AttributeError; confirm
        # this class is only ever instantiated via subclasses.
        if self._cache:
            self._cache.close()
        if self._delete_on_close and self._file:
            self._file.remove()

    @property
    def unique_file_id(self):
        raise NotImplementedError()

    def update_play_time(self, playtime):
        # persist the playback position via the subclass-provided cache
        self._cache.play_position(self.unique_file_id, playtime)

    @property
    def last_play_time(self):
        return self._cache.get_last_position(self.unique_file_id)

    def add_monitor_listener(self, cb):
        self._monitor.add_listener(cb)

    def remove_monitor_listener(self, cb):
        self._monitor.remove_listener(cb)

    def stop(self):
        self._monitor.stop()
        self._monitor.join()
class PieceCache(object):
TIMEOUT = 30
size = 5
def __init__(self, btfile):
# self._btfile=btfile
self._cache = [None] * self.size
self._lock = threading.Lock()
self._event = threading.Event()
self._cache_first = btfile.first_piece
self._piece_size = btfile.piece_size
self._map_offset = btfile.map_piece
self._file_size = btfile.size
self._last_piece = btfile.last_piece
self._request_piece = btfile.prioritize_piece
self._btfile = btfile
def clone(self):
c = PieceCache(self._btfile)
with self._lock:
c._cache = copy.copy(self._cache)
c._cache_first = self._cache_first
return c
def fill_cache(self, first):
to_request = []
with self._lock:
diff = first - self._cache_first
if diff > 0:
for i in xrange(self.size):
if i + diff < self.size:
self._cache[i] = self._cache[i + diff]
else:
self._cache[i] = None
elif diff < 0:
for i in xrange(self.size - 1, -1, -1):
if i + diff >= 0:
self._cache[i] = self._cache[i + diff]
else:
self._cache[i] = None
self._cache_first = first
self._event.clear()
for i in xrange(self.size):
if self._cache[i] is None and (self._cache_first + i) <= self._last_piece:
to_request.append((self._cache_first + i, i))
for args in to_request:
self._request_piece(*args)
def add_piece(self, n, data):
with self._lock:
i = n - self._cache_first
if i >= 0 and i < self.size:
self._cache[i] = data
if i == 0:
self._event.set()
def has_piece(self, n):
with self._lock:
i = n - self._cache_first
if i >= 0 and i < self.size:
return not (self._cache[i] is None)
def _wait_piece(self, pc_no):
while not self.has_piece(pc_no):
self.fill_cache(pc_no)
# self._event.clear()
logger.debug('Waiting for piece %d' % pc_no)
self._event.wait(self.TIMEOUT)
def _get_piece(self, n):
with self._lock:
i = n - self._cache_first
if i < 0 or i > self.size:
raise ValueError('index of of scope of current cache')
return self._cache[i]
def get_piece(self, n):
self._wait_piece(n)
return self._get_piece(n)
def read(self, offset, size):
size = min(size, self._file_size - offset)
if not size:
return
pc_no, ofs = self._map_offset(offset)
data = self.get_piece(pc_no)
pc_size = self._piece_size - ofs
if pc_size > size:
return data[ofs: ofs + size]
else:
pieces = [data[ofs:self._piece_size]]
remains = size - pc_size
new_head = pc_no + 1
while remains and self.has_piece(new_head):
sz = min(remains, self._piece_size)
data = self.get_piece(new_head)
pieces.append(data[:sz])
remains -= sz
if remains:
new_head += 1
self.fill_cache(new_head)
return ''.join(pieces)
class BTCursor(object):
    """Seekable read cursor over a torrent file, backed by a PieceCache."""

    def __init__(self, btfile):
        self._btfile = btfile
        self._pos = 0
        self._cache = PieceCache(btfile)

    def clone(self):
        """Create a new cursor sharing this cursor's cached piece data."""
        duplicate = BTCursor(self._btfile)
        duplicate._cache = self._cache.clone()
        return duplicate

    def close(self):
        self._btfile.remove_cursor(self)

    def read(self, n=None):
        """Read up to ``n`` bytes from the current position; ``n`` of
        None (or 0) means read everything remaining."""
        remaining = self._btfile.size - self._pos
        amount = remaining if not n else min(n, remaining)
        chunk = self._cache.read(self._pos, amount)
        if chunk:
            self._pos += len(chunk)
        return chunk

    def seek(self, n):
        """Move the cursor; positions past EOF are clamped to file size,
        negative positions raise ValueError."""
        if n < 0:
            raise ValueError('Seeking negative')
        self._pos = min(n, self._btfile.size)

    def tell(self):
        return self._pos

    def update_piece(self, n, data):
        # forward freshly downloaded piece data to the cache
        self._cache.add_piece(n, data)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
class AbstractFile(object):
    """A file inside a torrent: piece geometry, read cursors and a
    playback byte-rate estimate derived from media duration."""

    def __init__(self, path, base, size, piece_size):
        """
        path -- file path relative to ``base`` (may contain directories)
        base -- download root directory
        size -- file size in bytes
        piece_size -- torrent piece size in bytes
        """
        self._base = base
        self.size = size
        self.path = path
        self.piece_size = piece_size
        self.offset = 0  # byte offset added when mapping file offsets to pieces (see map_piece)
        self._full_path = os.path.join(base, path)
        self._cursors = []
        # keep a few recently closed cursors so their caches can be reused
        self._cursors_history = deque(maxlen=3)
        self._lock = threading.Lock()
        self.first_piece = 0
        self.last_piece = self.first_piece + (max(size - 1, 0)) // piece_size
        self._rate = None  # cached bytes-per-second estimate
        self._piece_duration = None  # cached per-piece playtime (ms)

    def add_cursor(self, c):
        with self._lock:
            self._cursors.append(c)

    def remove_cursor(self, c):
        with self._lock:
            self._cursors.remove(c)
            self._cursors_history.appendleft(c)

    def create_cursor(self, offset=None):
        """Return a cursor positioned at ``offset``, reusing an open or
        recently closed cursor whose position is within one piece."""
        c = None
        if offset is not None:
            with self._lock:
                for e in reversed(self._cursors):
                    if abs(e.tell() - offset) < self.piece_size:
                        c = e.clone()
                        logger.debug('Cloning existing cursor')
                        break
        if not c:
            with self._lock:
                for e in reversed(self._cursors_history):
                    if abs(e.tell() - offset) < self.piece_size:
                        c = e
                        logger.debug('Reusing previous cursor')
        if not c:
            c = BTCursor(self)
        self.add_cursor(c)
        if offset:
            c.seek(offset)
        return c

    def map_piece(self, ofs):
        """Map file offset ``ofs`` to (piece number, offset inside piece)."""
        return self.first_piece + (ofs + self.offset) // self.piece_size, \
            (ofs + self.offset) % self.piece_size

    def prioritize_piece(self, piece, idx):
        """Request piece download priority (abstract)."""
        raise NotImplementedError()

    @property
    def full_path(self):
        return self._full_path

    def close(self):
        pass

    def remove(self):
        """Delete the downloaded data; when the file sits in a
        subdirectory, remove that whole top-level directory."""
        dirs = self.path.split(os.sep)
        if len(dirs) > 1:
            shutil.rmtree(os.path.join(self._base, dirs[0]), ignore_errors=True)
        else:
            os.unlink(self._full_path)

    def update_piece(self, n, data):
        """Push freshly downloaded piece data to all open cursors."""
        for c in self._cursors:
            c.update_piece(n, data)

    @property
    def duration(self):
        """Media duration (via external get_duration helper); 0 when the
        file does not exist yet or duration is unknown."""
        if not hasattr(self, '_duration'):
            self._duration = get_duration(self._full_path) if os.path.exists(self._full_path) else 0
        return self._duration or 0

    @property
    def piece_duration_ms(self):
        """Estimated playback duration of one piece, in milliseconds."""
        if not self._piece_duration:
            if self.byte_rate:
                # seconds per piece = piece_size / byte_rate; scale to ms.
                # FIX: the original divided by 1000, producing a value a
                # factor of 10**6 smaller than the milliseconds promised
                # by this property's name.
                self._piece_duration = self.piece_size / self.byte_rate * 1000
        return self._piece_duration

    @property
    def byte_rate(self):
        """Average media bytes per second (size / duration); None when
        duration is unavailable."""
        if not self._rate:
            d = self.duration
            if d:
                if hasattr(d, 'total_seconds'):
                    total_seconds = d.total_seconds()
                else:
                    # manual total_seconds for timedelta-likes lacking it
                    total_seconds = (d.microseconds + (d.seconds + d.days * 24 * 3600) * 10**6) / 10**6
                self._rate = self.size / total_seconds
        return self._rate

    def __str__(self):
        return self.path
class Resolver(object):
    """Base URL resolver; the default implementation is a pass-through."""

    URL_PATTERN = None  # presumably a URL pattern used by subclasses — TODO confirm
    SPEED_LIMIT = None  # kB/s
    THREADS = 4

    def __init__(self, loader):
        self._client = loader

    def resolve(self, url):
        """Return the final media URL (identity by default)."""
        return url

    @staticmethod
    def url_to_file(uri):
        """Convert a URI to a relative file path (leading slash stripped)."""
        file_path = urlparse.urlsplit(uri)[2]
        return file_path[1:] if file_path.startswith('/') else file_path

View File

@ -1,262 +0,0 @@
'''
Created on Apr 2, 2015
@author: ivan
import xmlrpclib
import urllib2
import os.path
import gzip
from StringIO import StringIO
import logging
import subprocess
import struct
import time
logger = logging.getLogger('opensubtitles')
class Urllib2Transport(xmlrpclib.Transport):
def __init__(self, opener=None, https=False, use_datetime=0):
xmlrpclib.Transport.__init__(self, use_datetime)
self.opener = opener or urllib2.build_opener()
self.https = https
def request(self, host, handler, request_body, verbose=0):
proto = ('http', 'https')[bool(self.https)]
req = urllib2.Request('%s://%s%s' % (proto, host, handler), request_body)
req.add_header('User-agent', self.user_agent)
self.verbose = verbose
return self.parse_response(self.opener.open(req))
class OpenSubtitles(object):
USER_AGENT = 'BTClient'
def __init__(self, lang, user='', pwd=''):
self._lang = lang
self._proxy = xmlrpclib.ServerProxy('http://api.opensubtitles.org/xml-rpc',
Urllib2Transport(use_datetime=True),
allow_none=True, use_datetime=True)
self._token = None
self._user = user
self._pwd = pwd
def login(self):
res = self._proxy.LogIn(self._user, self._pwd, 'en', self.USER_AGENT)
self._parse_status(res)
token = res.get('token')
if token:
self._token = token
else:
raise xmlrpclib.Fault('NO_TOKEN', 'No token!')
def _parse_status(self, res):
if res.has_key('status'):
code = res['status'].split()[0]
if code != '200':
raise xmlrpclib.Fault('ERROR_CODE_RETURENED', 'Returned error status: %s (%s)' % (code, res))
return True
else:
raise xmlrpclib.Fault('NO_STATUS', 'No status!')
def search(self, filename, filesize=None, filehash=None, limit=20):
filename = os.path.split(filename)[1]
name = os.path.splitext(filename)[0]
query = []
if filehash and filesize:
query.append({'sublanguageid': self._lang, 'moviehash': filehash, 'moviebytesize': str(filesize)})
query.append({'sublanguageid': self._lang, 'tag': filename})
query.append({'sublanguageid': self._lang, 'query': name})
res = self._proxy.SearchSubtitles(self._token, query, {'limit': limit})
self._parse_status(res)
data = res.get('data')
return data if data else []
@staticmethod
def _sub_file(filename, lang, ext):
lang = lang.lower()
path, fname = os.path.split(filename)
fname = os.path.splitext(fname)[0]
return os.path.join(path, fname + '.' + lang + '.' + ext)
@staticmethod
def _base_name(filename):
fname = os.path.split(filename)[1]
return os.path.splitext(fname)[0]
@staticmethod
def hash_file(f, filesize):
longlongformat = '<q' # little-endian long long
bytesize = struct.calcsize(longlongformat)
hash = filesize # @ReservedAssignment
if filesize < 65536 * 2:
raise ValueError("SizeError")
for _x in range(65536 / bytesize):
buffer = f.read(bytesize) # @ReservedAssignment
(l_value,) = struct.unpack(longlongformat, buffer)
hash += l_value
hash = hash & 0xFFFFFFFFFFFFFFFF # to remain as 64bit number @ReservedAssignment
f.seek(max(0, filesize - 65536))
for _x in range(65536 / bytesize):
buffer = f.read(bytesize) # @ReservedAssignment
(l_value,) = struct.unpack(longlongformat, buffer)
hash += l_value
hash = hash & 0xFFFFFFFFFFFFFFFF # @ReservedAssignment
returnedhash = "%016x" % hash
return returnedhash
def choose(self, data):
null = f = open(os.devnull, "w")
items = []
for l in data:
items.append(l['SubDownloadLink'])
items.append(l['SubFileName'])
items.append(l['SubDownloadsCnt'])
p = subprocess.Popen(
'zenity --list --title "Select subtitles" --text "Select best matching subtitles" --width 1024 --height 600 --column Link --column Name --column Downloads --hide-column=1',
stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=null, shell=True)
res, _ = p.communicate(u'\n'.join(items).encode('utf-8'))
null.close()
return res if res.startswith('http') else None
@staticmethod
def download_if_not_exists(filename, lang, filesize=None, filehash=None, sub_ext='srt',
can_choose=True, overwrite=False, retries=3):
sfile = OpenSubtitles._sub_file(filename, lang, sub_ext)
if os.path.exists(sfile) and os.stat(sfile).st_size > 0 and not overwrite:
logger.debug('subs %s are already downloaded', sfile)
return sfile
else:
while True:
try:
with OpenSubtitles(lang) as opensub:
res = opensub.download(filename, filesize, filehash, can_choose)
if res:
logger.debug('Subtitles %s downloaded', res)
return res
else:
logger.debug('No subtitles found for file %s in language %s', filename, lang)
return
except urllib2.HTTPError, e:
retries -= 1
if retries <= 0:
raise e
logger.debug('Retrying to load subtitles due to HTTP error %d, remains %d attempts', e.code,
retries)
time.sleep(1)
def download(self, filename, filesize=None, filehash=None, can_choose=True):
data = self.search(filename, filesize, filehash)
if not data:
return None
media_file = OpenSubtitles._base_name(filename).lower()
def same_name(b):
return media_file == OpenSubtitles._base_name(b['SubFileName']).lower()
if filehash and filesize:
match = filter(lambda x: x.get('QueryNumber', 0) == 0, data)
logger.debug('Got results by filehash')
else:
match = filter(same_name, data)
if match and can_choose != 'always':
sub = match[0]
link = sub['SubDownloadLink']
ext = sub['SubFormat']
logger.debug('Find exact match for media file, subtitle is %s', sub['SubFileName'])
elif can_choose:
link = self.choose(data)
ext = 'srt'
else:
sub = data[0]
link = sub['SubDownloadLink']
ext = sub['SubFormat']
if link:
return self.download_link(filename, link, ext)
def download_link(self, filename, link, ext):
out_file = OpenSubtitles._sub_file(filename, self._lang, ext)
res = urllib2.urlopen(link, timeout=10)
data = StringIO(res.read())
data.seek(0)
res.close()
z = gzip.GzipFile(fileobj=data)
with open(out_file, 'wb') as f:
while True:
d = z.read(1024)
if not d:
break
f.write(d)
z.close()
return out_file
def logout(self):
try:
res = self._proxy.LogOut(self._token)
self._parse_status(res)
except urllib2.HTTPError:
logger.warn('Failed to logout')
def __enter__(self):
self.login()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.logout()
def down(f, lang, overwrite=False):
filesize, filehash = calc_hash(f)
OpenSubtitles.download_if_not_exists(f, lang, filesize=filesize,
filehash=filehash, can_choose=True, overwrite=overwrite)
def calc_hash(f):
if not os.access(f, os.R_OK):
raise ValueError('Cannot read from file %s' % f)
filesize = os.stat(f).st_size
with open(f, 'rb') as fs:
filehash = OpenSubtitles.hash_file(fs, filesize)
return filesize, filehash
def list_subs(f, lang):
import pprint
filesize, filehash = calc_hash(f)
with OpenSubtitles(lang) as opensub:
res = opensub.search(f, filesize, filehash)
res = map(lambda x: {'SubFileName': x['SubFileName'],
'SubDownloadsCnt': x['SubDownloadsCnt'],
'QueryNumber': x.get('QueryNumber', 0),
},
res)
pprint.pprint(res)
if __name__ == '__main__':
from argparse import ArgumentParser
p = ArgumentParser()
p.add_argument("video_file", help="video file")
p.add_argument("-d", "--download", action="store_true", help="Download subtitles for video files")
p.add_argument("-l", "--list", action="store_true", help="List available subtitles")
p.add_argument("--lang", default='eng', help="Language")
p.add_argument("--debug", action="store_true", help="Print debug messages")
p.add_argument("--overwrite", action="store_true", help="Overwrite existing subtitles ")
args = p.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
if args.download:
down(args.video_file, args.lang, args.overwrite)
else:
list_subs(args.video_file, args.lang)
'''

View File

@ -132,7 +132,7 @@ class CXZ(Content.Content):
def mode(self, response):
contentList = []
Soup = BeautifulSoup(response)
result = Soup.findAll('div', {'class': 'b-poster-tile '})
result = Soup.findAll('div', {'class': 'b-poster-tile '})
num = 0
for tr in result:
#main
@ -158,40 +158,4 @@ class CXZ(Content.Content):
originaltitle, title, int(year), img, info,
))
#print result
return contentList
'''
- Video Values:
- genre : string (Comedy)
- year : integer (2009)
- episode : integer (4)
- season : integer (1)
- top250 : integer (192)
- rating : float (6.4) - range is 0..10
- cast : list (Michal C. Hall)
- castandrole : list (Michael C. Hall|Dexter)
- director : string (Dagur Kari)
- mpaa : string (PG-13)
- plot : string (Long Description)
- plotoutline : string (Short Description)
- title : string (Big Fan)
- originaltitle : string (Big Fan)
- sorttitle : string (Big Fan)
- duration : string (3:18)
- studio : string (Warner Bros.)
- tagline : string (An awesome movie) - short description of movie
- writer : string (Robert D. Siegel)
- tvshowtitle : string (Heroes)
- premiered : string (2005-03-04)
- status : string (Continuing) - status of a TVshow
- code : string (tt0110293) - IMDb code
- aired : string (2008-12-07)
- credits : string (Andy Kaufman) - writing credits
- lastplayed : string (Y-m-d h:m:s = 2009-04-05 23:16:04)
- album : string (The Joshua Tree)
- artist : list (['U2'])
- votes : string (12345 votes)
- trailer : string (/home/user/trailer.avi)
- dateadded : string (Y-m-d h:m:s = 2009-04-05 23:16:04)
'''
return contentList