Compare commits
No commits in common. "master" and "sandbox1" have entirely different histories.
@@ -4,6 +4,3 @@ script.module.pyrrent2http

This add-on is the engine for [plugin.video.torrenter](https://github.com/DiMartinoXBMC/plugin.video.torrenter).

This add-on can be used to stream media files from torrents without needing to download the entire files first.

This engine for the Torrenter v2 module of the KODI (XBMC) media center was created because no working solution existed for the Raspberry Pi.
pyrrent2http now runs on a wide range of platforms.
@@ -1,8 +1,8 @@

<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="script.module.pyrrent2http" name="pyrrent2http" version="1.1.1" provider-name="inpos">
<addon id="script.module.pyrrent2http" name="pyrrent2http" version="0.9.0" provider-name="inpos">
<requires>
<import addon="xbmc.python" version="3.0.0"/>
<import addon="script.module.libtorrent" version="1.2.0"/>
<import addon="xbmc.python" version="2.14.0"/>
<import addon="script.module.libtorrent" />
<import addon="script.module.chardet" />
</requires>
<extension point="xbmc.python.module" library="lib"/>

@@ -17,5 +17,5 @@

<description lang="ru">Обеспечивает последовательную (sequential) загрузку торрентов для потокового онлайн просмотра через HTTP. Основан на библиотеке LibTorrent.</description>
<description lang="en">Provides sequential torrent downloading for online streaming video and other media over HTTP.</description>
<email>inpos@yandex.ru</email>
<source>https://git.ukamnya.ru/ukamnya/script.module.pyrrent2http</source></extension>
<source>https://github.com/inpos/script.module.pyrrent2http</source></extension>
</addon>
@@ -1,7 +1,3 @@

[B]Version 1.0.0[/B]
+ Fixed the tracker adding mechanism
+ Added SOCKS proxy support for connecting to trackers

[B]Version 0.9.0[/B]
+ Changed how the torrent is handled
@@ -3,6 +3,33 @@

from collections import namedtuple


# noinspection PyClassHasNoInit
class State:
    QUEUED_FOR_CHECKING = 0
    CHECKING_FILES = 1
    DOWNLOADING_METADATA = 2
    DOWNLOADING = 3
    FINISHED = 4
    SEEDING = 5
    ALLOCATING = 6
    CHECKING_RESUME_DATA = 7


# noinspection PyClassHasNoInit
class MediaType:
    UNKNOWN = None
    AUDIO = 'audio'
    VIDEO = 'video'
    SUBTITLES = 'subtitles'


# noinspection PyClassHasNoInit
class Encryption:
    FORCED = 0
    ENABLED = 1
    DISABLED = 2


SessionStatus = namedtuple('SessionStatus', "name, state, state_str, error, progress, download_rate, upload_rate, "
                                            "total_download, total_upload, num_peers, num_seeds, total_seeds, "
                                            "total_peers")

@@ -12,5 +39,5 @@ FileStatus = namedtuple('FileStatus', "name, save_path, url, size, offset, downl

PeerInfo = namedtuple('PeerInfo', "ip, flags, source, up_speed, down_speed, total_upload, total_download, "
                                  "country, client")

from .engine import Engine
from .error import Error
from engine import Engine
from error import Error
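For reference, a small sketch (not part of the diff) of how the structures defined above fit together; the package import path and all field values are assumptions made purely for illustration:

# Hypothetical usage of the status structures defined in __init__.py
from pyrrent2http import SessionStatus, State

status = SessionStatus(name='example.mkv', state=State.DOWNLOADING, state_str='downloading',
                       error='', progress=0.42, download_rate=512.0, upload_rate=64.0,
                       total_download=1048576, total_upload=131072,
                       num_peers=12, num_seeds=4, total_seeds=40, total_peers=120)

if status.state in (State.FINISHED, State.SEEDING):
    print('Torrent %s is complete' % status.name)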
@@ -1,37 +1,31 @@

# -*- coding: utf-8 -*-
import os
import threading
import urllib.error
import urllib.parse
import urllib.request

import chardet
import sys
import time
import pyrrent2http
import xbmc

from . import SessionStatus, FileStatus, PeerInfo
from . import pyrrent2http
from .error import Error
from .structs import Encryption
from .util import can_bind, find_free_port, localize_path, uri2path, detect_media_type
from error import Error
from . import SessionStatus, FileStatus, PeerInfo, Encryption
from util import can_bind, find_free_port, localize_path, uri2path, detect_media_type
import threading
import urllib
import chardet

LOGGING = True


class Engine:
    """
    This is the Python binding class for the pyrrent2http client.
    """

    def _log(self, message):
        if self.logger:
            self.logger(message)
        else:
            xbmc.log("[pyrrent2http] %s" % message)


    def __init__(self, uri=None, platform=None, download_path=".",
                 bind_host='127.0.0.1', bind_port=5001, connections_limit=200, download_kbps=-1, upload_kbps=-1,
                 bind_host='127.0.0.1', bind_port=5001, connections_limit=None, download_kbps=None, upload_kbps=None,
                 enable_dht=True, enable_lsd=True, enable_natpmp=True, enable_upnp=True, enable_scrape=False,
                 log_stats=False, encryption=Encryption.ENABLED, keep_complete=False, keep_incomplete=False,
                 keep_files=False, log_files_progress=False, log_overall_progress=False, log_pieces_progress=False,
@@ -39,7 +33,7 @@ class Engine:

                 user_agent=None, startup_timeout=5, state_file='', enable_utp=True, enable_tcp=True,
                 debug_alerts=False, logger=None, torrent_connect_boost=50, connection_speed=50,
                 peer_connect_timeout=15, request_timeout=20, min_reconnect_time=60, max_failcount=3,
                 dht_routers=None, trackers=None, proxy=None):
                 dht_routers=None, trackers=None):
        """
        Creates an engine instance. It does nothing except initialize object members. To start the engine, use the
        start() method.
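For orientation, a minimal usage sketch of this binding (not taken from the repository; the magnet URI, download path and media-type filter are placeholder assumptions):

# Hedged example: construct, start, inspect and shut down an Engine
engine = Engine(uri='magnet:?xt=urn:btih:...', download_path='/tmp/torrents',
                bind_host='127.0.0.1', bind_port=5001)
engine.start()
try:
    for fs in engine.list(media_types=['video']):   # FileStatus named tuples
        print(fs.name, fs.size, fs.url)
finally:
    engine.close()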
@ -132,7 +126,6 @@ class Engine:
|
|||
self.logger = logger
|
||||
self.uri = uri
|
||||
self.started = False
|
||||
self.proxy = proxy
|
||||
|
||||
@staticmethod
|
||||
def _validate_save_path(path):
|
||||
|
@ -173,7 +166,6 @@ class Engine:
|
|||
kwargs = {
|
||||
'torrentConnectBoost': self.torrent_connect_boost,
|
||||
'trackers': ",".join(self.trackers),
|
||||
'proxy': self.proxy,
|
||||
'resumeFile': self.resume_file,
|
||||
'minReconnectTime': self.min_reconnect_time,
|
||||
'enableUPNP': self.enable_upnp,
|
||||
|
@ -204,7 +196,7 @@ class Engine:
|
|||
'maxFailCount': self.max_failcount,
|
||||
'showPiecesProgress': self.log_pieces_progress,
|
||||
'idleTimeout': self.max_idle_timeout,
|
||||
# 'fileIndex': start_index,
|
||||
'fileIndex': start_index,
|
||||
'connectionsLimit': self.connections_limit,
|
||||
'enableScrape': self.enable_scrape,
|
||||
'enableUTP': self.enable_utp,
|
||||
|
@ -214,28 +206,25 @@ class Engine:
|
|||
}
|
||||
|
||||
self._log("Invoking pyrrent2http")
|
||||
|
||||
class Logging(object):
|
||||
def __init__(self, _log):
|
||||
self._log = _log
|
||||
|
||||
def info(self, message):
|
||||
if LOGGING:
|
||||
self._log('INFO: %s' % (message,))
|
||||
|
||||
def error(self, message):
|
||||
if LOGGING:
|
||||
self._log('ERROR: %s' % (message,))
|
||||
|
||||
pyrrent2http.logging = Logging(self._log)
|
||||
|
||||
|
||||
self.pyrrent2http = pyrrent2http.Pyrrent2http(**kwargs)
|
||||
self.pyrrent2http.startSession()
|
||||
self.pyrrent2http.startServices()
|
||||
self.pyrrent2http.addTorrent()
|
||||
self.pyrrent2http.startHTTP()
|
||||
self.pyrrent2http_loop = threading.Thread(target=self.pyrrent2http.loop)
|
||||
self.pyrrent2http_loop = threading.Thread(target = self.pyrrent2http.loop)
|
||||
self.pyrrent2http_loop.start()
|
||||
|
||||
|
||||
start = time.time()
|
||||
self.started = True
|
||||
|
@ -245,7 +234,7 @@ class Engine:
|
|||
if not self.is_alive():
|
||||
raise Error("Can't start pyrrent2http, see log for details", Error.PROCESS_ERROR)
|
||||
try:
|
||||
# self.status(1)
|
||||
#self.status(1)
|
||||
initialized = True
|
||||
break
|
||||
except Error:
|
||||
|
@ -256,15 +245,6 @@ class Engine:
|
|||
raise Error("Can't start pyrrent2http, time is out", Error.TIMEOUT)
|
||||
self._log("pyrrent2http successfully started.")
|
||||
|
||||
def activate_file(self, index):
|
||||
self.pyrrent2http.TorrentFS.file(index)
|
||||
|
||||
def pause(self):
|
||||
self.pyrrent2http.pause = True
|
||||
|
||||
def resume(self):
|
||||
self.pyrrent2http.pause = False
|
||||
|
||||
def check_torrent_error(self, status=None):
|
||||
"""
|
||||
It is recommended to call this method periodically to check if any libtorrent errors occurred.
|
||||
|
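A minimal sketch (not from the repository) of the periodic check recommended above; the polling interval and the error-handling behaviour are assumptions:

# Hypothetical monitoring loop around an already-started Engine instance
while engine.is_alive():
    status = engine.status()
    engine.check_torrent_error(status)  # expected to raise Error if libtorrent reported a problem
    time.sleep(5)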
@ -289,6 +269,8 @@ class Engine:
|
|||
status = SessionStatus(**status)
|
||||
return status
|
||||
|
||||
|
||||
|
||||
def list(self, media_types=None, timeout=10):
|
||||
"""
|
||||
Returns list of files in the torrent (see FileStatus named tuple).
|
||||
|
@ -304,9 +286,8 @@ class Engine:
|
|||
if files:
|
||||
res = [FileStatus(index=index, **f) for index, f in enumerate(files)]
|
||||
if media_types is not None:
|
||||
res = [fs for fs in res if fs.media_type in media_types]
|
||||
res = filter(lambda fs: fs.media_type in media_types, res)
|
||||
return res
|
||||
|
||||
def list_from_info(self, media_types=None):
|
||||
try:
|
||||
info = pyrrent2http.lt.torrent_info(uri2path(self.uri))
|
||||
|
@ -315,21 +296,21 @@ class Engine:
|
|||
files = []
|
||||
for i in range(info.num_files()):
|
||||
f = info.file_at(i)
|
||||
Url = 'http://' + "%s:%s" % (self.bind_host, self.bind_port) + '/files/' + urllib.parse.quote(f.path)
|
||||
Url = 'http://' + "%s:%s" % (self.bind_host, self.bind_port) + '/files/' + urllib.quote(f.path)
|
||||
files.append({
|
||||
'name': localize_path(f.path),
|
||||
'size': f.size,
|
||||
'offset': f.offset,
|
||||
'media_type': media_types is not None and detect_media_type(f.path) or '',
|
||||
'download': 0,
|
||||
'progress': 0.0,
|
||||
'save_path': '',
|
||||
'url': Url
|
||||
})
|
||||
if len(files) > 0:
|
||||
'name': localize_path(f.path),
|
||||
'size': f.size,
|
||||
'offset': f.offset,
|
||||
'media_type': media_types and detect_media_type(f.path.decode(chardet.detect(f.path)['encoding'])) or '',
|
||||
'download': 0,
|
||||
'progress': 0.0,
|
||||
'save_path': '',
|
||||
'url': Url
|
||||
})
|
||||
if files:
|
||||
res = [FileStatus(index=index, **f) for index, f in enumerate(files)]
|
||||
if media_types is not None:
|
||||
res = [fs for fs in res if fs.media_type in media_types]
|
||||
res = filter(lambda fs: fs.media_type in media_types, res)
|
||||
return res
|
||||
|
||||
def file_status(self, file_index, timeout=10):
|
||||
|
@ -343,12 +324,13 @@ class Engine:
|
|||
:return: File with specified index
|
||||
:rtype: FileStatus
|
||||
"""
|
||||
filestatus = self.pyrrent2http.Ls(file_index)
|
||||
try:
|
||||
return FileStatus(**filestatus)
|
||||
except:
|
||||
files = self.pyrrent2http.Ls()['files']
|
||||
if files:
|
||||
for f in files:
|
||||
if f['index'] == file_index:
|
||||
return FileStatus(**f)
|
||||
raise Error("Requested file index (%d) is invalid" % (file_index,), Error.INVALID_FILE_INDEX,
|
||||
file_index=file_index)
|
||||
file_index=file_index)
|
||||
|
||||
def peers(self, timeout=10):
|
||||
"""
|
||||
|
@ -364,7 +346,6 @@ class Engine:
|
|||
|
||||
def is_alive(self):
|
||||
return self.pyrrent2http_loop.is_alive()
|
||||
|
||||
def wait_on_close(self, wait_timeout=10):
|
||||
"""
|
||||
By default, close() method sends shutdown command to pyrrent2http, stops logging and returns immediately, not
|
||||
|
|
|
@ -0,0 +1,613 @@
|
|||
"""Guess the MIME type of a file.
|
||||
|
||||
This module defines two useful functions:
|
||||
|
||||
guess_type(url, strict=1) -- guess the MIME type and encoding of a URL.
|
||||
|
||||
guess_extension(type, strict=1) -- guess the extension for a given MIME type.
|
||||
|
||||
It also contains the following, for tuning the behavior:
|
||||
|
||||
Data:
|
||||
|
||||
knownfiles -- list of files to parse
|
||||
inited -- flag set when init() has been called
|
||||
suffix_map -- dictionary mapping suffixes to suffixes
|
||||
encodings_map -- dictionary mapping suffixes to encodings
|
||||
types_map -- dictionary mapping suffixes to types
|
||||
|
||||
Functions:
|
||||
|
||||
init([files]) -- parse a list of files, default knownfiles (on Windows, the
|
||||
default values are taken from the registry)
|
||||
read_mime_types(file) -- parse one file, return a dictionary or None
|
||||
"""
|
||||
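As a quick orientation (not part of the diff), the two functions described above are used roughly like this, assuming this bundled module is importable like the standard-library mimetypes; the results follow the tables defined further down:

# Hedged usage sketch
mime, encoding = guess_type('movie.mp4')      # ('video/mp4', None)
mime, encoding = guess_type('notes.txt.gz')   # ('text/plain', 'gzip') -- '.gz' resolved via encodings_map
ext = guess_extension('video/mp4')            # '.mp4'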
|
||||
import os
|
||||
import sys
|
||||
import posixpath
|
||||
import urllib
|
||||
try:
|
||||
import _winreg
|
||||
except ImportError:
|
||||
_winreg = None
|
||||
|
||||
__all__ = [
|
||||
"guess_type","guess_extension","guess_all_extensions",
|
||||
"add_type","read_mime_types","init"
|
||||
]
|
||||
|
||||
knownfiles = [
|
||||
"/etc/mime.types",
|
||||
"/etc/httpd/mime.types", # Mac OS X
|
||||
"/etc/httpd/conf/mime.types", # Apache
|
||||
"/etc/apache/mime.types", # Apache 1
|
||||
"/etc/apache2/mime.types", # Apache 2
|
||||
"/usr/local/etc/httpd/conf/mime.types",
|
||||
"/usr/local/lib/netscape/mime.types",
|
||||
"/usr/local/etc/httpd/conf/mime.types", # Apache 1.2
|
||||
"/usr/local/etc/mime.types", # Apache 1.3
|
||||
]
|
||||
|
||||
inited = False
|
||||
_db = None
|
||||
|
||||
|
||||
class MimeTypes:
|
||||
"""MIME-types datastore.
|
||||
|
||||
This datastore can handle information from mime.types-style files
|
||||
and supports basic determination of MIME type from a filename or
|
||||
URL, and can guess a reasonable extension given a MIME type.
|
||||
"""
|
||||
|
||||
def __init__(self, filenames=(), strict=True):
|
||||
if not inited:
|
||||
init()
|
||||
self.encodings_map = encodings_map.copy()
|
||||
self.suffix_map = suffix_map.copy()
|
||||
self.types_map = ({}, {}) # dict for (non-strict, strict)
|
||||
self.types_map_inv = ({}, {})
|
||||
for (ext, type) in types_map.items():
|
||||
self.add_type(type, ext, True)
|
||||
for (ext, type) in common_types.items():
|
||||
self.add_type(type, ext, False)
|
||||
for name in filenames:
|
||||
self.read(name, strict)
|
||||
|
||||
def add_type(self, type, ext, strict=True):
|
||||
"""Add a mapping between a type and an extension.
|
||||
|
||||
When the extension is already known, the new
|
||||
type will replace the old one. When the type
|
||||
is already known the extension will be added
|
||||
to the list of known extensions.
|
||||
|
||||
If strict is true, information will be added to
|
||||
list of standard types, else to the list of non-standard
|
||||
types.
|
||||
"""
|
||||
self.types_map[strict][ext] = type
|
||||
exts = self.types_map_inv[strict].setdefault(type, [])
|
||||
if ext not in exts:
|
||||
exts.append(ext)
|
||||
|
||||
def guess_type(self, url, strict=True):
|
||||
"""Guess the type of a file based on its URL.
|
||||
|
||||
Return value is a tuple (type, encoding) where type is None if
|
||||
the type can't be guessed (no or unknown suffix) or a string
|
||||
of the form type/subtype, usable for a MIME Content-type
|
||||
header; and encoding is None for no encoding or the name of
|
||||
the program used to encode (e.g. compress or gzip). The
|
||||
mappings are table driven. Encoding suffixes are case
|
||||
sensitive; type suffixes are first tried case sensitive, then
|
||||
case insensitive.
|
||||
|
||||
The suffixes .tgz, .taz and .tz (case sensitive!) are all
|
||||
mapped to '.tar.gz'. (This is table-driven too, using the
|
||||
dictionary suffix_map.)
|
||||
|
||||
Optional `strict' argument when False adds a bunch of commonly found,
|
||||
but non-standard types.
|
||||
"""
|
||||
scheme, url = urllib.splittype(url)
|
||||
if scheme == 'data':
|
||||
# syntax of data URLs:
|
||||
# dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
|
||||
# mediatype := [ type "/" subtype ] *( ";" parameter )
|
||||
# data := *urlchar
|
||||
# parameter := attribute "=" value
|
||||
# type/subtype defaults to "text/plain"
|
||||
comma = url.find(',')
|
||||
if comma < 0:
|
||||
# bad data URL
|
||||
return None, None
|
||||
semi = url.find(';', 0, comma)
|
||||
if semi >= 0:
|
||||
type = url[:semi]
|
||||
else:
|
||||
type = url[:comma]
|
||||
if '=' in type or '/' not in type:
|
||||
type = 'text/plain'
|
||||
return type, None # never compressed, so encoding is None
|
||||
base, ext = posixpath.splitext(url)
|
||||
while ext in self.suffix_map:
|
||||
base, ext = posixpath.splitext(base + self.suffix_map[ext])
|
||||
if ext in self.encodings_map:
|
||||
encoding = self.encodings_map[ext]
|
||||
base, ext = posixpath.splitext(base)
|
||||
else:
|
||||
encoding = None
|
||||
types_map = self.types_map[True]
|
||||
if ext in types_map:
|
||||
return types_map[ext], encoding
|
||||
elif ext.lower() in types_map:
|
||||
return types_map[ext.lower()], encoding
|
||||
elif strict:
|
||||
return None, encoding
|
||||
types_map = self.types_map[False]
|
||||
if ext in types_map:
|
||||
return types_map[ext], encoding
|
||||
elif ext.lower() in types_map:
|
||||
return types_map[ext.lower()], encoding
|
||||
else:
|
||||
return None, encoding
|
||||
|
||||
def guess_all_extensions(self, type, strict=True):
|
||||
"""Guess the extensions for a file based on its MIME type.
|
||||
|
||||
Return value is a list of strings giving the possible filename
|
||||
extensions, including the leading dot ('.'). The extension is not
|
||||
guaranteed to have been associated with any particular data stream,
|
||||
but would be mapped to the MIME type `type' by guess_type().
|
||||
|
||||
Optional `strict' argument when false adds a bunch of commonly found,
|
||||
but non-standard types.
|
||||
"""
|
||||
type = type.lower()
|
||||
extensions = self.types_map_inv[True].get(type, [])
|
||||
if not strict:
|
||||
for ext in self.types_map_inv[False].get(type, []):
|
||||
if ext not in extensions:
|
||||
extensions.append(ext)
|
||||
return extensions
|
||||
|
||||
def guess_extension(self, type, strict=True):
|
||||
"""Guess the extension for a file based on its MIME type.
|
||||
|
||||
Return value is a string giving a filename extension,
|
||||
including the leading dot ('.'). The extension is not
|
||||
guaranteed to have been associated with any particular data
|
||||
stream, but would be mapped to the MIME type `type' by
|
||||
guess_type(). If no extension can be guessed for `type', None
|
||||
is returned.
|
||||
|
||||
Optional `strict' argument when false adds a bunch of commonly found,
|
||||
but non-standard types.
|
||||
"""
|
||||
extensions = self.guess_all_extensions(type, strict)
|
||||
if not extensions:
|
||||
return None
|
||||
return extensions[0]
|
||||
|
||||
def read(self, filename, strict=True):
|
||||
"""
|
||||
Read a single mime.types-format file, specified by pathname.
|
||||
|
||||
If strict is true, information will be added to
|
||||
list of standard types, else to the list of non-standard
|
||||
types.
|
||||
"""
|
||||
with open(filename) as fp:
|
||||
self.readfp(fp, strict)
|
||||
|
||||
def readfp(self, fp, strict=True):
|
||||
"""
|
||||
Read a single mime.types-format file.
|
||||
|
||||
If strict is true, information will be added to
|
||||
list of standard types, else to the list of non-standard
|
||||
types.
|
||||
"""
|
||||
while 1:
|
||||
line = fp.readline()
|
||||
if not line:
|
||||
break
|
||||
words = line.split()
|
||||
for i in range(len(words)):
|
||||
if words[i][0] == '#':
|
||||
del words[i:]
|
||||
break
|
||||
if not words:
|
||||
continue
|
||||
type, suffixes = words[0], words[1:]
|
||||
for suff in suffixes:
|
||||
self.add_type(type, '.' + suff, strict)
|
||||
|
||||
def read_windows_registry(self, strict=True):
|
||||
"""
|
||||
Load the MIME types database from Windows registry.
|
||||
|
||||
If strict is true, information will be added to
|
||||
list of standard types, else to the list of non-standard
|
||||
types.
|
||||
"""
|
||||
|
||||
# Windows only
|
||||
if not _winreg:
|
||||
return
|
||||
|
||||
def enum_types(mimedb):
|
||||
i = 0
|
||||
while True:
|
||||
try:
|
||||
ctype = _winreg.EnumKey(mimedb, i)
|
||||
except EnvironmentError:
|
||||
break
|
||||
try:
|
||||
ctype = ctype#.encode(default_encoding) # omit in 3.x!
|
||||
except UnicodeEncodeError:
|
||||
pass
|
||||
else:
|
||||
yield ctype
|
||||
i += 1
|
||||
|
||||
default_encoding = sys.getdefaultencoding()
|
||||
with _winreg.OpenKey(_winreg.HKEY_CLASSES_ROOT,
|
||||
r'MIME\Database\Content Type') as mimedb:
|
||||
for ctype in enum_types(mimedb):
|
||||
try:
|
||||
with _winreg.OpenKey(mimedb, ctype) as key:
|
||||
suffix, datatype = _winreg.QueryValueEx(key,
|
||||
'Extension')
|
||||
except EnvironmentError:
|
||||
continue
|
||||
if datatype != _winreg.REG_SZ:
|
||||
continue
|
||||
try:
|
||||
suffix = suffix.encode(default_encoding) # omit in 3.x!
|
||||
except UnicodeEncodeError:
|
||||
continue
|
||||
self.add_type(ctype, suffix, strict)
|
||||
|
||||
|
||||
def guess_type(url, strict=True):
|
||||
"""Guess the type of a file based on its URL.
|
||||
|
||||
Return value is a tuple (type, encoding) where type is None if the
|
||||
type can't be guessed (no or unknown suffix) or a string of the
|
||||
form type/subtype, usable for a MIME Content-type header; and
|
||||
encoding is None for no encoding or the name of the program used
|
||||
to encode (e.g. compress or gzip). The mappings are table
|
||||
driven. Encoding suffixes are case sensitive; type suffixes are
|
||||
first tried case sensitive, then case insensitive.
|
||||
|
||||
The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped
|
||||
to ".tar.gz". (This is table-driven too, using the dictionary
|
||||
suffix_map).
|
||||
|
||||
Optional `strict' argument when false adds a bunch of commonly found, but
|
||||
non-standard types.
|
||||
"""
|
||||
if _db is None:
|
||||
init()
|
||||
return _db.guess_type(url, strict)
|
||||
|
||||
|
||||
def guess_all_extensions(type, strict=True):
|
||||
"""Guess the extensions for a file based on its MIME type.
|
||||
|
||||
Return value is a list of strings giving the possible filename
|
||||
extensions, including the leading dot ('.'). The extension is not
|
||||
guaranteed to have been associated with any particular data
|
||||
stream, but would be mapped to the MIME type `type' by
|
||||
guess_type(). If no extension can be guessed for `type', None
|
||||
is returned.
|
||||
|
||||
Optional `strict' argument when false adds a bunch of commonly found,
|
||||
but non-standard types.
|
||||
"""
|
||||
if _db is None:
|
||||
init()
|
||||
return _db.guess_all_extensions(type, strict)
|
||||
|
||||
def guess_extension(type, strict=True):
|
||||
"""Guess the extension for a file based on its MIME type.
|
||||
|
||||
Return value is a string giving a filename extension, including the
|
||||
leading dot ('.'). The extension is not guaranteed to have been
|
||||
associated with any particular data stream, but would be mapped to the
|
||||
MIME type `type' by guess_type(). If no extension can be guessed for
|
||||
`type', None is returned.
|
||||
|
||||
Optional `strict' argument when false adds a bunch of commonly found,
|
||||
but non-standard types.
|
||||
"""
|
||||
if _db is None:
|
||||
init()
|
||||
return _db.guess_extension(type, strict)
|
||||
|
||||
def add_type(type, ext, strict=True):
|
||||
"""Add a mapping between a type and an extension.
|
||||
|
||||
When the extension is already known, the new
|
||||
type will replace the old one. When the type
|
||||
is already known the extension will be added
|
||||
to the list of known extensions.
|
||||
|
||||
If strict is true, information will be added to
|
||||
list of standard types, else to the list of non-standard
|
||||
types.
|
||||
"""
|
||||
if _db is None:
|
||||
init()
|
||||
return _db.add_type(type, ext, strict)
|
||||
|
||||
|
||||
def init(files=None):
|
||||
global suffix_map, types_map, encodings_map, common_types
|
||||
global inited, _db
|
||||
inited = True # so that MimeTypes.__init__() doesn't call us again
|
||||
db = MimeTypes()
|
||||
if files is None:
|
||||
if _winreg:
|
||||
db.read_windows_registry()
|
||||
files = knownfiles
|
||||
for file in files:
|
||||
if os.path.isfile(file):
|
||||
db.read(file)
|
||||
encodings_map = db.encodings_map
|
||||
suffix_map = db.suffix_map
|
||||
types_map = db.types_map[True]
|
||||
common_types = db.types_map[False]
|
||||
# Make the DB a global variable now that it is fully initialized
|
||||
_db = db
|
||||
|
||||
|
||||
def read_mime_types(file):
|
||||
try:
|
||||
f = open(file)
|
||||
except IOError:
|
||||
return None
|
||||
db = MimeTypes()
|
||||
db.readfp(f, True)
|
||||
return db.types_map[True]
|
||||
|
||||
|
||||
def _default_mime_types():
|
||||
global suffix_map
|
||||
global encodings_map
|
||||
global types_map
|
||||
global common_types
|
||||
|
||||
suffix_map = {
|
||||
'.tgz': '.tar.gz',
|
||||
'.taz': '.tar.gz',
|
||||
'.tz': '.tar.gz',
|
||||
'.tbz2': '.tar.bz2',
|
||||
}
|
||||
|
||||
encodings_map = {
|
||||
'.gz': 'gzip',
|
||||
'.Z': 'compress',
|
||||
'.bz2': 'bzip2',
|
||||
}
|
||||
|
||||
# Before adding new types, make sure they are either registered with IANA,
|
||||
# at http://www.isi.edu/in-notes/iana/assignments/media-types
|
||||
# or extensions, i.e. using the x- prefix
|
||||
|
||||
# If you add to these, please keep them sorted!
|
||||
types_map = {
|
||||
'.3gp' : 'video/3gp',
|
||||
'.a' : 'application/octet-stream',
|
||||
'.ai' : 'application/postscript',
|
||||
'.aif' : 'audio/x-aiff',
|
||||
'.aifc' : 'audio/x-aiff',
|
||||
'.aiff' : 'audio/x-aiff',
|
||||
'.asf' : 'video/x-ms-asf',
|
||||
'.asx' : 'video/x-ms-asf',
|
||||
'.au' : 'audio/basic',
|
||||
'.avi' : 'video/x-msvideo',
|
||||
'.axv' : 'video/annodex',
|
||||
'.bat' : 'text/plain',
|
||||
'.bcpio' : 'application/x-bcpio',
|
||||
'.bin' : 'application/octet-stream',
|
||||
'.bmp' : 'image/x-ms-bmp',
|
||||
'.c' : 'text/plain',
|
||||
# Duplicates :(
|
||||
'.cdf' : 'application/x-cdf',
|
||||
'.cdf' : 'application/x-netcdf',
|
||||
'.cpio' : 'application/x-cpio',
|
||||
'.csh' : 'application/x-csh',
|
||||
'.css' : 'text/css',
|
||||
'.dif' : 'video/dv',
|
||||
'.dl' : 'video/dl',
|
||||
'.dll' : 'application/octet-stream',
|
||||
'.dv' : 'video/dv',
|
||||
'.doc' : 'application/msword',
|
||||
'.dot' : 'application/msword',
|
||||
'.dvi' : 'application/x-dvi',
|
||||
'.eml' : 'message/rfc822',
|
||||
'.eps' : 'application/postscript',
|
||||
'.etx' : 'text/x-setext',
|
||||
'.exe' : 'application/octet-stream',
|
||||
'.fli' : 'video/fli',
|
||||
'.flv' : 'video/x-flv',
|
||||
'.gif' : 'image/gif',
|
||||
'.gl' : 'video/gl',
|
||||
'.gtar' : 'application/x-gtar',
|
||||
'.h' : 'text/plain',
|
||||
'.hdf' : 'application/x-hdf',
|
||||
'.htm' : 'text/html',
|
||||
'.html' : 'text/html',
|
||||
'.ief' : 'image/ief',
|
||||
'.jpe' : 'image/jpeg',
|
||||
'.jpeg' : 'image/jpeg',
|
||||
'.jpg' : 'image/jpeg',
|
||||
'.js' : 'application/x-javascript',
|
||||
'.ksh' : 'text/plain',
|
||||
'.latex' : 'application/x-latex',
|
||||
'.lsf' : 'video/x-la-lsf',
|
||||
'.lsx' : 'video/x-la-lsf',
|
||||
'.m1v' : 'video/mpeg',
|
||||
'.man' : 'application/x-troff-man',
|
||||
'.me' : 'application/x-troff-me',
|
||||
'.mht' : 'message/rfc822',
|
||||
'.mhtml' : 'message/rfc822',
|
||||
'.mif' : 'application/x-mif',
|
||||
'.mng' : 'video/x-mng',
|
||||
'.movie' : 'video/x-sgi-movie',
|
||||
'.mp2' : 'audio/mpeg',
|
||||
'.mp3' : 'audio/mpeg',
|
||||
'.mp4' : 'video/mp4',
|
||||
'.mpa' : 'video/mpeg',
|
||||
'.mpe' : 'video/mpeg',
|
||||
'.mpeg' : 'video/mpeg',
|
||||
'.mpg' : 'video/mpeg',
|
||||
'.mpv' : 'video/matroska',
|
||||
'.mkv' : 'video/matroska',
|
||||
'.mov' : 'video/quicktime',
|
||||
'.ms' : 'application/x-troff-ms',
|
||||
'.nc' : 'application/x-netcdf',
|
||||
'.nws' : 'message/rfc822',
|
||||
'.o' : 'application/octet-stream',
|
||||
'.obj' : 'application/octet-stream',
|
||||
'.oda' : 'application/oda',
|
||||
'.ogv' : 'video/ogg',
|
||||
'.p12' : 'application/x-pkcs12',
|
||||
'.p7c' : 'application/pkcs7-mime',
|
||||
'.pbm' : 'image/x-portable-bitmap',
|
||||
'.pdf' : 'application/pdf',
|
||||
'.pfx' : 'application/x-pkcs12',
|
||||
'.pgm' : 'image/x-portable-graymap',
|
||||
'.pl' : 'text/plain',
|
||||
'.png' : 'image/png',
|
||||
'.pnm' : 'image/x-portable-anymap',
|
||||
'.pot' : 'application/vnd.ms-powerpoint',
|
||||
'.ppa' : 'application/vnd.ms-powerpoint',
|
||||
'.ppm' : 'image/x-portable-pixmap',
|
||||
'.pps' : 'application/vnd.ms-powerpoint',
|
||||
'.ppt' : 'application/vnd.ms-powerpoint',
|
||||
'.ps' : 'application/postscript',
|
||||
'.pwz' : 'application/vnd.ms-powerpoint',
|
||||
'.py' : 'text/x-python',
|
||||
'.pyc' : 'application/x-python-code',
|
||||
'.pyo' : 'application/x-python-code',
|
||||
'.qt' : 'video/quicktime',
|
||||
'.ra' : 'audio/x-pn-realaudio',
|
||||
'.ram' : 'application/x-pn-realaudio',
|
||||
'.ras' : 'image/x-cmu-raster',
|
||||
'.rdf' : 'application/xml',
|
||||
'.rgb' : 'image/x-rgb',
|
||||
'.roff' : 'application/x-troff',
|
||||
'.rtx' : 'text/richtext',
|
||||
'.sgm' : 'text/x-sgml',
|
||||
'.sgml' : 'text/x-sgml',
|
||||
'.sh' : 'application/x-sh',
|
||||
'.shar' : 'application/x-shar',
|
||||
'.snd' : 'audio/basic',
|
||||
'.so' : 'application/octet-stream',
|
||||
'.src' : 'application/x-wais-source',
|
||||
'.sv4cpio': 'application/x-sv4cpio',
|
||||
'.sv4crc' : 'application/x-sv4crc',
|
||||
'.swf' : 'application/x-shockwave-flash',
|
||||
'.t' : 'application/x-troff',
|
||||
'.tar' : 'application/x-tar',
|
||||
'.tcl' : 'application/x-tcl',
|
||||
'.tex' : 'application/x-tex',
|
||||
'.texi' : 'application/x-texinfo',
|
||||
'.texinfo': 'application/x-texinfo',
|
||||
'.tif' : 'image/tiff',
|
||||
'.tiff' : 'image/tiff',
|
||||
'.tr' : 'application/x-troff',
|
||||
'.ts' : 'video/MP2T',
|
||||
'.tsv' : 'text/tab-separated-values',
|
||||
'.txt' : 'text/plain',
|
||||
'.ustar' : 'application/x-ustar',
|
||||
'.vcf' : 'text/x-vcard',
|
||||
'.wav' : 'audio/x-wav',
|
||||
'.webm' : 'video/webm',
|
||||
'.wiz' : 'application/msword',
|
||||
'.wm' : 'video/x-ms-wm',
|
||||
'.wmv' : 'video/x-ms-wmv',
|
||||
'.wmx' : 'video/x-ms-wmx',
|
||||
'.wvx' : 'video/x-ms-wvx',
|
||||
'.wsdl' : 'application/xml',
|
||||
'.xbm' : 'image/x-xbitmap',
|
||||
'.xlb' : 'application/vnd.ms-excel',
|
||||
# Duplicates :(
|
||||
'.xls' : 'application/excel',
|
||||
'.xls' : 'application/vnd.ms-excel',
|
||||
'.xml' : 'text/xml',
|
||||
'.xpdl' : 'application/xml',
|
||||
'.xpm' : 'image/x-xpixmap',
|
||||
'.xsl' : 'application/xml',
|
||||
'.xwd' : 'image/x-xwindowdump',
|
||||
'.zip' : 'application/zip',
|
||||
}
|
||||
|
||||
# These are non-standard types, commonly found in the wild. They will
|
||||
# only match if strict=0 flag is given to the API methods.
|
||||
|
||||
# Please sort these too
|
||||
common_types = {
|
||||
'.jpg' : 'image/jpg',
|
||||
'.mid' : 'audio/midi',
|
||||
'.midi': 'audio/midi',
|
||||
'.pct' : 'image/pict',
|
||||
'.pic' : 'image/pict',
|
||||
'.pict': 'image/pict',
|
||||
'.rtf' : 'application/rtf',
|
||||
'.xul' : 'text/xul'
|
||||
}
|
||||
|
||||
|
||||
_default_mime_types()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import getopt
|
||||
|
||||
USAGE = """\
|
||||
Usage: mimetypes.py [options] type
|
||||
|
||||
Options:
|
||||
--help / -h -- print this message and exit
|
||||
--lenient / -l -- additionally search of some common, but non-standard
|
||||
types.
|
||||
--extension / -e -- guess extension instead of type
|
||||
|
||||
More than one type argument may be given.
|
||||
"""
|
||||
|
||||
def usage(code, msg=''):
|
||||
print USAGE
|
||||
if msg: print msg
|
||||
sys.exit(code)
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], 'hle',
|
||||
['help', 'lenient', 'extension'])
|
||||
except getopt.error, msg:
|
||||
usage(1, msg)
|
||||
|
||||
strict = 1
|
||||
extension = 0
|
||||
for opt, arg in opts:
|
||||
if opt in ('-h', '--help'):
|
||||
usage(0)
|
||||
elif opt in ('-l', '--lenient'):
|
||||
strict = 0
|
||||
elif opt in ('-e', '--extension'):
|
||||
extension = 1
|
||||
for gtype in args:
|
||||
if extension:
|
||||
guess = guess_extension(gtype, strict)
|
||||
if not guess: print "I don't know anything about type", gtype
|
||||
else: print guess
|
||||
else:
|
||||
guess, encoding = guess_type(gtype, strict)
|
||||
if not guess: print "I don't know anything about type", gtype
|
||||
else: print 'type:', guess, 'encoding:', encoding
|
|
@ -1,16 +1,16 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import chardet
|
||||
|
||||
try:
|
||||
from python_libtorrent import get_libtorrent # @UnresolvedImport
|
||||
|
||||
lt = get_libtorrent()
|
||||
print(('Imported libtorrent v%s from python_libtorrent' % (lt.version,)))
|
||||
except Exception as e:
|
||||
print(('Error importing python_libtorrent.Exception: %s' % (str(e),)))
|
||||
from python_libtorrent import get_libtorrent
|
||||
lt=get_libtorrent()
|
||||
print('Imported libtorrent v%s from python_libtorrent' %(lt.version, ))
|
||||
except Exception, e:
|
||||
print('Error importing python_libtorrent.Exception: %s' %(str(e),))
|
||||
try:
|
||||
import libtorrent as lt # @UnresolvedImport
|
||||
import libtorrent as lt
|
||||
except Exception as e:
|
||||
strerror = e.args
|
||||
print(strerror)
|
||||
|
@ -18,39 +18,28 @@ except Exception as e:
|
|||
|
||||
from random import SystemRandom
|
||||
import time
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import http.server
|
||||
import socketserver
|
||||
import urllib
|
||||
import BaseHTTPServer
|
||||
import SocketServer
|
||||
import threading
|
||||
import io
|
||||
from .util import localize_path, Struct, detect_media_type, uri2path, encode_msg
|
||||
from util import localize_path, Struct, detect_media_type, uri2path, encode_msg
|
||||
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
from ctypes import *
|
||||
|
||||
libc = CDLL('/system/lib/libc.so')
|
||||
libc.lseek64.restype = c_ulonglong
|
||||
libc.lseek64.argtypes = [c_uint, c_ulonglong, c_uint]
|
||||
libc.read.restype = c_long
|
||||
libc.read.argtypes = [c_uint, c_void_p, c_long]
|
||||
O_RDONLY = 0
|
||||
O_LARGEFILE = 0x8000
|
||||
|
||||
######################################################################################
|
||||
|
||||
if not hasattr(os, 'getppid'):
|
||||
import ctypes
|
||||
|
||||
TH32CS_SNAPPROCESS = 0x02
|
||||
CreateToolhelp32Snapshot = ctypes.windll.kernel32.CreateToolhelp32Snapshot # @UndefinedVariable
|
||||
GetCurrentProcessId = ctypes.windll.kernel32.GetCurrentProcessId # @UndefinedVariable
|
||||
TH32CS_SNAPPROCESS = 0x02L
|
||||
CreateToolhelp32Snapshot = ctypes.windll.kernel32.CreateToolhelp32Snapshot
|
||||
GetCurrentProcessId = ctypes.windll.kernel32.GetCurrentProcessId
|
||||
|
||||
MAX_PATH = 260
|
||||
|
||||
_kernel32dll = ctypes.windll.Kernel32
|
||||
CloseHandle = _kernel32dll.CloseHandle
|
||||
|
||||
|
||||
class PROCESSENTRY32(ctypes.Structure):
|
||||
_fields_ = [
|
||||
("dwSize", ctypes.c_ulong),
|
||||
|
@ -66,11 +55,9 @@ if not hasattr(os, 'getppid'):
|
|||
("szExeFile", ctypes.c_wchar * MAX_PATH)
|
||||
]
|
||||
|
||||
|
||||
Process32First = _kernel32dll.Process32FirstW
|
||||
Process32Next = _kernel32dll.Process32NextW
|
||||
|
||||
|
||||
def getppid():
|
||||
'''
|
||||
:return: The pid of the parent of this process.
|
||||
|
@ -96,32 +83,28 @@ if not hasattr(os, 'getppid'):
|
|||
|
||||
return result
|
||||
|
||||
|
||||
os.getppid = getppid
|
||||
|
||||
#################################################################################
|
||||
|
||||
AVOID_HTTP_SERVER_EXCEPTION_OUTPUT = True
|
||||
VERSION = "0.6.0"
|
||||
# USER_AGENT = "pyrrent2http/" + VERSION + " libtorrent/" + lt.version
|
||||
#USER_AGENT = "pyrrent2http/" + VERSION + " libtorrent/" + lt.version
|
||||
USER_AGENT = 'libtorrent/1.0.9.0'
|
||||
|
||||
VIDEO_EXTS = {'.avi': 'video/x-msvideo', '.mp4': 'video/mp4', '.mkv': 'video/x-matroska',
|
||||
'.m4v': 'video/mp4', '.mov': 'video/quicktime', '.mpg': 'video/mpeg', '.ogv': 'video/ogg',
|
||||
'.ogg': 'video/ogg', '.webm': 'video/webm', '.ts': 'video/mp2t', '.3gp': 'video/3gpp'}
|
||||
|
||||
|
||||
VIDEO_EXTS={'.avi':'video/x-msvideo','.mp4':'video/mp4','.mkv':'video/x-matroska',
|
||||
'.m4v':'video/mp4','.mov':'video/quicktime', '.mpg':'video/mpeg','.ogv':'video/ogg',
|
||||
'.ogg':'video/ogg', '.webm':'video/webm', '.ts': 'video/mp2t', '.3gp':'video/3gpp'}
|
||||
######################################################################################
|
||||
|
||||
class Ticker(object):
|
||||
def __init__(self, interval):
|
||||
self.tick = False
|
||||
self._timer = None
|
||||
self.interval = interval
|
||||
self._timer = None
|
||||
self.interval = interval
|
||||
self.is_running = False
|
||||
self.start()
|
||||
|
||||
@property
|
||||
def true(self):
|
||||
if self.tick:
|
||||
self.tick = False
|
||||
|
@ -144,25 +127,23 @@ class Ticker(object):
|
|||
self._timer.cancel()
|
||||
self.is_running = False
|
||||
|
||||
|
||||
#######################################################################################
|
||||
|
||||
class TorrentFile(object):
|
||||
tfs = None
|
||||
closed = True
|
||||
save_path = str()
|
||||
fileEntry = None
|
||||
index = 0
|
||||
filePtr = None
|
||||
downloaded = 0
|
||||
progress = 0.0
|
||||
pdl_thread = None
|
||||
|
||||
tfs = None
|
||||
closed = True
|
||||
save_path = str()
|
||||
fileEntry = None
|
||||
index = 0
|
||||
filePtr = None
|
||||
downloaded = 0
|
||||
progress = 0.0
|
||||
pdl_thread = None
|
||||
def __init__(self, tfs, fileEntry, savePath, index):
|
||||
self.tfs = tfs
|
||||
self.fileEntry = fileEntry
|
||||
self.name = self.fileEntry.path
|
||||
self.unicode_name = isinstance(self.name, str) and self.name or self.name.decode(chardet.detect(self.name)['encoding'])
|
||||
self.unicode_name = self.name.decode(chardet.detect(self.name)['encoding'])
|
||||
self.media_type = detect_media_type(self.unicode_name)
|
||||
self.save_path = savePath
|
||||
self.index = index
|
||||
|
@ -171,13 +152,11 @@ class TorrentFile(object):
|
|||
self.offset = self.fileEntry.offset
|
||||
self.startPiece, self.endPiece = self.Pieces()
|
||||
self.pieces_deadlined = [False] * (self.endPiece - self.startPiece)
|
||||
|
||||
|
||||
def Downloaded(self):
|
||||
return self.downloaded
|
||||
|
||||
def Progress(self):
|
||||
return self.progress
|
||||
|
||||
def __fileptr_(self):
|
||||
if self.closed:
|
||||
return None
|
||||
|
@ -186,49 +165,34 @@ class TorrentFile(object):
|
|||
logging.info('Waiting for file: %s' % (self.save_path,))
|
||||
self.tfs.handle.flush_cache()
|
||||
time.sleep(0.5)
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
self.filePtr = libc.open(self.save_path, O_RDONLY | O_LARGEFILE, 755)
|
||||
else:
|
||||
self.filePtr = io.open(self.save_path, 'rb')
|
||||
self.filePtr = io.open(self.save_path, 'rb')
|
||||
return self.filePtr
|
||||
|
||||
def log(self, message):
|
||||
fnum = self.tfs.openedFiles.index(self)
|
||||
logging.info("[Thread No.%d] %s\n" % (fnum, message))
|
||||
|
||||
logging.info("[%d] %s\n" % (fnum, message))
|
||||
def Pieces(self):
|
||||
startPiece, _ = self.pieceFromOffset(1)
|
||||
endPiece, _ = self.pieceFromOffset(self.size - 1)
|
||||
return startPiece, endPiece
|
||||
|
||||
def SetPriority(self, priority):
|
||||
self.tfs.setPriority(self.index, priority)
|
||||
|
||||
def readOffset(self):
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
return libc.lseek64(self.filePtr, 0, os.SEEK_CUR)
|
||||
else:
|
||||
return self.filePtr.seek(0, os.SEEK_CUR)
|
||||
|
||||
return self.filePtr.seek(0, io.SEEK_CUR)
|
||||
def havePiece(self, piece):
|
||||
return self.tfs.handle.have_piece(piece)
|
||||
|
||||
def pieceFromOffset(self, offset):
|
||||
piece = int((self.offset + offset) / self.piece_length)
|
||||
pieceOffset = int((self.offset + offset) % self.piece_length)
|
||||
return piece, pieceOffset
|
||||
|
||||
def waitForPiece(self, piece):
|
||||
def set_deadlines(p):
|
||||
next_piece = p + 1
|
||||
BUF_SIZE = 2 # Лучшее враг хорошего
|
||||
BUF_SIZE = 2 # Лучшее враг хорошего
|
||||
for i in range(BUF_SIZE):
|
||||
if (next_piece + i < self.endPiece and
|
||||
not self.pieces_deadlined[(next_piece + i) - self.startPiece] and not self.havePiece(
|
||||
next_piece + i)):
|
||||
if (next_piece + i < self.endPiece and
|
||||
not self.pieces_deadlined[(next_piece + i) - self.startPiece] and not self.havePiece(next_piece + i)):
|
||||
self.tfs.handle.set_piece_deadline(next_piece + i, 70 + (20 * i))
|
||||
self.pieces_deadlined[(next_piece + i) - self.startPiece] = True
|
||||
|
||||
if not self.havePiece(piece):
|
||||
self.log('Waiting for piece %d' % (piece,))
|
||||
self.tfs.handle.set_piece_deadline(piece, 50)
|
||||
|
@ -237,22 +201,17 @@ class TorrentFile(object):
|
|||
return False
|
||||
time.sleep(0.1)
|
||||
if not isinstance(self.pdl_thread, threading.Thread) or not self.pdl_thread.is_alive():
|
||||
self.pdl_thread = threading.Thread(target=set_deadlines, args=(piece,))
|
||||
self.pdl_thread = threading.Thread(target = set_deadlines, args = (piece,))
|
||||
self.pdl_thread.start()
|
||||
return True
|
||||
|
||||
def Close(self):
|
||||
if self.closed: return
|
||||
self.log('Closing %s...' % (self.name,))
|
||||
self.tfs.removeOpenedFile(self)
|
||||
self.closed = True
|
||||
if self.filePtr is not None:
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
libc.close(self.filePtr)
|
||||
else:
|
||||
self.filePtr.close()
|
||||
self.filePtr.close()
|
||||
self.filePtr = None
|
||||
|
||||
def ShowPieces(self):
|
||||
pieces = self.tfs.handle.status().pieces
|
||||
str_ = ''
|
||||
|
@ -262,7 +221,6 @@ class TorrentFile(object):
|
|||
else:
|
||||
str_ += "#"
|
||||
self.log(str_)
|
||||
|
||||
def Read(self, buf):
|
||||
filePtr = self.__fileptr_()
|
||||
if filePtr is None:
|
||||
|
@ -273,64 +231,55 @@ class TorrentFile(object):
|
|||
readOffset = self.readOffset()
|
||||
startPiece, _ = self.pieceFromOffset(readOffset)
|
||||
endPiece, _ = self.pieceFromOffset(readOffset + toRead)
|
||||
for i in range(startPiece, endPiece + 1):
|
||||
for i in range(startPiece, endPiece + 1):
|
||||
if not self.waitForPiece(i):
|
||||
raise IOError
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
read = libc.read(self.filePtr, addressof(buf), toRead)
|
||||
else:
|
||||
read = filePtr.readinto(buf)
|
||||
read = filePtr.readinto(buf)
|
||||
return read
|
||||
|
||||
def Seek(self, offset, whence):
|
||||
filePtr = self.__fileptr_()
|
||||
if filePtr is None: return
|
||||
if whence == os.SEEK_END:
|
||||
offset = self.size - offset
|
||||
whence = os.SEEK_SET
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
newOffset = libc.lseek64(self.filePtr, offset, whence)
|
||||
else:
|
||||
newOffset = filePtr.seek(offset, whence)
|
||||
newOffset = filePtr.seek(offset, whence)
|
||||
self.log('Seeking to %d/%d' % (newOffset, self.size))
|
||||
return newOffset
|
||||
|
||||
def IsComplete(self):
|
||||
return self.downloaded == self.size
|
||||
|
||||
|
||||
#######################################################################################
|
||||
|
||||
class TorrentFS(object):
|
||||
handle = None
|
||||
info = None
|
||||
priorities = list()
|
||||
openedFiles = list()
|
||||
lastOpenedFile = None
|
||||
shuttingDown = False
|
||||
fileCounter = int()
|
||||
progresses = list()
|
||||
save_path = None
|
||||
handle = None
|
||||
info = None
|
||||
priorities = list()
|
||||
openedFiles = list()
|
||||
lastOpenedFile = None
|
||||
shuttingDown = False
|
||||
fileCounter = int()
|
||||
progresses = list()
|
||||
save_path = None
|
||||
|
||||
def __init__(self, root, handle):
|
||||
def __init__(self, root, handle, startIndex):
|
||||
self.root = root
|
||||
self.handle = handle
|
||||
self.waitForMetadata()
|
||||
self.save_path = localize_path(self.root.torrentParams['save_path'])
|
||||
self.priorities = list(self.handle.file_priorities())
|
||||
self.files = {}
|
||||
num_files = self.info.num_files()
|
||||
for i in range(num_files):
|
||||
self.setPriority(i, 0)
|
||||
file_ = self.__file_at_(startIndex)
|
||||
self.files = {file_.name: file_}
|
||||
#self.handle.set_piece_deadline(self.files[startIndex].startPiece, 50)
|
||||
if startIndex < 0:
|
||||
logging.info('No -file-index specified, downloading will be paused until any file is requested')
|
||||
|
||||
def file(self, index):
|
||||
for name in list(self.files.keys()):
|
||||
if self.files[name].index == index:
|
||||
return self.files[name]
|
||||
file_ = self.__file_at_(index)
|
||||
self.files[file_.name] = file_
|
||||
self.setPriority(index, 1)
|
||||
return file_
|
||||
num_files = self.info.num_files()
|
||||
|
||||
for i in range(num_files):
|
||||
if startIndex == i:
|
||||
self.setPriority(i, 1)
|
||||
else:
|
||||
self.setPriority(i, 0)
|
||||
|
||||
def Shutdown(self):
|
||||
self.shuttingDown = True
|
||||
|
@ -338,27 +287,22 @@ class TorrentFS(object):
|
|||
logging.info('Closing %d opened file(s)' % (len(self.openedFiles),))
|
||||
for f in self.openedFiles:
|
||||
f.Close()
|
||||
|
||||
def addOpenedFile(self, file_):
|
||||
self.openedFiles.append(file_)
|
||||
|
||||
self.openedFiles.append(file_)
|
||||
def setPriority(self, index, priority):
|
||||
if self.priorities[index] != priority:
|
||||
logging.info('Setting %s priority to %d' % (self.info.file_at(index).path, priority))
|
||||
self.priorities[index] = priority
|
||||
self.handle.file_priority(index, priority)
|
||||
|
||||
def findOpenedFile(self, file):
|
||||
for i, f in enumerate(self.openedFiles):
|
||||
if f == file:
|
||||
return i
|
||||
return -1
|
||||
|
||||
def removeOpenedFile(self, file):
|
||||
pos = self.findOpenedFile(file)
|
||||
if pos >= 0:
|
||||
del self.openedFiles[pos]
|
||||
|
||||
def waitForMetadata(self):
|
||||
if not self.handle.status().has_metadata:
|
||||
time.sleep(0.1)
|
||||
|
@ -366,24 +310,19 @@ class TorrentFS(object):
|
|||
self.info = self.handle.torrent_file()
|
||||
except:
|
||||
self.info = self.handle.get_torrent_info()
|
||||
|
||||
def HasTorrentInfo(self):
|
||||
return self.info is not None
|
||||
|
||||
def LoadFileProgress(self):
|
||||
self.progresses = self.handle.file_progress()
|
||||
for k in list(self.files.keys()):
|
||||
for k in self.files.keys():
|
||||
self.files[k].downloaded = self.getFileDownloadedBytes(self.files[k].index)
|
||||
if self.files[k].size > 0: self.files[k].progress = float(self.files[k].downloaded) / float(
|
||||
self.files[k].size)
|
||||
|
||||
if self.files[k].size > 0: self.files[k].progress = float(self.files[k].downloaded) / float(self.files[k].size)
|
||||
def getFileDownloadedBytes(self, i):
|
||||
try:
|
||||
bytes_ = self.progresses[i]
|
||||
except IndexError:
|
||||
bytes_ = 0
|
||||
return bytes_
|
||||
|
||||
def __files_(self):
|
||||
info = self.info
|
||||
files_ = []
|
||||
|
@ -391,33 +330,29 @@ class TorrentFS(object):
|
|||
file_ = self.__file_at_(i)
|
||||
file_.downloaded = self.getFileDownloadedBytes(i)
|
||||
if file_.size > 0:
|
||||
file_.progress = float(file_.downloaded) / float(file_.size)
|
||||
file_.progress = float(file_.downloaded)/float(file_.size)
|
||||
files_.append(file_)
|
||||
return files_
|
||||
|
||||
def __file_at_(self, index):
|
||||
info = self.info
|
||||
fileEntry = info.file_at(index)
|
||||
fe_path = fileEntry.path
|
||||
path = os.path.abspath(os.path.join(self.save_path, localize_path(fe_path)))
|
||||
return TorrentFile(
|
||||
self,
|
||||
fileEntry,
|
||||
path,
|
||||
index
|
||||
)
|
||||
|
||||
self,
|
||||
fileEntry,
|
||||
path,
|
||||
index
|
||||
)
|
||||
def FileByName(self, name):
|
||||
for i, f in enumerate(self.info.files()):
|
||||
if f.path == name:
|
||||
return self.__file_at_(i)
|
||||
raise IOError
|
||||
|
||||
def Open(self, name):
|
||||
if self.shuttingDown or not self.HasTorrentInfo():
|
||||
raise IOError
|
||||
return self.OpenFile(name)
|
||||
|
||||
def checkPriorities(self):
|
||||
for index, priority in enumerate(self.priorities):
|
||||
if priority == 0:
|
||||
|
@ -429,7 +364,6 @@ class TorrentFS(object):
|
|||
break
|
||||
if not found:
|
||||
self.setPriority(index, 0)
|
||||
|
||||
def OpenFile(self, name):
|
||||
try:
|
||||
tf = self.FileByName(name)
|
||||
|
@ -446,69 +380,53 @@ class TorrentFS(object):
|
|||
self.files[tf.name] = tf
|
||||
self.checkPriorities()
|
||||
return tf
|
||||
|
||||
|
||||
|
||||
#############################################################
|
||||
|
||||
class ThreadingHTTPServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
|
||||
class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
|
||||
def handle_error(self, *args, **kwargs):
|
||||
'''Work around the dreaded "Broken Pipe" and other tracebacks'''
|
||||
if not AVOID_HTTP_SERVER_EXCEPTION_OUTPUT:
|
||||
http.server.HTTPServer.handle_error(self, *args, **kwargs)
|
||||
|
||||
BaseHTTPServer.HTTPServer.handle_error(self, *args, **kwargs)
|
||||
|
||||
def HttpHandlerFactory():
|
||||
class HttpHandler(http.server.BaseHTTPRequestHandler):
|
||||
class HttpHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
# print ('---Headers---\n%s\n' % (self.headers,))
|
||||
# print ('---Request---\n%s\n' % (self.path,))
|
||||
#print ('---Headers---\n%s\n' % (self.headers,))
|
||||
#print ('---Request---\n%s\n' % (self.path,))
|
||||
if self.path.startswith('/files/'):
|
||||
self.filesHandler()
|
||||
else:
|
||||
self.send_error(404, 'Not found')
|
||||
self.end_headers()
|
||||
|
||||
def filesHandler(self):
|
||||
f, start_range, end_range = self.send_head()
|
||||
if not f.closed:
|
||||
f.Seek(start_range, 0)
|
||||
chunk = f.piece_length
|
||||
total = 0
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
buf = create_string_buffer(chunk)
|
||||
else:
|
||||
buf = bytearray(chunk)
|
||||
while chunk > 0 and not self.server.root_obj.forceShutdown:
|
||||
buf = bytearray(chunk)
|
||||
while chunk > 0:
|
||||
if start_range + chunk > end_range:
|
||||
chunk = end_range - start_range
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
buf = create_string_buffer(chunk)
|
||||
else:
|
||||
buf = bytearray(chunk)
|
||||
buf = bytearray(chunk)
|
||||
try:
|
||||
if f.Read(buf) < 1: break
|
||||
while self.server.root_obj.pause and not self.server.root_obj.forceShutdown:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
if os.getenv('ANDROID_ROOT'):
|
||||
self.wfile.write(buf.raw)
|
||||
else:
|
||||
self.wfile.write(buf)
|
||||
self.wfile.write(buf)
|
||||
except:
|
||||
break
|
||||
total += chunk
|
||||
start_range += chunk
|
||||
f.Close()
|
||||
|
||||
def send_head(self):
|
||||
fname = urllib.parse.unquote(self.path.lstrip('/files/'))
|
||||
fname = urllib.unquote(self.path.lstrip('/files/'))
|
||||
try:
|
||||
f = self.server.root_obj.TorrentFS.Open(fname)
|
||||
f = self.server.root_obj.TorrentFS.Open(fname)
|
||||
except IOError:
|
||||
self.send_error(404, "File not found")
|
||||
return (None, 0, 0)
|
||||
_, ext = os.path.splitext(fname)
|
||||
ctype = (ext != '' and ext in list(VIDEO_EXTS.keys())) and VIDEO_EXTS[ext] or 'application/octet-stream'
|
||||
ctype = (ext != '' and ext in VIDEO_EXTS.keys())and VIDEO_EXTS[ext] or 'application/octet-stream'
|
||||
if "Range" in self.headers:
|
||||
self.send_response(206, 'Partial Content')
|
||||
else:
|
||||
|
@ -535,32 +453,27 @@ def HttpHandlerFactory():
|
|||
self.send_header("Content-Length", end_range - start_range)
|
||||
self.send_header("Last-Modified", self.date_time_string(f.fileEntry.mtime))
|
||||
self.end_headers()
|
||||
# print "Sending Bytes ",start_range, " to ", end_range, "...\n"
|
||||
return f, start_range, end_range
|
||||
|
||||
#print "Sending Bytes ",start_range, " to ", end_range, "...\n"
|
||||
return (f, start_range, end_range)
|
||||
# Disable the access log
|
||||
def log_message(self, fmt, *args):
|
||||
def log_message(self, format, *args):
|
||||
return
|
||||
|
||||
return HttpHandler
|
||||
|
||||
|
||||
class Pyrrent2http(object):
|
||||
pause = False
|
||||
|
||||
def __init__(self, uri='', bindAddress='localhost:5001', downloadPath='.',
|
||||
idleTimeout=-1, keepComplete=False,
|
||||
keepIncomplete=False, keepFiles=False, showAllStats=False,
|
||||
showOverallProgress=False, showFilesProgress=False,
|
||||
showPiecesProgress=False, debugAlerts=False,
|
||||
exitOnFinish=False, resumeFile='', stateFile='',
|
||||
userAgent=USER_AGENT, dhtRouters='', trackers='',
|
||||
listenPort=6881, torrentConnectBoost=50, connectionSpeed=50,
|
||||
peerConnectTimeout=15, requestTimeout=20, maxDownloadRate=-1,
|
||||
maxUploadRate=-1, connectionsLimit=200, encryption=1,
|
||||
minReconnectTime=60, maxFailCount=3, noSparseFile=False,
|
||||
randomPort=False, enableScrape=False, enableDHT=True,
|
||||
enableLSD=True, enableUPNP=True, enableNATPMP=True, enableUTP=True, enableTCP=True, proxy=None):
|
||||
def __init__(self, uri = '', bindAddress = 'localhost:5001', downloadPath = '.',
|
||||
idleTimeout = -1, fileIndex = -1, keepComplete = False,
|
||||
keepIncomplete = False, keepFiles = False, showAllStats = False,
|
||||
showOverallProgress = False, showFilesProgress = False,
|
||||
showPiecesProgress = False, debugAlerts = False,
|
||||
exitOnFinish = False, resumeFile = '', stateFile = '',
|
||||
userAgent = USER_AGENT, dhtRouters = '', trackers = '',
|
||||
listenPort = 6881, torrentConnectBoost = 50, connectionSpeed = 50,
|
||||
peerConnectTimeout = 15, requestTimeout = 20, maxDownloadRate = -1,
|
||||
maxUploadRate = -1, connectionsLimit = 200, encryption = 1,
|
||||
minReconnectTime = 60, maxFailCount = 3, noSparseFile = False,
|
||||
randomPort = False, enableScrape = False, enableDHT = True,
|
||||
enableLSD = True, enableUPNP = True, enableNATPMP = True, enableUTP = True, enableTCP = True):
|
||||
self.torrentHandle = None
|
||||
self.forceShutdown = False
|
||||
self.session = None
|
||||
|
@ -571,6 +484,7 @@ class Pyrrent2http(object):
|
|||
self.config.bindAddress = bindAddress
|
||||
self.config.downloadPath = downloadPath
|
||||
self.config.idleTimeout = idleTimeout
|
||||
self.config.fileIndex = fileIndex
|
||||
self.config.keepComplete = keepComplete
|
||||
self.config.keepIncomplete = keepIncomplete
|
||||
self.config.keepFiles = keepFiles
|
||||
|
@ -605,7 +519,6 @@ class Pyrrent2http(object):
|
|||
self.config.enableNATPMP = enableNATPMP
|
||||
self.config.enableUTP = enableUTP
|
||||
self.config.enableTCP = enableTCP
|
||||
self.config.proxy = proxy
|
||||
if self.config.uri == '':
|
||||
raise Exception("uri is empty string")
|
||||
if self.config.uri.startswith('magnet:'):
|
||||
|
@ -613,7 +526,7 @@ class Pyrrent2http(object):
|
|||
if self.config.resumeFile is None: self.config.resumeFile = ''
|
||||
if self.config.resumeFile != '' and not self.config.keepFiles:
|
||||
raise Exception('Не должно быть файла восстановления, если мы не храним файлы')
|
||||
|
||||
|
||||
def buildTorrentParams(self, uri):
|
||||
try:
|
||||
absPath = uri2path(uri)
|
||||
|
@ -627,7 +540,7 @@ class Pyrrent2http(object):
|
|||
torrentParams['ti'] = torrent_info
|
||||
logging.info('Setting save path: %s' % (encode_msg(self.config.downloadPath),))
|
||||
torrentParams['save_path'] = self.config.downloadPath
|
||||
|
||||
|
||||
if os.path.exists(self.config.resumeFile):
|
||||
logging.info('Loading resume file: %s' % (encode_msg(self.config.resumeFile),))
|
||||
try:
|
||||
|
@ -641,7 +554,7 @@ class Pyrrent2http(object):
|
|||
logging.info('Disabling sparse file support...')
|
||||
torrentParams["storage_mode"] = lt.storage_mode_t.storage_mode_allocate
|
||||
return torrentParams
|
||||
|
||||
|
||||
def addTorrent(self):
|
||||
self.torrentParams = self.buildTorrentParams(self.config.uri)
|
||||
logging.info('Adding torrent')
|
||||
|
@ -654,12 +567,12 @@ class Pyrrent2http(object):
|
|||
#
|
||||
self.torrentHandle.set_max_connections(60)
|
||||
if self.config.trackers != '':
|
||||
trackers = self.config.trackers.split(',')
|
||||
startTier = 256 - len(trackers)
|
||||
trackers = self.config.trackers.split(',')
|
||||
startTier = 256 - len(trackers)
|
||||
for n in range(len(trackers)):
|
||||
tracker = trackers[n].strip()
|
||||
logging.info('Adding tracker: %s' % (tracker,))
|
||||
self.torrentHandle.add_tracker({'url': tracker})
|
||||
logging.info('Adding tracker: %s' % (tracker,) )
|
||||
self.torrentHandle.add_tracker(tracker, startTier + n)
if self.config.enableScrape:
logging.info('Sending scrape request to tracker')
self.torrentHandle.scrape_tracker()

@ -669,25 +582,31 @@ class Pyrrent2http(object):
info = self.torrentHandle.get_torrent_info()
logging.info('Downloading torrent: %s' % (info.name(),))
try:
self.TorrentFS = TorrentFS(self, self.torrentHandle)
self.TorrentFS = TorrentFS(self, self.torrentHandle, self.config.fileIndex)
except Exception as e:
logging.error(e.args)
name = self.TorrentFS.info.name()
self.torrent_name = name

def startHTTP(self):
#def http_server_loop(listener, alive):
# while alive.is_set():
# print('+++handle request+++')
# listener.handle_request()
# listener.server_close()
#self.main_alive = threading.Event()
#self.main_alive.set()
logging.info('Starting HTTP Server...')
handler = HttpHandlerFactory()
handler.protocol_version = 'HTTP/1.1'
logging.info('Listening HTTP on %s...\n' % (self.config.bindAddress,))
host, strport = self.config.bindAddress.split(':')
if len(strport) > 0:
srv_port = int(strport)
self.httpListener = ThreadingHTTPServer((host, srv_port), handler)
self.httpListener.root_obj = self
self.listener_thread = threading.Thread(target=self.httpListener.serve_forever)
self.listener_thread.start()

#self.httpListener.timeout = 0.5
#thread = threading.Thread(target = http_server_loop, args = (self.httpListener, self.main_alive))
thread = threading.Thread(target = self.httpListener.serve_forever)
thread.start()
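# On master the listener is a ThreadingHTTPServer (http.server, Python 3.7+) driven by
# serve_forever() on a stored listener_thread; the sandbox1 side spawns an anonymous
# thread around serve_forever() instead. Either way the HTTP serving stays off the main loop.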

def startServices(self):
if self.config.enableDHT:
logging.info('Starting DHT...')

@ -701,19 +620,19 @@ class Pyrrent2http(object):
if self.config.enableNATPMP:
logging.info('Starting NATPMP...')
self.session.start_natpmp()

def startSession(self):
logging.info('Starting session...')
self.session = lt.session(lt.fingerprint('LT', lt.version_major, lt.version_minor, 0, 0),
flags=int(lt.session_flags_t.add_default_plugins))
alertMask = (lt.alert.category_t.error_notification |
lt.alert.category_t.storage_notification |
flags=int(lt.session_flags_t.add_default_plugins))
alertMask = (lt.alert.category_t.error_notification |
lt.alert.category_t.storage_notification |
lt.alert.category_t.tracker_notification |
lt.alert.category_t.status_notification)
if self.config.debugAlerts:
alertMask |= lt.alert.category_t.debug_notification
self.session.set_alert_mask(alertMask)

settings = self.session.get_settings()
settings["request_timeout"] = self.config.requestTimeout
settings["peer_connect_timeout"] = self.config.peerConnectTimeout

@ -728,22 +647,8 @@ class Pyrrent2http(object):
settings["rate_limit_ip_overhead"] = True
settings["min_announce_interval"] = 60
settings["tracker_backoff"] = 0
### Unclear how to force the proxy to be used only for connecting to the tracker?
if self.config.proxy is not None:
ps = lt.proxy_settings()
# peer_ps = lt.proxy_settings()
# peer_ps.type = lt.proxy_type.none
ps.hostname = self.config.proxy['host']
ps.port = self.config.proxy['port']
ps.type = lt.proxy_type.socks5
# self.session.set_peer_proxy(peer_ps)
self.session.set_proxy(ps)
settings['force_proxy'] = False
settings['proxy_peer_connections'] = False
settings['anonymous_mode'] = False
settings['proxy_tracker_connections'] = True
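# Intent of the block above (per the comment at its top): route only tracker announces
# through the SOCKS5 proxy. proxy_tracker_connections=True combined with
# proxy_peer_connections=False and force_proxy=False leaves peer traffic direct; these
# setting keys exist in libtorrent 1.1+, which is assumed to be the target version here.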
self.session.set_settings(settings)

if self.config.stateFile != '':
logging.info('Loading session state from %s' % (self.config.stateFile,))
try:

@ -754,7 +659,7 @@ class Pyrrent2http(object):
logging.error(strerror)
else:
self.session.load_state(lt.bdecode(bytes__))

rand = SystemRandom(time.time())
portLower = self.config.listenPort
if self.config.randomPort:

@ -766,7 +671,7 @@ class Pyrrent2http(object):
strerror = e.args
logging.error(strerror)
raise

settings = self.session.get_settings()
if self.config.userAgent != '':
settings['user_agent'] = self.config.userAgent

@ -781,7 +686,7 @@ class Pyrrent2http(object):
settings['enable_incoming_utp'] = self.config.enableUTP
settings['enable_outgoing_utp'] = self.config.enableUTP
self.session.set_settings(settings)

if self.config.dhtRouters != '':
routers = self.config.dhtRouters.split(',')
for router in routers:

@ -807,74 +712,101 @@ class Pyrrent2http(object):
self.session.set_pe_settings(encryptionSettings)
except Exception as e:
logging.info('Encryption not supported: %s' % (e.args,))

def Status(self):
info = self.TorrentFS.info
#tstatus = self.torrentHandle.status()
#tstatus = self.TorrentFS.handle.status()
tstatus = self.torrentHandle.status()

status = {
'name': self.torrent_name,
'state': int(tstatus.state),
'state_str': str(tstatus.state),
'error': tstatus.error,
'progress': tstatus.progress,
'download_rate': tstatus.download_rate // 1024,
'upload_rate': tstatus.upload_rate // 1024,
'total_download': tstatus.total_download,
'total_upload': tstatus.total_upload,
'num_peers': tstatus.num_peers,
'num_seeds': tstatus.num_seeds,
'total_seeds': tstatus.num_complete,
'total_peers': tstatus.num_incomplete
}
'name' : info.name(),
'state' : int(tstatus.state),
'state_str' : str(tstatus.state),
'error' : tstatus.error,
'progress' : tstatus.progress,
'download_rate' : tstatus.download_rate / 1024,
'upload_rate' : tstatus.upload_rate / 1024,
'total_download' : tstatus.total_download,
'total_upload' : tstatus.total_upload,
'num_peers' : tstatus.num_peers,
'num_seeds' : tstatus.num_seeds,
'total_seeds' : tstatus.num_complete,
'total_peers' : tstatus.num_incomplete
}
return status
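# Both dict literals above build the same Status payload, one per branch: master names the
# torrent via the cached self.torrent_name and uses // (floor division, Python 3), while
# sandbox1 reads info.name() and divides with /; rates are reported in KiB/s either way.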

def Ls(self, index):
fi = {}
def Ls(self):
retFiles = {'files': []}
if self.TorrentFS.HasTorrentInfo():
x = [n for n in list(self.TorrentFS.files.keys()) if self.TorrentFS.files[n].index == index]
name = x[0]
files = self.TorrentFS.files
Url = 'http://' + self.config.bindAddress + '/files/' + urllib.parse.quote(name)
fi = {
'index': files[name].index,
'name': files[name].unicode_name,
'media_type': files[name].media_type,
'size': files[name].size,
'offset': files[name].offset,
'download': files[name].downloaded,
'progress': files[name].progress,
'save_path': files[name].save_path,
'url': Url
}
return fi

for name in files.keys():
Url = 'http://' + self.config.bindAddress + '/files/' + urllib.quote(name)
fi = {
'index': files[name].index,
'name': files[name].unicode_name,
'media_type': files[name].media_type,
'size': files[name].size,
'offset': files[name].offset,
'download': files[name].downloaded,
'progress': files[name].progress,
'save_path': files[name].save_path,
'url': Url
}
retFiles['files'].append(fi)
return retFiles
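# Ls() is the other visible API change: the master signature Ls(self, index) returns a
# single file record selected by its index, while sandbox1's Ls(self) walks every file and
# returns them all under 'files'. Both expose each file through a
# http://<bindAddress>/files/<quoted name> URL.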
def Peers(self):
peers = {'peers': []}
for peer in self.torrentHandle.get_peer_info():
if peer.flags & peer.connecting or peer.flags & peer.handshake:
continue
pi = {
'Ip': peer.ip,
'Flags': peer.flags,
'Source': peer.source,
'UpSpeed': peer.up_speed // 1024,
'DownSpeed': peer.down_speed // 1024,
'TotalDownload': peer.total_download,
'TotalUpload': peer.total_upload,
'Country': peer.country,
'Client': peer.client
}
'Ip': peer.ip,
'Flags': peer.flags,
'Source': peer.source,
'UpSpeed': peer.up_speed/1024,
'DownSpeed': peer.down_speed/1024,
'TotalDownload': peer.total_download,
'TotalUpload': peer.total_upload,
'Country': peer.country,
'Client': peer.client
}
peers['peers'].append(pi)
return peers

'''def stats(self):
status = self.torrentHandle.status()
dhtStatusStr = ''
if not status.has_metadata:
return
if self.config.showAllStats or self.config.showOverallProgress:
sessionStatus = self.session.status()
if self.session.is_dht_running():
dhtStatusStr = ', DHT nodes: %d' % (sessionStatus.dht_nodes,)
errorStr = ''
if len(status.error) > 0:
errorStr = ' (%s)' % (status.error,)
logging.info('%s, overall progress: %.2f%%, dl/ul: %.3f/%.3f kbps, peers/seeds: %d/%d' % (
str(status.state),
status.progress * 100,
float(status.download_rate)/1024,
float(status.upload_rate)/1024,
status.num_peers,
status.num_seeds
) + dhtStatusStr + errorStr
)
if self.config.showFilesProgress or self.config.showAllStats:
str_ = 'Files: '
for i, f in enumerate(self.TorrentFS.files):
str_ += '[%d] %.2f%% ' % (i, f.Progress()*100)
logging.info(str_)
if (self.config.showPiecesProgress or self.config.showAllStats) and self.TorrentFS.lastOpenedFile != None:
self.TorrentFS.lastOpenedFile.ShowPieces()
'''
def consumeAlerts(self):
alerts = self.session.pop_alerts()
for alert in alerts:
if type(alert) == lt.save_resume_data_alert:
self.processSaveResumeDataAlert(alert)
break

def waitForAlert(self, alert_type, timeout):
start = time.time()
while True:

@ -885,8 +817,8 @@ class Pyrrent2http(object):
alert = self.session.pop_alert()
if type(alert) == alert_type:
return alert

def loop(self):
#self.statsTicker = Ticker(30)
self.saveResumeDataTicker = Ticker(5)
time_start = time.time()
while True:

@ -901,6 +833,8 @@ class Pyrrent2http(object):
if os.getppid() == 1:
self.forceShutdown = True
time_start = time.time()
#if self.statsTicker.true:
# self.stats()
if self.saveResumeDataTicker.true:
self.saveResumeData(True)
time.sleep(0.3)
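# In the main loop, os.getppid() == 1 means the parent process has exited and the engine
# was re-parented to init (a POSIX-only check), so it flags itself for shutdown; resume
# data is saved asynchronously whenever the Ticker(5) interval (presumably seconds) fires.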

@ -914,18 +848,16 @@ class Pyrrent2http(object):
except IOError as e:
strerror = e.args
logging.error(strerror)

def saveResumeData(self, async_=False):
def saveResumeData(self, async = False):
if not self.torrentHandle.status().need_save_resume or self.config.resumeFile == '':
return False
self.torrentHandle.save_resume_data(lt.save_resume_flags_t.flush_disk_cache)
if not async_:
if not async:
alert = self.waitForAlert(lt.save_resume_data_alert, 5)
if alert is None:
if alert == None:
return False
self.processSaveResumeDataAlert(alert)
return True
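# The async -> async_ rename above is required on master: async became a reserved keyword
# in Python 3.7 and can no longer be used as a parameter name. The "alert is None" check
# is the idiomatic replacement for "alert == None".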

def saveSessionState(self):
if self.config.stateFile == '':
return

@ -939,7 +871,6 @@ class Pyrrent2http(object):
except IOError as e:
strerror = e.args
logging.error(strerror)

def removeFiles(self, files):
for file in files:
try:

@ -955,7 +886,6 @@ class Pyrrent2http(object):
os.remove(path)
path_ = os.path.dirname(path)
path = path_[-1] == os.path.sep and path_[:-1] or path_

def filesToRemove(self):
files = []
if self.TorrentFS.HasTorrentInfo():

@ -966,7 +896,6 @@ class Pyrrent2http(object):
if os.path.exists(path):
files.append(path)
return files

def removeTorrent(self):
files = []
flag = 0

@ -982,13 +911,12 @@ class Pyrrent2http(object):
logging.info('Waiting for files to be removed')
self.waitForAlert(lt.torrent_deleted_alert, 15)
self.removeFiles(files)

def shutdown(self):
logging.info('Stopping pyrrent2http...')
self.forceShutdown = True
#self.statsTicker.stop()
self.saveResumeDataTicker.stop()
self.httpListener.shutdown()
self.httpListener.socket.close()
self.TorrentFS.Shutdown()
if self.session != None:
self.session.pause()

@ -998,5 +926,5 @@ class Pyrrent2http(object):
self.saveSessionState()
self.removeTorrent()
logging.info('Aborting the session')
self.session = None
del self.session
logging.info('Bye bye')

@ -1,22 +0,0 @@
class State:
QUEUED_FOR_CHECKING = 0
CHECKING_FILES = 1
DOWNLOADING_METADATA = 2
DOWNLOADING = 3
FINISHED = 4
SEEDING = 5
ALLOCATING = 6
CHECKING_RESUME_DATA = 7


class MediaType:
UNKNOWN = None
AUDIO = 'audio'
VIDEO = 'video'
SUBTITLES = 'subtitles'


class Encryption:
FORCED = 0
ENABLED = 1
DISABLED = 2

@ -1,41 +1,31 @@
import mimetypes
import os
import socket
import urllib.error
import urllib.parse
import urllib.parse
import urllib.request

import chardet
import sys
import xbmc

from .structs import MediaType
import socket
import chardet
import os
from . import MediaType
import mimetypes
import urlparse, urllib

SUBTITLES_FORMATS = ['.aqt', '.gsub', '.jss', '.sub', '.ttxt', '.pjs', '.psb', '.rt', '.smi', '.stl',
'.ssf', '.srt', '.ssa', '.ass', '.usf', '.idx']

'.ssf', '.srt', '.ssa', '.ass', '.usf', '.idx']

class Struct(dict):
def __getattr__(self, attr):
return self[attr]

def __setattr__(self, attr, value):
self[attr] = value


def uri2path(uri):
if uri[1] == ':' and sys.platform.startswith('win'):
uri = 'file:///' + uri
fileUri = urllib.parse.urlparse(uri)
fileUri = urlparse.urlparse(uri)
if fileUri.scheme == 'file':
uriPath = fileUri.path
if uriPath != '' and sys.platform.startswith('win') and (os.path.sep == uriPath[0] or uriPath[0] == '/'):
uriPath = uriPath[1:]
absPath = os.path.abspath(urllib.parse.unquote(uriPath))
absPath = os.path.abspath(urllib.unquote(uriPath))
return localize_path(absPath)
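# uri2path() accepts either a bare Windows path ('C:\...', which gets a file:/// prefix) or
# a file:// URI and returns a localized filesystem path. The paired lines are again the two
# branches: urllib.parse.urlparse/unquote on master (Python 3) versus urlparse.urlparse and
# urllib.unquote on sandbox1 (Python 2). For example, on a POSIX box
# uri2path('file:///tmp/a.torrent') would yield '/tmp/a.torrent' (illustrative value only,
# not taken from the source).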

def detect_media_type(name):
ext = os.path.splitext(name)[1]
if ext in SUBTITLES_FORMATS:

@ -51,31 +41,24 @@ def detect_media_type(name):
return MediaType.VIDEO
else:
return MediaType.UNKNOWN

def unicode_msg(tmpl, args):
msg = isinstance(tmpl, str) and tmpl or tmpl.decode(chardet.detect(tmpl)['encoding'])
msg = isinstance(tmpl, unicode) and tmpl or tmpl.decode(chardet.detect(tmpl)['encoding'])
arg_ = []
for a in args:
arg_.append(isinstance(a, str) and a or a.decode(chardet.detect(a)['encoding']))
arg_.append(isinstance(a, unicode) and a or a.decode(chardet.detect(a)['encoding']))
return msg % tuple(arg_)

def encode_msg(msg):
msg = isinstance(msg, str) and msg.encode(
sys.getfilesystemencoding() != 'ascii' and sys.getfilesystemencoding() or 'utf-8') or msg
msg = isinstance(msg, unicode) and msg.encode(True and sys.getfilesystemencoding() or 'utf-8') or msg
return msg


def localize_path(path):
if isinstance(path, bytes):
path = path.decode(chardet.detect(path)['encoding'])
# if not sys.platform.startswith('win'):
# path = path.encode(
# (sys.getfilesystemencoding() not in ('ascii', 'ANSI_X3.4-1968')) and sys.getfilesystemencoding() or 'utf-8')
if not isinstance(path, unicode): path = path.decode(chardet.detect(path)['encoding'])
if not sys.platform.startswith('win'):
path = path.encode(True and sys.getfilesystemencoding() or 'utf-8')
return path
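# Common pattern in the three helpers above: master treats str as already-decoded text and
# only runs chardet on byte strings, while sandbox1 checks for Python 2 unicode and
# re-encodes to the filesystem encoding (falling back to utf-8). chardet.detect() returns a
# dict whose 'encoding' key is the guessed codec, which is what both branches rely on.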

def can_bind(host, port):
"""
Checks we can bind to specified host and port

@ -111,6 +94,6 @@ def find_free_port(host):


def ensure_fs_encoding(string):
if isinstance(string, bytes):
if isinstance(string, str):
string = string.decode('utf-8')
return string.encode(sys.getfilesystemencoding() or 'utf-8')