"""
|
|
|
|
Kickass Torrent (Videos, Music, Files)
|
|
|
|
|
|
|
|
@website https://kickass.so
|
|
|
|
@provide-api no (nothing found)
|
|
|
|
|
|
|
|
@using-api no
|
|
|
|
@results HTML (using search portal)
|
|
|
|
@stable yes (HTML can change)
|
|
|
|
@parse url, title, content, seed, leech, magnetlink
|
|
|
|
"""
from urlparse import urljoin
from urllib import quote
from lxml import html
from operator import itemgetter
from searx.engines.xpath import extract_text
from searx.utils import get_torrent_size, convert_str_to_int
# engine dependent config
categories = ['videos', 'music', 'files']
paging = True
# search-url
url = 'https://kickass.cd/'
search_url = url + 'search/{search_term}/{pageno}/'
# specific xpath variables
magnet_xpath = './/a[@title="Torrent magnet link"]'
torrent_xpath = './/a[@title="Download torrent file"]'
content_xpath = './/span[@class="font11px lightgrey block"]'


# do search-request
def request(query, params):
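    # fill the quoted search term and the requested page number into the search URL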
    params['url'] = search_url.format(search_term=quote(query),
                                      pageno=params['pageno'])
    return params


# get response from search-request
def response(resp):
    results = []
    dom = html.fromstring(resp.text)
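    # rows of the result table; the first row is the header and is skipped below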
    search_res = dom.xpath('//table[@class="data"]//tr')
    # return empty array if nothing is found
    if not search_res:
        return []
    # parse results
    for result in search_res[1:]:
        link = result.xpath('.//a[@class="cellMainLink"]')[0]
        href = urljoin(url, link.attrib['href'])
        title = extract_text(link)
        content = extract_text(result.xpath(content_xpath))
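        # raw cell contents: seeders, leechers, file size and number of files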
        seed = extract_text(result.xpath('.//td[contains(@class, "green")]'))
        leech = extract_text(result.xpath('.//td[contains(@class, "red")]'))
        filesize_info = extract_text(result.xpath('.//td[contains(@class, "nobr")]'))
        files = extract_text(result.xpath('.//td[contains(@class, "center")][2]'))
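        # convert seeder and leecher counts to integers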
        seed = convert_str_to_int(seed)
        leech = convert_str_to_int(leech)
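        # the size cell holds a value and a unit, which get_torrent_size() combines into a byte count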
        filesize, filesize_multiplier = filesize_info.split()
        filesize = get_torrent_size(filesize, filesize_multiplier)
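        # the number of files is only kept when the cell contains a plain number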
        if files.isdigit():
            files = int(files)
        else:
            files = None
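        # magnet URI of the torrent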
        magnetlink = result.xpath(magnet_xpath)[0].attrib['href']
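        # link to the .torrent file; quote() percent-encodes characters outside the given safe set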
        torrentfile = result.xpath(torrent_xpath)[0].attrib['href']
        torrentfileurl = quote(torrentfile, safe="%/:=&?~#+!$,;'@()*")
        # append result
        results.append({'url': href,
                        'title': title,
                        'content': content,
                        'seed': seed,
                        'leech': leech,
                        'filesize': filesize,
                        'files': files,
                        'magnetlink': magnetlink,
                        'torrentfile': torrentfileurl,
                        'template': 'torrent.html'})
    # return results sorted by seeder
    return sorted(results, key=itemgetter('seed'), reverse=True)