2021-01-13 11:31:25 +01:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
2015-05-02 15:45:17 +02:00
|
|
|
"""
|
|
|
|
Soundcloud (Music)
|
|
|
|
"""
|
2014-09-02 18:12:30 +02:00
|
|
|
|
2015-12-30 01:20:14 +01:00
|
|
|
import re
|
2013-10-17 21:21:23 +02:00
|
|
|
from json import loads
|
2016-11-30 18:43:03 +01:00
|
|
|
from lxml import html
|
2015-01-05 02:04:23 +01:00
|
|
|
from dateutil import parser
|
2020-08-06 17:42:46 +02:00
|
|
|
from urllib.parse import quote_plus, urlencode
|
2015-12-30 01:20:14 +01:00
|
|
|
from searx import logger
|
[httpx] replace searx.poolrequests by searx.network
settings.yml:
* outgoing.networks:
* can contains network definition
* propertiers: enable_http, verify, http2, max_connections, max_keepalive_connections,
keepalive_expiry, local_addresses, support_ipv4, support_ipv6, proxies, max_redirects, retries
* retries: 0 by default, number of times searx retries to send the HTTP request (using different IP & proxy each time)
* local_addresses can be "192.168.0.1/24" (it supports IPv6)
* support_ipv4 & support_ipv6: both True by default
see https://github.com/searx/searx/pull/1034
* each engine can define a "network" section:
* either a full network description
* either reference an existing network
* all HTTP requests of engine use the same HTTP configuration (it was not the case before, see proxy configuration in master)
2021-04-05 10:43:33 +02:00
|
|
|
from searx.network import get as http_get
|
2016-11-30 18:43:03 +01:00
|
|
|
|
# about
# Engine metadata displayed by searx (see searx/engines/__init__.py).
about = {
    "website": 'https://soundcloud.com',
    "wikidata_id": 'Q568769',
    "official_api_documentation": 'https://developers.soundcloud.com/',
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}
|
2013-10-17 21:21:23 +02:00
|
|
|
|
2014-09-02 18:12:30 +02:00
|
|
|
# engine dependent config
categories = ['music']  # engine is listed under the "music" category
paging = True  # supports the pageno parameter (see request())
|
2013-10-17 21:21:23 +02:00
|
|
|
|
2014-09-02 18:12:30 +02:00
|
|
|
# search-url
# missing attribute: user_id, app_version, app_locale
url = 'https://api-v2.soundcloud.com/'
# &limit=20 must stay in sync with the per-page offset used in request()
search_url = (
    url
    + 'search?{query}'
    + '&variant_ids='
    + '&facet=model'
    + '&limit=20'
    + '&offset={offset}'
    + '&linked_partitioning=1'
    + '&client_id={client_id}'
)
|
2014-01-20 02:31:20 +01:00
|
|
|
|
2015-01-05 02:04:23 +01:00
|
|
|
# <iframe> player snippet; {uri} is filled with the url-quoted
# track/playlist URI in response()
embedded_url = (
    '<iframe width="100%" height="166" '
    'scrolling="no" frameborder="no" '
    'data-src="https://w.soundcloud.com/player/?url={uri}"></iframe>'
)
|
|
|
|
|
# regex used to dig the guest client_id out of SoundCloud's asset scripts
cid_re = re.compile(r'client_id:"([^"]*)"', re.IGNORECASE | re.UNICODE)

# guest api key; filled in by init() at engine setup time
guest_client_id = ''
|
2016-11-30 18:43:03 +01:00
|
|
|
|
2013-10-17 21:21:23 +02:00
|
|
|
|
2015-12-30 01:20:14 +01:00
|
|
|
def get_client_id():
    """Scrape SoundCloud's homepage for the guest ``client_id``.

    The id is embedded in one of the ``/assets/`` javascript bundles
    referenced by the homepage; the scripts are probed newest-first.
    Returns the client_id as a string, or ``""`` when it cannot be
    extracted (a warning is logged in that case).
    """
    response = http_get("https://soundcloud.com")

    # guard clause: homepage unreachable -> give up early
    if not response.ok:
        logger.warning("Unable to fetch guest client_id from SoundCloud, check parser!")
        return ""

    tree = html.fromstring(response.content)
    # script_tags has been moved from /assets/app/ to /assets/ path.  I
    # found client_id in https://a-v2.sndcdn.com/assets/49-a0c01933-3.js
    script_tags = tree.xpath("//script[contains(@src, '/assets/')]")
    app_js_urls = [script_tag.get('src') for script_tag in script_tags if script_tag is not None]

    # extracts valid app_js urls from soundcloud.com content;
    # iterate in reverse so the newest bundles are tried first
    for app_js_url in app_js_urls[::-1]:
        # gets app_js and searches for the clientid
        response = http_get(app_js_url)
        if response.ok:
            cids = cid_re.search(response.content.decode())
            if cids is not None:
                # the pattern has exactly one group, so a successful
                # search always carries it -- no extra len() check needed
                return cids.group(1)

    logger.warning("Unable to fetch guest client_id from SoundCloud, check parser!")
    return ""
|
|
|
|
|
2016-07-15 19:49:23 +02:00
|
|
|
|
2019-09-23 17:14:32 +02:00
|
|
|
def init(engine_settings=None):
    """Engine setup hook: fetch and store the guest api key."""
    # api-key
    global guest_client_id
    guest_client_id = get_client_id()
|
2015-12-30 01:20:14 +01:00
|
|
|
|
|
|
|
|
2014-09-02 18:12:30 +02:00
|
|
|
# do search-request
def request(query, params):
    """Fill ``params['url']`` with the SoundCloud search request url."""
    # 20 results per page -- must match &limit=20 in search_url
    page_offset = 20 * (params['pageno'] - 1)

    params['url'] = search_url.format(
        query=urlencode({'q': query}),
        offset=page_offset,
        client_id=guest_client_id,
    )

    return params
|
|
|
|
|
|
|
|
|
2014-09-02 18:12:30 +02:00
|
|
|
# get response from search-request
def response(resp):
    """Parse the JSON api answer and return a list of result dicts.

    Only collection items of kind 'track' or 'playlist' are kept; each
    result carries url/title/content plus a publishedDate and an
    embedded player iframe.
    """
    results = []

    search_res = loads(resp.text)

    # parse results
    for result in search_res.get('collection', []):
        # .get() instead of [] -- the external api may return items
        # without a 'kind' key; skip anything that is not playable
        if result.get('kind') not in ('track', 'playlist'):
            continue

        title = result['title']
        # description can be None -> normalize to empty string
        content = result['description'] or ''
        publishedDate = parser.parse(result['last_modified'])
        uri = quote_plus(result['uri'])
        embedded = embedded_url.format(uri=uri)

        # append result
        results.append({'url': result['permalink_url'],
                        'title': title,
                        'publishedDate': publishedDate,
                        'embedded': embedded,
                        'content': content})

    # return results
    return results
|