first commit
This commit is contained in:
2
tests/unit/engines/__init__.py
Normal file
2
tests/unit/engines/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring,missing-class-docstring,invalid-name
|
||||
218
tests/unit/engines/test_command.py
Normal file
218
tests/unit/engines/test_command.py
Normal file
@@ -0,0 +1,218 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring,missing-class-docstring,invalid-name
|
||||
|
||||
from searx.engines import command as command_engine
|
||||
from searx.result_types import KeyValue
|
||||
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestCommandEngine(SearxTestCase):
    """Tests of the ``command`` engine: splitting of process output into
    key/value results and validation of user supplied queries."""

    def test_basic_seq_command_engine(self):
        """Whitespace delimited ``seq`` output becomes one result per line."""
        ls_engine = command_engine
        ls_engine.command = ['seq', '{{QUERY}}']
        ls_engine.delimiter = {'chars': ' ', 'keys': ['number']}
        expected_results = [
            KeyValue(kvmap={'number': 1}),
            KeyValue(kvmap={'number': 2}),
            KeyValue(kvmap={'number': 3}),
            KeyValue(kvmap={'number': 4}),
            KeyValue(kvmap={'number': 5}),
        ]
        results = ls_engine.search('5', {'pageno': 1})
        for i, expected in enumerate(expected_results):
            # the engine yields the raw (string) tokens read from stdout
            self.assertEqual(results[i].kvmap["number"], str(expected.kvmap["number"]))

    def test_delimiter_parsing(self):
        """Colon delimited log lines are mapped onto the configured keys and
        the result list is paginated via ``pageno``."""
        searx_logs = '''DEBUG:searx.webapp:static directory is /home/n/p/searx/searx/static
DEBUG:searx.webapp:templates directory is /home/n/p/searx/searx/templates
DEBUG:searx.engines:soundcloud engine: Starting background initialization
DEBUG:searx.engines:wolframalpha engine: Starting background initialization
DEBUG:searx.engines:locate engine: Starting background initialization
DEBUG:searx.engines:regex search in files engine: Starting background initialization
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): www.wolframalpha.com
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): soundcloud.com
DEBUG:searx.engines:find engine: Starting background initialization
DEBUG:searx.engines:pattern search in files engine: Starting background initialization
DEBUG:searx.webapp:starting webserver on 127.0.0.1:8888
WARNING:werkzeug: * Debugger is active!
INFO:werkzeug: * Debugger PIN: 299-578-362'''
        echo_engine = command_engine
        echo_engine.command = ['echo', searx_logs]
        echo_engine.delimiter = {'chars': ':', 'keys': ['level', 'component', 'message']}

        page1 = [
            {
                'component': 'searx.webapp',
                'message': 'static directory is /home/n/p/searx/searx/static',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.webapp',
                'message': 'templates directory is /home/n/p/searx/searx/templates',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.engines',
                'message': 'soundcloud engine: Starting background initialization',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.engines',
                'message': 'wolframalpha engine: Starting background initialization',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.engines',
                'message': 'locate engine: Starting background initialization',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.engines',
                'message': 'regex search in files engine: Starting background initialization',
                'level': 'DEBUG',
            },
            {
                'component': 'urllib3.connectionpool',
                'message': 'Starting new HTTPS connection (1): www.wolframalpha.com',
                'level': 'DEBUG',
            },
            {
                'component': 'urllib3.connectionpool',
                'message': 'Starting new HTTPS connection (1): soundcloud.com',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.engines',
                'message': 'find engine: Starting background initialization',
                'level': 'DEBUG',
            },
            {
                'component': 'searx.engines',
                'message': 'pattern search in files engine: Starting background initialization',
                'level': 'DEBUG',
            },
        ]
        page2 = [
            {
                'component': 'searx.webapp',
                'message': 'starting webserver on 127.0.0.1:8888',
                'level': 'DEBUG',
            },
            {
                'component': 'werkzeug',
                'message': ' * Debugger is active!',
                'level': 'WARNING',
            },
            {
                'component': 'werkzeug',
                'message': ' * Debugger PIN: 299-578-362',
                'level': 'INFO',
            },
        ]

        page1 = [KeyValue(kvmap=row) for row in page1]
        page2 = [KeyValue(kvmap=row) for row in page2]

        expected_results_by_page = [page1, page2]
        # NOTE(review): distinct loop variables on purpose — the original code
        # shadowed the outer index ``i`` inside the inner ``enumerate`` loop.
        for page_index, page in enumerate(expected_results_by_page):
            results = echo_engine.search('', {'pageno': page_index + 1})
            for i, expected in enumerate(page):
                self.assertEqual(expected.kvmap["message"], str(results[i].kvmap["message"]))

    def test_regex_parsing(self):
        """``parse_regex`` extracts named fields from each result chunk, the
        chunks being produced by splitting on ``result_separator``."""
        txt = '''commit 35f9a8c81d162a361b826bbcd4a1081a4fbe76a7
Author: Noémi Ványi <sitbackandwait@gmail.com>
Date: Tue Oct 15 11:31:33 2019 +0200

first interesting message

commit 6c3c206316153ccc422755512bceaa9ab0b14faa
Author: Noémi Ványi <sitbackandwait@gmail.com>
Date: Mon Oct 14 17:10:08 2019 +0200

second interesting message

commit d8594d2689b4d5e0d2f80250223886c3a1805ef5
Author: Noémi Ványi <sitbackandwait@gmail.com>
Date: Mon Oct 14 14:45:05 2019 +0200

third interesting message

commit '''
        git_log_engine = command_engine
        git_log_engine.command = ['echo', txt]
        git_log_engine.result_separator = '\n\ncommit '
        git_log_engine.delimiter = {}
        git_log_engine.parse_regex = {
            'commit': r'\w{40}',
            'author': r'[\w* ]* <\w*@?\w*\.?\w*>',
            'date': r'Date: .*',
            'message': r'\n\n.*$',
        }
        # init() compiles the regular expressions configured above
        git_log_engine.init({"command": git_log_engine.command, "parse_regex": git_log_engine.parse_regex})
        expected_results = [
            {
                'commit': '35f9a8c81d162a361b826bbcd4a1081a4fbe76a7',
                'author': ' Noémi Ványi <sitbackandwait@gmail.com>',
                'date': 'Date: Tue Oct 15 11:31:33 2019 +0200',
                'message': '\n\nfirst interesting message',
            },
            {
                'commit': '6c3c206316153ccc422755512bceaa9ab0b14faa',
                'author': ' Noémi Ványi <sitbackandwait@gmail.com>',
                'date': 'Date: Mon Oct 14 17:10:08 2019 +0200',
                'message': '\n\nsecond interesting message',
            },
            {
                'commit': 'd8594d2689b4d5e0d2f80250223886c3a1805ef5',
                'author': ' Noémi Ványi <sitbackandwait@gmail.com>',
                'date': 'Date: Mon Oct 14 14:45:05 2019 +0200',
                'message': '\n\nthird interesting message',
            },
        ]

        expected_results = [KeyValue(kvmap=kvmap) for kvmap in expected_results]
        results = git_log_engine.search('', {'pageno': 1})
        for i, expected in enumerate(expected_results):
            self.assertEqual(expected.kvmap["message"], str(results[i].kvmap["message"]))

    def test_working_dir_path_query(self):
        """With ``query_type = 'path'`` only paths inside the working
        directory are accepted; anything escaping it raises ValueError."""
        ls_engine = command_engine
        ls_engine.command = ['ls', '{{QUERY}}']
        ls_engine.result_separator = '\n'
        ls_engine.delimiter = {'chars': ' ', 'keys': ['file']}
        ls_engine.query_type = 'path'

        results = ls_engine.search('.', {'pageno': 1})
        self.assertTrue(len(results) != 0)

        forbidden_paths = [
            '..',
            '../..',
            './..',
            '~',
            '/var',
        ]
        for forbidden_path in forbidden_paths:
            self.assertRaises(ValueError, ls_engine.search, forbidden_path, {'pageno': 1})

    def test_enum_queries(self):
        """With ``query_type = 'enum'`` only queries listed in ``query_enum``
        are accepted; all others raise ValueError."""
        echo_engine = command_engine
        echo_engine.command = ['echo', '{{QUERY}}']
        echo_engine.query_type = 'enum'
        echo_engine.query_enum = ['i-am-allowed-to-say-this', 'and-that']

        for allowed in echo_engine.query_enum:
            results = echo_engine.search(allowed, {'pageno': 1})
            self.assertTrue(len(results) != 0)

        forbidden_queries = [
            'forbidden',
            'banned',
            'prohibited',
        ]
        for forbidden in forbidden_queries:
            self.assertRaises(ValueError, echo_engine.search, forbidden, {'pageno': 1})
|
||||
254
tests/unit/engines/test_json_engine.py
Normal file
254
tests/unit/engines/test_json_engine.py
Normal file
@@ -0,0 +1,254 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring
|
||||
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
|
||||
from searx.engines import json_engine
|
||||
from searx import logger
|
||||
|
||||
from tests import SearxTestCase
|
||||
|
||||
logger = logger.getChild('engines')
|
||||
|
||||
|
||||
class TestJsonEngine(SearxTestCase):  # pylint: disable=missing-class-docstring

    # plain JSON list of results (used when ``results_query`` is empty)
    json = """
    [
        {
            "title": "title0",
            "content": "content0",
            "url": "https://example.com/url0",
            "images": [
                {"thumb": "https://example.com/thumb00"},
                {"thumb": "https://example.com/thumb01"}
            ]
        },
        {
            "title": "<h1>title1</h1>",
            "content": "<h2>content1</h2>",
            "url": "https://example.com/url1",
            "images": [
                {"thumb": "https://example.com/thumb10"},
                {"thumb": "https://example.com/thumb11"}
            ]
        },
        {
            "title": "title2",
            "content": "content2",
            "url": 2,
            "images": [
                {"thumb": "thumb20"},
                {"thumb": 21}
            ]
        }
    ]
    """

    # results nested under data/results plus suggestions (used with ``results_query``)
    json_result_query = """
    {
        "data": {
            "results": [
                {
                    "title": "title0",
                    "content": "content0",
                    "url": "https://example.com/url0",
                    "images": [
                        {"thumb": "https://example.com/thumb00"},
                        {"thumb": "https://example.com/thumb01"}
                    ]
                },
                {
                    "title": "<h1>title1</h1>",
                    "content": "<h2>content1</h2>",
                    "url": "https://example.com/url1",
                    "images": [
                        {"thumb": "https://example.com/thumb10"},
                        {"thumb": "https://example.com/thumb11"}
                    ]
                },
                {
                    "title": "title2",
                    "content": "content2",
                    "url": 2,
                    "images": [
                        {"thumb": "thumb20"},
                        {"thumb": 21}
                    ]
                }
            ],
            "suggestions": [
                "suggestion0",
                "suggestion1"
            ]
        }
    }
    """

    def setUp(self):
        # NOTE(review): call super().setUp() for consistency with the other
        # engine test cases (see TestXpathEngine.setUp).
        super().setUp()
        json_engine.logger = logger.getChild('test_json_engine')

    def test_request(self):
        """``request`` fills the URL (and optionally the request body) from
        the query and page number."""
        # plain search_url, no paging
        json_engine.search_url = 'https://example.com/{query}'
        json_engine.categories = []
        json_engine.paging = False
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['language'] = 'all'
        dicto['pageno'] = 1
        params = json_engine.request(query, dicto)
        self.assertIn('url', params)
        self.assertEqual('https://example.com/test_query', params['url'])

        # search_url with a {pageno} placeholder
        json_engine.search_url = 'https://example.com/q={query}&p={pageno}'
        json_engine.paging = True
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['language'] = 'all'
        dicto['pageno'] = 1
        params = json_engine.request(query, dicto)
        self.assertIn('url', params)
        self.assertEqual('https://example.com/q=test_query&p=1', params['url'])

        # query / pageno substituted into a POST request body
        json_engine.search_url = 'https://example.com/'
        json_engine.paging = True
        json_engine.request_body = '{{"page": {pageno}, "query": "{query}"}}'
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['language'] = 'all'
        dicto['pageno'] = 1
        params = json_engine.request(query, dicto)
        self.assertIn('data', params)
        self.assertEqual('{"page": 1, "query": "test_query"}', params['data'])

    def test_response(self):
        """``response`` parses a top-level JSON list (no ``results_query``)."""
        json_engine.results_query = ''
        json_engine.url_query = 'url'
        json_engine.url_prefix = ''
        json_engine.title_query = 'title'
        json_engine.content_query = 'content'
        json_engine.thumbnail_query = 'images/thumb'
        json_engine.thumbnail_prefix = ''
        json_engine.title_html_to_text = False
        json_engine.content_html_to_text = False
        json_engine.categories = []

        # anything that is not a response object is rejected
        self.assertRaises(AttributeError, json_engine.response, None)
        self.assertRaises(AttributeError, json_engine.response, [])
        self.assertRaises(AttributeError, json_engine.response, '')
        self.assertRaises(AttributeError, json_engine.response, '[]')

        response = mock.Mock(text='{}', status_code=200)
        self.assertEqual(json_engine.response(response), [])

        response = mock.Mock(text=self.json, status_code=200)
        results = json_engine.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 3)
        self.assertEqual(results[0]['title'], 'title0')
        self.assertEqual(results[0]['url'], 'https://example.com/url0')
        self.assertEqual(results[0]['content'], 'content0')
        self.assertEqual(results[0]['thumbnail'], 'https://example.com/thumb00')
        # html_to_text disabled: markup is kept verbatim
        self.assertEqual(results[1]['title'], '<h1>title1</h1>')
        self.assertEqual(results[1]['url'], 'https://example.com/url1')
        self.assertEqual(results[1]['content'], '<h2>content1</h2>')
        self.assertEqual(results[1]['thumbnail'], 'https://example.com/thumb10')

        # with url/thumbnail prefixes (non-string values get prefixed too)
        json_engine.url_prefix = 'https://example.com/url'
        json_engine.thumbnail_query = 'images/1/thumb'
        json_engine.thumbnail_prefix = 'https://example.com/thumb'

        results = json_engine.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 3)
        self.assertEqual(results[2]['title'], 'title2')
        self.assertEqual(results[2]['url'], 'https://example.com/url2')
        self.assertEqual(results[2]['content'], 'content2')
        self.assertEqual(results[2]['thumbnail'], 'https://example.com/thumb21')
        self.assertFalse(results[0].get('is_onion', False))

        # the 'onions' category marks results as onion urls
        json_engine.categories = ['onions']
        results = json_engine.response(response)
        self.assertTrue(results[0]['is_onion'])

    def test_response_results_json(self):
        """``response`` with ``results_query``/``suggestion_query`` set."""
        json_engine.results_query = 'data/results'
        json_engine.url_query = 'url'
        json_engine.url_prefix = ''
        json_engine.title_query = 'title'
        json_engine.content_query = 'content'
        json_engine.thumbnail_query = 'images/1/thumb'
        json_engine.thumbnail_prefix = ''
        json_engine.title_html_to_text = True
        json_engine.content_html_to_text = True
        json_engine.categories = []

        self.assertRaises(AttributeError, json_engine.response, None)
        self.assertRaises(AttributeError, json_engine.response, [])
        self.assertRaises(AttributeError, json_engine.response, '')
        self.assertRaises(AttributeError, json_engine.response, '[]')

        response = mock.Mock(text='{}', status_code=200)
        self.assertEqual(json_engine.response(response), [])

        response = mock.Mock(text=self.json_result_query, status_code=200)
        results = json_engine.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 3)
        self.assertEqual(results[0]['title'], 'title0')
        self.assertEqual(results[0]['url'], 'https://example.com/url0')
        self.assertEqual(results[0]['content'], 'content0')
        self.assertEqual(results[0]['thumbnail'], 'https://example.com/thumb01')
        # html_to_text enabled: markup is stripped
        self.assertEqual(results[1]['title'], 'title1')
        self.assertEqual(results[1]['url'], 'https://example.com/url1')
        self.assertEqual(results[1]['content'], 'content1')
        self.assertEqual(results[1]['thumbnail'], 'https://example.com/thumb11')

        # prefixes plus suggestions: suggestions become one extra result
        json_engine.url_prefix = 'https://example.com/url'
        json_engine.thumbnail_query = 'images/1/thumb'
        json_engine.thumbnail_prefix = 'https://example.com/thumb'
        json_engine.suggestion_query = 'data/suggestions'

        results = json_engine.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 4)
        self.assertEqual(results[2]['title'], 'title2')
        self.assertEqual(results[2]['url'], 'https://example.com/url2')
        self.assertEqual(results[2]['content'], 'content2')
        self.assertEqual(results[2]['thumbnail'], 'https://example.com/thumb21')
        self.assertEqual(results[3]['suggestion'], ['suggestion0', 'suggestion1'])
        self.assertFalse(results[0].get('is_onion', False))

        # the 'onions' category marks results as onion urls
        json_engine.categories = ['onions']
        results = json_engine.response(response)
        self.assertTrue(results[0]['is_onion'])
|
||||
136
tests/unit/engines/test_xpath.py
Normal file
136
tests/unit/engines/test_xpath.py
Normal file
@@ -0,0 +1,136 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# pylint: disable=missing-module-docstring,missing-class-docstring,invalid-name
|
||||
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
|
||||
from searx.engines import xpath
|
||||
from searx import logger
|
||||
|
||||
from tests import SearxTestCase
|
||||
|
||||
logger = logger.getChild('engines')
|
||||
|
||||
|
||||
class TestXpathEngine(SearxTestCase):
    """Tests of the generic ``xpath`` engine: request building and result
    extraction with and without ``results_xpath``."""

    html = """
    <div>
        <div class="search_result">
            <a class="result" href="https://result1.com">Result 1</a>
            <p class="content">Content 1</p>
            <a class="cached" href="https://cachedresult1.com">Cache</a>
        </div>
        <div class="search_result">
            <a class="result" href="https://result2.com">Result 2</a>
            <p class="content">Content 2</p>
            <a class="cached" href="https://cachedresult2.com">Cache</a>
        </div>
    </div>
    """

    def setUp(self):
        super().setUp()
        xpath.logger = logger.getChild('test_xpath')

    def test_request(self):
        """``request`` substitutes {query} and {pageno} into ``search_url``."""
        xpath.search_url = 'https://url.com/{query}'
        xpath.categories = []
        xpath.paging = False
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['language'] = 'all'
        dicto['pageno'] = 1
        params = xpath.request(query, dicto)
        self.assertIn('url', params)
        self.assertEqual('https://url.com/test_query', params['url'])

        xpath.search_url = 'https://url.com/q={query}&p={pageno}'
        xpath.paging = True
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['language'] = 'all'
        dicto['pageno'] = 1
        params = xpath.request(query, dicto)
        self.assertIn('url', params)
        self.assertEqual('https://url.com/q=test_query&p=1', params['url'])

    def test_response(self):
        """``response`` extracts results with per-field xpaths (no results_xpath)."""
        # NOTE(review): reset module state explicitly so this test does not
        # depend on which sibling test ran before it.
        xpath.results_xpath = ''
        xpath.cached_xpath = None
        xpath.url_xpath = '//div[@class="search_result"]//a[@class="result"]/@href'
        xpath.title_xpath = '//div[@class="search_result"]//a[@class="result"]'
        xpath.content_xpath = '//div[@class="search_result"]//p[@class="content"]'

        # anything that is not a response object is rejected
        self.assertRaises(AttributeError, xpath.response, None)
        self.assertRaises(AttributeError, xpath.response, [])
        self.assertRaises(AttributeError, xpath.response, '')
        self.assertRaises(AttributeError, xpath.response, '[]')

        response = mock.Mock(text='<html></html>', status_code=200)
        self.assertEqual(xpath.response(response), [])

        response = mock.Mock(text=self.html, status_code=200)
        results = xpath.response(response)
        self.assertIsInstance(results, list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'Result 1')
        self.assertEqual(results[0]['url'], 'https://result1.com/')
        self.assertEqual(results[0]['content'], 'Content 1')
        self.assertEqual(results[1]['title'], 'Result 2')
        self.assertEqual(results[1]['url'], 'https://result2.com/')
        self.assertEqual(results[1]['content'], 'Content 2')

        # with cached urls, without results_xpath
        xpath.cached_xpath = '//div[@class="search_result"]//a[@class="cached"]/@href'
        results = xpath.response(response)
        self.assertIsInstance(results, list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
        self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
        self.assertFalse(results[0].get('is_onion', False))

        # the 'onions' category marks results as onion urls
        xpath.categories = ['onions']
        results = xpath.response(response)
        self.assertTrue(results[0]['is_onion'])

    def test_response_results_xpath(self):
        """``response`` extracts results relative to ``results_xpath``."""
        xpath.results_xpath = '//div[@class="search_result"]'
        xpath.url_xpath = './/a[@class="result"]/@href'
        xpath.title_xpath = './/a[@class="result"]'
        xpath.content_xpath = './/p[@class="content"]'
        xpath.cached_xpath = None
        xpath.categories = []

        self.assertRaises(AttributeError, xpath.response, None)
        self.assertRaises(AttributeError, xpath.response, [])
        self.assertRaises(AttributeError, xpath.response, '')
        self.assertRaises(AttributeError, xpath.response, '[]')

        response = mock.Mock(text='<html></html>', status_code=200)
        self.assertEqual(xpath.response(response), [])

        response = mock.Mock(text=self.html, status_code=200)
        results = xpath.response(response)
        self.assertIsInstance(results, list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'Result 1')
        self.assertEqual(results[0]['url'], 'https://result1.com/')
        self.assertEqual(results[0]['content'], 'Content 1')
        self.assertEqual(results[1]['title'], 'Result 2')
        self.assertEqual(results[1]['url'], 'https://result2.com/')
        self.assertEqual(results[1]['content'], 'Content 2')

        # with cached urls, with results_xpath
        xpath.cached_xpath = './/a[@class="cached"]/@href'
        results = xpath.response(response)
        self.assertIsInstance(results, list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
        self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
        self.assertFalse(results[0].get('is_onion', False))

        # the 'onions' category marks results as onion urls
        xpath.categories = ['onions']
        results = xpath.response(response)
        self.assertTrue(results[0]['is_onion'])
|
||||
Reference in New Issue
Block a user