Skip to content

Commit

Permalink
Added tests for middleware (#68)
Browse files Browse the repository at this point in the history
  • Loading branch information
mabelvj authored and fcanobrash committed Dec 3, 2019
1 parent 6799861 commit 68e1cc9
Show file tree
Hide file tree
Showing 4 changed files with 108 additions and 8 deletions.
11 changes: 6 additions & 5 deletions scrapy_autounit/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,10 +62,10 @@ def get_project_dir():


def get_middlewares(spider):
autounit_mw_path = 'scrapy_autounit.AutounitMiddleware'

full_list = build_component_list(
spider.settings.getwithbase('SPIDER_MIDDLEWARES'))
autounit_mw_path = list(filter(
lambda x: x.endswith('AutounitMiddleware'), full_list))[0]
start = full_list.index(autounit_mw_path)
mw_paths = [mw for mw in full_list[start:] if mw != autounit_mw_path]

Expand Down Expand Up @@ -339,7 +339,8 @@ def test(self):
k: v for k, v in spider.__dict__.items()
if k not in ('crawler', 'settings', 'start_urls')
}
self.assertEqual(spider_args_in, result_attr_in, 'Not equal!')
self.assertEqual(spider_args_in, result_attr_in,
'Input arguments not equal!')

for mw in middlewares:
if hasattr(mw, 'process_spider_input'):
Expand Down Expand Up @@ -391,6 +392,6 @@ def test(self):
if k not in ('crawler', 'settings', 'start_urls')
}

self.assertEqual(data['spider_args_out'], result_attr_out, 'Not equal!'
)
self.assertEqual(data['spider_args_out'], result_attr_out,
'Output arguments not equal!')
return test
1 change: 1 addition & 0 deletions tests/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from .test_middleware import DelAttrAutounitMiddleware, DelObjectsAutounitMiddleware
21 changes: 21 additions & 0 deletions tests/test_middleware.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from scrapy_autounit.middleware import AutounitMiddleware


class DelAttrAutounitMiddleware(AutounitMiddleware, object):
    """AutounitMiddleware variant that deletes the spider's ``test_attr``
    attribute before delegating, simulating a spider whose attributes
    change between fixture recording and playback.
    """

    def process_spider_output(self, response, result, spider):
        # Remove the attribute so the recorded spider state no longer
        # matches at playback time.
        if hasattr(spider, 'test_attr'):
            delattr(spider, 'test_attr')

        # Name the class explicitly in super(): ``super(self.__class__, self)``
        # recurses infinitely as soon as this class is subclassed, because
        # self.__class__ is always the most-derived type.
        return super(DelAttrAutounitMiddleware, self).process_spider_output(
            response, result, spider)


class DelObjectsAutounitMiddleware(AutounitMiddleware, object):
    """AutounitMiddleware variant that discards the callback's entire
    output, so the recorded fixture's data length no longer matches the
    callback's output length at playback time.
    """

    def process_spider_output(self, response, result, spider):
        # Drop everything the callback produced before recording.
        result = []
        # Name the class explicitly in super(): ``super(self.__class__, self)``
        # recurses infinitely as soon as this class is subclassed.
        return super(DelObjectsAutounitMiddleware, self).process_spider_output(
            response, result, spider)
83 changes: 80 additions & 3 deletions tests/test_record.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,10 @@ def __init__(self):
self._second_callback = None
self.init = None

@property
def template(self):
    # Spider source template consumed by _write_spider(); subclasses
    # override this property to inject a different middleware path
    # (see the ModifiedSpider classes in the tests below).
    return SPIDER_TEMPLATE

def __enter__(self):
    # Context-manager entry: no setup beyond __init__; cleanup happens
    # in the matching __exit__.
    return self

Expand Down Expand Up @@ -134,7 +138,7 @@ def second_callback(self, string):

def _write_spider(self):
with open(os.path.join(self.proj_dir, 'myspider.py'), 'w') as dest:
dest.write(SPIDER_TEMPLATE.format(
dest.write(self.template.format(
name=self._spider_name,
init=self.init,
start_requests=self._start_requests,
Expand Down Expand Up @@ -193,8 +197,7 @@ def test(self, test_verbosity=True):
)
check_process('Unit tests failed!', result)
err = result['stderr'].decode('utf-8')
tests_ran = re.search('Ran ([0-9]+) test', err).group(1)

tests_ran = int(re.search('Ran ([0-9]+) test', err).group(1) or '0')
if tests_ran == '0':
raise AssertionError(
'No tests run!\nProject dir:\n{}'.format(
Expand All @@ -206,6 +209,7 @@ def test(self, test_verbosity=True):


class TestRecording(unittest.TestCase):

def test_normal(self):
with CaseSpider() as spider:
spider.start_requests("yield scrapy.Request('data:text/plain,')")
Expand Down Expand Up @@ -446,3 +450,76 @@ def test_reference_preservation(self):
''')
spider.record()
spider.test()

def test_fixture_length(self):
    """A middleware that empties the callback's result must make fixture
    playback fail with a data-length mismatch."""
    class ModifiedSpider(CaseSpider):
        # Swap the recording middleware for one that drops all output.
        @property
        def template(self):
            return re.sub(
                r'scrapy_autounit\.AutounitMiddleware',
                'tests.DelObjectsAutounitMiddleware',
                super(ModifiedSpider, self).template)

    with ModifiedSpider() as spider:
        spider.set_init("""
            self.page_number = 0
            self.base_url = "http://www.example.com"
        """)
        spider.start_requests("""
            yield scrapy.Request('data:text/plain,', self.parse,
                                 meta={'test_attr': {'page_number': -1,
                                                     'base_url': ''}})
        """)
        spider.parse("""
            yield {'a': 5}
        """)
        spider.record()
        expected_message = (
            "AssertionError: The fixture's data length "
            "doesn't match with the current callback's "
            "output length."
        )
        # assertRaisesRegexp is a deprecated alias removed in Python 3.12;
        # use assertRaisesRegex.
        with self.assertRaisesRegex(AssertionError,
                                    re.escape(expected_message)):
            spider.test(test_verbosity=True)

def test_attribute_change_raises_error(self):
    """Deleting a spider attribute mid-crawl must make playback fail when
    the recorded output arguments are compared."""
    class ModifiedSpider(CaseSpider):
        # Swap the recording middleware for one that deletes test_attr.
        @property
        def template(self):
            return re.sub(
                r'scrapy_autounit\.AutounitMiddleware',
                'tests.DelAttrAutounitMiddleware',
                super(ModifiedSpider, self).template)

    with ModifiedSpider() as spider:
        spider.set_init("""self.page_number = 0""")
        spider.start_requests("""
            self.test_attr = 100  # attribute to be deleted
            yield scrapy.Request('data:text/plain,', self.parse)
        """)
        spider.parse("""
            self.page_number += 1
            yield {
                'page_number': self.page_number
            }
            if self.page_number < 3:
                yield scrapy.Request('data:text/plain,', dont_filter=True)
        """)
        spider.record()
        expected_message = "Output arguments not equal!"
        # assertRaisesRegexp is a deprecated alias removed in Python 3.12;
        # use assertRaisesRegex.
        with self.assertRaisesRegex(AssertionError,
                                    re.escape(expected_message)):
            spider.test(test_verbosity=True)

def test_missing_parse_method_raises_assertionerror(self):
    # A spider that defines start_requests but no parse callback cannot
    # be recorded: record() must raise AssertionError.
    with CaseSpider() as case:
        case.start_requests("""
            yield scrapy.Request('data:text/plain,')
        """)
        self.assertRaises(AssertionError, case.record)

def test_missing_start_requests_method_raises_assertionerror(self):
    # A spider that defines a parse callback but no start_requests cannot
    # be recorded: record() must raise AssertionError.
    with CaseSpider() as case:
        case.parse("""
            yield scrapy.Request('data:text/plain,')
        """)
        self.assertRaises(AssertionError, case.record)

0 comments on commit 68e1cc9

Please sign in to comment.