TE-2647 Stop running pa11ycrawler (#20682)

This commit is contained in:
Jeremy Bowman
2019-05-24 11:02:49 -04:00
committed by GitHub
parent 4160cd1310
commit a9cd9de89f
13 changed files with 6 additions and 533 deletions

160
package-lock.json generated
View File

@@ -1688,15 +1688,6 @@
"callsite": "1.0.0"
}
},
"bfj": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/bfj/-/bfj-1.2.2.tgz",
"integrity": "sha1-OJlKxn//jVbEZyaP6vWr/x6XsWo=",
"dev": true,
"requires": {
"check-types": "3.2.0"
}
},
"bi-app-sass": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/bi-app-sass/-/bi-app-sass-1.1.0.tgz",
@@ -2145,12 +2136,6 @@
"integrity": "sha1-lCg191Dk7GGjCOYMLvjMEBEgLvw=",
"dev": true
},
"check-types": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/check-types/-/check-types-3.2.0.tgz",
"integrity": "sha1-Bew1hEm7wV3sOcatV/KuDs1SrrM=",
"dev": true
},
"cheerio": {
"version": "1.0.0-rc.2",
"resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.2.tgz",
@@ -4870,12 +4855,6 @@
"map-cache": "0.2.2"
}
},
"freeport": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/freeport/-/freeport-1.0.5.tgz",
"integrity": "sha1-JV6KuEFwwzuoXZkOghrl9KGpvF0=",
"dev": true
},
"fs-access": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/fs-access/-/fs-access-1.0.1.tgz",
@@ -5128,12 +5107,6 @@
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz",
"integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg="
},
"graceful-readlink": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz",
"integrity": "sha1-TK+tdrxi8C+gObL5Tpo906ORpyU=",
"dev": true
},
"growly": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz",
@@ -5388,15 +5361,6 @@
}
}
},
"hasbin": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/hasbin/-/hasbin-1.1.3.tgz",
"integrity": "sha1-C7Qi5DxDRklUzgMi0t3xXFGJxxg=",
"dev": true,
"requires": {
"async": "1.5.2"
}
},
"hash-base": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hash-base/-/hash-base-2.0.2.tgz",
@@ -5822,12 +5786,6 @@
"integrity": "sha1-LKmwM2UREYVUEvFr5dd8YqRYp2Y=",
"dev": true
},
"is": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/is/-/is-3.2.1.tgz",
"integrity": "sha1-0Kwq1V63sL7JJqUmb2xmKqqD3KU=",
"dev": true
},
"is-absolute": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/is-absolute/-/is-absolute-0.2.6.tgz",
@@ -8093,12 +8051,6 @@
"signal-exit": "3.0.2"
}
},
"lower-case": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/lower-case/-/lower-case-1.1.4.tgz",
"integrity": "sha1-miyr0bno4K6ZOkv31YdcOcQujqw=",
"dev": true
},
"lru-cache": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.1.tgz",
@@ -8717,15 +8669,6 @@
"which": "1.3.0"
}
},
"node-phantom-simple": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/node-phantom-simple/-/node-phantom-simple-2.2.4.tgz",
"integrity": "sha1-T8Tv+7AvJB+1CCvU+6s5jkrstk0=",
"dev": true,
"requires": {
"debug": "2.6.9"
}
},
"node-sass": {
"version": "4.7.2",
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-4.7.2.tgz",
@@ -8776,15 +8719,6 @@
}
}
},
"node.extend": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/node.extend/-/node.extend-1.1.6.tgz",
"integrity": "sha1-p7iCyC1sk6SGOlUEvV3o7IYli5Y=",
"dev": true,
"requires": {
"is": "3.2.1"
}
},
"nomnom": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/nomnom/-/nomnom-1.6.2.tgz",
@@ -9172,79 +9106,6 @@
"integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=",
"dev": true
},
"pa11y": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/pa11y/-/pa11y-4.0.1.tgz",
"integrity": "sha1-EG5enAHcLAQqnAh3JCaRMFqdDtQ=",
"dev": true,
"requires": {
"async": "1.4.2",
"bfj": "1.2.2",
"chalk": "1.1.3",
"commander": "2.8.1",
"lower-case": "1.1.4",
"node.extend": "1.1.6",
"once": "1.3.3",
"truffler": "2.2.1"
},
"dependencies": {
"ansi-styles": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
"integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=",
"dev": true
},
"async": {
"version": "1.4.2",
"resolved": "https://registry.npmjs.org/async/-/async-1.4.2.tgz",
"integrity": "sha1-bJ7csRztTw3S8tQNsNSaEJwIiqs=",
"dev": true
},
"chalk": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
"integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
"dev": true,
"requires": {
"ansi-styles": "2.2.1",
"escape-string-regexp": "1.0.5",
"has-ansi": "2.0.0",
"strip-ansi": "3.0.1",
"supports-color": "2.0.0"
}
},
"commander": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.8.1.tgz",
"integrity": "sha1-Br42f+v9oMMwqh4qBy09yXYkJdQ=",
"dev": true,
"requires": {
"graceful-readlink": "1.0.1"
}
},
"once": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/once/-/once-1.3.3.tgz",
"integrity": "sha1-suJhVXzkwxTsgwTz+oJmPkKXyiA=",
"dev": true,
"requires": {
"wrappy": "1.0.2"
}
},
"supports-color": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=",
"dev": true
}
}
},
"pa11y-reporter-json-oldnode": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/pa11y-reporter-json-oldnode/-/pa11y-reporter-json-oldnode-1.0.0.tgz",
"integrity": "sha1-GYn0RDCGBo5ySKSsI3xRq+lqcvg=",
"dev": true
},
"pako": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/pako/-/pako-1.0.6.tgz",
@@ -13694,27 +13555,6 @@
}
}
},
"truffler": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/truffler/-/truffler-2.2.1.tgz",
"integrity": "sha1-f3hQsnaqTjSusBptUc08yofYp4w=",
"dev": true,
"requires": {
"async": "1.4.2",
"freeport": "1.0.5",
"hasbin": "1.1.3",
"node-phantom-simple": "2.2.4",
"node.extend": "1.1.6"
},
"dependencies": {
"async": {
"version": "1.4.2",
"resolved": "https://registry.npmjs.org/async/-/async-1.4.2.tgz",
"integrity": "sha1-bJ7csRztTw3S8tQNsNSaEJwIiqs=",
"dev": true
}
}
},
"tty-browserify": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz",

View File

@@ -92,8 +92,6 @@
"karma-sourcemap-loader": "0.3.7",
"karma-spec-reporter": "0.0.20",
"karma-webpack": "2.0.9",
"pa11y": "4.0.1",
"pa11y-reporter-json-oldnode": "1.0.0",
"plato": "1.2.2",
"react-test-renderer": "16.4.0",
"selenium-webdriver": "3.4.0",

View File

@@ -12,12 +12,8 @@ from pavelib.utils.envs import Env
from pavelib.utils.passthrough_opts import PassthroughTask
from pavelib.utils.test.bokchoy_options import (
BOKCHOY_OPTS,
PA11Y_COURSE_KEY,
PA11Y_FETCH_COURSE,
PA11Y_HTML,
PA11Y_SINGLE_URL
)
from pavelib.utils.test.suites.bokchoy_suite import BokChoyTestSuite, Pa11yCrawler
from pavelib.utils.test.suites.bokchoy_suite import BokChoyTestSuite
from pavelib.utils.test.utils import check_firefox_version
from pavelib.utils.timer import timed
@@ -103,39 +99,6 @@ def perf_report_bokchoy(options, passthrough_options):
run_bokchoy(options.perf_report_bokchoy, passthrough_options)
@needs('pavelib.prereqs.install_prereqs', 'get_test_course')
@cmdopts(
BOKCHOY_OPTS + [PA11Y_SINGLE_URL, PA11Y_HTML, PA11Y_COURSE_KEY, PA11Y_FETCH_COURSE],
share_with=['get_test_course', 'prepare_bokchoy_run', 'load_courses']
)
@PassthroughTask
@timed
def pa11ycrawler(options, passthrough_options):
"""
Runs pa11ycrawler against the demo-test-course to generate accessibility
reports. (See https://github.com/edx/demo-test-course)
Note: Like the bok-choy tests, this can be used with the `serversonly`
flag to get an environment running. The setup for this is the same as
for bok-choy tests, only test course is imported as well.
"""
# Modify the options object directly, so that any subsequently called tasks
# that share with this task get the modified options
options.pa11ycrawler.report_dir = Env.PA11YCRAWLER_REPORT_DIR
options.pa11ycrawler.coveragerc = options.get('coveragerc', None)
options.pa11ycrawler.should_fetch_course = getattr(
options,
'should_fetch_course',
not options.get('fasttest')
)
options.pa11ycrawler.course_key = getattr(options, 'course-key', "course-v1:edX+Test101+course")
test_suite = Pa11yCrawler('pa11ycrawler', passthrough_options=passthrough_options, **options.pa11ycrawler)
test_suite.run()
if getattr(options, 'with_html', False):
test_suite.generate_html_reports()
def run_bokchoy(options, passthrough_options):
"""
Runs BokChoyTestSuite with the given options.
@@ -196,15 +159,3 @@ def a11y_coverage():
Env.BOK_CHOY_A11Y_REPORT_DIR,
Env.BOK_CHOY_A11Y_COVERAGERC
)
@task
@timed
def pa11ycrawler_coverage():
"""
Generate coverage reports for pa11ycrawler runs
"""
parse_coverage(
Env.PA11YCRAWLER_REPORT_DIR,
Env.PA11YCRAWLER_COVERAGERC
)

View File

@@ -11,7 +11,7 @@ import ddt
from mock import Mock, call, patch
from paver.easy import call_task, environment
from pavelib.utils.test.suites import BokChoyTestSuite, Pa11yCrawler
from pavelib.utils.test.suites import BokChoyTestSuite
from pavelib.utils.test.suites.bokchoy_suite import DEMO_COURSE_IMPORT_DIR, DEMO_COURSE_TAR_GZ
REPO_DIR = os.getcwd()
@@ -168,136 +168,3 @@ class TestPaverBokChoyCmd(unittest.TestCase):
]
suite = BokChoyTestSuite('', num_processes=process_count)
self.assertEqual(suite.verbosity_processes_command, expected_verbosity_command)
@ddt.ddt
class TestPaverPa11yCrawlerCmd(unittest.TestCase):
"""
Paver pa11ycrawler command test cases. Most of the functionality is
inherited from BokChoyTestSuite, so those tests aren't duplicated.
"""
def setUp(self):
super(TestPaverPa11yCrawlerCmd, self).setUp()
# Mock shell commands
mock_sh = patch('pavelib.utils.test.suites.bokchoy_suite.sh')
self._mock_sh = mock_sh.start()
# Cleanup mocks
self.addCleanup(mock_sh.stop)
# reset the options for all tasks
environment.options.clear()
def test_default(self):
suite = Pa11yCrawler(
'pa11ycrawler', course_key="course-v1:edX+Test101+course",
)
ignore = (
"pa11y_ignore_rules_url="
"https://raw.githubusercontent.com/edx/"
"pa11ycrawler-ignore/master/ignore.yaml"
)
expected_cmd = [
"scrapy",
"crawl",
"edx",
"-a",
"port=8003",
"-a",
"course_key=course-v1:edX+Test101+course",
"-a",
ignore,
"-a",
"data_dir=/edx/app/edxapp/edx-platform/reports/pa11ycrawler/data",
]
actual_cmd = suite.cmd
# verify that the final section of this command is for specifying the
# data directory
self.assertEqual(actual_cmd[-2], "-a")
self.assertTrue(actual_cmd[-1].startswith("data_dir="))
# chop off the `data_dir` argument when comparing,
# since it is highly dependent on who is running this paver command,
# and where it's being run (devstack, jenkins, etc)
self.assertEqual(actual_cmd[0:-2], expected_cmd[0:-2])
@ddt.data(
(True, True, None),
(True, False, None),
(False, True, DEMO_COURSE_IMPORT_DIR),
(False, False, None),
)
@ddt.unpack
def test_get_test_course(self, import_dir_set, should_fetch_course_set, downloaded_to):
options = {}
if import_dir_set:
options['imports_dir'] = 'some_import_dir'
if should_fetch_course_set:
options['should_fetch_course'] = True
call_task('pavelib.utils.test.suites.bokchoy_suite.get_test_course', options=options)
if downloaded_to is None:
self._mock_sh.assert_has_calls([])
else:
self._mock_sh.assert_has_calls([
call(
u'wget {targz} -O {dir}demo_course.tar.gz'.format(targz=DEMO_COURSE_TAR_GZ, dir=downloaded_to)),
call(
u'tar zxf {dir}demo_course.tar.gz -C {dir}'.format(dir=downloaded_to)),
])
@patch("pavelib.utils.test.suites.bokchoy_suite.path")
def test_scrapy_cfg_exists(self, mocked_path_func):
# setup
mock_path = Mock()
mock_path.expand.return_value = mock_path
mock_path.isfile.return_value = True
mocked_path_func.return_value = mock_path
# test
Pa11yCrawler('pa11ycrawler')
# check
mocked_path_func.assert_called_with("~/.config/scrapy.cfg")
self.assertTrue(mock_path.isfile.called)
self.assertFalse(mock_path.write_text.called)
@patch("pavelib.utils.test.suites.bokchoy_suite.path")
def test_scrapy_cfg_not_exists(self, mocked_path_func):
# setup
mock_path = Mock()
mock_path.expand.return_value = mock_path
mock_path.isfile.return_value = False
mocked_path_func.return_value = mock_path
# test
Pa11yCrawler('pa11ycrawler')
# check
mocked_path_func.assert_called_with("~/.config/scrapy.cfg")
self.assertTrue(mock_path.isfile.called)
self.assertTrue(mock_path.parent.makedirs_p.called)
content = dedent("""
[settings]
default = pa11ycrawler.settings
[deploy]
project = pa11ycrawler
""")
mock_path.write_text.assert_called_with(content)
def test_generate_html_reports(self):
suite = Pa11yCrawler('pa11ycrawler')
suite.generate_html_reports()
self._mock_sh.assert_has_calls([
call([
'pa11ycrawler-html',
'--data-dir',
os.path.join(suite.report_dir, "data"),
'--output-dir',
os.path.join(suite.report_dir, "html"),
])
])

View File

@@ -76,9 +76,6 @@ class Env(object):
"lib" / "custom_a11y_rules.js"
)
PA11YCRAWLER_REPORT_DIR = REPORT_DIR / "pa11ycrawler"
PA11YCRAWLER_COVERAGERC = BOK_CHOY_DIR / ".pa11ycrawlercoveragerc"
# If set, put reports for run in "unique" directories.
# The main purpose of this is to ensure that the reports can be 'slurped'
# in the main jenkins flow job without overwriting the reports from other

View File

@@ -88,13 +88,3 @@ BOKCHOY_OPTS = [
help="deprecated in favor of save-screenshots"
),
]
PA11Y_SINGLE_URL = make_option('--single-url', help='Crawl only the specified url')
PA11Y_HTML = ('with-html', 'w', 'Include html reports')
PA11Y_COURSE_KEY = make_option('--course-key', help='Course key for test course')
PA11Y_FETCH_COURSE = make_option(
"--fetch-course",
action="store_true",
dest="should_fetch_course",
help='Course key for test course',
)

View File

@@ -5,4 +5,4 @@ from .suite import TestSuite
from .pytest_suite import PytestSuite, SystemTestSuite, LibTestSuite
from .python_suite import PythonTestSuite
from .js_suite import JsTestSuite, JestSnapshotTestSuite
from .bokchoy_suite import BokChoyTestSuite, Pa11yCrawler
from .bokchoy_suite import BokChoyTestSuite

View File

@@ -19,8 +19,7 @@ from pavelib.utils.test.bokchoy_utils import (
from pavelib.utils.test.bokchoy_options import (
BOKCHOY_IMPORTS_DIR, BOKCHOY_IMPORTS_DIR_DEPR,
BOKCHOY_DEFAULT_STORE, BOKCHOY_DEFAULT_STORE_DEPR,
BOKCHOY_FASTTEST,
PA11Y_FETCH_COURSE
BOKCHOY_FASTTEST
)
from pavelib.utils.test import utils as test_utils
from pavelib.utils.timer import timed
@@ -107,49 +106,6 @@ def update_fixtures():
)
@task
@cmdopts([BOKCHOY_IMPORTS_DIR, BOKCHOY_IMPORTS_DIR_DEPR, PA11Y_FETCH_COURSE])
@timed
def get_test_course(options):
"""
Fetches the test course.
"""
if options.get('imports_dir'):
print(colorize("green", "--imports-dir specified, skipping fetch of test course"))
return
if not options.get('should_fetch_course', False):
print(colorize("green", "--skip-fetch specified, skipping fetch of test course"))
return
# Set the imports_dir for use by other tasks
options.imports_dir = DEMO_COURSE_IMPORT_DIR
options.imports_dir.makedirs_p()
zipped_course = options.imports_dir + 'demo_course.tar.gz'
msg = colorize('green', "Fetching the test course from github...")
print(msg)
sh(
u'wget {tar_gz_file} -O {zipped_course}'.format(
tar_gz_file=DEMO_COURSE_TAR_GZ,
zipped_course=zipped_course,
)
)
msg = colorize('green', "Uncompressing the test course...")
print(msg)
sh(
u'tar zxf {zipped_course} -C {courses_dir}'.format(
zipped_course=zipped_course,
courses_dir=options.imports_dir,
)
)
@task
@timed
def reset_test_database():
@@ -377,83 +333,3 @@ class BokChoyTestSuite(TestSuite):
cmd.extend(self.passthrough_options)
return cmd
class Pa11yCrawler(BokChoyTestSuite):
"""
Sets up test environment with mega-course loaded, and runs pa11ycrawler
against it.
"""
def __init__(self, *args, **kwargs):
super(Pa11yCrawler, self).__init__(*args, **kwargs)
self.course_key = kwargs.get('course_key')
self.single_url = kwargs.get('single_url', False)
self.ensure_scrapy_cfg()
def ensure_scrapy_cfg(self):
"""
Scrapy requires a few configuration settings in order to run:
http://doc.scrapy.org/en/1.1/topics/commands.html#configuration-settings
This method ensures they are correctly written to the filesystem
in a location where Scrapy knows to look for them.
Returns True if the file was created, or False if the file already
exists (in which case it was not modified.)
"""
cfg_file = path("~/.config/scrapy.cfg").expand()
if cfg_file.isfile():
return False
cfg_file.parent.makedirs_p()
content = dedent("""
[settings]
default = pa11ycrawler.settings
[deploy]
project = pa11ycrawler
""")
cfg_file.write_text(content)
return True
def generate_html_reports(self):
"""
Runs pa11ycrawler-html
"""
command = [
'pa11ycrawler-html',
'--data-dir',
os.path.join(self.report_dir, 'data'),
'--output-dir',
os.path.join(self.report_dir, 'html'),
]
sh(command)
@property
def cmd(self):
"""
Runs pa11ycrawler as staff user against the test course.
"""
data_dir = os.path.join(self.report_dir, 'data')
url = "https://raw.githubusercontent.com/edx/pa11ycrawler-ignore/master/ignore.yaml"
command = [
"scrapy",
"crawl",
"edx",
"-a",
"port=8003",
"-a",
"course_key={key}".format(key=self.course_key),
"-a",
"pa11y_ignore_rules_url={url}".format(url=url),
"-a",
"data_dir={dir}".format(dir=data_dir),
]
if self.single_url:
command = command + [
"-a",
"single_url={url}".format(url=self.single_url),
]
return command

View File

@@ -164,7 +164,7 @@ networkx==1.7
newrelic==4.20.0.120
nltk==3.4.1
nodeenv==1.1.1
numpy==1.16.3
numpy==1.16.3 # via scipy
oauth2==1.9.0.post1
oauthlib==2.1.0
openapi-codec==1.3.2 # via django-rest-swagger

View File

@@ -35,7 +35,6 @@ asn1crypto==0.24.0
astroid==1.5.3
atomicwrites==1.3.0
attrs==17.4.0
automat==0.7.0
babel==1.3
backports.functools-lru-cache==1.5
beautifulsoup4==4.7.1
@@ -57,7 +56,6 @@ click==7.0
code-annotations==0.3.1
colorama==0.4.1
configparser==3.7.4
constantly==15.1.0
coreapi==2.3.3
coreschema==0.0.4
coverage==4.4
@@ -170,10 +168,8 @@ help-tokens==1.0.3
html5lib==1.0.1
httplib2==0.12.3
httpretty==0.9.6
hyperlink==19.0.0
idna==2.8
imagesize==1.1.0 # via sphinx
incremental==17.5.0
inflect==2.1.0
ipaddress==1.0.22
isort==4.3.20
@@ -217,9 +213,7 @@ oauth2==1.9.0.post1
oauthlib==2.1.0
openapi-codec==1.3.2
git+https://github.com/edx/edx-ora2.git@2.2.3#egg=ora2==2.2.3
pa11ycrawler==1.7.3
packaging==19.0
parsel==1.5.1
path.py==8.2.1
pathlib2==2.3.3
pathtools==0.1.2
@@ -234,19 +228,15 @@ polib==1.1.0
psutil==1.2.1
py2neo==3.1.2
py==1.8.0
pyasn1-modules==0.2.5
pyasn1==0.4.5
pycodestyle==2.5.0
pycontracts==1.7.1
pycountry==18.12.8
pycparser==2.19
pycryptodome==3.8.1
pycryptodomex==3.4.7
pydispatcher==2.0.5
pyflakes==2.1.1
pygments==2.4.0
pygraphviz==1.5
pyhamcrest==1.9.0
pyinotify==0.9.6
pyjwkest==1.3.2
pyjwt==1.5.2
@@ -256,7 +246,6 @@ pylint-plugin-utils==0.3
pylint==1.7.6
pymongo==2.9.1
pynliner==0.8.0
pyopenssl==19.0.0
pyparsing==2.2.0
pyquery==1.4.0
pysqlite==2.8.3 ; python_version == "2.7"
@@ -278,7 +267,6 @@ python3-saml==1.5.0
pytz==2019.1
pyuca==1.1
pyyaml==5.1
queuelib==1.5.0
radon==3.0.3
git+https://github.com/edx/RecommenderXBlock.git@1.4.0#egg=recommender-xblock==1.4.0
redis==2.10.6
@@ -292,10 +280,8 @@ s3transfer==0.1.13
sailthru-client==2.2.3
scandir==1.10.0
scipy==1.2.1
scrapy==1.6.0
selenium==3.141.0
semantic-version==2.6.0
service-identity==18.1.0
shapely==1.6.4.post2
shortuuid==0.5.0
simplejson==3.16.0
@@ -322,18 +308,15 @@ toml==0.10.0
tox-battery==0.5.1
tox==3.12.1
transifex-client==0.13.6
twisted==19.2.0
typing==3.6.6
unicodecsv==0.14.1
unidecode==1.0.23
uritemplate==3.0.0
urllib3==1.23
urlobject==2.4.3
user-util==0.1.5
virtualenv==16.6.0
voluptuous==0.11.5
vulture==1.0
w3lib==1.20.0
watchdog==0.9.0
wcwidth==0.1.7
web-fragments==0.3.0
@@ -347,4 +330,3 @@ xblock-utils==1.2.1
xblock==1.2.2
xmltodict==0.12.0
zendesk==1.1.1
zope.interface==4.6.0

View File

@@ -31,7 +31,6 @@ freezegun # Allows tests to mock the output of assorted datetime
httpretty # Library for mocking HTTP requests, used in many tests
isort # For checking and fixing the order of imports
moto==0.3.1 # Lets tests mock AWS access via the boto library
pa11ycrawler # Python crawler (using Scrapy) that uses Pa11y to check accessibility of pages as it crawls
pycodestyle # Checker for compliance with the Python style guide (PEP 8)
polib # Library for manipulating gettext translation files, used to test paver i18n commands
pyquery # jQuery-like API for retrieving fragments of HTML and XML files in tests

View File

@@ -33,7 +33,6 @@ asn1crypto==0.24.0
astroid==1.5.3 # via pylint, pylint-celery
atomicwrites==1.3.0 # via pytest
attrs==17.4.0
automat==0.7.0 # via twisted
babel==1.3
backports.functools-lru-cache==1.5
beautifulsoup4==4.7.1
@@ -55,7 +54,6 @@ click==7.0
code-annotations==0.3.1
colorama==0.4.1 # via radon
configparser==3.7.4 # via entrypoints, flake8, pylint
constantly==15.1.0 # via twisted
coreapi==2.3.3
coreschema==0.0.4
coverage==4.4
@@ -155,7 +153,7 @@ freezegun==0.3.11
fs-s3fs==0.1.8
fs==2.0.18
funcsigs==1.0.2 # via pytest
functools32==3.2.3.post2 ; python_version == "2.7" # via flake8, parsel
functools32==3.2.3.post2 ; python_version == "2.7" # via flake8
future==0.17.1
futures==3.2.0 ; python_version == "2.7"
geoip2==2.9.0
@@ -165,9 +163,7 @@ help-tokens==1.0.3
html5lib==1.0.1
httplib2==0.12.3
httpretty==0.9.6
hyperlink==19.0.0 # via twisted
idna==2.8
incremental==17.5.0 # via twisted
inflect==2.1.0
ipaddress==1.0.22
isort==4.3.20
@@ -210,9 +206,7 @@ oauth2==1.9.0.post1
oauthlib==2.1.0
openapi-codec==1.3.2
git+https://github.com/edx/edx-ora2.git@2.2.3#egg=ora2==2.2.3
pa11ycrawler==1.7.3
packaging==19.0 # via caniusepython3
parsel==1.5.1 # via scrapy
path.py==8.2.1
pathlib2==2.3.3 # via pytest, pytest-django
pathtools==0.1.2
@@ -226,19 +220,15 @@ polib==1.1.0
psutil==1.2.1
py2neo==3.1.2
py==1.8.0 # via pytest, tox
pyasn1-modules==0.2.5 # via service-identity
pyasn1==0.4.5 # via pyasn1-modules, service-identity
pycodestyle==2.5.0
pycontracts==1.7.1
pycountry==18.12.8
pycparser==2.19
pycryptodome==3.8.1
pycryptodomex==3.4.7
pydispatcher==2.0.5 # via scrapy
pyflakes==2.1.1 # via flake8
pygments==2.4.0
pygraphviz==1.5
pyhamcrest==1.9.0 # via twisted
pyjwkest==1.3.2
pyjwt==1.5.2
pylint-celery==0.3 # via edx-lint
@@ -247,7 +237,6 @@ pylint-plugin-utils==0.3 # via pylint-celery, pylint-django
pylint==1.7.6 # via edx-lint, pylint-celery, pylint-django
pymongo==2.9.1
pynliner==0.8.0
pyopenssl==19.0.0 # via scrapy
pyparsing==2.2.0
pyquery==1.4.0
pysqlite==2.8.3 ; python_version == "2.7"
@@ -269,7 +258,6 @@ python3-saml==1.5.0
pytz==2019.1
pyuca==1.1
pyyaml==5.1
queuelib==1.5.0 # via scrapy
radon==3.0.3
git+https://github.com/edx/RecommenderXBlock.git@1.4.0#egg=recommender-xblock==1.4.0
redis==2.10.6
@@ -283,10 +271,8 @@ s3transfer==0.1.13
sailthru-client==2.2.3
scandir==1.10.0 # via pathlib2
scipy==1.2.1
scrapy==1.6.0 # via pa11ycrawler
selenium==3.141.0
semantic-version==2.6.0
service-identity==18.1.0 # via scrapy
shapely==1.6.4.post2
shortuuid==0.5.0
simplejson==3.16.0
@@ -309,17 +295,14 @@ toml==0.10.0 # via tox
tox-battery==0.5.1
tox==3.12.1
transifex-client==0.13.6
twisted==19.2.0 # via scrapy
typing==3.6.6 # via flake8
unicodecsv==0.14.1
unidecode==1.0.23 # via python-slugify
uritemplate==3.0.0
urllib3==1.23
urlobject==2.4.3 # via pa11ycrawler
user-util==0.1.5
virtualenv==16.6.0 # via tox
voluptuous==0.11.5
w3lib==1.20.0 # via parsel, scrapy
watchdog==0.9.0
wcwidth==0.1.7 # via pytest
web-fragments==0.3.0
@@ -333,4 +316,3 @@ xblock-utils==1.2.1
xblock==1.2.2
xmltodict==0.12.0 # via moto
zendesk==1.1.1
zope.interface==4.6.0 # via twisted

View File

@@ -32,12 +32,3 @@ fi
echo "Running explicit accessibility tests..."
SELENIUM_BROWSER=chrome BOKCHOY_HEADLESS=true $TOX paver test_a11y
# The settings that we use are installed with the pa11ycrawler module
export SCRAPY_SETTINGS_MODULE='pa11ycrawler.settings'
echo "Reset db cache files to remove any changes from running a11y tests"
git checkout -- common/test/db_cache
echo "Running pa11ycrawler against test course..."
$TOX paver pa11ycrawler --fasttest --skip-clean --fetch-course --with-html