NOTE(review): This patch was recovered from a mangled copy -- the diff was
collapsed onto a few physical lines, the "+" prefixes were stripped, and all
HTML tags inside string literals were eaten by markup processing.  Tag content
below marked NOTE(review) is a best-effort reconstruction and must be confirmed
against the original files.  Hunk headers (line counts) are kept from the
mangled copy; regenerate them with `git diff` before applying.

Review fixes folded in: typo "Suit"->"Suite"; missing shebang in the
executable run_ut.sh; copy-paste triplication in coverage.sh collapsed into
one parameterized case (identical output); no-op .format() removed.

diff --git a/scripts/cov_merge.py b/scripts/cov_merge.py
new file mode 100644
index 0000000000..8b4cd1c6d5
--- /dev/null
+++ b/scripts/cov_merge.py
@@ -0,0 +1,172 @@
+import os
+import sys
+from textwrap import dedent
+from bs4 import BeautifulSoup
+import multiprocessing
+
+
+# Static HTML wrapper for the merged report.
+# NOTE(review): the original markup was stripped in transit; only the title
+# text survived.  The tags below are reconstructed -- confirm against upstream.
+FIRST = dedent(
+    '''
+    <!DOCTYPE html>
+    <html>
+    <head>
+        <meta charset="utf-8">
+        <title>CMS Python Test Coverage Report</title>
+    </head>
+    <body>
+    ''')
+
+
+LAST = dedent(
+    '''
+    </body>
+    </html>
+    ''')
+
+
+class ReportMerge(object):
+    """Merge multiple html coverage reports"""
+
+    # Destination directory where Tddium collects per-session artifacts.
+    DESTINATION = os.path.join(os.environ['HOME'], 'results', os.environ['TDDIUM_SESSION_ID'], 'session')
+
+    def __init__(self):
+        # The reports/ directory sits next to scripts/ in the repo root.
+        self.reports_dir = os.path.realpath(__file__).replace("scripts/cov_merge.py", "reports/")
+
+    def _files(self, cover_path):
+        """
+        Return list of file paths in `cover_path`. `cover_path` will be something like */reports/cms/cover
+        """
+        include = lambda f: f.endswith('.html') and os.path.basename(f) != 'index.html'
+        return [os.path.join(cover_path, f) for f in os.listdir(cover_path) if include(f)]
+
+    def merge(self, modules, output_file=None):
+        """
+        Merge reports for `modules`
+
+        Arguments:
+            output_file (str): name of output report file -- only used for bok_choy reports
+        """
+        for module in modules:
+            for (path, _, _) in os.walk(os.path.join(self.reports_dir, module)):
+                if os.path.basename(path) == 'cover':
+                    self.merge_report(path, output_file)
+
+    def merge_report(self, path, output_file):
+        """
+        Collect multiple parts of a report and join them to create a single report.
+
+        Arguments:
+            path (str): path where multiple files are located to be merged
+            output_file (str): name of output report file -- only used for bok_choy reports
+        """
+        content = list()
+
+        # Extract total coverage percentage and file links table
+        index_html = os.path.join(path, 'index.html')
+        with open(index_html) as index_file:
+            soup = BeautifulSoup(index_file)
+            total_percentage = soup.find('div', id='header')
+            total_percentage.find('img').decompose()
+            index_table = soup.find('div', id='index')
+
+        # Extract file names
+        files = [os.path.join(path, name['href']) for name in index_table.find_all('a')]
+        if not files:
+            return
+
+        print 'Merging Report for {}'.format(path)
+
+        # Collect different parts of html report
+        content.append(FIRST)
+        # NOTE(review): this literal lost its markup in transit -- it is empty
+        # in the recovered copy; confirm the intended tag against upstream.
+        content.append('')
+        content.append(str(total_percentage))
+        content.append(str(index_table))
+        for html in files:
+            content.append(self._html_content(html))
+
+        content.append(LAST)
+
+        if output_file:
+            report_path = os.path.join(self.DESTINATION, output_file)
+        else:
+            report_filename = path.split('reports/')[1].split('/cover')[0].replace('/', '_')
+            report_path = os.path.join(self.DESTINATION, report_filename + '_coverage.html')
+
+        # Write everything to single report file
+        with open(report_path, 'w') as report_file:
+            report_file.write('\n'.join(content))
+
+        print 'Report Merged for {}'.format(path)
+
+    def _html_content(self, html):
+        """
+        Returns html tags of interest for file specified by `html`
+        """
+        # Create id for each link in file links table.
+        # NOTE(review): the div tags were stripped in transit; reconstructed
+        # from the .format(navigate_div_id) usage -- confirm against upstream.
+        navigate_div_id = os.path.basename(html).split('.')[0].replace('/', '_')
+        navigate_div_start = "<div id='{}'>\n".format(navigate_div_id)
+        # Fixed: original called .format() on a literal with no placeholders.
+        navigate_div_close = "\n</div>"
+
+        content = list()
+        content.append(navigate_div_start)
+
+        with open(html) as html_file:
+            soup = BeautifulSoup(html_file)
+            header = soup.find('div', id='header')
+            header.find('img').decompose()
+            source = soup.find('div', id='source')
+            source_img = source.find('img')
+            if source_img:
+                source_img.decompose()
+
+            content.append(str(header))
+            content.append(str(source))
+
+        content.append(navigate_div_close)
+
+        return '\n'.join(content)
+
+
+if __name__ == '__main__':
+    args = sys.argv
+
+    if 'bok_choy' in args[1]:
+        paths = ['bok_choy']
+        rm = ReportMerge()
+        rm.merge(paths, output_file=args[2])
+    elif 'unit' in args[1]:
+        # Merge each module's report in a separate process.
+        paths = ['common', 'cms', 'lms']
+        for pth in paths:
+            rm = ReportMerge()
+            mp = multiprocessing.Process(target=rm.merge, args=([pth],))
+            mp.start()
+    else:
+        # Fixed typo in user-facing message: "Suit" -> "Suite".
+        print 'Unsupported Test Suite'
diff --git a/scripts/coverage.sh b/scripts/coverage.sh
new file mode 100755
index 0000000000..3e91295277
--- /dev/null
+++ b/scripts/coverage.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+# Collect bok-choy coverage for the given shard ($1) and merge it into a
+# single HTML report.  The original had three byte-identical copy-paste
+# cases for shard1/2/3; collapsed into one parameterized case.
+case $1 in
+    "shard1"|"shard2"|"shard3")
+        num=${1#shard}
+        echo "Collecting Coverage for Bok-Choy Shard${num}"
+        paver bokchoy_coverage
+        echo "Merging Coverage into a Single HTML File for Bok-Choy Shard${num}"
+        python ./scripts/cov_merge.py bok_choy bok_choy_${1}_coverage.html
+        ;;
+    *)
+        echo "Invalid Bok-Choy Shard Value!"
+        ;;
+esac
diff --git a/scripts/run_ut.sh b/scripts/run_ut.sh
new file mode 100755
index 0000000000..fddacc9f65
--- /dev/null
+++ b/scripts/run_ut.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+# Run unit tests under JSCover, then collect coverage and copy the combined
+# diff-coverage report to the Tddium session directory.
+# (shebang added in review: file mode is 100755 but it had none)
+mkdir -p jscover-dist && wget http://files.edx.org/testeng/JSCover-1.0.2.zip -P jscover-dist && unzip jscover-dist/JSCover-1.0.2.zip -d jscover-dist/ && cp jscover-dist/target/dist/JSCover-all.jar jscover-dist && export JSCOVER_JAR=$PWD/jscover-dist/JSCover-all.jar && paver test
+
+echo '******************************************************'
+
+echo 'Collecting Coverage...'
+
+paver coverage
+
+echo 'Coverage Collection Completed'
+
+
+current_path=`pwd`
+reports_path=$current_path/reports
+dest_path=$HOME/results/$TDDIUM_SESSION_ID/session/
+unit_combined_rpt=$reports_path/diff_coverage_combined.html
+
+echo 'Copying '$unit_combined_rpt' to '$dest_path
+
+cp -f $unit_combined_rpt $dest_path
+
+echo '******************************************************'
diff --git a/tddium.yml b/tddium.yml
index 4c97d149b4..19a218fe68 100644
--- a/tddium.yml
+++ b/tddium.yml
@@ -7,7 +7,6 @@ tddium:
     :version: "1.8.5.5"
   :hooks:
     :pre_setup: "virtualenv $HOME/python-env && $HOME/python-env/bin/pip install -r requirements/edx/paver.txt && $HOME/python-env/bin/pip install -r requirements/edx/pre.txt && $HOME/python-env/bin/pip install -r requirements/edx/base.txt && $HOME/python-env/bin/pip install -r requirements/edx/github.txt && $HOME/python-env/bin/pip install -r requirements/edx/local.txt && $HOME/python-env/bin/pip install -r requirements/edx/post.txt"
-    # :post_build: "paver coverage; paver bokchoy_coverage; for i in $(find reports -name cover); do cp -R $i $HOME/results/$TDDIUM_SESSION_ID/session/; done"
     :post_worker: 'python ./scripts/post_worker.py'
   :cache:
     :key_paths:
@@ -37,7 +36,7 @@ tddium:
       - "reports/diff_quality/diff_quality_pylint.html"
   - :type: junit
     :mode: basic
-    :command: "mkdir -p jscover-dist && wget http://files.edx.org/testeng/JSCover-1.0.2.zip -P jscover-dist && unzip jscover-dist/JSCover-1.0.2.zip -d jscover-dist/ && cp jscover-dist/target/dist/JSCover-all.jar jscover-dist && export JSCOVER_JAR=$PWD/jscover-dist/JSCover-all.jar && paver test"
+    :command: bash ./scripts/run_ut.sh && python ./scripts/cov_merge.py unit
     :invocation: single
     :output: exit-status
     :report_files:
@@ -80,21 +79,21 @@ tddium:
       - "reports/acceptance/cms.xml"
   - :type: junit
     :mode: basic
-    :command: paver test_bokchoy --extra_args="-a shard_1"
+    :command: paver test_bokchoy --extra_args="-a shard_1" && bash ./scripts/coverage.sh shard1
    :invocation: single
     :output: exit-status
     :report_files:
       - "reports/bok_choy/xunit.xml"
   - :type: junit
     :mode: basic
-    :command: paver test_bokchoy --extra_args="-a shard_2"
+    :command: paver test_bokchoy --extra_args="-a shard_2" && bash ./scripts/coverage.sh shard2
     :invocation: single
     :output: exit-status
     :report_files:
       - "reports/bok_choy/xunit.xml"
   - :type: junit
     :mode: basic
-    :command: paver test_bokchoy --extra_args="-a shard_1=False,shard_2=False"
+    :command: paver test_bokchoy --extra_args="-a shard_1=False,shard_2=False" && bash ./scripts/coverage.sh shard3
     :invocation: single
     :output: exit-status
     :report_files: