Get tests to pass by creating a mock peer grading service
This commit is contained in:
@@ -15,6 +15,26 @@ from xmodule.course_module import CourseDescriptor
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class MockPeerGradingService(object):
    """
    Stub replacement for the real peer grading backend.

    Used when settings.MOCK_PEER_GRADING is enabled so that tests can run
    without a live grading controller.  Every method simply reports success.

    NOTE: the original diff returned ``{'success': true}`` — ``true`` is not
    a Python name (JavaScript-style boolean) and would raise NameError on
    every call; it must be ``True``.
    """
    # TODO: make this return real results

    def get_next_submission(self, problem_location, grader_id):
        """Pretend to fetch the next submission for *grader_id* to grade."""
        return {'success': True}

    def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key):
        """Pretend to persist a peer grade for a submission."""
        return {'success': True}

    def is_student_calibrated(self, problem_location, grader_id):
        """Pretend to check whether the student has finished calibration."""
        return {'success': True}

    def show_calibration_essay(self, problem_location, grader_id):
        """Pretend to fetch a calibration essay for the student to grade."""
        return {'success': True}

    def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback):
        """Pretend to persist the student's grade of a calibration essay."""
        return {'success': True}

    def get_problem_list(self, course_id, grader_id):
        """Pretend to list the peer-gradable problems in *course_id*."""
        return {'success': True}
|
||||
|
||||
class PeerGradingService(GradingService):
|
||||
"""
|
||||
Interface with the grading controller for peer grading
|
||||
@@ -78,7 +98,10 @@ def peer_grading_service():
|
||||
if _service is not None:
|
||||
return _service
|
||||
|
||||
_service = PeerGradingService(settings.PEER_GRADING_INTERFACE)
|
||||
if settings.MOCK_PEER_GRADING:
|
||||
_service = MockPeerGradingService()
|
||||
else:
|
||||
_service = PeerGradingService(settings.PEER_GRADING_INTERFACE)
|
||||
|
||||
return _service
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ Replace this with more appropriate tests for your application.
|
||||
"""
|
||||
|
||||
from django.test import TestCase
|
||||
from instructor import staff_grading_service
|
||||
from open_ended_grading import staff_grading_service
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.contrib.auth.models import Group
|
||||
|
||||
@@ -14,6 +14,11 @@ from courseware.access import _course_staff_group_name
|
||||
import courseware.tests.tests as ct
|
||||
from xmodule.modulestore.django import modulestore
|
||||
import xmodule.modulestore.django
|
||||
from nose import SkipTest
|
||||
from mock import patch, Mock
|
||||
import json
|
||||
|
||||
from override_settings import override_settings
|
||||
|
||||
_mock_service = staff_grading_service.MockStaffGradingService()
|
||||
|
||||
@@ -45,7 +50,7 @@ class TestStaffGradingService(ct.PageLoader):
|
||||
|
||||
make_instructor(self.toy)
|
||||
|
||||
self.mock_service = staff_grading_service.grading_service()
|
||||
self.mock_service = staff_grading_service.staff_grading_service()
|
||||
|
||||
self.logout()
|
||||
|
||||
|
||||
@@ -28,6 +28,7 @@ from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundErr
|
||||
from xmodule.modulestore.search import path_to_location
|
||||
|
||||
from peer_grading_service import PeerGradingService
|
||||
from peer_grading_service import MockPeerGradingService
|
||||
from grading_service import GradingServiceError
|
||||
import json
|
||||
import track.views
|
||||
@@ -38,7 +39,10 @@ from .staff_grading import StaffGrading
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
template_imports = {'urllib': urllib}
|
||||
peer_gs = PeerGradingService(settings.PEER_GRADING_INTERFACE)
|
||||
if settings.MOCK_PEER_GRADING:
|
||||
peer_gs = MockPeerGradingService()
|
||||
else:
|
||||
peer_gs = PeerGradingService(settings.PEER_GRADING_INTERFACE)
|
||||
|
||||
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
|
||||
def staff_grading(request, course_id):
|
||||
@@ -62,6 +66,7 @@ def staff_grading(request, course_id):
|
||||
'staff_access': True, })
|
||||
|
||||
|
||||
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
|
||||
def peer_grading(request, course_id):
|
||||
'''
|
||||
Show a peer grading interface
|
||||
@@ -104,6 +109,7 @@ def peer_grading(request, course_id):
|
||||
'staff_access': False, })
|
||||
|
||||
|
||||
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
|
||||
def peer_grading_problem(request, course_id):
|
||||
'''
|
||||
Show individual problem interface
|
||||
|
||||
@@ -333,6 +333,8 @@ STAFF_GRADING_INTERFACE = None
|
||||
# Used for testing, debugging
|
||||
MOCK_STAFF_GRADING = False
|
||||
|
||||
################################# Peer grading config #####################
|
||||
PEER_GRADING_INTERFACE = None
|
||||
|
||||
################################# Jasmine ###################################
|
||||
JASMINE_TEST_DIRECTORY = PROJECT_ROOT + '/static/coffee'
|
||||
|
||||
@@ -62,6 +62,7 @@ XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds
|
||||
|
||||
# Don't rely on a real staff grading backend
|
||||
MOCK_STAFF_GRADING = True
|
||||
MOCK_PEER_GRADING = True
|
||||
|
||||
# TODO (cpennington): We need to figure out how envs/test.py can inject things
|
||||
# into common.py so that we don't have to repeat this sort of thing
|
||||
|
||||
Reference in New Issue
Block a user