diff --git a/.gitignore b/.gitignore
index f1784a48f3..76cc1efa95 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,3 +36,7 @@ chromedriver.log
/nbproject
ghostdriver.log
node_modules
+.pip_download_cache/
+.prereqs_cache
+autodeploy.properties
+.ws_migrations_complete
diff --git a/LICENSE.TXT b/LICENSE
similarity index 100%
rename from LICENSE.TXT
rename to LICENSE
diff --git a/README.md b/README.md
index ec17d7c9a4..90b82ff07a 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ Installation
The installation process is a bit messy at the moment. Here's a high-level
overview of what you should do to get started.
-**TLDR:** There is a `create-dev-env.sh` script that will attempt to set all
+**TLDR:** There is a `scripts/create-dev-env.sh` script that will attempt to set all
of this up for you. If you're in a hurry, run that script. Otherwise, I suggest
that you understand what the script is doing, and why, by reading this document.
@@ -77,11 +77,16 @@ environment), and Node has a library installer called
Once you've got your languages and virtual environments set up, install
the libraries like so:
- $ pip install -r pre-requirements.txt
- $ pip install -r requirements.txt
+ $ pip install -r requirements/base.txt
+ $ pip install -r requirements/post.txt
$ bundle install
$ npm install
+You can also use [`rake`](http://rake.rubyforge.org/) to get all of the prerequisites (or to update
+them if they've changed)
+
+ $ rake install_prereqs
+
Other Dependencies
------------------
You'll also need to install [MongoDB](http://www.mongodb.org/), since our
@@ -137,7 +142,7 @@ Studio, visit `127.0.0.1:8001` in your web browser; to view the LMS, visit
There's also an older version of the LMS that saves its information in XML files
in the `data` directory, instead of in Mongo. To run this older version, run:
-$ rake lms
+ $ rake lms
Further Documentation
=====================
diff --git a/__init__.py b/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/cms/djangoapps/contentstore/tests/test_contentstore.py b/cms/djangoapps/contentstore/tests/test_contentstore.py
index c11b350349..0aec61729c 100644
--- a/cms/djangoapps/contentstore/tests/test_contentstore.py
+++ b/cms/djangoapps/contentstore/tests/test_contentstore.py
@@ -646,7 +646,7 @@ class ContentStoreTest(ModuleStoreTestCase):
resp = self.client.get(reverse('index'))
self.assertContains(
resp,
- '
My Courses ',
+ '',
status_code=200,
html=True
)
diff --git a/cms/djangoapps/contentstore/tests/test_i18n.py b/cms/djangoapps/contentstore/tests/test_i18n.py
index 4188b43857..a292b7316e 100644
--- a/cms/djangoapps/contentstore/tests/test_i18n.py
+++ b/cms/djangoapps/contentstore/tests/test_i18n.py
@@ -48,7 +48,7 @@ class InternationalizationTest(ModuleStoreTestCase):
resp = self.client.get(reverse('index'))
self.assertContains(resp,
- 'My Courses ',
+ '',
status_code=200,
html=True)
@@ -63,7 +63,7 @@ class InternationalizationTest(ModuleStoreTestCase):
)
self.assertContains(resp,
- 'My Courses ',
+ '',
status_code=200,
html=True)
diff --git a/cms/static/sass/_variables.scss b/cms/static/sass/_variables.scss
index 712ef9153f..14c215c7fd 100644
--- a/cms/static/sass/_variables.scss
+++ b/cms/static/sass/_variables.scss
@@ -184,6 +184,6 @@ $lightBluishGrey2: rgb(213, 220, 228);
$error-red: rgb(253, 87, 87);
// type
-$sans-serif: $f-serif;
+$sans-serif: $f-sans-serif;
$body-line-height: golden-ratio(.875em, 1);
diff --git a/common/lib/capa/capa/tests/test_responsetypes.py b/common/lib/capa/capa/tests/test_responsetypes.py
index da3d45ad74..5fbc7f8c87 100644
--- a/common/lib/capa/capa/tests/test_responsetypes.py
+++ b/common/lib/capa/capa/tests/test_responsetypes.py
@@ -708,7 +708,7 @@ class JavascriptResponseTest(ResponseTest):
def test_grade(self):
# Compile coffee files into javascript used by the response
coffee_file_path = os.path.dirname(__file__) + "/test_files/js/*.coffee"
- os.system("coffee -c %s" % (coffee_file_path))
+ os.system("node_modules/.bin/coffee -c %s" % (coffee_file_path))
problem = self.build_problem(generator_src="test_problem_generator.js",
grader_src="test_problem_grader.js",
diff --git a/common/lib/capa/jasmine_test_runner.html.erb b/common/lib/capa/jasmine_test_runner.html.erb
new file mode 100644
index 0000000000..7b078daedd
--- /dev/null
+++ b/common/lib/capa/jasmine_test_runner.html.erb
@@ -0,0 +1,48 @@
+
+
+
+ Jasmine Test Runner
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <% for src in js_source %>
+
+ <% end %>
+
+
+ <% for src in js_specs %>
+
+ <% end %>
+
+
+
+
+
+
+
+
diff --git a/common/lib/capa/setup.py b/common/lib/capa/setup.py
index d9c813f55c..7719626c3e 100644
--- a/common/lib/capa/setup.py
+++ b/common/lib/capa/setup.py
@@ -4,5 +4,5 @@ setup(
name="capa",
version="0.1",
packages=find_packages(exclude=["tests"]),
- install_requires=['distribute==0.6.30', 'pyparsing==1.5.6'],
+ install_requires=['distribute==0.6.28', 'pyparsing==1.5.6'],
)
diff --git a/common/lib/xmodule/xmodule/templates.py b/common/lib/xmodule/xmodule/templates.py
index f4e37ab0d5..6479b3df24 100644
--- a/common/lib/xmodule/xmodule/templates.py
+++ b/common/lib/xmodule/xmodule/templates.py
@@ -4,8 +4,18 @@ These templates are used by the CMS to provide baseline content that
can be cloned when adding new modules to a course.
`Template`s are defined in x_module. They contain 3 attributes:
- metadata: A dictionary with the template metadata
- data: A JSON value that defines the template content
+ metadata: A dictionary with the template metadata. This should contain
+ any values for fields
+ * with scope Scope.settings
+ * that have values different than the field defaults
+ * and that are to be editable in Studio
+ data: A JSON value that defines the template content. This should be a dictionary
+ containing values for fields
+ * with scope Scope.content
+ * that have values different than the field defaults
+ * and that are to be editable in Studio
+ or, if the module uses a single Scope.content String field named `data`, this
+ should be a string containing the contents of that field
children: A list of Location urls that define the template children
Templates are defined on XModuleDescriptor types, in the template attribute.
diff --git a/distribute-0.6.32.tar.gz b/distribute-0.6.32.tar.gz
deleted file mode 100644
index 2438db60fa..0000000000
Binary files a/distribute-0.6.32.tar.gz and /dev/null differ
diff --git a/distribute-0.6.34.tar.gz b/distribute-0.6.34.tar.gz
deleted file mode 100644
index 4e91b3af62..0000000000
Binary files a/distribute-0.6.34.tar.gz and /dev/null differ
diff --git a/doc/development.md b/doc/development.md
index a6a1de4ef7..c99e99f906 100644
--- a/doc/development.md
+++ b/doc/development.md
@@ -36,7 +36,7 @@ Check out the course data directories that you want to work with into the
To create your development environment, run the shell script in the root of
the repo:
- create-dev-env.sh
+ scripts/create-dev-env.sh
## Starting development servers
diff --git a/fixtures/anonymize_fixtures.py b/fixtures/anonymize_fixtures.py
deleted file mode 100755
index ba62652de5..0000000000
--- a/fixtures/anonymize_fixtures.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-import json
-import random
-import copy
-from collections import defaultdict
-from argparse import ArgumentParser, FileType
-from datetime import datetime
-
-def generate_user(user_number):
- return {
- "pk": user_number,
- "model": "auth.user",
- "fields": {
- "status": "w",
- "last_name": "Last",
- "gold": 0,
- "is_staff": False,
- "user_permissions": [],
- "interesting_tags": "",
- "email_key": None,
- "date_joined": "2012-04-26 11:36:39",
- "first_name": "",
- "email_isvalid": False,
- "avatar_type": "n",
- "website": "",
- "is_superuser": False,
- "date_of_birth": None,
- "last_login": "2012-04-26 11:36:48",
- "location": "",
- "new_response_count": 0,
- "email": "user{num}@example.com".format(num=user_number),
- "username": "user{num}".format(num=user_number),
- "is_active": True,
- "consecutive_days_visit_count": 0,
- "email_tag_filter_strategy": 1,
- "groups": [],
- "password": "sha1$90e6f$562a1d783a0c47ce06ebf96b8c58123a0671bbf0",
- "silver": 0,
- "bronze": 0,
- "questions_per_page": 10,
- "about": "",
- "show_country": True,
- "country": "",
- "display_tag_filter_strategy": 0,
- "seen_response_count": 0,
- "real_name": "",
- "ignored_tags": "",
- "reputation": 1,
- "gravatar": "366d981a10116969c568a18ee090f44c",
- "last_seen": "2012-04-26 11:36:39"
- }
- }
-
-
-def parse_args(args=sys.argv[1:]):
- parser = ArgumentParser()
- parser.add_argument('-d', '--data', type=FileType('r'), default=sys.stdin)
- parser.add_argument('-o', '--output', type=FileType('w'), default=sys.stdout)
- parser.add_argument('count', type=int)
- return parser.parse_args(args)
-
-
-def main(args=sys.argv[1:]):
- args = parse_args(args)
-
- data = json.load(args.data)
- unique_students = set(entry['fields']['student'] for entry in data)
- if args.count > len(unique_students) * 0.1:
- raise Exception("Can't be sufficiently anonymous selecting {count} of {unique} students".format(
- count=args.count, unique=len(unique_students)))
-
- by_problems = defaultdict(list)
- for entry in data:
- by_problems[entry['fields']['module_id']].append(entry)
-
- out_data = []
- out_pk = 1
- for name, answers in by_problems.items():
- for student_id in xrange(args.count):
- sample = random.choice(answers)
- data = copy.deepcopy(sample)
- data["fields"]["student"] = student_id + 1
- data["fields"]["created"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- data["fields"]["modified"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
- data["pk"] = out_pk
- out_pk += 1
- out_data.append(data)
-
- for student_id in xrange(args.count):
- out_data.append(generate_user(student_id))
-
- json.dump(out_data, args.output, indent=2)
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/fixtures/pm.json b/fixtures/pm.json
deleted file mode 100644
index 5ecb839093..0000000000
--- a/fixtures/pm.json
+++ /dev/null
@@ -1 +0,0 @@
-[{"pk": 1, "model": "user.userprofile", "fields": {"name": "pm", "language": "pm", "courseware": "course.xml", "meta": "", "location": "pm", "user": 1}}, {"pk": 1, "model": "auth.user", "fields": {"status": "w", "last_name": "", "gold": 0, "is_staff": true, "user_permissions": [], "interesting_tags": "", "email_key": null, "date_joined": "2012-01-23 17:03:54", "first_name": "", "email_isvalid": false, "avatar_type": "n", "website": "", "is_superuser": true, "date_of_birth": null, "last_login": "2012-01-23 17:04:16", "location": "", "new_response_count": 0, "email": "pmitros@csail.mit.edu", "username": "pm", "is_active": true, "consecutive_days_visit_count": 0, "email_tag_filter_strategy": 1, "groups": [], "password": "sha1$a3e96$dbabbd114f0da01bce2cc2adcafa2ca651c7ae0a", "silver": 0, "bronze": 0, "questions_per_page": 10, "about": "", "show_country": false, "country": "", "display_tag_filter_strategy": 0, "seen_response_count": 0, "real_name": "", "ignored_tags": "", "reputation": 1, "gravatar": "7a591afd0cc7972fdbe5e12e26af352a", "last_seen": "2012-01-23 17:04:41"}}, {"pk": 1, "model": "user.userprofile", "fields": {"name": "pm", "language": "pm", "courseware": "course.xml", "meta": "", "location": "pm", "user": 1}}, {"pk": 1, "model": "auth.user", "fields": {"status": "w", "last_name": "", "gold": 0, "is_staff": true, "user_permissions": [], "interesting_tags": "", "email_key": null, "date_joined": "2012-01-23 17:03:54", "first_name": "", "email_isvalid": false, "avatar_type": "n", "website": "", "is_superuser": true, "date_of_birth": null, "last_login": "2012-01-23 17:04:16", "location": "", "new_response_count": 0, "email": "pmitros@csail.mit.edu", "username": "pm", "is_active": true, "consecutive_days_visit_count": 0, "email_tag_filter_strategy": 1, "groups": [], "password": "sha1$a3e96$dbabbd114f0da01bce2cc2adcafa2ca651c7ae0a", "silver": 0, "bronze": 0, "questions_per_page": 10, "about": "", "show_country": false, "country": "", 
"display_tag_filter_strategy": 0, "seen_response_count": 0, "real_name": "", "ignored_tags": "", "reputation": 1, "gravatar": "7a591afd0cc7972fdbe5e12e26af352a", "last_seen": "2012-01-23 17:04:41"}}]
\ No newline at end of file
diff --git a/jenkins/base.sh b/jenkins/base.sh
deleted file mode 100644
index 7eb4802b8f..0000000000
--- a/jenkins/base.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-##
-## requires >= 1.3.0 of the Jenkins git plugin
-##
-
-function github_status {
-
- if [[ ! ${GIT_URL} =~ git@github.com:([^/]+)/([^\.]+).git ]]; then
- echo "Cannot parse Github org or repo from URL, using defaults."
- ORG="edx"
- REPO="mitx"
- else
- ORG=${BASH_REMATCH[1]}
- REPO=${BASH_REMATCH[2]}
- fi
-
- gcli status create $ORG $REPO $GIT_COMMIT \
- --params=$1 \
- target_url:$BUILD_URL \
- description:"Build #$BUILD_NUMBER is running" \
- -f csv
-}
-
-function github_mark_failed_on_exit {
- trap '[ $? == "0" ] || github_status state:failed' EXIT
-}
diff --git a/jenkins/test.sh b/jenkins/test.sh
index 0964ce5dd9..32279fe22f 100755
--- a/jenkins/test.sh
+++ b/jenkins/test.sh
@@ -3,8 +3,21 @@
set -e
set -x
+##
+## requires >= 1.3.0 of the Jenkins git plugin
+##
+
function github_status {
- gcli status create edx edx-platform $GIT_COMMIT \
+ if [[ ! ${GIT_URL} =~ git@github.com:([^/]+)/([^\.]+).git ]]; then
+ echo "Cannot parse Github org or repo from URL, using defaults."
+ ORG="edx"
+ REPO="edx-platform"
+ else
+ ORG=${BASH_REMATCH[1]}
+ REPO=${BASH_REMATCH[2]}
+ fi
+
+ gcli status create $ORG $REPO $GIT_COMMIT \
--params=$1 \
target_url:$BUILD_URL \
description:"Build #$BUILD_NUMBER $2" \
@@ -27,21 +40,32 @@ git submodule foreach 'git reset --hard HEAD'
export PYTHONIOENCODING=UTF-8
GIT_BRANCH=${GIT_BRANCH/HEAD/master}
-if [ ! -d /mnt/virtualenvs/"$JOB_NAME" ]; then
- mkdir -p /mnt/virtualenvs/"$JOB_NAME"
- virtualenv /mnt/virtualenvs/"$JOB_NAME"
+
+# When running in parallel on jenkins, workspace could be suffixed by @x
+# In that case, we want to use a separate virtualenv that matches up with
+# workspace
+#
+# We need to handle both the case of /path/to/workspace
+# and /path/to/workspace@2, which is why we use the following substitutions
+#
+# $WORKSPACE is the absolute path for the workspace
+WORKSPACE_SUFFIX=$(expr "$WORKSPACE" : '.*\(@.*\)') || true
+
+VIRTUALENV_DIR="/mnt/virtualenvs/${JOB_NAME}${WORKSPACE_SUFFIX}"
+
+if [ ! -d "$VIRTUALENV_DIR" ]; then
+ mkdir -p "$VIRTUALENV_DIR"
+ virtualenv "$VIRTUALENV_DIR"
fi
export PIP_DOWNLOAD_CACHE=/mnt/pip-cache
+# Allow django liveserver tests to use a range of ports
+export DJANGO_LIVE_TEST_SERVER_ADDRESS=${DJANGO_LIVE_TEST_SERVER_ADDRESS-localhost:8000-9000}
+
source /mnt/virtualenvs/"$JOB_NAME"/bin/activate
-pip install -q -r pre-requirements.txt
-yes w | pip install -q -r requirements.txt
-
-bundle install
-
-npm install
+rake install_prereqs
rake clobber
rake pep8 > pep8.log || cat pep8.log
rake pylint > pylint.log || cat pylint.log
diff --git a/lms/djangoapps/courseware/tests/tests.py b/lms/djangoapps/courseware/tests/tests.py
index 235f7d60bb..d50e0b4526 100644
--- a/lms/djangoapps/courseware/tests/tests.py
+++ b/lms/djangoapps/courseware/tests/tests.py
@@ -631,8 +631,8 @@ class TestViewAuth(LoginEnrollmentTestCase):
urls = reverse_urls(['info', 'progress'], course)
urls.extend([
reverse('book', kwargs={'course_id': course.id,
- 'book_index': book.title})
- for book in course.textbooks
+ 'book_index': index})
+ for index, book in enumerate(course.textbooks)
])
return urls
@@ -643,8 +643,6 @@ class TestViewAuth(LoginEnrollmentTestCase):
"""
urls = reverse_urls(['about_course'], course)
urls.append(reverse('courses'))
- # Need separate test for change_enrollment, since it's a POST view
- #urls.append(reverse('change_enrollment'))
return urls
diff --git a/lms/envs/common.py b/lms/envs/common.py
index e6d761c070..71a14c3ea5 100644
--- a/lms/envs/common.py
+++ b/lms/envs/common.py
@@ -123,9 +123,7 @@ sys.path.append(COMMON_ROOT / 'lib')
# For Node.js
-system_node_path = os.environ.get("NODE_PATH", None)
-if system_node_path is None:
- system_node_path = "/usr/local/lib/node_modules"
+system_node_path = os.environ.get("NODE_PATH", REPO_ROOT / 'node_modules')
node_paths = [COMMON_ROOT / "static/js/vendor",
COMMON_ROOT / "static/coffee/src",
diff --git a/pre-requirements.txt b/pre-requirements.txt
deleted file mode 100644
index d39199a741..0000000000
--- a/pre-requirements.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-# We use `scipy` in our project, which relies on `numpy`. `pip` apparently
-# installs packages in a two-step process, where it will first try to build
-# all packages, and then try to install all packages. As a result, if we simply
-# added these packages to the top of `requirements.txt`, `pip` would try to
-# build `scipy` before `numpy` has been installed, and it would fail. By
-# separating this out into a `pre-requirements.txt` file, we can make sure
-# that `numpy` is built *and* installed before we try to build `scipy`.
-
-numpy==1.6.2
-distribute>=0.6.28
diff --git a/rakefile b/rakefile
index f677c4b8b3..cef93e67eb 100644
--- a/rakefile
+++ b/rakefile
@@ -1,641 +1,12 @@
require 'rake/clean'
-require 'tempfile'
-require 'net/http'
-require 'launchy'
-require 'colorize'
-require 'erb'
-require 'tempfile'
+require './rakefiles/helpers.rb'
+
+Dir['rakefiles/*.rake'].each do |rakefile|
+ import rakefile
+end
# Build Constants
REPO_ROOT = File.dirname(__FILE__)
-BUILD_DIR = File.join(REPO_ROOT, "build")
REPORT_DIR = File.join(REPO_ROOT, "reports")
-LMS_REPORT_DIR = File.join(REPORT_DIR, "lms")
-
-# Packaging constants
-DEPLOY_DIR = "/opt/wwc"
-PACKAGE_NAME = "edx-platform"
-PKG_VERSION = "0.1"
-COMMIT = (ENV["GIT_COMMIT"] || `git rev-parse HEAD`).chomp()[0, 10]
-BRANCH = (ENV["GIT_BRANCH"] || `git symbolic-ref -q HEAD`).chomp().gsub('refs/heads/', '').gsub('origin/', '')
-BUILD_NUMBER = (ENV["BUILD_NUMBER"] || "dev").chomp()
-
-# Set up the clean and clobber tasks
-CLOBBER.include(BUILD_DIR, REPORT_DIR, 'test_root/*_repo', 'test_root/staticfiles')
-CLEAN.include("#{BUILD_DIR}/*.deb", "#{BUILD_DIR}/util")
-
-def select_executable(*cmds)
- cmds.find_all{ |cmd| system("which #{cmd} > /dev/null 2>&1") }[0] || fail("No executables found from #{cmds.join(', ')}")
-end
-
-def django_admin(system, env, command, *args)
- django_admin = ENV['DJANGO_ADMIN_PATH'] || select_executable('django-admin.py', 'django-admin')
- return "#{django_admin} #{command} --traceback --settings=#{system}.envs.#{env} --pythonpath=. #{args.join(' ')}"
-end
-
-# Runs Process.spawn, and kills the process at the end of the rake process
-# Expects the same arguments as Process.spawn
-def background_process(*command)
- pid = Process.spawn({}, *command, {:pgroup => true})
-
- at_exit do
- puts "Ending process and children"
- pgid = Process.getpgid(pid)
- begin
- Timeout.timeout(5) do
- puts "Terminating process group #{pgid}"
- Process.kill(:SIGTERM, -pgid)
- puts "Waiting on process group #{pgid}"
- Process.wait(-pgid)
- puts "Done waiting on process group #{pgid}"
- end
- rescue Timeout::Error
- puts "Killing process group #{pgid}"
- Process.kill(:SIGKILL, -pgid)
- puts "Waiting on process group #{pgid}"
- Process.wait(-pgid)
- puts "Done waiting on process group #{pgid}"
- end
- end
-end
-
-def django_for_jasmine(system, django_reload)
- if !django_reload
- reload_arg = '--noreload'
- end
-
- port = 10000 + rand(40000)
- jasmine_url = "http://localhost:#{port}/_jasmine/"
-
- background_process(*django_admin(system, 'jasmine', 'runserver', '-v', '0', port.to_s, reload_arg).split(' '))
-
- up = false
- start_time = Time.now
- until up do
- if Time.now - start_time > 30
- abort "Timed out waiting for server to start to run jasmine tests"
- end
- begin
- response = Net::HTTP.get_response(URI(jasmine_url))
- puts response.code
- up = response.code == '200'
- rescue => e
- puts e.message
- ensure
- puts('Waiting server to start')
- sleep(0.5)
- end
- end
- yield jasmine_url
-end
-
-def template_jasmine_runner(lib)
- coffee_files = Dir["#{lib}/**/js/**/*.coffee", "common/static/coffee/src/**/*.coffee"]
- if !coffee_files.empty?
- sh("node_modules/.bin/coffee -c #{coffee_files.join(' ')}")
- end
- phantom_jasmine_path = File.expand_path("node_modules/phantom-jasmine")
- common_js_root = File.expand_path("common/static/js")
- common_coffee_root = File.expand_path("common/static/coffee/src")
-
- # Get arrays of spec and source files, ordered by how deep they are nested below the library
- # (and then alphabetically) and expanded from a relative to an absolute path
- spec_glob = File.join("#{lib}", "**", "spec", "**", "*.js")
- src_glob = File.join("#{lib}", "**", "src", "**", "*.js")
- js_specs = Dir[spec_glob].sort_by {|p| [p.split('/').length, p]} .map {|f| File.expand_path(f)}
- js_source = Dir[src_glob].sort_by {|p| [p.split('/').length, p]} .map {|f| File.expand_path(f)}
-
- template = ERB.new(File.read("#{lib}/jasmine_test_runner.html.erb"))
- template_output = "#{lib}/jasmine_test_runner.html"
- File.open(template_output, 'w') do |f|
- f.write(template.result(binding))
- end
- yield File.expand_path(template_output)
-end
-
-
-def report_dir_path(dir)
- return File.join(REPORT_DIR, dir.to_s)
-end
-
-def compile_assets(watch=false, debug=false)
- xmodule_cmd = 'xmodule_assets common/static/xmodule'
- if watch
- xmodule_cmd = "watchmedo shell-command \
- --patterns='*.js;*.coffee;*.sass;*.scss;*.css' \
- --recursive \
- --command='#{xmodule_cmd}' \
- common/lib/xmodule"
- end
- coffee_cmd = "node_modules/.bin/coffee #{watch ? '--watch' : ''} --compile */static"
- sass_cmd = "sass #{debug ? '--debug-info' : '--style compressed'} " +
- "--load-path ./common/static/sass " +
- "--require ./common/static/sass/bourbon/lib/bourbon.rb " +
- "#{watch ? '--watch' : '--update --force'} */static"
-
- [xmodule_cmd, coffee_cmd, sass_cmd].each do |cmd|
- if watch
- background_process(cmd)
- else
- pid = Process.spawn(cmd)
- puts "Waiting for `#{cmd}` to complete (pid #{pid})"
- Process.wait(pid)
- puts "Completed"
- if !$?.exited? || $?.exitstatus != 0
- abort "`#{cmd}` failed"
- end
- end
- end
-end
task :default => [:test, :pep8, :pylint]
-
-directory REPORT_DIR
-
-default_options = {
- :lms => '8000',
- :cms => '8001',
-}
-
-desc "Install all prerequisites needed for the lms and cms"
-task :install_prereqs => [:install_node_prereqs, :install_ruby_prereqs, :install_python_prereqs]
-
-desc "Install all node prerequisites for the lms and cms"
-task :install_node_prereqs do
- sh('npm install')
-end
-
-desc "Install all ruby prerequisites for the lms and cms"
-task :install_ruby_prereqs do
- sh('bundle install')
-end
-
-desc "Install all python prerequisites for the lms and cms"
-task :install_python_prereqs do
- sh('pip install -r requirements.txt')
- # Check for private-requirements.txt: used to install our libs as working dirs,
- # or personal-use tools.
- if File.file?("private-requirements.txt")
- sh('pip install -r private-requirements.txt')
- end
-end
-
-task :predjango do
- sh("find . -type f -name *.pyc -delete")
- sh('pip install -q --no-index -r local-requirements.txt')
-end
-
-task :clean_test_files do
- sh("git clean -fqdx test_root")
-end
-
-[:lms, :cms, :common].each do |system|
- report_dir = report_dir_path(system)
- directory report_dir
-
- desc "Run pep8 on all #{system} code"
- task "pep8_#{system}" => report_dir do
- sh("pep8 #{system} | tee #{report_dir}/pep8.report")
- end
- task :pep8 => "pep8_#{system}"
-
- desc "Run pylint on all #{system} code"
- task "pylint_#{system}" => report_dir do
- apps = Dir["#{system}/*.py", "#{system}/djangoapps/*", "#{system}/lib/*"].map do |app|
- File.basename(app)
- end.select do |app|
- app !=~ /.pyc$/
- end.map do |app|
- if app =~ /.py$/
- app.gsub('.py', '')
- else
- app
- end
- end
-
- pythonpath_prefix = "PYTHONPATH=#{system}:#{system}/djangoapps:#{system}/lib:common/djangoapps:common/lib"
- sh("#{pythonpath_prefix} pylint --rcfile=.pylintrc -f parseable #{apps.join(' ')} | tee #{report_dir}/pylint.report")
- end
- task :pylint => "pylint_#{system}"
-
-end
-
-$failed_tests = 0
-
-def run_under_coverage(cmd, root)
- cmd0, cmd_rest = cmd.split(" ", 2)
- # We use "python -m coverage" so that the proper python will run the importable coverage
- # rather than the coverage that OS path finds.
- cmd = "python -m coverage run --rcfile=#{root}/.coveragerc `which #{cmd0}` #{cmd_rest}"
- return cmd
-end
-
-def run_tests(system, report_dir, stop_on_failure=true)
- ENV['NOSE_XUNIT_FILE'] = File.join(report_dir, "nosetests.xml")
- dirs = Dir["common/djangoapps/*"] + Dir["#{system}/djangoapps/*"]
- cmd = django_admin(system, :test, 'test', '--logging-clear-handlers', *dirs.each)
- sh(run_under_coverage(cmd, system)) do |ok, res|
- if !ok and stop_on_failure
- abort "Test failed!"
- end
- $failed_tests += 1 unless ok
- end
-end
-
-def run_acceptance_tests(system, report_dir, harvest_args)
- sh(django_admin(system, 'acceptance', 'syncdb', '--noinput'))
- sh(django_admin(system, 'acceptance', 'migrate', '--noinput'))
- sh(django_admin(system, 'acceptance', 'harvest', '--debug-mode', '--tag -skip', harvest_args))
-end
-
-
-TEST_TASK_DIRS = []
-
-task :fastlms do
- # this is >2 times faster that rake [lms], and does not need web, good for local dev
- django_admin = ENV['DJANGO_ADMIN_PATH'] || select_executable('django-admin.py', 'django-admin')
- sh("#{django_admin} runserver --traceback --settings=lms.envs.dev --pythonpath=.")
-end
-
-[:lms, :cms].each do |system|
- report_dir = report_dir_path(system)
-
- # Per System tasks
- desc "Run all django tests on our djangoapps for the #{system}"
- task "test_#{system}", [:stop_on_failure] => ["clean_test_files", :predjango, "#{system}:gather_assets:test", "fasttest_#{system}"]
-
- # Have a way to run the tests without running collectstatic -- useful when debugging without
- # messing with static files.
- task "fasttest_#{system}", [:stop_on_failure] => [report_dir, :predjango] do |t, args|
- args.with_defaults(:stop_on_failure => 'true')
- run_tests(system, report_dir, args.stop_on_failure)
- end
-
- task :fasttest => "fasttest_#{system}"
-
- # Run acceptance tests
- desc "Run acceptance tests"
- task "test_acceptance_#{system}", [:harvest_args] => ["#{system}:gather_assets:acceptance", "fasttest_acceptance_#{system}"]
-
- desc "Run acceptance tests without collectstatic"
- task "fasttest_acceptance_#{system}", [:harvest_args] => ["clean_test_files", :predjango, report_dir] do |t, args|
- args.with_defaults(:harvest_args => '')
- run_acceptance_tests(system, report_dir, args.harvest_args)
- end
-
-
- TEST_TASK_DIRS << system
-
- desc <<-desc
- Start the #{system} locally with the specified environment (defaults to dev).
- Other useful environments are devplus (for dev testing with a real local database)
- desc
- task system, [:env, :options] => [:predjango] do |t, args|
- args.with_defaults(:env => 'dev', :options => default_options[system])
-
- # Compile all assets first
- compile_assets(watch=false, debug=true)
-
- # Listen for any changes to assets
- compile_assets(watch=true, debug=true)
-
- sh(django_admin(system, args.env, 'runserver', args.options))
- end
-
- # Per environment tasks
- Dir["#{system}/envs/**/*.py"].each do |env_file|
- env = env_file.gsub("#{system}/envs/", '').gsub(/\.py/, '').gsub('/', '.')
- desc "Attempt to import the settings file #{system}.envs.#{env} and report any errors"
- task "#{system}:check_settings:#{env}" => :predjango do
- sh("echo 'import #{system}.envs.#{env}' | #{django_admin(system, env, 'shell')}")
- end
-
- desc "Compile coffeescript and sass, and then run collectstatic in the specified environment"
- task "#{system}:gather_assets:#{env}" do
- compile_assets()
- sh("#{django_admin(system, env, 'collectstatic', '--noinput')} > /dev/null") do |ok, status|
- if !ok
- abort "collectstatic failed!"
- end
- end
- end
- end
-
- desc "Open jasmine tests for #{system} in your default browser"
- task "browse_jasmine_#{system}" do
- compile_assets()
- django_for_jasmine(system, true) do |jasmine_url|
- Launchy.open(jasmine_url)
- puts "Press ENTER to terminate".red
- $stdin.gets
- end
- end
-
- desc "Use phantomjs to run jasmine tests for #{system} from the console"
- task "phantomjs_jasmine_#{system}" do
- compile_assets()
- phantomjs = ENV['PHANTOMJS_PATH'] || 'phantomjs'
- django_for_jasmine(system, false) do |jasmine_url|
- sh("#{phantomjs} node_modules/phantom-jasmine/lib/run_jasmine_test.coffee #{jasmine_url}")
- end
- end
-end
-
-desc "Reset the relational database used by django. WARNING: this will delete all of your existing users"
-task :resetdb, [:env] do |t, args|
- args.with_defaults(:env => 'dev')
- sh(django_admin(:lms, args.env, 'syncdb'))
- sh(django_admin(:lms, args.env, 'migrate'))
-end
-
-desc "Update the relational database to the latest migration"
-task :migrate, [:env] do |t, args|
- args.with_defaults(:env => 'dev')
- sh(django_admin(:lms, args.env, 'migrate'))
-end
-
-Dir["common/lib/*"].select{|lib| File.directory?(lib)}.each do |lib|
- task_name = "test_#{lib}"
-
- report_dir = report_dir_path(lib)
-
- desc "Run tests for common lib #{lib}"
- task task_name => report_dir do
- ENV['NOSE_XUNIT_FILE'] = File.join(report_dir, "nosetests.xml")
- cmd = "nosetests #{lib}"
- sh(run_under_coverage(cmd, lib)) do |ok, res|
- $failed_tests += 1 unless ok
- end
- end
- TEST_TASK_DIRS << lib
-
- desc "Run tests for common lib #{lib} (without coverage)"
- task "fasttest_#{lib}" do
- sh("nosetests #{lib}")
- end
-
- desc "Open jasmine tests for #{lib} in your default browser"
- task "browse_jasmine_#{lib}" do
- template_jasmine_runner(lib) do |f|
- sh("python -m webbrowser -t 'file://#{f}'")
- puts "Press ENTER to terminate".red
- $stdin.gets
- end
- end
-
- desc "Use phantomjs to run jasmine tests for #{lib} from the console"
- task "phantomjs_jasmine_#{lib}" do
- phantomjs = ENV['PHANTOMJS_PATH'] || 'phantomjs'
- template_jasmine_runner(lib) do |f|
- sh("#{phantomjs} node_modules/phantom-jasmine/lib/run_jasmine_test.coffee #{f}")
- end
- end
-end
-
-task :report_dirs
-
-TEST_TASK_DIRS.each do |dir|
- report_dir = report_dir_path(dir)
- directory report_dir
- task :report_dirs => [REPORT_DIR, report_dir]
-end
-
-task :test do
- TEST_TASK_DIRS.each do |dir|
- Rake::Task["test_#{dir}"].invoke(false)
- end
-
- if $failed_tests > 0
- abort "Tests failed!"
- end
-end
-
-namespace :coverage do
- desc "Build the html coverage reports"
- task :html => :report_dirs do
- TEST_TASK_DIRS.each do |dir|
- report_dir = report_dir_path(dir)
-
- if !File.file?("#{report_dir}/.coverage")
- next
- end
-
- sh("coverage html --rcfile=#{dir}/.coveragerc")
- end
- end
-
- desc "Build the xml coverage reports"
- task :xml => :report_dirs do
- TEST_TASK_DIRS.each do |dir|
- report_dir = report_dir_path(dir)
-
- if !File.file?("#{report_dir}/.coverage")
- next
- end
- # Why doesn't the rcfile control the xml output file properly??
- sh("coverage xml -o #{report_dir}/coverage.xml --rcfile=#{dir}/.coveragerc")
- end
- end
-end
-
-task :runserver => :lms
-
-desc "Run django-admin against the specified system and environment"
-task "django-admin", [:action, :system, :env, :options] do |t, args|
- args.with_defaults(:env => 'dev', :system => 'lms', :options => '')
- sh(django_admin(args.system, args.env, args.action, args.options))
-end
-
-desc "Set the staff bit for a user"
-task :set_staff, [:user, :system, :env] do |t, args|
- args.with_defaults(:env => 'dev', :system => 'lms', :options => '')
- sh(django_admin(args.system, args.env, 'set_staff', args.user))
-end
-
-namespace :cms do
- desc "Clone existing MongoDB based course"
- task :clone do
-
- if ENV['SOURCE_LOC'] and ENV['DEST_LOC']
- sh(django_admin(:cms, :dev, :clone, ENV['SOURCE_LOC'], ENV['DEST_LOC']))
- else
- raise "You must pass in a SOURCE_LOC and DEST_LOC parameters"
- end
- end
-
- desc "Delete existing MongoDB based course"
- task :delete_course do
-
- if ENV['LOC'] and ENV['COMMIT']
- sh(django_admin(:cms, :dev, :delete_course, ENV['LOC'], ENV['COMMIT']))
- elsif ENV['LOC']
- sh(django_admin(:cms, :dev, :delete_course, ENV['LOC']))
- else
- raise "You must pass in a LOC parameter"
- end
- end
-
- desc "Import course data within the given DATA_DIR variable"
- task :import do
- if ENV['DATA_DIR'] and ENV['COURSE_DIR']
- sh(django_admin(:cms, :dev, :import, ENV['DATA_DIR'], ENV['COURSE_DIR']))
- elsif ENV['DATA_DIR']
- sh(django_admin(:cms, :dev, :import, ENV['DATA_DIR']))
- else
- raise "Please specify a DATA_DIR variable that point to your data directory.\n" +
- "Example: \`rake cms:import DATA_DIR=../data\`"
- end
- end
-
- desc "Imports all the templates from the code pack"
- task :update_templates do
- sh(django_admin(:cms, :dev, :update_templates))
- end
-
- desc "Import course data within the given DATA_DIR variable"
- task :xlint do
- if ENV['DATA_DIR'] and ENV['COURSE_DIR']
- sh(django_admin(:cms, :dev, :xlint, ENV['DATA_DIR'], ENV['COURSE_DIR']))
- elsif ENV['DATA_DIR']
- sh(django_admin(:cms, :dev, :xlint, ENV['DATA_DIR']))
- else
- raise "Please specify a DATA_DIR variable that point to your data directory.\n" +
- "Example: \`rake cms:import DATA_DIR=../data\`"
- end
- end
-
- desc "Export course data to a tar.gz file"
- task :export do
- if ENV['COURSE_ID'] and ENV['OUTPUT_PATH']
- sh(django_admin(:cms, :dev, :export, ENV['COURSE_ID'], ENV['OUTPUT_PATH']))
- else
- raise "Please specify a COURSE_ID and OUTPUT_PATH.\n" +
- "Example: \`rake cms:export COURSE_ID=MITx/12345/name OUTPUT_PATH=foo.tar.gz\`"
- end
- end
-end
-
-desc "Build a properties file used to trigger autodeploy builds"
-task :autodeploy_properties do
- File.open("autodeploy.properties", "w") do |file|
- file.puts("UPSTREAM_NOOP=false")
- file.puts("UPSTREAM_BRANCH=#{BRANCH}")
- file.puts("UPSTREAM_JOB=#{PACKAGE_NAME}")
- file.puts("UPSTREAM_REVISION=#{COMMIT}")
- end
-end
-
-# --- Internationalization tasks
-
-namespace :i18n do
-
- desc "Extract localizable strings from sources"
- task :extract => "i18n:validate:gettext" do
- sh(File.join(REPO_ROOT, "i18n", "extract.py"))
- end
-
- desc "Compile localizable strings from sources. With optional flag 'extract', will extract strings first."
- task :generate => "i18n:validate:gettext" do
- if ARGV.last.downcase == 'extract'
- Rake::Task["i18n:extract"].execute
- end
- sh(File.join(REPO_ROOT, "i18n", "generate.py"))
- end
-
- desc "Simulate international translation by generating dummy strings corresponding to source strings."
- task :dummy do
- source_files = Dir["#{REPO_ROOT}/conf/locale/en/LC_MESSAGES/*.po"]
- dummy_locale = 'fr'
- cmd = File.join(REPO_ROOT, "i18n", "make_dummy.py")
- for file in source_files do
- sh("#{cmd} #{file} #{dummy_locale}")
- end
- end
-
- namespace :validate do
-
- desc "Make sure GNU gettext utilities are available"
- task :gettext do
- begin
- select_executable('xgettext')
- rescue
- msg = "Cannot locate GNU gettext utilities, which are required by django for internationalization.\n"
- msg += "(see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#message-files)\n"
- msg += "Try downloading them from http://www.gnu.org/software/gettext/"
- abort(msg.red)
- end
- end
-
- desc "Make sure config file with username/password exists"
- task :transifex_config do
- config_file = "#{Dir.home}/.transifexrc"
- if !File.file?(config_file) or File.size(config_file)==0
- msg ="Cannot connect to Transifex, config file is missing or empty: #{config_file}\n"
- msg += "See http://help.transifex.com/features/client/#transifexrc"
- abort(msg.red)
- end
- end
- end
-
- namespace :transifex do
- desc "Push source strings to Transifex for translation"
- task :push => "i18n:validate:transifex_config" do
- cmd = File.join(REPO_ROOT, "i18n", "transifex.py")
- sh("#{cmd} push")
- end
-
- desc "Pull translated strings from Transifex"
- task :pull => "i18n:validate:transifex_config" do
- cmd = File.join(REPO_ROOT, "i18n", "transifex.py")
- sh("#{cmd} pull")
- end
- end
-
- desc "Run tests for the internationalization library"
- task :test => "i18n:validate:gettext" do
- test = File.join(REPO_ROOT, "i18n", "tests")
- sh("nosetests #{test}")
- end
-
-end
-
-# --- Develop and public documentation ---
-desc "Invoke sphinx 'make build' to generate docs."
-task :builddocs, [:options] do |t, args|
- if args.options == 'pub'
- path = "doc/public"
- else
- path = "docs"
- end
-
- Dir.chdir(path) do
- sh('make html')
- end
-end
-
-desc "Show docs in browser (mac and ubuntu)."
-task :showdocs, [:options] do |t, args|
- if args.options == 'pub'
- path = "doc/public"
- else
- path = "docs"
- end
-
- Dir.chdir("#{path}/build/html") do
- if RUBY_PLATFORM.include? 'darwin' # mac os
- sh('open index.html')
- elsif RUBY_PLATFORM.include? 'linux' # make more ubuntu specific?
- sh('sensible-browser index.html') # ubuntu
- else
- raise "\nUndefined how to run browser on your machine.
-Please use 'rake builddocs' and then manually open
-'mitx/#{path}/build/html/index.html."
- end
- end
-end
-
-desc "Build docs and show them in browser"
-task :doc, [:options] => :builddocs do |t, args|
- Rake::Task["showdocs"].invoke(args.options)
-end
-# --- Develop and public documentation ---
diff --git a/rakefiles/assets.rake b/rakefiles/assets.rake
new file mode 100644
index 0000000000..0954dc9815
--- /dev/null
+++ b/rakefiles/assets.rake
@@ -0,0 +1,100 @@
+
+def xmodule_cmd(watch=false, debug=false)
+ xmodule_cmd = 'xmodule_assets common/static/xmodule'
+ if watch
+ "watchmedo shell-command " +
+ "--patterns='*.js;*.coffee;*.sass;*.scss;*.css' " +
+ "--recursive " +
+ "--command='#{xmodule_cmd}' " +
+ "common/lib/xmodule"
+ else
+ xmodule_cmd
+ end
+end
+
+def coffee_cmd(watch=false, debug=false)
+ "node_modules/.bin/coffee #{watch ? '--watch' : ''} --compile */static"
+end
+
+def sass_cmd(watch=false, debug=false)
+ "sass #{debug ? '--debug-info' : '--style compressed'} " +
+ "--load-path ./common/static/sass " +
+ "--require ./common/static/sass/bourbon/lib/bourbon.rb " +
+ "#{watch ? '--watch' : '--update'} */static"
+end
+
+desc "Compile all assets"
+multitask :assets => 'assets:all'
+
+namespace :assets do
+
+ desc "Compile all assets in debug mode"
+ multitask :debug
+
+ desc "Watch all assets for changes and automatically recompile"
+ task :watch => 'assets:_watch' do
+ puts "Press ENTER to terminate".red
+ $stdin.gets
+ end
+
+ {:xmodule => :install_python_prereqs,
+ :coffee => :install_node_prereqs,
+ :sass => :install_ruby_prereqs}.each_pair do |asset_type, prereq_task|
+ desc "Compile all #{asset_type} assets"
+ task asset_type => prereq_task do
+ cmd = send(asset_type.to_s + "_cmd", watch=false, debug=false)
+ sh(cmd)
+ end
+
+ multitask :all => asset_type
+ multitask :debug => "assets:#{asset_type}:debug"
+ multitask :_watch => "assets:#{asset_type}:_watch"
+
+ namespace asset_type do
+ desc "Compile all #{asset_type} assets in debug mode"
+ task :debug => prereq_task do
+ cmd = send(asset_type.to_s + "_cmd", watch=false, debug=true)
+ sh(cmd)
+ end
+
+ desc "Watch all #{asset_type} assets and compile on change"
+ task :watch => "assets:#{asset_type}:_watch" do
+ puts "Press ENTER to terminate".red
+ $stdin.gets
+ end
+
+ task :_watch => prereq_task do
+ cmd = send(asset_type.to_s + "_cmd", watch=true, debug=true)
+ background_process(cmd)
+ end
+ end
+ end
+
+
+ multitask :sass => 'assets:xmodule'
+ namespace :sass do
+ # In watch mode, sass doesn't immediately compile out of date files,
+ # so force a recompile first
+ task :_watch => 'assets:sass:debug'
+ multitask :debug => 'assets:xmodule:debug'
+ end
+
+ multitask :coffee => 'assets:xmodule'
+ namespace :coffee do
+ multitask :debug => 'assets:xmodule:debug'
+ end
+end
+
+[:lms, :cms].each do |system|
+ # Per environment tasks
+ environments(system).each do |env|
+ desc "Compile coffeescript and sass, and then run collectstatic in the specified environment"
+ task "#{system}:gather_assets:#{env}" => :assets do
+ sh("#{django_admin(system, env, 'collectstatic', '--noinput')} > /dev/null") do |ok, status|
+ if !ok
+ abort "collectstatic failed!"
+ end
+ end
+ end
+ end
+end
diff --git a/rakefiles/deploy.rake b/rakefiles/deploy.rake
new file mode 100644
index 0000000000..1d0a1b2c4f
--- /dev/null
+++ b/rakefiles/deploy.rake
@@ -0,0 +1,15 @@
+
+# Packaging constants
+COMMIT = (ENV["GIT_COMMIT"] || `git rev-parse HEAD`).chomp()[0, 10]
+PACKAGE_NAME = "mitx"
+BRANCH = (ENV["GIT_BRANCH"] || `git symbolic-ref -q HEAD`).chomp().gsub('refs/heads/', '').gsub('origin/', '')
+
+desc "Build a properties file used to trigger autodeploy builds"
+task :autodeploy_properties do
+ File.open("autodeploy.properties", "w") do |file|
+ file.puts("UPSTREAM_NOOP=false")
+ file.puts("UPSTREAM_BRANCH=#{BRANCH}")
+ file.puts("UPSTREAM_JOB=#{PACKAGE_NAME}")
+ file.puts("UPSTREAM_REVISION=#{COMMIT}")
+ end
+end
\ No newline at end of file
diff --git a/rakefiles/django.rake b/rakefiles/django.rake
new file mode 100644
index 0000000000..594c7d6ec3
--- /dev/null
+++ b/rakefiles/django.rake
@@ -0,0 +1,125 @@
+default_options = {
+ :lms => '8000',
+ :cms => '8001',
+}
+
+task :predjango => :install_python_prereqs do
+ sh("find . -type f -name *.pyc -delete")
+ sh('pip install -q --no-index -r requirements/local.txt')
+end
+
+
+task :fastlms do
+  # this is >2 times faster than rake [lms], and does not need web, good for local dev
+ django_admin = ENV['DJANGO_ADMIN_PATH'] || select_executable('django-admin.py', 'django-admin')
+ sh("#{django_admin} runserver --traceback --settings=lms.envs.dev --pythonpath=.")
+end
+
+[:lms, :cms].each do |system|
+ desc <<-desc
+ Start the #{system} locally with the specified environment (defaults to dev).
+ Other useful environments are devplus (for dev testing with a real local database)
+ desc
+ task system, [:env, :options] => [:install_prereqs, 'assets:_watch', :predjango] do |t, args|
+ args.with_defaults(:env => 'dev', :options => default_options[system])
+ sh(django_admin(system, args.env, 'runserver', args.options))
+ end
+
+ # Per environment tasks
+ environments(system).each do |env|
+ desc "Attempt to import the settings file #{system}.envs.#{env} and report any errors"
+ task "#{system}:check_settings:#{env}" => :predjango do
+ sh("echo 'import #{system}.envs.#{env}' | #{django_admin(system, env, 'shell')}")
+ end
+ end
+end
+
+desc "Reset the relational database used by django. WARNING: this will delete all of your existing users"
+task :resetdb, [:env] do |t, args|
+ args.with_defaults(:env => 'dev')
+ sh(django_admin(:lms, args.env, 'syncdb'))
+ sh(django_admin(:lms, args.env, 'migrate'))
+end
+
+desc "Update the relational database to the latest migration"
+task :migrate, [:env] do |t, args|
+ args.with_defaults(:env => 'dev')
+ sh(django_admin(:lms, args.env, 'migrate'))
+end
+
+task :runserver => :lms
+
+desc "Run django-admin against the specified system and environment"
+task "django-admin", [:action, :system, :env, :options] do |t, args|
+ args.with_defaults(:env => 'dev', :system => 'lms', :options => '')
+ sh(django_admin(args.system, args.env, args.action, args.options))
+end
+
+desc "Set the staff bit for a user"
+task :set_staff, [:user, :system, :env] do |t, args|
+ args.with_defaults(:env => 'dev', :system => 'lms', :options => '')
+ sh(django_admin(args.system, args.env, 'set_staff', args.user))
+end
+
+namespace :cms do
+ desc "Clone existing MongoDB based course"
+ task :clone do
+
+ if ENV['SOURCE_LOC'] and ENV['DEST_LOC']
+ sh(django_admin(:cms, :dev, :clone, ENV['SOURCE_LOC'], ENV['DEST_LOC']))
+ else
+ raise "You must pass in a SOURCE_LOC and DEST_LOC parameters"
+ end
+ end
+
+ desc "Delete existing MongoDB based course"
+ task :delete_course do
+
+ if ENV['LOC'] and ENV['COMMIT']
+ sh(django_admin(:cms, :dev, :delete_course, ENV['LOC'], ENV['COMMIT']))
+ elsif ENV['LOC']
+ sh(django_admin(:cms, :dev, :delete_course, ENV['LOC']))
+ else
+ raise "You must pass in a LOC parameter"
+ end
+ end
+
+ desc "Import course data within the given DATA_DIR variable"
+ task :import do
+ if ENV['DATA_DIR'] and ENV['COURSE_DIR']
+ sh(django_admin(:cms, :dev, :import, ENV['DATA_DIR'], ENV['COURSE_DIR']))
+ elsif ENV['DATA_DIR']
+ sh(django_admin(:cms, :dev, :import, ENV['DATA_DIR']))
+ else
+ raise "Please specify a DATA_DIR variable that point to your data directory.\n" +
+ "Example: \`rake cms:import DATA_DIR=../data\`"
+ end
+ end
+
+ desc "Imports all the templates from the code pack"
+ task :update_templates do
+ sh(django_admin(:cms, :dev, :update_templates))
+ end
+
+  desc "Run xlint validation on the course data within the given DATA_DIR variable"
+ task :xlint do
+ if ENV['DATA_DIR'] and ENV['COURSE_DIR']
+ sh(django_admin(:cms, :dev, :xlint, ENV['DATA_DIR'], ENV['COURSE_DIR']))
+ elsif ENV['DATA_DIR']
+ sh(django_admin(:cms, :dev, :xlint, ENV['DATA_DIR']))
+ else
+ raise "Please specify a DATA_DIR variable that point to your data directory.\n" +
+            "Example: \`rake cms:xlint DATA_DIR=../data\`"
+ end
+ end
+
+ desc "Export course data to a tar.gz file"
+ task :export do
+ if ENV['COURSE_ID'] and ENV['OUTPUT_PATH']
+ sh(django_admin(:cms, :dev, :export, ENV['COURSE_ID'], ENV['OUTPUT_PATH']))
+ else
+ raise "Please specify a COURSE_ID and OUTPUT_PATH.\n" +
+ "Example: \`rake cms:export COURSE_ID=MITx/12345/name OUTPUT_PATH=foo.tar.gz\`"
+ end
+ end
+end
\ No newline at end of file
diff --git a/rakefiles/docs.rake b/rakefiles/docs.rake
new file mode 100644
index 0000000000..f10fc80d59
--- /dev/null
+++ b/rakefiles/docs.rake
@@ -0,0 +1,34 @@
+require 'launchy'
+
+# --- Develop and public documentation ---
+desc "Invoke sphinx 'make build' to generate docs."
+task :builddocs, [:options] do |t, args|
+ if args.options == 'pub'
+ path = "doc/public"
+ else
+ path = "docs"
+ end
+
+ Dir.chdir(path) do
+ sh('make html')
+ end
+end
+
+desc "Show docs in browser (mac and ubuntu)."
+task :showdocs, [:options] do |t, args|
+ if args.options == 'pub'
+ path = "doc/public"
+ else
+ path = "docs"
+ end
+
+ Dir.chdir("#{path}/build/html") do
+ Launchy.open('index.html')
+ end
+end
+
+desc "Build docs and show them in browser"
+task :doc, [:options] => :builddocs do |t, args|
+ Rake::Task["showdocs"].invoke(args.options)
+end
+# --- Develop and public documentation ---
diff --git a/rakefiles/helpers.rb b/rakefiles/helpers.rb
new file mode 100644
index 0000000000..be5929d805
--- /dev/null
+++ b/rakefiles/helpers.rb
@@ -0,0 +1,70 @@
+require 'digest/md5'
+
+
+def select_executable(*cmds)
+ cmds.find_all{ |cmd| system("which #{cmd} > /dev/null 2>&1") }[0] || fail("No executables found from #{cmds.join(', ')}")
+end
+
+def django_admin(system, env, command, *args)
+ django_admin = ENV['DJANGO_ADMIN_PATH'] || select_executable('django-admin.py', 'django-admin')
+ return "#{django_admin} #{command} --traceback --settings=#{system}.envs.#{env} --pythonpath=. #{args.join(' ')}"
+end
+
+def report_dir_path(dir)
+ return File.join(REPORT_DIR, dir.to_s)
+end
+
+def when_changed(*files)
+ Rake::Task[PREREQS_MD5_DIR].invoke
+ cache_file = File.join(PREREQS_MD5_DIR, files.join('-').gsub(/\W+/, '-')) + '.md5'
+ digest = Digest::MD5.new()
+ Dir[*files].select{|file| File.file?(file)}.each do |file|
+ digest.file(file)
+ end
+ if !File.exists?(cache_file) or digest.hexdigest != File.read(cache_file)
+ yield
+ File.write(cache_file, digest.hexdigest)
+ end
+end
+
+# Runs Process.spawn, and kills the process at the end of the rake process
+# Expects the same arguments as Process.spawn
+def background_process(*command)
+ pid = Process.spawn({}, *command, {:pgroup => true})
+
+ at_exit do
+ puts "Ending process and children"
+ pgid = Process.getpgid(pid)
+ begin
+ Timeout.timeout(5) do
+ puts "Interrupting process group #{pgid}"
+ Process.kill(:SIGINT, -pgid)
+ puts "Waiting on process group #{pgid}"
+ Process.wait(-pgid)
+ puts "Done waiting on process group #{pgid}"
+ end
+ rescue Timeout::Error
+ begin
+ Timeout.timeout(5) do
+ puts "Terminating process group #{pgid}"
+ Process.kill(:SIGTERM, -pgid)
+ puts "Waiting on process group #{pgid}"
+ Process.wait(-pgid)
+ puts "Done waiting on process group #{pgid}"
+ end
+ rescue Timeout::Error
+ puts "Killing process group #{pgid}"
+ Process.kill(:SIGKILL, -pgid)
+ puts "Waiting on process group #{pgid}"
+ Process.wait(-pgid)
+ puts "Done waiting on process group #{pgid}"
+ end
+ end
+ end
+end
+
+def environments(system)
+ Dir["#{system}/envs/**/*.py"].select{|file| ! (/__init__.py$/ =~ file)}.map do |env_file|
+ env_file.gsub("#{system}/envs/", '').gsub(/\.py/, '').gsub('/', '.')
+ end
+end
diff --git a/rakefiles/i18n.rake b/rakefiles/i18n.rake
new file mode 100644
index 0000000000..e30c119e2e
--- /dev/null
+++ b/rakefiles/i18n.rake
@@ -0,0 +1,73 @@
+# --- Internationalization tasks
+
+namespace :i18n do
+
+ desc "Extract localizable strings from sources"
+ task :extract => "i18n:validate:gettext" do
+ sh(File.join(REPO_ROOT, "i18n", "extract.py"))
+ end
+
+ desc "Compile localizable strings from sources. With optional flag 'extract', will extract strings first."
+ task :generate => "i18n:validate:gettext" do
+ if ARGV.last.downcase == 'extract'
+ Rake::Task["i18n:extract"].execute
+ end
+ sh(File.join(REPO_ROOT, "i18n", "generate.py"))
+ end
+
+ desc "Simulate international translation by generating dummy strings corresponding to source strings."
+ task :dummy do
+ source_files = Dir["#{REPO_ROOT}/conf/locale/en/LC_MESSAGES/*.po"]
+ dummy_locale = 'fr'
+ cmd = File.join(REPO_ROOT, "i18n", "make_dummy.py")
+ for file in source_files do
+ sh("#{cmd} #{file} #{dummy_locale}")
+ end
+ end
+
+ namespace :validate do
+
+ desc "Make sure GNU gettext utilities are available"
+ task :gettext do
+ begin
+ select_executable('xgettext')
+ rescue
+ msg = "Cannot locate GNU gettext utilities, which are required by django for internationalization.\n"
+ msg += "(see https://docs.djangoproject.com/en/dev/topics/i18n/translation/#message-files)\n"
+ msg += "Try downloading them from http://www.gnu.org/software/gettext/"
+ abort(msg.red)
+ end
+ end
+
+ desc "Make sure config file with username/password exists"
+ task :transifex_config do
+ config_file = "#{Dir.home}/.transifexrc"
+ if !File.file?(config_file) or File.size(config_file)==0
+ msg ="Cannot connect to Transifex, config file is missing or empty: #{config_file}\n"
+ msg += "See http://help.transifex.com/features/client/#transifexrc"
+ abort(msg.red)
+ end
+ end
+ end
+
+ namespace :transifex do
+ desc "Push source strings to Transifex for translation"
+ task :push => "i18n:validate:transifex_config" do
+ cmd = File.join(REPO_ROOT, "i18n", "transifex.py")
+ sh("#{cmd} push")
+ end
+
+ desc "Pull translated strings from Transifex"
+ task :pull => "i18n:validate:transifex_config" do
+ cmd = File.join(REPO_ROOT, "i18n", "transifex.py")
+ sh("#{cmd} pull")
+ end
+ end
+
+ desc "Run tests for the internationalization library"
+ task :test => "i18n:validate:gettext" do
+ test = File.join(REPO_ROOT, "i18n", "tests")
+ sh("nosetests #{test}")
+ end
+
+end
diff --git a/rakefiles/jasmine.rake b/rakefiles/jasmine.rake
new file mode 100644
index 0000000000..d9b3bee427
--- /dev/null
+++ b/rakefiles/jasmine.rake
@@ -0,0 +1,97 @@
+require 'colorize'
+require 'erb'
+require 'launchy'
+require 'net/http'
+
+
+def django_for_jasmine(system, django_reload)
+ if !django_reload
+ reload_arg = '--noreload'
+ end
+
+ port = 10000 + rand(40000)
+ jasmine_url = "http://localhost:#{port}/_jasmine/"
+
+ background_process(*django_admin(system, 'jasmine', 'runserver', '-v', '0', port.to_s, reload_arg).split(' '))
+
+ up = false
+ start_time = Time.now
+ until up do
+ if Time.now - start_time > 30
+ abort "Timed out waiting for server to start to run jasmine tests"
+ end
+ begin
+ response = Net::HTTP.get_response(URI(jasmine_url))
+ puts response.code
+ up = response.code == '200'
+ rescue => e
+ puts e.message
+ ensure
+ puts('Waiting server to start')
+ sleep(0.5)
+ end
+ end
+ yield jasmine_url
+end
+
+def template_jasmine_runner(lib)
+ coffee_files = Dir["#{lib}/**/js/**/*.coffee", "common/static/coffee/src/**/*.coffee"]
+ if !coffee_files.empty?
+ sh("node_modules/.bin/coffee -c #{coffee_files.join(' ')}")
+ end
+ phantom_jasmine_path = File.expand_path("node_modules/phantom-jasmine")
+ common_js_root = File.expand_path("common/static/js")
+ common_coffee_root = File.expand_path("common/static/coffee/src")
+
+ # Get arrays of spec and source files, ordered by how deep they are nested below the library
+ # (and then alphabetically) and expanded from a relative to an absolute path
+ spec_glob = File.join("#{lib}", "**", "spec", "**", "*.js")
+ src_glob = File.join("#{lib}", "**", "src", "**", "*.js")
+ js_specs = Dir[spec_glob].sort_by {|p| [p.split('/').length, p]} .map {|f| File.expand_path(f)}
+ js_source = Dir[src_glob].sort_by {|p| [p.split('/').length, p]} .map {|f| File.expand_path(f)}
+
+ template = ERB.new(File.read("#{lib}/jasmine_test_runner.html.erb"))
+ template_output = "#{lib}/jasmine_test_runner.html"
+ File.open(template_output, 'w') do |f|
+ f.write(template.result(binding))
+ end
+ yield File.expand_path(template_output)
+end
+
+[:lms, :cms].each do |system|
+ desc "Open jasmine tests for #{system} in your default browser"
+ task "browse_jasmine_#{system}" => :assets do
+ django_for_jasmine(system, true) do |jasmine_url|
+ Launchy.open(jasmine_url)
+ puts "Press ENTER to terminate".red
+ $stdin.gets
+ end
+ end
+
+ desc "Use phantomjs to run jasmine tests for #{system} from the console"
+ task "phantomjs_jasmine_#{system}" => :assets do
+ phantomjs = ENV['PHANTOMJS_PATH'] || 'phantomjs'
+ django_for_jasmine(system, false) do |jasmine_url|
+ sh("#{phantomjs} node_modules/phantom-jasmine/lib/run_jasmine_test.coffee #{jasmine_url}")
+ end
+ end
+end
+
+Dir["common/lib/*"].select{|lib| File.directory?(lib)}.each do |lib|
+ desc "Open jasmine tests for #{lib} in your default browser"
+ task "browse_jasmine_#{lib}" do
+ template_jasmine_runner(lib) do |f|
+ sh("python -m webbrowser -t 'file://#{f}'")
+ puts "Press ENTER to terminate".red
+ $stdin.gets
+ end
+ end
+
+ desc "Use phantomjs to run jasmine tests for #{lib} from the console"
+ task "phantomjs_jasmine_#{lib}" do
+ phantomjs = ENV['PHANTOMJS_PATH'] || 'phantomjs'
+ template_jasmine_runner(lib) do |f|
+ sh("#{phantomjs} node_modules/phantom-jasmine/lib/run_jasmine_test.coffee #{f}")
+ end
+ end
+end
diff --git a/rakefiles/prereqs.rake b/rakefiles/prereqs.rake
new file mode 100644
index 0000000000..430e650127
--- /dev/null
+++ b/rakefiles/prereqs.rake
@@ -0,0 +1,39 @@
+require './rakefiles/helpers.rb'
+
+
+PREREQS_MD5_DIR = ENV["PREREQ_CACHE_DIR"] || File.join(REPO_ROOT, '.prereqs_cache')
+
+CLOBBER.include(PREREQS_MD5_DIR)
+
+directory PREREQS_MD5_DIR
+
+desc "Install all prerequisites needed for the lms and cms"
+task :install_prereqs => [:install_node_prereqs, :install_ruby_prereqs, :install_python_prereqs]
+
+desc "Install all node prerequisites for the lms and cms"
+task :install_node_prereqs => "ws:migrate" do
+ when_changed('package.json') do
+ sh('npm install')
+ end unless ENV['NO_PREREQ_INSTALL']
+end
+
+desc "Install all ruby prerequisites for the lms and cms"
+task :install_ruby_prereqs => "ws:migrate" do
+ when_changed('Gemfile') do
+ sh('bundle install')
+ end unless ENV['NO_PREREQ_INSTALL']
+end
+
+desc "Install all python prerequisites for the lms and cms"
+task :install_python_prereqs => "ws:migrate" do
+ when_changed('requirements/**') do
+ ENV['PIP_DOWNLOAD_CACHE'] ||= '.pip_download_cache'
+ sh('pip install --exists-action w -r requirements/base.txt')
+ sh('pip install --exists-action w -r requirements/post.txt')
+ # Check for private-requirements.txt: used to install our libs as working dirs,
+ # or personal-use tools.
+ if File.file?("requirements/private.txt")
+ sh('pip install -r requirements/private.txt')
+ end
+ end unless ENV['NO_PREREQ_INSTALL']
+end
\ No newline at end of file
diff --git a/rakefiles/quality.rake b/rakefiles/quality.rake
new file mode 100644
index 0000000000..00ce627ac5
--- /dev/null
+++ b/rakefiles/quality.rake
@@ -0,0 +1,31 @@
+
+[:lms, :cms, :common].each do |system|
+ report_dir = report_dir_path(system)
+ directory report_dir
+
+ desc "Run pep8 on all #{system} code"
+ task "pep8_#{system}" => [report_dir, :install_python_prereqs] do
+ sh("pep8 #{system} | tee #{report_dir}/pep8.report")
+ end
+ task :pep8 => "pep8_#{system}"
+
+ desc "Run pylint on all #{system} code"
+ task "pylint_#{system}" => [report_dir, :install_python_prereqs] do
+ apps = Dir["#{system}/*.py", "#{system}/djangoapps/*", "#{system}/lib/*"].map do |app|
+ File.basename(app)
+ end.select do |app|
+      app !~ /\.pyc$/
+ end.map do |app|
+ if app =~ /.py$/
+ app.gsub('.py', '')
+ else
+ app
+ end
+ end
+
+ pythonpath_prefix = "PYTHONPATH=#{system}:#{system}/djangoapps:#{system}/lib:common/djangoapps:common/lib"
+ sh("#{pythonpath_prefix} pylint --rcfile=.pylintrc -f parseable #{apps.join(' ')} | tee #{report_dir}/pylint.report")
+ end
+ task :pylint => "pylint_#{system}"
+
+end
\ No newline at end of file
diff --git a/rakefiles/tests.rake b/rakefiles/tests.rake
new file mode 100644
index 0000000000..ebe8ea6375
--- /dev/null
+++ b/rakefiles/tests.rake
@@ -0,0 +1,137 @@
+
+# Set up the clean and clobber tasks
+CLOBBER.include(REPORT_DIR, 'test_root/*_repo', 'test_root/staticfiles')
+
+$failed_tests = 0
+
+def run_under_coverage(cmd, root)
+ cmd0, cmd_rest = cmd.split(" ", 2)
+ # We use "python -m coverage" so that the proper python will run the importable coverage
+ # rather than the coverage that OS path finds.
+ cmd = "python -m coverage run --rcfile=#{root}/.coveragerc `which #{cmd0}` #{cmd_rest}"
+ return cmd
+end
+
+def run_tests(system, report_dir, stop_on_failure=true)
+ ENV['NOSE_XUNIT_FILE'] = File.join(report_dir, "nosetests.xml")
+ dirs = Dir["common/djangoapps/*"] + Dir["#{system}/djangoapps/*"]
+ cmd = django_admin(system, :test, 'test', '--logging-clear-handlers', *dirs.each)
+ sh(run_under_coverage(cmd, system)) do |ok, res|
+ if !ok and stop_on_failure
+ abort "Test failed!"
+ end
+ $failed_tests += 1 unless ok
+ end
+end
+
+def run_acceptance_tests(system, report_dir, harvest_args)
+ sh(django_admin(system, 'acceptance', 'syncdb', '--noinput'))
+ sh(django_admin(system, 'acceptance', 'migrate', '--noinput'))
+ sh(django_admin(system, 'acceptance', 'harvest', '--debug-mode', '--tag -skip', harvest_args))
+end
+
+
+directory REPORT_DIR
+
+task :clean_test_files do
+ sh("git clean -fqdx test_root")
+end
+
+TEST_TASK_DIRS = []
+
+[:lms, :cms].each do |system|
+ report_dir = report_dir_path(system)
+
+ # Per System tasks
+ desc "Run all django tests on our djangoapps for the #{system}"
+ task "test_#{system}", [:stop_on_failure] => ["clean_test_files", :predjango, "#{system}:gather_assets:test", "fasttest_#{system}"]
+
+ # Have a way to run the tests without running collectstatic -- useful when debugging without
+ # messing with static files.
+ task "fasttest_#{system}", [:stop_on_failure] => [report_dir, :install_prereqs, :predjango] do |t, args|
+ args.with_defaults(:stop_on_failure => 'true')
+ run_tests(system, report_dir, args.stop_on_failure)
+ end
+
+ # Run acceptance tests
+ desc "Run acceptance tests"
+ task "test_acceptance_#{system}", [:harvest_args] => ["#{system}:gather_assets:acceptance", "fasttest_acceptance_#{system}"]
+
+ desc "Run acceptance tests without collectstatic"
+ task "fasttest_acceptance_#{system}", [:harvest_args] => ["clean_test_files", :predjango, report_dir] do |t, args|
+ args.with_defaults(:harvest_args => '')
+ run_acceptance_tests(system, report_dir, args.harvest_args)
+ end
+
+
+ task :fasttest => "fasttest_#{system}"
+
+ TEST_TASK_DIRS << system
+end
+
+Dir["common/lib/*"].select{|lib| File.directory?(lib)}.each do |lib|
+ task_name = "test_#{lib}"
+
+ report_dir = report_dir_path(lib)
+
+ desc "Run tests for common lib #{lib}"
+ task task_name => report_dir do
+ ENV['NOSE_XUNIT_FILE'] = File.join(report_dir, "nosetests.xml")
+ cmd = "nosetests #{lib}"
+ sh(run_under_coverage(cmd, lib)) do |ok, res|
+ $failed_tests += 1 unless ok
+ end
+ end
+ TEST_TASK_DIRS << lib
+
+ desc "Run tests for common lib #{lib} (without coverage)"
+ task "fasttest_#{lib}" do
+ sh("nosetests #{lib}")
+ end
+end
+
+task :report_dirs
+
+TEST_TASK_DIRS.each do |dir|
+ report_dir = report_dir_path(dir)
+ directory report_dir
+ task :report_dirs => [REPORT_DIR, report_dir]
+end
+
+task :test do
+ TEST_TASK_DIRS.each do |dir|
+ Rake::Task["test_#{dir}"].invoke(false)
+ end
+
+ if $failed_tests > 0
+ abort "Tests failed!"
+ end
+end
+
+namespace :coverage do
+ desc "Build the html coverage reports"
+ task :html => :report_dirs do
+ TEST_TASK_DIRS.each do |dir|
+ report_dir = report_dir_path(dir)
+
+ if !File.file?("#{report_dir}/.coverage")
+ next
+ end
+
+ sh("coverage html --rcfile=#{dir}/.coveragerc")
+ end
+ end
+
+ desc "Build the xml coverage reports"
+ task :xml => :report_dirs do
+ TEST_TASK_DIRS.each do |dir|
+ report_dir = report_dir_path(dir)
+
+ if !File.file?("#{report_dir}/.coverage")
+ next
+ end
+ # Why doesn't the rcfile control the xml output file properly??
+ sh("coverage xml -o #{report_dir}/coverage.xml --rcfile=#{dir}/.coveragerc")
+ end
+ end
+end
diff --git a/rakefiles/workspace.rake b/rakefiles/workspace.rake
new file mode 100644
index 0000000000..c705899f58
--- /dev/null
+++ b/rakefiles/workspace.rake
@@ -0,0 +1,16 @@
+MIGRATION_MARKER_DIR = File.join(REPO_ROOT, '.ws_migrations_complete')
+SKIP_MIGRATIONS = ENV['SKIP_WS_MIGRATIONS'] || false
+
+directory MIGRATION_MARKER_DIR
+
+namespace :ws do
+ task :migrate => MIGRATION_MARKER_DIR do
+ Dir['ws_migrations/*'].select{|m| File.executable?(m)}.each do |migration|
+ completion_file = File.join(MIGRATION_MARKER_DIR, File.basename(migration))
+ if ! File.exist?(completion_file)
+ sh(migration)
+ File.write(completion_file, "")
+ end
+ end unless SKIP_MIGRATIONS
+ end
+end
\ No newline at end of file
diff --git a/repo-requirements.txt b/repo-requirements.txt
deleted file mode 100644
index aa503e9779..0000000000
--- a/repo-requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
--r github-requirements.txt
--r local-requirements.txt
\ No newline at end of file
diff --git a/requirements.txt b/requirements/base.txt
similarity index 93%
rename from requirements.txt
rename to requirements/base.txt
index c6ee47becb..f6cc250587 100644
--- a/requirements.txt
+++ b/requirements/base.txt
@@ -1,7 +1,9 @@
--r repo-requirements.txt
+-r repo.txt
+
beautifulsoup4==4.1.3
beautifulsoup==3.2.1
boto==2.6.0
+distribute==0.6.28
django-celery==3.0.11
django-countries==1.5
django-followit==0.0.3
@@ -21,11 +23,9 @@ feedparser==5.1.3
fs==0.4.0
GitPython==0.3.2.RC1
glob2==0.3
-http://sympy.googlecode.com/files/sympy-0.7.1.tar.gz
lxml==3.0.1
mako==0.7.3
Markdown==2.2.1
-MySQL-python==1.2.4c1
networkx==1.7
nltk==2.0.4
numpy==1.6.2
@@ -42,10 +42,10 @@ python-openid==2.2.5
pytz==2012h
PyYAML==3.10
requests==0.14.2
-scipy==0.11.0
Shapely==1.2.16
sorl-thumbnail==11.12
South==0.7.6
+sympy==0.7.1
xmltodict==0.4.1
# Used for debugging
diff --git a/github-requirements.txt b/requirements/github.txt
similarity index 100%
rename from github-requirements.txt
rename to requirements/github.txt
diff --git a/local-requirements.txt b/requirements/local.txt
similarity index 100%
rename from local-requirements.txt
rename to requirements/local.txt
diff --git a/requirements/post.txt b/requirements/post.txt
new file mode 100644
index 0000000000..e1e26b381a
--- /dev/null
+++ b/requirements/post.txt
@@ -0,0 +1,6 @@
+
+# This must be installed after distribute 0.6.28
+MySQL-python==1.2.4c1
+
+# This must be installed after numpy
+scipy==0.11.0
diff --git a/requirements/repo.txt b/requirements/repo.txt
new file mode 100644
index 0000000000..da3903b3de
--- /dev/null
+++ b/requirements/repo.txt
@@ -0,0 +1,2 @@
+-r github.txt
+-r local.txt
diff --git a/brew-formulas.txt b/requirements/system/mac_os_x/brew-formulas.txt
similarity index 100%
rename from brew-formulas.txt
rename to requirements/system/mac_os_x/brew-formulas.txt
diff --git a/apt-packages.txt b/requirements/system/ubuntu/apt-packages.txt
similarity index 100%
rename from apt-packages.txt
rename to requirements/system/ubuntu/apt-packages.txt
diff --git a/apt-repos.txt b/requirements/system/ubuntu/apt-repos.txt
similarity index 100%
rename from apt-repos.txt
rename to requirements/system/ubuntu/apt-repos.txt
diff --git a/create-dev-env.sh b/scripts/create-dev-env.sh
similarity index 100%
rename from create-dev-env.sh
rename to scripts/create-dev-env.sh
diff --git a/install-system-req.sh b/scripts/install-system-req.sh
similarity index 100%
rename from install-system-req.sh
rename to scripts/install-system-req.sh
diff --git a/run.sh b/scripts/run.sh
similarity index 100%
rename from run.sh
rename to scripts/run.sh
diff --git a/run_watch_data.py b/scripts/run_watch_data.py
similarity index 100%
rename from run_watch_data.py
rename to scripts/run_watch_data.py
diff --git a/runone.py b/scripts/runone.py
similarity index 100%
rename from runone.py
rename to scripts/runone.py
diff --git a/setup-test-dirs.sh b/scripts/setup-test-dirs.sh
similarity index 100%
rename from setup-test-dirs.sh
rename to scripts/setup-test-dirs.sh
diff --git a/ws_migrations/README.rst b/ws_migrations/README.rst
new file mode 100644
index 0000000000..c952a25c7b
--- /dev/null
+++ b/ws_migrations/README.rst
@@ -0,0 +1,29 @@
+Developer Workspace Migrations
+==============================
+
+This directory contains executable files which run once prior to
+installation of prerequisites to bring a developer's workspace
+into line.
+
+Specifications
+--------------
+
+Each file in this directory should meet the following criteria:
+
+* Executable (`chmod +x ws_migrations/foo.sh`)
+* Idempotent (ideally, each script is run only once, but no
+ guarantees are made by the caller, so the script must do
+ the right thing)
+* Either fast or verbose (if the script is going to take
+ a long time, it should notify the user of that)
+* A comment at the top of the file explaining the migration
+
+Execution
+---------
+
+The scripts are run by the rake task `ws:migrate`. That task
+only runs a given script if a corresponding marker file
+in .ws_migrations_complete doesn't already exist.
+
+If the SKIP_WS_MIGRATIONS environment variable is set, then
+no workspace migrations will be run.
\ No newline at end of file
diff --git a/ws_migrations/clean_xmodule_assets.sh b/ws_migrations/clean_xmodule_assets.sh
new file mode 100755
index 0000000000..ebda0fda55
--- /dev/null
+++ b/ws_migrations/clean_xmodule_assets.sh
@@ -0,0 +1,11 @@
+#! /bin/sh
+
+# Remove all of the old xmodule coffee and sass directories
+# in preparation to switching to use the xmodule_assets script
+
+rm -rf cms/static/coffee/descriptor
+rm -rf cms/static/coffee/module
+rm -rf cms/static/sass/descriptor
+rm -rf cms/static/sass/module
+rm -rf lms/static/coffee/module
+rm -rf lms/static/sass/module