refactor: CookieMonitoringMiddleware moved to edx-django-utils

The CookieMonitoringMiddleware and its related script
were moved to edx-django-utils.

ARCHBOM-2054
This commit is contained in:
Robert Raposa
2022-03-16 21:17:14 -04:00
parent 7a2eee6b21
commit 9fa79809d8
15 changed files with 38 additions and 516 deletions

View File

@@ -752,16 +752,15 @@ XQUEUE_INTERFACE = {
MIDDLEWARE = [
'openedx.core.lib.x_forwarded_for.middleware.XForwardedForMiddleware',
'crum.CurrentRequestUserMiddleware',
'edx_django_utils.monitoring.DeploymentMonitoringMiddleware',
# A newer and safer request cache.
# Resets the request cache.
'edx_django_utils.cache.middleware.RequestCacheMiddleware',
'edx_django_utils.monitoring.MonitoringMemoryMiddleware',
# Cookie monitoring
'openedx.core.lib.request_utils.CookieMonitoringMiddleware',
# Various monitoring middleware
'edx_django_utils.monitoring.CookieMonitoringMiddleware',
'edx_django_utils.monitoring.DeploymentMonitoringMiddleware',
'edx_django_utils.monitoring.MonitoringMemoryMiddleware',
# Before anything that looks at cookies, especially the session middleware
'openedx.core.djangoapps.cookie_metadata.middleware.CookieNameChange',

View File

@@ -1,212 +0,0 @@
"""
This script will process logs generated from CookieMonitoringMiddleware.
Sample usage::
python lms/djangoapps/monitoring/scripts/process_cookie_monitoring_logs.py --csv_input large-cookie-logs.csv
Or for more details::
python lms/djangoapps/monitoring/scripts/process_cookie_monitoring_logs.py --help
"""
import csv
import logging
import re
from dateutil import parser
import click
logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)
# Note: Not all Open edX deployments will be affected by the same third-party cookies,
# but it is ok to have some of these cookies go unused.
# Each entry maps a compiled regex for a parameterized cookie name to a normalized
# replacement, so per-user/per-site ids collapse into a single logical cookie name.
PARAMETERIZED_COOKIES = [
    (re.compile(r"_gac_UA-(\d|-)+"), "_gac_UA-{id}"),
    (re.compile(r"_hjSession_\d+"), "_hjSession_{id}"),
    (re.compile(r"_hjSessionUser_\d+"), "_hjSessionUser_{id}"),
    (re.compile(r"ab\.storage\.deviceId\..*"), "ab.storage.deviceId.{id}"),
    # Fix: this entry previously mapped to "ab.storage.deviceId.{id}" (copy-paste
    # error), which merged sessionId cookies into the deviceId bucket in reports.
    (re.compile(r"ab\.storage\.sessionId\..*"), "ab.storage.sessionId.{id}"),
    (re.compile(r"ab\.storage\.userId\..*"), "ab.storage.userId.{id}"),
    (re.compile(r"AMCV_\w+%40AdobeOrg"), "AMCV_{id}@AdobeOrg"),
    (re.compile(r"amplitude_id_.*"), "amplitude_id_{id}"),
]
@click.command()
@click.option(
    "--csv_input",
    help="File name of .csv file with Splunk logs for large cookie headers.",
    required=True
)
def main(csv_input):
    """
    Reads CSV of large cookie logs and processes and provides summary output.
    Expected CSV format (from Splunk export searching on "BEGIN-COOKIE-SIZES"):
    \b
    _raw,_time,index,
    ...
    """
    # Pipeline: parse the raw export, aggregate per-cookie stats, then report.
    print_processed_cookies(process_cookie_headers(_load_csv(csv_input)))
def _load_csv(csv_file):
"""
Reads CSV of large cookie data and returns a dict of details.
Arguments:
csv_file (string): File name for the csv
Returns a list of dicts containing parsed details for each cookie header log entry.
"""
with open(csv_file) as file:
csv_data = file.read()
reader = csv.DictReader(csv_data.splitlines())
# Regex to match against log messages like the following:
# BEGIN-COOKIE-SIZES(total=3773) user-info: 903, csrftoken: 64, ... END-COOKIE-SIZES
cookie_log_regex = re.compile(r"BEGIN-COOKIE-SIZES\(total=(?P<total>\d+)\)(?P<cookie_sizes>.*)END-COOKIE-SIZES")
# Regex to match against just a single size, like the following:
# csrftoken: 64
cookie_size_regex = re.compile(r"(?P<name>.*): (?P<size>\d+)")
cookie_headers = []
for row in reader:
cookie_header_sizes = {}
raw_cookie_log = row.get("_raw")
cookie_begin_count = raw_cookie_log.count("BEGIN-COOKIE-SIZES")
if cookie_begin_count == 0:
logging.info("No BEGIN-COOKIE-SIZES delimiter found. Skipping row.")
elif cookie_begin_count > 1:
# Note: this wouldn't parse correctly right now, and it isn't worth coding for.
logging.warning("Multiple cookie entries found in same row. Skipping row.")
continue
match = cookie_log_regex.search(raw_cookie_log)
if not match:
logging.error("Malformed cookie entry. Skipping row.")
continue
cookie_header_size = int(match.group("total"))
if cookie_header_size == 0:
continue
cookie_sizes_str = match.group("cookie_sizes").strip()
cookie_sizes = cookie_sizes_str.split(", ")
for cookie_size in cookie_sizes:
match = cookie_size_regex.search(cookie_size)
if not match:
logging.error(f"Could not parse cookie size from: {cookie_size}")
continue
cookie_header_sizes[match.group("name")] = int(match.group("size"))
cookie_header_size_computed = max(
0, sum(len(name) + size + 3 for (name, size) in cookie_header_sizes.items()) - 2
)
cookie_headers.append({
"datetime": parser.parse(row.get("_time")),
"env": row.get("index"),
"cookie_header_size": cookie_header_size,
"cookie_header_size_computed": cookie_header_size_computed,
"cookie_sizes": cookie_header_sizes,
})
return cookie_headers
def process_cookie_headers(cookie_headers):
    """
    Process the parsed cookie header log entries.

    Arguments:
        cookie_headers: a list of dicts containing parsed details.

    Returns a dict of processed cookies.
    """
    summary = {}
    for header_entry in cookie_headers:
        header_cookie_count = len(header_entry["cookie_sizes"])
        for cookie_name, cookie_size in header_entry["cookie_sizes"].items():
            # Collapse parameterized cookie names into one logical cookie. For example:
            #   _hjSessionUser_111111 => _hjSessionUser_{id}
            for pattern, normalized_name in PARAMETERIZED_COOKIES:
                if pattern.fullmatch(cookie_name):
                    logging.debug(f"Replacing {cookie_name} with {normalized_name}.")
                    cookie_name = normalized_name
                    break

            stats = summary.setdefault(cookie_name, {})

            # The full size each cookie takes up in the cookie header, including name and delimiters.
            full_size = len(cookie_name) + cookie_size + 3
            set_max_attribute(stats, "max_full_size", full_size)
            set_min_attribute(stats, "min_full_size", full_size)
            set_max_attribute(stats, "max_size", cookie_size)
            set_min_attribute(stats, "min_size", cookie_size)
            stats["count"] = stats.get("count", 0) + 1

            # Note: The following details relate to the header, and not to the specific cookie.
            # This may give a quick view of cookies associated with the largest header, or the
            # header with the most cookies.
            set_max_attribute(stats, "last_seen", header_entry["datetime"])
            set_min_attribute(stats, "first_seen", header_entry["datetime"])
            set_max_attribute(stats, "max_cookie_count", header_cookie_count)
            set_min_attribute(stats, "min_cookie_count", header_cookie_count)
            set_max_attribute(stats, "max_header_size", header_entry["cookie_header_size"])
            set_min_attribute(stats, "min_header_size", header_entry["cookie_header_size"])
            # Note: skipping cookie_header_size_calculated unless we see a need for it.
            stats.setdefault("envs", set()).add(header_entry["env"])

    return summary
def set_min_attribute(processed_cookie, key, value):
    """
    Sets processed_cookie[key] to the smaller of value and its current value.
    """
    # A missing key is treated as if the current value were `value` itself.
    current = processed_cookie.get(key, value)
    if value < current:
        current = value
    processed_cookie[key] = current
def set_max_attribute(processed_cookie, key, value):
    """
    Sets processed_cookie[key] to the larger of value and its current value.
    """
    # A missing key is treated as if the current value were `value` itself.
    current = processed_cookie.get(key, value)
    if value > current:
        current = value
    processed_cookie[key] = current
def print_processed_cookies(processed_cookies):
    """
    Output processed cookie information.
    """
    # Report the largest cookies first.
    ordered = sorted(processed_cookies.items(), key=lambda item: item[1]["max_full_size"], reverse=True)
    columns = [
        "name", "max_full_size", "min_full_size", "max_size", "min_size", "count",
        "last_seen", "first_seen", "max_cookie_count", "min_cookie_count",
        "max_header_size", "min_header_size", "envs",
    ]
    print(",".join(columns))
    for cookie_name, stats in ordered:
        fields = [cookie_name]
        fields += [str(stats[column]) for column in columns[1:-1]]
        # The envs column is quoted because it is itself a comma-separated list.
        fields.append('"' + ",".join(sorted(stats["envs"])) + '"')
        print(",".join(fields))
if __name__ == "__main__":
main() # pylint: disable=no-value-for-parameter

View File

@@ -2057,25 +2057,22 @@ CREDIT_NOTIFICATION_CACHE_TIMEOUT = 5 * 60 * 60
MIDDLEWARE = [
'openedx.core.lib.x_forwarded_for.middleware.XForwardedForMiddleware',
'crum.CurrentRequestUserMiddleware',
'edx_django_utils.monitoring.DeploymentMonitoringMiddleware',
# A newer and safer request cache.
# Resets the request cache.
'edx_django_utils.cache.middleware.RequestCacheMiddleware',
# Generate code ownership attributes. Keep this immediately after RequestCacheMiddleware.
# Various monitoring middleware
'edx_django_utils.monitoring.CachedCustomMonitoringMiddleware',
'edx_django_utils.monitoring.CodeOwnerMonitoringMiddleware',
'edx_django_utils.monitoring.CookieMonitoringMiddleware',
'edx_django_utils.monitoring.DeploymentMonitoringMiddleware',
# Before anything that looks at cookies, especially the session middleware
'openedx.core.djangoapps.cookie_metadata.middleware.CookieNameChange',
# Monitoring and logging middleware
# Monitoring and logging for expected and ignored errors
'openedx.core.lib.request_utils.ExpectedErrorMiddleware',
'edx_django_utils.monitoring.CachedCustomMonitoringMiddleware',
# Cookie monitoring
'openedx.core.lib.request_utils.CookieMonitoringMiddleware',
'lms.djangoapps.mobile_api.middleware.AppVersionUpgrade',
'openedx.core.djangoapps.header_control.middleware.HeaderControlMiddleware',

View File

@@ -27,7 +27,7 @@ from openedx.core.djangoapps.user_api.accounts import ACCOUNT_VISIBILITY_PREF_KE
from openedx.core.djangoapps.user_api.models import UserPreference
from openedx.core.djangoapps.user_api.preferences.api import set_user_preference
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, FilteredQueryCountMixin, skip_unless_lms
from openedx.features.name_affirmation_api.utils import get_name_affirmation_service
from .. import ALL_USERS_VISIBILITY, CUSTOM_VISIBILITY, PRIVATE_VISIBILITY
@@ -195,7 +195,7 @@ class UserAPITestCase(APITestCase):
@ddt.ddt
@skip_unless_lms
class TestOwnUsernameAPI(CacheIsolationTestCase, UserAPITestCase):
class TestOwnUsernameAPI(FilteredQueryCountMixin, CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
@@ -253,13 +253,13 @@ class TestOwnUsernameAPI(CacheIsolationTestCase, UserAPITestCase):
{'full': 50, 'small': 10},
clear=True
)
class TestAccountsAPI(CacheIsolationTestCase, UserAPITestCase):
class TestAccountsAPI(FilteredQueryCountMixin, CacheIsolationTestCase, UserAPITestCase):
"""
Unit tests for the Accounts API.
"""
ENABLED_CACHES = ['default']
TOTAL_QUERY_COUNT = 25
TOTAL_QUERY_COUNT = 24
FULL_RESPONSE_FIELD_COUNT = 30
def setUp(self):
@@ -700,7 +700,7 @@ class TestAccountsAPI(CacheIsolationTestCase, UserAPITestCase):
assert data['accomplishments_shared'] is False
self.client.login(username=self.user.username, password=TEST_PASSWORD)
verify_get_own_information(self._get_num_queries(23))
verify_get_own_information(self._get_num_queries(22))
# Now make sure that the user can get the same information, even if not active
self.user.is_active = False
@@ -720,7 +720,7 @@ class TestAccountsAPI(CacheIsolationTestCase, UserAPITestCase):
legacy_profile.save()
self.client.login(username=self.user.username, password=TEST_PASSWORD)
with self.assertNumQueries(self._get_num_queries(23), table_ignorelist=WAFFLE_TABLES):
with self.assertNumQueries(self._get_num_queries(22), table_ignorelist=WAFFLE_TABLES):
response = self.send_get(self.client)
for empty_field in ("level_of_education", "gender", "country", "state", "bio",):
assert response.data[empty_field] is None

View File

@@ -2,7 +2,7 @@
Test utilities for waffle utilities.
"""
# Can be used with FilteredQueryCountMixin.assertNumQueries() to blacklist
# Can be used with FilteredQueryCountMixin.assertNumQueries() to ignore
# waffle tables. For example:
# QUERY_COUNT_TABLE_IGNORELIST = WAFFLE_TABLES
# with self.assertNumQueries(6, table_ignorelist=QUERY_COUNT_TABLE_IGNORELIST):

View File

@@ -170,7 +170,7 @@ class _AssertNumQueriesContext(CaptureQueriesContext):
def __exit__(self, exc_type, exc_value, traceback):
def is_unfiltered_query(query):
"""
Returns True if the query does not contain a blacklisted table, and
Returns True if the query does not contain an ignorelisted table, and
False otherwise.
Note: This is a simple naive implementation that makes no attempt
@@ -201,7 +201,7 @@ class _AssertNumQueriesContext(CaptureQueriesContext):
class FilteredQueryCountMixin:
"""
Mixin to add to any subclass of Django's TestCase that replaces
assertNumQueries with one that accepts a blacklist of tables to filter out
assertNumQueries with one that accepts an ignorelist of tables to filter out
of the count.
"""
def assertNumQueries(self, num, func=None, table_ignorelist=None, *args, **kwargs): # lint-amnesty, pylint: disable=keyword-arg-before-vararg
@@ -210,6 +210,8 @@ class FilteredQueryCountMixin:
the addition of the following argument:
table_ignorelist (List): A list of table names to filter out of the
set of queries that get counted.
Important: TestCase must include FilteredQueryCountMixin for this to work.
"""
using = kwargs.pop("using", DEFAULT_DB_ALIAS)
conn = connections[using]

View File

@@ -1,7 +1,6 @@
""" Utility functions related to HTTP requests """
import logging
import random
import re
from urllib.parse import urlparse
@@ -90,144 +89,6 @@ def course_id_from_url(url):
return None
class CookieMonitoringMiddleware:
    """
    Middleware for monitoring the size and growth of all our cookies, to see if
    we're running into browser limits.
    """
    def __init__(self, get_response):
        # Standard Django middleware callable chain.
        self.get_response = get_response

    def __call__(self, request):
        # Monitor at request-time to skip any cookies that may be added during the request.
        log_message = None
        try:
            log_message = self.get_log_message_and_monitor_cookies(request)
        except BaseException:
            # Deliberately broad: monitoring must never break the request itself.
            log.exception("Unexpected error logging and monitoring cookies.")
        response = self.get_response(request)
        # Delay logging until response-time so that the user id can be included in the log message.
        if log_message:
            log.info(log_message)
        return response

    def get_log_message_and_monitor_cookies(self, request):
        """
        Add logging and custom attributes for monitoring cookie sizes.

        Don't log contents of cookies because that might cause a security issue.
        We just want to see if any cookies are growing out of control.

        Useful NRQL Queries:

            # Always available
            SELECT * FROM Transaction WHERE cookies.header.size > 6000

        Attributes that are added by this middleware:

            For all requests:
                cookies.header.size: The total size in bytes of the cookie header

            If COOKIE_HEADER_SIZE_LOGGING_THRESHOLD is reached:
                cookies.header.size.computed

        Related Settings (see annotations for details):

            - COOKIE_HEADER_SIZE_LOGGING_THRESHOLD
            - COOKIE_SAMPLING_REQUEST_COUNT

        Returns: The message to be logged, or None when no logging should happen.
            This is returned, rather than directly logged, so that it can be
            processed at request time (before any cookies may be changed
            server-side), but logged at response time, once the user id is
            available for authenticated calls.
        """
        raw_header_cookie = request.META.get('HTTP_COOKIE', '')
        # Measure encoded bytes, not characters, since browser limits are in bytes.
        cookie_header_size = len(raw_header_cookie.encode('utf-8'))
        # .. custom_attribute_name: cookies.header.size
        # .. custom_attribute_description: The total size in bytes of the cookie header.
        set_custom_attribute('cookies.header.size', cookie_header_size)

        if cookie_header_size == 0:
            return

        if corrupt_cookie_count := raw_header_cookie.count('Cookie: '):
            # .. custom_attribute_name: cookies.header.corrupt_count
            # .. custom_attribute_description: The attribute will only appear for potentially corrupt cookie headers,
            #   where "Cookie: " is found in the header. If this custom attribute is seen on the same
            #   requests where other mysterious cookie problems are occurring, this may help troubleshoot.
            #   See https://openedx.atlassian.net/browse/CR-4614 for more details.
            #   Also see cookies.header.corrupt_key_count
            set_custom_attribute('cookies.header.corrupt_count', corrupt_cookie_count)
            # .. custom_attribute_name: cookies.header.corrupt_key_count
            # .. custom_attribute_description: The attribute will only appear for potentially corrupt cookie headers,
            #   where "Cookie: " is found in some of the cookie keys. If this custom attribute is seen on the same
            #   requests where other mysterious cookie problems are occurring, this may help troubleshoot.
            #   See https://openedx.atlassian.net/browse/CR-4614 for more details.
            #   Also see cookies.header.corrupt_count.
            set_custom_attribute(
                'cookies.header.corrupt_key_count',
                sum(1 for key in request.COOKIES.keys() if 'Cookie: ' in key)
            )

        # .. setting_name: COOKIE_HEADER_SIZE_LOGGING_THRESHOLD
        # .. setting_default: None
        # .. setting_description: The minimum size for the full cookie header to log a list of cookie names and sizes.
        #   Should be set to a relatively high threshold (suggested 9-10K) to avoid flooding the logs.
        logging_threshold = getattr(settings, "COOKIE_HEADER_SIZE_LOGGING_THRESHOLD", None)
        if not logging_threshold:
            return

        is_large_cookie_header_detected = cookie_header_size >= logging_threshold
        if not is_large_cookie_header_detected:
            # .. setting_name: COOKIE_SAMPLING_REQUEST_COUNT
            # .. setting_default: None
            # .. setting_description: This setting enables sampling cookie header logging for cookie headers smaller
            #   than COOKIE_HEADER_SIZE_LOGGING_THRESHOLD. The cookie header logging will happen randomly for each
            #   request with a chance of 1 in COOKIE_SAMPLING_REQUEST_COUNT. For example, to see approximately one
            #   sampled log message every 10 minutes, set COOKIE_SAMPLING_REQUEST_COUNT to the average number of
            #   requests in 10 minutes.
            # .. setting_warning: This setting requires COOKIE_HEADER_SIZE_LOGGING_THRESHOLD to be enabled to take
            #   effect.
            sampling_request_count = getattr(settings, "COOKIE_SAMPLING_REQUEST_COUNT", None)
            # if the cookie header size is lower than the threshold, skip logging unless configured to do
            # random sampling and we choose the lucky number (in this case, 1).
            if not sampling_request_count or random.randint(1, sampling_request_count) > 1:
                return

        # The computed header size can be used to double check that there aren't large cookies that are
        # duplicates in the original header (from different domains) that aren't being accounted for.
        cookies_header_size_computed = max(
            0, sum(len(name) + len(value) + 3 for (name, value) in request.COOKIES.items()) - 2
        )
        # .. custom_attribute_name: cookies.header.size.computed
        # .. custom_attribute_description: The computed total size in bytes of the cookie header, based on the
        #   cookies found in request.COOKIES. This value will only be captured for cookie headers larger than
        #   COOKIE_HEADER_SIZE_LOGGING_THRESHOLD. The value can be used to double check that there aren't large
        #   cookies that are duplicates in the cookie header (from different domains) that aren't being accounted
        #   for.
        set_custom_attribute('cookies.header.size.computed', cookies_header_size_computed)

        # Sort starting with largest cookies
        sorted_cookie_items = sorted(request.COOKIES.items(), key=lambda x: len(x[1]), reverse=True)
        sizes = ', '.join(f"{name}: {len(value)}" for (name, value) in sorted_cookie_items)
        if is_large_cookie_header_detected:
            log_prefix = f"Large (>= {logging_threshold}) cookie header detected."
        else:
            log_prefix = f"Sampled small (< {logging_threshold}) cookie header."
        log_message = f"{log_prefix} BEGIN-COOKIE-SIZES(total={cookie_header_size}) {sizes} END-COOKIE-SIZES"
        return log_message
def expected_error_exception_handler(exc, context):
"""
Replacement for DRF's default exception handler to enable observing expected errors.

View File

@@ -12,7 +12,6 @@ from django.test.utils import override_settings
from edx_django_utils.cache import RequestCache
from openedx.core.lib.request_utils import (
CookieMonitoringMiddleware,
ExpectedErrorMiddleware,
_get_expected_error_settings_dict,
clear_cached_expected_error_settings,
@@ -103,131 +102,6 @@ class RequestUtilTestCase(unittest.TestCase):
assert course_id.run == run
@ddt.ddt
class CookieMonitoringMiddlewareTestCase(unittest.TestCase):
    """
    Tests for CookieMonitoringMiddleware.
    """
    def setUp(self):
        super().setUp()
        # Stands in for the downstream get_response callable of the middleware chain.
        self.mock_response = Mock()

    # NOTE: @patch decorators apply bottom-up, so mock arguments arrive in
    # reverse decorator order (set_custom_attribute first, then log).
    @patch('openedx.core.lib.request_utils.log', autospec=True)
    @patch("openedx.core.lib.request_utils.set_custom_attribute")
    @ddt.data(
        (None, None),  # logging threshold not defined
        (5, None),  # logging threshold too high
        (5, 9999999999999999999),  # logging threshold too high, and random sampling impossibly unlikely
    )
    @ddt.unpack
    def test_cookie_monitoring_with_no_logging(
        self, logging_threshold, sampling_request_count, mock_set_custom_attribute, mock_logger
    ):
        middleware = CookieMonitoringMiddleware(self.mock_response)
        cookies_dict = {'a': 'y'}
        with override_settings(COOKIE_HEADER_SIZE_LOGGING_THRESHOLD=logging_threshold):
            with override_settings(COOKIE_SAMPLING_REQUEST_COUNT=sampling_request_count):
                middleware(self.get_mock_request(cookies_dict))
        # expect monitoring of header size for all requests
        mock_set_custom_attribute.assert_called_once_with('cookies.header.size', 3)
        # cookie logging was not enabled, so nothing should be logged
        mock_logger.info.assert_not_called()

    @override_settings(COOKIE_HEADER_SIZE_LOGGING_THRESHOLD=None)
    @override_settings(COOKIE_SAMPLING_REQUEST_COUNT=None)
    @patch("openedx.core.lib.request_utils.set_custom_attribute")
    @ddt.data(
        # A corrupt cookie header contains "Cookie: ".
        # (header, expected corrupt_count, expected corrupt_key_count)
        ('corruptCookie: normal-cookie=value', 1, 1),
        ('corrupt1Cookie: normal-cookie1=value1;corrupt2Cookie: normal-cookie2=value2', 2, 2),
        ('corrupt=Cookie: value', 1, 0),
    )
    @ddt.unpack
    def test_cookie_header_corrupt_monitoring(
        self, corrupt_cookie_header, expected_corrupt_count, expected_corrupt_key_count, mock_set_custom_attribute
    ):
        middleware = CookieMonitoringMiddleware(self.mock_response)
        # Inject the raw header directly so the corruption survives cookie parsing.
        request = RequestFactory().request()
        request.META['HTTP_COOKIE'] = corrupt_cookie_header
        middleware(request)
        mock_set_custom_attribute.assert_has_calls([
            call('cookies.header.size', len(request.META['HTTP_COOKIE'])),
            call('cookies.header.corrupt_count', expected_corrupt_count),
            call('cookies.header.corrupt_key_count', expected_corrupt_key_count),
        ])

    @override_settings(COOKIE_HEADER_SIZE_LOGGING_THRESHOLD=1)
    @patch('openedx.core.lib.request_utils.log', autospec=True)
    @patch("openedx.core.lib.request_utils.set_custom_attribute")
    def test_log_cookie_with_threshold_met(self, mock_set_custom_attribute, mock_logger):
        # Threshold of 1 byte guarantees any non-empty header triggers logging.
        middleware = CookieMonitoringMiddleware(self.mock_response)
        cookies_dict = {
            "a": "yy",
            "b": "xxx",
            "c": "z",
        }
        middleware(self.get_mock_request(cookies_dict))
        mock_set_custom_attribute.assert_has_calls([
            call('cookies.header.size', 16),
            call('cookies.header.size.computed', 16)
        ])
        # Cookies are reported largest-first in the log message.
        mock_logger.info.assert_called_once_with(
            "Large (>= 1) cookie header detected. BEGIN-COOKIE-SIZES(total=16) b: 3, a: 2, c: 1 END-COOKIE-SIZES"
        )

    @override_settings(COOKIE_HEADER_SIZE_LOGGING_THRESHOLD=9999)
    @override_settings(COOKIE_SAMPLING_REQUEST_COUNT=1)
    @patch('openedx.core.lib.request_utils.log', autospec=True)
    @patch("openedx.core.lib.request_utils.set_custom_attribute")
    def test_log_cookie_with_sampling(self, mock_set_custom_attribute, mock_logger):
        # COOKIE_SAMPLING_REQUEST_COUNT=1 makes the random sample deterministic (always chosen).
        middleware = CookieMonitoringMiddleware(self.mock_response)
        cookies_dict = {
            "a": "yy",
            "b": "xxx",
            "c": "z",
        }
        middleware(self.get_mock_request(cookies_dict))
        mock_set_custom_attribute.assert_has_calls([
            call('cookies.header.size', 16),
            call('cookies.header.size.computed', 16)
        ])
        mock_logger.info.assert_called_once_with(
            "Sampled small (< 9999) cookie header. BEGIN-COOKIE-SIZES(total=16) b: 3, a: 2, c: 1 END-COOKIE-SIZES"
        )

    @override_settings(COOKIE_HEADER_SIZE_LOGGING_THRESHOLD=9999)
    @override_settings(COOKIE_SAMPLING_REQUEST_COUNT=1)
    @patch('openedx.core.lib.request_utils.log', autospec=True)
    @patch("openedx.core.lib.request_utils.set_custom_attribute")
    def test_empty_cookie_header_skips_sampling(self, mock_set_custom_attribute, mock_logger):
        # An empty cookie header must short-circuit before the sampling logic.
        middleware = CookieMonitoringMiddleware(self.mock_response)
        cookies_dict = {}
        middleware(self.get_mock_request(cookies_dict))
        mock_set_custom_attribute.assert_has_calls([
            call('cookies.header.size', 0),
        ])
        mock_logger.info.assert_not_called()

    def get_mock_request(self, cookies_dict):
        """
        Return mock request with the provided cookies in the header.
        """
        factory = RequestFactory()
        for name, value in cookies_dict.items():
            factory.cookies[name] = value
        return factory.request()
class TestGetExpectedErrorSettingsDict(unittest.TestCase):
"""
Tests for processing issues in _get_expected_error_settings_dict()

View File

@@ -338,7 +338,7 @@ django-sekizai==3.0.1
# via
# -r requirements/edx/base.in
# django-wiki
django-ses==2.6.0
django-ses==2.6.1
# via -r requirements/edx/base.in
django-simple-history==3.0.0
# via
@@ -428,9 +428,10 @@ edx-django-release-util==1.2.0
# via -r requirements/edx/base.in
edx-django-sites-extensions==4.0.0
# via -r requirements/edx/base.in
edx-django-utils==4.5.0
edx-django-utils==4.6.0
# via
# -r requirements/edx/base.in
# -r requirements/edx/github.in
# django-config-models
# edx-drf-extensions
# edx-enterprise
@@ -552,7 +553,6 @@ fs-s3fs==0.1.8
# django-pyfs
future==0.18.2
# via
# django-ses
# edx-celeryutils
# pyjwkest
geoip2==4.5.0
@@ -1031,7 +1031,7 @@ uritemplate==4.1.1
# via
# coreapi
# drf-yasg
urllib3==1.26.8
urllib3==1.26.9
# via
# -r requirements/edx/paver.txt
# elasticsearch

View File

@@ -434,7 +434,7 @@ django-sekizai==3.0.1
# via
# -r requirements/edx/testing.txt
# django-wiki
django-ses==2.6.0
django-ses==2.6.1
# via -r requirements/edx/testing.txt
django-simple-history==3.0.0
# via
@@ -535,7 +535,7 @@ edx-django-release-util==1.2.0
# via -r requirements/edx/testing.txt
edx-django-sites-extensions==4.0.0
# via -r requirements/edx/testing.txt
edx-django-utils==4.5.0
edx-django-utils==4.6.0
# via
# -r requirements/edx/testing.txt
# django-config-models
@@ -699,7 +699,6 @@ fs-s3fs==0.1.8
future==0.18.2
# via
# -r requirements/edx/testing.txt
# django-ses
# edx-celeryutils
# pyjwkest
geoip2==4.5.0
@@ -1497,7 +1496,7 @@ uritemplate==4.1.1
# -r requirements/edx/testing.txt
# coreapi
# drf-yasg
urllib3==1.26.8
urllib3==1.26.9
# via
# -r requirements/edx/testing.txt
# elasticsearch

View File

@@ -84,7 +84,7 @@ stevedore==3.5.0
# via code-annotations
text-unidecode==1.3
# via python-slugify
urllib3==1.26.8
urllib3==1.26.9
# via requests
zipp==3.7.0
# via importlib-metadata

View File

@@ -75,3 +75,6 @@ git+https://github.com/edx/django-require.git@0c54adb167142383b26ea6b3edecc32118
git+https://github.com/open-craft/xblock-poll@v1.12.0#egg=xblock-poll==1.12.0
git+https://github.com/edx-solutions/xblock-drag-and-drop-v2@v2.3.5#egg=xblock-drag-and-drop-v2==2.3.5
# Temporary for testing edx-django-utils upgrade
git+https://github.com/edx/edx-django-utils.git@robrap/ARCHBOM-2054-move-cookie-monitoring-middleware#egg=edx_django_utils

View File

@@ -46,7 +46,7 @@ stevedore==3.5.0
# via
# -r requirements/edx/paver.in
# edx-opaque-keys
urllib3==1.26.8
urllib3==1.26.9
# via requests
watchdog==2.1.6
# via -r requirements/edx/paver.in

View File

@@ -420,7 +420,7 @@ django-sekizai==3.0.1
# via
# -r requirements/edx/base.txt
# django-wiki
django-ses==2.6.0
django-ses==2.6.1
# via -r requirements/edx/base.txt
django-simple-history==3.0.0
# via
@@ -519,7 +519,7 @@ edx-django-release-util==1.2.0
# via -r requirements/edx/base.txt
edx-django-sites-extensions==4.0.0
# via -r requirements/edx/base.txt
edx-django-utils==4.5.0
edx-django-utils==4.6.0
# via
# -r requirements/edx/base.txt
# django-config-models
@@ -675,7 +675,6 @@ fs-s3fs==0.1.8
future==0.18.2
# via
# -r requirements/edx/base.txt
# django-ses
# edx-celeryutils
# pyjwkest
geoip2==4.5.0
@@ -1384,7 +1383,7 @@ uritemplate==4.1.1
# -r requirements/edx/base.txt
# coreapi
# drf-yasg
urllib3==1.26.8
urllib3==1.26.9
# via
# -r requirements/edx/base.txt
# elasticsearch

View File

@@ -12,5 +12,5 @@ idna==3.3
# via requests
requests==2.27.1
# via -r scripts/xblock/requirements.in
urllib3==1.26.8
urllib3==1.26.9
# via requests