Skip to content

Commit

Permalink
refactor: put implementation behind feature flag
Browse files Browse the repository at this point in the history
  • Loading branch information
mariajgrimaldi committed Mar 21, 2024
1 parent 9a5dbd2 commit d2bf397
Show file tree
Hide file tree
Showing 23 changed files with 21,616 additions and 439 deletions.
16 changes: 15 additions & 1 deletion openassessment/assessment/models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,21 @@ def create(cls, rubric, scorer_id, submission_uuid, score_type, feedback=None, s

@classmethod
def get_score_dict(cls, scores_dict, score_type="median"):
    """Reduce a dict of per-criterion score lists to a single score per criterion.

    When the ``ENABLE_ORA_PEER_CONFIGURABLE_GRADING`` feature flag is on, the
    reduction strategy is chosen dynamically by ``score_type`` (dispatching to
    ``get_<score_type>_score_dict``, e.g. ``get_mean_score_dict``). When the
    flag is off, the legacy median calculation is always used regardless of
    ``score_type``, preserving pre-flag behavior.

    Args:
        scores_dict (dict): Maps criterion names to lists of int scores; each
            list is reduced to a single representative value.
        score_type (str): The reduction strategy to apply ("median", "mean",
            ...). Defaults to "median". Ignored when the feature flag is off.

    Returns:
        dict: Maps criterion names to the single reduced score value.
    """
    if settings.FEATURES.get('ENABLE_ORA_PEER_CONFIGURABLE_GRADING'):
        # Dynamic dispatch: an unknown score_type raises AttributeError here,
        # which surfaces misconfiguration rather than silently defaulting.
        return getattr(cls, f"get_{score_type}_score_dict")(scores_dict)
    # Flag off: always fall back to the original median behavior.
    return cls.get_median_score_dict(scores_dict)

@classmethod
def get_median_score_dict(cls, scores_dict):
Expand Down
2 changes: 1 addition & 1 deletion openassessment/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -567,7 +567,7 @@ def _build_assessment_parts_array(cls, assessment, median_scores):
Args:
assessment - assessment containing the parts that we would like to report on.
median_scores - dictionary with criterion name keys and median score values,
                        as returned by Assessment.get_median_score_dict()
Returns:
        OrderedDict that contains an entry for each criterion of the assessment(s).
Expand Down
22 changes: 12 additions & 10 deletions openassessment/templates/legacy/edit/oa_edit_peer_assessment.html
Original file line number Diff line number Diff line change
Expand Up @@ -54,16 +54,18 @@
</p>
{% endif %}
</li>
{% comment %}
Grading-strategy selector for peer assessment. Rendered only when the
ENABLE_ORA_PEER_CONFIGURABLE_GRADING feature is on (surfaced to the template
as assessments.peer_assessment.enable_peer_configurable_grading), so courses
without the flag keep the legacy median-only behavior and see no new control.
{% endcomment %}
{% if assessments.peer_assessment.enable_peer_configurable_grading %}
<li class="field comp-setting-entry">
    <div class="wrapper-comp-setting">
        <label for="peer_assessment_grading_strategy" class="setting-label">{% trans "Grading strategy for the peer assessment" %}</label>
        <select id="peer_assessment_grading_strategy" class="input setting-input">
            <option value="mean" {% if assessments.peer_assessment.grading_strategy == 'mean' %}selected="true"{% endif %}>{% trans "Mean" %}</option>
            <option value="median" {% if assessments.peer_assessment.grading_strategy == 'median' %}selected="true"{% endif %}>{% trans "Median (default)" %}</option>
        </select>
    </div>
    <p class="setting-help">{% trans "Select the preferred grading strategy." %}</p>
</li>
{% endif %}
</ul>
</div>
</div>
Expand Down
36 changes: 18 additions & 18 deletions openassessment/xblock/static/dist/manifest.json
Original file line number Diff line number Diff line change
@@ -1,23 +1,23 @@
{
    "base_url": "/static/dist",
    "openassessment-editor-textarea.js": "/openassessment-editor-textarea.de70b044ddf6baeaf0b7.js",
    "openassessment-editor-textarea.js.map": "/openassessment-editor-textarea.de70b044ddf6baeaf0b7.js.map",
    "openassessment-editor-tinymce.js": "/openassessment-editor-tinymce.a87e38bc7b19d8273858.js",
    "openassessment-editor-tinymce.js.map": "/openassessment-editor-tinymce.a87e38bc7b19d8273858.js.map",
    "openassessment-lms.css": "/openassessment-lms.7430e499fae20eeff7bd.css",
    "openassessment-lms.js": "/openassessment-lms.7430e499fae20eeff7bd.js",
    "openassessment-lms.css.map": "/openassessment-lms.7430e499fae20eeff7bd.css.map",
    "openassessment-lms.js.map": "/openassessment-lms.7430e499fae20eeff7bd.js.map",
    "openassessment-ltr.css": "/openassessment-ltr.5b291771f2af113d4918.css",
    "openassessment-ltr.js": "/openassessment-ltr.5b291771f2af113d4918.js",
    "openassessment-ltr.css.map": "/openassessment-ltr.5b291771f2af113d4918.css.map",
    "openassessment-ltr.js.map": "/openassessment-ltr.5b291771f2af113d4918.js.map",
    "openassessment-rtl.css": "/openassessment-rtl.731b1e1ea896e74cb5c0.css",
    "openassessment-rtl.js": "/openassessment-rtl.731b1e1ea896e74cb5c0.js",
    "openassessment-rtl.css.map": "/openassessment-rtl.731b1e1ea896e74cb5c0.css.map",
    "openassessment-rtl.js.map": "/openassessment-rtl.731b1e1ea896e74cb5c0.js.map",
    "openassessment-studio.js": "/openassessment-studio.44a98dc6a1d4b7f295cd.js",
    "openassessment-studio.js.map": "/openassessment-studio.44a98dc6a1d4b7f295cd.js.map",
    "fallback-default.png": "/4620b30a966533ace489dcc7afb151b9.png",
    "default-avatar.svg": "/95ec738c0b7faac5b5c9126794446bbd.svg"
}

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Large diffs are not rendered by default.

This file was deleted.

This file was deleted.

Loading

0 comments on commit d2bf397

Please sign in to comment.