Adaptive learning: Add implementation for grade or bonus goal in learner profile #9699

Open · wants to merge 19 commits into base: feature/adaptive-learning/learner-profile

Commits (19)
73b8614 Implement aimForGradeOrBonus (JohannesStoehr, Nov 7, 2024)
6081d01 Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 7, 2024)
bd77d23 Fix architecture (JohannesStoehr, Nov 7, 2024)
253e989 Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 8, 2024)
5c0142f Fix exercise ordering (JohannesStoehr, Nov 9, 2024)
74c8693 Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 11, 2024)
36de38b Merge branch 'refs/heads/feature/adaptive-learning/learner-profile' i… (JohannesStoehr, Nov 11, 2024)
1eac74b Fix server start up (JohannesStoehr, Nov 11, 2024)
6a37a21 Fix queries (JohannesStoehr, Nov 11, 2024)
eadd7bf Fix tests (JohannesStoehr, Nov 11, 2024)
cdf4b1f Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 18, 2024)
1dfc703 Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 19, 2024)
d174485 Fix test (JohannesStoehr, Nov 19, 2024)
d20b87e Add some more tests (JohannesStoehr, Nov 19, 2024)
c282358 Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 20, 2024)
7fdca44 Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 21, 2024)
6a5f18c Flo (JohannesStoehr, Nov 21, 2024)
8ad593c Fix test compilation (JohannesStoehr, Nov 25, 2024)
1fb3f3e Merge branch 'feature/adaptive-learning/learner-profile' into feature… (JohannesStoehr, Nov 28, 2024)
File: PreferenceScale.java (new file)
@@ -0,0 +1,19 @@
package de.tum.cit.aet.artemis.atlas.domain.profile;

/**
* Enum for the preferences on a Likert scale regarding settings in the (course) learner profile, see {@link CourseLearnerProfile} and {@link LearnerProfile}.
*/
public enum PreferenceScale {

LOW(1), MEDIUM_LOW(2), MEDIUM(3), MEDIUM_HIGH(4), HIGH(5);

private final int value;

PreferenceScale(int value) {
this.value = value;
}

public int getValue() {
return value;
}
}
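For orientation, a minimal standalone sketch of how a stored preference value might be checked against this scale; the recommendation service below compares the profile's aimForGradeOrBonus against HIGH and MEDIUM_HIGH in the same way. The helper class and its name are hypothetical and assume the profile stores the Likert value as an int matching PreferenceScale.getValue().

import de.tum.cit.aet.artemis.atlas.domain.profile.PreferenceScale;

// Hypothetical helper, not part of this PR: interprets a Likert value (1-5) stored on a profile.
public class PreferenceScaleSketch {

    // True if the stored preference is at least MEDIUM_HIGH (4) on the five-point scale.
    static boolean aimsForGradeOrBonus(int storedValue) {
        return storedValue >= PreferenceScale.MEDIUM_HIGH.getValue();
    }

    public static void main(String[] args) {
        System.out.println(aimsForGradeOrBonus(PreferenceScale.HIGH.getValue()));   // true
        System.out.println(aimsForGradeOrBonus(PreferenceScale.MEDIUM.getValue())); // false
    }
}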
File: CourseLearnerProfileRepository.java
@@ -28,5 +28,5 @@ public interface CourseLearnerProfileRepository extends ArtemisJpaRepository<Cou

@Transactional // ok because of delete
@Modifying
void deleteAllByCourse(Course couese);
void deleteAllByCourse(Course course);
}
File: LearningPathRepository.java
@@ -36,6 +36,13 @@ default LearningPath findWithEagerUserByIdElseThrow(long learningPathId) {
@EntityGraph(type = LOAD, attributePaths = { "competencies" })
Optional<LearningPath> findWithEagerCompetenciesByCourseIdAndUserId(long courseId, long userId);

@EntityGraph(type = LOAD, attributePaths = { "course" })
Optional<LearningPath> findWithEagerCourseById(long learningPathId);

default LearningPath findWithEagerCourseByIdElseThrow(long learningPathId) {
return getValueElseThrow(findWithEagerCourseById(learningPathId), learningPathId);
}

@EntityGraph(type = LOAD, attributePaths = { "course", "competencies" })
Optional<LearningPath> findWithEagerCourseAndCompetenciesById(long learningPathId);

@@ -63,11 +70,23 @@ SELECT COUNT (learningPath)
""")
long countLearningPathsOfEnrolledStudentsInCourse(@Param("courseId") long courseId);

@EntityGraph(type = LOAD, attributePaths = { "competencies", "competencies.lectureUnitLinks", "competencies.lectureUnitLinks.lectureUnit", "competencies.exerciseLinks",
"competencies.exerciseLinks.exercise" })
Optional<LearningPath> findWithCompetenciesAndLectureUnitsAndExercisesById(long learningPathId);
@Query("""
SELECT l
FROM LearningPath l
LEFT JOIN FETCH l.competencies c
LEFT JOIN FETCH c.lectureUnitLinks lul
LEFT JOIN FETCH lul.lectureUnit
LEFT JOIN FETCH c.exerciseLinks el
LEFT JOIN FETCH el.exercise
LEFT JOIN FETCH l.user u
LEFT JOIN FETCH u.learnerProfile lp
LEFT JOIN FETCH lp.courseLearnerProfiles clp
WHERE l.id = :learningPathId
AND clp.course.id = l.course.id
""")
Optional<LearningPath> findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileById(@Param("learningPathId") long learningPathId);

default LearningPath findWithCompetenciesAndLectureUnitsAndExercisesByIdElseThrow(long learningPathId) {
return getValueElseThrow(findWithCompetenciesAndLectureUnitsAndExercisesById(learningPathId), learningPathId);
default LearningPath findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileByIdElseThrow(long learningPathId) {
return getValueElseThrow(findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileById(learningPathId), learningPathId);
}
}
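Because the WHERE clause also filters on clp.course.id, the LEFT JOIN FETCH of courseLearnerProfiles effectively behaves like an inner join: the query returns an empty Optional when the user has no CourseLearnerProfile for the learning path's course yet. A minimal caller sketch (variable names are illustrative, the repository call is the one added above):

// Illustrative caller; learningPathRepository and learningPathId are assumed to be in scope.
Optional<LearningPath> learningPath = learningPathRepository
        .findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileById(learningPathId);
if (learningPath.isEmpty()) {
    // No CourseLearnerProfile exists for this user and course yet; the LearningPathService change
    // further down creates the missing profile and re-fetches.
}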
File: LearningPathRecommendationService.java
@@ -1,6 +1,10 @@
package de.tum.cit.aet.artemis.atlas.service.learningpath;

import static de.tum.cit.aet.artemis.atlas.domain.profile.PreferenceScale.HIGH;
import static de.tum.cit.aet.artemis.atlas.domain.profile.PreferenceScale.MEDIUM_HIGH;
import static de.tum.cit.aet.artemis.core.config.Constants.PROFILE_CORE;
import static de.tum.cit.aet.artemis.exercise.domain.IncludedInOverallScore.INCLUDED_AS_BONUS;
import static de.tum.cit.aet.artemis.exercise.domain.IncludedInOverallScore.INCLUDED_COMPLETELY;

import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
@@ -14,6 +18,7 @@
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@@ -31,6 +36,7 @@
import de.tum.cit.aet.artemis.atlas.domain.competency.LearningPath;
import de.tum.cit.aet.artemis.atlas.domain.competency.Prerequisite;
import de.tum.cit.aet.artemis.atlas.domain.competency.RelationType;
import de.tum.cit.aet.artemis.atlas.domain.profile.CourseLearnerProfile;
import de.tum.cit.aet.artemis.atlas.repository.CompetencyProgressRepository;
import de.tum.cit.aet.artemis.atlas.repository.CompetencyRelationRepository;
import de.tum.cit.aet.artemis.atlas.repository.CourseCompetencyRepository;
@@ -39,6 +45,7 @@
import de.tum.cit.aet.artemis.exercise.domain.BaseExercise;
import de.tum.cit.aet.artemis.exercise.domain.DifficultyLevel;
import de.tum.cit.aet.artemis.exercise.domain.Exercise;
import de.tum.cit.aet.artemis.exercise.domain.IncludedInOverallScore;
import de.tum.cit.aet.artemis.lecture.domain.LectureUnit;
import de.tum.cit.aet.artemis.lecture.service.LearningObjectService;

@@ -98,6 +105,8 @@ public class LearningPathRecommendationService {
private static final double[][] EXERCISE_DIFFICULTY_DISTRIBUTION_LUT = new double[][] { { 0.87, 0.12, 0.01 }, { 0.80, 0.18, 0.02 }, { 0.72, 0.25, 0.03 }, { 0.61, 0.33, 0.06 },
{ 0.50, 0.40, 0.10 }, { 0.39, 0.45, 0.16 }, { 0.28, 0.48, 0.24 }, { 0.20, 0.47, 0.33 }, { 0.13, 0.43, 0.44 }, { 0.08, 0.37, 0.55 }, { 0.04, 0.29, 0.67 }, };

private static final double COMPETENCY_LINK_WEIGHT_TO_GRADE_AIM_RATIO = 2;

protected LearningPathRecommendationService(CompetencyRelationRepository competencyRelationRepository, LearningObjectService learningObjectService,
ParticipantScoreService participantScoreService, CompetencyProgressRepository competencyProgressRepository, CourseCompetencyRepository courseCompetencyRepository) {
this.competencyRelationRepository = competencyRelationRepository;
@@ -476,6 +485,9 @@ public List<LearningObject> getRecommendedOrderOfLearningObjects(User user, Cour
* @return the recommended ordering of learning objects
*/
public List<LearningObject> getRecommendedOrderOfLearningObjects(User user, CourseCompetency competency, double combinedPriorConfidence) {
var learnerProfile = user.getLearnerProfile();
var courseLearnerProfile = learnerProfile.getCourseLearnerProfiles().stream().findFirst().orElse(new CourseLearnerProfile());

var pendingLectureUnits = competency.getLectureUnitLinks().stream().map(CompetencyLectureUnitLink::getLectureUnit).filter(lectureUnit -> !lectureUnit.isCompletedFor(user))
.toList();
List<LearningObject> recommendedOrder = new ArrayList<>(pendingLectureUnits);
@@ -492,8 +504,7 @@ public List<LearningObject> getRecommendedOrderOfLearningObjects(User user, Cour

// First sort exercises based on title to ensure consistent ordering over multiple calls then prefer higher weighted exercises
final var pendingExercises = competency.getExerciseLinks().stream().filter(link -> !learningObjectService.isCompletedByUser(link.getExercise(), user))
.sorted(Comparator.comparing(link -> link.getExercise().getTitle())).sorted(Comparator.comparingDouble(CompetencyExerciseLink::getWeight).reversed())
.map(CompetencyExerciseLink::getExercise).toList();
.sorted(getExerciseOrderComparator(courseLearnerProfile.getAimForGradeOrBonus())).map(CompetencyExerciseLink::getExercise).toList();

final var pendingExercisePoints = pendingExercises.stream().mapToDouble(BaseExercise::getMaxPoints).sum();

@@ -504,7 +515,7 @@ public List<LearningObject> getRecommendedOrderOfLearningObjects(User user, Cour
}
final var recommendedExerciseDistribution = getRecommendedExercisePointDistribution(numberOfRequiredExercisePointsToMaster, weightedConfidence);

scheduleExercisesByDistribution(recommendedOrder, recommendedExerciseDistribution, difficultyLevelMap);
scheduleExercisesByDistribution(recommendedOrder, recommendedExerciseDistribution, difficultyLevelMap, courseLearnerProfile);
return recommendedOrder;
}

@@ -528,30 +539,30 @@ private void scheduleAllExercises(List<LearningObject> recommendedOrder, Map<Dif
* @param difficultyMap a map from difficulty level to a set of corresponding exercises
*/
private void scheduleExercisesByDistribution(List<LearningObject> recommendedOrder, double[] recommendedExercisePointDistribution,
Map<DifficultyLevel, List<Exercise>> difficultyMap) {
Map<DifficultyLevel, List<Exercise>> difficultyMap, CourseLearnerProfile courseLearnerProfile) {
final var easyExercises = new ArrayList<Exercise>();
final var mediumExercises = new ArrayList<Exercise>();
final var hardExercises = new ArrayList<Exercise>();

// choose as many exercises from the correct difficulty level as possible
final var missingEasy = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.EASY, recommendedExercisePointDistribution[0], easyExercises);
final var missingHard = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.HARD, recommendedExercisePointDistribution[2], hardExercises);
final var missingEasy = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.EASY, recommendedExercisePointDistribution[0], easyExercises, courseLearnerProfile);
final var missingHard = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.HARD, recommendedExercisePointDistribution[2], hardExercises, courseLearnerProfile);

// if there are not sufficiently many exercises per difficulty level, prefer medium difficulty
// case 1: no medium exercises available/medium exercises missing: continue to fill with easy/hard exercises
// case 2: medium exercises available: no medium exercises missing -> missing exercises must be easy/hard -> in both scenarios medium is the closest difficulty level
double mediumExercisePoints = recommendedExercisePointDistribution[1] + missingEasy + missingHard;
double numberOfMissingExercisePoints = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.MEDIUM, mediumExercisePoints, mediumExercises);
double numberOfMissingExercisePoints = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.MEDIUM, mediumExercisePoints, mediumExercises, courseLearnerProfile);

// if there are still not sufficiently many medium exercises, choose easy difficulty
// prefer easy to hard exercises to avoid student overload
if (numberOfMissingExercisePoints > 0 && !difficultyMap.get(DifficultyLevel.EASY).isEmpty()) {
numberOfMissingExercisePoints = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.EASY, numberOfMissingExercisePoints, easyExercises);
numberOfMissingExercisePoints = selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.EASY, numberOfMissingExercisePoints, easyExercises, courseLearnerProfile);
}

// fill remaining slots with hard difficulty
if (numberOfMissingExercisePoints > 0 && !difficultyMap.get(DifficultyLevel.HARD).isEmpty()) {
selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.HARD, numberOfMissingExercisePoints, hardExercises);
selectExercisesWithDifficulty(difficultyMap, DifficultyLevel.HARD, numberOfMissingExercisePoints, hardExercises, courseLearnerProfile);
}

recommendedOrder.addAll(easyExercises);
@@ -571,15 +582,62 @@ private void scheduleExercisesByDistribution(List<LearningObject> recommendedOrd
* @return amount of points that are missing, if negative the amount of points that are selected too much
*/
private static double selectExercisesWithDifficulty(Map<DifficultyLevel, List<Exercise>> difficultyMap, DifficultyLevel difficulty, double exercisePoints,
List<Exercise> exercises) {
List<Exercise> exercises, CourseLearnerProfile courseLearnerProfile) {
var remainingExercisePoints = new AtomicDouble(exercisePoints);
var selectedExercises = difficultyMap.get(difficulty).stream().takeWhile(exercise -> remainingExercisePoints.getAndAdd(-exercise.getMaxPoints()) >= 0)
.collect(Collectors.toSet());

Predicate<Exercise> exercisePredicate = getExerciseSelectionPredicate(courseLearnerProfile.getAimForGradeOrBonus(), remainingExercisePoints);

var selectedExercises = difficultyMap.get(difficulty).stream().takeWhile(exercisePredicate).toList();

exercises.addAll(selectedExercises);
difficultyMap.get(difficulty).removeAll(selectedExercises);
return remainingExercisePoints.get();
}

private static int getIncludeInOverallScoreWeight(IncludedInOverallScore includedInOverallScore) {
return switch (includedInOverallScore) {
case INCLUDED_COMPLETELY -> 0;
case INCLUDED_AS_BONUS -> 1;
case NOT_INCLUDED -> 2;
};
}

/**
* Creates a comparator that orders exercises based on the aim for grade or bonus, the link weight for the current competency and as a tiebreaker the lexicographic order of
* the exercise title. The higher the aim for grade or bonus is, the more the inclusion category influences the ordering compared to the link weight.
*
* @param aimForGradeOrBonus the aim for grade or bonus
* @return the comparator that orders the exercise based on the preference
*/
private static Comparator<CompetencyExerciseLink> getExerciseOrderComparator(int aimForGradeOrBonus) {
Comparator<CompetencyExerciseLink> exerciseComparator = Comparator.comparingDouble(exerciseLink -> (COMPETENCY_LINK_WEIGHT_TO_GRADE_AIM_RATIO * exerciseLink.getWeight())
+ aimForGradeOrBonus * getIncludeInOverallScoreWeight(exerciseLink.getExercise().getIncludedInOverallScore()));
exerciseComparator = exerciseComparator.reversed();

exerciseComparator = exerciseComparator.thenComparing(exerciseLink -> exerciseLink.getExercise().getTitle());
return exerciseComparator;
}
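A standalone sketch of the resulting ordering, using plain records and made-up weights instead of the Artemis domain classes; the factor 2 corresponds to COMPETENCY_LINK_WEIGHT_TO_GRADE_AIM_RATIO and the inclusion weights to getIncludeInOverallScoreWeight above:

import java.util.Comparator;
import java.util.List;

// Standalone sketch, not Artemis code: reproduces the ordering metric with plain records.
public class ExerciseOrderSketch {

    // inclusionWeight: 0 = INCLUDED_COMPLETELY, 1 = INCLUDED_AS_BONUS, 2 = NOT_INCLUDED
    record Link(String title, double weight, int inclusionWeight) {
    }

    public static void main(String[] args) {
        int aimForGradeOrBonus = 4; // MEDIUM_HIGH on the five-point PreferenceScale

        Comparator<Link> comparator = Comparator
                .comparingDouble((Link link) -> 2 * link.weight() + aimForGradeOrBonus * link.inclusionWeight())
                .reversed()
                .thenComparing(Link::title);

        List<Link> links = List.of(
                new Link("B quiz", 0.5, 0),      // metric 1.0
                new Link("A quiz", 0.5, 0),      // metric 1.0, tie broken lexicographically by title
                new Link("C exercise", 1.0, 0)); // metric 2.0, ordered first

        links.stream().sorted(comparator).map(Link::title).forEach(System.out::println);
        // Prints: C exercise, A quiz, B quiz
    }
}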

/**
* Creates a predicate that selects exercises based on the aim for grade or bonus and the remaining exercise points.
*
* @param aimForGradeOrBonus the aim for grade or bonus
* @param remainingExercisePoints the remaining exercise points that should be scheduled
* @return the predicate that determines whether an exercise should still be selected, based on the preference
*/
private static Predicate<Exercise> getExerciseSelectionPredicate(int aimForGradeOrBonus, AtomicDouble remainingExercisePoints) {
Predicate<Exercise> exercisePredicate = exercise -> remainingExercisePoints.getAndAdd(-exercise.getMaxPoints()) >= 0;
if (aimForGradeOrBonus == HIGH.getValue()) {
exercisePredicate = exercisePredicate
.or(exercise -> exercise.getIncludedInOverallScore() == INCLUDED_COMPLETELY || exercise.getIncludedInOverallScore() == INCLUDED_AS_BONUS);
}
else if (aimForGradeOrBonus == MEDIUM_HIGH.getValue()) {
exercisePredicate = exercisePredicate.or(exercise -> exercise.getIncludedInOverallScore() == INCLUDED_COMPLETELY);
}

return exercisePredicate;
}
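A companion sketch of the selection predicate for a learner whose aimForGradeOrBonus equals HIGH; a plain double array stands in for the Guava AtomicDouble used above, and the boolean graded stands in for INCLUDED_COMPLETELY or INCLUDED_AS_BONUS:

import java.util.function.Predicate;
import java.util.stream.Stream;

// Standalone sketch, not Artemis code: shows why graded exercises survive takeWhile even after
// the recommended point budget for a difficulty level is exhausted.
public class ExerciseSelectionSketch {

    record Ex(String title, double maxPoints, boolean graded) {
    }

    public static void main(String[] args) {
        double[] remainingPoints = { 3.0 }; // recommended points for this difficulty level

        // Base rule: take exercises while the budget has not been exhausted yet.
        Predicate<Ex> withinBudget = ex -> {
            double before = remainingPoints[0];
            remainingPoints[0] -= ex.maxPoints();
            return before >= 0;
        };
        // HIGH aim: additionally always take exercises that count towards the grade or bonus.
        Predicate<Ex> highAim = withinBudget.or(Ex::graded);

        Stream.of(
                new Ex("graded 1", 4.0, true),   // budget of 3.0 not yet exhausted, selected
                new Ex("graded 2", 4.0, true),   // budget exhausted, but graded, still selected
                new Ex("practice", 2.0, false))  // over budget and not graded, takeWhile stops
                .takeWhile(highAim)
                .map(Ex::title)
                .forEach(System.out::println);   // prints: graded 1, graded 2
    }
}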

/**
* Computes the average confidence of all prior competencies.
*
File: LearningPathService.java
@@ -132,6 +132,7 @@ public void enableLearningPathsForCourse(@NotNull Course course) {
*/
public void generateLearningPaths(@NotNull Course course) {
Set<User> students = userRepository.getStudentsWithLearnerProfile(course);
courseLearnerProfileService.createCourseLearnerProfiles(course, students);
generateLearningPaths(course, students);
}

@@ -401,7 +402,7 @@ public LearningPathCompetencyGraphDTO generateLearningPathCompetencyInstructorGr
* @return the navigation overview
*/
public LearningPathNavigationOverviewDTO getLearningPathNavigationOverview(long learningPathId) {
var learningPath = findWithCompetenciesAndReleasedLearningObjectsAndCompletedUsersById(learningPathId);
var learningPath = findWithCompetenciesAndReleasedLearningObjectsAndCompletedUsersAndLearnerProfileById(learningPathId);
if (!userRepository.getUser().equals(learningPath.getUser())) {
throw new AccessForbiddenException("You are not allowed to access this learning path");
}
@@ -417,8 +418,17 @@ public LearningPathNavigationOverviewDTO getLearningPathNavigationOverview(long
* @param learningPathId the id of the learning path to fetch
* @return the learning path with fetched data
*/
public LearningPath findWithCompetenciesAndReleasedLearningObjectsAndCompletedUsersById(long learningPathId) {
LearningPath learningPath = learningPathRepository.findWithCompetenciesAndLectureUnitsAndExercisesByIdElseThrow(learningPathId);
public LearningPath findWithCompetenciesAndReleasedLearningObjectsAndCompletedUsersAndLearnerProfileById(long learningPathId) {
Optional<LearningPath> optionalLearningPath = learningPathRepository.findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileById(learningPathId);
LearningPath learningPath;
if (optionalLearningPath.isEmpty()) {
LearningPath learningPathWithCourse = learningPathRepository.findWithEagerCourseByIdElseThrow(learningPathId);
courseLearnerProfileService.createCourseLearnerProfile(learningPathWithCourse.getCourse(), learningPathWithCourse.getUser());
learningPath = learningPathRepository.findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileByIdElseThrow(learningPathId);
}
else {
learningPath = optionalLearningPath.get();
}

// Remove exercises that are not visible to students
learningPath.getCompetencies().forEach(competency -> competency
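The empty-Optional branch above exists because the new repository query only matches learning paths whose user already has a CourseLearnerProfile for the course. As a compact sketch, the same get-or-create flow expressed with orElseGet (a hypothetical refactoring that uses the same repository and service calls as the PR):

// Sketch only: equivalent get-or-create flow using orElseGet instead of the explicit if/else.
LearningPath learningPath = learningPathRepository
        .findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileById(learningPathId)
        .orElseGet(() -> {
            // No CourseLearnerProfile exists yet: load the course, create the profile, then re-fetch eagerly.
            LearningPath withCourse = learningPathRepository.findWithEagerCourseByIdElseThrow(learningPathId);
            courseLearnerProfileService.createCourseLearnerProfile(withCourse.getCourse(), withCourse.getUser());
            return learningPathRepository.findWithCompetenciesAndLectureUnitsAndExercisesAndLearnerProfileByIdElseThrow(learningPathId);
        });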