diff --git a/src/components/AssessmentCard.tsx b/src/components/AssessmentCard.tsx new file mode 100644 index 00000000..5946e216 --- /dev/null +++ b/src/components/AssessmentCard.tsx @@ -0,0 +1,30 @@ +import React from "react"; +import { LinkButton } from "./LinkButton"; + +export interface AssessmentCardProps { + assessmentHref: string; + readmoreHref: string; + title: string; + body: string; +} +export const AssessmentCard: React.FC = ({ + assessmentHref, + readmoreHref, + title, + body, +}) => ( +
+
+

{title}

+

{body}

+
+
+ +
+
+ +
+
+
+
+); diff --git a/src/components/LinkButton.tsx b/src/components/LinkButton.tsx index 6b9edff3..f7aa00bc 100644 --- a/src/components/LinkButton.tsx +++ b/src/components/LinkButton.tsx @@ -7,11 +7,7 @@ export interface LinkButtonProps { } export const LinkButton: React.FC = ({ label, href }) => ( -
-
- - {label} - -
-
+ + {label} + ); diff --git a/src/components/QuestionCard.tsx b/src/components/QuestionCard.tsx new file mode 100644 index 00000000..289881ad --- /dev/null +++ b/src/components/QuestionCard.tsx @@ -0,0 +1,29 @@ +import React from "react"; +import Image from "../components/Image"; + +export interface QuestionCardProps { + title: string; + body: string; + image: string; +} +export const QuestionCard: React.FC = ({ + title, + body, + image, +}) => ( +
+
+
+ + assessment page view + +
+
+
+

{title}

+

{body}

+
+
+
+
+); diff --git a/src/lib/urls.ts b/src/lib/urls.ts new file mode 100644 index 00000000..d163066b --- /dev/null +++ b/src/lib/urls.ts @@ -0,0 +1,7 @@ +export const plcourseURL = { + "exam_instantFeedback": "https://www.prairielearn.org/pl/course_instance/4970/assessment/2316935", + "homework_template": "https://www.prairielearn.org/pl/course_instance/4970/assessment/2316937", + "lecture_selfGuided": "https://www.prairielearn.org/pl/course_instance/4970/assessment/2316938", + "groupWork_jupyter": "https://www.prairielearn.org/pl/course_instance/4970/assessment/2316936" +}; + diff --git a/src/pages/gallery/assessments/autogradedExams/assessment-generator.png b/src/pages/gallery/assessments/autogradedExams/assessment-generator.png new file mode 100644 index 00000000..b80640a7 Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/assessment-generator.png differ diff --git a/src/pages/gallery/assessments/autogradedExams/index.module.scss b/src/pages/gallery/assessments/autogradedExams/index.module.scss new file mode 100644 index 00000000..411d66ad --- /dev/null +++ b/src/pages/gallery/assessments/autogradedExams/index.module.scss @@ -0,0 +1,6 @@ +@import "~bootstrap/scss/_functions.scss"; +@import "~bootstrap/scss/_variables.scss"; + +.container { + background-color: $gray-200; +} diff --git a/src/pages/gallery/assessments/autogradedExams/index.tsx b/src/pages/gallery/assessments/autogradedExams/index.tsx new file mode 100644 index 00000000..7af730e5 --- /dev/null +++ b/src/pages/gallery/assessments/autogradedExams/index.tsx @@ -0,0 +1,196 @@ +import React from "react"; +import Head from "next/head"; +import classnames from "classnames"; +import { Heading } from "../../../../components/Heading"; +import { PageBanner } from "../../../../components/Banner"; +import { LinkButton } from "../../../../components/LinkButton"; +import { QuestionCard } from "../../../../components/QuestionCard"; + +import Image from "../../../../components/Image"; +import 
retryImage from "./student-retry.png"; +import assessmentGenerator from "./assessment-generator.png"; +import questionGenerator from "./question-generator.png"; +import question1Image from "./question1.png" +import question2Image from "./question2.png" +import question3Image from "./question3.png" +import question4Image from "./question4.png" + +import { plcourseURL } from "../../../../lib/urls"; + +import styles from "./index.module.scss"; + +export default function DefaultExam() { + return ( + + + Exams | PrairieLearn + + + +
+
+

+ Studies have shown that learning and retention of knowledge is + enhanced through retrieval practice that incorporates feedback and + increased use of formative assessments. Here we describe how we use + PrairieLearn to create quizzes where students get immediate + feedback, shortening the feedback cycle between student learning and + assessment performance. This shorter cycle enables the use of + frequent and second-chance testing, especially in large courses, + which has been shown to lead to significant improvements in learning + outcomes and better final exam performance. +

+ + Instant feedback with retry attempts + +

+ Using the default Exam configuration in PrairieLearn, + students will receive a fixed question variant for each question + generator. This feature matches a traditional paper-and-pencil + experience, where the student receives one exam with fixed + parameters. +

+

+ By default, PrairieLearn will auto-grade each question in real-time, + and provide students with the feedback about correctness. Depending + on how instructors define the question points, students can try to + fix incorrect answers for the same parameters, and submit other + attempts for reduced credit, mimicking the concept of partial + credit. +

+ + student retry scheme + +

+ This short feedback cycle allows students to reach out to + instructors right after the test, enabling them to promptly review + and clarify any missed concepts, and consequently make adjustments + for upcoming assessments.

+ + Creating exams from question pools + +

+ Exams that are delivered asynchronously or in online unproctored + environments create an opportunity for collaborative cheating + , where a student can gain advantage by receiving information about + the exam from another student. Generating random exams from pools of + problems has been shown to mitigate collaborative cheating. In + PrairieLearn, question generators can be selected from what we call + alternative groups or pools. For example, an + alternative group with 4 question generators can select 2 of them at + random to create a version of the exam. In the figure below, an exam + with 4 questions is created from a set of 8 question generators. +

+ + student retry scheme + +

+ In addition, question generators will create different question + variants based on randomized parameters. These question variants + will finally build a student exam, where questions appear in random + order. Our studies indicate that pools of 3-4 question generators + are effective to mitigate cheating. +

+ + Creating exams with reduced difficulty variance + +

+ A concern with randomized exams is how one can ensure students + receive problems of roughly similar difficulty. Problems can be + binned into pools by topic coverage and difficulty, but it can be + challenging to generate problems of identical difficulty. When + creating question generators for the first time, an instructor can + use previous experiences to decide which ones should be combined in + an alternative group. PrairieLearn will collect data from these + questions, which later can be used by instructors to improve + fairness of these randomized exams.

+ + question generator scheme +
+
+ +
+
+ Example from demo course + +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+
+ +
+
+ +
+
+
+ ); +} diff --git a/src/pages/gallery/assessments/autogradedExams/question-generator.png b/src/pages/gallery/assessments/autogradedExams/question-generator.png new file mode 100644 index 00000000..90fe9d5c Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/question-generator.png differ diff --git a/src/pages/gallery/assessments/autogradedExams/question1.png b/src/pages/gallery/assessments/autogradedExams/question1.png new file mode 100644 index 00000000..979e79b3 Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/question1.png differ diff --git a/src/pages/gallery/assessments/autogradedExams/question2.png b/src/pages/gallery/assessments/autogradedExams/question2.png new file mode 100644 index 00000000..812f56a3 Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/question2.png differ diff --git a/src/pages/gallery/assessments/autogradedExams/question3.png b/src/pages/gallery/assessments/autogradedExams/question3.png new file mode 100644 index 00000000..3ae67e6a Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/question3.png differ diff --git a/src/pages/gallery/assessments/autogradedExams/question4.png b/src/pages/gallery/assessments/autogradedExams/question4.png new file mode 100644 index 00000000..5dc6c78e Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/question4.png differ diff --git a/src/pages/gallery/assessments/autogradedExams/student-retry.png b/src/pages/gallery/assessments/autogradedExams/student-retry.png new file mode 100644 index 00000000..ca1a0fac Binary files /dev/null and b/src/pages/gallery/assessments/autogradedExams/student-retry.png differ diff --git a/src/pages/gallery/assessments/groupWork/group-page1.png b/src/pages/gallery/assessments/groupWork/group-page1.png new file mode 100644 index 00000000..3548e076 Binary files /dev/null and b/src/pages/gallery/assessments/groupWork/group-page1.png differ diff --git 
a/src/pages/gallery/assessments/groupWork/index.module.scss b/src/pages/gallery/assessments/groupWork/index.module.scss new file mode 100644 index 00000000..411d66ad --- /dev/null +++ b/src/pages/gallery/assessments/groupWork/index.module.scss @@ -0,0 +1,6 @@ +@import "~bootstrap/scss/_functions.scss"; +@import "~bootstrap/scss/_variables.scss"; + +.container { + background-color: $gray-200; +} diff --git a/src/pages/gallery/assessments/groupWork/index.tsx b/src/pages/gallery/assessments/groupWork/index.tsx new file mode 100644 index 00000000..dd559414 --- /dev/null +++ b/src/pages/gallery/assessments/groupWork/index.tsx @@ -0,0 +1,166 @@ +import React from "react"; +import Head from "next/head"; +import classnames from "classnames"; +import { Heading } from "../../../../components/Heading"; +import { PageBanner } from "../../../../components/Banner"; + +import Image from "../../../../components/Image"; +import groupPage from "./group-page1.png"; + +export default function DefaultGroup() { + return ( + + + Group Work | PrairieLearn + + + +
+
+

+ Research shows that collaborative learning can increase student + persistence, improve learning outcomes, and foster better classroom + cultures. Using PrairieLearn, instructors can provide group + activities where students work collaboratively in the same + assessment, which is shared among all the group members.

+ + Group formation + +

+ PrairieLearn provides the option for instructors to pre-arrange + teams, or for students to create their own teams. For pre-assigned + groups, instructors can select one of the following options: +

+
    +
  • upload a CSV file with the group information
  • +
  • + let PrairieLearn assign students to groups at random, given a + minimum and maximum group sizes
  • +
+

+ Instructors can also let students self-assign to groups. This can be + especially helpful when giving group activities during lecture, + where groups can be created "on-the-fly" depending on the proximity + of students. Instructors can also provide the minimum and maximum + group sizes under this configuration.

+

+ For self-assignment, a student will create a group providing a group + name. This student will receive a "join code" that can be used by + others that want to join the group. Once the group reaches the + minimum size, students are able to start the assessment. Every + member of the group will have access to the same question variants, + and consequently will also share the same grade. +

+ + Facilitating collaboration among teams + +

+ The simple creation of students' teams will rarely guarantee that + students will work collaboratively. However, successful and + productive collaborations can be greatly improved by careful design + of the task, assignment of team roles and the use of available + technologies to both promote collaborations among students and + support the instructors implementing these activities. +

+

+ Assessments that are based on higher level skills such as + Analyze, Evaluate and Create from the + + Bloom's Taxonomy + + produce more opportunities for students' interactions, where they + can learn from each other. Low level skills which require students + to remember simple concepts, or apply simple calculations will + emphasize the existence of domineering leaders and free loaders. + When designing group tasks, we focus on questions that produce + discussions and decision-making. +

+

+ Another strategy to enhance collaborative learning is to provide + activities that can be self-managed by the team, such that + instructors act only as facilitators instead of source of + information. In the course demo, we provide an example that uses + JupyterLab notebooks for the [group + assessment](https://www.prairielearn.org/pl/course_instance/128605/assessment/2310480). + These notebooks can include text, images, and equations for content + presentation, and also ask students to perform computations (in this + example, using a Python 3 Kernel). +

+

+ We can use PrairieLearn external grader to check content for + correctness. This will help students self-manage their progress. + Instructors can define `#grade` cells inside the JupyterLab + notebook, which will be auto-graded for instant feedback (see image + below). +

+ assessment generator scheme + +

+ Students in the same group will share the same JupyterLab, and the + same submission history and scores. The notebooks are also + synchronized in real-time for enhanced collaborations. +

+ +

+ A lack of clarity and experience in assuming team roles can lead + students to default into domineering team leaders or passive + free-loaders. Evidence-based practices such as Process Oriented + Guided Inquiry Learning ([POGIL](https://pogil.org)) have shown that + providing students with structured roles can help them participate + more equitably during collaborative learning. We are currently + implementing POGIL roles in PrairieLearn. +

+ + {/* + Exam example from demo course + +
Question 1
+

This question asks students to compute a variable $c$ given two parameters + $a$ and $b$. The formula to compute $c$ is randomized (selected from a set of 4 + different formulas) and the parameters $a$ and $b$ are randomized as well. + Students have two attempts to complete this question: the first attempt for + full credit and the second attempt for partial credit (1/3 points).

+ +
Question 2
+

This question asks students to enter the matrix corresponding to a displayed + graph, which is generated in real-time based on randomized parameters. + Students have two attempts to complete the question. They can also receive + partial credit for each attempt, since each entry of the matrix is graded + separately.

+ +
Question 3
+

Highly randomized, in essence mixing 4 different questions into one, + since the circuit diagram changes (parallel and series), and the question + prompt changes (compute current or resistance). Since the solution involves + multiple computation steps, students get 5 attempts to complete the question + for reduced credit. +

+ +
Question 4
+

This question is randomly selected out of a pool of 3 question generators, + each one of them asking students to compute a different matrix and/or vector + operation, including matrix multiplication and outer product. + Each question generator also utilizes randomized parameters. + One of the advantages of keeping similar question variants within separate + question generators is the easy access to statistics, providing information + regarding question and exam fairness. The disadvantage is the cumbersome + bookkeeping of question generators, since one may have to coordinate changes + to many files when updates are needed. +

*/} +
+
+
+ ); +} diff --git a/src/pages/gallery/assessments/index.tsx b/src/pages/gallery/assessments/index.tsx index a2b7bf7b..22f0a9d1 100644 --- a/src/pages/gallery/assessments/index.tsx +++ b/src/pages/gallery/assessments/index.tsx @@ -1,30 +1,19 @@ import React from "react"; import classnames from "classnames"; -import { GetStaticProps } from "next"; -import Link from "next/link"; import Head from "next/head"; import { Heading } from "../../../components/Heading"; import { PageBanner } from "../../../components/Banner"; import { DemoCourseCTA } from "../../../components/DemoCourse"; -import Stack from "../../../components/Stack"; +import { AssessmentCard } from "../../../components/AssessmentCard"; -import { getAssessments } from "../../../lib/gallery/assessments"; import assessmentImage from "../../../lib/images/assessment.png"; import Image from "../../../components/Image"; import styles from "./index.module.scss"; -interface Assessment { - title: string; - slug: string; - summary: string; -} +import { plcourseURL } from "../../../lib/urls"; -interface AssessmentIndexProps { - assessments: Assessment[]; -} - -const AssessmentIndex: React.FC = ({ assessments }) => { +export default function Assessment() { return ( @@ -63,21 +52,56 @@ const AssessmentIndex: React.FC = ({ assessments }) => {
Assessment Gallery - - {assessments.map((assessment) => { - const assessmentHref = `/gallery/assessments/${assessment.slug}`; - return ( - - ); - })} - + +
+
+ +
+
+ +
+
+ +
+
+ +
+
@@ -89,24 +113,4 @@ const AssessmentIndex: React.FC = ({ assessments }) => { />
); -}; - -export default AssessmentIndex; - -export const getStaticProps: GetStaticProps< - AssessmentIndexProps -> = async () => { - // Get assessments and filter out only the props we need on this page - const rawAssessments = await getAssessments(); - const assessments = rawAssessments.map(({ title, slug, summary }) => ({ - title, - slug, - summary, - })); - - return { - props: { - assessments, - }, - }; -}; +}; \ No newline at end of file diff --git a/src/pages/gallery/assessments/masteryHomework/index.module.scss b/src/pages/gallery/assessments/masteryHomework/index.module.scss new file mode 100644 index 00000000..411d66ad --- /dev/null +++ b/src/pages/gallery/assessments/masteryHomework/index.module.scss @@ -0,0 +1,6 @@ +@import "~bootstrap/scss/_functions.scss"; +@import "~bootstrap/scss/_variables.scss"; + +.container { + background-color: $gray-200; +} diff --git a/src/pages/gallery/assessments/masteryHomework/index.tsx b/src/pages/gallery/assessments/masteryHomework/index.tsx new file mode 100644 index 00000000..842e5237 --- /dev/null +++ b/src/pages/gallery/assessments/masteryHomework/index.tsx @@ -0,0 +1,248 @@ +import React from "react"; +import Head from "next/head"; +import classnames from "classnames"; +import { Heading } from "../../../../components/Heading"; +import { PageBanner } from "../../../../components/Banner"; +import { LinkButton } from "../../../../components/LinkButton"; +import { QuestionCard } from "../../../../components/QuestionCard"; + +import Image from "../../../../components/Image"; +import unlimitedVariants from "./unlimited-variants.png"; +import unlimitedAttempts from "./one-variant-unlimited-attempts.png"; +import setRetry from "./set-retry-per-variant.png"; +// import question1Image from "./question1.png" +// import question2Image from "./question2.png" +// import question3Image from "./question3.png" +// import question4Image from "./question4.png" + +import { plcourseURL } from "../../../../lib/urls"; + +import styles 
from "./index.module.scss"; + +export default function DefaultHomework() { + return ( + + + Homework | PrairieLearn + + + +
+
+

+ Educational research and learning theory show that a small number of + single-practice problems may not be the most effective learning + strategy for students. Mastery learning theory shows that different + students require different amounts of practice to achieve + proficiency in a given skill, and that all students require repeated + practice. In addition, different learning skills require different + learning approaches.

+ +

+ PrairieLearn can be used to create different learning experiences + for students, adjusted based on individual learning goals. We often + use these homework as formative assessments, where students receive + immediate feedback and have the opportunity to use the feedback to + enhance their learning. +

+ +

+ PrairieLearn supports the development of{" "} + question generators, defined by a set of html and + python code that generate different{" "} + question variants based on randomized parameters. A + homework is defined by a collection of question generators. +

+ + + Drilling for mastery: unlimited variants with single attempt + + +

+ In this configuration, the question generator creates unlimited + question variants, each one with a single attempt. Once students + submit an answer to a question, they receive immediate feedback, + indicating if the question was correct, partially correct, or + incorrect. The feedback may also include more detailed explanation. + No matter if a submission is correct or not, students have the + ability to generate another question variant with a single attempt. + Moreover, students are not penalized when submitting an incorrect + attempt. +

+ + unlimited variants single attempt + +

+ Instructors can define the number of times a student needs to + correctly answer a question variant to earn full credit. Since + question variants present a different version of the question, this + repetition provides students with the needed practice to achieve + mastery. Even after students reach full credit for a given question, + they can continue to work on other question variants for additional + practice. We see students coming back to homework assessments when + reviewing for exams. +

+ +

+ Question generators based on skill levels such as Remember, + Understand and Apply from the + + Bloom's Taxonomy + + often involve a solution process that requires information retrieval + to answer conceptual questions or computation of simple expressions. + These skills are the most appropriate for questions using the + unlimited variants with single retry option.

+ + + Repeated variant: unlimited variants with prescribed number of retry + attempts + + +

+ More sophisticated skill levels can require multiple steps during + the solution process, or combine knowledge of different topics. When + creating more complex questions, instructors may want to provide + students with additional attempts per question variant. This avoids + unnecessary frustration of starting fresh on a new question variant + when a small mistake is made during the solution process. Using this + configuration, students can create a new question variant if they + answer the question correctly or if they use all the retry attempts. +

+ + Unlimited variants prescribed number of attempts + +

+ Similarly to the above configuration, instructors can define the + number of times a student needs to correctly answer a question + variant to earn full credit. +

+ + + Fixed variant: unlimited retry attempts for a single question + variant + + +

+ There are some situations where we want students to receive a single + question variant and have unlimited attempts to complete the + question successfully. This is desirable when the question involves + a lot of computation, or includes specialized coding. +

+ + Unlimited retry attempts for a single question variant +
+
+ +
+
+ Example from demo course + +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+ +
+
+ +
+
+ +
+
+
+ ); +} diff --git a/src/pages/gallery/assessments/masteryHomework/one-variant-unlimited-attempts.png b/src/pages/gallery/assessments/masteryHomework/one-variant-unlimited-attempts.png new file mode 100644 index 00000000..4a9d7cb4 Binary files /dev/null and b/src/pages/gallery/assessments/masteryHomework/one-variant-unlimited-attempts.png differ diff --git a/src/pages/gallery/assessments/masteryHomework/set-retry-per-variant.png b/src/pages/gallery/assessments/masteryHomework/set-retry-per-variant.png new file mode 100644 index 00000000..259e2e62 Binary files /dev/null and b/src/pages/gallery/assessments/masteryHomework/set-retry-per-variant.png differ diff --git a/src/pages/gallery/assessments/masteryHomework/unlimited-variants.png b/src/pages/gallery/assessments/masteryHomework/unlimited-variants.png new file mode 100644 index 00000000..272f22fb Binary files /dev/null and b/src/pages/gallery/assessments/masteryHomework/unlimited-variants.png differ diff --git a/src/pages/gallery/assessments/preLectureNotebook/index.tsx b/src/pages/gallery/assessments/preLectureNotebook/index.tsx new file mode 100644 index 00000000..8f25d023 --- /dev/null +++ b/src/pages/gallery/assessments/preLectureNotebook/index.tsx @@ -0,0 +1,73 @@ +import React from "react"; +import Head from "next/head"; +import classnames from "classnames"; +import { Heading } from "../../../../components/Heading"; +import { PageBanner } from "../../../../components/Banner"; + +export default function DefaultLecture() { + return ( + + + Pre-Lecture | PrairieLearn + + + +
+
+

+ Learning can happen in a variety of ways: in classrooms with a + teacher-centered style, in flipped lectures, individual work or + collaborative activities, asynchronously or synchronously online. + Just like there are different teaching methods, people also learn in + different ways. Some learners prefer to read, some prefer to watch + videos, and others like hands-on work. Mixing different learning + approaches can improve the learner engagement. +

+ +

+ Here we describe how PrairieLearn can be used to assign pre-lecture + activities that combine a mixture of text, equations, plots, videos + and interactive JupyterLabs to introduce a new topic where students + can learn at their own pace. This assessment example includes short + formative questions, so that students can assess their own learning.

+ + Example from demo course + +
Question 1
+

+ This question generator illustrates how we can combine text and + equations to introduce new content to students. It follows with + simple conceptual checkpoints to help students test their own + understanding. Students have two attempts to complete these + checkpoints, and they can create a new question variant if they + answer them correctly or if they use all the retry attempts. At the + end of each question variant, a video with the explanation of the + question is displayed. Note that embedding videos inside questions + is another method of content delivery that can be used with + PrairieLearn. +

+ +
Question 2
+

+ Here we use a JupyterLab notebook for an interactive example to + support the introduction of another concept. Students need to answer + a simple multiple-choice question to test their understanding of the + notebook. +

+ +
Question 3
+

+ The last question includes formative short questions that summarize + the concepts introduced in the previous two questions. Several + parameters are randomized, so that students can get more practice by + generating different question variants, each one with two attempts. +

+
+
+
+ ); +} diff --git a/src/pages/gallery/questions/index.tsx b/src/pages/gallery/questions/index.tsx index 8c11faf2..55a14952 100644 --- a/src/pages/gallery/questions/index.tsx +++ b/src/pages/gallery/questions/index.tsx @@ -45,10 +45,14 @@ const GalleryIndex: React.FC = ({ questions }) => { " > question example - +
+
+ +
+
Questions