From c523c0543682fa5f0943a7ed9397a5d4cbb57232 Mon Sep 17 00:00:00 2001 From: Seth Bernstein Date: Mon, 23 Feb 2026 15:29:35 -0500 Subject: [PATCH 1/6] Fixed a bug where the wrong field was being picked up as an API token, and fixed the LLM peer lookup so it can actually retrieve API keys at call time --- .../runestone/controllers/peer.py | 43 +++++++++++-------- .../assignment/instructor/add_token.html | 2 +- 2 files changed, 25 insertions(+), 20 deletions(-) diff --git a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py index 2212a873c..b7ce06998 100644 --- a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py +++ b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py @@ -1049,16 +1049,6 @@ def _llm_enabled(): return bool(_get_course_openai_key()) #fetch the course-wide openai API key used to enable LLM-based async peer discussion (only works for openai currently) -# def _get_course_openai_key(): -# try: -# token_record = asyncio.get_event_loop().run_until_complete( -# fetch_api_token(course_id=auth.user.course_id, provider="openai") -# ) -# if token_record and token_record.token: -# return token_record.token.strip() -# except Exception: -# logger.exception("Failed to fetch course-wide OpenAI token for peer LLM") -# return "" def _get_course_openai_key(): try: course = db( @@ -1066,18 +1056,33 @@ def _get_course_openai_key(): ).select().first() if not course: - logger.warning("PEER LLM: no course row found") + logger.warning("PEER LLM: no course row found for %s", auth.user.course_name) return "" - logger.warning(f"PEER LLM course_name={auth.user.course_name}") - logger.warning(f"PEER LLM auth.user.course_id={auth.user.course_id}") - logger.warning(f"PEER LLM resolved course.id={course.id if course else None}") - token_record = asyncio.get_event_loop().run_until_complete( - fetch_api_token(course_id=course.id, provider="openai") - ) - if 
token_record and token_record.token: - return token_record.token.strip() + logger.warning("PEER LLM: looking up token for course_id=%s (%s)", + course.id, auth.user.course_name) + rows = db.executesql( + "SELECT token FROM api_tokens " + "WHERE course_id = %s AND provider = %s " + "ORDER BY last_used ASC NULLS FIRST LIMIT 1", + placeholders=[course.id, "openai"], + ) + logger.warning("PEER LLM: executesql returned %d rows", len(rows) if rows else 0) + + if rows and rows[0][0]: + from cryptography.fernet import Fernet + secret = os.environ.get("FERNET_SECRET", "").strip() + if not secret: + raise RuntimeError("FERNET_SECRET environment variable is not set") + f = Fernet(secret.encode() if isinstance(secret, str) else secret) + encrypted = rows[0][0] + decrypted = f.decrypt(encrypted.encode()).decode().strip() + logger.warning("PEER LLM: decrypted key for course %s: %s****", + course.id, decrypted[:4]) + return decrypted + + logger.warning("PEER LLM: no openai token found for course_id=%s", course.id) except Exception: logger.exception("Failed to fetch course-wide OpenAI token for peer LLM") diff --git a/components/rsptx/templates/assignment/instructor/add_token.html b/components/rsptx/templates/assignment/instructor/add_token.html index 76a2a46e3..1e6e1e74a 100644 --- a/components/rsptx/templates/assignment/instructor/add_token.html +++ b/components/rsptx/templates/assignment/instructor/add_token.html @@ -176,7 +176,7 @@

Current Tokens

From 97f87650215382a7f1f13f9a10b9bc25f79b2772 Mon Sep 17 00:00:00 2001 From: Seth Bernstein Date: Tue, 24 Feb 2026 11:24:56 -0500 Subject: [PATCH 2/6] clean up async messaging when there is no API Key --- .../runestone/controllers/peer.py | 57 ++++++++++++++++--- 1 file changed, 49 insertions(+), 8 deletions(-) diff --git a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py index b7ce06998..966f58c9d 100644 --- a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py +++ b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py @@ -787,16 +787,57 @@ def get_async_explainer(): & (db.useinfo.course_id == course_name) ).select(orderby=db.useinfo.id) - if len(messages) == 0: + # Deduplicate sendmessage events — keep last message per student + seen = {} + for row in messages: + try: + msg = row.act.split(":", 2)[2] + except Exception: + msg = row.act + seen[row.sid] = msg + + # Fetch LLM conversation turns for this question + llm_turns = db( + (db.useinfo.event == "pi_llm_turn") + & (db.useinfo.div_id == div_id) + & (db.useinfo.course_id == course_name) + ).select(orderby=db.useinfo.id) + + # Group turns by sid, keeping only the most recent pi_attempt_id per student + llm_by_sid = {} + for row in llm_turns: + try: + turn = json.loads(row.act) + attempt_id = turn.get("pi_attempt_id", "") + turn_index = turn.get("turn_index", 0) + role = turn.get("role", "") + content = turn.get("content", "") + if row.sid not in llm_by_sid: + llm_by_sid[row.sid] = {} + if attempt_id not in llm_by_sid[row.sid]: + llm_by_sid[row.sid][attempt_id] = [] + llm_by_sid[row.sid][attempt_id].append((turn_index, role, content)) + except Exception: + pass + + parts = [] + all_sids = set(list(seen.keys()) + list(llm_by_sid.keys())) + for sid in all_sids: + if sid in seen: + parts.append(f"
  • {sid} said: {seen[sid]}
  • ") + if sid in llm_by_sid: + latest_attempt = max( + llm_by_sid[sid].keys(), + key=lambda a: max(t[0] for t in llm_by_sid[sid][a]) + ) + turns = sorted(llm_by_sid[sid][latest_attempt], key=lambda t: t[0]) + for _, role, content in turns: + label = sid if role == "user" else "LLM Peer" + parts.append(f"
  • {label} said: {content}
  • ") + + if not parts: mess = "Sorry there are no explanations yet." else: - parts = [] - for row in messages: - try: - msg = row.act.split(":", 2)[2] - except Exception: - msg = row.act - parts.append(f"
  • {row.sid} said: {msg}
  • ") mess = "
      " + "".join(parts) + "
    " logger.debug(f"Get message for {div_id}") From 8e375a9ca54bf9453a2db08b3a64929df1b4c7d4 Mon Sep 17 00:00:00 2001 From: Seth Bernstein Date: Tue, 24 Feb 2026 14:22:32 -0500 Subject: [PATCH 3/6] Fix message ordering in async peer chat display --- .../applications/runestone/controllers/peer.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py index 966f58c9d..6b52bb803 100644 --- a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py +++ b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py @@ -787,7 +787,6 @@ def get_async_explainer(): & (db.useinfo.course_id == course_name) ).select(orderby=db.useinfo.id) - # Deduplicate sendmessage events — keep last message per student seen = {} for row in messages: try: @@ -796,15 +795,14 @@ def get_async_explainer(): msg = row.act seen[row.sid] = msg - # Fetch LLM conversation turns for this question llm_turns = db( (db.useinfo.event == "pi_llm_turn") & (db.useinfo.div_id == div_id) & (db.useinfo.course_id == course_name) ).select(orderby=db.useinfo.id) - # Group turns by sid, keeping only the most recent pi_attempt_id per student llm_by_sid = {} + sid_order = [] for row in llm_turns: try: turn = json.loads(row.act) @@ -814,15 +812,19 @@ def get_async_explainer(): content = turn.get("content", "") if row.sid not in llm_by_sid: llm_by_sid[row.sid] = {} + sid_order.append(row.sid) if attempt_id not in llm_by_sid[row.sid]: llm_by_sid[row.sid][attempt_id] = [] llm_by_sid[row.sid][attempt_id].append((turn_index, role, content)) except Exception: pass + for sid in seen: + if sid not in llm_by_sid: + sid_order.append(sid) + parts = [] - all_sids = set(list(seen.keys()) + list(llm_by_sid.keys())) - for sid in all_sids: + for sid in sid_order: if sid in seen: parts.append(f"
  • {sid} said: {seen[sid]}
  • ") if sid in llm_by_sid: @@ -832,8 +834,8 @@ def get_async_explainer(): ) turns = sorted(llm_by_sid[sid][latest_attempt], key=lambda t: t[0]) for _, role, content in turns: - label = sid if role == "user" else "LLM Peer" - parts.append(f"
  • {label} said: {content}
  • ") + if role == "assistant": + parts.append(f"
  • LLM Peer said: {content}
  • ") if not parts: mess = "Sorry there are no explanations yet." From c34931a8f9b96c491ecd0d1c63fb0b9ffc7f4ae5 Mon Sep 17 00:00:00 2001 From: Seth Bernstein Date: Wed, 25 Feb 2026 10:44:17 -0500 Subject: [PATCH 4/6] update prompt and async peer messaging --- .../runestone/controllers/peer.py | 52 ++++++++++++------- 1 file changed, 32 insertions(+), 20 deletions(-) diff --git a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py index 6b52bb803..98b18a8e1 100644 --- a/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py +++ b/bases/rsptx/web2py_server/applications/runestone/controllers/peer.py @@ -782,18 +782,24 @@ def get_async_explainer(): div_id = request.vars.div_id messages = db( - (db.useinfo.event == "sendmessage") + (db.useinfo.event.belongs(["sendmessage", "reflection"])) & (db.useinfo.div_id == div_id) & (db.useinfo.course_id == course_name) ).select(orderby=db.useinfo.id) - seen = {} + all_msgs = [] #list of (sid, msg) in insertion order + last_per_sid = {} for row in messages: - try: - msg = row.act.split(":", 2)[2] - except Exception: + if row.event == "reflection": msg = row.act - seen[row.sid] = msg + else: + try: + msg = row.act.split(":", 2)[2] + except Exception: + msg = row.act + if last_per_sid.get(row.sid) != msg: #skip exact consecutive duplicates only + all_msgs.append((row.sid, msg)) + last_per_sid[row.sid] = msg llm_turns = db( (db.useinfo.event == "pi_llm_turn") @@ -802,7 +808,6 @@ def get_async_explainer(): ).select(orderby=db.useinfo.id) llm_by_sid = {} - sid_order = [] for row in llm_turns: try: turn = json.loads(row.act) @@ -812,22 +817,18 @@ def get_async_explainer(): content = turn.get("content", "") if row.sid not in llm_by_sid: llm_by_sid[row.sid] = {} - sid_order.append(row.sid) if attempt_id not in llm_by_sid[row.sid]: llm_by_sid[row.sid][attempt_id] = [] llm_by_sid[row.sid][attempt_id].append((turn_index, role, content)) 
except Exception: pass - for sid in seen: - if sid not in llm_by_sid: - sid_order.append(sid) - parts = [] - for sid in sid_order: - if sid in seen: - parts.append(f"
  • {sid} said: {seen[sid]}
  • ") - if sid in llm_by_sid: + sids_with_llm_shown = set() + for sid, msg in all_msgs: + parts.append(f"
  • {sid} said: {msg}
  • ") + if sid in llm_by_sid and sid not in sids_with_llm_shown: + sids_with_llm_shown.add(sid) latest_attempt = max( llm_by_sid[sid].keys(), key=lambda a: max(t[0] for t in llm_by_sid[sid][a]) @@ -837,6 +838,14 @@ def get_async_explainer(): if role == "assistant": parts.append(f"
  • LLM Peer said: {content}
  • ") + for sid, attempts in llm_by_sid.items(): + if sid not in sids_with_llm_shown and not any(s == sid for s, _ in all_msgs): + latest_attempt = max(attempts.keys(), key=lambda a: max(t[0] for t in attempts[a])) + turns = sorted(attempts[latest_attempt], key=lambda t: t[0]) + for _, role, content in turns: + if role == "assistant": + parts.append(f"
  • LLM Peer said: {content}
  • ") + + if not parts: mess = "Sorry there are no explanations yet." else: @@ -932,20 +941,23 @@ def get_async_llm_reflection(): "do not use new lines.\n" "do not sound like a teacher.\n" "do not explain step by step.\n" - "do not mention libraries formally.\n" "never say something is right or wrong.\n" + "your answer can shift throughout the conversation\n" "never mention a choice letter as the correct answer.\n" "never clearly describe the final result of the code.\n" "never fully state what the program prints.\n" - "keep reasoning partial or uncertain.\n" "use common misconceptions relating to the specific problem.\n" "refer to code loosely like 'that line' or 'the loop' or 'the head' or 'the print'.\n" "often hedge with uncertainty.\n" - "never agree with the other student's interpretation even if it sounds correct.\n" + #"never agree with the other student's interpretation even if it sounds correct.\n" #porter found when PI is adversarial students disengage "use content from the other multiple choice options in your responses when needed\n" - "maintain a mistaken or incomplete mental model throughout the conversation.\n" + "let your mental model shift slightly under pressure but keep reasoning partial and never fully resolve\n" "if the other student clearly sounds confident or repeats the same answer twice stop debating and tell them to vote again or submit it.\n" "do not continue reasoning after telling them to vote again.\n" + "sometimes question whether you even read the code correctly before forming an opinion.\n" + "occasionally bring up a wrong answer option as if it might be right without committing to it.\n" + "pick an answer choice different than the one the student selected and ask the student to explain why it cannot be correct.\n" + "show reasoning process not conclusions, think out loud rather than arriving anywhere.\n" "focus on reasoning not teaching.\n\n" ) From eca2ee17cddbdc097923e03ab54c941256fdf58c Mon Sep 17 00:00:00 2001 From: Seth 
Bernstein Date: Wed, 25 Feb 2026 13:24:30 -0500 Subject: [PATCH 5/6] fix dark mode for dropdown menu in course home and for chapter navigation --- .../css/runestone-custom-sphinx-bootstrap.css | 21 +++++++++++++++++++ .../runestone/common/css/user-highlights.css | 11 ++++++++++ .../css/runestone-custom-sphinx-bootstrap.css | 21 +++++++++++++++++++ 3 files changed, 53 insertions(+) diff --git a/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css b/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css index f36e690e1..9364e9ce7 100644 --- a/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css +++ b/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css @@ -393,6 +393,27 @@ div.container { background-color: #f5f5f5; } +/*Dark mode-force dropdown menus to black background with white text */ +[data-theme="dark"] .dropdown-menu { + background-color: #000000; + color: #ffffff; +} + +[data-theme="dark"] .dropdown-menu > li > a, +[data-theme="dark"] .dropdown-menu > li > span, +[data-theme="dark"] .dropdown-menu > li > div, +[data-theme="dark"] .dropdown-menu > li > label { + color: #ffffff; +} + +[data-theme="dark"] .dropdown-menu > li > a:hover, +[data-theme="dark"] .dropdown-menu > li > a:focus, +[data-theme="dark"] .dropdown-menu > li > label:hover, +[data-theme="dark"] .dropdown-menu > li > label:focus { + color: #ffffff; + background-color: #333333; +} + .dropdown-menu > li > a { color: var(--grayToWhite); } diff --git a/bases/rsptx/interactives/runestone/common/css/user-highlights.css b/bases/rsptx/interactives/runestone/common/css/user-highlights.css index 089b3e878..49dbb02b8 100644 --- a/bases/rsptx/interactives/runestone/common/css/user-highlights.css +++ b/bases/rsptx/interactives/runestone/common/css/user-highlights.css @@ -99,6 +99,17 @@ div.sphinxsidebar { margin-left: auto; margin-right: auto; } + +[data-theme="dark"] #jump-to-chapter { + 
background-color: #000000; + color: #ffffff; + border-color: #555555; +} + +[data-theme="dark"] #jump-to-chapter option { + background-color: #000000; + color: #ffffff; +} #navigation-links{ background-color: #F8F8F8; border: 1px solid #CCCCCC; diff --git a/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css b/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css index ec4447f6e..496c16487 100644 --- a/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css +++ b/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css @@ -140,6 +140,27 @@ div.section { white-space: nowrap; } +/*Dark mode- force dropdown menus to black background with white text */ +[data-theme="dark"] .dropdown-menu { + background-color: #000000 !important; + color: #ffffff !important; +} + +[data-theme="dark"] .dropdown-menu > li > a, +[data-theme="dark"] .dropdown-menu > li > span, +[data-theme="dark"] .dropdown-menu > li > div, +[data-theme="dark"] .dropdown-menu > li > label { + color: #ffffff !important; +} + +[data-theme="dark"] .dropdown-menu > li > a:hover, +[data-theme="dark"] .dropdown-menu > li > a:focus, +[data-theme="dark"] .dropdown-menu > li > label:hover, +[data-theme="dark"] .dropdown-menu > li > label:focus { + color: #ffffff !important; + background-color: #333333 !important; +} + .loggedinuser { font-weight: bold; } From cacd4e49a8bf17d4778ff3da2b13064fb3c53462 Mon Sep 17 00:00:00 2001 From: Seth Bernstein Date: Thu, 26 Feb 2026 09:14:16 -0500 Subject: [PATCH 6/6] Revert "fix dark mode for dropdown menu in course home and for chapter navigation" This reverts commit eca2ee17cddbdc097923e03ab54c941256fdf58c. 
--- .../css/runestone-custom-sphinx-bootstrap.css | 21 ------------------- .../runestone/common/css/user-highlights.css | 11 ---------- .../css/runestone-custom-sphinx-bootstrap.css | 21 ------------------- 3 files changed, 53 deletions(-) diff --git a/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css b/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css index 9364e9ce7..f36e690e1 100644 --- a/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css +++ b/bases/rsptx/interactives/runestone/common/css/runestone-custom-sphinx-bootstrap.css @@ -393,27 +393,6 @@ div.container { background-color: #f5f5f5; } -/*Dark mode-force dropdown menus to black background with white text */ -[data-theme="dark"] .dropdown-menu { - background-color: #000000; - color: #ffffff; -} - -[data-theme="dark"] .dropdown-menu > li > a, -[data-theme="dark"] .dropdown-menu > li > span, -[data-theme="dark"] .dropdown-menu > li > div, -[data-theme="dark"] .dropdown-menu > li > label { - color: #ffffff; -} - -[data-theme="dark"] .dropdown-menu > li > a:hover, -[data-theme="dark"] .dropdown-menu > li > a:focus, -[data-theme="dark"] .dropdown-menu > li > label:hover, -[data-theme="dark"] .dropdown-menu > li > label:focus { - color: #ffffff; - background-color: #333333; -} - .dropdown-menu > li > a { color: var(--grayToWhite); } diff --git a/bases/rsptx/interactives/runestone/common/css/user-highlights.css b/bases/rsptx/interactives/runestone/common/css/user-highlights.css index 49dbb02b8..089b3e878 100644 --- a/bases/rsptx/interactives/runestone/common/css/user-highlights.css +++ b/bases/rsptx/interactives/runestone/common/css/user-highlights.css @@ -99,17 +99,6 @@ div.sphinxsidebar { margin-left: auto; margin-right: auto; } - -[data-theme="dark"] #jump-to-chapter { - background-color: #000000; - color: #ffffff; - border-color: #555555; -} - -[data-theme="dark"] #jump-to-chapter option { - background-color: 
#000000; - color: #ffffff; -} #navigation-links{ background-color: #F8F8F8; border: 1px solid #CCCCCC; diff --git a/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css b/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css index 496c16487..ec4447f6e 100644 --- a/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css +++ b/components/rsptx/templates/staticAssets/css/runestone-custom-sphinx-bootstrap.css @@ -140,27 +140,6 @@ div.section { white-space: nowrap; } -/*Dark mode- force dropdown menus to black background with white text */ -[data-theme="dark"] .dropdown-menu { - background-color: #000000 !important; - color: #ffffff !important; -} - -[data-theme="dark"] .dropdown-menu > li > a, -[data-theme="dark"] .dropdown-menu > li > span, -[data-theme="dark"] .dropdown-menu > li > div, -[data-theme="dark"] .dropdown-menu > li > label { - color: #ffffff !important; -} - -[data-theme="dark"] .dropdown-menu > li > a:hover, -[data-theme="dark"] .dropdown-menu > li > a:focus, -[data-theme="dark"] .dropdown-menu > li > label:hover, -[data-theme="dark"] .dropdown-menu > li > label:focus { - color: #ffffff !important; - background-color: #333333 !important; -} - .loggedinuser { font-weight: bold; }