From 1a56b766822bad82a7dd38efe20705f81d7bcece Mon Sep 17 00:00:00 2001
From: Nuno Busch
Date: Mon, 28 Jul 2025 19:22:25 +0200
Subject: [PATCH 1/2] extending debugging section

---
 .../bayesian_cognitive_modeling.Rmd | 47 +++++++++++++++++++
 .../bayesian_cognitive_modeling.md  | 47 ++++++++++++++++++-
 2 files changed, 93 insertions(+), 1 deletion(-)

diff --git a/BayesianCognitiveModeling/bayesian_cognitive_modeling.Rmd b/BayesianCognitiveModeling/bayesian_cognitive_modeling.Rmd
index 1cd675d..bf304bb 100644
--- a/BayesianCognitiveModeling/bayesian_cognitive_modeling.Rmd
+++ b/BayesianCognitiveModeling/bayesian_cognitive_modeling.Rmd
@@ -1636,3 +1636,50 @@ As always in everyday life when working with code, it is likely that you will ru
 While there is an unpredictable number of errors that JAGS can throw out, here, we treat the below list as a work-in-progress documentation of common errors that we have encountered, and what might be potential ways to solve them.
 
 We will continue expanding this section.
+
+### I.) Node inconsistent with parents
+
+Sometimes you might get errors of the following format:
+
+```
+Error in checkForRemoteErrors(val) :
+  8 nodes produced errors; first error: Error in node choices[31,22]
+Node inconsistent with parents
+```
+
+These errors are very common in JAGS, and it is often not immediately clear what causes them. It is advisable to take a very close look at the underlying data for the specific data point.
+In this example, this means checking the outcome and probability information of problem 31 for person 22 that the model has to work with, and the choice that the individual made. Here, the individual chose option B (choice = 0):
+
+```
+> choices[31,22]
+07-JE-SU-MU
+          0
+```
+
+... even though the choice problem had a clearly dominant option: a sure win in option A versus a sure high loss in option B:
+
+```
+> prospectsA[31,,22]
+[1]  150    1    0    0 -730    0
+> prospectsB[31,,22]
+[1]   810     0     0     0 -1000     1
+```
+
+Consequently, the model would predict a choice of A, whereas the participant empirically chose otherwise.
+Safe options like this (where one outcome has probability 1) can appear frequently in experience-based choice data when people do not sample a lot.
+
+Here, the JAGS error might be caused by a numerical problem: the model predicts a choice probability of 0 for the choice that was actually observed, so this data point has zero likelihood, which causes the sampler to crash.
+We can circumvent the problem in this case by constraining the predicted choice probability to a range between 0.00001 and 0.99999, for example.
+That is, in the model code, we replace
+
+```
+choices[i,j] ~ dbern(binval[i,j])
+```
+with
+```
+choices[i,j] ~ dbern(min(max(binval[i,j],0.00001),0.99999))
+```
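+
+For orientation, the following minimal sketch shows where this constrained likelihood sits in the model: inside the trial-by-person loop of the JAGS model block. The loop bounds `nProblems` and `nSubjects` are placeholder names for illustration, and `binval[i,j]` is assumed to be the model-predicted probability of choosing option A:
+
+```
+model{
+  # ... priors and CPT valuation omitted ...
+  for (j in 1:nSubjects){ # persons
+    for (i in 1:nProblems){ # choice problems
+      # keep the predicted choice probability away from exactly 0 and 1
+      choices[i,j] ~ dbern(min(max(binval[i,j],0.00001),0.99999))
+    }
+  }
+}
+```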
+
+
+
+
diff --git a/BayesianCognitiveModeling/bayesian_cognitive_modeling.md b/BayesianCognitiveModeling/bayesian_cognitive_modeling.md
index bdcf7dd..9a9a465 100644
--- a/BayesianCognitiveModeling/bayesian_cognitive_modeling.md
+++ b/BayesianCognitiveModeling/bayesian_cognitive_modeling.md
@@ -2,7 +2,7 @@ Bayesian Cognitive Modeling Tutorial
 ================
 Linus Hof & Nuno Busch
 
-Last updated: 2025-07-25
+Last updated: 2025-07-28
 
 ## Part I: Risky Choice and its Models
 
@@ -2416,3 +2416,48 @@ as a work-in-progress documentation of common errors that we have
 encountered, and what might be potential ways to solve them.
 
 We will continue expanding this section.
+
+### I.) Node inconsistent with parents
+
+Sometimes you might get errors of the following format:
+
+    Error in checkForRemoteErrors(val) :
+      8 nodes produced errors; first error: Error in node choices[31,22]
+    Node inconsistent with parents
+
+These errors are very common in JAGS, and it is often not immediately
+clear what causes them. It is advisable to take a very close look at
+the underlying data for the specific data point. In this example, this
+means checking the outcome and probability information of problem 31
+for person 22 that the model has to work with, and the choice that the
+individual made. Here, the individual chose option B (choice = 0):
+
+    > choices[31,22]
+    07-JE-SU-MU
+              0
+
+… even though the choice problem had a clearly dominant option: a sure
+win in option A versus a sure high loss in option B:
+
+    > prospectsA[31,,22]
+    [1]  150    1    0    0 -730    0
+    > prospectsB[31,,22]
+    [1]   810     0     0     0 -1000     1
+
+Consequently, the model would predict a choice of A, whereas the
+participant empirically chose otherwise. Safe options like this (where
+one outcome has probability 1) can appear frequently in
+experience-based choice data when people do not sample a lot.
+
+Here, the JAGS error might be caused by a numerical problem: the model
+predicts a choice probability of 0 for the choice that was actually
+observed, so this data point has zero likelihood, which causes the
+sampler to crash. We can circumvent the problem in this case by
+constraining the predicted choice probability to a range between
+0.00001 and 0.99999, for example. That is, in the model code, we
+replace
+
+    choices[i,j] ~ dbern(binval[i,j])
+
+with
+
+    choices[i,j] ~ dbern(min(max(binval[i,j],0.00001),0.99999))
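+
+For orientation, the following minimal sketch shows where this
+constrained likelihood sits in the model: inside the trial-by-person
+loop of the JAGS model block. The loop bounds `nProblems` and
+`nSubjects` are placeholder names for illustration, and `binval[i,j]`
+is assumed to be the model-predicted probability of choosing option A:
+
+    model{
+      # ... priors and CPT valuation omitted ...
+      for (j in 1:nSubjects){ # persons
+        for (i in 1:nProblems){ # choice problems
+          # keep the predicted choice probability away from exactly 0 and 1
+          choices[i,j] ~ dbern(min(max(binval[i,j],0.00001),0.99999))
+        }
+      }
+    }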
From 6b4ae5969d7de59e72f1ead861ef36a84cd946e7 Mon Sep 17 00:00:00 2001
From: Nuno Busch
Date: Thu, 21 Aug 2025 23:45:21 +0200
Subject: [PATCH 2/2] added fontsize and condition arguments to value
 function. still to be added to weighting functions.

---
 .../helper_functions/CPT_plotting.R | 53 +++++++++++++++----
 1 file changed, 44 insertions(+), 9 deletions(-)

diff --git a/BayesianCognitiveModeling/helper_functions/CPT_plotting.R b/BayesianCognitiveModeling/helper_functions/CPT_plotting.R
index 6b39262..434fd14 100644
--- a/BayesianCognitiveModeling/helper_functions/CPT_plotting.R
+++ b/BayesianCognitiveModeling/helper_functions/CPT_plotting.R
@@ -7,7 +7,9 @@ color = rgb(1/255, 100/255, 200/255, alpha = 1)
 ####### value function TK92 ##########
 ######################################
 vf_TK92 <- function(samples = samples, # JAGS output object name
-                    color = rgb(1/255, 100/255, 200/255, alpha = 1), # color for lines
+                    color = rgb(1/255, 100/255, 200/255, alpha = 1), # color for lines
+                    fontsize = 1, # controls font size of title, axis, tick, and legend text
+                    condition = "", # condition label appended to the plot title
                     alpha_subj = "alpha", # individual parameter names
                     lambda_subj = "lambda",
                     beta_subj = "alpha", # by default, the function will assume no separate curvature parameter for the loss domain. if there is one, that can be specified here
@@ -14,15 +16,48 @@ vf_TK92 <- function(samples = samples, # JAGS output object name
                     alpha_mean = "mu.alpha", # group-level parameter names
                     lambda_mean = "mu.lambda",
                     beta_mean = "mu.alpha" # by default, the function will assume no separate curvature parameter for the loss domain. if there is one, that can be specified here
                     ) {
+
+  par(mfrow=c(1,1))
   a <- seq(-100, 100, by=0.1)
-  plot(a, a, "l", axes=FALSE, xlab='', ylab='', cex.axis=.7, lty=2, lwd=1, ylim=c(-10, 10), xlim=c(-20, 20), col="white")
-  par(xpd=FALSE)
-  title(paste("Value function"), cex.main=1.5, font.main=1)
-  axis(1, seq(from=-20, to=20, by=5), pos=0, cex.axis=.6, mgp=c(3, .1, 1), tck=-.01)
-  axis(2, seq(from=-10, to=10, by=2), pos=0, cex.axis=.6, tck=-.01, las=1, mgp=c(3, 0.6, 0))
-  mtext(side=1, text="Outcome", line=1)
-  mtext(side=2, text="Subjective Value", line=.5)
+
+  # scale gaps to font size
+  lab_gap <- 0.6 * fontsize # distance of tick labels from axis line
+  title_gap <- 1.2 + 0.4*(fontsize - 1) # distance of axis titles (mtext) from plot
+
+  plot(a, a, type = "l", axes = FALSE, xlab = "", ylab = "",
+       cex.axis = fontsize * 1.1, lty = 2, lwd = 1,
+       ylim = c(-10, 10), xlim = c(-20, 20), col = "white")
+  par(xpd = FALSE)
+  title(paste0("Value function", condition), cex.main = fontsize * 1.5, font.main = 1)
+
+  axis(1, seq(-20, 20, by = 5), pos = 0,
+       cex.axis = 0.9 * fontsize, tck = -0.01, mgp = c(3, lab_gap, 0))
+  axis(2, seq(-10, 10, by = 2), pos = 0,
+       cex.axis = 0.9 * fontsize, tck = -0.01, las = 1, mgp = c(3, lab_gap, 0))
+
+  mtext(text = "Outcome", side = 1, line = title_gap, cex = fontsize)
+  mtext(text = "Subjective Value", side = 2, line = title_gap, cex = fontsize)
+
+
+
+  # plot(a, a, "l", axes=FALSE, xlab='', ylab='',
+  #      cex.axis=fontsize*1.1, # .6,
+  #      lty=2, lwd=1, ylim=c(-10, 10), xlim=c(-20, 20), col="white")
+  # par(xpd=FALSE)
+  # title(paste("Value function"), cex.main=1.5, font.main=1)
+  # axis(1, seq(from=-20, to=20, by=5), pos=0,
+  #      cex.axis= fontsize, # .6,
+  #      mgp=c(3, .1, 1), tck=-.01)
+  # axis(2, seq(from=-10, to=10, by=2), pos=0,
+  #      cex.axis=fontsize, # .6,
+  #      tck=-.01, las=1, mgp=c(3, 0.6, 0))
+  # mtext(side=1, text="Outcome",
+  #      cex = fontsize,
+  #      line=1)
+  # mtext(side=2, text="Subjective Value",
+  #      cex = fontsize,
+  #      line=.5)
 
   # plot dashed line
   lines(a,a,col="black",lty=2,lwd=1)
@@ -66,7 +101,7 @@ vf_TK92 <- function(samples = samples, # JAGS output object name
   legend(1, -2, inset=0,
          legend = c(expression("Group-level estimate"),
                     expression("Individual estimates")),
-         cex = 1.2,
+         cex = 0.9 * fontsize,
          col = c(color, color_subj),
          horiz = F,bty = "n",
         lty = 1, # Solid line
         lwd = 2 # Line width
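
As a usage sketch for the two new arguments (a hypothetical call, assuming `samples` is a JAGS output object whose monitored node names match the defaults above, i.e. "alpha", "lambda", "mu.alpha", "mu.lambda"):

    # enlarge title, axis, tick, and legend text by 20% and append a
    # condition label to the plot title; note the leading space, since
    # the title is built with paste0("Value function", condition)
    vf_TK92(samples = samples, fontsize = 1.2, condition = " (gain trials)")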