diff --git a/ddToolbox/CODA/CODA.m b/ddToolbox/CODA/CODA.m index b21b7955..2ab299ef 100644 --- a/ddToolbox/CODA/CODA.m +++ b/ddToolbox/CODA/CODA.m @@ -213,6 +213,7 @@ function plotUnivariateSummaries(obj, variables, plotOptions, idNames) % call plotting function for each variable (subplot) for n = 1:numel(variables) subplot( subplot_handles(n) ) + hold off pointEstVals = obj.getStats(plotOptions.pointEstimateType, variables{n}); hdi =[obj.getStats('hdi_low',variables{n}),... diff --git a/ddToolbox/models/Model.m b/ddToolbox/models/Model.m index 659ea042..bd2b30e9 100644 --- a/ddToolbox/models/Model.m +++ b/ddToolbox/models/Model.m @@ -1,6 +1,6 @@ classdef (Abstract) Model - %Model Base class model. - + %Model Base class model. + % Allow acces to these via Model, but we still only get access to these % class's public interface. properties (Hidden = true, SetAccess = protected, GetAccess = public) @@ -27,7 +27,7 @@ methods (Abstract, Access = public) plot() plotExperimentOverviewFigure() - dispModelInfo() + dispModelInfo() end methods (Abstract, Access = protected) initialiseChainValues() @@ -71,14 +71,14 @@ obj.varList.responseErrorParams(2).label = 'error rate, $\epsilon$'; end - + function export(obj) - %model.export Exports summary information about the model - % model.EXPORT() exports the following: - % - extensive MCMC convergence information - % - an experiment-level table of point estimates and model - % checking quantities - + %model.export Exports summary information about the model + % model.EXPORT() exports the following: + % - extensive MCMC convergence information + % - an experiment-level table of point estimates and model + % checking quantities + % TODO: This should be a method of CODA % TODO: better function name. What does it do? Calculate or % export, or both? @@ -87,8 +87,8 @@ function export(obj) obj.data.getIDnames('all')) exporter = ResultsExporter(obj.coda,... - obj.data,... - obj.getAUCasTable,... + obj.data,... + obj.getAUCasTable,... obj.postPred,... obj.varList,... obj.plotOptions); @@ -98,44 +98,44 @@ function export(obj) end end - - methods (Hidden = true) - - function obj = conductInference(obj) - % Not intended to be called directly by user - - % pre-sampling preparation - obj.observedData = obj.constructObservedDataForMCMC(); - path_of_model_file = makeProbModelsPath(obj.modelFilename, obj.samplerType); - % sampling - samplerFunction = obj.selectSampler(obj.samplerType); - obj.coda = samplerFunction(... - path_of_model_file,... - obj.observedData,... - obj.mcmcParams,... - obj.initialiseChainValues(obj.mcmcParams.nchains),... - obj.varList.monitored); - % This is a separate method, to allow for overriding in sub classes - obj = obj.postSamplingActivities(); - end - end + + methods (Hidden = true) + + function obj = conductInference(obj) + % Not intended to be called directly by user + + % pre-sampling preparation + obj.observedData = obj.constructObservedDataForMCMC(); + path_of_model_file = makeProbModelsPath(obj.modelFilename, obj.samplerType); + % sampling + samplerFunction = obj.selectSampler(obj.samplerType); + obj.coda = samplerFunction(... + path_of_model_file,... + obj.observedData,... + obj.mcmcParams,... + obj.initialiseChainValues(obj.mcmcParams.nchains),... 
+ obj.varList.monitored); + % This is a separate method, to allow for overriding in sub classes + obj = obj.postSamplingActivities(); + end + end %% GETTERS methods function [predicted_subjective_values] = getInferredPresentSubjectiveValues(obj) - % info = model.getInferredPresentSubjectiveValues Returns information - % on the dataset along with inferred present subjective values of - % each of the objective offers present in the dataset. - % - % This is useful if you want to do trial-by-trial analysis, or - % have other measures (eg electrophysiological, or imagine) and - % would like to examine how these measures relate to the inferred - % present subjective values of the rewards run in the experiment. - % - % NOTE: We do have access to full distributions of inferred present - % subjective values, but currently we return the point estimates. + % info = model.getInferredPresentSubjectiveValues Returns information + % on the dataset along with inferred present subjective values of + % each of the objective offers present in the dataset. + % + % This is useful if you want to do trial-by-trial analysis, or + % have other measures (eg electrophysiological, or imagine) and + % would like to examine how these measures relate to the inferred + % present subjective values of the rewards run in the experiment. + % + % NOTE: We do have access to full distributions of inferred present + % subjective values, but currently we return the point estimates. %% return point estimates of present subjective values... all_data_table = obj.data.groupTable; @@ -148,15 +148,15 @@ function export(obj) % predicted_subjective_values.A_full_posterior = % predicted_subjective_values.B_full_posterior = end - - function [auc] = getAUCasStochasticArray(obj) - %getAUCasStochasticArray() returns an array of Stochastic objects each describing a posterior distribution of AUC. - - auc = obj.auc; - end - - function [T] = getAUCasTable(obj) - %getAUCasTable() returns a Table with AUC information. + + function [auc] = getAUCasStochasticArray(obj) + %getAUCasStochasticArray() returns an array of Stochastic objects each describing a posterior distribution of AUC. + + auc = obj.auc; + end + + function [T] = getAUCasTable(obj) + %getAUCasTable() returns a Table with AUC information. stochasticArray = obj.auc; @@ -180,19 +180,19 @@ function export(obj) % information rather than paste it on here after - to avoid any % possible error with reordering for example. T.Properties.RowNames = obj.data.getIDnames('experiments'); - end - - end - - methods (Hidden = true) - - function discountFunctionVariables = getDiscountFunctionVariables(obj) - discountFunctionVariables = {obj.varList.discountFunctionParams.name}; - end - - function responseErrorVariables = getResponseErrorVariables(obj) - responseErrorVariables = {obj.varList.responseErrorParams.name}; - end + end + + end + + methods (Hidden = true) + + function discountFunctionVariables = getDiscountFunctionVariables(obj) + discountFunctionVariables = {obj.varList.discountFunctionParams.name}; + end + + function responseErrorVariables = getResponseErrorVariables(obj) + responseErrorVariables = {obj.varList.responseErrorParams.name}; + end end @@ -259,7 +259,7 @@ function export(obj) end function auc = calcAreaUnderCurveForAll(obj, MAX_DELAY) - % Calculate Area Under Curve. + % Calculate Area Under Curve. 
% Returns an array of Stochastic objects % TODO: store experiment filenames along with the auc data to @@ -287,12 +287,12 @@ function export(obj) end function displayPublicMethods(obj) - display('For more info (assuming your model is named ''model'') then you can type') - display(' >> help model.methodName') - linebreak - display('Available methods are:') - methodsAvailable = methods(obj); - disp(methodsAvailable) + display('For more info (assuming your model is named ''model'') then you can type') + display(' >> help model.methodName') + linebreak + display('Available methods are:') + methodsAvailable = methods(obj); + disp(methodsAvailable) end function obj = addUnobservedParticipant(obj, str) @@ -346,16 +346,16 @@ function displayPublicMethods(obj) methods (Access = public) function plotDiscountFunction(obj, ind, varargin) - %model.PLOTDISCOUNTFUNCTION(N) plots a discount - % function where H is a handle to a subplot, and IND is the nth - % experiment to plot. - % - % Optional arguments as key/value pairs - % 'axisHandle' - handle to axes - % 'figureHandle' - handle to figure - - parseFigureAndAxisRequested(varargin{:}) - + %model.PLOTDISCOUNTFUNCTION(N) plots a discount + % function where H is a handle to a subplot, and IND is the nth + % experiment to plot. + % + % Optional arguments as key/value pairs + % 'axisHandle' - handle to axes + % 'figureHandle' - handle to figure + + parseFigureAndAxisRequested(varargin{:}); + p = inputParser; p.FunctionName = mfilename; p.KeepUnmatched = true; @@ -379,19 +379,19 @@ function plotDiscountFunction(obj, ind, varargin) discountFunction.plot(plotOptions); end - - function plotDiscountFunctionGrid(obj, varargin) - %plotDiscountFunctionGrid Plots a montage of discount functions - % model.PLOTDISCOUNTFUNCTIONGRID() plots discount functions for - % all experiment, laid out in a grid. - % - % Optional arguments as key/value pairs - % 'axisHandle' - handle to axes - % 'figureHandle' - handle to figure - - [figureHandle, axisHandle] = parseFigureAndAxisRequested(varargin{:}); - - %figure(7), clf + + function plotDiscountFunctionGrid(obj, varargin) + %plotDiscountFunctionGrid Plots a montage of discount functions + % model.PLOTDISCOUNTFUNCTIONGRID() plots discount functions for + % all experiment, laid out in a grid. + % + % Optional arguments as key/value pairs + % 'axisHandle' - handle to axes + % 'figureHandle' - handle to figure + + [figureHandle, ~] = parseFigureAndAxisRequested(varargin{:}); + + figure(figureHandle), clf latex_fig(12, 11,11) drawnow @@ -409,29 +409,29 @@ function plotDiscountFunctionGrid(obj, varargin) % Iterate over files, plotting disp('Plotting...') for n = 1:numel(names) - obj.plotDiscountFunction(n, 'axisHandle', subplot_handles(n)) - title(names{n}, 'FontSize',10) - set(gca,'FontSize',10) + obj.plotDiscountFunction(n, 'axisHandle', subplot_handles(n)); + title(names{n}, 'FontSize',10); + set(gca, 'FontSize',10); end drawnow end function plotDiscountFunctionsOverlaid(obj, varargin) - %plotDiscountFunctionsOverlaid Plots all discount functions in one figure - % model.PLOTDISCOUNTFUNCTIONSOVERLAID() plots discount functions for - % all experiment, overlaid in one figure. - % - % Optional arguments as key/value pairs - % 'axisHandle' - handle to axes - % 'figureHandle' - handle to figure - + %plotDiscountFunctionsOverlaid Plots all discount functions in one figure + % model.PLOTDISCOUNTFUNCTIONSOVERLAID() plots discount functions for + % all experiment, overlaid in one figure. 
+ % + % Optional arguments as key/value pairs + % 'axisHandle' - handle to axes + % 'figureHandle' - handle to figure + latex_fig(12, 8,6) clf drawnow - [figureHandle, axisHandle] = parseFigureAndAxisRequested(varargin{:}); - + [figureHandle, axisHandle] = parseFigureAndAxisRequested(varargin{:}); + % don't want the group level estimate, so not asking for 'all' names = obj.data.getIDnames('experiments'); @@ -450,12 +450,11 @@ function plotDiscountFunctionsOverlaid(obj, varargin) % set(gca,'FontSize',10) drawnow end - + function plotPosteriorAUC(obj, ind, varargin) - %model.plotPosteriorAUC(N) plots a discount - % function where H is a handle to a subplot, and IND is the nth - % experiment to plot. + %model.plotPosteriorAUC(N, varargin) plots the posterior distribution + % of AUC values where N is the Nth experiment. % % Optional arguments as key/value pairs % 'axisHandle' - handle to axes @@ -470,68 +469,74 @@ function plotPosteriorAUC(obj, ind, varargin) set(gca, 'XLim', [0, a(2)]); end - - function plotUnivarateSummary(obj, varargin) - %plotUnivarateSummary Creates a forest plot of univariate summary stats about the model parameters requested - - p = inputParser; - p.FunctionName = mfilename; - p.addParameter('variablesToPlot', 'all', @(x) isstr(x)|iscellstr(x)); - p.parse(varargin{:}); - - if iscellstr(p.Results.variablesToPlot) - % user is providing a cell array of strings, which we will assume corresponds to specific variable names. - variables = p.Results.variablesToPlot; - else - % user privided a string, hopefully one of the acceptable inputs below - switch p.Results.variablesToPlot - case{'all'} - variables = obj.varList.participantLevel; - otherwise - error('unrecognised input provided for ''variablesToPlot''.') - end - end - - obj.coda.plotUnivariateSummaries(variables,... - obj.plotOptions,... - obj.data.getParticipantNames()); - - plot_savename = 'UnivariateSummary'; - obj.pleaseExportFigure(plot_savename) - - end - - function plotPosteriorClusterPlot(obj, varargin) - %plotPosteriorClusterPlot(H) Plots posterior distributions for - % all experiments, in the axis handle H. - % - % Optional arguments as key/value pairs - % 'axisHandle' - handle to axes - % 'figureHandle' - handle to figure - - parseFigureAndAxisRequested(varargin{:}) - - % TODO: put clusterPlot function code here rather than have it as a separate function? - clusterPlot(... - obj.coda,... - obj.data,... - [1 0 0],... - obj.plotOptions,... - obj.varList.discountFunctionParams) - end + + function plotUnivarateSummary(obj, varargin) + %plotUnivarateSummary(varargin) Produces a univariate summary plot + % Creates a forest plot of univariate summary stats + % + % Optional arguments as key/value pairs + % 'axisHandle' - handle to axes + % 'figureHandle' - handle to figure + + p = inputParser; + p.FunctionName = mfilename; + p.addParameter('variablesToPlot', 'all', @(x) isstr(x)|iscellstr(x)); + p.parse(varargin{:}); + + if iscellstr(p.Results.variablesToPlot) + % user is providing a cell array of strings, which we will assume + % corresponds to specific variable names. + variables = p.Results.variablesToPlot; + else + % user privided a string, hopefully one of the acceptable inputs below + switch p.Results.variablesToPlot + case{'all'} + variables = obj.varList.participantLevel; + otherwise + error('unrecognised input provided for ''variablesToPlot''.') + end + end + + obj.coda.plotUnivariateSummaries(variables,... + obj.plotOptions,... 
+ obj.data.getParticipantNames()); + + plot_savename = 'UnivariateSummary'; + obj.pleaseExportFigure(plot_savename) + + end + + function plotPosteriorClusterPlot(obj, varargin) + %plotPosteriorClusterPlot(H, varargin) Plots posterior distributions + % for all experiments. + % + % Optional arguments as key/value pairs + % 'axisHandle' - handle to axes + % 'figureHandle' - handle to figure + + parseFigureAndAxisRequested(varargin{:}) + + % TODO: put clusterPlot function code here rather than have it as a separate function? + clusterPlot(... + obj.coda,... + obj.data,... + [1 0 0],... + obj.plotOptions,... + obj.varList.discountFunctionParams) + end end methods (Access = protected) - - function pleaseExportFigure(obj, savename) - if obj.plotOptions.shouldExportPlots - myExport(obj.plotOptions.savePath,... - savename,... - 'suffix', obj.modelFilename,... - 'formats', obj.plotOptions.exportFormats) - end - end + + function pleaseExportFigure(obj, savename) + if obj.plotOptions.shouldExportPlots + myExport(obj.plotOptions.savePath,... + savename,... + 'suffix', obj.modelFilename,... + 'formats', obj.plotOptions.exportFormats) + end + end function plotAllExperimentFigures(obj) % this is a wrapper function to loop over all data files, producing multi-panel figures. This is implemented by the plotExperimentOverviewFigure method, which may be overridden by subclasses if need be. @@ -554,32 +559,32 @@ function plotAllExperimentFigures(obj) close(fh); end end - - - function plotPosteriorErrorParams(obj, ind, varargin) - %plotPosteriorErrorParams(H, N) Plots the posterior distributions over - % response error related parameters, for experiment N. The plot is - % sent to subplot axis H. - % - % Optional arguments as key/value pairs - % 'axisHandle' - handle to axes - % 'figureHandle' - handle to figure - - [figureHandle, axisHandle] = parseFigureAndAxisRequested(varargin{:}); - - responseErrorVariables = obj.getResponseErrorVariables(); - - % TODO: remove duplication of "opts" in mulitple places, but also should perhaps be a single coherent structure in the first place. - opts.pointEstimateType = obj.plotOptions.pointEstimateType; - opts.timeUnits = obj.timeUnits; - opts.dataPlotType = obj.plotOptions.dataPlotType; - - obj.coda.plot_bivariate_distribution(axisHandle,... - responseErrorVariables(1),... - responseErrorVariables(2),... - ind,... - opts) - end + + + function plotPosteriorErrorParams(obj, ind, varargin) + %plotPosteriorErrorParams(H, N) Plots the posterior distributions over + % response error related parameters, for experiment N. The plot is + % sent to subplot axis H. + % + % Optional arguments as key/value pairs + % 'axisHandle' - handle to axes + % 'figureHandle' - handle to figure + + [figureHandle, axisHandle] = parseFigureAndAxisRequested(varargin{:}); + + responseErrorVariables = obj.getResponseErrorVariables(); + + % TODO: remove duplication of "opts" in mulitple places, but also should perhaps be a single coherent structure in the first place. + opts.pointEstimateType = obj.plotOptions.pointEstimateType; + opts.timeUnits = obj.timeUnits; + opts.dataPlotType = obj.plotOptions.dataPlotType; + + obj.coda.plot_bivariate_distribution(axisHandle,... + responseErrorVariables(1),... + responseErrorVariables(2),... + ind,... 
+ opts) + end end @@ -604,42 +609,42 @@ function plotPosteriorErrorParams(obj, ind, varargin) end end - - methods (Hidden = true) - - function disp(obj) - - disp('Discounting model object') - linebreak - fprintf('Model class: %s\n', class(obj)) + + methods (Hidden = true) + + function disp(obj) + + disp('Discounting model object') + linebreak + fprintf('Model class: %s\n', class(obj)) obj.dispModelInfo - - linebreak + + linebreak disp(obj.data) - linebreak + linebreak disp('MCMC options used in parameter estimation:') - disp(obj.mcmcParams) - - linebreak + disp(obj.mcmcParams) + + linebreak disp('Model parameters (Discounting related):') - disp(obj.getDiscountFunctionVariables()) + disp(obj.getDiscountFunctionVariables()) disp('Model parameters (Response error related):') - disp(obj.getResponseErrorVariables()) + disp(obj.getResponseErrorVariables()) - linebreak + linebreak disp('Point estimates of parameters:') - exporter = ResultsExporter(obj.coda,... + exporter = ResultsExporter(obj.coda,... obj.data,... obj.getAUCasTable,... obj.postPred,... obj.varList,... obj.plotOptions); exporter.printToScreen(); - - linebreak - obj.displayPublicMethods() - end - end + + linebreak + obj.displayPublicMethods() + end + end end diff --git a/ddToolbox/models/parametric_models/beta_delta_models/BetaDelta.m b/ddToolbox/models/parametric_models/beta_delta_models/BetaDelta.m new file mode 100644 index 00000000..2fc9cefc --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/BetaDelta.m @@ -0,0 +1,30 @@ +classdef (Abstract) BetaDelta < Parametric + %BetaDelta BetaDelta is a subclass of Parametric for examining the two-parameter beta-delta (quasi-hyperbolic) discounting function. + + methods (Access = public) + + function obj = BetaDelta(data, varargin) + obj = obj@Parametric(data, varargin{:}); + + obj.dfClass = @DF_BetaDelta; + + % Create variables + obj.varList.participantLevel = {'beta','delta','epsilon'}; + obj.varList.monitored = {'beta','delta', 'alpha','epsilon', 'Rpostpred', 'P', 'VA', 'VB'}; + obj.varList.discountFunctionParams(1).name = 'beta'; + obj.varList.discountFunctionParams(1).label = '$\beta$'; + obj.varList.discountFunctionParams(2).name = 'delta'; + obj.varList.discountFunctionParams(2).label = '$\delta$'; + + obj.plotOptions.dataPlotType = '2D'; + end + + end + + methods (Hidden = true) + function dispModelInfo(obj) + display('Discount function: V = beta * delta^delay') + end + end + +end diff --git a/ddToolbox/models/parametric_models/beta_delta_models/DF_BetaDelta.m b/ddToolbox/models/parametric_models/beta_delta_models/DF_BetaDelta.m new file mode 100644 index 00000000..021409fa --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/DF_BetaDelta.m @@ -0,0 +1,33 @@ +classdef DF_BetaDelta < DF1 +%DF_BetaDelta Deterministic object for the Beta Delta discount function.
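For reference, the quasi-hyperbolic form that BetaDelta advertises ('V = beta * delta^delay', with V = 1 at a delay of zero) and that DF_BetaDelta.function_evaluation implements below can be checked with a few lines of plain MATLAB. This is an illustrative sketch only, not toolbox code; the beta and delta values are the prior means quoted in the accompanying JAGS files, and the delays are made up.

    % Illustrative sketch (not part of the toolbox): evaluate the beta-delta
    % discount fraction V(d) = beta * delta^d for d > 0, with V(0) = 1.
    beta  = 0.84;            % prior mean for beta quoted in the JAGS files
    delta = 0.9995;          % prior mean for delta quoted in the JAGS files
    delay = [0 7 30 365];    % example delays (arbitrary units)

    V = beta .* (delta .^ delay);
    V(delay == 0) = 1;       % an immediate reward is not discounted

    disp([delay(:) V(:)])    % e.g. delay 365 gives V of roughly 0.70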
+ + properties (Dependent) + + end + + methods (Access = public) + + function obj = DF_BetaDelta(varargin) + obj = obj@DF1(varargin{:}); + end + + end + + + methods (Static, Access = protected) + + function y = function_evaluation(x, theta) + if verLessThan('matlab','9.1') + y = bsxfun(@times, theta.beta, bsxfun(@power, theta.delta, x )); + else + % use new array broadcasting in 2016b + y = theta.beta .* (theta.delta .^ x); + end + % set y=1 when delay=0 + y(x==0) = 1; + end + + end + + +end diff --git a/ddToolbox/models/parametric_models/beta_delta_models/ModelHierarchicalBetaDelta.m b/ddToolbox/models/parametric_models/beta_delta_models/ModelHierarchicalBetaDelta.m new file mode 100644 index 00000000..e4f30cdc --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/ModelHierarchicalBetaDelta.m @@ -0,0 +1,31 @@ +classdef ModelHierarchicalBetaDelta < BetaDelta + %ModelHierarchicalBetaDelta Model class for fully hierarchical estimation of the beta-delta discount function. + % All parameters are estimated hierarchically. + + methods (Access = public, Hidden = true) + + function [obj] = ModelHierarchicalBetaDelta(data, varargin) + obj = obj@BetaDelta(data, varargin{:}); + obj.modelFilename = 'hierarchicalBetaDelta'; + obj = obj.addUnobservedParticipant('GROUP'); + + % MUST CALL THIS METHOD AT THE END OF ALL MODEL-SUBCLASS CONSTRUCTORS + obj = obj.conductInference(); + end + + end + + methods (Access = protected) + + function initialParams = initialiseChainValues(obj, nchains) + % Generate initial values of the root nodes + for chain = 1:nchains + initialParams(chain).groupW = rand; + initialParams(chain).groupALPHAmu = rand*10; + initialParams(chain).groupALPHAsigma = rand*5; + end + end + + end + +end diff --git a/ddToolbox/models/parametric_models/beta_delta_models/ModelMixedBetaDelta.m b/ddToolbox/models/parametric_models/beta_delta_models/ModelMixedBetaDelta.m new file mode 100644 index 00000000..119839e0 --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/ModelMixedBetaDelta.m @@ -0,0 +1,30 @@ +classdef ModelMixedBetaDelta < BetaDelta + %ModelMixedBetaDelta Model for BetaDelta discount function, partial pooling + % SOME parameters are estimated hierarchically. + + methods (Access = public, Hidden = true) + function obj = ModelMixedBetaDelta(data, varargin) + obj = obj@BetaDelta(data, varargin{:}); + obj.modelFilename = 'mixedBetaDelta'; + obj = obj.addUnobservedParticipant('GROUP'); + + % MUST CALL THIS METHOD AT THE END OF ALL MODEL-SUBCLASS CONSTRUCTORS + obj = obj.conductInference(); + end + end + + methods (Access = protected) + + function initialParams = initialiseChainValues(obj, nchains) + % Generate initial values of the root nodes + nExperimentFiles = obj.data.getNExperimentFiles(); + for chain = 1:nchains + initialParams(chain).groupW = rand; + initialParams(chain).groupALPHAmu = rand*100; + initialParams(chain).groupALPHAsigma = rand*100; + end + end + + end + +end diff --git a/ddToolbox/models/parametric_models/beta_delta_models/ModelSeparateBetaDelta.m b/ddToolbox/models/parametric_models/beta_delta_models/ModelSeparateBetaDelta.m new file mode 100644 index 00000000..ffbee788 --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/ModelSeparateBetaDelta.m @@ -0,0 +1,30 @@ +classdef ModelSeparateBetaDelta < BetaDelta + %ModelSeparateBetaDelta Model for BetaDelta discount function, no parameter pooling + % NO parameters are estimated hierarchically. 
+ + methods (Access = public, Hidden = true) + function obj = ModelSeparateBetaDelta(data, varargin) + obj = obj@BetaDelta(data, varargin{:}); + obj.modelFilename = 'separateBetaDelta'; + + % MUST CALL THIS METHOD AT THE END OF ALL MODEL-SUBCLASS CONSTRUCTORS + obj = obj.conductInference(); + end + end + + methods (Access = protected) + + function initialParams = initialiseChainValues(obj, nchains) + % Generate initial values of the root nodes + nExperimentFiles = obj.data.getNExperimentFiles(); + for chain = 1:nchains + initialParams(chain).beta = abs(normrnd(0.84, 2, [nExperimentFiles,1])); + initialParams(chain).delta = normrnd(1, 0.5, [nExperimentFiles,1]); + initialParams(chain).epsilon = 0.1 + rand([nExperimentFiles,1])/10; + initialParams(chain).alpha = abs(normrnd(0.01,10,[nExperimentFiles,1])); + end + end + + end + +end diff --git a/ddToolbox/models/parametric_models/beta_delta_models/hierarchicalBetaDelta.jags b/ddToolbox/models/parametric_models/beta_delta_models/hierarchicalBetaDelta.jags new file mode 100644 index 00000000..a7eea094 --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/hierarchicalBetaDelta.jags @@ -0,0 +1,74 @@ +# RANDOM FACTORS: beta[p], delta[p], epsilon[p], alpha[p] +# HYPER-PRIORS ON: beta[p], delta[p], epsilon[p], alpha[p] + +model{ + +# DISCOUNT FUNCTION PARAMETERS ================================================= +# RANDOM (BY PARTICIPANT) FACTORS; HYPER-PRIORS = NO + +group_delta_MEAN <- 0.9995 +group_delta_PRECISION <- 1/(0.0009^2) + +group_beta_MEAN <- 0.84 +group_beta_PRECISION <- 1/(0.11^2) + +for (p in 1:nRealExperimentFiles){ + + # we get some instability if we use the reparameterisation for delta, so we avoid it for the moment + #delta_offset[p] ~ dnorm(0,1) + #delta[p] <- group_delta_MEAN + delta_offset[p] / sqrt(group_delta_PRECISION) + delta[p] ~ dnorm(group_delta_MEAN, group_delta_PRECISION) + + # non-centred reparameterisation to avoid funnel of hell (offset scaled by the group SD, not the precision) + beta_offset[p] ~ dnorm(0,1) + beta[p] <- group_beta_MEAN + beta_offset[p] / sqrt(group_beta_PRECISION) + #beta[p] ~ dnorm(group_beta_MEAN, group_beta_PRECISION) +} + +# MODEL-SPECIFIC: CALCULATION OF PRESENT SUBJECTIVE VALUES +for (t in 1:length(ID)) { + # calculate present subjective value for each reward + VA[t] <- A[t] * beta[ID[t]] * (delta[ID[t]] ^ DA[t]) + VB[t] <- B[t] * beta[ID[t]] * (delta[ID[t]] ^ DB[t]) +} + +# RESPONSE ERROR PARAMETERS ==================================================== +# comparison acuity (alpha) +groupALPHAmu ~ dnorm(0,1/(100^2)) T(0,) ## UPDATED SINCE PAPER +groupALPHAsigma ~ dexp(0.5) + +# error rates (epsilon) +groupW ~ dbeta(1.1, 10.9) # mode for lapse rate +groupKminus2 ~ dgamma(0.5,0.5) # concentration parameter ## UPDATED SINCE PAPER +groupK <- groupKminus2+2 + +epsilon_alpha <- groupW*(groupK-2)+1 +epsilon_beta <- (1-groupW)*(groupK-2)+1 + +for (p in 1:(nRealExperimentFiles+1)){ # +1 for unobserved participant + epsilon[p] ~ dbeta(epsilon_alpha , epsilon_beta ) T(,0.5) + + # using reparameterisation to avoid funnel of hell + alpha_offset[p] ~ dnorm(0,1) T(0,) + alpha[p] <- groupALPHAmu + alpha_offset[p] * groupALPHAsigma +} + + +# MODEL IN-SPECIFIC CODE BELOW...
SHOULD NOT CHANGE ACROSS MODELS ============== + +# Psychometric function +for (t in 1:length(ID)) { + P[t] <- epsilon[ID[t]] + (1-2*epsilon[ID[t]]) * phi( (VB[t]-VA[t]) / alpha[ID[t]] ) +} + +# response likelihood +for (t in 1:length(ID)) { + R[t] ~ dbern(P[t]) # likelihood of actual response +} + +# POSTERIOR PREDICTION +for (t in 1:length(ID)) { + Rpostpred[t] ~ dbern(P[t]) +} + +} diff --git a/ddToolbox/models/parametric_models/beta_delta_models/mixedBetaDelta.jags b/ddToolbox/models/parametric_models/beta_delta_models/mixedBetaDelta.jags new file mode 100644 index 00000000..916539f3 --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/mixedBetaDelta.jags @@ -0,0 +1,67 @@ +# RANDOM FACTORS: logk[p], epsilon[p], alpha[p] +# HYPER-PRIORS ON: epsilon[p], alpha[p] + +model{ + +# DISCOUNT FUNCTION PARAMETERS ================================================= +# RANDOM (BY PARTICIPANT) FACTORS; HYPER-PRIORS = NO + +# estimates of mean/sd of beta and delta were taken from Franck, C. T., Koffarnus, M. N., House, L. L., & Bickel, W. K. (2014). Accurate characterization of delay discounting: A multiple model approach using approximate bayesian model selection and a unified discounting measure. Journal of the Experimental Analysis of Behavior, 103(1), 218–233. + +delta_MEAN <- 0.9995 +delta_PRECISION <- 1/(0.0009^2) +beta_MEAN <- 0.84 +beta_PRECISION <- 1/(0.11^2) + +for (p in 1:nRealExperimentFiles){ + delta[p] ~ dnorm(delta_MEAN, delta_PRECISION) + beta[p] ~ dnorm(beta_MEAN, beta_PRECISION) T(0,) +} + + +# RESPONSE ERROR PARAMETERS ==================================================== +# comparison acuity (alpha) +groupALPHAmu ~ dnorm(0,1/(100^2)) T(0,) ## UPDATED SINCE PAPER +groupALPHAsigma ~ dexp(0.5) ## UPDATED SINCE PAPER + +# error rates (epsilon) +groupW ~ dbeta(1.1, 10.9) # mode for lapse rate +groupKminus2 ~ dgamma(0.5,0.5) # concentration parameter ## UPDATED SINCE PAPER +groupK <- groupKminus2+2 + +epsilon_alpha <- groupW*(groupK-2)+1 +epsilon_beta <- (1-groupW)*(groupK-2)+1 + +for (p in 1:(nRealExperimentFiles+1)){ # +1 for unobserved participant + epsilon[p] ~ dbeta(epsilon_alpha , epsilon_beta ) T(,0.5) + + # using reparameterisation to avoid funnel of hell + alpha_offset[p] ~ dnorm(0,1) T(0,) + alpha[p] <- groupALPHAmu + alpha_offset[p] * groupALPHAsigma +} + + +# MODEL-SPECIFIC: CALCULATION OF PRESENT SUBJECTIVE VALUES +for (t in 1:length(ID)) { + VA[t] <- A[t] * beta[ID[t]] * delta[ID[t]] ^ DA[t] + VB[t] <- B[t] * beta[ID[t]] * delta[ID[t]] ^ DB[t] +} + +# MODEL IN-SPECIFIC CODE BELOW... 
SHOULD NOT CHANGE ACROSS MODELS ============== + +# Psychometric function +for (t in 1:length(ID)) { + P[t] <- epsilon[ID[t]] + (1-2*epsilon[ID[t]]) * phi( (VB[t]-VA[t]) / alpha[ID[t]] ) +} + +# response likelihood +for (t in 1:length(ID)) { + R[t] ~ dbern(P[t]) # likelihood of actual response +} + +# POSTERIOR PREDICTION +for (t in 1:length(ID)) { + Rpostpred[t] ~ dbern(P[t]) +} + +} diff --git a/ddToolbox/models/parametric_models/beta_delta_models/separateBetaDelta.jags b/ddToolbox/models/parametric_models/beta_delta_models/separateBetaDelta.jags new file mode 100644 index 00000000..f88d0b2a --- /dev/null +++ b/ddToolbox/models/parametric_models/beta_delta_models/separateBetaDelta.jags @@ -0,0 +1,54 @@ +# RANDOM FACTORS: beta[p], delta[p], epsilon[p], alpha[p] +# HYPER-PRIORS ON: none + +model{ + +# DISCOUNT FUNCTION PARAMETERS ================================================= +# RANDOM (BY PARTICIPANT) FACTORS; HYPER-PRIORS = NO + +# estimates of mean/sd of beta and delta were taken from Franck, C. T., Koffarnus, M. N., House, L. L., & Bickel, W. K. (2014). Accurate characterization of delay discounting: A multiple model approach using approximate bayesian model selection and a unified discounting measure. Journal of the Experimental Analysis of Behavior, 103(1), 218–233. + +delta_MEAN <- 0.9995 +delta_PRECISION <- 1/(0.0009^2) +beta_MEAN <- 0.84 +beta_PRECISION <- 1/(0.11^2) + +for (p in 1:nRealExperimentFiles){ + delta[p] ~ dnorm(delta_MEAN, delta_PRECISION) + beta[p] ~ dnorm(beta_MEAN, beta_PRECISION) T(0,) +} + +# MODEL-SPECIFIC: CALCULATION OF PRESENT SUBJECTIVE VALUES +for (t in 1:length(ID)) { + VA[t] <- A[t] * beta[ID[t]] * delta[ID[t]] ^ DA[t] + VB[t] <- B[t] * beta[ID[t]] * delta[ID[t]] ^ DB[t] +} + +# RESPONSE ERROR PARAMETERS ==================================================== + +epsilon_alpha <- 1.1 +epsilon_beta <- 10.9 + +for (p in 1:nRealExperimentFiles){ + epsilon[p] ~ dbeta(epsilon_alpha , epsilon_beta ) T(,0.5) + alpha[p] ~ dexp(0.01) +} + +# MODEL IN-SPECIFIC CODE BELOW... SHOULD NOT CHANGE ACROSS MODELS ============== + +# Psychometric function +for (t in 1:length(ID)) { + P[t] <- epsilon[ID[t]] + (1-2*epsilon[ID[t]]) * phi( (VB[t]-VA[t]) / alpha[ID[t]] ) +} + +# response likelihood +for (t in 1:length(ID)) { + R[t] ~ dbern(P[t]) # likelihood of actual response +} + +# POSTERIOR PREDICTION +for (t in 1:length(ID)) { + Rpostpred[t] ~ dbern(P[t]) +} + +} diff --git a/ddToolbox/utils-plot/create_subplots.m b/ddToolbox/utils-plot/create_subplots.m index ca437000..76c5184d 100644 --- a/ddToolbox/utils-plot/create_subplots.m +++ b/ddToolbox/utils-plot/create_subplots.m @@ -2,31 +2,24 @@ %CREATE_SUBPLOTS % This function creates a spatial layout of N subplots with the facetStyle % provided. It returns a set of handles. 
+% `facetStyle` = {'row' | 'col' | 'square'} assert(isscalar(N)) assert(ischar(facetStyle)) subplot_handles = zeros(N,1); -% CREATE SUBPLOTS - switch(facetStyle) case{'row'} rows = 1; cols = N; - - % arrayfun(@(x) subplot(rows, cols, x), [1:5]) - for n = 1:N - subplot_handles(n) = subplot(rows, cols, n); - end + subplot_handles = create_subplot_handles(rows,cols, N); case{'col'} rows = N; cols = 1; - for n = 1:N - subplot_handles(n) = subplot(rows, cols, n); - end + subplot_handles = create_subplot_handles(rows,cols, N); case{'square'} cols = ceil(sqrt(N)); @@ -34,7 +27,13 @@ while rows*cols < N cols = cols + 1; end - for n = 1:N - subplot_handles(n) = subplot(rows, cols, n); - end + subplot_handles = create_subplot_handles(rows,cols, N); +end + +end + +function subplot_handles = create_subplot_handles(rows,cols, N) +for n = 1:N + subplot_handles(n) = subplot(rows, cols, n); end +end \ No newline at end of file diff --git a/ddToolbox/utils-plot/parseFigureAndAxisRequested.m b/ddToolbox/utils-plot/parseFigureAndAxisRequested.m index 73f62e34..3ce20593 100644 --- a/ddToolbox/utils-plot/parseFigureAndAxisRequested.m +++ b/ddToolbox/utils-plot/parseFigureAndAxisRequested.m @@ -10,7 +10,7 @@ figureHandle = p.Results.figureHandle; axisHandle = p.Results.axisHandle; -figure(figureHandle) -subplot(axisHandle) +figure(figureHandle); +subplot(axisHandle); end diff --git a/demo/untitled.fig b/demo/untitled.fig new file mode 100644 index 00000000..0126ec18 Binary files /dev/null and b/demo/untitled.fig differ diff --git a/tests/getAllParametricModelNames.m b/tests/getAllParametricModelNames.m index 696716ff..0bec215a 100644 --- a/tests/getAllParametricModelNames.m +++ b/tests/getAllParametricModelNames.m @@ -4,6 +4,7 @@ 'ModelHierarchicalME', 'ModelMixedME', 'ModelSeparateME',... 'ModelHierarchicalLogK', 'ModelMixedLogK', 'ModelSeparateLogK',... 'ModelHierarchicalHyperboloid', 'ModelMixedHyperboloid', 'ModelSeparateHyperboloid',... + 'ModelHierarchicalBetaDelta', 'ModelMixedBetaDelta', 'ModelSeparateBetaDelta',... 'ModelHierarchicalExp1', 'ModelMixedExp1', 'ModelSeparateExp1',... 'ModelSeparateExpPower', 'ModelMixedExpPower',... % no hierarchical model 'ModelSeparateExpLog', 'ModelMixedExpLog',... 
% no hierarchical model diff --git a/tests/test_public_plot_methods.m b/tests/test_public_plot_methods.m new file mode 100644 index 00000000..34f13fa6 --- /dev/null +++ b/tests/test_public_plot_methods.m @@ -0,0 +1,100 @@ +classdef test_public_plot_methods < matlab.unittest.TestCase + + properties + data + model + savePath = tempname(); % matlab auto-generated temp folders + end + + properties (TestParameter) + % % just test with a couple of selected models + % %model = {'ModelHierarchicalME', 'ModelHierarchicalLogK', 'ModelSeparateNonParametric'}; + % model = {'ModelHierarchicalME'}; + % dataset = {'non_parametric'}; + end + + %% CLASS-LEVEL SETUP/TEARDOWN ----------------------------------------- + methods (TestClassSetup) + function setup(testCase) + + model = 'ModelHierarchicalLogK'; + dataset = 'non_parametric'; + + %% Set up data + datapath = ['~/git-local/delay-discounting-analysis/demo/datasets/' dataset]; + % assuming this is running on my maching + addpath('~/git-local/delay-discounting-analysis/ddToolbox') + % only analyse 2 people, for speed of running tests + filesToAnalyse = allFilesInFolder(datapath, 'txt'); + filesToAnalyse = filesToAnalyse(1:2); + testCase.data = Data(datapath, 'files', filesToAnalyse); + + %% Run model + makeModelFunction = str2func(model); + testCase.model = makeModelFunction(testCase.data,... + 'savePath', testCase.savePath,... + 'shouldPlot', 'no',... + 'shouldExportPlots', false,... + 'mcmcParams', struct('nsamples', get_numer_of_samples_for_tests(),... + 'nchains', 2,... + 'nburnin', get_burnin_for_tests() )); + end + end + + + methods(TestClassTeardown) + + function remove_temp_directory(testCase) + rmdir(testCase.savePath,'s') + end + + function delete_stan_related_outputs(testCase) + delete('*.data.R') + delete('*.csv') + end + + function close_figures(testCase) + close all + end + + end + + + %% THE ACTUAL TESTS --------------------------------------------------- + + methods (Test) + + function plotDiscountFunction(testCase) + expt_to_plot = 1; + testCase.model.plotDiscountFunction(expt_to_plot); + close all + end + + function plotDiscountFunctionGrid(testCase) + testCase.model.plotDiscountFunctionGrid(); + close all + end + + function plotDiscountFunctionsOverlaid(testCase) + testCase.model.plotDiscountFunctionsOverlaid(); + close all + end + + function plotPosteriorAUC(testCase) + expt_to_plot = 1; + testCase.model.plotPosteriorAUC(expt_to_plot); + close all + end + + function plotPosteriorClusterPlot(testCase) + testCase.model.plotPosteriorClusterPlot(); + close all + end + + function plotUnivarateSummary(testCase) + testCase.model.plotUnivarateSummary(); + close all + end + end + +end
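Finally, a minimal end-to-end sketch of how the new beta-delta models are intended to be used, following the same call pattern as test_public_plot_methods above. The data folder, MCMC settings and save path are placeholders; the class and method names are the ones introduced or exercised in this changeset.

    % Minimal usage sketch (placeholder paths and MCMC settings).
    addpath('ddToolbox')                               % assumes the toolbox is on this relative path
    datapath = fullfile('demo', 'datasets', 'kirby');  % hypothetical folder of .txt data files

    data  = Data(datapath, 'files', allFilesInFolder(datapath, 'txt'));
    model = ModelHierarchicalBetaDelta(data,...
        'savePath', tempname(),...
        'shouldPlot', 'no',...
        'mcmcParams', struct('nsamples', 10000, 'nchains', 4, 'nburnin', 2000));

    model.plotDiscountFunctionGrid();   % grid of discount functions, one per experiment file
    model.plotPosteriorAUC(1);          % posterior distribution of AUC for the first experiment
    model.export();                     % point estimates and convergence information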