% Cesanek et al. (2021) Figure 2 Source Code (MATLAB code - rename with .m suffix to run)
% Creates all figure subplots and runs all relevant reported analyses
%
% Requirements:
%   - gramm plotting library (https://github.com/piermorel/gramm) on the path
%   - Figure2_SourceData1.txt in the current directory

% Reset workspace, figures, and console for a clean run
clearvars
close all
clc

% Fail fast with a download pointer if the gramm library is not installed
if exist('gramm','file')==0
    error('The gramm plotting library is required to run this code.\nYou can download it from https://github.com/piermorel/gramm).\n');
end

% Fail fast if the source data file is missing
if ~exist('Figure2_SourceData1.txt','file')
    error('This analysis requires the following source data files: Figure2_SourceData1.txt.');
end
F2 = readtable('Figure2_SourceData1.txt');

% Unpack table columns into row vectors (transposed for use with findgroups/splitapply below)
Subject = F2.Subject';               % participant identifier
ExperimentName = F2.ExperimentName'; % group label: 'Linear+', 'Linear++', or 'Uncorr+'
Block = F2.Block';                   % trial-cycle (bin) index; x-axis of the timelines
ObjID = F2.ObjID';                   % object identifier; ObjID==3 is the outlier/test object
AF = F2.AF';                         % anticipatory force, in Newtons (reported as "N" below)
RF = round(F2.RF,4)';                % object weight as force (divided by 9.81 -> mass below);
                                     %  rounded so later unique()/grouping on RF is exact
RT = F2.RT';                         % response time (units not shown in this file)
Phase = F2.Phase';                   % experiment phase index (training = 2; see analyses below)
Trial = F2.Trial';                   % trial index (used modulo 5, i.e. position in trial cycle)

% Color map for the five family objects (light -> dark oranges), RGB in [0,1]
famcolors = [255 124  49;
             255  97  20;
             215  77  18;
             175  56  18;
             137  34  17]/255;
         
%% Fig 2, left column: Anticipatory Force Timelines
% One facet row per experiment group; x = trial cycle (Block), y = mean AF.
% Four gramm layers are drawn in sequence:
%   1) mean AF areas (sem) for the family objects (outlier excluded),
%   2) dotted lines at each family object's true weight (RF),
%   3) dotted line at the outlier's true weight,
%   4) solid area + points for the outlier's AF timeline.

% Group keys returned by findgroups: b = Block, p = Phase, o = ObjID, e = ExperimentName
[groups, ~, b, p, o, e] = findgroups(Subject,Block,Phase,ObjID,ExperimentName);
AF_m = splitapply(@nanmean,AF,groups);
RF_m = splitapply(@unique,RF,groups);  % RF is constant within each group, so unique() yields that single value

% Facet row order: Linear+, Linear++, Uncorr+
row_order = [find(strcmpi('Linear+',unique(e))) find(strcmpi('Linear++',unique(e))) find(strcmpi('Uncorr+',unique(e))) ];

% Recreate figure 21 from scratch
close(figure(21));
figh = figure(21);

% Size the figure via a fixed pixels-per-inch conversion
pixPerInch = 500/6.95;
WidthInches = 4.5;
HeightInches = 3.25;
figh.Position(1) = 0;
figh.Position(2) = 0;
figh.Position(3) = pixPerInch*WidthInches;
figh.Position(4) = pixPerInch*HeightInches;

clear g
% Layer 1: family AF areas. Adding 10 to the color code for Uncorr+ shifts
% that group onto a different portion of the 8-entry color map below.
g = gramm('x',b, 'y',AF_m, 'color',(RF_m+10*strcmpi(e,'Uncorr+')), 'group',p, 'subset',o~=3);
g.set_layout_options('redraw',false,'margin_height',[.1 .02],'margin_width',[.07 .02],'gap',[0.02 0.02]);
g.no_legend();
g.set_names('x','','y','','color','Weights (kg)','Column','','Row','');
g.axe_property('XLim',[-1 10*ceil(max(b-.5)/10)+1],'YLim',[4 17]);  % round x-limit up to next multiple of 10
g.facet_grid(e,[]);
g.set_order_options('row',row_order);
g.set_point_options('base_size',2);
g.set_line_options('base_size',0.5);
g.set_color_options('map',famcolors([1 2 4 5 5 1 4 2],:),'n_color',8,'n_lightness',1);
g.stat_summary('geom',{'area'},'width',1,'type','sem');
g.draw();
% Layer 2: dotted lines at the family objects' true weights (RF)
g.update('y',RF_m);
g.no_legend();
g.stat_summary('geom',{'line'});
g.set_line_options('styles',{':'},'base_size',0.5);
g.draw();
% Layer 3: dotted line at the outlier's true weight (subset flips to o==3)
g.update('y',RF_m,'color',[],'subset',o==3);
g.no_legend();
g.set_point_options('base_size',3);
g.set_line_options('base_size',1);
g.set_color_options('map',famcolors(3,:),'n_color',1,'n_lightness',1);
g.stat_summary('geom',{'line'});
g.set_line_options('styles',{':'});
g.draw();
% Layer 4: outlier AF timeline (solid area + points, same middle family color)
g.update('y',AF_m);
g.no_legend();
g.set_line_options('styles',{'-'});
g.stat_summary('geom',{'area','point'});
g.draw();

%% Fig 2, middle column: End of Test Phase averages for Linear+, Linear++, and Uncorr+ 
% Averages over the final NumEndTestBins trial cycles: per-participant
% regression lines of AF on object mass, plus family and outlier averages.
NumEndTestBins = 16;   % number of final trial cycles to average over

% Data for the family (outlier ObjID==3 excluded)
TimePeriods = Block>(max(Block)-NumEndTestBins);
subset = TimePeriods & ObjID~=3;
% Group keys: e = ExperimentName, p = Phase (Subject and ObjID key outputs unused)
[groups, ~, e, ~, p] = findgroups(Subject(subset), ExperimentName(subset), ObjID(subset), Phase(subset));
AF_fam = splitapply(@nanmean,AF(subset),groups);
Mass_fam = splitapply(@nanmean,RF(subset)/9.81,groups);   % convert force (N) to mass (kg)

% Helper functions: extract slope (coef 2) / intercept (coef 1) from an OLS
% fit; subsref lets us index the result of the \ solve inside an anonymous fn
mySlope = @(x,y) subsref( ([ones(length(x),1) x']\y'), struct('type','()','subs',{{2}}));
myIntercept = @(x,y) subsref( ([ones(length(x),1) x']\y'), struct('type','()','subs',{{1}}));
% Fit a linear function for each participant using OLS on Training Objects
[groupsLR, ~, eLR] = findgroups(Subject(subset),ExperimentName(subset));
WindowSlopes = splitapply(mySlope,RF(subset)/9.81,AF(subset),groupsLR);
WindowIntercepts = splitapply(myIntercept,RF(subset)/9.81,AF(subset),groupsLR);
% Evaluate each participant's regression line over the plotted mass range
xpred = 0.5:0.01:1.3;
ypred = repmat(WindowIntercepts',1,length(xpred))+repmat(WindowSlopes',1,length(xpred)).*repmat(xpred,length(WindowSlopes),1);

% Data for the outlier (there won't be any for Training phases)
TimePeriods = Block>(max(Block)-NumEndTestBins);
subset2 = TimePeriods & ObjID==3;
% Group keys: e2 = ExperimentName, p2 = Phase
[groups2, ~, e2, p2] = findgroups(Subject(subset2),ExperimentName(subset2),Phase(subset2));
WindowOutlierAF = splitapply(@nanmean, AF(subset2), groups2);
WindowOutlierIW = repmat(0.9,size(WindowOutlierAF));   % outlier plotted at x = 0.9 kg

% Recreate figure 22 from scratch
close(figure(22));
figh = figure(22);

% Size the figure via a fixed pixels-per-inch conversion
pixPerInch = 500/6.95;
WidthInches = 1.2;
HeightInches = 3.25;
figh.Position(1) = 0;
figh.Position(2) = 0;
figh.Position(3) = pixPerInch*WidthInches;
figh.Position(4) = pixPerInch*HeightInches;

clear g
% First we draw the regression line for the family
g = gramm('x',xpred,'y',ypred);
g.set_layout_options('redraw',false,'margin_height',[.1 .02],'margin_width',[.2 .02],'gap',[0.02 0.02]);
g.no_legend();
g.set_names('x','', 'y','', 'color','','Row','','Column','');
g.axe_property('XLim',[.5 1.3],'YLim',[4 17],'XTick',[.6 .75 .9 1.05 1.2],'TickLength',[0.03 0.03]);
g.set_order_options('row',row_order);
g.set_line_options('base_size',1);
g.set_color_options('lightness',0,'chroma',0);   % draw regression lines in black
g.facet_grid(eLR,[]);
g.geom_abline('slope',9.81,'intercept',0,'style','k:'); % If Mass on x-axis, slope of unity line = +gravity
g.stat_summary('type','sem');
g.draw();
% Plot family data points over the regression line
% We force the color codes for Random to be higher so we can set a different color ordering
g.update('x',Mass_fam,'y',AF_fam,'color',single(Mass_fam+strcmpi(e,'Uncorr+'))); % Add the color grouping, cast as single to fix floating point issues
g.no_legend();
g.facet_grid(e,p);
g.set_point_options('base_size',6);
g.set_color_options('map',famcolors([1 2 4 5 5 1 4 2],:),'n_color',8,'n_lightness',1);
g.stat_summary('geom',{'point'},'type','sem');
g.draw();
% Error bars for the family points, in a slightly darker shade (map * 0.8)
g.update();
g.no_legend();
g.set_line_options('base_size',1.5);
g.set_color_options('map',famcolors([1 2 4 5 5 1 4 2],:)*0.8,'n_color',8,'n_lightness',1);
g.stat_summary('geom',{'errorbar'},'type','sem','width',0);
g.draw();
% Plot the outlier average
g.update('x',WindowOutlierIW,'y',WindowOutlierAF);
g.no_legend();
g.set_point_options('base_size',6);
g.set_color_options('map',famcolors(3,:),'n_color',1,'n_lightness',1);
g.facet_grid(e2,p2);
g.stat_summary('geom',{'point'},'type','sem');
g.draw();
% Error bars for the outlier point, darker shade
g.update();
g.no_legend();
g.set_line_options('base_size',1.5);
g.set_color_options('map',famcolors(3,:)*0.8,'n_color',1,'n_lightness',1);
g.stat_summary('geom',{'errorbar'},'type','sem','width',0);
g.draw();

% We want to draw lines for the correct outlier weight
% True outlier weight per facet row (1.2, 1.5, 1.2 kg), converted to force
RFs_byExp = [1.2 1.5 1.2]*9.81; % Y-axis units (Force in Newtons)
exps = unique(e);
exps = exps(row_order);   % reorder to match the facet row order
phases = unique(p);

% Short dotted horizontal segment centered on the outlier's x position
outlierX = .9;
lineWidth = 0.075;   % half-width of the segment in x-axis (kg) units
xData = repmat(outlierX+[-1 1]*lineWidth,1,length(exps));
yData = repelem(RFs_byExp,2);
eData = repelem(exps,2);
pData = repelem(phases,6); % assumes a single unique phase in this window -- TODO confirm
g.update('x',xData,'y',yData);
g.no_legend();
g.facet_grid(eData,pData);
g.set_color_options('map',famcolors(3,:),'n_color',1,'n_lightness',1);
g.set_line_options('styles',{':'},'base_size',1.5);
g.geom_line();
g.draw();

%% Fig 2, right: Generalization
% Bar plot of normalized generalization: change in family-object AF from the
% end of training, scaled by the programmed weight increment, at Early vs
% End of the test phase, one facet row per group.
excludeTrials = [3 4 5]; % index of trials (in trial cycle) to exclude
excludeTrials = mod(excludeTrials,5); % -> [3 4 0]; only trials with mod(Trial,5) in {1,2} pass the filter below

% End of training phase: per-subject baseline AF
subset_nonGen = Phase==2 & Block>26;
groups = findgroups(Subject(subset_nonGen),ExperimentName(subset_nonGen));
AF_nonGen = splitapply(@nanmean, AF(subset_nonGen), groups);

% Early test phase (blocks 31-34, family objects only)
subset_genEarly = ObjID~=3 & ismember(Block,31:34) & ~ismember(mod(Trial,5),excludeTrials);
groups2 = findgroups(Subject(subset_genEarly),ExperimentName(subset_genEarly));
AF_genEarly = splitapply(@nanmean, AF(subset_genEarly), groups2);

% Late test phase (blocks 55-70, family objects only)
subset_genLate = ObjID~=3 & ismember(Block,55:70) & ~ismember(mod(Trial,5),excludeTrials);
[groups2, s2, e2] = findgroups(Subject(subset_genLate),ExperimentName(subset_genLate));
AF_genLate = splitapply(@nanmean, AF(subset_genLate), groups2);

% Normalizer: weight increment as force (0.3 kg, or 0.6 kg for Linear++)
n2 = repelem(0.3*9.81,length(e2));
n2(strcmpi(e2,'Linear++')) = 0.6*9.81;
yl = [-0.05 0.7];   % shared y-axis limits

late_subset = strcmpi(e2,'Uncorr+') | strcmpi(e2,'Linear+') | strcmpi(e2,'Linear++');   % true for every row here
GenByExp = [(AF_genEarly-AF_nonGen)./n2 (AF_genLate-AF_nonGen)./n2];   % [Early End] concatenated horizontally
p3 = repelem({'Early' 'End'},length(s2));   % matching period labels for the concatenation

close(figure(23));
figh=figure(23);

% Size the figure via a fixed pixels-per-inch conversion
pixPerInch = 500/6.95;
WidthInches = 0.8;
HeightInches = 3.25;
figh.Position(1) = 0;
figh.Position(2) = 0;
figh.Position(3) = pixPerInch*WidthInches;
figh.Position(4) = pixPerInch*HeightInches;

% Bars colored by period, with sem error bars
g = gramm('x',p3,'y',GenByExp,'color',p3);
g.set_layout_options('position',[0 1 1 1],'redraw',false,'margin_height',[.1 .02],'margin_width',[.2 .02],'gap',[0.02 0.02]);
g.axe_property('YLim',yl,'TickLength',[0.05 0.05]);
g.no_legend();
g.geom_abline('slope',0,'intercept',0,'style','k-');   % zero-generalization reference line
g.set_color_options('map',[0 0 0; 0.4 0.4 0.4],'n_color',2,'n_lightness',1);
g.facet_grid([e2 e2(late_subset)],[]);   % e2 duplicated to match the [Early End] concatenation
g.set_names('x','','y','','color','','row','');
g.stat_summary('geom',{'bar'},'type','sem','width',1.3);
g.stat_summary('geom',{'errorbar'},'type','sem','width',0);
g.draw();

%% Fig 2, bottom: RT Timelines for Linear+, Linear++, and Uncorr+ 
% Mean response time per trial cycle, family objects only, with the two
% Linear groups pooled into a single 'Linear' timeline.
subset = ObjID~=3;
% Group keys: b = Block, p = Phase, e = ExperimentName
[groups, ~, b, p, e] = findgroups(Subject(subset),Block(subset),Phase(subset),ExperimentName(subset));
RT_m = splitapply(@nanmean,RT(subset),groups);

% Combine the Linear groups
e(strcmp(e,'Linear++') | strcmp(e,'Linear+')) = {'Linear'};

% Recreate figure 24 from scratch
close(figure(24));
figh = figure(24);
clf
% Size the figure via a fixed pixels-per-inch conversion
pixPerInch = 500/6.95;
WidthInches = 4.5;
HeightInches = 1;
figh.Position(1) = 0;
figh.Position(2) = 0;
figh.Position(3) = pixPerInch*WidthInches;
figh.Position(4) = pixPerInch*HeightInches;

clear g
% One line+area (sem) timeline per pooled group, segmented by phase
g = gramm('x',b, 'y',RT_m, 'color', e, 'group',p);
g.set_layout_options('redraw',false,'margin_height',[.2 .05],'margin_width',[.07 .02],'gap',[0.05 0.05]);
g.no_legend();
g.set_names('x','','y','','row',' ');
g.axe_property('XLim',[-1 10*ceil(max(b-.5)/10)+1],'YLim',[0 4]);   % round x-limit up to next multiple of 10
g.set_point_options('base_size',2);
g.set_color_options('map',[228 26 28; 55 126 184]/255,'n_color',2,'n_lightness',1);
g.set_line_options('base_size',0.5);
g.stat_summary('geom',{'line','area'},'width',1,'type','sem');
g.draw();

%% Analysis

% Map binary test outcomes onto appropriate text for console output
% (indexed with H+1, where H is the 0/1 result returned by ttest/ttest2)
outcomes = {'not significant', 'significant'};

% Easier way to remember how the tails on paired t-tests work (see 'comparison' variable below)
% tails{i} is the ttest 'tail' option that corresponds to the verbal description comps{i}
tails = {'both','left','right'};
comps = {'different','less','greater'};

%% Correlation of anticipatory force with object weight, end of training
% For each group, compute every participant's RF-AF correlation over the
% final NumEndTrainingBins training trial cycles, average the correlations
% in Fisher-z space, and report the back-transformed mean r with a 95% CI.
NumEndTrainingBins = 8;
fprintf('\n\n** Correlation of Anticipatory Force and Object Weight @ End of Training (%i trial cycles) (Fig. 2a, c, e) **\n',NumEndTrainingBins);
for ee = {'Linear+','Linear++','Uncorr+'}
    expName = ee{1};
    % This group's trials from the final training window
    subset = strcmpi(ExperimentName,expName) & Phase==2 & Block>30-NumEndTrainingBins;
    groups = findgroups(Subject(subset));
    % Per-participant correlation, Fisher z-transformed so it can be averaged
    EarlyCorrsFisherZ = splitapply(@(x,y)atanh(corr(x,y,'rows','pairwise')),RF(subset)',AF(subset)',groups');

    % Mean and 95% half-width in z-space, then back-transform to r
    zMean = mean(EarlyCorrsFisherZ);
    zHalfWidth = 1.96*std(EarlyCorrsFisherZ)/sqrt(length(EarlyCorrsFisherZ));
    meanCorr = tanh(zMean);
    meanCorr_LB = tanh(zMean-zHalfWidth);
    meanCorr_UB = tanh(zMean+zHalfWidth);

    fprintf('\n%s: r = %.2f, 95%% CI = [%.2f, %.2f]\n', ...
        expName, ...
        meanCorr,meanCorr_LB,meanCorr_UB);
end

%% Average anticipatory force on first test trial
% Mean AF on the outlier's first appearance (Block 31), per group, with 95% CI
subset = Block==31 & ObjID==3;
[groups, ~, e] = findgroups(Subject(subset),ExperimentName(subset));
firstTestTrialOutlierAF = splitapply(@nanmean, AF(subset), groups);   % per-subject mean AF
[groups2, e2] = findgroups(e);                                        % regroup the subject means by experiment
firstTestTrialMeans = splitapply(@nanmean, firstTestTrialOutlierAF, groups2);
% Normal-approximation 95% half-width, ignoring NaN subjects in the count
firstTestTrialCIs = splitapply(@(x)(1.96*nanstd(x)/sqrt(sum(~isnan(x)))),firstTestTrialOutlierAF,groups2);
firstTestTrialLBs = firstTestTrialMeans-firstTestTrialCIs;
firstTestTrialUBs = firstTestTrialMeans+firstTestTrialCIs;

fprintf('\n\n** Average Anticipatory Force on First Test Trial (Fig. 2a, c, e) **\n');
for ee = {'Linear+','Linear++','Uncorr+'}
    expName = ee{1};
    eii=find(strcmpi(e2,expName));   % index of this experiment in the grouped results
    fprintf('\n%s: %.2f N, 95%% CI = [%.2f, %.2f]\n', ...
        expName, ...
        firstTestTrialMeans(eii),firstTestTrialLBs(eii),firstTestTrialUBs(eii));
end

%% Test of Outlier Learning: AF minus Family-predicted weight
% For each group: fit a per-participant linear AF-vs-RF function over the
% training objects in the final test window, predict the outlier's AF from
% that family fit, and test (one-tailed) whether the actual outlier AF is
% greater than the family prediction. The three per-group report/t-test
% blocks are a single loop here; the printed output is unchanged.
window = 'End of Test'; NumLateTestBins = 16;
% Helper functions: extract slope (coef 2) / intercept (coef 1) from an OLS
% fit, skipping NaN responses; subsref lets us index the \ result inline
mySlope = @(x,y) subsref( ([ones(length(x(~isnan(y))),1) x(~isnan(y))']\y(~isnan(y))'), struct('type','()','subs',{{2}}));
myIntercept = @(x,y) subsref( ([ones(length(x(~isnan(y))),1) x(~isnan(y))']\y(~isnan(y))'), struct('type','()','subs',{{1}}));

fprintf('\n\n** Test for Learning of Test Object @ %s (%i trial cycles) (Fig. 2a-f) **\n',window,NumLateTestBins);
exps = {'Linear+','Linear++','Uncorr+'};
insideWindow = Block>(max(Block)-NumLateTestBins) & Block<=max(Block);

% Fit a linear function for each participant using OLS on Training Objects
subset = ismember(ExperimentName,exps) & insideWindow & ObjID~=3;
groups = findgroups(Subject(subset),ExperimentName(subset));
WindowSlopes = splitapply(mySlope,RF(subset),AF(subset),groups);
WindowIntercepts = splitapply(myIntercept,RF(subset),AF(subset),groups);

% Get the Anticipatory Force for the Test Object
subset2 = ismember(ExperimentName,exps) & insideWindow & ObjID==3;
[groups2, s2, e2] = findgroups(Subject(subset2),ExperimentName(subset2));
WindowOutlierAF = splitapply(@nanmean, AF(subset2), groups2);

% Family-predicted AF for the outlier (0.9 kg object, converted to Newtons)
familyPredicted = WindowSlopes*0.9*9.81+WindowIntercepts;

% One-tailed paired t-test per group: is AF greater than the family prediction?
comparison = find(strcmpi(comps,'greater'));
for ei = 1:length(exps)
    expName = exps{ei};
    expRows = strcmpi(e2,expName);
    outlierAF = WindowOutlierAF(expRows);        % per-subject outlier AF in this group
    famPredAF = familyPredicted(expRows);        % per-subject family-predicted AF
    % One-sample t-tests just to obtain the 95% CIs for the report lines
    [~,~,CI,~] = ttest(outlierAF);
    [~,~,CIpred,~] = ttest(famPredAF);
    fprintf('\n%s anticipatory force: %.2f N, 95%% CI = [%.2f, %.2f]',expName,mean(CI),CI);
    fprintf('\n%s family-predicted weight: %.2f N, 95%% CI = [%.2f, %.2f]\n',expName,mean(CIpred),CIpred);
    [H,P,~,STATS] = ttest(outlierAF,famPredAF,'tail',tails{comparison});
    fprintf('AF %sly %s than family-predicted weight (t(%i) = %.2f, p = %.2g)\n',outcomes{H+1},comps{comparison}, STATS.df, STATS.tstat, P);
end

%% Generalization analysis
% Mirrors the data preparation for the Fig. 2g-i plot, but keeps only the
% trials with mod(Trial,5)==2 in the test windows, then tests normalized
% generalization against zero (Early and End) and Early vs End. The three
% per-group test blocks are a single loop here; printed output is unchanged.

% End of training phase: per-subject baseline AF
subset_nonGen = Phase==2 & Block>26;
groups = findgroups(Subject(subset_nonGen),ExperimentName(subset_nonGen));
AF_nonGen = splitapply(@nanmean, AF(subset_nonGen), groups);

% Early test phase (blocks 31-34, family objects only)
subset_genEarly = ObjID~=3 & ismember(Block,31:34) & mod(Trial,5)==2;
groups2 = findgroups(Subject(subset_genEarly),ExperimentName(subset_genEarly));
AF_genEarly = splitapply(@nanmean, AF(subset_genEarly), groups2);

% Late test phase (blocks 55-70, family objects only)
subset_genLate = ObjID~=3 & ismember(Block,55:70) & mod(Trial,5)==2;
[groups2, s2, e2] = findgroups(Subject(subset_genLate),ExperimentName(subset_genLate));
AF_genLate = splitapply(@nanmean, AF(subset_genLate), groups2);

% Normalize by the weight increment as force (0.3 kg, or 0.6 kg for
% Linear++) and concatenate [Early End]
n2 = repelem(0.3*9.81,length(e2));
n2(strcmpi(e2,'Linear++')) = 0.6*9.81;
GenByExp = [(AF_genEarly-AF_nonGen)./n2 (AF_genLate-AF_nonGen)./n2];
p3 = repelem({'Early' 'End'},length(s2));

% Tests (two-tailed), one block of three per group
comparison = find(strcmpi(comps,'different'));
fprintf('\n\n** Generalization Analysis (Fig. 2g-i) ****\n');
for ee = {'Linear+','Linear++','Uncorr+'}
    expName = ee{1};
    earlyGen = GenByExp(strcmpi(p3,'Early') & strcmpi([e2 e2],expName));
    lateGen = GenByExp(strcmpi(p3,'End') & strcmpi([e2 e2],expName));
    % Early generalization vs zero
    [h,p,ci,stats] = ttest(earlyGen,0,'tail',tails{comparison});
    fprintf('\n%s: Early generalization %sly %s than zero (%.2f, t(%i) = %.2f, p = %.2g)\n',expName,outcomes{h+1},comps{comparison}, mean(ci), stats.df, stats.tstat, p);
    % Late generalization vs zero
    [h,p,ci,stats] = ttest(lateGen,0,'tail',tails{comparison});
    fprintf('%s: Late generalization %sly %s than zero (%.2f, t(%i) = %.2f, p = %.2g)\n',expName,outcomes{h+1},comps{comparison}, mean(ci), stats.df, stats.tstat, p);
    % Paired comparison: Early vs Late
    [h,p,~,stats] = ttest(earlyGen, lateGen,'tail',tails{comparison});
    fprintf('%s: Late generalization %sly %s than early generalization (t(%i) = %.2f, p = %.2g)\n',expName,outcomes{h+1},comps{comparison}, stats.df, stats.tstat, p);
end


%% rmANOVA for log(RT)
% 2-way mixed ANOVA: between factor Group (Linear+/Linear++ combined vs
% Uncorr+), within factor Phase (4 levels), on per-subject mean log RTs
% computed over family objects only, followed by per-phase t-tests.
% Get relevant data
subset = ObjID~=3;
% Group keys: s = Subject, e = ExperimentName, p = Phase
[groups, s, e, p] = findgroups(Subject(subset),ExperimentName(subset),Phase(subset));
dv = splitapply(@nanmean,log(RT(subset)),groups);
e(strcmpi(e,'Linear++')) = {'Linear+'}; % combine Linear+ and Linear++ into one group

Nlev_f = 4; % number of levels of each within factor
Nsubj = length(unique(s)); % number of subjects
% Wrangle DV into a matrix of dimension (Nsubj x Nwithincells) 
%  where Nwithincells = Nlev_f1 * Nlev_f2 ... * Nlev_fn
% NOTE(review): relies on findgroups ordering by Subject first, so each
% subject's 4 phase values are contiguous in dv, and on every subject
% having data in all 4 phases -- confirm against the source data
D = reshape(dv,[Nlev_f Nsubj])';
T = table;
for ci = 1:prod(Nlev_f)   % prod() generalizes to multi-factor level vectors
    T = addvars(T,D(:,ci));
end
% Within factor levels are indicated in a separate design matrix
f1 = (1:Nlev_f(1))';

within = table;
within = addvars(within, categorical(f1,'Ordinal',true), 'NewVariableNames', 'Phase');

% Between factor levels are indicated in columns of T
bfnames = {'Group'};
% Apply the same reshaping that we used for the dv to get subject-specific labels
bf1 = reshape(e,[Nlev_f Nsubj])';
% Do this for all between factors...
bf = bf1(:,1); % Then just take one column of each and collect them in a matrix
for bi = 1:size(bf,2)
    T = addvars(T, categorical(bf(:,bi)), 'NewVariableNames','Group');
end

% Repeated-measures model: Var1-Var4 are the four phases, Group is between
mdl = fitrm(T,'Var1-Var4~Group','WithinDesign',within);

test = ranova(mdl,'WithinModel','Phase');
fprintf('\n\n** 2-way (Group X Phase) ANOVA on log-transformed response times (Fig. 2j) **\n\n');
disp(test(:,1:6))

% t-tests in each phase
% One-tailed ('left'): is the combined Linear group's log RT lower than
% Uncorr+'s in each of the four phases?
[h1,p1,ci1,stats1] = ttest2(T.Var1(T.Group==categorical({'Linear+'})),T.Var1(T.Group==categorical({'Uncorr+'})),'tail','left');
[h2,p2,ci2,stats2] = ttest2(T.Var2(T.Group==categorical({'Linear+'})),T.Var2(T.Group==categorical({'Uncorr+'})),'tail','left');
[h3,p3,ci3,stats3] = ttest2(T.Var3(T.Group==categorical({'Linear+'})),T.Var3(T.Group==categorical({'Uncorr+'})),'tail','left');
[h4,p4,ci4,stats4] = ttest2(T.Var4(T.Group==categorical({'Linear+'})),T.Var4(T.Group==categorical({'Uncorr+'})),'tail','left');
fprintf('\n\n log(RT) t-tests in each phase, Uncorr+ slower than combined Linear+ and Linear++?');
fprintf('\n Phase 1: t(%.2g)=%.2f, p=%.2g',stats1.df,stats1.tstat,p1);
fprintf('\n Phase 2: t(%.2g)=%.2f, p=%.2g',stats2.df,stats2.tstat,p2);
fprintf('\n Phase 3: t(%.2g)=%.2f, p=%.2g',stats3.df,stats3.tstat,p3);
fprintf('\n Phase 4: t(%.2g)=%.2f, p=%.2g\n\n',stats4.df,stats4.tstat,p4);
