Hierarchical optimization in D2D #147

Open · wants to merge 24 commits into master

Commits (24)
dc82ca3
Implement basic hierarchical optimization
gdudziuk Jan 30, 2020
ae655ee
Minor cleanup in hierarchical optimization
gdudziuk Jan 30, 2020
ec877ff
Use argument sensi in arCalcHierarchical
gdudziuk Jan 31, 2020
539a55c
Autocompute hierarchical scales with fine simu
gdudziuk Jan 31, 2020
ff37968
Support hierarchical optimization with Matlab <R2019a
gdudziuk Jan 31, 2020
22b64d0
Fix bug in arInitHierarchical
gdudziuk Feb 4, 2020
3211892
Add necessary assertion checks in arCalcHierarchical
gdudziuk Feb 4, 2020
2c16a0e
Improve assertion messages in arCalcHierarchical
gdudziuk Feb 4, 2020
c3bbe5a
Remove some settings changes from arInitHierarchical
gdudziuk Feb 4, 2020
35b4c30
Improve error fitting check in arCalcHierarchical
gdudziuk Feb 5, 2020
f619f6a
Update the help text of arInitHierarchical
gdudziuk Feb 7, 2020
70ef579
Test feasibility of hierarchical optimization
gdudziuk Feb 9, 2020
b0cc36b
Add a function for disabling hierarchical optimization
gdudziuk Feb 9, 2020
1b7fb04
Fix a bug in assertions in arCalcHierarchical
gdudziuk Feb 11, 2020
03c9f23
Remove some redundant lines in arCalcHierarchical
gdudziuk Mar 2, 2020
a35790f
Fix a bug in arInitHierarchical
gdudziuk Mar 2, 2020
0850ee7
Fix a bug in arCalcHierarchical
gdudziuk Mar 3, 2020
7feb387
Fix a bug in arCalcHierarchical - support data repetitions
gdudziuk Mar 4, 2020
7217136
Minor improvement in arInitHierarchical
gdudziuk Mar 4, 2020
050f114
Switch off unnecessary warnings in arInitHierarchical
gdudziuk Mar 4, 2020
60c4617
Loosen the assertion tests in arCalcHierarchical
gdudziuk Mar 4, 2020
aa40219
Correct an inadequate error message in arCalcHierarchical
gdudziuk Mar 4, 2020
855c543
More informative final message of arInitHierarchical
gdudziuk Mar 5, 2020
14319ec
Fix a bug in arCalcHierarchical - support missing observations
gdudziuk Mar 5, 2020
163 changes: 163 additions & 0 deletions arFramework3/Subfunctions/arCalcHierarchical.m
@@ -0,0 +1,163 @@
function arCalcHierarchical(sensi)
%arCalcHierarchical([sensi]) Do the following things:
% - For each data series with a hierarchical scale parameter, assign the
% respective x or z values from the xExpSimu or zExpSimu field in the
% corresponding condition structure.
% - Compute the hierarchical scale parameters, overriding any custom values
% of these parameters.
% - For each data series with a hierarchical scale parameter, use the newly
% computed scale value to recompute the values of the observables stored in
% the yExpSimu field, overriding the results of arSimu.
% - If sensi is true, do the same for the scale gradients and the
% sensitivities of the observables: compute the scale gradients from the
% sxExpSimu or szExpSimu fields in the condition structures and
% subsequently recompute the syExpSimu fields in the data structures.
%
% sensi logical indicating whether to calculate the scale gradients [true]
% and update the sensitivities of the observables accordingly

if ~exist('sensi','var') || isempty(sensi)
sensi = 1;
end

global ar

% Check settings which are required to carry out hierarchical optimization
assert(isfield(ar.config,'useHierarchical') && ar.config.useHierarchical, ...
['Hierarchical optimization is not enabled. You have not called arInitHierarchical ', ...
'or there are no scale parameters suitable for hierarchical optimization in your observables.'])
for is = 1:length(ar.scales)
assert(~ar.qLog10(ar.scales(is).pLink),sprintf('Please disable log10 scale for parameter %s when using hierarchical optimization.',ar.pLabel{ar.scales(is).pLink}))
assert(ar.qFit(ar.scales(is).pLink)~=1,sprintf('Please disable fitting for parameter %s when using hierarchical optimization.',ar.pLabel{ar.scales(is).pLink}))
end

% Check for features which are not yet supported along with hierarchical optimization (but which may possibly be supported in the future)
if ar.config.fiterrors==0
haveNanExpStd = false;
for im = 1:length(ar.model)
for id = 1:length(ar.model(im).data)
for iy = 1:length(ar.model(im).data(id).fy)
if ar.model(im).data(id).useHierarchical(iy)
ystd = ar.model(im).data(id).yExpStd(:,iy);
yExp = ar.model(im).data(id).yExp(:,iy);
if any(isnan(ystd(~isnan(yExp))))
haveNanExpStd = true;
end
end
end
end
end
end
useErrorModel = (ar.config.fiterrors==1 || (ar.config.fiterrors==0 && haveNanExpStd));
assert(~useErrorModel,'Hierarchical optimization in combination with model-based errors is not supported yet. Please use experimental errors for the observables which have parameters for hierarchical optimization.')
useCustomResidual = isfield(ar.config,'user_residual_fun') && ~isempty(ar.config.user_residual_fun);
assert(~useCustomResidual,'Hierarchical optimization in combination with custom residuals is not supported yet.')
assert(~isfield(ar,'conditionconstraints'),'Hierarchical optimization in combination with condition constraints is not supported yet.')
assert(sum(ar.type~=0)==0,'Hierarchical optimization in combination with priors other than flat box is not supported yet.')
assert(~isfield(ar,'random'),'Hierarchical optimization in combination with random effects is not supported yet.')
for im = 1:length(ar.model)
for ic = 1:length(ar.model(im).condition)
qssEnabled = isfield(ar.model(im).condition(ic), 'qSteadyState') && sum(ar.model(im).condition(ic).qSteadyState==1)>0;
assert(~qssEnabled,'Hierarchical optimization in combination with qSteadyState is not supported yet.')
end
end
for im = 1:length(ar.model)
for id = 1:length(ar.model(im).data)
for iy = 1:length(ar.model(im).data(id).fy)
if ar.model(im).data(id).useHierarchical(iy)
assert(ar.model(im).data(id).logfitting(iy)==0,'Hierarchical optimization in combination with fitting of observables in log10 scale is not supported yet. Please switch off fitting in log10 scale for the observables which have parameters for hierarchical optimization.')
end
end
if any(ar.model(im).data(id).useHierarchical)
useDetectionLimit = isfield( ar.model(im).data(id), 'resfunction' ) && isstruct( ar.model(im).data(id).resfunction ) && ar.model(im).data(id).resfunction.active;
assert(~useDetectionLimit,'Hierarchical optimization in combination with detection limits is not supported yet. Please switch off detection limits for the data sets which have parameters for hierarchical optimization.')
end
end
end

% For each data, extract corresponding xExpSimu/zExpSimu values and their sensitivities
for im = 1:length(ar.model)
for id = 1:length(ar.model(im).data)

% This check is only for performance - continue early if there is nothing to do in this iteration
if ~any(ar.model(im).data(id).useHierarchical)
continue
end

ip = ar.model(im).data(id).pCondLink;
ic = ar.model(im).data(id).cLink;
td = ar.model(im).data(id).tExp;
tc = ar.model(im).condition(ic).tExp;

% D2D initializes yExpSimu and syExpSimu for missing observations to NaNs
% and zeros, respectively. We do the same for xzExpSimu and sxzExpSimu.
ar.model(im).data(id).xzExpSimu = nan(length(td),length(ar.model(im).data(id).fy));
if sensi
ar.model(im).data(id).sxzExpSimu = zeros(length(td),length(ar.model(im).data(id).fy),length(ar.model(im).data(id).p));
end
for iy = 1:length(ar.model(im).data(id).fy)
if ar.model(im).data(id).useHierarchical(iy)
inds = ~isnan(ar.model(im).data(id).yExp(:,iy)); % Logical mask of the data entries for which observations are present
ixz = ar.model(im).data(id).xzLink(iy);
xzType = ar.model(im).data(id).xzType{iy};
xzExpSimu = ar.model(im).condition(ic).(sprintf('%sExpSimu',xzType))(:,ixz); % TODO: Test the performance impact of sprintf
ar.model(im).data(id).xzExpSimu(inds,iy) = interp1(tc,xzExpSimu,td(inds),'nearest'); % NOTE: td(inds) should always be a subset of tc, hence the 'nearest' option
if sensi
sxzExpSimu = ar.model(im).condition(ic).(sprintf('s%sExpSimu',xzType))(:,ixz,:);
ar.model(im).data(id).sxzExpSimu(inds,iy,ip) = interp1(tc,sxzExpSimu,td(inds),'nearest');
end
end
end

end
end

% Compute each scale parameter and its gradient
for is = 1:length(ar.scales)
num = 0;
den = 0;
numGrad = 0;
denGrad = 0;
for il = 1:length(ar.scales(is).links)

im = ar.scales(is).links(il).m;
id = ar.scales(is).links(il).d;
iy = ar.scales(is).links(il).fy;
ystd = ar.model(im).data(id).yExpStd(:,iy);
yExp = ar.model(im).data(id).yExp(:,iy);
xzExpSimu = ar.model(im).data(id).xzExpSimu(:,iy);
num = num + sum(yExp.*xzExpSimu./(ystd.^2),'omitnan');
den = den + sum(xzExpSimu.^2./(ystd.^2),'omitnan');
if sensi
sxzExpSimu = squeeze(ar.model(im).data(id).sxzExpSimu(:,iy,:));
numGrad = numGrad + sum(yExp.*sxzExpSimu./(ystd.^2),1,'omitnan');
denGrad = denGrad + 2*sum(xzExpSimu.*sxzExpSimu./(ystd.^2),1,'omitnan');
end
end
assert(den>0,sprintf('The solution of your model corresponding to the scale %s is zero at all measurement times. Hierarchical optimization is not feasible.',ar.scales(is).scaleLabel))
ar.scales(is).scale = num/den;
if sensi
ar.scales(is).scaleGrad = (numGrad*den - denGrad*num)/(den^2);
end
ar.p(ar.scales(is).pLink) = ar.scales(is).scale;
end

% Update observables and their sensitivities
for is = 1:length(ar.scales)
scale = ar.scales(is).scale;
if sensi
scaleGrad = ar.scales(is).scaleGrad;
end
for il = 1:length(ar.scales(is).links)
im = ar.scales(is).links(il).m;
id = ar.scales(is).links(il).d;
iy = ar.scales(is).links(il).fy;
xzExpSimu = ar.model(im).data(id).xzExpSimu(:,iy);
ar.model(im).data(id).yExpSimu(:,iy) = scale.*xzExpSimu;
if sensi
sxzExpSimu = squeeze(ar.model(im).data(id).sxzExpSimu(:,iy,:));
ar.model(im).data(id).syExpSimu(:,iy,:) = scale.*sxzExpSimu + scaleGrad.*xzExpSimu;
end
end
end
% NOTE: Alternatively, we could iterate over the data instead of over the scales in the latter loop.
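
For reference, the scale formula computed above is the closed-form minimizer of the weighted least-squares contribution of each scale; a short sketch of the derivation, writing $y_j$ for the entries of yExp, $x_j$ for xzExpSimu and $\sigma_j$ for yExpStd over all data points linked to a given scale $s$:

$$\chi^2(s) = \sum_j \frac{(y_j - s\,x_j)^2}{\sigma_j^2}, \qquad \frac{\mathrm{d}\chi^2}{\mathrm{d}s} = 0 \;\Longrightarrow\; s = \frac{\sum_j y_j x_j/\sigma_j^2}{\sum_j x_j^2/\sigma_j^2} = \frac{\mathrm{num}}{\mathrm{den}}.$$

Differentiating with respect to a model parameter $\theta$ (using the sensitivities $\partial x_j/\partial\theta$ stored in sxzExpSimu) gives, by the quotient rule,

$$\frac{\partial s}{\partial\theta} = \frac{\mathrm{numGrad}\cdot\mathrm{den} - \mathrm{denGrad}\cdot\mathrm{num}}{\mathrm{den}^2}, \qquad \mathrm{numGrad} = \sum_j \frac{y_j\,\partial x_j/\partial\theta}{\sigma_j^2}, \quad \mathrm{denGrad} = 2\sum_j \frac{x_j\,\partial x_j/\partial\theta}{\sigma_j^2},$$

which is exactly what num, den, numGrad and denGrad accumulate in the loop.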
4 changes: 4 additions & 0 deletions arFramework3/Subfunctions/arCalcRes.m
@@ -100,6 +100,10 @@ function arCalcRes(sensi)

fiterrors_correction_factor = ar.config.fiterrors_correction;

if isfield(ar.config,'useHierarchical') && ar.config.useHierarchical
arCalcHierarchical(sensi)
end

for m=1:length(ar.model)
if ( isfield( ar.model(m), 'data' ) )
for d=1:length(ar.model(m).data)
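
This hook runs arCalcHierarchical at the beginning of every residual evaluation, so the analytically computed scales (and, when sensi is true, their gradients) stay consistent with the current dynamic parameters before the residuals are assembled in the loop below.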
40 changes: 40 additions & 0 deletions arFramework3/arDisableHierarchical.m
@@ -0,0 +1,40 @@
function arDisableHierarchical()
%arDisableHierarchical Restore the original user settings overridden by
% arInitHierarchical and set ar.config.useHierarchical to false.
%
% One may say that arInitHierarchical enables the "hierarchical mode" of
% optimization and this function switches back to the "normal mode".
%
% In particular, this function attempts to restore the lower and upper
% bounds of the scale parameters subject to hierarchical optimization.
% If a scale computed during hierarchical optimization does not fit
% within the original bounds, the bounds are widened to cover the
% computed value.
%
% See also ARINITHIERARCHICAL

global ar

assert(isfield(ar.config,'useHierarchical') && ar.config.useHierarchical, ...
['Hierarchical optimization is not enabled. You have not called arInitHierarchical ', ...
'or there are no scale parameters suitable for hierarchical optimization in your observables.'])

for is = 1:length(ar.scales)
pLink = ar.scales(is).pLink;
ar.qFit(pLink) = ar.scales(is).backup_user.qFit;
ar.qLog10(pLink) = ar.scales(is).backup_user.qLog10;
ar.lb(pLink) = ar.scales(is).backup_user.lb;
ar.ub(pLink) = ar.scales(is).backup_user.ub;
if isnan(ar.p(pLink)) % This happens when this function is called immediately after arInitHierarchical
ar.p(pLink) = ar.scales(is).backup_user.p;
elseif ar.qLog10(pLink) % Hierarchical mode always uses linear scale for the scale parameters, so when disabling this mode we apply the log10 scale if relevant
ar.p(pLink) = log10(ar.p(pLink));
end
if ar.lb(pLink) > ar.p(pLink)
ar.lb(pLink) = ar.lb(pLink) - 0.2*(ar.ub(pLink) - ar.lb(pLink));
elseif ar.ub(pLink) < ar.p(pLink)
ar.ub(pLink) = ar.ub(pLink) + 0.2*(ar.ub(pLink) - ar.lb(pLink));
end
end

ar.config.useHierarchical = false;
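
A minimal usage sketch of the hierarchical mode (assuming a model and data are already loaded and compiled into the global ar struct; arInitHierarchical is the initialization routine referenced above, and arFit is D2D's standard fitting routine):

arInitHierarchical      % detect suitable scale parameters and enable ar.config.useHierarchical
arFit                   % each residual evaluation now calls arCalcHierarchical via arCalcRes,
                        % so the scales are computed analytically rather than fitted numerically
arDisableHierarchical   % restore the original qFit, qLog10 and bounds of the scale parameters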