function [MLE] = HoopStats_HardCluster(Responses,Targets)
% "HoopStats_HardCluster"
% R02.00
% By Sam Berens (sam.berens@york.ac.uk).
%
% Estimates a mixture model for circularly distributed accuracy data using
% a hard clustering method. At present, this implementation is only set up
% to fit one target distribution and one uniform component.
%
% [MLE] = HoopStats_HardCluster(Responses,Targets)
%
% INPUT:
%   - "Responses" : an nx1 numeric denoting n angles (in radians) of
%                   circular responses;
%   - "Targets" : an nxt numeric denoting n angles (in radians) of t
%                 targets distributions. If sum(isnan(Targets),2) ==
%                 size(Targets,1), then response to target mappings are
%                 assumed to be mutually exclusive;
%
% OUTPUT:
%   - "MLE" : The best fitting model;

%% Initial setup:
% Convert the response and target angles into angular accuracy statistics
% (stored in Accuracy), with an accuracy of zero indicating a perfect
% response. wrapToPi maps the differences onto the interval [-pi, pi]:
Accuracy = wrapToPi(Responses - Targets);

% Get SortedIndices which lists the index positions of angles in Accuracy
% sorted from lowest, in SortedIndices(1), to highest, in
% SortedIndices(end):
[~,SortedIndices] = sort(abs(Accuracy));
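
% Because the responses are sorted by absolute accuracy, the first nHits
% entries of SortedIndices always index the nHits most accurate responses.
% This is what makes the clustering "hard": each response is assigned
% outright to either the target or the uniform component at every step of
% the search below.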

% Define the upper limit of the search (the maximum number of responses
% that may be assigned to the target distribution), then preallocate
% arrays for the estimated values of the Prior and K parameters as well as
% the negative log-likelihood:
SearchLim = 40;
T0_Prior = nan(SearchLim,1);
T0_K = nan(SearchLim,1);
NLL = nan(SearchLim,1);

% Set the probability density for the circular uniform component:
C0_Pd = ones(size(Accuracy)) .* (1/(2*pi));

%% Search through plausible values of T0_Prior:
% Loop over a range of values for the prior probability of the target
% distribution. On each iteration, increase the prior such that the number
% of responses belonging to the target distribution (nHits) increases by
% one:
for nHits = 2:1:SearchLim
    
    % P denotes the current value of T0_Prior being tested:
    P = nHits / numel(Accuracy);
    
    % Get the indices of responses that are being grouped into the von
    % Mises distribution:
    IndicesOfHits = SortedIndices(1:nHits);
    
    % Get the subset of responses that have been grouped into the von Mises
    % distribution (T0Responses) using a logical selector, W:
    W = false(size(Accuracy));
    W(IndicesOfHits) = true;
    T0Responses = Accuracy(W);
    
    % Compute the resultant of T0Responses along the target direction (R),
    % convert it to a concentration parameter (K), and correct this value
    % for biases that affect small samples:
    R = real(mean(exp(T0Responses.*1i)));
    K = HoopStats_R2K(R);
    K = HoopStats_KCorrection(numel(Accuracy),P,K);
    
    % Calculate the weighted likelihood of each response:
    T0_Pd = HoopStats_VonmFit(0,K,Accuracy);
    W = (P.*T0_Pd) ./ ((P.*T0_Pd) + ((1-P).*C0_Pd));
    Lpost = (W.*T0_Pd) + ((1-W).*C0_Pd);
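    % That is, for each response i:
    %   W_i     = P*f_vM(x_i) / (P*f_vM(x_i) + (1-P)*(1/(2*pi)))
    %   Lpost_i = W_i*f_vM(x_i) + (1-W_i)*(1/(2*pi))
    % where f_vM is the von Mises density with mean 0 and concentration K.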
    
    % Save the current values of P, K and the negative log-likelihood (NLL):
    T0_Prior(nHits) = P;
    T0_K(nHits) = K;
    NLL(nHits) = -sum(log(Lpost));
    
end

%% Identify the best fitting step:
% Loop through each value of the negative log-likelihood and find a local
% minimum value by breaking out of the loop when the negative
% log-likelihood increases:
for iNLL = 2:1:(SearchLim-1)
    dL = NLL(iNLL+1) - NLL(iNLL);
    if dL >= 0
        break
    end
end

% If we reached the end of the above loop without finding a local minimum,
% set iNLL to 1. This will point to the first entries of T0_Prior, T0_K,
% and NLL which, by definition, are all NaNs (the search above starts at
% nHits = 2):
if iNLL == (SearchLim-1)
    iNLL = 1;
end

% Pull out the best fitting parameters using the index (iNLL) identified
% above:
Best.T0_Prior = T0_Prior(iNLL);
Best.T0_K = T0_K(iNLL);
Best.NLL = NLL(iNLL);

% Calculate the delta BIC of the best fitting model relative to a purely
% uniform (guessing) model:
Best.dBIC = 2*(log(numel(Targets))...
    + Best.NLL + (log(1/(2*pi))*numel(Targets)));
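% Note: this expression equals the BIC of the two-parameter mixture (Prior
% and K; Mu is fixed at 0), 2*log(n) + 2*NLL, minus the BIC of the
% zero-parameter circular uniform model, -2*n*log(1/(2*pi)), where
% n = numel(Targets).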

% Cap the estimate of K to have a maximum value of 30. Higher values most
% likely indicate singularities:
if Best.T0_K > 30
    Best.T0_K = 30;
end
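% (Very large values of K arise when the responses assigned to the target
% distribution are nearly identical, so the mean resultant length
% approaches 1 and the estimated concentration diverges.)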


if ((Best.T0_Prior * numel(Targets)) < 8)...
        || (Best.dBIC > -10) || isnan(Best.NLL)
    % Do not trust the results of the hard clustering method if the target
    % distribution is based on fewer than 8 responses, the delta BIC is
    % greater than an arbitrary threshold (-10), or no local minimum was
    % found in the negative log-likelihood function (indicated by Best.NLL
    % being a NaN):
    MLE = [];
    
else
    % Otherwise, save the result in a MLE output structure:
    MLE.Iteration = NaN;
    MLE.T0.Mu = 0;
    MLE.T0.K = Best.T0_K;
    MLE.T0.Prior = Best.T0_Prior;
    MLE.C0.Prior = 1 - Best.T0_Prior;
    MLE.NLL = Best.NLL;
    MLE.MaxDeltaParam = NaN;
    MLE.deltaBIC = Best.dBIC;
end
return