1 /++
Module contains classes that optimize mixture weights over a sliding window.
------
problem: p' = argmin f(p), p_i >= 0, Σ_i p_i = 1.

p - mixture weights,
f(p) = u(Wp),
u(ω) - convex function,
W - matrix of features (n rows, k columns),
k - length of mixture weights,
n - length of sample; n may vary (sliding window).
12 ------
13 
The same problem restated as likelihood maximization:
------
problem: p' = argmax L(p), p_i >= 0, Σ_i p_i = 1.

L(p) = u(Wp),
ω = Wp,
u(ω) = Σ_j log(ω_j)  (log-likelihood)

p - mixture weights,
f_i - mixture components (probability density functions),
W - matrix of component density values (n rows, k columns),
W[j, i] = f_i(x_j), 1 <= i <= k, 1 <= j <= n.

k - length of mixture weights (number of components),
n - length of sample; n may vary (sliding window).
29 ------
30 
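Example:
An illustrative sketch of likelihood maximization. The normal densities, the
sample, and the stopping tolerance below are assumptions, not part of the library:
---
import std.math : exp, sqrt, PI;

// assumed mixture components: two normal densities with different means
static double normalPDF0(double x) { return exp(-x * x / 2) / sqrt(2 * PI); }
static double normalPDF1(double x) { return exp(-(x - 1) * (x - 1) / 2) / sqrt(2 * PI); }

auto pdfs = [&normalPDF0, &normalPDF1];
double[] sample = [0.1, 0.3, 1.2, 0.9, -0.4, 0.7];

auto optimizer = new LikelihoodAscentEM!double(pdfs.length, sample.length);
optimizer.put(pdfs, sample);
// stop when the normalized log-likelihood no longer improves noticeably
optimizer.optimize((double prev, double curr) => curr - prev < 1e-7);
const p = optimizer.weights; // estimated mixture weights
---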
31 +/
32 /**
33 Authors: [Ilya Yaroshenko](http://9il.github.io)
34 
35 Copyright: © 2014-2015 [Ilya Yaroshenko](http://9il.github.io)
36 
37 License: MIT
38 */
39 module atmosphere.mixture;
40 
41 import core.stdc.tgmath;
42 
43 import atmosphere.internal;
44 import std.range;
45 import std.traits;
46 import std.numeric : dotProduct;
47 import std.algorithm;
48 import std.typecons;
51 
52 import std.math : isNormal, LN2;
53 
54 /++
Base exception type for MixtureOptimizer.
56 +/
57 class MixtureOptimizerException : Exception
58 {
59 	/++
60 	Constructor which takes an error message.
61 	Params:
62 		msg  = Message describing the error.
63 		file = The file where the error occurred.
64 		line = The line where the error occurred.
65 	+/
66 	this(string msg, 
67 		string file = __FILE__, 
68 		size_t line = __LINE__) 
69 		@safe pure
70 	{
71 		super(msg, file, line);
72 	}
73 }
74 
/// Exception thrown when a sample value is incompatible with the mixture PDFs.
76 class FeaturesException : MixtureOptimizerException
77 {
78 	/++
79 	Constructor which takes an error message.
80 	Params:
81 		msg  = Message describing the error.
82 		file = The file where the error occurred.
83 		line = The line where the error occurred.
84 	+/
	this(string msg = "There is a value in the sample that is incompatible with all PDFs, or machine precision is insufficient.",
86 		string file = __FILE__, 
87 		size_t line = __LINE__) 
88 		@safe pure
89 	{
90 		super(msg, file, line);
91 	}
92 }
93 
94 
/++
Base class for mixture weight optimizers that operate over a sliding window of features.
+/
97 abstract class MixtureOptimizer(T)
98 	if(isFloatingPoint!T)
99 {
100 	import std.datetime;
101 
102 	package SlidingWindow!T _featuresT;
103 	package T[] _weights;
104 	package T[] _mixture;
105 
106 	/++
107 	Constructor
108 	Params:
109 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
111 	+/
112 	this(size_t k, size_t maxLength)
113 	in
114 	{
115 		assert(k);
116 		assert(maxLength);
117 	}
118 	body
119 	{
120 		_featuresT = SlidingWindow!T(k, maxLength);
121 		_weights = new T[k];
122 		_weights[] = T(1)/k;
123 		_mixture = new T[maxLength];
124 	}
125 
126 	/++
	Performs one iteration of the optimization algorithm: k coordinate steps for coordinate descent, or a single step for gradient descent and EM.
128 	Params:
129 		findRootTolerance = Defines an early termination condition. 
130 			Receives the current upper and lower bounds on the root. 
131 			The delegate must return true when these bounds are acceptable.
132 	See_Also: $(STDREF numeric, findRoot)
133 		evaluate
134 		optimize
135 	+/
136 	abstract void eval(scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null);
137 
138 	/++
	`update` is called whenever the mixture changes.
140 	+/
141 	abstract void update();
142 
143 final:
144 
145 	/++
	Performs `itersCount` iterations of `eval`.
	Params:
		itersCount = number of iterations to perform
149 		findRootTolerance = Defines an early termination condition. 
150 			Receives the current upper and lower bounds on the root. 
151 			The delegate must return true when these bounds are acceptable.
152 	See_Also: $(STDREF numeric, findRoot)
153 		eval
154 		optimize
155 	+/
156 	void evaluate(size_t itersCount, scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
157 	{
158 		foreach(_; 0..itersCount)
159 			eval(findRootTolerance);
160 	}
161 
162 	/++
	Performs `eval` repeatedly until `time` has elapsed.
	Params:
		time = time limit
166 		findRootTolerance = Defines an early termination condition. 
167 			Receives the current upper and lower bounds on the root. 
168 			The delegate must return true when these bounds are acceptable.
169 	See_Also: $(STDREF numeric, findRoot)
170 		eval
171 		optimize
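	Example:
	A sketch of a time-budgeted run; the 100 ms budget is an assumption:
	---
	import std.datetime : TickDuration;

	auto result = optimizer.evaluate(TickDuration.from!"msecs"(100));
	// result.duration   - elapsed time
	// result.itersCount - number of `eval` calls performed
	---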
172 	+/
173 	Tuple!(TickDuration, "duration", size_t, "itersCount") 
174 	evaluate(TickDuration time, scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
175 	{
176 		TickDuration dur;
177 		size_t iterCount;
178 		auto sw = StopWatch(AutoStart.yes);
179 		do
180 		{
181 			eval(findRootTolerance);	
182 			iterCount++;
183 			dur = sw.peek;	
184 		}
185 		while(dur < time);
186 		return typeof(return)(dur, iterCount);
187 	}
188 
189 	/++
190 	Performs optimization.
191 	Params:
		objectiveFunction = objective function to minimize; receives the current mixture.
		tolerance = Defines an early termination condition. 
			Receives the current and previous values of `objectiveFunction(mixture)` and of the weights. 
			The delegate must return true when the mixture and weights are acceptable. 
196 		findRootTolerance = Tolerance for inner optimization.
197 	See_Also: $(STDREF numeric, findRoot)
198 		eval
199 		evaluate
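	Example:
	A sketch that minimizes the negative normalized log-likelihood; the objective
	and the tolerance below are assumptions:
	---
	import std.algorithm : map, sum;
	import std.math : log;

	optimizer.optimize(
		delegate double(in double[] mixture) { return -mixture.map!log.sum / mixture.length; },
		(double fPrev, double f, in double[] wPrev, in double[] w) => fPrev - f < 1e-9
	);
	---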
200 	+/
201 	void optimize(
202 			scope T delegate(in T[] mixture) objectiveFunction, 
203 			scope bool delegate (T objectiveFunctionValuePrev, T objectiveFunctionValue, in T[] weightsPrev, in T[] weights) tolerance,
204 			scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
205 		)
206 	{
207 		assert(length);
208 		T objectiveFunctionValuePrev;
209 		T objectiveFunctionValue = objectiveFunction(mixture);
210 		T[] weightsPrev = new T[weights.length];
211 		do
212 		{
213 			objectiveFunctionValuePrev = objectiveFunctionValue;
214 			weightsPrev[] = weights[];
215 			eval(findRootTolerance);
216 			objectiveFunctionValue = objectiveFunction(mixture);
217 		}
218 		while(!tolerance(objectiveFunctionValuePrev, objectiveFunctionValue, weightsPrev, weights));
219 	}
220 
221 	/++
222 	Performs optimization.
223 	Params:
		objectiveFunction = objective function to minimize; receives the current mixture.
		tolerance = Defines an early termination condition. 
			Receives the current and previous values of `objectiveFunction(mixture)`. 
			The delegate must return true when the mixture is acceptable. 
228 		findRootTolerance = Tolerance for inner optimization.
229 	See_Also: $(STDREF numeric, findRoot)
230 		eval
231 		evaluate
232 	+/
233 	void optimize
234 	(
235 		scope T delegate(in T[] mixture) objectiveFunction, 
236 		scope bool delegate (T objectiveFunctionValuePrev, T objectiveFunctionValue) tolerance,
237 		scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
238 	)
239 	{
240 		assert(length);
241 		T objectiveFunctionValuePrev;
242 		T objectiveFunctionValue = objectiveFunction(mixture);
243 		do
244 		{
245 			objectiveFunctionValuePrev = objectiveFunctionValue;
246 			eval(findRootTolerance);
247 			objectiveFunctionValue = objectiveFunction(mixture);
248 		}
249 		while(!tolerance(objectiveFunctionValuePrev, objectiveFunctionValue));
250 	}
251 
252 	/++
253 	Performs optimization.
254 	Params:
		tolerance = Defines an early termination condition. 
			Receives the current and previous versions of the weights. 
			The delegate must return true when the weights are acceptable. 
258 		findRootTolerance = Tolerance for inner optimization.
259 	See_Also: $(STDREF numeric, findRoot)
260 		eval
261 		evaluate
262 	+/
263 	void optimize
264 	(
265 		scope bool delegate (in T[] weightsPrev, in T[] weights) tolerance,
266 		scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
267 	)
268 	{
269 		assert(length);
270 		T[] weightsPrev = new T[weights.length];
271 		do
272 		{
273 			weightsPrev[] = weights[];
274 			eval(findRootTolerance);
275 		}
276 		while(!tolerance(weightsPrev, weights));
277 	}
278 
279 	/++
	Appends one new feature for each component.
281 	Params:
282 		features = One feature per component.
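	Example:
	A sketch, assuming a mixture of three components:
	---
	optimizer.put([0.23, 0.51, 0.14]); // one new feature per component
	---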
283 	+/
284 	void put(Range)(Range features)
285 	if (isInputRange!Range && hasLength!Range && isNumeric!(ElementType!Range))
286 	in
287 	{
		assert(_featuresT.matrix.length == features.length);
289 		assert(_featuresT.matrix.width < _featuresT.matrix.shift);
290 	}
291 	body
292 	{
293 		_featuresT.put(features);
294 		updateMixtureBack;
295 	}
296 
297 	/++
	Appends several new features for each component.
	Params:
		featuresROR = Range of observations; each observation is a range holding one feature per component.
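	Example:
	A sketch of sliding the window, assuming a mixture of two components:
	---
	// append two new observations (one row per observation) ...
	optimizer.put([[0.23, 0.51], [0.18, 0.47]]);
	// ... then drop the two oldest observations
	optimizer.popFrontN(2);
	---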
301 	+/
302 	void put(RangeOfRanges)(RangeOfRanges featuresROR)
303 	if (isInputRange!RangeOfRanges && hasLength!RangeOfRanges && 
304 		isInputRange!(ElementType!RangeOfRanges) && hasLength!(ElementType!RangeOfRanges) && 
305 		isNumeric!(ElementType!(ElementType!RangeOfRanges)))
306 	in
307 	{
308 		assert(_featuresT.matrix.length == featuresROR.front.length);
309 		assert(_featuresT.matrix.width + featuresROR.length <= _featuresT.matrix.shift);
310 	}
311 	body
312 	{
313 		const n = featuresROR.length;
314 		.put(_featuresT, featuresROR);
315 		updateMixtureBackN(n);
316 	}
317 
318 	/++
319 	Returns: current length of features for each component
320 	+/
321 	size_t length() @property const
322 	{
323 		return _featuresT.matrix.width;
324 	}
325 
326 	/++
327 	Returns: maximal allowed length of features
328 	+/
329 	size_t maxLength() @property const
330 	{
331 		return _featuresT.matrix.shift;
332 	}
333 
334 
335 	/++
336 	Reset length of features to zero.
337 	+/
338 	void reset()
339 	{
340 		_featuresT.reset;
341 	}
342 
343 	/++
344 	Remove one front feature for each component.
345 	+/
346 	void popFront()
347 	in
348 	{
349 		assert(length);
350 	}
351 	body
352 	{
353 		_featuresT.popFront;
354 		_mixture[0.._featuresT.length] = _mixture[1.._featuresT.length+1];
355 	}
356 
357 	/++
358 	Remove n front features for each component.
359 	Params:
		n = number of features to remove
361 	+/
362 	void popFrontN(size_t n)
363 	in
364 	{
365 		assert(length >= n);
366 	}
367 	body
368 	{
369 		_featuresT.popFrontN(n);
370 		_mixture[0.._featuresT.length] = _mixture[n.._featuresT.length+n];
371 	}
372 
373 	/++
	Returns: Range of ranges of features for each component, i.e. a matrix with k rows and n columns.
		This is the internal representation; it may be invalidated by subsequent method calls.
376 	+/
377 	Matrix!(const(T)) features() const
378 	{
379 		return cast(typeof(return))_featuresT.matrix;
380 	}
381 
382 	/++
383 	Returns: Const slice of the internal mixture representation.
384 	Example:
385 	---
386 	double objectiveFunction(in double[])
387 	{
388 		...
389 	}
390 
391 	//save slice
392 	auto mixture = optimizer.mixture;
393 
394 	auto value0 = objectiveFunction(mixture);
395 	optimizer.eval;
396 	auto value1 = objectiveFunction(mixture);
397 
398 
399 	//use `.dup` or copy to save current mixture
400 
401 	//1: .dup
402 	auto mixtureSave1 = mixture.dup;
403 
404 	//2: create array
405 	auto mixtureSave2 = new double[mixture.length];
406 	//2: copy
407 	mixtureSave2[] = mixture[];
408 	---
409 	+/
410 	const(T)[] mixture() @property const
411 	{
412 		return _mixture[0.._featuresT.length];
413 	}
414 
415 	/++
416 	Returns: Const slice of the internal weights representation.
417 	Example:
418 	---
419 	//save slice
420 	auto weights = optimizer.weights;
421 
422 	//use `.dup` or copy to save current weights
423 
424 	//1: .dup
425 	auto weightsSave1 = weights.dup;
426 
427 	//2: create array
428 	auto weightsSave2 = new double[weights.length];
429 	//2: copy
430 	weightsSave2[] = weights[];
431 	---
432 	+/
433 	const(T)[] weights() @property const
434 	{
435 		return _weights;
436 	}
437 
438 	/++
	Sets the mixture weights and calls [update](update).
440 	Params:
441 		_weights = new mixture weights
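	Example:
	A sketch, assuming a mixture of three components:
	---
	optimizer.weights = [0.5, 0.3, 0.2];
	---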
442 	+/
443 	void weights(Range)(Range _weights) @property
444 		if(isInputRange!Range) 
445 	{
446 		_weights.copy(this._weights);
447 		updateMixture;
448 	}
449 
450 	package void updateMixture()
451 	{
452 		if (length)
453 		{
454 			mix(cast(Matrix!(const T))_featuresT.matrix, _weights, _mixture[0.._featuresT.matrix.width]);
455 			update();			
456 		}
457 	}
458 
459 	package void updateMixtureBack()
460 	in
461 	{
462 		assert(length);
463 	}
464 	body
465 	{
466 		_mixture[_featuresT.matrix.width-1] = dotProduct(_weights, _featuresT.back);
467 		update();
468 	}
469 
470 	package void updateMixtureBackN(size_t n)
471 	in
472 	{
473 		assert(length >= n);
474 	}
475 	body
476 	{
		mix(cast(Matrix!(const T))_featuresT[$-n..$].matrix, _weights, _mixture[_featuresT.matrix.width-n.._featuresT.matrix.width]);
478 		update();
479 	}
480 
481 	package void updateMixturePopBack()
482 	in
483 	{
484 		assert(length);
485 	}
486 	body
487 	{
488 		updateMixturePopBackN(1);
489 	}
490 
491 	package void updateMixturePopBackN(size_t n)
492 	in
493 	{
494 		assert(length >= n);
495 	}
496 	body
497 	{
498 		_mixture[0.._featuresT.matrix.width-n] = _mixture[n.._featuresT.matrix.width];
499 		update();
500 	}
501 }
502 
503 
504 /++
505 Params:
506 	Gradient = Gradient of the objective function. `Gradient(a, b)` should perform `b = grad_f(a)`.
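
For example, the gradient of the log-likelihood objective `u(ω) = Σ_j log(ω_j)` is `1/ω_j` elementwise.
A sketch of such an instantiation (the alias name is illustrative; this is essentially what `LikelihoodAscentEM` uses):
---
alias LogLikelihoodEM = ExpectationMaximization!((a, b) { foreach(i, ai; a) b[i] = 1 / ai; }, double);
---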
507 +/
508 class ExpectationMaximization(alias Gradient, T) : MixtureOptimizer!T
509 	if(isFloatingPoint!T)
510 {
511 	private T[] pi;
512 	private T[] c;
513 
514 	/++
515 	Constructor
516 	Params:
517 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
519 	+/
520 	this(size_t k, size_t maxLength)
521 	{
522 		super(k, maxLength);
523 		pi = new T[maxLength];
524 		c = new T[k];
525 	}
526 
527 	final override void eval(scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
528 	{
529 		EMIteration!(Gradient, T)(cast(Matrix!(const T))_featuresT.matrix, _weights, mixture, pi[0.._featuresT.matrix.width], c);
530 		updateMixture;
531 	}
532 
533 	override void update(){};
534 }
535 
536 
537 /++
538 Params:
539 	Gradient = Gradient of the objective function. `Gradient(a, b)` should perform `b = grad_f(a)`.
540 +/
541 class GradientDescent(alias Gradient, T) : MixtureOptimizer!T
542 	if(isFloatingPoint!T)
543 {
544 	private T[] pi;
545 	private T[] xi;
546 	private T[] gamma;
547 	private T[] c;
548 
549 	/++
550 	Constructor
551 	Params:
552 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
554 	+/
555 	this(size_t k, size_t maxLength)
556 	{
557 		super(k, maxLength);
558 		pi = new T[maxLength];
559 		xi = new T[maxLength];
560 		gamma = new T[maxLength];
561 		c = new T[k];
562 	}
563 
564 	final override void eval(scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
565 	{
566 		gradientDescentIteration!(Gradient, T)(cast(Matrix!(const T))_featuresT.matrix, _weights, mixture, pi[0.._featuresT.matrix.width], xi[0.._featuresT.matrix.width], gamma[0.._featuresT.matrix.width], c, findRootTolerance is null ? (a, b) => false : findRootTolerance);
567 		updateMixture;
568 	}
569 
570 	override void update(){};
571 }
572 
573 
574 /++
575 Params:
576 	PartialDerivative = Partial derivative `y` of objective convex function `u`: `du/dω_j = y(ω_j), 1 <= j <= n`.
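
For likelihood maximization the minimized convex function is `u(ω) = -Σ_j log(ω_j)`, so the partial derivative is `-1/ω_j`.
A sketch of such an instantiation (the alias name is illustrative; this is what `LikelihoodAscentGradient` passes):
---
alias Descent = GradientDescentPartial!("-1/a", double);
---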
577 +/
578 class GradientDescentPartial(alias PartialDerivative, T) : MixtureOptimizer!T
579 	if(isFloatingPoint!T)
580 {
581 	private T[] pi;
582 	private T[] xi;
583 	private T[] c;
584 
585 	/++
586 	Constructor
587 	Params:
588 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
590 	+/
591 	this(size_t k, size_t maxLength)
592 	{
593 		super(k, maxLength);
594 		pi = new T[maxLength];
595 		xi = new T[maxLength];
596 		c = new T[k];
597 	}
598 
599 	final override void eval(scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
600 	{
601 		gradientDescentIterationPartial!(PartialDerivative, T)(cast(Matrix!(const T))_featuresT.matrix, _weights, _mixture[0.._featuresT.matrix.width], pi[0.._featuresT.matrix.width], xi[0.._featuresT.matrix.width], c, findRootTolerance is null ? (a, b) => false : findRootTolerance);
602 		updateMixture;
603 	}
604 
605 	override void update(){};
606 }
607 
608 
609 /++
610 Params:
611 	Gradient = Gradient of the objective function. `Gradient(a, b)` should perform `b = grad_f(a)`.
612 +/
613 class CoordinateDescent(alias Gradient, T) : MixtureOptimizer!T
614 	if(isFloatingPoint!T)
615 {
616 	private T[] pi;
617 	private T[] xi;
618 	private T[] gamma;
619 
620 	/++
621 	Constructor
622 	Params:
623 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
625 	+/
626 	this(size_t k, size_t maxLength)
627 	{
628 		super(k, maxLength);
629 		pi = new T[maxLength];
630 		xi = new T[maxLength];
631 		gamma = new T[maxLength];
632 	}
633 
634 	final override void eval(scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
635 	{
636 		immutable n = _featuresT.matrix.width;
637 		coordinateDescentIteration!(Gradient, T)(cast(Matrix!(const T))_featuresT.matrix, _weights, _mixture[0..n], pi[0..n], xi[0..n], gamma[0..n], findRootTolerance is null ? (a, b) => false : findRootTolerance);
638 		updateMixture;
639 	}
640 
641 	override void update(){};
642 }
643 
644 
645 /++
646 Params:
647 	PartialDerivative = Partial derivative `y` of objective convex function `u`: `du/dω_j = y(ω_j), 1 <= j <= n`.
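
For likelihood maximization the minimized convex function is `u(ω) = -Σ_j log(ω_j)`, so the partial derivative is `-1/ω_j`.
A sketch of such an instantiation (the alias name is illustrative; this is what `LikelihoodAscentCoordinate` passes):
---
alias Descent = CoordinateDescentPartial!("-1/a", double);
---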
648 +/
649 class CoordinateDescentPartial(alias PartialDerivative, T) : MixtureOptimizer!T
650 	if(isFloatingPoint!T)
651 {
652 	private T[] pi;
653 
654 	/++
655 	Constructor
656 	Params:
657 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
659 	+/
660 	this(size_t k, size_t maxLength)
661 	{
662 		super(k, maxLength);
663 		pi = new T[maxLength];
664 	}
665 
666 	final override void eval(scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null)
667 	{
668 		coordinateDescentIterationPartial!(PartialDerivative, T)(cast(Matrix!(const T))_featuresT.matrix, _weights, _mixture[0.._featuresT.matrix.width], pi[0.._featuresT.matrix.width], findRootTolerance is null ? (a, b) => false : findRootTolerance);
669 		updateMixture;
670 	}
671 
672 	override void update(){};
673 }
674 
675 
/++
Interface for maximization of the normalized log-likelihood of a mixture.
+/
678 interface LikelihoodAscent(T)
679 	if(isFloatingPoint!T)
680 {
681 	/++
682 	Params:
		pdfs = Probability density functions (callables), one per component.
		sample = Sample values.
685 	See_Also: $(STDREF traits, isCallable)
686 	+/
687 	void put(PDFRange, SampleRange)(PDFRange pdfs, SampleRange sample)
688 		if (isInputRange!PDFRange && hasLength!PDFRange && isCallable!(ElementType!PDFRange));
689 
690 	/++
691 	Performs optimization.
692 	Params:
693 		tolerance = Defines an early termination condition. 
694 			Receives the current and previous versions of normalized log-likelihood and weights. 
			The delegate must return true when the likelihood and weights are acceptable. 
696 		findRootTolerance = Tolerance for inner optimization.
697 	See_Also: $(STDREF numeric, findRoot)
698 	+/
699 	void optimize
700 	(
701 		scope bool delegate (T likelihoodValuePrev, T likelihoodValue, in T[] weightsPrev, in T[] weights) tolerance,
702 		scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
703 	);
704 
705 	/++
706 	Performs optimization.
707 	Params:
708 		tolerance = Defines an early termination condition. 
709 			Receives the current and previous versions of normalized log-likelihood. 
			The delegate must return true when the likelihood is acceptable. 
711 		findRootTolerance = Tolerance for inner optimization.
712 	Throws: [FeaturesException](atmosphere/mixture/FeaturesException.html) if [isFeaturesCorrect](atmosphere/mixture/LikelihoodAscent.isFeaturesCorrect.html) is false.
713 	See_Also: $(STDREF numeric, findRoot)
714 	+/
715 	void optimize
716 	(
717 		scope bool delegate (T likelihoodValuePrev, T likelihoodValue) tolerance,
718 		scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
719 	);
720 
721 	/++
	Returns: true if, for every value in the sample, there exists a probability density function in the mixture such that `isNormal(PDF(value) * (1.0 / sample.length))`.
723 	See_Also: $(STDREF math, isNormal)
724 	+/
725 	bool isFeaturesCorrect() const;
726 
727 
728 	/++
729 	Returns: normalized log-likelihood.
730 	+/
731 	T likelihood() @property const;
732 }
733 
734 
/++
Likelihood maximization based on coordinate descent.
+/
737 class LikelihoodAscentCoordinate(T) : CoordinateDescentPartial!("-1/a", T), LikelihoodAscent!T
738 	if(isFloatingPoint!T)
739 {
740 	/++
741 	Constructor
742 	Params:
743 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
745 	+/
746 	this(size_t k, size_t maxLength)
747 	{
748 		super(k, maxLength);
749 	}
750 
751 	mixin LikelihoodAscentTemplate!T;
752 }
753 
754 
/++
Likelihood maximization based on gradient descent.
+/
757 class LikelihoodAscentGradient(T) : GradientDescentPartial!("-1/a", T), LikelihoodAscent!T
758 	if(isFloatingPoint!T)
759 {
760 	/++
761 	Constructor
762 	Params:
763 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
765 	+/
766 	this(size_t k, size_t maxLength)
767 	{
768 		super(k, maxLength);
769 	}
770 
771 	mixin LikelihoodAscentTemplate!T;
772 }
773 
774 
/++
Likelihood maximization based on the expectation-maximization (EM) algorithm.
+/
777 class LikelihoodAscentEM(T) : ExpectationMaximization!((a, b){foreach(i, e; a) b[i] = 1/e;}, T), LikelihoodAscent!T
778 	if(isFloatingPoint!T)
779 {
780 	/++
781 	Constructor
782 	Params:
783 		k = number of components
		maxLength = maximal length of the features window. For likelihood maximization, maxLength is the maximal sample length.
785 	+/
786 	this(size_t k, size_t maxLength)
787 	{
788 		super(k, maxLength);
789 	}
790 
791 	mixin LikelihoodAscentTemplate!T;
792 }
793 
794 
795 package mixin template LikelihoodAscentTemplate(T)
796 {
797 	void put(PDFRange, SampleRange)(PDFRange pdfs, SampleRange sample)
798 		if (isInputRange!PDFRange && hasLength!PDFRange && isCallable!(ElementType!PDFRange))
799 	in
800 	{
801 		assert(pdfs.length == _featuresT.matrix.height);
802 	}
803 	body
804 	{
805 		import std.algorithm : map;
806 		super.put(sample.map!(x => pdfs.map!(pdf => pdf(x))));
807 		if (!isFeaturesCorrect)
808 			throw new FeaturesException;
809 	}
810 
811 	void optimize
812 	(
813 		scope bool delegate (T likelihoodValuePrev, T likelihoodValue, in T[] weightsPrev, in T[] weights) tolerance,
814 		scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
815 	)
816 	{
817 		import std.functional : toDelegate;
818 		if (!isFeaturesCorrect)
819 			throw new FeaturesException;
820 		super.optimize(toDelegate(&_likelihood_!T), tolerance, findRootTolerance);
821 	}
822 
823 	void optimize
824 	(
825 		scope bool delegate (T likelihoodValuePrev, T likelihoodValue) tolerance,
826 		scope bool delegate(T a, T b) @nogc nothrow findRootTolerance = null,
827 	)
828 	{
829 		import std.functional : toDelegate;
830 		if (!isFeaturesCorrect)
831 			throw new FeaturesException;
832 		super.optimize(toDelegate(&_likelihood_!T), tolerance, findRootTolerance);
833 	}
834 
835 	bool isFeaturesCorrect() const
836 	{
837 		immutable c = T(1) / features.length;
838 		return features.transposed.all!(any!(x => isNormal(c * x)));
839 	}
840 
841 	T likelihood() @property const
842 	{
843 		return _likelihood_!T(mixture);
844 	}
845 }
846 
847 
848 unittest {
849 	alias C0 = CoordinateDescent!((a, b){}, float);
850 	alias C3 = ExpectationMaximization!((a, b){}, float);
851 	alias C1 = LikelihoodAscent!float;
852 	alias C10 = LikelihoodAscentGradient!float;
853 	alias C11 = LikelihoodAscentCoordinate!float;
854 	alias C12 = LikelihoodAscentEM!float;
855 	alias C2 = GradientDescent!((a, b){}, float);
856 }
857 
858 
859 unittest {
860 	alias C0 = CoordinateDescent!((a, b){}, double);
861 	alias C3 = ExpectationMaximization!((a, b){}, double);
862 	alias C1 = LikelihoodAscent!double;
863 	alias C10 = LikelihoodAscentGradient!double;
864 	alias C11 = LikelihoodAscentCoordinate!double;
865 	alias C12 = LikelihoodAscentEM!double;
866 	alias C2 = GradientDescent!((a, b){}, double);
867 }
868 
869 
870 unittest {
871 	alias C0 = CoordinateDescent!((a, b){}, real);
872 	alias C3 = ExpectationMaximization!((a, b){}, real);
873 	alias C1 = LikelihoodAscent!real;
874 	alias C10 = LikelihoodAscentGradient!real;
875 	alias C11 = LikelihoodAscentCoordinate!real;
876 	alias C12 = LikelihoodAscentEM!real;
877 	alias C2 = GradientDescent!((a, b){}, real);
878 }
879 
880 package T _likelihood_(T)(in T[] mixture)
881 {
882 	import atmosphere.summation : sumOfLog2s;
883 	// FIXME: Unqual
884 	return T(LN2) * mixture.map!(x => cast(Unqual!T)x).sumOfLog2s / mixture.length;
885 }