001/*- 002 ******************************************************************************* 003 * Copyright (c) 2011, 2016 Diamond Light Source Ltd. 004 * All rights reserved. This program and the accompanying materials 005 * are made available under the terms of the Eclipse Public License v1.0 006 * which accompanies this distribution, and is available at 007 * http://www.eclipse.org/legal/epl-v10.html 008 * 009 * Contributors: 010 * Peter Chang - initial API and implementation and/or initial documentation 011 *******************************************************************************/ 012 013package org.eclipse.january.dataset; 014 015import java.lang.ref.SoftReference; 016import java.util.ArrayList; 017import java.util.Collections; 018import java.util.HashMap; 019import java.util.List; 020import java.util.Map; 021import java.util.TreeMap; 022 023import org.apache.commons.math3.complex.Complex; 024import org.apache.commons.math3.stat.descriptive.moment.Kurtosis; 025import org.apache.commons.math3.stat.descriptive.moment.Skewness; 026import org.eclipse.january.metadata.Dirtiable; 027import org.eclipse.january.metadata.MetadataType; 028 029 030/** 031 * Statistics class 032 * 033 * TODO Where is mode? 
http://en.wikipedia.org/wiki/Mode_(statistics)
 *
 */
public class Stats {

	// soft-reference wrapper so cached sorted copies and derived datasets can be
	// reclaimed by the garbage collector under memory pressure and recomputed later
	private static class ReferencedDataset extends SoftReference<Dataset> {
		public ReferencedDataset(Dataset d) {
			super(d);
		}
	}

	/**
	 * Quantile statistics cached as metadata on a dataset.
	 * <p>
	 * {@code T} is {@code Double} for elemental datasets and {@code double[]}
	 * (one value per element) for compound datasets. Whole-dataset quantiles are
	 * held strongly in {@link #qmap}; per-axis quantiles and sorted copies are
	 * held via soft references and so may be reclaimed. The {@link Dirtiable}
	 * flag marks the cache stale when the host dataset changes.
	 */
	private static class QStatisticsImpl<T> implements MetadataType {
		private static final long serialVersionUID = -3296671666463190388L;
		final static Double Q1 = 0.25;
		final static Double Q2 = 0.5;
		final static Double Q3 = 0.75;
		// quantile value -> result for the whole (flattened) dataset
		Map<Double, T> qmap = new HashMap<Double, T>();
		// axis -> (quantile value -> softly-held per-axis result)
		transient Map<Integer, Map<Double, ReferencedDataset>> aqmap = new HashMap<Integer, Map<Double, ReferencedDataset>>();
		transient ReferencedDataset s; // store 0th element
		// axis -> softly-held dataset sorted along that axis
		transient Map<Integer, ReferencedDataset> smap = new HashMap<>();

		@Dirtiable
		private boolean isDirty = true;

		@Override
		public QStatisticsImpl<T> clone() {
			return new QStatisticsImpl<T>(this);
		}

		public QStatisticsImpl() {
		}

		// copy constructor used by clone(); takes a view of the sorted store and
		// shallow-copies the per-axis maps
		private QStatisticsImpl(QStatisticsImpl<T> qstats) {
			if (qstats.s != null && qstats.s.get() != null) {
				s = new ReferencedDataset(qstats.s.get().getView(false));
			}
			qmap.putAll(qstats.qmap);
			for (Integer i : qstats.aqmap.keySet()) {
				aqmap.put(i, new HashMap<>(qstats.aqmap.get(i)));
			}
			smap.putAll(qstats.smap);
			isDirty = qstats.isDirty;
		}

		public void setQuantile(double q, T v) {
			qmap.put(q, v);
		}

		// may return null when the quantile has not been computed yet
		public T getQuantile(double q) {
			return qmap.get(q);
		}

		// lazily create the per-axis quantile map
		private Map<Double, ReferencedDataset> getMap(int axis) {
			Map<Double, ReferencedDataset> qm = aqmap.get(axis);
			if (qm == null) {
				qm = new HashMap<>();
				aqmap.put(axis, qm);
			}
			return qm;
		}

		public void setQuantile(int axis, double q, Dataset v) {
			Map<Double, ReferencedDataset> qm = getMap(axis);
			qm.put(q, new ReferencedDataset(v));
		}

		// may return null when never computed or when the soft reference was cleared
		public Dataset getQuantile(int axis, double q) {
			Map<Double, ReferencedDataset> qm = getMap(axis);
			ReferencedDataset rd = qm.get(q);
			return rd == null ? null : rd.get();
		}

		// may return null when never computed or when the soft reference was cleared
		Dataset getSortedDataset(int axis) {
			return smap.containsKey(axis) ? smap.get(axis).get() : null;
		}

		void setSortedDataset(int axis, Dataset v) {
			smap.put(axis, new ReferencedDataset(v));
		}
	}

	// calculates statistics and returns sorted dataset (0th element if compound)
	private static QStatisticsImpl<?> calcQuartileStats(final Dataset a) {
		Dataset s = null;
		final int is = a.getElementsPerItem();

		if (is == 1) {
			s = DatasetUtils.sort(a);

			QStatisticsImpl<Double> qstats = new QStatisticsImpl<Double>();

			qstats.setQuantile(QStatisticsImpl.Q1, pQuantile(s, QStatisticsImpl.Q1));
			qstats.setQuantile(QStatisticsImpl.Q2, pQuantile(s, QStatisticsImpl.Q2));
			qstats.setQuantile(QStatisticsImpl.Q3, pQuantile(s, QStatisticsImpl.Q3));
			qstats.s = new ReferencedDataset(s);
			return qstats;
		}

		// compound case: each element's sub-dataset is extracted and sorted independently
		QStatisticsImpl<double[]> qstats = new QStatisticsImpl<double[]>();

		Dataset w = DatasetFactory.zeros(1, a.getClass(), a.getShapeRef());
		double[] q1 = new double[is];
		double[] q2 = new double[is];
		double[] q3 = new double[is];
		qstats.setQuantile(QStatisticsImpl.Q1, q1);
		qstats.setQuantile(QStatisticsImpl.Q2, q2);
		qstats.setQuantile(QStatisticsImpl.Q3, q3);
		for (int j = 0; j < is; j++) {
			((CompoundDataset) a).copyElements(w, j);
			w.sort(null);

			q1[j] = pQuantile(w, QStatisticsImpl.Q1);
			q2[j] = pQuantile(w, QStatisticsImpl.Q2);
			q3[j] = pQuantile(w, QStatisticsImpl.Q3);
			if (j == 0)
				s = w.clone(); // only the sorted 0th element is retained
		}
		qstats.s = new ReferencedDataset(s);

		return qstats;
	}

	// fetch cached quartile statistics, recomputing when absent or stale
	static private QStatisticsImpl<?> getQStatistics(final Dataset a) {
		QStatisticsImpl<?> m = a.getFirstMetadata(QStatisticsImpl.class);
		if (m == null || m.isDirty) {
			m = calcQuartileStats(a);
			a.setMetadata(m);
		}
		return m;
	}
	/**
	 * Fetch cached per-axis quartile statistics for a dataset, computing
	 * Q1/Q2/Q3 along the given axis (and caching the sorted data) when absent
	 * or stale.
	 */
	static private QStatisticsImpl<?> getQStatistics(final Dataset a, final int axis) {
		final int is = a.getElementsPerItem();
		QStatisticsImpl<?> qstats = a.getFirstMetadata(QStatisticsImpl.class);

		if (qstats == null || qstats.isDirty) {
			if (is == 1) {
				qstats = new QStatisticsImpl<Double>();
			} else {
				qstats = new QStatisticsImpl<double[]>();
			}
			a.setMetadata(qstats);
		}

		// Q2 is used as the sentinel: if it is missing for this axis, compute all three
		if (qstats.getQuantile(axis, QStatisticsImpl.Q2) == null) {
			if (is == 1) {
				Dataset s = DatasetUtils.sort(a, axis);

				qstats.setQuantile(axis, QStatisticsImpl.Q1, pQuantile(s, axis, QStatisticsImpl.Q1));
				qstats.setQuantile(axis, QStatisticsImpl.Q2, pQuantile(s, axis, QStatisticsImpl.Q2));
				qstats.setQuantile(axis, QStatisticsImpl.Q3, pQuantile(s, axis, QStatisticsImpl.Q3));
				qstats.setSortedDataset(axis, s);
			} else {
				// compound case: sort each element's sub-dataset separately and
				// assemble per-element quantiles into compound results
				Dataset w = DatasetFactory.zeros(1, a.getClass(), a.getShapeRef());
				CompoundDoubleDataset q1 = null, q2 = null, q3 = null;
				for (int j = 0; j < is; j++) {
					((CompoundDataset) a).copyElements(w, j);
					w.sort(axis);

					final Dataset c = pQuantile(w, axis, QStatisticsImpl.Q1);
					if (j == 0) {
						// allocate results once the quantile shape is known
						q1 = DatasetFactory.zeros(is, CompoundDoubleDataset.class, c.getShapeRef());
						q2 = DatasetFactory.zeros(is, CompoundDoubleDataset.class, c.getShapeRef());
						q3 = DatasetFactory.zeros(is, CompoundDoubleDataset.class, c.getShapeRef());
					}
					q1.setElements(c, j);

					q2.setElements(pQuantile(w, axis, QStatisticsImpl.Q2), j);

					q3.setElements(pQuantile(w, axis, QStatisticsImpl.Q3), j);
				}
				qstats.setQuantile(axis, QStatisticsImpl.Q1, q1);
				qstats.setQuantile(axis, QStatisticsImpl.Q2, q2);
				qstats.setQuantile(axis, QStatisticsImpl.Q3, q3);
			}
		}

		return qstats;
	}

	// process a sorted dataset: linear interpolation between the two samples
	// bracketing the fractional sample position (s.getSize() - 1) * q
	private static double pQuantile(final Dataset s, final double q) {
		double f = (s.getSize() - 1) * q; // fraction of sample number
		if (f < 0)
			return Double.NaN; // empty dataset gives f == -q
		int qpt = (int) Math.floor(f); // quantile point
		f -= qpt;

		double quantile = s.getElementDoubleAbs(qpt);
		if (f > 0) {
			quantile = (1-f)*quantile + f*s.getElementDoubleAbs(qpt+1);
		}
		return quantile;
	}

	// process a sorted dataset and returns a double or compound double dataset
	// holding the q-th quantile along the given axis (axis dimension removed),
	// interpolating between slices qpt and qpt+1 when q does not fall on a sample
	private static Dataset pQuantile(final Dataset s, final int axis, final double q) {
		final int rank = s.getRank();
		final int is = s.getElementsPerItem();

		int[] oshape = s.getShape();

		double f = (oshape[axis] - 1) * q; // fraction of sample number
		int qpt = (int) Math.floor(f); // quantile point
		f -= qpt;

		oshape[axis] = 1;
		int[] qshape = ShapeUtils.squeezeShape(oshape, false);
		Dataset qds = DatasetFactory.zeros(is, CompoundDoubleDataset.class, qshape);

		IndexIterator qiter = qds.getIterator(true);
		int[] qpos = qiter.getPos();
		int[] spos = oshape; // oshape is reused as the source position buffer

		// copy the slice at index qpt along the axis
		while (qiter.hasNext()) {
			int i = 0;
			for (; i < axis; i++) {
				spos[i] = qpos[i];
			}
			spos[i++] = qpt;
			for (; i < rank; i++) {
				spos[i] = qpos[i-1];
			}

			Object obj = s.getObject(spos);
			qds.set(obj, qpos);
		}

		if (f > 0) {
			// interpolate: result = (1-f) * slice[qpt] + f * slice[qpt+1]
			qiter = qds.getIterator(true);
			qpos = qiter.getPos();
			qpt++;
			Dataset rds = DatasetFactory.zeros(is, CompoundDoubleDataset.class, qshape);

			while (qiter.hasNext()) {
				int i = 0;
				for (; i < axis; i++) {
					spos[i] = qpos[i];
				}
				spos[i++] = qpt;
				for (; i < rank; i++) {
					spos[i] = qpos[i-1];
				}

				Object obj = s.getObject(spos);
				rds.set(obj, qpos);
			}
			rds.imultiply(f);
			qds.imultiply(1-f);
			qds.iadd(rds);
		}

		return qds;
	}

	/**
	 * Calculate quantile of dataset which is defined as the inverse of the cumulative distribution function (CDF)
	 * @param a
	 * @param q
	 * @return point at which CDF has value q
	 */
	@SuppressWarnings("unchecked")
	public static
double quantile(final Dataset a, final double q) { 296 if (q < 0 || q > 1) { 297 throw new IllegalArgumentException("Quantile requested is outside [0,1]"); 298 } 299 QStatisticsImpl<Double> qs = (QStatisticsImpl<Double>) getQStatistics(a); 300 Double qv = qs.getQuantile(q); 301 if (qv == null) { 302 qv = pQuantile(qs.s.get(), q); 303 qs.setQuantile(q, qv); 304 } 305 return qv; 306 } 307 308 /** 309 * Calculate quantiles of dataset which is defined as the inverse of the cumulative distribution function (CDF) 310 * @param a 311 * @param values 312 * @return points at which CDF has given values 313 */ 314 @SuppressWarnings("unchecked") 315 public static double[] quantile(final Dataset a, final double... values) { 316 final double[] points = new double[values.length]; 317 QStatisticsImpl<Double> qs = (QStatisticsImpl<Double>) getQStatistics(a); 318 for (int i = 0; i < points.length; i++) { 319 final double q = values[i]; 320 if (q < 0 || q > 1) { 321 throw new IllegalArgumentException("Quantile requested is outside [0,1]"); 322 } 323 Double qv = qs.getQuantile(q); 324 if (qv == null) { 325 qv = pQuantile(qs.s.get(), q); 326 qs.setQuantile(q, qv); 327 } 328 points[i] = qv; 329 } 330 331 return points; 332 } 333 334 /** 335 * Calculate quantiles of dataset which is defined as the inverse of the cumulative distribution function (CDF) 336 * @param a 337 * @param axis 338 * @param values 339 * @return points at which CDF has given values 340 */ 341 @SuppressWarnings({ "unchecked" }) 342 public static Dataset[] quantile(final Dataset a, int axis, final double... 
values) { 343 final Dataset[] points = new Dataset[values.length]; 344 final int is = a.getElementsPerItem(); 345 axis = a.checkAxis(axis); 346 347 if (is == 1) { 348 QStatisticsImpl<Double> qs = (QStatisticsImpl<Double>) getQStatistics(a, axis); 349 for (int i = 0; i < points.length; i++) { 350 final double q = values[i]; 351 if (q < 0 || q > 1) { 352 throw new IllegalArgumentException("Quantile requested is outside [0,1]"); 353 } 354 Dataset qv = qs.getQuantile(axis, q); 355 if (qv == null) { 356 qv = pQuantile(qs.getSortedDataset(axis), axis, q); 357 qs.setQuantile(axis, q, qv); 358 } 359 points[i] = qv; 360 } 361 } else { 362 QStatisticsImpl<double[]> qs = (QStatisticsImpl<double[]>) getQStatistics(a); 363 Dataset w = DatasetFactory.zeros(1, a.getClass(), a.getShapeRef()); 364 for (int j = 0; j < is; j++) { 365 boolean copied = false; 366 367 for (int i = 0; i < points.length; i++) { 368 final double q = values[i]; 369 if (q < 0 || q > 1) { 370 throw new IllegalArgumentException("Quantile requested is outside [0,1]"); 371 } 372 Dataset qv = qs.getQuantile(axis, q); 373 if (qv == null) { 374 if (!copied) { 375 copied = true; 376 ((CompoundDataset) a).copyElements(w, j); 377 w.sort(axis); 378 } 379 qv = pQuantile(w, axis, q); 380 qs.setQuantile(axis, q, qv); 381 if (j == 0) { 382 points[i] = DatasetFactory.zeros(is, qv.getClass(), qv.getShapeRef()); 383 } 384 ((CompoundDoubleDataset) points[i]).setElements(qv, j); 385 } 386 } 387 } 388 } 389 390 return points; 391 } 392 393 /** 394 * @param a dataset 395 * @param axis 396 * @return median 397 */ 398 public static Dataset median(final Dataset a, int axis) { 399 axis = a.checkAxis(axis); 400 return getQStatistics(a, axis).getQuantile(axis, QStatisticsImpl.Q2); 401 } 402 403 /** 404 * @param a dataset 405 * @return median 406 */ 407 public static Object median(final Dataset a) { 408 return getQStatistics(a).getQuantile(QStatisticsImpl.Q2); 409 } 410 411 /** 412 * Interquartile range: Q3 - Q1 413 * @param a 414 * 
@return range 415 */ 416 @SuppressWarnings("unchecked") 417 public static Object iqr(final Dataset a) { 418 final int is = a.getElementsPerItem(); 419 if (is == 1) { 420 QStatisticsImpl<Double> qs = (QStatisticsImpl<Double>) getQStatistics(a); 421 return qs.getQuantile(QStatisticsImpl.Q3) - qs.getQuantile(QStatisticsImpl.Q1); 422 } 423 424 QStatisticsImpl<double[]> qs = (QStatisticsImpl<double[]>) getQStatistics(a); 425 double[] q1 = (double[]) qs.getQuantile(QStatisticsImpl.Q1); 426 double[] q3 = ((double[]) qs.getQuantile(QStatisticsImpl.Q3)).clone(); 427 for (int j = 0; j < is; j++) { 428 q3[j] -= q1[j]; 429 } 430 return q3; 431 } 432 433 /** 434 * Interquartile range: Q3 - Q1 435 * @param a 436 * @param axis 437 * @return range 438 */ 439 public static Dataset iqr(final Dataset a, int axis) { 440 axis = a.checkAxis(axis); 441 QStatisticsImpl<?> qs = getQStatistics(a, axis); 442 Dataset q3 = qs.getQuantile(axis, QStatisticsImpl.Q3); 443 444 return Maths.subtract(q3, qs.getQuantile(axis, QStatisticsImpl.Q1)); 445 } 446 447 static private HigherStatisticsImpl<?> getHigherStatistic(final Dataset a, boolean[] ignoreInvalids) { 448 boolean ignoreNaNs = false; 449 boolean ignoreInfs = false; 450 if (a.hasFloatingPointElements()) { 451 ignoreNaNs = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false; 452 ignoreInfs = ignoreInvalids != null && ignoreInvalids.length > 1 ? 
ignoreInvalids[1] : ignoreNaNs; 453 } 454 455 HigherStatisticsImpl<?> stats = a.getFirstMetadata(HigherStatisticsImpl.class); 456 if (stats == null || stats.isDirty) { 457 stats = calculateHigherMoments(a, ignoreNaNs, ignoreInfs); 458 a.setMetadata(stats); 459 } 460 461 return stats; 462 } 463 464 static private HigherStatisticsImpl<?> getHigherStatistic(final Dataset a, final boolean[] ignoreInvalids, final int axis) { 465 boolean ignoreNaNs = false; 466 boolean ignoreInfs = false; 467 if (a.hasFloatingPointElements()) { 468 ignoreNaNs = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false; 469 ignoreInfs = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : ignoreNaNs; 470 } 471 472 HigherStatisticsImpl<?> stats = a.getFirstMetadata(HigherStatisticsImpl.class); 473 if (stats == null || stats.getSkewness(axis) == null || stats.isDirty) { 474 stats = calculateHigherMoments(a, ignoreNaNs, ignoreInfs, axis); 475 a.setMetadata(stats); 476 } 477 478 return stats; 479 } 480 481 private static class HigherStatisticsImpl<T> implements MetadataType { 482 private static final long serialVersionUID = -6587974784104116792L; 483 T skewness; 484 T kurtosis; 485 transient Map<Integer, ReferencedDataset> smap = new HashMap<>(); 486 transient Map<Integer, ReferencedDataset> kmap = new HashMap<>(); 487 488 @Dirtiable 489 private boolean isDirty = true; 490 491 @Override 492 public HigherStatisticsImpl<T> clone() { 493 return new HigherStatisticsImpl<>(this); 494 } 495 496 public HigherStatisticsImpl() { 497 } 498 499 private HigherStatisticsImpl(HigherStatisticsImpl<T> hstats) { 500 skewness = hstats.skewness; 501 kurtosis = hstats.kurtosis; 502 smap.putAll(hstats.smap); 503 kmap.putAll(hstats.kmap); 504 isDirty = hstats.isDirty; 505 } 506 507// public void setSkewness(T skewness) { 508// this.skewness = skewness; 509// } 510// 511// public void setKurtosis(T kurtosis) { 512// this.kurtosis = kurtosis; 513// } 514// 515// public T 
getSkewness() { 516// return skewness; 517// } 518// 519// public T getKurtosis() { 520// return kurtosis; 521// } 522 523 public Dataset getSkewness(int axis) { 524 ReferencedDataset rd = smap.get(axis); 525 return rd == null ? null : rd.get(); 526 } 527 528 public Dataset getKurtosis(int axis) { 529 ReferencedDataset rd = kmap.get(axis); 530 return rd == null ? null : rd.get(); 531 } 532 533 public void setSkewness(int axis, Dataset s) { 534 smap.put(axis, new ReferencedDataset(s)); 535 } 536 537 public void setKurtosis(int axis, Dataset k) { 538 kmap.put(axis, new ReferencedDataset(k)); 539 } 540 } 541 542 static private HigherStatisticsImpl<?> calculateHigherMoments(final Dataset a, final boolean ignoreNaNs, final boolean ignoreInfs) { 543 final int is = a.getElementsPerItem(); 544 final IndexIterator iter = a.getIterator(); 545 546 if (is == 1) { 547 Skewness s = new Skewness(); 548 Kurtosis k = new Kurtosis(); 549 if (ignoreNaNs) { 550 while (iter.hasNext()) { 551 final double x = a.getElementDoubleAbs(iter.index); 552 if (Double.isNaN(x)) 553 continue; 554 s.increment(x); 555 k.increment(x); 556 } 557 } else { 558 while (iter.hasNext()) { 559 final double x = a.getElementDoubleAbs(iter.index); 560 s.increment(x); 561 k.increment(x); 562 } 563 } 564 565 HigherStatisticsImpl<Double> stats = new HigherStatisticsImpl<Double>(); 566 stats.skewness = s.getResult(); 567 stats.kurtosis = k.getResult(); 568 return stats; 569 } 570 final Skewness[] s = new Skewness[is]; 571 final Kurtosis[] k = new Kurtosis[is]; 572 573 for (int j = 0; j < is; j++) { 574 s[j] = new Skewness(); 575 k[j] = new Kurtosis(); 576 } 577 if (ignoreNaNs) { 578 while (iter.hasNext()) { 579 boolean skip = false; 580 for (int j = 0; j < is; j++) { 581 if (Double.isNaN(a.getElementDoubleAbs(iter.index + j))) { 582 skip = true; 583 break; 584 } 585 } 586 if (!skip) 587 for (int j = 0; j < is; j++) { 588 final double val = a.getElementDoubleAbs(iter.index + j); 589 s[j].increment(val); 590 
k[j].increment(val); 591 } 592 } 593 } else { 594 while (iter.hasNext()) { 595 for (int j = 0; j < is; j++) { 596 final double val = a.getElementDoubleAbs(iter.index + j); 597 s[j].increment(val); 598 k[j].increment(val); 599 } 600 } 601 } 602 final double[] ts = new double[is]; 603 final double[] tk = new double[is]; 604 for (int j = 0; j < is; j++) { 605 ts[j] = s[j].getResult(); 606 tk[j] = k[j].getResult(); 607 } 608 609 HigherStatisticsImpl<double[]> stats = new HigherStatisticsImpl<double[]>(); 610 stats.skewness = ts; 611 stats.kurtosis = tk; 612 return stats; 613 } 614 615 static private HigherStatisticsImpl<?> calculateHigherMoments(final Dataset a, final boolean ignoreNaNs, final boolean ignoreInfs, final int axis) { 616 final int rank = a.getRank(); 617 final int is = a.getElementsPerItem(); 618 final int[] oshape = a.getShape(); 619 final int alen = oshape[axis]; 620 oshape[axis] = 1; 621 622 final int[] nshape = ShapeUtils.squeezeShape(oshape, false); 623 final Dataset sk; 624 final Dataset ku; 625 HigherStatisticsImpl<?> stats = null; 626 627 if (is == 1) { 628 if (stats == null) { 629 stats = new HigherStatisticsImpl<Double>(); 630 a.setMetadata(stats); 631 } 632 sk = DatasetFactory.zeros(DoubleDataset.class, nshape); 633 ku = DatasetFactory.zeros(DoubleDataset.class, nshape); 634 final IndexIterator qiter = sk.getIterator(true); 635 final int[] qpos = qiter.getPos(); 636 final int[] spos = oshape; 637 638 while (qiter.hasNext()) { 639 int i = 0; 640 for (; i < axis; i++) { 641 spos[i] = qpos[i]; 642 } 643 spos[i++] = 0; 644 for (; i < rank; i++) { 645 spos[i] = qpos[i - 1]; 646 } 647 648 Skewness s = new Skewness(); 649 Kurtosis k = new Kurtosis(); 650 if (ignoreNaNs) { 651 for (int j = 0; j < alen; j++) { 652 spos[axis] = j; 653 final double val = a.getDouble(spos); 654 if (Double.isNaN(val)) 655 continue; 656 657 s.increment(val); 658 k.increment(val); 659 } 660 } else { 661 for (int j = 0; j < alen; j++) { 662 spos[axis] = j; 663 final double val 
= a.getDouble(spos); 664 s.increment(val); 665 k.increment(val); 666 } 667 } 668 sk.set(s.getResult(), qpos); 669 ku.set(k.getResult(), qpos); 670 } 671 } else { 672 if (stats == null) { 673 stats = new HigherStatisticsImpl<double[]>(); 674 a.setMetadata(stats); 675 } 676 sk = DatasetFactory.zeros(is, CompoundDoubleDataset.class, nshape); 677 ku = DatasetFactory.zeros(is, CompoundDoubleDataset.class, nshape); 678 final IndexIterator qiter = sk.getIterator(true); 679 final int[] qpos = qiter.getPos(); 680 final int[] spos = oshape; 681 final Skewness[] s = new Skewness[is]; 682 final Kurtosis[] k = new Kurtosis[is]; 683 final double[] ts = new double[is]; 684 final double[] tk = new double[is]; 685 686 for (int j = 0; j < is; j++) { 687 s[j] = new Skewness(); 688 k[j] = new Kurtosis(); 689 } 690 while (qiter.hasNext()) { 691 int i = 0; 692 for (; i < axis; i++) { 693 spos[i] = qpos[i]; 694 } 695 spos[i++] = 0; 696 for (; i < rank; i++) { 697 spos[i] = qpos[i-1]; 698 } 699 700 for (int j = 0; j < is; j++) { 701 s[j].clear(); 702 k[j].clear(); 703 } 704 int index = a.get1DIndex(spos); 705 if (ignoreNaNs) { 706 boolean skip = false; 707 for (int j = 0; j < is; j++) { 708 if (Double.isNaN(a.getElementDoubleAbs(index + j))) { 709 skip = true; 710 break; 711 } 712 } 713 if (!skip) 714 for (int j = 0; j < is; j++) { 715 final double val = a.getElementDoubleAbs(index + j); 716 s[j].increment(val); 717 k[j].increment(val); 718 } 719 } else { 720 for (int j = 0; j < is; j++) { 721 final double val = a.getElementDoubleAbs(index + j); 722 s[j].increment(val); 723 k[j].increment(val); 724 } 725 } 726 for (int j = 0; j < is; j++) { 727 ts[j] = s[j].getResult(); 728 tk[j] = k[j].getResult(); 729 } 730 sk.set(ts, qpos); 731 ku.set(tk, qpos); 732 } 733 } 734 735 stats.setSkewness(axis, sk); 736 stats.setKurtosis(axis, ku); 737 return stats; 738 } 739 740 /** 741 * @param a dataset 742 * @param ignoreInvalids see {@link IDataset#max(boolean...)} 743 * @return skewness 744 * @since 
2.0
	 */
	public static Object skewness(final Dataset a, final boolean... ignoreInvalids) {
		return getHigherStatistic(a, ignoreInvalids).skewness;
	}

	/**
	 * @param a dataset
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return kurtosis
	 * @since 2.0
	 */
	public static Object kurtosis(final Dataset a, final boolean... ignoreInvalids) {
		return getHigherStatistic(a, ignoreInvalids).kurtosis;
	}

	/**
	 * @param a dataset
	 * @param axis
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return skewness along axis in dataset
	 * @since 2.0
	 */
	public static Dataset skewness(final Dataset a, int axis, final boolean... ignoreInvalids) {
		axis = a.checkAxis(axis);
		return getHigherStatistic(a, ignoreInvalids, axis).getSkewness(axis);
	}

	/**
	 * @param a dataset
	 * @param axis
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return kurtosis along axis in dataset
	 * @since 2.0
	 */
	public static Dataset kurtosis(final Dataset a, int axis, final boolean... ignoreInvalids) {
		axis = a.checkAxis(axis);
		return getHigherStatistic(a, ignoreInvalids, axis).getKurtosis(axis);
	}

	/**
	 * @param a dataset
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return sum of dataset
	 * @since 2.0
	 */
	public static Object sum(final Dataset a, final boolean... ignoreInvalids) {
		return a.sum(ignoreInvalids);
	}

	/**
	 * Sum the dataset, boxing the result in the widest type appropriate for the
	 * given dataset class ({@link Complex} for complex, primitive array for
	 * compound, boxed number otherwise).
	 * @param clazz dataset class
	 * @param a dataset
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed sum of dataset
	 * @since 2.3
	 */
	public static Object typedSum(final Class<? extends Dataset> clazz, final Dataset a, final boolean... ignoreInvalids) {
		if (a.isComplex()) {
			Complex m = (Complex) a.sum(ignoreInvalids);
			return m;
		} else if (a instanceof CompoundDataset) {
			return InterfaceUtils.fromDoublesToBiggestPrimitives(clazz, (double[]) a.sum(ignoreInvalids));
		}
		return InterfaceUtils.fromDoubleToBiggestNumber(clazz, DTypeUtils.toReal(a.sum(ignoreInvalids)));
	}

	/**
	 * @param a dataset
	 * @param dtype
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed sum of dataset
	 * @since 2.0
	 * @deprecated Please use the class-based method {@link #typedSum(Class, Dataset, boolean...)}
	 */
	@Deprecated
	public static Object typedSum(final Dataset a, int dtype, final boolean... ignoreInvalids) {
		return typedSum(DTypeUtils.getInterface(dtype), a, ignoreInvalids);
	}

	/**
	 * @param clazz dataset class
	 * @param a dataset
	 * @param axis
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return typed sum of items along axis in dataset
	 * @since 2.3
	 */
	public static <T extends Dataset> T typedSum(final Class<T> clazz, final Dataset a, int axis, final boolean... ignoreInvalids) {
		return a.sum(axis, ignoreInvalids).cast(clazz);
	}

	/**
	 * @param clazz dataset class
	 * @param a dataset
	 * @param axes
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return typed sum of items along axes in dataset
	 * @since 2.3
	 */
	public static <T extends Dataset> T typedSum(final Class<T> clazz, final Dataset a, int[] axes, final boolean... ignoreInvalids) {
		return a.sum(axes, ignoreInvalids).cast(clazz);
	}

	/**
	 * @param a dataset
	 * @param dtype
	 * @param axis
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return typed sum of items along axis in dataset
	 * @since 2.0
	 * @deprecated Please use the class-based method {@link #typedSum(Class, Dataset, int, boolean...)}
	 */
	@Deprecated
	public static Dataset typedSum(final Dataset a, int dtype, int axis, final boolean... ignoreInvalids) {
		return DatasetUtils.cast(a.sum(axis, ignoreInvalids), dtype);
	}

	/**
	 * @param a dataset
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return product of all items in dataset
	 * @since 2.0
	 */
	public static Object product(final Dataset a, final boolean... ignoreInvalids) {
		return typedProduct(a.getClass(), a, ignoreInvalids);
	}

	/**
	 * @param a dataset
	 * @param axis
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return product of items along axis in dataset
	 * @since 2.0
	 */
	public static Dataset product(final Dataset a, final int axis, final boolean... ignoreInvalids) {
		return typedProduct(a.getClass(), a, axis, ignoreInvalids);
	}

	/**
	 * @param a dataset
	 * @param axes
	 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
	 * @return product of items along axes in dataset
	 * @since 2.2
	 */
	public static Dataset product(final Dataset a, final int[] axes, final boolean... ignoreInvalids) {
		return typedProduct(a.getClass(), a, axes, ignoreInvalids);
	}

	/**
	 * @param clazz dataset class
	 * @param a dataset
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed product of all items in dataset
	 * @since 2.3
	 */
	public static Object typedProduct(final Class<? extends Dataset> clazz, final Dataset a, final boolean...
ignoreInvalids) {
		return typedProduct(a, DTypeUtils.getDType(clazz), ignoreInvalids);
	}

	/**
	 * @param a dataset
	 * @param dtype
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed product of all items in dataset ({@link Complex}, boxed
	 * number or primitive array depending on dtype; null for unhandled dtypes)
	 * @since 2.0
	 * @deprecated Please use the class-based method {@link #typedProduct(Class, Dataset, boolean...)}
	 */
	@Deprecated
	public static Object typedProduct(final Dataset a, final int dtype, final boolean... ignoreInvalids) {
		final boolean ignoreNaNs;
		final boolean ignoreInfs;
		// invalid-value flags only apply to floating point data; second flag defaults
		// to the first when omitted
		if (a.hasFloatingPointElements()) {
			ignoreNaNs = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false;
			ignoreInfs = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : ignoreNaNs;
		} else {
			ignoreNaNs = false;
			ignoreInfs = false;
		}

		if (a.isComplex()) {
			IndexIterator it = a.getIterator();
			double rv = 1, iv = 0; // running complex product (rv + iv*i)

			while (it.hasNext()) {
				final double r1 = a.getElementDoubleAbs(it.index);
				final double i1 = a.getElementDoubleAbs(it.index + 1);
				if (ignoreNaNs && (Double.isNaN(r1) || Double.isNaN(i1))) {
					continue;
				}
				if (ignoreInfs && (Double.isInfinite(r1) || Double.isInfinite(i1))) {
					continue;
				}
				final double tv = r1*rv - i1*iv;
				iv = r1*iv + i1*rv;
				rv = tv;
				// once both parts are NaN the product cannot recover - stop early
				if (Double.isNaN(rv) && Double.isNaN(iv)) {
					break;
				}
			}

			return new Complex(rv, iv);
		}

		IndexIterator it = a.getIterator();
		final int is;
		final long[] lresults;
		final double[] dresults;

		switch (dtype) {
		case Dataset.BOOL:
		case Dataset.INT8: case Dataset.INT16:
		case Dataset.INT32: case Dataset.INT64:
			long lresult = 1;
			while (it.hasNext()) {
				lresult *= a.getElementLongAbs(it.index);
			}
			return Long.valueOf(lresult);
		case Dataset.ARRAYINT8: case Dataset.ARRAYINT16:
		case Dataset.ARRAYINT32: case Dataset.ARRAYINT64:
			lresult = 1; // NOTE(review): dead store - lresult is not used in this branch
			is = a.getElementsPerItem();
			lresults = new long[is];
			for (int j = 0; j < is; j++) {
				lresults[j] = 1;
			}
			while (it.hasNext()) {
				for (int j = 0; j < is; j++)
					lresults[j] *= a.getElementLongAbs(it.index+j);
			}
			return lresults;
		case Dataset.FLOAT32: case Dataset.FLOAT64:
			double dresult = 1.;
			while (it.hasNext()) {
				final double x = a.getElementDoubleAbs(it.index);
				if (ignoreNaNs && Double.isNaN(x)) {
					continue;
				}
				if (ignoreInfs && Double.isInfinite(x)) {
					continue;
				}
				dresult *= x;
				if (Double.isNaN(dresult)) {
					break; // NaN is absorbing - stop early
				}
			}
			return Double.valueOf(dresult);
		case Dataset.ARRAYFLOAT32:
		case Dataset.ARRAYFLOAT64:
			is = a.getElementsPerItem();
			double[] vals = new double[is];
			dresults = new double[is];
			for (int j = 0; j < is; j++) {
				dresults[j] = 1.;
			}
			while (it.hasNext()) {
				// an item is skipped entirely when any element is invalid
				boolean okay = true;
				for (int j = 0; j < is; j++) {
					final double val = a.getElementDoubleAbs(it.index + j);
					if (ignoreNaNs && Double.isNaN(val)) {
						okay = false;
						break;
					}
					if (ignoreInfs && Double.isInfinite(val)) {
						okay = false;
						break;
					}
					vals[j] = val;
				}
				if (okay) {
					for (int j = 0; j < is; j++) {
						double val = vals[j];
						dresults[j] *= val;
					}
				}
			}
			return dresults;
		}

		return null;
	}

	/**
	 * @param clazz dataset class
	 * @param a dataset
	 * @param axis
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed product of items along axis in dataset
	 * @since 2.3
	 */
	public static <T extends Dataset> T typedProduct(final Class<T> clazz, final Dataset a, int axis, final boolean...
ignoreInvalids) {
		return typedProduct(clazz, a, new int[] {axis}, ignoreInvalids);
	}

	/**
	 * @param clazz dataset class
	 * @param a dataset
	 * @param axes
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed product of items in axes of dataset
	 * @since 2.3
	 */
	@SuppressWarnings("unchecked")
	public static <T extends Dataset> T typedProduct(final Class<T> clazz, final Dataset a, int[] axes, final boolean... ignoreInvalids) {
		return (T) typedProduct(a, DTypeUtils.getDType(clazz), axes, ignoreInvalids);
	}

	/**
	 * @param a dataset
	 * @param dtype
	 * @param axis
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed product of items along axis in dataset
	 * @since 2.0
	 * @deprecated Please use the class-based method {@link #typedProduct(Class, Dataset, int, boolean...)}
	 */
	@Deprecated
	public static Dataset typedProduct(final Dataset a, final int dtype, int axis, final boolean... ignoreInvalids) {
		return typedProduct(a, dtype, new int[] {axis}, ignoreInvalids);
	}

	/**
	 * @param a dataset
	 * @param dtype
	 * @param axes
	 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
	 * @return typed product of items in axes of dataset
	 * @since 2.2
	 * @deprecated Please use the class-based method {@link #typedProduct(Class, Dataset, int[], boolean...)}
	 */
	@Deprecated
	public static Dataset typedProduct(final Dataset a, final int dtype, int[] axes, final boolean...
ignoreInvalids) {
	axes = ShapeUtils.checkAxes(a.getRank(), axes); // validate/normalize axes
	SliceNDIterator siter = new SliceNDIterator(new SliceND(a.getShapeRef()), axes);

	// Shape of the result: the input shape with the product axes removed
	int[] nshape = siter.getUsedSlice().getSourceShape();
	final int is = a.getElementsPerItem();

	// ignoreInvalids[0] => ignore NaNs; ignoreInvalids[1] => ignore infinities
	// (defaults to the NaN flag). Only honoured for floating-point data.
	final boolean ignoreNaNs;
	final boolean ignoreInfs;
	if (a.hasFloatingPointElements()) {
		ignoreNaNs = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false;
		ignoreInfs = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : ignoreNaNs;
	} else {
		ignoreNaNs = false;
		ignoreInfs = false;
	}
	Dataset result = DatasetFactory.zeros(is, nshape, dtype);

	int[] spos = siter.getUsedPos(); // position in the result dataset

	// TODO add getLongArray(long[], int...) to CompoundDataset
	final boolean isComplex = a.isComplex();
	while (siter.hasNext()) {
		// Iterate over the slice of 'a' that collapses onto the current result position
		IndexIterator iter = a.getSliceIterator(siter.getCurrentSlice());
		final int[] pos = iter.getPos();

		if (isComplex) {
			double rv = 1, iv = 0; // running complex product (real, imaginary)
			switch (dtype) {
			case Dataset.COMPLEX64:
				ComplexFloatDataset af = (ComplexFloatDataset) a;
				while (iter.hasNext()) {
					final float r1 = af.getReal(pos);
					final float i1 = af.getImag(pos);
					if (ignoreNaNs && (Float.isNaN(r1) || Float.isNaN(i1))) {
						continue;
					}
					if (ignoreInfs && (Float.isInfinite(r1) || Float.isInfinite(i1))) {
						continue;
					}
					// Complex multiply: (rv + i*iv) * (r1 + i*i1)
					final double tv = r1*rv - i1*iv;
					iv = r1*iv + i1*rv;
					rv = tv;
					if (Double.isNaN(rv) && Double.isNaN(iv)) {
						break; // product is irrecoverably NaN; stop early
					}
				}
				break;
			case Dataset.COMPLEX128:
				ComplexDoubleDataset ad = (ComplexDoubleDataset) a;
				while (iter.hasNext()) {
					final double r1 = ad.getReal(pos);
					final double i1 = ad.getImag(pos);
					if (ignoreNaNs && (Double.isNaN(r1) || Double.isNaN(i1))) {
						continue;
					}
					if (ignoreInfs && (Double.isInfinite(r1) || Double.isInfinite(i1))) {
						continue;
					}
					final double tv = r1*rv - i1*iv;
					iv = r1*iv + i1*rv;
					rv = tv;
					if (Double.isNaN(rv) && Double.isNaN(iv)) {
						break;
					}
				}
				break;
			}

			result.set(new Complex(rv, iv), spos);
		} else {
			final long[] lresults;
			final double[] dresults;

			switch (dtype) {
			case Dataset.BOOL:
			case Dataset.INT8: case Dataset.INT16:
			case Dataset.INT32: case Dataset.INT64:
				long lresult = 1;
				while (iter.hasNext()) {
					lresult *= a.getInt(pos);
				}
				result.set(lresult, spos);
				break;
			case Dataset.ARRAYINT8:
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				while (iter.hasNext()) {
					final byte[] va = (byte[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
				}
				result.set(lresults, spos);
				break;
			case Dataset.ARRAYINT16:
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				while (iter.hasNext()) {
					final short[] va = (short[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
				}
				result.set(lresults, spos);
				break;
			case Dataset.ARRAYINT32:
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				while (iter.hasNext()) {
					final int[] va = (int[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
				}
				result.set(lresults, spos);
				break;
			case Dataset.ARRAYINT64:
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				while (iter.hasNext()) {
					final long[] va = (long[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
				}
				result.set(lresults, spos);
				break;
			case Dataset.FLOAT32: case Dataset.FLOAT64:
				double dresult = 1.;
				while (iter.hasNext()) {
					final double x = a.getElementDoubleAbs(iter.index);
					if (ignoreNaNs && Double.isNaN(x)) {
						continue;
					}
					if (ignoreInfs && Double.isInfinite(x)) {
						continue;
					}
					dresult *= x;
					if (Double.isNaN(dresult)) {
						break; // product is irrecoverably NaN; stop early
					}
				}
				result.set(dresult, spos);
				break;
			case Dataset.ARRAYFLOAT32: case Dataset.ARRAYFLOAT64:
				CompoundDataset da = (CompoundDataset) a;
				double[] dvalues = new double[is];
				dresults = new double[is];
				for (int k = 0; k < is; k++) {
					dresults[k] = 1.;
				}
				while (iter.hasNext()) {
					da.getDoubleArrayAbs(iter.index, dvalues);
					// An item is included only if every element passes the filters
					boolean okay = true;
					for (int k = 0; k < is; k++) {
						final double val = dvalues[k];
						if (ignoreNaNs && Double.isNaN(val)) {
							okay = false;
							break;
						}
						if (ignoreInfs && Double.isInfinite(val)) {
							okay = false;
							break;
						}
					}
					if (okay) {
						for (int k = 0; k < is; k++) {
							dresults[k] *= dvalues[k];
						}
					}
				}
				result.set(dresults, spos);
				break;
			}
		}
	}

//		result.setShape(ShapeUtils.squeezeShape(oshape, axes));
	return result;
}

/**
 * Cumulative product over the flattened dataset.
 * @param a dataset
 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
 * @return cumulative product of items in flattened dataset
 * @since 2.0
 */
public static Dataset cumulativeProduct(final Dataset a, final boolean... ignoreInvalids) {
	return cumulativeProduct(a.flatten(), 0, ignoreInvalids);
}

/**
 * Cumulative product along the given axis; result has the same shape as the input.
 * @param a dataset
 * @param axis axis to accumulate along
 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
 * @return cumulative product of items along axis in dataset
 * @since 2.0
 */
public static Dataset cumulativeProduct(final Dataset a, int axis, final boolean...
ignoreInvalids) {
	axis = a.checkAxis(axis);
	int dtype = a.getDType();
	int[] oshape = a.getShape();
	int alen = oshape[axis]; // length of the accumulation axis
	oshape[axis] = 1; // NOTE(review): oshape is not read again after this — looks vestigial

	// ignoreInvalids[0] => ignore NaNs; ignoreInvalids[1] => ignore infinities
	// (defaults to the NaN flag). Only honoured for floating-point data.
	final boolean ignoreNaNs;
	final boolean ignoreInfs;
	if (a.hasFloatingPointElements()) {
		ignoreNaNs = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false;
		ignoreInfs = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : ignoreNaNs;
	} else {
		ignoreNaNs = false;
		ignoreInfs = false;
	}
	// Result has the same shape and type as the input
	Dataset result = DatasetFactory.zeros(a);
	PositionIterator pi = result.getPositionIterator(axis);

	int[] pos = pi.getPos();

	while (pi.hasNext()) { // one pass per 1-D line along the accumulation axis

		if (a.isComplex()) {
			double rv = 1, iv = 0; // running complex product, reset per line
			switch (dtype) {
			case Dataset.COMPLEX64:
				ComplexFloatDataset af = (ComplexFloatDataset) a;
				ComplexFloatDataset rf = (ComplexFloatDataset) result;
				for (int j = 0; j < alen; j++) {
					if (!Double.isNaN(rv) || !Double.isNaN(iv)) { // stop updating once fully NaN
						pos[axis] = j;
						final float r1 = af.getReal(pos);
						final float i1 = af.getImag(pos);
						if (ignoreNaNs && (Float.isNaN(r1) || Float.isNaN(i1))) {
							continue; // NB: skips writing this output position too
						}
						if (ignoreInfs && (Float.isInfinite(r1) || Float.isInfinite(i1))) {
							continue;
						}
						final double tv = r1*rv - i1*iv;
						iv = r1*iv + i1*rv;
						rv = tv;
					}
					rf.set((float) rv, (float) iv, pos);
				}
				break;
			case Dataset.COMPLEX128:
				ComplexDoubleDataset ad = (ComplexDoubleDataset) a;
				ComplexDoubleDataset rd = (ComplexDoubleDataset) result;
				for (int j = 0; j < alen; j++) {
					if (!Double.isNaN(rv) || !Double.isNaN(iv)) {
						pos[axis] = j;
						final double r1 = ad.getReal(pos);
						final double i1 = ad.getImag(pos);
						if (ignoreNaNs && (Double.isNaN(r1) || Double.isNaN(i1))) {
							continue;
						}
						if (ignoreInfs && (Double.isInfinite(r1) || Double.isInfinite(i1))) {
							continue;
						}
						final double tv = r1*rv - i1*iv;
						iv = r1*iv + i1*rv;
						rv = tv;
					}
					rd.set(rv, iv, pos);
				}
				break;
			}
		} else {
			final int is;
			final long[] lresults;
			final double[] dresults;

			switch (dtype) {
			case Dataset.BOOL:
			case Dataset.INT8: case Dataset.INT16:
			case Dataset.INT32: case Dataset.INT64:
				long lresult = 1;
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					lresult *= a.getInt(pos);
					result.set(lresult, pos);
				}
				break;
			case Dataset.ARRAYINT8:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final byte[] va = (byte[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.ARRAYINT16:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final short[] va = (short[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.ARRAYINT32:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final int[] va = (int[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.ARRAYINT64:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int k = 0; k < is; k++) {
					lresults[k] = 1;
				}
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final long[] va = (long[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] *= va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.FLOAT32: case Dataset.FLOAT64:
				double dresult = 1.;
				for (int j = 0; j < alen; j++) {
					if (!Double.isNaN(dresult)) { // stop updating once product is NaN
						pos[axis] = j;
						final double x = a.getDouble(pos);
						if (ignoreNaNs && Double.isNaN(x)) {
							continue; // NB: skips writing this output position too
						}
						if (ignoreInfs && Double.isInfinite(x)) {
							continue;
						}
						dresult *= x;
					}
					result.set(dresult, pos);
				}
				break;
			case Dataset.ARRAYFLOAT32: case Dataset.ARRAYFLOAT64:
				is = a.getElementsPerItem();
				CompoundDataset da = (CompoundDataset) a;
				double[] dvalues = new double[is];
				dresults = new double[is];
				for (int k = 0; k < is; k++) {
					dresults[k] = 1.;
				}
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					da.getDoubleArray(dvalues, pos);
					// An item is included only if every element passes the filters
					boolean okay = true;
					for (int k = 0; k < is; k++) {
						final double val = dvalues[k];
						if (ignoreNaNs && Double.isNaN(val)) {
							okay = false;
							break;
						}
						if (ignoreInfs && Double.isInfinite(val)) {
							okay = false;
							break;
						}
					}
					if (okay) {
						for (int k = 0; k < is; k++) {
							dresults[k] *= dvalues[k];
						}
					}
					result.set(dresults, pos);
				}
				break;
			}
		}
	}

	return result;
}

/**
 * Cumulative sum over the flattened dataset.
 * @param a dataset
 * @param ignoreInvalids see {@link IDataset#max(boolean...)}
 * @return cumulative sum of items in flattened dataset
 * @since 2.0
 */
public static Dataset cumulativeSum(final Dataset a, final boolean... ignoreInvalids) {
	return cumulativeSum(a.flatten(), 0, ignoreInvalids);
}

/**
 * Cumulative sum along the given axis; result has the same shape as the input.
 * @param a dataset
 * @param axis axis to accumulate along
 * @param ignoreInvalids see {@link Dataset#max(int, boolean...)}
 * @return cumulative sum of items along axis in dataset
 * @since 2.0
 */
public static Dataset cumulativeSum(final Dataset a, int axis, final boolean...
ignoreInvalids) {
	axis = a.checkAxis(axis);
	int dtype = a.getDType();
	int[] oshape = a.getShape();
	int alen = oshape[axis]; // length of the accumulation axis
	oshape[axis] = 1; // NOTE(review): oshape is not read again after this — looks vestigial

	// ignoreInvalids[0] => ignore NaNs; ignoreInvalids[1] => ignore infinities
	// (defaults to the NaN flag). Only honoured for floating-point data.
	final boolean ignoreNaNs;
	final boolean ignoreInfs;
	if (a.hasFloatingPointElements()) {
		ignoreNaNs = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false;
		ignoreInfs = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : ignoreNaNs;
	} else {
		ignoreNaNs = false;
		ignoreInfs = false;
	}
	// Result has the same shape and type as the input
	Dataset result = DatasetFactory.zeros(a);
	PositionIterator pi = result.getPositionIterator(axis);

	int[] pos = pi.getPos();

	while (pi.hasNext()) { // one pass per 1-D line along the accumulation axis

		if (a.isComplex()) {
			double rv = 0, iv = 0; // running complex sum, reset per line
			switch (dtype) {
			case Dataset.COMPLEX64:
				ComplexFloatDataset af = (ComplexFloatDataset) a;
				ComplexFloatDataset rf = (ComplexFloatDataset) result;
				for (int j = 0; j < alen; j++) {
					if (!Double.isNaN(rv) || !Double.isNaN(iv)) { // stop updating once fully NaN
						pos[axis] = j;
						final float r1 = af.getReal(pos);
						final float i1 = af.getImag(pos);
						if (ignoreNaNs && (Float.isNaN(r1) || Float.isNaN(i1))) {
							continue; // NB: skips writing this output position too
						}
						if (ignoreInfs && (Float.isInfinite(r1) || Float.isInfinite(i1))) {
							continue;
						}
						rv += r1;
						iv += i1;
					}
					rf.set((float) rv, (float) iv, pos);
				}
				break;
			case Dataset.COMPLEX128:
				ComplexDoubleDataset ad = (ComplexDoubleDataset) a;
				ComplexDoubleDataset rd = (ComplexDoubleDataset) result;
				for (int j = 0; j < alen; j++) {
					if (!Double.isNaN(rv) || !Double.isNaN(iv)) {
						pos[axis] = j;
						final double r1 = ad.getReal(pos);
						final double i1 = ad.getImag(pos);
						if (ignoreNaNs && (Double.isNaN(r1) || Double.isNaN(i1))) {
							continue;
						}
						if (ignoreInfs && (Double.isInfinite(r1) || Double.isInfinite(i1))) {
							continue;
						}
						rv += r1;
						iv += i1;
					}
					rd.set(rv, iv, pos);
				}
				break;
			}
		} else {
			final int is;
			final long[] lresults;
			final double[] dresults;

			switch (dtype) {
			case Dataset.BOOL:
			case Dataset.INT8: case Dataset.INT16:
			case Dataset.INT32: case Dataset.INT64:
				long lresult = 0;
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					lresult += a.getInt(pos);
					result.set(lresult, pos);
				}
				break;
			case Dataset.ARRAYINT8:
				is = a.getElementsPerItem();
				lresults = new long[is]; // zero-initialized accumulators
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final byte[] va = (byte[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] += va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.ARRAYINT16:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final short[] va = (short[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] += va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.ARRAYINT32:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final int[] va = (int[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] += va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.ARRAYINT64:
				is = a.getElementsPerItem();
				lresults = new long[is];
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					final long[] va = (long[]) a.getObject(pos);
					for (int k = 0; k < is; k++) {
						lresults[k] += va[k];
					}
					result.set(lresults, pos);
				}
				break;
			case Dataset.FLOAT32: case Dataset.FLOAT64:
				double dresult = 0.;
				for (int j = 0; j < alen; j++) {
					if (!Double.isNaN(dresult)) { // stop updating once sum is NaN
						pos[axis] = j;
						final double x = a.getDouble(pos);
						if (ignoreNaNs && Double.isNaN(x)) {
							continue; // NB: skips writing this output position too
						}
						if (ignoreInfs && Double.isInfinite(x)) {
							continue;
						}
						dresult += x;
					}
					result.set(dresult, pos);
				}
				break;
			case Dataset.ARRAYFLOAT32: case Dataset.ARRAYFLOAT64:
				is = a.getElementsPerItem();
				CompoundDataset da = (CompoundDataset) a;
				double[] dvalues = new double[is];
				dresults = new double[is]; // zero-initialized accumulators
				for (int j = 0; j < alen; j++) {
					pos[axis] = j;
					da.getDoubleArray(dvalues, pos);
					// An item is included only if every element passes the filters
					boolean okay = true;
					for (int k = 0; k < is; k++) {
						final double val = dvalues[k];
						if (ignoreNaNs && Double.isNaN(val)) {
							okay = false;
							break;
						}
						if (ignoreInfs && Double.isInfinite(val)) {
							okay = false;
							break;
						}
					}
					if (okay) {
						for (int k = 0; k < is; k++) {
							dresults[k] += dvalues[k];
						}
					}
					result.set(dresults, pos);
				}
				break;
			}
		}
	}

	return result;
}

/**
 * Mean absolute deviation from the mean.
 * @param a dataset
 * @return average deviation value of all items in the dataset (a {@code Double} for
 *         single-element items, a {@code double[]} for compound items)
 */
public static Object averageDeviation(final Dataset a) {
	final IndexIterator it = a.getIterator();
	final int is = a.getElementsPerItem();

	if (is == 1) {
		double mean = (Double) a.mean();
		double sum = 0.0;

		while (it.hasNext()) {
			sum += Math.abs(a.getElementDoubleAbs(it.index) - mean);
		}

		return sum / a.getSize();
	}

	// Compound items: per-element deviation from per-element mean
	double[] means = (double[]) a.mean();
	double[] sums = new double[is];

	while (it.hasNext()) {
		for (int j = 0; j < is; j++)
			sums[j] += Math.abs(a.getElementDoubleAbs(it.index + j) - means[j]);
	}

	double n = a.getSize();
	for (int j = 0; j < is; j++)
		sums[j] /= n;

	return sums;
}

/**
 * The residual is the sum of squared differences
 * @param a dataset
 * @param b dataset to compare against
 * @return residual value
 */
public static double residual(final Dataset a, final Dataset b) {
	return a.residual(b);
}

/**
 * The residual is the sum of squared differences
 * @param a dataset
 * @param b dataset to compare against
 * @param w weights
 * @return residual value
 */
public static double weightedResidual(final Dataset a, final Dataset b, final Dataset w) {
	return a.residual(b, w, false);
}

/**
 * Calculate approximate outlier values. These are defined as the values in the dataset
 * that are approximately below and above the given thresholds - in terms of percentages
 * of dataset size.
 * <p>
 * It approximates by limiting the number of items (given by length) used internally by
 * data structures - the larger this is, the more accurate will those outlier values become.
 * The actual thresholds used are returned in the array.
 * <p>
 * Also, the low and high values will be made distinct if possible by adjusting the thresholds
 * @param a dataset
 * @param lo percentage threshold for lower limit
 * @param hi percentage threshold for higher limit
 * @param length maximum number of items used internally, if negative, then unlimited
 * @return double array with low and high values, and low and high percentage thresholds
 */
public static double[] outlierValues(final Dataset a, double lo, double hi, final int length) {
	return outlierValues(a, null, true, lo, hi, length);
}

/**
 * Calculate approximate outlier values. These are defined as the values in the dataset
 * that are approximately below and above the given thresholds - in terms of percentages
 * of dataset size.
 * <p>
 * It approximates by limiting the number of items (given by length) used internally by
 * data structures - the larger this is, the more accurate will those outlier values become.
 * The actual thresholds used are returned in the array.
 * <p>
 * Also, the low and high values will be made distinct if possible by adjusting the thresholds
 * @param a dataset
 * @param mask can be null
 * @param value value of mask to match to include for calculation
 * @param lo percentage threshold for lower limit
 * @param hi percentage threshold for higher limit
 * @param length maximum number of items used internally, if negative, then unlimited
 * @return double array with low and high values, and low and high percentage thresholds
 * @since 2.1
 */
public static double[] outlierValues(final Dataset a, final Dataset mask, final boolean value, double lo, double hi, final int length) {
	if (lo <= 0 || hi <= 0 || lo >= hi || hi >= 100 || Double.isNaN(lo)|| Double.isNaN(hi)) {
		throw new IllegalArgumentException("Thresholds must be between (0,100) and in order");
	}
	final int size = a.getSize();
	// Number of items to track at each end, at least one
	int nl = Math.max((int) ((lo*size)/100), 1);
	if (length > 0 && nl > length)
		nl = length;
	int nh = Math.max((int) (((100-hi)*size)/100), 1);
	if (length > 0 && nh > length)
		nh = length;

	IndexIterator it = mask == null ? a.getIterator() : a.getBooleanIterator(mask, value);
	// Use the map-based implementation for large tracking counts, list-based otherwise
	double[] results = Math.max(nl, nh) > 640 ? outlierValuesMap(a, it, nl, nh) : outlierValuesList(a, it, nl, nh);

	// Convert counts back to actual percentage thresholds
	results[2] = results[2]*100./size;
	results[3] = 100.
- results[3]*100./size; 1795 return results; 1796 } 1797 1798 static double[] outlierValuesMap(final Dataset a, final IndexIterator it, int nl, int nh) { 1799 final TreeMap<Double, Integer> lMap = new TreeMap<Double, Integer>(); 1800 final TreeMap<Double, Integer> hMap = new TreeMap<Double, Integer>(); 1801 1802 int ml = 0; 1803 int mh = 0; 1804 while (it.hasNext()) { 1805 Double x = a.getElementDoubleAbs(it.index); 1806 if (Double.isNaN(x)) { 1807 continue; 1808 } 1809 Integer i; 1810 if (ml == nl) { 1811 Double k = lMap.lastKey(); 1812 if (x < k) { 1813 i = lMap.get(k) - 1; 1814 if (i == 0) { 1815 lMap.remove(k); 1816 } else { 1817 lMap.put(k, i); 1818 } 1819 i = lMap.get(x); 1820 if (i == null) { 1821 lMap.put(x, 1); 1822 } else { 1823 lMap.put(x, i + 1); 1824 } 1825 } 1826 } else { 1827 i = lMap.get(x); 1828 if (i == null) { 1829 lMap.put(x, 1); 1830 } else { 1831 lMap.put(x, i + 1); 1832 } 1833 ml++; 1834 } 1835 1836 if (mh == nh) { 1837 Double k = hMap.firstKey(); 1838 if (x > k) { 1839 i = hMap.get(k) - 1; 1840 if (i == 0) { 1841 hMap.remove(k); 1842 } else { 1843 hMap.put(k, i); 1844 } 1845 i = hMap.get(x); 1846 if (i == null) { 1847 hMap.put(x, 1); 1848 } else { 1849 hMap.put(x, i+1); 1850 } 1851 } 1852 } else { 1853 i = hMap.get(x); 1854 if (i == null) { 1855 hMap.put(x, 1); 1856 } else { 1857 hMap.put(x, i+1); 1858 } 1859 mh++; 1860 } 1861 } 1862 1863 // Attempt to make values distinct 1864 double lx = lMap.lastKey(); 1865 double hx = hMap.firstKey(); 1866 if (lx >= hx) { 1867 Double h = hMap.higherKey(lx); 1868 if (h != null) { 1869 hx = h; 1870 mh--; 1871 } else { 1872 Double l = lMap.lowerKey(hx); 1873 if (l != null) { 1874 lx = l; 1875 ml--; 1876 } 1877 } 1878 1879 } 1880 return new double[] {lMap.lastKey(), hMap.firstKey(), ml, mh}; 1881 } 1882 1883 static double[] outlierValuesList(final Dataset a, final IndexIterator it, int nl, int nh) { 1884 final List<Double> lList = new ArrayList<Double>(nl); 1885 final List<Double> hList = new 
ArrayList<Double>(nh);

	double lx = Double.POSITIVE_INFINITY; // largest of the tracked low values
	double hx = Double.NEGATIVE_INFINITY; // smallest of the tracked high values

	while (it.hasNext()) {
		double x = a.getElementDoubleAbs(it.index);
		if (Double.isNaN(x)) {
			continue; // NaNs never count as outliers
		}
		if (x < lx) {
			if (lList.size() == nl) {
				lList.remove(lx); // boxed remove-by-value of the current low bound
			}
			lList.add(x);
			lx = Collections.max(lList);
		} else if (x == lx) {
			if (lList.size() < nl) {
				lList.add(x); // keep duplicates while capacity remains
			}
		}

		if (x > hx) {
			if (hList.size() == nh) {
				hList.remove(hx); // boxed remove-by-value of the current high bound
			}
			hList.add(x);
			hx = Collections.min(hList);
		} else if (x == hx) {
			if (hList.size() < nh) {
				hList.add(x);
			}
		}
	}

	nl = lList.size();
	nh = hList.size();

	// Attempt to make values distinct
	if (lx >= hx) {
		Collections.sort(hList);
		for (double h : hList) {
			if (h > hx) {
				hx = h;
				break;
			}
			nh--;
		}
		if (lx >= hx) {
			Collections.sort(lList);
			Collections.reverse(lList);
			for (double l : lList) {
				if (l < lx) {
					lx = l;
					break;
				}
				nl--;
			}
		}
	}
	return new double[] {lx, hx, nl, nh};
}

/**
 * See {@link #covariance(Dataset a, Dataset b, boolean rowvar, boolean bias, Integer ddof)} with b = null, rowvar = true, bias = false and ddof = null.
 * @param a dataset of variables and observations
 * @return covariance array of a
 */
public static Dataset covariance(final Dataset a) {
	return covariance(a, true, false, null);
}

/**
 * See {@link #covariance(Dataset a, Dataset b, boolean rowvar, boolean bias, Integer ddof)} with b = null.
1959 * @param a 1960 * @return covariance array of a 1961 * @since 2.0 1962 */ 1963 public static Dataset covariance(final Dataset a, 1964 boolean rowvar, boolean bias, Integer ddof) { 1965 return covariance(a, null, rowvar, bias, ddof); 1966 } 1967 1968 /** 1969 * See {@link #covariance(Dataset a, Dataset b, boolean rowvar, boolean bias, Integer ddof)} with b = null, rowvar = true, bias = false and ddof = null. 1970 * @param a 1971 * @return covariance array of a concatenated with b 1972 */ 1973 public static Dataset covariance(final Dataset a, final Dataset b) { 1974 return covariance(a, b, true, false, null); 1975 } 1976 1977 /** 1978 * Calculate the covariance matrix (array) of a concatenated with b. This 1979 * method is directly based on the implementation in numpy (cov). 1980 * @param a Array containing multiple variable and observations. Each row represents a variable, each column an observation. 1981 * @param b An extra set of variables and observations. Must be of same type as a and have a compatible shape. 1982 * @param rowvar When true (default), each row is a variable; when false each column is a variable. 1983 * @param bias Default normalisation is (N - 1) - N is number of observations. If set true, normalisation is (N). 1984 * @param ddof Default normalisation is (N - 1). If ddof is set, then normalisation is (N - ddof). 1985 * @return covariance array of a concatenated with b 1986 * @since 2.0 1987 */ 1988 public static Dataset covariance (final Dataset a, final Dataset b, 1989 boolean rowvar, boolean bias, Integer ddof) { 1990 1991 //Create a working copy of the dataset & check its rank. 
1992 Dataset vars = a.clone(); 1993 if (a.getRank() == 1) { 1994 vars.setShape(1, a.getShapeRef()[0]); 1995 } 1996 1997 //1D of variables, so consider rows as variables 1998 if (vars.getShapeRef()[0] == 1) { 1999 rowvar = true; 2000 } 2001 2002 //nr is the number of records; axis is index 2003 int nr, axis; 2004 if (rowvar) { 2005 nr = vars.getShapeRef()[1]; 2006 axis = 0; 2007 } else { 2008 nr = vars.getShapeRef()[0]; 2009 axis = 1; 2010 } 2011 2012 //Set the reduced degrees of freedom & normalisation factor 2013 if (ddof == null) { 2014 if (bias == false) { 2015 ddof = 1; 2016 } else { 2017 ddof = 0; 2018 } 2019 } 2020 double norm_fact = nr - ddof; 2021 if (norm_fact <= 0.) { 2022 //TODO Some sort of warning here? 2023 norm_fact = 0.; 2024 } 2025 2026 //Concatenate additional set of variables with main set 2027 if (b != null) { 2028 //Create a working copy of the dataset & check its rank. 2029 Dataset extraVars = b.clone(); 2030 if (b.getRank() == 1) { 2031 extraVars.setShape(1, a.getShapeRef()[0]); 2032 } 2033 vars = DatasetUtils.concatenate(new Dataset[]{vars, extraVars}, axis); 2034 } 2035 2036 //Calculate deviations & covariance matrix 2037 Dataset varsMean = vars.mean(1-axis, false); 2038 //-vars & varsMean need same shape (this is a hack!) 2039 int[] meanShape = vars.getShape(); 2040 meanShape[1-axis] = 1; 2041 varsMean.setShape(meanShape); 2042 vars.isubtract(varsMean); 2043 Dataset cov; 2044 if (rowvar) { 2045 cov = Maths.divide(LinearAlgebra.dotProduct(vars, Maths.conjugate(vars.transpose())), norm_fact).squeeze(); 2046 } else { 2047 cov = Maths.divide(LinearAlgebra.dotProduct(vars.transpose(), Maths.conjugate(vars)), norm_fact).squeeze(); 2048 } 2049 return cov; 2050 } 2051}