APISonar


net.jafama.FastMath.log

Package path: net > jafama > FastMath > log
Related API listings: net, jafama, FastMath, log

Example 1
/**
 * Draw a random deviate via inverse-CDF (logit) sampling.
 *
 * @param random random generator to draw the uniform deviate from
 * @return {@code location + scale * log(u / (1 - u))} for uniform {@code u}
 */
public double nextRandom(Random random) {
  final double p = random.nextDouble();
  // Logit transform of the uniform deviate, then shift and scale.
  return location + scale * FastMath.log(p / (1. - p));
}
Example 2
/**
 * Compute half of the log-determinant from a Cholesky factor.
 *
 * Since det(A) is the product of the squared diagonal entries of L, summing
 * the logs of the (un-squared) diagonal yields exactly half of log(det(A)).
 *
 * @param chol Cholesky decomposition providing the lower-triangular factor
 * @return half of the log-determinant
 */
private double getHalfLogDeterminant(CholeskyDecomposition chol) {
  final double[][] lower = chol.getL();
  // Seed with the first diagonal entry, then accumulate the rest.
  double halfLogDet = FastMath.log(lower[0][0]);
  for(int row = 1; row < lower.length; row++) {
    halfLogDet += FastMath.log(lower[row][row]);
  }
  return halfLogDet;
}
Example 3
/**
 * Construct a cover tree over the given relation.
 *
 * @param relation data relation to index
 * @param distance distance function to use
 * @param expansion expansion rate (base of the scale levels)
 * @param truncate truncation threshold
 */
public AbstractCoverTree(Relation<O> relation, Distance<? super O> distance, double expansion, int truncate) {
  super(relation);
  this.distance = distance;
  this.distanceQuery = distance.instantiate(relation);
  this.expansion = expansion;
  this.truncate = truncate;
  // Cache 1 / log(expansion) to convert distances into scale levels cheaply.
  this.invLogExpansion = 1. / FastMath.log(expansion);
  // Smallest usable scale level, where cover radii reach Double.MIN_NORMAL.
  this.scaleBottom = (int) Math.ceil(FastMath.log(Double.MIN_NORMAL) * invLogExpansion);
}
Example 4
/**
 * Compute the divergence-style distance between two vectors,
 * summing {@code x * log(x / y)} over all dimensions.
 *
 * @param v1 first vector (numerator values)
 * @param v2 second vector (denominator values)
 * @return accumulated distance, or positive infinity if any second-vector
 *         component is nonpositive while accumulation is still possible
 */
public double distance(NumberVector v1, NumberVector v2) {
  final int dim = dimensionality(v1, v2);
  double sum = 0.;
  for(int d = 0; d < dim; d++) {
    final double x = v1.doubleValue(d), y = v2.doubleValue(d);
    if(y <= 0.) {
      // Nonpositive denominator: the log ratio is unbounded.
      return Double.POSITIVE_INFINITY;
    }
    if(x > 0.) {
      // Zero x contributes nothing (lim x->0 of x log x = 0), so skip it.
      sum += x * FastMath.log(x / y);
    }
  }
  return sum;
}
Example 5
/**
 * Construct the tree and precompute a table of logarithms.
 *
 * @param relation data relation to index
 * @param pagefile backing page file
 * @param settings tree settings, providing the maximum k
 */
public MkCoPTree(Relation<O> relation, PageFile<MkCoPTreeNode<O>> pagefile, MkTreeSettings<O, MkCoPTreeNode<O>, MkCoPEntry> settings) {
  super(relation, pagefile, settings);
  // Precompute log(k) for k = 1..kmax; entry for k is stored at index k - 1.
  log_k = new double[settings.kmax];
  for(int i = 0; i < log_k.length; i++) {
    log_k[i] = FastMath.log(i + 1);
  }
}
Example 6
/**
 * Finalize the statistics: convert the stored document frequencies into
 * inverse document frequencies, i.e. {@code log(N / df)}.
 */
protected void prepareComplete() {
  final double size = objcnt;
  // Replace each per-term count with its IDF value in place.
  for(ObjectIterator<Int2DoubleMap.Entry> it = idf.int2DoubleEntrySet().fastIterator(); it.hasNext();) {
    final Int2DoubleMap.Entry e = it.next();
    e.setValue(FastMath.log(size / e.getDoubleValue()));
  }
}
Example 7
/**
 * Scan the array for its finite value range and derive the scaling constant.
 *
 * @param array data array
 * @param adapter adapter to read double values from the array
 */
public <A> void prepare(A array, NumberArrayAdapter<?, A> adapter) {
  final DoubleMinMax range = new DoubleMinMax();
  final int len = adapter.size(array);
  for(int i = 0; i < len; i++) {
    final double v = adapter.getDouble(array, i);
    // Ignore NaN and infinite entries; they would corrupt the min/max.
    if(!Double.isNaN(v) && !Double.isInfinite(v)) {
      range.put(v);
    }
  }
  max = range.getMax();
  // Negated log of min/max ratio, cached for later scaling.
  mlogmax = -FastMath.log(range.getMin() / max);
}
Example 8
/**
 * Scan the outlier scores for their finite value range and derive the
 * scaling constant.
 *
 * @param or outlier result providing the score relation
 */
public void prepare(OutlierResult or) {
  final DoubleMinMax range = new DoubleMinMax();
  final DoubleRelation scores = or.getScores();
  for(DBIDIter id = scores.iterDBIDs(); id.valid(); id.advance()) {
    final double v = scores.doubleValue(id);
    // Ignore NaN and infinite scores; they would corrupt the min/max.
    if(!Double.isNaN(v) && !Double.isInfinite(v)) {
      range.put(v);
    }
  }
  max = range.getMax();
  // Negated log of min/max ratio, cached for later scaling.
  mlogmax = -FastMath.log(range.getMin() / max);
}
Example 9
/**
 * Evaluate clustering quality as log-likelihood minus a complexity penalty
 * of {@code 0.5 * (free parameters) * log(n)} (BIC-like form).
 *
 * @param clustering clustering to evaluate
 * @param distance distance function used for the likelihood
 * @param relation underlying data relation
 * @return penalized log-likelihood score
 */
public <V extends NumberVector> double quality(Clustering<? extends MeanModel> clustering, NumberVectorDistance<? super V> distance, Relation<V> relation) {
  final double loglike = logLikelihoodXMeans(relation, clustering, distance);
  final double penalty = (.5 * numberOfFreeParameters(relation, clustering)) * FastMath.log(numPoints(clustering));
  return loglike - penalty;
}
Example 10
/**
 * Lower bound on the distance between two bounding rectangles, accumulating
 * per-dimension terms of the form {@code min * log(min / mid)} against the
 * midpoint of the two upper bounds.
 *
 * @param mbr1 first bounding rectangle
 * @param mbr2 second bounding rectangle
 * @return accumulated bound, clamped to be nonnegative
 */
public double minDist(SpatialComparable mbr1, SpatialComparable mbr2) {
  final int dim = dimensionality(mbr1, mbr2);
  double sum = 0;
  for(int d = 0; d < dim; d++) {
    final double lo1 = mbr1.getMin(d), lo2 = mbr2.getMin(d);
    // Midpoint of the two upper bounds in this dimension.
    final double mid = .5 * (mbr1.getMax(d) + mbr2.getMax(d));
    // Negated comparison also skips NaN midpoints, not just nonpositive ones.
    if(!(mid > 0.)) {
      continue;
    }
    double contrib = 0;
    if(lo1 > 0) {
      contrib = lo1 * FastMath.log(lo1 / mid);
    }
    if(lo2 > 0) {
      contrib += lo2 * FastMath.log(lo2 / mid);
    }
    sum += contrib;
  }
  // Clamp: numerical error can leave the sum slightly negative.
  return sum > 0 ? sum : 0;
}
Example 11
// Log-likelihood of the non-anomalous objects under a multivariate normal
// model whose mean and covariance come from the supplied builder.
// NOTE(review): the constant term folds the normalization factor
// log(sqrt((2*pi)^dim * det(Sigma))) into one multiplication per object;
// assumes lu.det() is positive (valid covariance) — confirm upstream.
private double loglikelihoodNormal(DBIDs objids, SetDBIDs anomalous, CovarianceMatrix builder, Relation<V> relation) {
    double[] mean = builder.getMeanVector();
    // LU decomposition of the sample covariance, used for inverse and determinant.
    final LUDecomposition lu = new LUDecomposition(builder.makeSampleMatrix());
    double[][] covInv = lu.inverse();
    // for each object compute probability and sum
    double prob = (objids.size() - anomalous.size()) * -FastMath.log(FastMath.sqrt(MathUtil.powi(MathUtil.TWOPI, RelationUtil.dimensionality(relation)) * lu.det()));
    for(DBIDIter iter = objids.iter(); iter.valid(); iter.advance()) {
      // Only non-anomalous objects contribute to the likelihood.
      if(!anomalous.contains(iter)) {
        // Centered vector, then the quadratic form x^T * Sigma^-1 * x.
        double[] xcent = minusEquals(relation.get(iter).toArray(), mean);
        prob -= .5 * transposeTimesTimes(xcent, covInv, xcent);
      }
    }
    return prob;
  }