This repository has been archived by the owner on Mar 27, 2021. It is now read-only.

Add distribution query integration test. (Part 2 of 2) (#737)
* Add distribution query integration test. (Part 2 of 2)

* Addresses PR comments

* Addresses PR comments
ao2017 authored on Jan 11, 2021
1 parent: 037a871 · commit: 20e3327
Showing 11 changed files with 441 additions and 75 deletions.
@@ -22,11 +22,25 @@
package com.spotify.heroic.aggregation;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.spotify.heroic.common.Series;
import com.spotify.heroic.metric.MetricCollection;
import com.spotify.heroic.metric.MetricType;
import com.spotify.heroic.metric.Point;
import com.spotify.heroic.metric.ShardedResultGroup;

import com.spotify.heroic.metric.TdigestPoint;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public interface AggregationCombiner {
ImmutableList<MetricType> TDIGEST_TYPE = ImmutableList.of(MetricType.TDIGEST_POINT,
MetricType.DISTRIBUTION_POINTS);

List<ShardedResultGroup> combine(List<List<ShardedResultGroup>> all);

AggregationCombiner DEFAULT = new AggregationCombiner() {
@@ -48,4 +62,42 @@ public String toString() {
return "DEFAULT";
}
};

default void compute(final ImmutableList.Builder<ShardedResultGroup> groups,
final AggregationOutput out,
final long cadence) {
final List<TdigestPoint> metrics = out.getMetrics().getDataAs(TdigestPoint.class);
final Map<ComputeDistributionStat.Percentile, List<Point>> resMap = new HashMap<>();
for (TdigestPoint tdigestPoint : metrics) {
ComputeDistributionStat
.Percentile
.DEFAULT
.forEach(p -> compute(tdigestPoint, resMap, p));
}
for (Map.Entry<ComputeDistributionStat.Percentile,
List<Point>> entry : resMap.entrySet()) {
Set<Series> newSet = new HashSet<>();
out.getSeries().forEach(s -> updateMetadata(s, entry.getKey(), newSet));
groups.add(new ShardedResultGroup(ImmutableMap.of(), out.getKey(), newSet,
MetricCollection.points(entry.getValue()), cadence));
}
}

private void updateMetadata(final Series s,
final ComputeDistributionStat.Percentile percentile,
final Set<Series> newSet) {
Map<String, String> tags = new HashMap<>(s.getTags());
tags.put("tdigeststat", percentile.getName());
Series newSeries = Series.of(s.getKey(), tags, s.getResource());
newSet.add(newSeries);
}

private void compute(final TdigestPoint tdigestPoint,
final Map<ComputeDistributionStat.Percentile, List<Point>> resMap,
final ComputeDistributionStat.Percentile percentile) {
Point point = ComputeDistributionStat.computePercentile(tdigestPoint, percentile);
List<Point> points = resMap.getOrDefault(percentile, new ArrayList<>());
points.add(point);
resMap.put(percentile, points);
}
}
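As an editorial note on the new default compute method: for every TdigestPoint in the aggregation output it evaluates each of the default percentiles (P99, P50, P75) and buckets the resulting Points per percentile; each bucket later becomes its own ShardedResultGroup whose series carry a tdigeststat=<percentile name> tag. The sketch below is not part of the commit (the class name, timestamp, digest contents, and compression value are invented) and assumes it lives in com.spotify.heroic.aggregation so the package-private Percentile.DEFAULT list is visible; it mirrors that fan-out with a digest built directly from the t-digest library.

package com.spotify.heroic.aggregation;

import com.spotify.heroic.metric.Point;
import com.tdunning.math.stats.TDigest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical example class, not part of the commit.
public class PercentileFanOutSketch {
    public static void main(String[] args) {
        // Stand-in for the digest carried by a TdigestPoint: values 1..100.
        TDigest digest = TDigest.createMergingDigest(100.0);
        for (int i = 1; i <= 100; i++) {
            digest.add(i);
        }
        final long timestamp = 1_600_000_000_000L;

        // Same grouping the combiner builds: one list of Points per percentile.
        Map<ComputeDistributionStat.Percentile, List<Point>> resMap = new HashMap<>();
        for (ComputeDistributionStat.Percentile p : ComputeDistributionStat.Percentile.DEFAULT) {
            Point point = ComputeDistributionStat.computePercentile(digest, timestamp, p);
            resMap.computeIfAbsent(p, unused -> new ArrayList<>()).add(point);
        }

        // Each entry would become one ShardedResultGroup; its series get a
        // tdigeststat=<percentile name> tag via updateMetadata.
        resMap.forEach((p, points) -> System.out.println(p.getName() + " -> " + points));
    }
}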
@@ -0,0 +1,68 @@
/*
* Copyright (c) 2020 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.spotify.heroic.aggregation;

import com.google.common.collect.ImmutableList;
import com.spotify.heroic.metric.Point;
import com.spotify.heroic.metric.TdigestPoint;
import com.tdunning.math.stats.TDigest;

public class ComputeDistributionStat {

public static Point computePercentile(final TdigestPoint point,
final Percentile percentile) {
final long timestamp = point.getTimestamp();
final TDigest datasketch = point.value();
return (datasketch == null || datasketch.size() == 0L) ? new Point(timestamp, Double.NaN) :
new Point(timestamp, datasketch.quantile(percentile.quantile));
}

public static Point computePercentile(final TDigest datasketch,
final long timestamp,
final Percentile percentile) {
return (datasketch == null || datasketch.size() == 0L) ? new Point(timestamp, Double.NaN) :
new Point(timestamp, datasketch.quantile(percentile.quantile));
}

public static class Percentile {
public double getQuantile() {
return quantile;
}

public String getName() {
return name;
}

private final double quantile;
private final String name;

public Percentile(final String name, final double quantile) {
this.name = name;
this.quantile = quantile;
}

static final ImmutableList<Percentile> DEFAULT = ImmutableList.of(
new Percentile("P99", 0.99),
new Percentile("P50", 0.50),
new Percentile("P75", 0.75));
}
}
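A short usage sketch may help show the two behaviours of computePercentile: a quantile lookup when the digest holds data, and the Double.NaN fallback when the digest is null or empty. It is illustrative only and not part of the commit; the class name, timestamp, digest contents, and compression value below are invented for the example.

package com.spotify.heroic.aggregation;

import com.spotify.heroic.metric.Point;
import com.tdunning.math.stats.TDigest;

// Hypothetical example class, not part of the commit.
public class ComputePercentileSketch {
    public static void main(String[] args) {
        final long timestamp = 1_600_000_000_000L;
        ComputeDistributionStat.Percentile p99 =
            new ComputeDistributionStat.Percentile("P99", 0.99);

        TDigest digest = TDigest.createMergingDigest(100.0);
        for (int i = 1; i <= 1000; i++) {
            digest.add(i);
        }

        // Populated digest: roughly the 99th percentile of 1..1000.
        Point populated = ComputeDistributionStat.computePercentile(digest, timestamp, p99);

        // Empty digest (size() == 0): the value is Double.NaN instead of an exception.
        Point empty = ComputeDistributionStat.computePercentile(
            TDigest.createMergingDigest(100.0), timestamp, p99);

        System.out.println(populated + " / " + empty);
    }
}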
@@ -78,8 +78,12 @@ public List<ShardedResultGroup> combine(
final AggregationResult result = session.result();

for (final AggregationOutput out : result.getResult()) {
groups.add(new ShardedResultGroup(ImmutableMap.of(), out.getKey(), out.getSeries(),
out.getMetrics(), cadence));
if (TDIGEST_TYPE.contains(out.getMetrics().getType())) {
compute(groups, out, cadence);
} else {
groups.add(new ShardedResultGroup(ImmutableMap.of(), out.getKey(), out.getSeries(),
out.getMetrics(), cadence));
}
}

return groups.build();
@@ -73,6 +73,7 @@
@JsonSubTypes.Type(MetricCollection.GroupCollection.class),
@JsonSubTypes.Type(MetricCollection.CardinalityCollection.class),
@JsonSubTypes.Type(MetricCollection.DistributionPointCollection.class),
@JsonSubTypes.Type(MetricCollection.TDigestPointCollection.class)
})
public interface MetricCollection {
/**
2 changes: 2 additions & 0 deletions heroic-core/build.gradle
@@ -52,6 +52,8 @@ dependencies {
implementation 'com.typesafe:config:1.3.2'
implementation 'net.jodah:expiringmap:0.5.1'

implementation 'com.tdunning:t-digest'

// Default usage tracking module. Normally different module dependencies are handled in
// heroic-dist, but defaults need to be accessible in heroic-core.
implementation project(':heroic-usage-tracking-google-analytics')
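Note that the new t-digest coordinate is declared without a version, which only resolves if a version is supplied elsewhere in the build (for example via dependency constraints or a platform/BOM); that machinery is not shown in this diff. In a standalone build the line would more likely pin a version explicitly, along the lines of the following (version chosen purely for illustration):

implementation 'com.tdunning:t-digest:3.2'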