diff --git a/README.md b/README.md
index ea1fab0..2849249 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 This is an implementation of Ted Dunning's [t-digest](https://github.com/tdunning/t-digest/) in Go.
 
-The implementaion is based off [Derrick Burns' C++ implementation](https://github.com/derrickburns/tdigest).
+The implementation is based off [Derrick Burns' C++ implementation](https://github.com/derrickburns/tdigest).
 
 ## Example
 
@@ -35,8 +35,3 @@ func main() {
 	log.Println("CDF(5) = ", td.CDF(5))
 }
 ```
-
-## TODO
-
-Only the methods for a single TDigest have been implemented.
-The methods to merge two or more existing t-digests into a single t-digest have yet to be implemented.
diff --git a/centroid.go b/centroid.go
index b79cada..88db1b4 100644
--- a/centroid.go
+++ b/centroid.go
@@ -43,8 +43,9 @@ func (c *Centroid) Add(r Centroid) error {
 // CentroidList is sorted by the Mean of the centroid, ascending.
 type CentroidList []Centroid
 
+// Clear clears the list.
 func (l *CentroidList) Clear() {
-	*l = (*l)[0:0]
+	*l = (*l)[:0]
 }
 
 func (l CentroidList) Len() int           { return len(l) }
diff --git a/debian/changelog b/debian/changelog
index 6377da6..a54bc0d 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,10 +1,11 @@
-golang-github-influxdata-tdigest (0.0~git20180711.a7d76c6-2) UNRELEASED; urgency=low
+golang-github-influxdata-tdigest (0.0.1+git20210216.1.fc98d27-1) UNRELEASED; urgency=low
 
   * Bump debhelper from old 11 to 12.
   * Set debhelper-compat version in Build-Depends.
   * Set upstream metadata fields: Bug-Database, Bug-Submit.
+  * New upstream snapshot.
 
- -- Debian Janitor <janitor@jelmer.uk>  Wed, 01 Jul 2020 16:59:25 -0000
+ -- Debian Janitor <janitor@jelmer.uk>  Mon, 07 Jun 2021 18:47:45 -0000
 
 golang-github-influxdata-tdigest (0.0~git20180711.a7d76c6-1) unstable; urgency=medium
 
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..a9ac269
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,10 @@
+module github.com/influxdata/tdigest
+
+require (
+	github.com/google/go-cmp v0.2.0
+	golang.org/x/exp v0.0.0-20180321215751-8460e604b9de
+	gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca
+	gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6 // indirect
+)
+
+go 1.13
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..996169f
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,9 @@
+github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+golang.org/x/exp v0.0.0-20180321215751-8460e604b9de h1:xSjD6HQTqT0H/k60N5yYBtnN1OEkVy7WIo/DYyxKRO0=
+golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca h1:PupagGYwj8+I4ubCxcmcBRk3VlUWtTg5huQpZR9flmE=
+gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
+gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6 h1:4WsZyVtkthqrHTbDCJfiTs8IWNYE4uvsSDgaV6xpp+o=
+gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
diff --git a/tdigest.go b/tdigest.go
index 9efdc47..adab1c8 100644
--- a/tdigest.go
+++ b/tdigest.go
@@ -5,6 +5,8 @@ import (
 	"sort"
 )
 
+// TDigest is a data structure for accurate on-line accumulation of
+// rank-based statistics such as quantiles and trimmed means.
 type TDigest struct {
 	Compression float64
 
@@ -19,46 +21,81 @@ type TDigest struct {
 	max               float64
 }
 
+// New initializes a new distribution with a default compression.
 func New() *TDigest {
 	return NewWithCompression(1000)
 }
+
+// NewWithCompression initializes a new distribution with custom compression.
 func NewWithCompression(c float64) *TDigest {
 	t := &TDigest{
 		Compression: c,
 	}
 	t.maxProcessed = processedSize(0, t.Compression)
 	t.maxUnprocessed = unprocessedSize(0, t.Compression)
-	t.processed = make([]Centroid, 0, t.maxProcessed)
-	t.unprocessed = make([]Centroid, 0, t.maxUnprocessed+1)
+	t.processed = make(CentroidList, 0, t.maxProcessed)
+	t.unprocessed = make(CentroidList, 0, t.maxUnprocessed+1)
+	t.Reset()
+	return t
+}
+
+// ByteSizeForCompression returns the number of bytes needed for a tdigest
+// with compression value comp.
+func ByteSizeForCompression(comp float64) int {
+	c := int(comp)
+	// // A centroid is 2 float64s, so we need 16 bytes for each centroid
+	// float_size := 8
+	// centroid_size := 2 * float_size
+
+	// // Unprocessed and processed can grow up to length c
+	// unprocessed_size := centroid_size * c
+	// processed_size := unprocessed_size
+
+	// // the cumulative field can also be of length c, but each item is a single float64
+	// cumulative_size := float_size * c // <- this could also be unprocessed_size / 2
+
+	// return unprocessed_size + processed_size + cumulative_size
+
+	// // or, more succinctly:
+	// return float_size * c * 5
+
+	// or even more succinctly
+	return c * 40
+}
+
+// Reset resets the distribution to its initial state.
+func (t *TDigest) Reset() {
+	t.processed = t.processed[:0]
+	t.unprocessed = t.unprocessed[:0]
+	t.cumulative = t.cumulative[:0]
+	t.processedWeight = 0
+	t.unprocessedWeight = 0
 	t.min = math.MaxFloat64
 	t.max = -math.MaxFloat64
-	return t
 }
 
+// Add adds a value x with a weight w to the distribution.
 func (t *TDigest) Add(x, w float64) {
-	if math.IsNaN(x) {
-		return
-	}
 	t.AddCentroid(Centroid{Mean: x, Weight: w})
 }
 
+// AddCentroidList can quickly add multiple centroids.
 func (t *TDigest) AddCentroidList(c CentroidList) {
-	l := c.Len()
-	for i := 0; i < l; i++ {
-		diff := l - i
-		room := t.maxUnprocessed - t.unprocessed.Len()
-		mid := i + diff
-		if room < diff {
-			mid = i + room
-		}
-		for i < mid {
-			t.AddCentroid(c[i])
-			i++
-		}
+	// It's possible to optimize this by bulk-copying the slice, but this
+	// yields just a 1-2% speedup (most time is in process()), so not worth
+	// the complexity.
+	for i := range c {
+		t.AddCentroid(c[i])
 	}
 }
 
+// AddCentroid adds a single centroid.
+// Weights that are NaN, +Inf, or <= 0 are ignored, as are NaN means.
 func (t *TDigest) AddCentroid(c Centroid) {
+	if math.IsNaN(c.Mean) || c.Weight <= 0 || math.IsNaN(c.Weight) || math.IsInf(c.Weight, 1) {
+		return
+	}
+
 	t.unprocessed = append(t.unprocessed, c)
 	t.unprocessedWeight += c.Weight
 
@@ -68,6 +105,14 @@ func (t *TDigest) AddCentroid(c Centroid) {
 	}
 }
 
+// Merge merges the supplied digest into this digest. Functionally equivalent to
+// calling t.AddCentroidList(t2.Centroids(nil)), but avoids making an extra
+// copy of the CentroidList.
+func (t *TDigest) Merge(t2 *TDigest) {
+	t2.process()
+	t.AddCentroidList(t2.processed)
+}
+
 func (t *TDigest) process() {
 	if t.unprocessed.Len() > 0 ||
 		t.processed.Len() > t.maxProcessed {
@@ -98,13 +143,42 @@ func (t *TDigest) process() {
 		}
 		t.min = math.Min(t.min, t.processed[0].Mean)
 		t.max = math.Max(t.max, t.processed[t.processed.Len()-1].Mean)
-		t.updateCumulative()
 		t.unprocessed.Clear()
 	}
 }
 
+// Centroids returns a copy of processed centroids.
+// Useful when aggregating multiple t-digests.
+//
+// Centroids are appended to the passed CentroidList; if you're re-using a
+// buffer, be sure to pass cl[:0].
+func (t *TDigest) Centroids(cl CentroidList) CentroidList {
+	t.process()
+	return append(cl, t.processed...)
+}
+
+func (t *TDigest) Count() float64 {
+	t.process()
+
+	// t.process always updates t.processedWeight to the total count of all
+	// centroids, so we don't need to re-count here.
+	return t.processedWeight
+}
+
 func (t *TDigest) updateCumulative() {
-	t.cumulative = make([]float64, t.processed.Len()+1)
+	// Weight can only increase, so the final cumulative value will always be
+	// either equal to, or less than, the total weight. If they are the same,
+	// then nothing has changed since the last update.
+	if len(t.cumulative) > 0 && t.cumulative[len(t.cumulative)-1] == t.processedWeight {
+		return
+	}
+
+	if n := t.processed.Len() + 1; n <= cap(t.cumulative) {
+		t.cumulative = t.cumulative[:n]
+	} else {
+		t.cumulative = make([]float64, n)
+	}
+
 	prev := 0.0
 	for i, centroid := range t.processed {
 		cur := centroid.Weight
@@ -114,8 +188,12 @@ func (t *TDigest) updateCumulative() {
 	t.cumulative[t.processed.Len()] = prev
 }
 
+// Quantile returns the (approximate) quantile of
+// the distribution. Accepted values for q are between 0.0 and 1.0.
+// Returns NaN if Count is zero or bad inputs.
 func (t *TDigest) Quantile(q float64) float64 {
 	t.process()
+	t.updateCumulative()
 	if q < 0 || q > 1 || t.processed.Len() == 0 {
 		return math.NaN()
 	}
@@ -142,8 +220,10 @@ func (t *TDigest) Quantile(q float64) float64 {
 	return weightedAverage(t.processed[t.processed.Len()-1].Mean, z1, t.max, z2)
 }
 
+// CDF returns the cumulative distribution function for a given value x.
 func (t *TDigest) CDF(x float64) float64 {
 	t.process()
+	t.updateCumulative()
 	switch t.processed.Len() {
 	case 0:
 		return 0.0
diff --git a/tdigest_test.go b/tdigest_test.go
index 6116ffa..5b1ac7d 100644
--- a/tdigest_test.go
+++ b/tdigest_test.go
@@ -1,11 +1,14 @@
 package tdigest_test
 
 import (
-	"math/rand"
+	"fmt"
+	"math"
+	"reflect"
 	"testing"
 
-	"github.com/gonum/stat/distuv"
 	"github.com/influxdata/tdigest"
+	"golang.org/x/exp/rand"
+	"gonum.org/v1/gonum/stat/distuv"
 )
 
 const (
@@ -25,9 +28,9 @@ var UniformDigest *tdigest.TDigest
 
 func init() {
 	dist := distuv.Normal{
-		Mu:     Mu,
-		Sigma:  Sigma,
-		Source: rand.New(rand.NewSource(seed)),
+		Mu:    Mu,
+		Sigma: Sigma,
+		Src:   rand.New(rand.NewSource(seed)),
 	}
 	uniform := rand.New(rand.NewSource(seed))
 
@@ -46,6 +49,91 @@ func init() {
 	}
 }
 
+// Compares the quantile results of two digests, and fails if the
+// fractional err exceeds maxErr.
+// Always fails if the total count differs.
+func compareQuantiles(td1, td2 *tdigest.TDigest, maxErr float64) error {
+	if td1.Count() != td2.Count() {
+		return fmt.Errorf("counts are not equal, %d vs %d", int64(td1.Count()), int64(td2.Count()))
+	}
+	for q := 0.05; q < 1; q += 0.05 {
+		if math.Abs(td1.Quantile(q)-td2.Quantile(q))/td1.Quantile(q) > maxErr {
+			return fmt.Errorf("quantile %g differs, %g vs %g", q, td1.Quantile(q), td2.Quantile(q))
+		}
+	}
+	return nil
+}
+
+// All Add methods should yield equivalent results.
+func TestTdigest_AddFuncs(t *testing.T) {
+	centroids := NormalDigest.Centroids(nil)
+
+	addDigest := tdigest.NewWithCompression(100)
+	addCentroidDigest := tdigest.NewWithCompression(100)
+	addCentroidListDigest := tdigest.NewWithCompression(100)
+
+	for _, c := range centroids {
+		addDigest.Add(c.Mean, c.Weight)
+		addCentroidDigest.AddCentroid(c)
+	}
+	addCentroidListDigest.AddCentroidList(centroids)
+
+	if err := compareQuantiles(addDigest, addCentroidDigest, 0.01); err != nil {
+		t.Errorf("AddCentroid() differs from Add(): %s", err.Error())
+	}
+	if err := compareQuantiles(addDigest, addCentroidListDigest, 0.01); err != nil {
+		t.Errorf("AddCentroidList() differs from Add(): %s", err.Error())
+	}
+}
+
+func TestTdigest_Count(t *testing.T) {
+	tests := []struct {
+		name   string
+		data   []float64
+		digest *tdigest.TDigest
+		want   float64
+	}{
+		{
+			name: "empty",
+			data: []float64{},
+			want: 0,
+		},
+		{
+			name: "not empty",
+			data: []float64{5, 4},
+			want: 2,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			td := tt.digest
+			if td == nil {
+				td = tdigest.NewWithCompression(1000)
+				for _, x := range tt.data {
+					td.Add(x, 1)
+				}
+			}
+			got := td.Count()
+			if got != tt.want {
+				t.Errorf("unexpected count, got %g want %g", got, tt.want)
+			}
+		})
+	}
+
+	got := NormalDigest.Count()
+	want := float64(len(NormalData))
+	if got != want {
+		t.Errorf("unexpected count for NormalDigest, got %g want %g", got, want)
+	}
+
+	got = UniformDigest.Count()
+	want = float64(len(UniformData))
+	if got != want {
+		t.Errorf("unexpected count for UniformDigest, got %g want %g", got, want)
+	}
+}
+
 func TestTdigest_Quantile(t *testing.T) {
 	tests := []struct {
 		name     string
@@ -82,37 +170,37 @@ func TestTdigest_Quantile(t *testing.T) {
 			name:     "normal 50",
 			quantile: 0.5,
 			digest:   NormalDigest,
-			want:     9.997821231634168,
+			want:     10.000673533707138,
 		},
 		{
 			name:     "normal 90",
 			quantile: 0.9,
 			digest:   NormalDigest,
-			want:     13.843815760607427,
+			want:     13.842132136909889,
 		},
 		{
 			name:     "uniform 50",
 			quantile: 0.5,
 			digest:   UniformDigest,
-			want:     50.02682856274754,
+			want:     49.992502345843555,
 		},
 		{
 			name:     "uniform 90",
 			quantile: 0.9,
 			digest:   UniformDigest,
-			want:     90.02117754660424,
+			want:     89.98281777095822,
 		},
 		{
 			name:     "uniform 99",
 			quantile: 0.99,
 			digest:   UniformDigest,
-			want:     99.00246731511771,
+			want:     98.98503400959562,
 		},
 		{
 			name:     "uniform 99.9",
 			quantile: 0.999,
 			digest:   UniformDigest,
-			want:     99.90178495422307,
+			want:     99.90103781043621,
 		},
 	}
 	for _, tt := range tests {
@@ -162,7 +250,7 @@ func TestTdigest_CDFs(t *testing.T) {
 			name: "normal mean",
 			cdf:  10,
 			data: NormalData,
-			want: 0.500298235578106,
+			want: 0.4999156505250766,
 		},
 		{
 			name: "normal high",
@@ -180,7 +268,7 @@ func TestTdigest_CDFs(t *testing.T) {
 			name: "uniform 50",
 			cdf:  50,
 			data: UniformData,
-			want: 0.49972989818712815,
+			want: 0.5000756133965755,
 		},
 		{
 			name: "uniform min",
@@ -198,13 +286,13 @@ func TestTdigest_CDFs(t *testing.T) {
 			name: "uniform 10",
 			cdf:  10,
 			data: UniformData,
-			want: 0.099715527526992,
+			want: 0.09987932577650871,
 		},
 		{
 			name: "uniform 90",
 			cdf:  90,
 			data: UniformData,
-			want: 0.8997838903965611,
+			want: 0.9001667885256108,
 		},
 	}
 	for _, tt := range tests {
@@ -224,6 +312,76 @@ func TestTdigest_CDFs(t *testing.T) {
 	}
 }
 
+func TestTdigest_Reset(t *testing.T) {
+	td := tdigest.New()
+	for _, x := range NormalData {
+		td.Add(x, 1)
+	}
+	q1 := td.Quantile(0.9)
+
+	td.Reset()
+	for _, x := range NormalData {
+		td.Add(x, 1)
+	}
+	if q2 := td.Quantile(0.9); q2 != q1 {
+		t.Errorf("unexpected quantile, got %g want %g", q2, q1)
+	}
+}
+
+func TestTdigest_OddInputs(t *testing.T) {
+	td := tdigest.New()
+	td.Add(math.NaN(), 1)
+	td.Add(1, math.NaN())
+	td.Add(1, 0)
+	td.Add(1, -1000)
+	if td.Count() != 0 {
+		t.Error("invalid value was allowed to be added")
+	}
+
+	// Infinite values are allowed.
+	td.Add(1, 1)
+	td.Add(2, 1)
+	td.Add(math.Inf(1), 1)
+	if q := td.Quantile(0.5); q != 2 {
+		t.Errorf("expected median value 2, got %f", q)
+	}
+	if q := td.Quantile(0.9); !math.IsInf(q, 1) {
+		t.Errorf("expected q=0.9 to be +Inf, got %f", q)
+	}
+}
+
+func TestTdigest_Merge(t *testing.T) {
+	// Repeat merges enough times to ensure we call compress()
+	numRepeats := 20
+	addDigest := tdigest.New()
+	for i := 0; i < numRepeats; i++ {
+		for _, c := range NormalDigest.Centroids(nil) {
+			addDigest.AddCentroid(c)
+		}
+		for _, c := range UniformDigest.Centroids(nil) {
+			addDigest.AddCentroid(c)
+		}
+	}
+
+	mergeDigest := tdigest.New()
+	for i := 0; i < numRepeats; i++ {
+		mergeDigest.Merge(NormalDigest)
+		mergeDigest.Merge(UniformDigest)
+	}
+
+	if err := compareQuantiles(addDigest, mergeDigest, 0.001); err != nil {
+		t.Errorf("AddCentroid() differs from Merge(): %s", err.Error())
+	}
+
+	// Empty merge does nothing and has no effect on underlying centroids.
+	c1 := addDigest.Centroids(nil)
+	addDigest.Merge(tdigest.New())
+	c2 := addDigest.Centroids(nil)
+	if !reflect.DeepEqual(c1, c2) {
+		t.Error("Merging an empty digest altered data")
+	}
+}
+
 var quantiles = []float64{0.1, 0.5, 0.9, 0.99, 0.999}
 
 func BenchmarkTDigest_Add(b *testing.B) {
@@ -234,6 +392,56 @@ func BenchmarkTDigest_Add(b *testing.B) {
 		}
 	}
 }
+
+func BenchmarkTDigest_AddCentroid(b *testing.B) {
+	centroids := make(tdigest.CentroidList, len(NormalData))
+	for i := range centroids {
+		centroids[i].Mean = NormalData[i]
+		centroids[i].Weight = 1
+	}
+
+	b.ResetTimer()
+	for n := 0; n < b.N; n++ {
+		td := tdigest.NewWithCompression(1000)
+		for i := range centroids {
+			td.AddCentroid(centroids[i])
+		}
+	}
+}
+
+func BenchmarkTDigest_AddCentroidList(b *testing.B) {
+	centroids := make(tdigest.CentroidList, len(NormalData))
+	for i := range centroids {
+		centroids[i].Mean = NormalData[i]
+		centroids[i].Weight = 1
+	}
+
+	b.ResetTimer()
+	for n := 0; n < b.N; n++ {
+		td := tdigest.NewWithCompression(1000)
+		td.AddCentroidList(centroids)
+	}
+}
+
+func BenchmarkTDigest_Merge(b *testing.B) {
+	b.Run("AddCentroid", func(b *testing.B) {
+		var cl tdigest.CentroidList
+		td := tdigest.New()
+		for n := 0; n < b.N; n++ {
+			cl = NormalDigest.Centroids(cl[:0])
+			for i := range cl {
+				td.AddCentroid(cl[i])
+			}
+		}
+	})
+	b.Run("Merge", func(b *testing.B) {
+		td := tdigest.New()
+		for n := 0; n < b.N; n++ {
+			td.Merge(NormalDigest)
+		}
+	})
+}
+
 func BenchmarkTDigest_Quantile(b *testing.B) {
 	td := tdigest.NewWithCompression(1000)
 	for _, x := range NormalData {
@@ -247,3 +455,55 @@ func BenchmarkTDigest_Quantile(b *testing.B) {
 		}
 	}
 }
+
+func TestTdigest_Centroids(t *testing.T) {
+	tests := []struct {
+		name   string
+		data   []float64
+		digest *tdigest.TDigest
+		want   tdigest.CentroidList
+	}{
+		{
+			name: "increasing",
+			data: []float64{1, 2, 3, 4, 5},
+			want: tdigest.CentroidList{
+				tdigest.Centroid{
+					Mean:   1.0,
+					Weight: 1.0,
+				},
+
+				tdigest.Centroid{
+					Mean:   2.5,
+					Weight: 2.0,
+				},
+
+				tdigest.Centroid{
+					Mean:   4.0,
+					Weight: 1.0,
+				},
+
+				tdigest.Centroid{
+					Mean:   5.0,
+					Weight: 1.0,
+				},
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			var got tdigest.CentroidList
+			td := tt.digest
+			if td == nil {
+				td = tdigest.NewWithCompression(3)
+				for _, x := range tt.data {
+					td.Add(x, 1)
+				}
+			}
+			got = td.Centroids(got[:0])
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("unexpected list got %g want %g", got, tt.want)
+			}
+		})
+	}
+}
diff --git a/test/gen.go b/test/gen/main.go
similarity index 88%
rename from test/gen.go
rename to test/gen/main.go
index 8a92f7b..166a2cf 100644
--- a/test/gen.go
+++ b/test/gen/main.go
@@ -1,11 +1,11 @@
 package main
 
 import (
-	"math/rand"
 	"os"
 	"strconv"
 
-	"github.com/gonum/stat/distuv"
+	"golang.org/x/exp/rand"
+	"gonum.org/v1/gonum/stat/distuv"
 )
 
 const (
@@ -20,9 +20,9 @@ func main() {
 	// Generate uniform and normal data
 	uniform := rand.New(rand.NewSource(seed))
 	dist := distuv.Normal{
-		Mu:     Mu,
-		Sigma:  Sigma,
-		Source: rand.New(rand.NewSource(seed)),
+		Mu:    Mu,
+		Sigma: Sigma,
+		Src:   rand.New(rand.NewSource(seed)),
 	}
 
 	uniformData := make([]float64, N)
diff --git a/test/test.sh b/test/test.sh
index 81b54d2..1e07c0d 100755
--- a/test/test.sh
+++ b/test/test.sh
@@ -5,10 +5,10 @@ set -e
 DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 cd "$DIR"
 
-go run gen.go
+go run gen/main.go
 go run main.go
-g++ -o cpp.test main.cpp
+g++ -std=c++11 -o cpp.test main.cpp
 ./cpp.test 2>/dev/null
 rm cpp.test
 
-go run validate.go
+go run validate/main.go
diff --git a/test/validate.go b/test/validate/main.go
similarity index 100%
rename from test/validate.go
rename to test/validate/main.go